hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
aebb8adcd88914fdcbc1609287d82164e8bbd739
28
py
Python
toproxy/__init__.py
uxlsl/toproxy
863be623ef7036fb2195c933f376d5559f01734b
[ "MIT" ]
305
2015-08-22T17:13:52.000Z
2022-03-03T19:58:32.000Z
toproxy/__init__.py
uxlsl/toproxy
863be623ef7036fb2195c933f376d5559f01734b
[ "MIT" ]
8
2017-01-07T13:03:47.000Z
2019-03-12T00:59:21.000Z
toproxy/__init__.py
uxlsl/toproxy
863be623ef7036fb2195c933f376d5559f01734b
[ "MIT" ]
126
2015-01-03T13:03:16.000Z
2021-09-29T01:10:22.000Z
from proxy import run_proxy
14
27
0.857143
5
28
4.6
0.8
0
0
0
0
0
0
0
0
0
0
0
0.142857
28
1
28
28
0.958333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
aef4437b3652e0e9215fcdb470e59b9b3174621c
21
py
Python
__init__.py
timlukins/pylcs
ff54bcd440eb7497fc56dcfbf1b4b8583e927932
[ "MIT" ]
6
2016-04-25T12:45:33.000Z
2020-04-09T18:55:09.000Z
__init__.py
timlukins/pylcs
ff54bcd440eb7497fc56dcfbf1b4b8583e927932
[ "MIT" ]
null
null
null
__init__.py
timlukins/pylcs
ff54bcd440eb7497fc56dcfbf1b4b8583e927932
[ "MIT" ]
3
2016-06-01T15:36:07.000Z
2019-06-13T00:25:48.000Z
from xcs import xcs
10.5
20
0.761905
4
21
4
0.75
0
0
0
0
0
0
0
0
0
0
0
0.238095
21
1
21
21
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
aef516ac9a527c68813b6a9bab8c36217d2f735e
44
py
Python
python/cudf/cudf/bindings/groupby/__init__.py
rajkaramchedu-nvidia/cudf
b06e0ef22c5271697d0533c1bb0355964f51cc41
[ "Apache-2.0" ]
null
null
null
python/cudf/cudf/bindings/groupby/__init__.py
rajkaramchedu-nvidia/cudf
b06e0ef22c5271697d0533c1bb0355964f51cc41
[ "Apache-2.0" ]
1
2020-10-23T17:44:07.000Z
2020-10-23T17:44:07.000Z
python/cudf/cudf/bindings/groupby/__init__.py
rajkaramchedu-nvidia/cudf
b06e0ef22c5271697d0533c1bb0355964f51cc41
[ "Apache-2.0" ]
null
null
null
from cudf.bindings.groupby.groupby import *
22
43
0.818182
6
44
6
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.090909
44
1
44
44
0.9
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
9d7abae0aff53055faff999f2023fd0690899d6e
103
py
Python
mne/export/__init__.py
stevemats/mne-python
47051833f21bb372d60afc3adbf4305648ac7f69
[ "BSD-3-Clause" ]
1,953
2015-01-17T20:33:46.000Z
2022-03-30T04:36:34.000Z
mne/export/__init__.py
LiFeng-SECUC/mne-python
732bb1f994e64e41a8e95dcc10dc98c22cac95c0
[ "BSD-3-Clause" ]
8,490
2015-01-01T13:04:18.000Z
2022-03-31T23:02:08.000Z
mne/export/__init__.py
LiFeng-SECUC/mne-python
732bb1f994e64e41a8e95dcc10dc98c22cac95c0
[ "BSD-3-Clause" ]
1,130
2015-01-08T22:39:27.000Z
2022-03-30T21:44:26.000Z
from ._export import export_raw, export_epochs, export_evokeds from ._egimff import export_evokeds_mff
34.333333
62
0.864078
15
103
5.466667
0.533333
0.292683
0
0
0
0
0
0
0
0
0
0
0.097087
103
2
63
51.5
0.88172
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
9dbd885516c9302c1d2a0b193cc8f65e0ea546d4
5,145
py
Python
tests/test_errors.py
lesssn/sanic_crud
931faaffd2fa46a868a6ae9df2dec0c2c0b0d3b2
[ "MIT" ]
58
2017-02-06T02:03:47.000Z
2021-11-09T15:48:25.000Z
tests/test_errors.py
lesssn/sanic_crud
931faaffd2fa46a868a6ae9df2dec0c2c0b0d3b2
[ "MIT" ]
24
2017-01-29T06:26:54.000Z
2019-01-06T21:17:21.000Z
tests/test_errors.py
lesssn/sanic_crud
931faaffd2fa46a868a6ae9df2dec0c2c0b0d3b2
[ "MIT" ]
14
2017-04-06T20:18:59.000Z
2020-07-14T07:10:31.000Z
from sanic.utils import sanic_endpoint_test import json # ------------------------------------------------------------ # # GET # ------------------------------------------------------------ # def test_get_non_existant_record(app): request, response = sanic_endpoint_test(app, uri='/person/404', method='get') expected_response = {'data': {}, 'status_code': 404, 'message': "Resource with id '404' does not exist"} assert json.loads(response.text) == expected_response # ------------------------------------------------------------ # # POST # ------------------------------------------------------------ # def test_post_invalid_json(app): payload = '{"name": invalid}' headers = {'content-type': 'application/json'} request, response = sanic_endpoint_test(app, data=payload, headers=headers, uri='/person', method='post') expected_response = {'data': None, 'status_code': 400, 'message': 'Invalid JSON input'} assert json.loads(response.text) == expected_response def test_post_invalid_field(app): payload = {'name': 'Knackles the Echidna', 'email': 'gottapunchfeast@punch.com', 'job': 1, 'yee': 1} headers = {'content-type': 'application/json'} request, response = sanic_endpoint_test(app, data=json.dumps(payload), headers=headers, uri='/person', method='post') assert json.loads(response.text).get('status_code') == 400 def test_post_missing_required_field(app): payload = {'email': 'gottapunchfeast@punch.com', 'job': 1} headers = {'content-type': 'application/json'} request, response = sanic_endpoint_test(app, data=json.dumps(payload), headers=headers, uri='/person', method='post') assert json.loads(response.text).get('status_code') == 400 def test_post_int_out_of_range(app): payload = {'name': 'Dictator', 'description': 'Ruler of the world', 'base_pay': 3000000000} headers = {'content-type': 'application/json'} request, response = sanic_endpoint_test(app, data=json.dumps(payload), headers=headers, uri='/job', method='post') expected_response = {'data': None, 'status_code': 400, 
'message': "Invalid range for field 'base_pay', must be between -2147483647 and 2147483647"} assert json.loads(response.text) == expected_response # ------------------------------------------------------------ # # PUT # ------------------------------------------------------------ # def test_put_non_existant_record(app): payload = {'email': 'knacklessucks@fast.com'} headers = {'content-type': 'application/json'} request, response = sanic_endpoint_test(app, data=json.dumps(payload), headers=headers, uri='/person/404', method='put') expected_response = {'data': {}, 'status_code': 404, 'message': "Resource with id '404' does not exist"} assert json.loads(response.text) == expected_response def test_put_invalid_json(app): payload = '{"name": invalid}' headers = {'content-type': 'application/json'} request, response = sanic_endpoint_test(app, data=payload, headers=headers, uri='/person/1', method='put') expected_response = {'data': None, 'status_code': 400, 'message': 'Invalid JSON input'} assert json.loads(response.text) == expected_response def test_put_invalid_field(app): payload = {'name': 'Knackles the Echidna', 'email': 'gottapunchfeast@punch.com', 'job': 1, 'yee': 1} headers = {'content-type': 'application/json'} request, response = sanic_endpoint_test(app, data=json.dumps(payload), headers=headers, uri='/person/1', method='put') assert json.loads(response.text).get('status_code') == 400 def test_put_int_out_of_range(app): payload = {'base_pay': 3000000000} headers = {'content-type': 'application/json'} request, response = sanic_endpoint_test(app, data=json.dumps(payload), headers=headers, uri='/job/1', method='put') expected_response = {'data': None, 'status_code': 400, 'message': "Invalid range for field 'base_pay', must be between -2147483647 and 2147483647"} assert json.loads(response.text) == expected_response # ------------------------------------------------------------ # # DELETE # ------------------------------------------------------------ # def 
test_delete_non_existant_record(app): request, response = sanic_endpoint_test(app, uri='/person/2', method='delete') expected_response = {'data': {}, 'status_code': 404, 'message': "Resource with id '2' does not exist"} assert json.loads(response.text) == expected_response def test_put_invalid_json(app): request, response = sanic_endpoint_test(app, uri='/job/2', method='delete') expected_response = {'data': {}, 'status_code': 404, 'message': "Resource with id '2' does not exist"} assert json.loads(response.text) == expected_response
42.520661
124
0.578814
548
5,145
5.259124
0.145985
0.088827
0.070784
0.10687
0.912561
0.909438
0.88272
0.88272
0.866065
0.866065
0
0.028281
0.195918
5,145
121
125
42.520661
0.668359
0.10068
0
0.68
0
0
0.253799
0.021059
0
0
0
0
0.146667
1
0.146667
false
0
0.026667
0
0.173333
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
9dbe6dce9e47c3a7bb6263894a4354678efb865b
8,619
py
Python
src/stim/simulators/matched_error_pybind_test.py
noajshu/Stim
503de420b1e56e90d7f44337ead1065a2ae26740
[ "Apache-2.0" ]
null
null
null
src/stim/simulators/matched_error_pybind_test.py
noajshu/Stim
503de420b1e56e90d7f44337ead1065a2ae26740
[ "Apache-2.0" ]
null
null
null
src/stim/simulators/matched_error_pybind_test.py
noajshu/Stim
503de420b1e56e90d7f44337ead1065a2ae26740
[ "Apache-2.0" ]
null
null
null
import stim def test_CircuitErrorLocationStackFrame(): v1 = stim.CircuitErrorLocationStackFrame( instruction_offset=1, iteration_index=2, instruction_repetitions_arg=3, ) assert v1.instruction_offset == 1 assert v1.iteration_index == 2 assert v1.instruction_repetitions_arg == 3 v2 = stim.CircuitErrorLocationStackFrame( instruction_offset=2, iteration_index=3, instruction_repetitions_arg=5, ) assert v1 != v2 assert v1 == v1 assert len({v1, v1, v2}) == 2 # Check hashable. assert eval(repr(v1), {"stim": stim}) == v1 assert eval(repr(v2), {"stim": stim}) == v2 assert str(v1) == repr(v1) def test_GateTargetWithCoords(): v1 = stim.GateTargetWithCoords( gate_target=stim.target_x(5), coords=[1, 2, 3], ) assert v1.gate_target == stim.GateTarget(stim.target_x(5)) assert v1.coords == [1, 2, 3] v2 = stim.GateTargetWithCoords( gate_target=stim.GateTarget(4), coords=[1, 2], ) assert v1 != v2 assert v1 == v1 assert len({v1, v1, v2}) == 2 # Check hashable. assert eval(repr(v1), {"stim": stim}) == v1 assert eval(repr(v2), {"stim": stim}) == v2 assert str(v1) == 'X5[coords 1,2,3]' def test_DemTargetWithCoords(): v1 = stim.DemTargetWithCoords( dem_target=stim.DemTarget.relative_detector_id(5), coords=[1, 2, 3], ) assert v1.dem_target == stim.DemTarget.relative_detector_id(5) assert v1.coords == [1, 2, 3] v2 = stim.DemTargetWithCoords( dem_target=stim.DemTarget.logical_observable_id(3), coords=(), ) assert v1 != v2 assert v1 == v1 assert len({v1, v1, v2}) == 2 # Check hashable. 
assert eval(repr(v1), {"stim": stim}) == v1 assert eval(repr(v2), {"stim": stim}) == v2 assert str(v1) == 'D5[coords 1,2,3]' def test_FlippedMeasurement(): v1 = stim.FlippedMeasurement( record_index=5, observable=[ stim.GateTargetWithCoords( gate_target=stim.target_x(5), coords=[1, 2, 3]), ], ) assert v1.record_index == 5 assert v1.observable == [ stim.GateTargetWithCoords( gate_target=stim.target_x(5), coords=[1, 2, 3]), ] v2 = stim.FlippedMeasurement( record_index=5, observable=[], ) assert v1 != v2 assert v1 == v1 assert len({v1, v1, v2}) == 2 # Check hashable. assert eval(repr(v1), {"stim": stim}) == v1 assert eval(repr(v2), {"stim": stim}) == v2 assert str(v1) == repr(v1) def test_CircuitTargetsInsideInstruction(): v1 = stim.CircuitTargetsInsideInstruction( gate="X_ERROR", args=[0.25], target_range_start=2, target_range_end=5, targets_in_range=[ stim.GateTargetWithCoords(gate_target=5, coords=[1, 2]), stim.GateTargetWithCoords(gate_target=6, coords=[1, 3]), stim.GateTargetWithCoords(gate_target=7, coords=[]), ], ) assert v1.gate == "X_ERROR" assert v1.args == [0.25] assert v1.target_range_start == 2 assert v1.target_range_end == 5 assert v1.targets_in_range == [ stim.GateTargetWithCoords(gate_target=5, coords=[1, 2]), stim.GateTargetWithCoords(gate_target=6, coords=[1, 3]), stim.GateTargetWithCoords(gate_target=7, coords=[]), ] v2 = stim.CircuitTargetsInsideInstruction( gate="Z_ERROR", args=[0.125], target_range_start=3, target_range_end=3, targets_in_range=[], ) assert v1 != v2 assert v1 == v1 assert len({v1, v1, v2}) == 2 # Check hashable. 
assert eval(repr(v1), {"stim": stim}) == v1 assert eval(repr(v2), {"stim": stim}) == v2 assert str(v1) == "X_ERROR(0.25) 5[coords 1,2] 6[coords 1,3] 7" def test_CircuitErrorLocation(): m = stim.FlippedMeasurement( record_index=5, observable=[ stim.GateTargetWithCoords( gate_target=stim.target_x(5), coords=[1, 2, 3]), ], ) p = [ stim.GateTargetWithCoords( gate_target=stim.target_y(6), coords=[1, 2, 3]), ] t = stim.CircuitTargetsInsideInstruction( gate="X_ERROR", args=[0.25], target_range_start=2, target_range_end=5, targets_in_range=[ stim.GateTargetWithCoords(gate_target=5, coords=[1, 2]), stim.GateTargetWithCoords(gate_target=6, coords=[1, 3]), stim.GateTargetWithCoords(gate_target=7, coords=[]), ], ) s = [ stim.CircuitErrorLocationStackFrame( instruction_offset=1, iteration_index=2, instruction_repetitions_arg=3, ) ] * 2 v1 = stim.CircuitErrorLocation( tick_offset=5, flipped_pauli_product=p, flipped_measurement=m, instruction_targets=t, stack_frames=s, ) assert v1.tick_offset == 5 assert v1.flipped_pauli_product == p assert v1.flipped_measurement == m assert v1.instruction_targets == t assert v1.stack_frames == s v2 = stim.CircuitErrorLocation( tick_offset=5, flipped_pauli_product=[], flipped_measurement=None, instruction_targets=t, stack_frames=[], ) assert v2.flipped_measurement is None assert v1 != v2 assert v1 == v1 assert len({v1, v1, v2}) == 2 # Check hashable. 
assert eval(repr(v1), {"stim": stim}) == v1 assert eval(repr(v2), {"stim": stim}) == v2 assert str(v1) == """CircuitErrorLocation { flipped_pauli_product: Y6[coords 1,2,3] flipped_measurement.measurement_record_index: 5 flipped_measurement.measured_observable: X5[coords 1,2,3] Circuit location stack trace: (after 5 TICKs) at instruction #2 (a REPEAT 3 block) in the circuit after 2 completed iterations at instruction #2 (X_ERROR) in the REPEAT block at targets #3 to #5 of the instruction resolving to X_ERROR(0.25) 5[coords 1,2] 6[coords 1,3] 7 }""" def test_MatchedError(): m = stim.FlippedMeasurement( record_index=5, observable=[ stim.GateTargetWithCoords( gate_target=stim.target_x(5), coords=[1, 2, 3]), ], ) p = [ stim.GateTargetWithCoords( gate_target=stim.target_y(6), coords=[1, 2, 3]), ] t = stim.CircuitTargetsInsideInstruction( gate="X_ERROR", args=[0.25], target_range_start=2, target_range_end=5, targets_in_range=[ stim.GateTargetWithCoords(gate_target=5, coords=[1, 2]), stim.GateTargetWithCoords(gate_target=6, coords=[1, 3]), stim.GateTargetWithCoords(gate_target=7, coords=[]), ], ) s = [ stim.CircuitErrorLocationStackFrame( instruction_offset=1, iteration_index=2, instruction_repetitions_arg=3, ) ] * 2 e = stim.CircuitErrorLocation( tick_offset=5, flipped_pauli_product=p, flipped_measurement=m, instruction_targets=t, stack_frames=s, ) v1 = stim.ExplainedError( dem_error_terms=[stim.DemTargetWithCoords( dem_target=stim.DemTarget.relative_detector_id(5), coords=[1, 2, 3], )], circuit_error_locations=[e], ) assert v1.dem_error_terms == [stim.DemTargetWithCoords( dem_target=stim.DemTarget.relative_detector_id(5), coords=[1, 2, 3], )] assert v1.circuit_error_locations == [e] v2 = stim.ExplainedError( dem_error_terms=[], circuit_error_locations=[], ) assert v1 != v2 assert v1 == v1 assert len({v1, v1, v2}) == 2 # Check hashable. 
assert eval(repr(v1), {"stim": stim}) == v1 assert eval(repr(v2), {"stim": stim}) == v2 assert str(v1) == """ExplainedError { dem_error_terms: D5[coords 1,2,3] CircuitErrorLocation { flipped_pauli_product: Y6[coords 1,2,3] flipped_measurement.measurement_record_index: 5 flipped_measurement.measured_observable: X5[coords 1,2,3] Circuit location stack trace: (after 5 TICKs) at instruction #2 (a REPEAT 3 block) in the circuit after 2 completed iterations at instruction #2 (X_ERROR) in the REPEAT block at targets #3 to #5 of the instruction resolving to X_ERROR(0.25) 5[coords 1,2] 6[coords 1,3] 7 } }"""
31.456204
68
0.596589
1,033
8,619
4.814134
0.090997
0.056304
0.043435
0.136738
0.814599
0.787452
0.763523
0.763523
0.744621
0.734969
0
0.054006
0.280311
8,619
273
69
31.571429
0.747703
0.012879
0
0.620155
0
0.011628
0.150706
0.025176
0
0
0
0
0.248062
1
0.027132
false
0
0.003876
0
0.031008
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
9de2914b258c54cfb91d0122aac790a836c72d9a
34,388
py
Python
daemon/tests/test_gui.py
rudyeila/core
1c11c6c573eccc666492f25091043ec923a79f09
[ "BSD-2-Clause" ]
null
null
null
daemon/tests/test_gui.py
rudyeila/core
1c11c6c573eccc666492f25091043ec923a79f09
[ "BSD-2-Clause" ]
null
null
null
daemon/tests/test_gui.py
rudyeila/core
1c11c6c573eccc666492f25091043ec923a79f09
[ "BSD-2-Clause" ]
null
null
null
""" Tests for testing tlv message handling. """ import os import time import mock import pytest from core import CoreError from core.api.tlv import coreapi from core.emane.ieee80211abg import EmaneIeee80211abgModel from core.emulator.enumerations import ( ConfigFlags, ConfigTlvs, EventTlvs, EventTypes, ExecuteTlvs, FileTlvs, LinkTlvs, MessageFlags, NodeTlvs, NodeTypes, RegisterTlvs, SessionTlvs, ) from core.location.mobility import BasicRangeModel from core.nodes.ipaddress import Ipv4Prefix def dict_to_str(values): return "|".join("%s=%s" % (x, values[x]) for x in values) class TestGui: @pytest.mark.parametrize( "node_type, model", [ (NodeTypes.DEFAULT, "PC"), (NodeTypes.EMANE, None), (NodeTypes.HUB, None), (NodeTypes.SWITCH, None), (NodeTypes.WIRELESS_LAN, None), (NodeTypes.TUNNEL, None), (NodeTypes.RJ45, None), ], ) def test_node_add(self, coreserver, node_type, model): node_id = 1 message = coreapi.CoreNodeMessage.create( MessageFlags.ADD.value, [ (NodeTlvs.NUMBER, node_id), (NodeTlvs.TYPE, node_type.value), (NodeTlvs.NAME, "n1"), (NodeTlvs.X_POSITION, 0), (NodeTlvs.Y_POSITION, 0), (NodeTlvs.MODEL, model), ], ) coreserver.request_handler.handle_message(message) assert coreserver.session.get_node(node_id) is not None def test_node_update(self, coreserver): node_id = 1 coreserver.session.add_node(_id=node_id) x = 50 y = 100 message = coreapi.CoreNodeMessage.create( 0, [ (NodeTlvs.NUMBER, node_id), (NodeTlvs.X_POSITION, x), (NodeTlvs.Y_POSITION, y), ], ) coreserver.request_handler.handle_message(message) node = coreserver.session.get_node(node_id) assert node is not None assert node.position.x == x assert node.position.y == y def test_node_delete(self, coreserver): node_id = 1 coreserver.session.add_node(_id=node_id) message = coreapi.CoreNodeMessage.create( MessageFlags.DELETE.value, [(NodeTlvs.NUMBER, node_id)] ) coreserver.request_handler.handle_message(message) with pytest.raises(CoreError): coreserver.session.get_node(node_id) def 
test_link_add_node_to_net(self, coreserver): node_one = 1 coreserver.session.add_node(_id=node_one) switch = 2 coreserver.session.add_node(_id=switch, _type=NodeTypes.SWITCH) ip_prefix = Ipv4Prefix("10.0.0.0/24") interface_one = ip_prefix.addr(node_one) message = coreapi.CoreLinkMessage.create( MessageFlags.ADD.value, [ (LinkTlvs.N1_NUMBER, node_one), (LinkTlvs.N2_NUMBER, switch), (LinkTlvs.INTERFACE1_NUMBER, 0), (LinkTlvs.INTERFACE1_IP4, interface_one), (LinkTlvs.INTERFACE1_IP4_MASK, 24), ], ) coreserver.request_handler.handle_message(message) switch_node = coreserver.session.get_node(switch) all_links = switch_node.all_link_data(0) assert len(all_links) == 1 def test_link_add_net_to_node(self, coreserver): node_one = 1 coreserver.session.add_node(_id=node_one) switch = 2 coreserver.session.add_node(_id=switch, _type=NodeTypes.SWITCH) ip_prefix = Ipv4Prefix("10.0.0.0/24") interface_one = ip_prefix.addr(node_one) message = coreapi.CoreLinkMessage.create( MessageFlags.ADD.value, [ (LinkTlvs.N1_NUMBER, switch), (LinkTlvs.N2_NUMBER, node_one), (LinkTlvs.INTERFACE2_NUMBER, 0), (LinkTlvs.INTERFACE2_IP4, interface_one), (LinkTlvs.INTERFACE2_IP4_MASK, 24), ], ) coreserver.request_handler.handle_message(message) switch_node = coreserver.session.get_node(switch) all_links = switch_node.all_link_data(0) assert len(all_links) == 1 def test_link_add_node_to_node(self, coreserver): node_one = 1 coreserver.session.add_node(_id=node_one) node_two = 2 coreserver.session.add_node(_id=node_two) ip_prefix = Ipv4Prefix("10.0.0.0/24") interface_one = ip_prefix.addr(node_one) interface_two = ip_prefix.addr(node_two) message = coreapi.CoreLinkMessage.create( MessageFlags.ADD.value, [ (LinkTlvs.N1_NUMBER, node_one), (LinkTlvs.N2_NUMBER, node_two), (LinkTlvs.INTERFACE1_NUMBER, 0), (LinkTlvs.INTERFACE1_IP4, interface_one), (LinkTlvs.INTERFACE1_IP4_MASK, 24), (LinkTlvs.INTERFACE2_NUMBER, 0), (LinkTlvs.INTERFACE2_IP4, interface_two), (LinkTlvs.INTERFACE2_IP4_MASK, 24), ], ) 
coreserver.request_handler.handle_message(message) all_links = [] for node_id in coreserver.session.nodes: node = coreserver.session.nodes[node_id] all_links += node.all_link_data(0) assert len(all_links) == 1 def test_link_update(self, coreserver): node_one = 1 coreserver.session.add_node(_id=node_one) switch = 2 coreserver.session.add_node(_id=switch, _type=NodeTypes.SWITCH) ip_prefix = Ipv4Prefix("10.0.0.0/24") interface_one = ip_prefix.addr(node_one) message = coreapi.CoreLinkMessage.create( MessageFlags.ADD.value, [ (LinkTlvs.N1_NUMBER, node_one), (LinkTlvs.N2_NUMBER, switch), (LinkTlvs.INTERFACE1_NUMBER, 0), (LinkTlvs.INTERFACE1_IP4, interface_one), (LinkTlvs.INTERFACE1_IP4_MASK, 24), ], ) coreserver.request_handler.handle_message(message) switch_node = coreserver.session.get_node(switch) all_links = switch_node.all_link_data(0) assert len(all_links) == 1 link = all_links[0] assert link.bandwidth is None bandwidth = 50000 message = coreapi.CoreLinkMessage.create( 0, [ (LinkTlvs.N1_NUMBER, node_one), (LinkTlvs.N2_NUMBER, switch), (LinkTlvs.INTERFACE1_NUMBER, 0), (LinkTlvs.BANDWIDTH, bandwidth), ], ) coreserver.request_handler.handle_message(message) switch_node = coreserver.session.get_node(switch) all_links = switch_node.all_link_data(0) assert len(all_links) == 1 link = all_links[0] assert link.bandwidth == bandwidth def test_link_delete_node_to_node(self, coreserver): node_one = 1 coreserver.session.add_node(_id=node_one) node_two = 2 coreserver.session.add_node(_id=node_two) ip_prefix = Ipv4Prefix("10.0.0.0/24") interface_one = ip_prefix.addr(node_one) interface_two = ip_prefix.addr(node_two) message = coreapi.CoreLinkMessage.create( MessageFlags.ADD.value, [ (LinkTlvs.N1_NUMBER, node_one), (LinkTlvs.N2_NUMBER, node_two), (LinkTlvs.INTERFACE1_NUMBER, 0), (LinkTlvs.INTERFACE1_IP4, interface_one), (LinkTlvs.INTERFACE1_IP4_MASK, 24), (LinkTlvs.INTERFACE2_IP4, interface_two), (LinkTlvs.INTERFACE2_IP4_MASK, 24), ], ) 
coreserver.request_handler.handle_message(message) all_links = [] for node_id in coreserver.session.nodes: node = coreserver.session.nodes[node_id] all_links += node.all_link_data(0) assert len(all_links) == 1 message = coreapi.CoreLinkMessage.create( MessageFlags.DELETE.value, [ (LinkTlvs.N1_NUMBER, node_one), (LinkTlvs.N2_NUMBER, node_two), (LinkTlvs.INTERFACE1_NUMBER, 0), (LinkTlvs.INTERFACE2_NUMBER, 0), ], ) coreserver.request_handler.handle_message(message) all_links = [] for node_id in coreserver.session.nodes: node = coreserver.session.nodes[node_id] all_links += node.all_link_data(0) assert len(all_links) == 0 def test_link_delete_node_to_net(self, coreserver): node_one = 1 coreserver.session.add_node(_id=node_one) switch = 2 coreserver.session.add_node(_id=switch, _type=NodeTypes.SWITCH) ip_prefix = Ipv4Prefix("10.0.0.0/24") interface_one = ip_prefix.addr(node_one) message = coreapi.CoreLinkMessage.create( MessageFlags.ADD.value, [ (LinkTlvs.N1_NUMBER, node_one), (LinkTlvs.N2_NUMBER, switch), (LinkTlvs.INTERFACE1_NUMBER, 0), (LinkTlvs.INTERFACE1_IP4, interface_one), (LinkTlvs.INTERFACE1_IP4_MASK, 24), ], ) coreserver.request_handler.handle_message(message) switch_node = coreserver.session.get_node(switch) all_links = switch_node.all_link_data(0) assert len(all_links) == 1 message = coreapi.CoreLinkMessage.create( MessageFlags.DELETE.value, [ (LinkTlvs.N1_NUMBER, node_one), (LinkTlvs.N2_NUMBER, switch), (LinkTlvs.INTERFACE1_NUMBER, 0), ], ) coreserver.request_handler.handle_message(message) switch_node = coreserver.session.get_node(switch) all_links = switch_node.all_link_data(0) assert len(all_links) == 0 def test_link_delete_net_to_node(self, coreserver): node_one = 1 coreserver.session.add_node(_id=node_one) switch = 2 coreserver.session.add_node(_id=switch, _type=NodeTypes.SWITCH) ip_prefix = Ipv4Prefix("10.0.0.0/24") interface_one = ip_prefix.addr(node_one) message = coreapi.CoreLinkMessage.create( MessageFlags.ADD.value, [ (LinkTlvs.N1_NUMBER, 
node_one), (LinkTlvs.N2_NUMBER, switch), (LinkTlvs.INTERFACE1_NUMBER, 0), (LinkTlvs.INTERFACE1_IP4, interface_one), (LinkTlvs.INTERFACE1_IP4_MASK, 24), ], ) coreserver.request_handler.handle_message(message) switch_node = coreserver.session.get_node(switch) all_links = switch_node.all_link_data(0) assert len(all_links) == 1 message = coreapi.CoreLinkMessage.create( MessageFlags.DELETE.value, [ (LinkTlvs.N1_NUMBER, switch), (LinkTlvs.N2_NUMBER, node_one), (LinkTlvs.INTERFACE2_NUMBER, 0), ], ) coreserver.request_handler.handle_message(message) switch_node = coreserver.session.get_node(switch) all_links = switch_node.all_link_data(0) assert len(all_links) == 0 def test_session_update(self, coreserver): session_id = coreserver.session.id name = "test" message = coreapi.CoreSessionMessage.create( 0, [(SessionTlvs.NUMBER, str(session_id)), (SessionTlvs.NAME, name)] ) coreserver.request_handler.handle_message(message) assert coreserver.session.name == name def test_session_query(self, coreserver): coreserver.request_handler.dispatch_replies = mock.MagicMock() message = coreapi.CoreSessionMessage.create(MessageFlags.STRING.value, []) coreserver.request_handler.handle_message(message) args, _ = coreserver.request_handler.dispatch_replies.call_args replies = args[0] assert len(replies) == 1 def test_session_join(self, coreserver): coreserver.request_handler.dispatch_replies = mock.MagicMock() session_id = coreserver.session.id message = coreapi.CoreSessionMessage.create( MessageFlags.ADD.value, [(SessionTlvs.NUMBER, str(session_id))] ) coreserver.request_handler.handle_message(message) assert coreserver.request_handler.session.id == session_id def test_session_delete(self, coreserver): assert len(coreserver.server.coreemu.sessions) == 1 session_id = coreserver.session.id message = coreapi.CoreSessionMessage.create( MessageFlags.DELETE.value, [(SessionTlvs.NUMBER, str(session_id))] ) coreserver.request_handler.handle_message(message) assert 
len(coreserver.server.coreemu.sessions) == 0 def test_file_hook_add(self, coreserver): state = EventTypes.DATACOLLECT_STATE.value assert coreserver.session._hooks.get(state) is None file_name = "test.sh" file_data = "echo hello" message = coreapi.CoreFileMessage.create( MessageFlags.ADD.value, [ (FileTlvs.TYPE, "hook:%s" % state), (FileTlvs.NAME, file_name), (FileTlvs.DATA, file_data), ], ) coreserver.request_handler.handle_message(message) hooks = coreserver.session._hooks.get(state) assert len(hooks) == 1 name, data = hooks[0] assert file_name == name assert file_data == data def test_file_service_file_set(self, coreserver): node = coreserver.session.add_node() service = "DefaultRoute" file_name = "defaultroute.sh" file_data = "echo hello" message = coreapi.CoreFileMessage.create( MessageFlags.ADD.value, [ (FileTlvs.NODE, node.id), (FileTlvs.TYPE, "service:%s" % service), (FileTlvs.NAME, file_name), (FileTlvs.DATA, file_data), ], ) coreserver.request_handler.handle_message(message) service_file = coreserver.session.services.get_service_file( node, service, file_name ) assert file_data == service_file.data def test_file_node_file_copy(self, coreserver): file_name = "/var/log/test/node.log" node = coreserver.session.add_node() node.makenodedir() file_data = "echo hello" message = coreapi.CoreFileMessage.create( MessageFlags.ADD.value, [ (FileTlvs.NODE, node.id), (FileTlvs.NAME, file_name), (FileTlvs.DATA, file_data), ], ) coreserver.request_handler.handle_message(message) directory, basename = os.path.split(file_name) created_directory = directory[1:].replace("/", ".") create_path = os.path.join(node.nodedir, created_directory, basename) assert os.path.exists(create_path) def test_exec_node_tty(self, coreserver): coreserver.request_handler.dispatch_replies = mock.MagicMock() node = coreserver.session.add_node() node.startup() message = coreapi.CoreExecMessage.create( MessageFlags.TTY.value, [ (ExecuteTlvs.NODE, node.id), (ExecuteTlvs.NUMBER, 1), 
(ExecuteTlvs.COMMAND, "bash"), ], ) coreserver.request_handler.handle_message(message) args, _ = coreserver.request_handler.dispatch_replies.call_args replies = args[0] assert len(replies) == 1 def test_exec_local_command(self, coreserver): coreserver.request_handler.dispatch_replies = mock.MagicMock() node = coreserver.session.add_node() node.startup() message = coreapi.CoreExecMessage.create( MessageFlags.TEXT.value | MessageFlags.LOCAL.value, [ (ExecuteTlvs.NODE, node.id), (ExecuteTlvs.NUMBER, 1), (ExecuteTlvs.COMMAND, "echo hello"), ], ) coreserver.request_handler.handle_message(message) args, _ = coreserver.request_handler.dispatch_replies.call_args replies = args[0] assert len(replies) == 1 def test_exec_node_command(self, coreserver): coreserver.request_handler.dispatch_replies = mock.MagicMock() node = coreserver.session.add_node() node.startup() message = coreapi.CoreExecMessage.create( MessageFlags.TEXT.value, [ (ExecuteTlvs.NODE, node.id), (ExecuteTlvs.NUMBER, 1), (ExecuteTlvs.COMMAND, "echo hello"), ], ) coreserver.request_handler.handle_message(message) args, _ = coreserver.request_handler.dispatch_replies.call_args replies = args[0] assert len(replies) == 1 @pytest.mark.parametrize( "state", [ EventTypes.SHUTDOWN_STATE, EventTypes.RUNTIME_STATE, EventTypes.DATACOLLECT_STATE, EventTypes.CONFIGURATION_STATE, EventTypes.DEFINITION_STATE, ], ) def test_event_state(self, coreserver, state): message = coreapi.CoreEventMessage.create(0, [(EventTlvs.TYPE, state.value)]) coreserver.request_handler.handle_message(message) assert coreserver.session.state == state.value def test_event_schedule(self, coreserver): coreserver.session.add_event = mock.MagicMock() node = coreserver.session.add_node() message = coreapi.CoreEventMessage.create( MessageFlags.ADD.value, [ (EventTlvs.TYPE, EventTypes.SCHEDULED.value), (EventTlvs.TIME, str(time.time() + 100)), (EventTlvs.NODE, node.id), (EventTlvs.NAME, "event"), (EventTlvs.DATA, "data"), ], ) 
coreserver.request_handler.handle_message(message) coreserver.session.add_event.assert_called_once() def test_event_save_xml(self, coreserver, tmpdir): xml_file = tmpdir.join("session.xml") file_path = xml_file.strpath coreserver.session.add_node() message = coreapi.CoreEventMessage.create( 0, [(EventTlvs.TYPE, EventTypes.FILE_SAVE.value), (EventTlvs.NAME, file_path)], ) coreserver.request_handler.handle_message(message) assert os.path.exists(file_path) def test_event_open_xml(self, coreserver, tmpdir): xml_file = tmpdir.join("session.xml") file_path = xml_file.strpath node = coreserver.session.add_node() coreserver.session.save_xml(file_path) coreserver.session.delete_node(node.id) message = coreapi.CoreEventMessage.create( 0, [(EventTlvs.TYPE, EventTypes.FILE_OPEN.value), (EventTlvs.NAME, file_path)], ) coreserver.request_handler.handle_message(message) assert coreserver.session.get_node(node.id) @pytest.mark.parametrize( "state", [ EventTypes.START, EventTypes.STOP, EventTypes.RESTART, EventTypes.PAUSE, EventTypes.RECONFIGURE, ], ) def test_event_service(self, coreserver, state): coreserver.session.broadcast_event = mock.MagicMock() node = coreserver.session.add_node() node.startup() message = coreapi.CoreEventMessage.create( 0, [ (EventTlvs.TYPE, state.value), (EventTlvs.NODE, node.id), (EventTlvs.NAME, "service:DefaultRoute"), ], ) coreserver.request_handler.handle_message(message) coreserver.session.broadcast_event.assert_called_once() @pytest.mark.parametrize( "state", [ EventTypes.START, EventTypes.STOP, EventTypes.RESTART, EventTypes.PAUSE, EventTypes.RECONFIGURE, ], ) def test_event_mobility(self, coreserver, state): message = coreapi.CoreEventMessage.create( 0, [(EventTlvs.TYPE, state.value), (EventTlvs.NAME, "mobility:ns2script")] ) coreserver.request_handler.handle_message(message) def test_register_gui(self, coreserver): coreserver.request_handler.master = False message = coreapi.CoreRegMessage.create(0, [(RegisterTlvs.GUI, "gui")]) 
coreserver.request_handler.handle_message(message) assert coreserver.request_handler.master is True def test_register_xml(self, coreserver, tmpdir): xml_file = tmpdir.join("session.xml") file_path = xml_file.strpath node = coreserver.session.add_node() coreserver.session.save_xml(file_path) coreserver.session.delete_node(node.id) message = coreapi.CoreRegMessage.create( 0, [(RegisterTlvs.EXECUTE_SERVER, file_path)] ) coreserver.session.instantiate() coreserver.request_handler.handle_message(message) assert coreserver.server.coreemu.sessions[2].get_node(node.id) def test_register_python(self, coreserver, tmpdir): xml_file = tmpdir.join("test.py") file_path = xml_file.strpath with open(file_path, "w") as f: f.write("coreemu = globals()['coreemu']\n") f.write("session = coreemu.sessions[1]\n") f.write("session.add_node()\n") message = coreapi.CoreRegMessage.create( 0, [(RegisterTlvs.EXECUTE_SERVER, file_path)] ) coreserver.session.instantiate() coreserver.request_handler.handle_message(message) assert len(coreserver.session.nodes) == 1 def test_config_all(self, coreserver): node = coreserver.session.add_node() message = coreapi.CoreConfMessage.create( MessageFlags.ADD.value, [ (ConfigTlvs.OBJECT, "all"), (ConfigTlvs.NODE, node.id), (ConfigTlvs.TYPE, ConfigFlags.RESET.value), ], ) coreserver.session.location.reset = mock.MagicMock() coreserver.request_handler.handle_message(message) coreserver.session.location.reset.assert_called_once() def test_config_options_request(self, coreserver): message = coreapi.CoreConfMessage.create( 0, [ (ConfigTlvs.OBJECT, "session"), (ConfigTlvs.TYPE, ConfigFlags.REQUEST.value), ], ) coreserver.request_handler.handle_broadcast_config = mock.MagicMock() coreserver.request_handler.handle_message(message) coreserver.request_handler.handle_broadcast_config.assert_called_once() def test_config_options_update(self, coreserver): test_key = "test" test_value = "test" values = {test_key: test_value} message = coreapi.CoreConfMessage.create( 0, [ 
(ConfigTlvs.OBJECT, "session"), (ConfigTlvs.TYPE, ConfigFlags.UPDATE.value), (ConfigTlvs.VALUES, dict_to_str(values)), ], ) coreserver.request_handler.handle_message(message) assert coreserver.session.options.get_config(test_key) == test_value def test_config_location_reset(self, coreserver): message = coreapi.CoreConfMessage.create( 0, [ (ConfigTlvs.OBJECT, "location"), (ConfigTlvs.TYPE, ConfigFlags.RESET.value), ], ) coreserver.session.location.refxyz = (10, 10, 10) coreserver.request_handler.handle_message(message) assert coreserver.session.location.refxyz == (0, 0, 0) def test_config_location_update(self, coreserver): message = coreapi.CoreConfMessage.create( 0, [ (ConfigTlvs.OBJECT, "location"), (ConfigTlvs.TYPE, ConfigFlags.UPDATE.value), (ConfigTlvs.VALUES, "10|10|70|50|0|0.5"), ], ) coreserver.request_handler.handle_message(message) assert coreserver.session.location.refxyz == (10, 10, 0.0) assert coreserver.session.location.refgeo == (70, 50, 0) assert coreserver.session.location.refscale == 0.5 def test_config_metadata_request(self, coreserver): message = coreapi.CoreConfMessage.create( 0, [ (ConfigTlvs.OBJECT, "metadata"), (ConfigTlvs.TYPE, ConfigFlags.REQUEST.value), ], ) coreserver.request_handler.handle_broadcast_config = mock.MagicMock() coreserver.request_handler.handle_message(message) coreserver.request_handler.handle_broadcast_config.assert_called_once() def test_config_metadata_update(self, coreserver): test_key = "test" test_value = "test" values = {test_key: test_value} message = coreapi.CoreConfMessage.create( 0, [ (ConfigTlvs.OBJECT, "metadata"), (ConfigTlvs.TYPE, ConfigFlags.UPDATE.value), (ConfigTlvs.VALUES, dict_to_str(values)), ], ) coreserver.request_handler.handle_message(message) assert coreserver.session.metadata.get_config(test_key) == test_value def test_config_broker_request(self, coreserver): server = "test" host = "10.0.0.1" port = 50000 message = coreapi.CoreConfMessage.create( 0, [ (ConfigTlvs.OBJECT, "broker"), 
(ConfigTlvs.TYPE, ConfigFlags.UPDATE.value), (ConfigTlvs.VALUES, "%s:%s:%s" % (server, host, port)), ], ) coreserver.session.broker.addserver = mock.MagicMock() coreserver.session.broker.setupserver = mock.MagicMock() coreserver.request_handler.handle_message(message) coreserver.session.broker.addserver.assert_called_once_with(server, host, port) coreserver.session.broker.setupserver.assert_called_once_with(server) def test_config_services_request_all(self, coreserver): message = coreapi.CoreConfMessage.create( 0, [ (ConfigTlvs.OBJECT, "services"), (ConfigTlvs.TYPE, ConfigFlags.REQUEST.value), ], ) coreserver.request_handler.handle_broadcast_config = mock.MagicMock() coreserver.request_handler.handle_message(message) coreserver.request_handler.handle_broadcast_config.assert_called_once() def test_config_services_request_specific(self, coreserver): node = coreserver.session.add_node() message = coreapi.CoreConfMessage.create( 0, [ (ConfigTlvs.NODE, node.id), (ConfigTlvs.OBJECT, "services"), (ConfigTlvs.TYPE, ConfigFlags.REQUEST.value), (ConfigTlvs.OPAQUE, "service:DefaultRoute"), ], ) coreserver.request_handler.handle_broadcast_config = mock.MagicMock() coreserver.request_handler.handle_message(message) coreserver.request_handler.handle_broadcast_config.assert_called_once() def test_config_services_request_specific_file(self, coreserver): node = coreserver.session.add_node() message = coreapi.CoreConfMessage.create( 0, [ (ConfigTlvs.NODE, node.id), (ConfigTlvs.OBJECT, "services"), (ConfigTlvs.TYPE, ConfigFlags.REQUEST.value), (ConfigTlvs.OPAQUE, "service:DefaultRoute:defaultroute.sh"), ], ) coreserver.session.broadcast_file = mock.MagicMock() coreserver.request_handler.handle_message(message) coreserver.session.broadcast_file.assert_called_once() def test_config_services_reset(self, coreserver): node = coreserver.session.add_node() service = "DefaultRoute" coreserver.session.services.set_service(node.id, service) message = coreapi.CoreConfMessage.create( 0, [ 
(ConfigTlvs.OBJECT, "services"), (ConfigTlvs.TYPE, ConfigFlags.RESET.value), ], ) assert coreserver.session.services.get_service(node.id, service) is not None coreserver.request_handler.handle_message(message) assert coreserver.session.services.get_service(node.id, service) is None def test_config_services_set(self, coreserver): node = coreserver.session.add_node() service = "DefaultRoute" values = {"meta": "metadata"} message = coreapi.CoreConfMessage.create( 0, [ (ConfigTlvs.NODE, node.id), (ConfigTlvs.OBJECT, "services"), (ConfigTlvs.TYPE, ConfigFlags.UPDATE.value), (ConfigTlvs.OPAQUE, "service:%s" % service), (ConfigTlvs.VALUES, dict_to_str(values)), ], ) assert coreserver.session.services.get_service(node.id, service) is None coreserver.request_handler.handle_message(message) assert coreserver.session.services.get_service(node.id, service) is not None def test_config_mobility_reset(self, coreserver): wlan = coreserver.session.add_node(_type=NodeTypes.WIRELESS_LAN) message = coreapi.CoreConfMessage.create( 0, [ (ConfigTlvs.OBJECT, "MobilityManager"), (ConfigTlvs.TYPE, ConfigFlags.RESET.value), ], ) coreserver.session.mobility.set_model_config(wlan.id, BasicRangeModel.name, {}) assert len(coreserver.session.mobility.node_configurations) == 1 coreserver.request_handler.handle_message(message) assert len(coreserver.session.mobility.node_configurations) == 0 def test_config_mobility_model_request(self, coreserver): wlan = coreserver.session.add_node(_type=NodeTypes.WIRELESS_LAN) message = coreapi.CoreConfMessage.create( 0, [ (ConfigTlvs.NODE, wlan.id), (ConfigTlvs.OBJECT, BasicRangeModel.name), (ConfigTlvs.TYPE, ConfigFlags.REQUEST.value), ], ) coreserver.request_handler.handle_broadcast_config = mock.MagicMock() coreserver.request_handler.handle_message(message) coreserver.request_handler.handle_broadcast_config.assert_called_once() def test_config_mobility_model_update(self, coreserver): wlan = coreserver.session.add_node(_type=NodeTypes.WIRELESS_LAN) config_key 
= "range" config_value = "1000" values = {config_key: config_value} message = coreapi.CoreConfMessage.create( 0, [ (ConfigTlvs.NODE, wlan.id), (ConfigTlvs.OBJECT, BasicRangeModel.name), (ConfigTlvs.TYPE, ConfigFlags.UPDATE.value), (ConfigTlvs.VALUES, dict_to_str(values)), ], ) coreserver.request_handler.handle_message(message) config = coreserver.session.mobility.get_model_config( wlan.id, BasicRangeModel.name ) assert config[config_key] == config_value def test_config_emane_model_request(self, coreserver): wlan = coreserver.session.add_node(_type=NodeTypes.WIRELESS_LAN) message = coreapi.CoreConfMessage.create( 0, [ (ConfigTlvs.NODE, wlan.id), (ConfigTlvs.OBJECT, EmaneIeee80211abgModel.name), (ConfigTlvs.TYPE, ConfigFlags.REQUEST.value), ], ) coreserver.request_handler.handle_broadcast_config = mock.MagicMock() coreserver.request_handler.handle_message(message) coreserver.request_handler.handle_broadcast_config.assert_called_once() def test_config_emane_model_update(self, coreserver): wlan = coreserver.session.add_node(_type=NodeTypes.WIRELESS_LAN) config_key = "distance" config_value = "50051" values = {config_key: config_value} message = coreapi.CoreConfMessage.create( 0, [ (ConfigTlvs.NODE, wlan.id), (ConfigTlvs.OBJECT, EmaneIeee80211abgModel.name), (ConfigTlvs.TYPE, ConfigFlags.UPDATE.value), (ConfigTlvs.VALUES, dict_to_str(values)), ], ) coreserver.request_handler.handle_message(message) config = coreserver.session.emane.get_model_config( wlan.id, EmaneIeee80211abgModel.name ) assert config[config_key] == config_value def test_config_emane_request(self, coreserver): message = coreapi.CoreConfMessage.create( 0, [ (ConfigTlvs.OBJECT, "emane"), (ConfigTlvs.TYPE, ConfigFlags.REQUEST.value), ], ) coreserver.request_handler.handle_broadcast_config = mock.MagicMock() coreserver.request_handler.handle_message(message) coreserver.request_handler.handle_broadcast_config.assert_called_once() def test_config_emane_update(self, coreserver): config_key = 
"eventservicedevice" config_value = "eth4" values = {config_key: config_value} message = coreapi.CoreConfMessage.create( 0, [ (ConfigTlvs.OBJECT, "emane"), (ConfigTlvs.TYPE, ConfigFlags.UPDATE.value), (ConfigTlvs.VALUES, dict_to_str(values)), ], ) coreserver.request_handler.handle_message(message) config = coreserver.session.emane.get_configs() assert config[config_key] == config_value
35.01833
88
0.609719
3,440
34,388
5.8625
0.06657
0.084296
0.094015
0.099668
0.841176
0.804334
0.775326
0.750186
0.734616
0.703724
0
0.01431
0.292806
34,388
981
89
35.054027
0.81496
0.001134
0
0.619105
0
0
0.021665
0.002941
0
0
0
0
0.078597
1
0.060459
false
0
0.012092
0.001209
0.07497
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
d17bab00bf9b8438e723185dec61867293f6abf2
42,874
py
Python
code/annotation/evaluation_plots.py
VitaAmbroz/360Tracking
882ac910726896bfe2e3dd70b62cebca25e6fcbe
[ "MIT" ]
2
2021-05-20T09:56:32.000Z
2021-08-02T11:26:01.000Z
code/annotation/evaluation_plots.py
VitaAmbroz/360Tracking
882ac910726896bfe2e3dd70b62cebca25e6fcbe
[ "MIT" ]
null
null
null
code/annotation/evaluation_plots.py
VitaAmbroz/360Tracking
882ac910726896bfe2e3dd70b62cebca25e6fcbe
[ "MIT" ]
1
2022-01-17T04:28:58.000Z
2022-01-17T04:28:58.000Z
#################################################################################################
# Visual object tracking in panoramic video
# Master thesis at Brno University of Technology - Faculty of Information Technology
# Author: Vít Ambrož (xambro15@stud.fit.vutbr.cz)
# Supervisor: Doc. Ing. Martin Čadík, Ph.D.
# Module: evaluation_plots.py
# Description: Drawing success, precision, variance plots
#################################################################################################
# This source code has been inspired by:
# https://github.com/visionml/pytracking/blob/master/pytracking/analysis/plot_results.py
# --------------------------------------------------------
# pytracking (https://github.com/visionml/pytracking)
# Licensed under GPL-3.0 License
# Copyright Martin Danelljan, Goutam Bhat
# --------------------------------------------------------
#################################################################################################

import sys
import glob
import os

# import tikzplotlib
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import torch


class EvaluationPlots:
    """Drawing success, precision and variance plots.

    Success plots measure the fraction of frames whose IoU exceeds an overlap
    threshold; precision plots measure the fraction of frames whose center
    error stays within a pixel threshold.  Results are read from per-tracker /
    per-sequence text files (one float per line) and rendered to .pdf files.

    NOTE(review): unlike the original implementation, the path-template
    attributes (PATH_IOU_*, PATH_SUCCESS_PLOT, ...) are no longer overwritten
    with resolved paths, so each create* method can be called repeatedly with
    different trackers/sequences.
    """

    def __init__(self):
        # paths for IoU files (templates; <TRACKER>/<NUMBER> filled per call)
        self.PATH_IOU_DEFAULT = "annotation/results/<TRACKER>/<NUMBER>/<NUMBER>-result-default-iou.txt"
        self.PATH_IOU_BORDER = "annotation/results/<TRACKER>/<NUMBER>/<NUMBER>-result-border-iou.txt"
        self.PATH_IOU_NFOV = "annotation/results/<TRACKER>/<NUMBER>/<NUMBER>-result-nfov-iou.txt"
        # paths for center error files
        self.PATH_CENTER_ERROR_DEFAULT = "annotation/results/<TRACKER>/<NUMBER>/<NUMBER>-result-default-centererror.txt"
        self.PATH_CENTER_ERROR_BORDER = "annotation/results/<TRACKER>/<NUMBER>/<NUMBER>-result-border-centererror.txt"
        self.PATH_CENTER_ERROR_NFOV = "annotation/results/<TRACKER>/<NUMBER>/<NUMBER>-result-nfov-centererror.txt"

        # constants of trackers names
        self.TRACKERS = ["ECO", "ATOM", "DiMP", "KYS", "DaSiamRPN", "Ocean",
                         "SiamDW", "CSRT", "MEDIANFLOW", "KCF", "MIL", "TLD"]
        # constant of whole dataset with total 21 sequences
        self.DATASET = ["01", "02", "03", "04", "05", "06", "07", "08", "09", "10", "11",
                        "12", "13", "14", "15", "16", "17", "18", "19", "20", "21"]
        # constant of whole dataset with total 13 sequences where object crosses equirectangular border
        self.DATASET_CROSSING_BORDER = ["01", "02", "03", "04", "08", "11", "12",
                                        "13", "14", "15", "16", "18", "21"]
        # constant of whole dataset with total 8 sequences where object does not crosses equirectangular border
        self.DATASET_NOT_CROSSING_BORDER = ["05", "06", "07", "09", "10", "17", "19", "20"]

        # paths for result plots in .pdf (templates)
        self.PATH_SUCCESS_PLOT = "annotation/results/<TRACKER>/<NUMBER>/<NUMBER>-success-plot"
        self.PATH_SUCCESS_PLOT_ALLSEQUENCES = "annotation/results/total-success/<TRACKER>-success-plot"
        self.PATH_SUCCESS_PLOT_ALLSEQUENCES_VAR = "annotation/results/total-success/<TRACKER>-success-plot-variance"
        self.PATH_SUCCESS_PLOT_ALLTRACKERS = "annotation/results/total-success/all-trackers-success-plot"
        self.PATH_SUCCESS_PLOT_ALLTRACKERS_SEQ = "annotation/results/total-success/all-trackers/<NUMBER>-trackers-success-plot"

        self.PATH_PRECISION_PLOT = "annotation/results/<TRACKER>/<NUMBER>/<NUMBER>-precision-plot"
        self.PATH_PRECISION_PLOT_ALLSEQUENCES = "annotation/results/total-precision/<TRACKER>-precision-plot"
        self.PATH_PRECISION_PLOT_ALLSEQUENCES_VAR = "annotation/results/total-precision/<TRACKER>-precision-plot-variance"
        self.PATH_PRECISION_PLOT_ALLTRACKERS = "annotation/results/total-precision/all-trackers-precision-plot"
        self.PATH_PRECISION_PLOT_ALLTRACKERS_SEQ = "annotation/results/total-precision/all-trackers/<NUMBER>-trackers-precision-plot"

    @staticmethod
    def _fillTemplate(template, tracker=None, seq_number=None):
        """Fill the <TRACKER> and/or <NUMBER> placeholders of a path template."""
        if tracker is not None:
            template = template.replace("<TRACKER>", tracker)
        if seq_number is not None:
            template = template.replace("<NUMBER>", seq_number)
        return template

    def _parseGivenDataFile(self, path):
        """Parse one float per line from the given file and return them as a list.

        The file handle is closed deterministically (the original leaked it).
        """
        with open(path, 'r') as dataFile:
            return [float(line) for line in dataFile]

    @staticmethod
    def _thresholdRates(values, thresholds, success):
        """Return, for every threshold, the fraction of frames passing it.

        success=True  -> value >  threshold (IoU / success plot semantics)
        success=False -> value <= threshold (center error / precision semantics)
        Raises ZeroDivisionError for an empty value list (as the original did).
        """
        data = torch.Tensor(values)
        if success:
            hits = data.view(-1, 1) > thresholds.view(1, -1)
        else:
            hits = data.view(-1, 1) <= thresholds.view(1, -1)
        return hits.sum(0).float() / len(values)

    def _datasetRates(self, templates, tracker, dataset, thresholds, success):
        """Compute per-sequence threshold rates for the DEFAULT/BORDER/NFOV file trio.

        Returns a tensor of shape (len(dataset), 3, thresholds.numel()).
        """
        rates = torch.zeros((len(dataset), len(templates), thresholds.numel()), dtype=torch.float32)
        for i, seq in enumerate(dataset):
            for row, template in enumerate(templates):
                data = self._parseGivenDataFile(self._fillTemplate(template, tracker, seq))
                rates[i, row, :] = self._thresholdRates(data, thresholds, success)
        return rates

    def _selectSuccessSource(self, default, border, nfov):
        """Map the default/border/nfov flags to (IoU path template, save suffix, title suffix)."""
        if default:
            return self.PATH_IOU_DEFAULT, "-default", " - DEFAULT"
        if border:
            return self.PATH_IOU_BORDER, "-border", " - BORDER"
        if nfov:
            return self.PATH_IOU_NFOV, "-nfov", " - NFOV"
        return "", "", ""

    def _selectPrecisionSource(self, default, border, nfov):
        """Map the default/border/nfov flags to (center-error path template, save suffix, title suffix)."""
        if default:
            return self.PATH_CENTER_ERROR_DEFAULT, "-default", " - DEFAULT"
        if border:
            return self.PATH_CENTER_ERROR_BORDER, "-border", " - BORDER"
        if nfov:
            return self.PATH_CENTER_ERROR_NFOV, "-nfov", " - NFOV"
        return "", "", ""

    def _getPlotDrawStyles(self):
        """Gets colors and line styles for drawed plot lines."""
        plot_draw_style = [{'color': (1.0, 0.0, 0.0), 'line_style': '-'},
                           {'color': (0.0, 1.0, 0.0), 'line_style': '-'},
                           {'color': (0.0, 0.0, 1.0), 'line_style': '-'},
                           {'color': (0.0, 0.0, 0.0), 'line_style': '-'},
                           {'color': (1.0, 0.0, 1.0), 'line_style': '-'},
                           {'color': (0.0, 1.0, 1.0), 'line_style': '-'},
                           {'color': (0.5, 0.5, 0.5), 'line_style': '-'},
                           {'color': (136.0 / 255.0, 0.0, 21.0 / 255.0), 'line_style': '--'},
                           {'color': (1.0, 127.0 / 255.0, 39.0 / 255.0), 'line_style': '--'},
                           {'color': (0.0, 162.0 / 255.0, 232.0 / 255.0), 'line_style': '--'},
                           {'color': (0.0, 0.5, 0.0), 'line_style': '--'},
                           {'color': (0.2, 0.1, 0.7), 'line_style': '--'},
                           {'color': (0.4, 0.7, 0.1), 'line_style': '--'},
                           {'color': (0.1, 0.4, 0.0), 'line_style': '--'},
                           {'color': (1.0, 0.5, 0.2), 'line_style': '--'},
                           {'color': (0.6, 0.3, 0.9), 'line_style': '--'},
                           {'color': (0.7, 0.6, 0.2), 'line_style': '--'}]
        return plot_draw_style

    def _plotDrawSave(self, y, x, scores, trackers, plot_draw_styles, plot_opts, save_path):
        """Draws and save given success or precision plot settings.

        y           -- curves, one row per tracker/variant (tensor)
        x           -- common x axis values (tensor)
        scores      -- one summary score per row, shown in the legend (tensor)
        trackers    -- legend names, parallel to the rows of y
        plot_opts   -- dict of style options; missing optional keys fall back to defaults
        save_path   -- output path without extension; ".pdf" is appended
        """
        # plot settings (optional keys have defaults, mandatory keys raise KeyError)
        font_size = plot_opts.get('font_size', 12)
        font_size_axis = plot_opts.get('font_size_axis', 12)
        line_width = plot_opts.get('line_width', 2)
        font_size_legend = plot_opts.get('font_size_legend', 12)
        bbox_to_anchor = plot_opts.get('bbox_to_anchor', None)
        ncol = plot_opts.get('ncol', 1)
        xticks = plot_opts.get('xticks', None)
        xticks_string = plot_opts.get('xticks_string', None)
        yticks = plot_opts.get('yticks', None)

        matplotlib.rcParams.update({'font.size': font_size})
        matplotlib.rcParams.update({'axes.titlesize': font_size_axis})
        matplotlib.rcParams.update({'axes.titleweight': 'black'})
        matplotlib.rcParams.update({'axes.labelsize': font_size_axis})

        fig, ax = plt.subplots()

        # draw lines sorted by score so the best curve is plotted last (on top)
        index_sort = scores.argsort(descending=False)
        plotted_lines = []
        legend_text = []
        for _, id_sort in enumerate(index_sort):
            line = ax.plot(x.tolist(), y[id_sort, :].tolist(),
                           linewidth=line_width,
                           color=plot_draw_styles[id_sort.item()]['color'],
                           linestyle=plot_draw_styles[id_sort.item()]['line_style'])
            plotted_lines.append(line[0])
            legend_text.append('{} [{:.1f}]'.format(trackers[id_sort], scores[id_sort]))

        # legend reversed so the best tracker appears first
        ax.legend(plotted_lines[::-1], legend_text[::-1], loc=plot_opts['legend_loc'],
                  bbox_to_anchor=bbox_to_anchor, ncol=ncol, fancybox=False,
                  edgecolor='black', fontsize=font_size_legend, framealpha=1.0)
        ax.set(xlabel=plot_opts['xlabel'], ylabel=plot_opts['ylabel'],
               xlim=plot_opts['xlim'], ylim=plot_opts['ylim'], title=plot_opts['title'])
        ax.grid(True, linestyle='-.')
        fig.tight_layout()

        # hard define ticks
        if xticks and xticks_string and yticks:
            plt.xticks(xticks, xticks_string)
            plt.yticks(yticks)

        # save pdf
        pdf_path = save_path + ".pdf"
        fig.savefig(pdf_path, dpi=300, format='pdf', transparent=True)
        plt.draw()
        print("File " + pdf_path + " has been created.")
        # plt.show()

    def _plotVarianceDrawSave(self, x, mean_curve, min_curve, max_curve, scores,
                              tracker_names, plot_opts, save_path):
        """Draw mean curve plus dashed min/max envelope with a shaded variance band and save as pdf."""
        font_size = 13
        font_size_axis = 13
        font_size_label = 14
        font_size_legend = 13
        line_width = 3
        bbox_to_anchor = None

        matplotlib.rcParams.update({'font.size': font_size})
        matplotlib.rcParams.update({'axes.titlesize': font_size_axis})
        matplotlib.rcParams.update({'axes.titleweight': 'black'})
        matplotlib.rcParams.update({'axes.labelsize': font_size_label})

        fig, ax = plt.subplots()
        index_sort = scores.argsort(descending=False)
        plotted_lines = []
        legend_text = []
        plot_draw_styles = self._getPlotDrawStyles()

        # draw lines and background
        for _, id_sort in enumerate(index_sort):
            legend_text.append('{} [{:.1f}]'.format(tracker_names[id_sort], scores[id_sort]))
            color = plot_draw_styles[id_sort.item()]['color']
            # draw mean line
            line = ax.plot(x.tolist(), mean_curve[id_sort, :].tolist(),
                           linewidth=line_width, color=color, linestyle='-')
            plotted_lines.append(line[0])
            # draw min and max lines
            ax.plot(x.tolist(), max_curve[id_sort, :].tolist(), linewidth=1, color=color, linestyle='--')
            ax.plot(x.tolist(), min_curve[id_sort, :].tolist(), linewidth=1, color=color, linestyle='--')
            # show light background as variance
            ax.fill_between(x.tolist(), min_curve[id_sort, :].tolist(),
                            max_curve[id_sort, :].tolist(), color=color, alpha=0.08)

        ax.legend(plotted_lines[::-1], legend_text[::-1], loc=plot_opts['legend_loc'],
                  bbox_to_anchor=bbox_to_anchor, fancybox=False, edgecolor='black',
                  fontsize=font_size_legend, framealpha=1.0)
        ax.set(xlabel=plot_opts['xlabel'], ylabel=plot_opts['ylabel'],
               xlim=plot_opts['xlim'], ylim=plot_opts['ylim'], title=plot_opts['title'])
        ax.grid(True, linestyle='-.')
        fig.tight_layout()

        # hard define ticks (success variance uses string labels, precision does not)
        if plot_opts.get('xticks_string'):
            plt.xticks(plot_opts['xticks'], plot_opts['xticks_string'])
        else:
            plt.xticks(plot_opts['xticks'])
        plt.yticks(plot_opts['yticks'])

        pdf_path = save_path + ".pdf"
        fig.savefig(pdf_path, dpi=300, format='pdf', transparent=True)
        plt.draw()
        print("File " + pdf_path + " has been created.")
        # plt.show()

    ################################################################################
    ############################### Success plots ##################################
    ################################################################################

    def createSuccessPlot(self, tracker: str, seq_number: str):
        """Draws and saves success plot for given tracker and sequence."""
        path_default = self._fillTemplate(self.PATH_IOU_DEFAULT, tracker, seq_number)
        path_border = self._fillTemplate(self.PATH_IOU_BORDER, tracker, seq_number)
        path_nfov = self._fillTemplate(self.PATH_IOU_NFOV, tracker, seq_number)

        iou_default = self._parseGivenDataFile(path_default)
        iou_border = self._parseGivenDataFile(path_border)
        iou_nfov = self._parseGivenDataFile(path_nfov)

        if iou_default and iou_border and iou_nfov:
            plot_bin_gap = 0.05
            threshold_set_overlap = torch.arange(0.0, 1.0 + plot_bin_gap, plot_bin_gap, dtype=torch.float64)
            ave_success_rate_plot_overlap = torch.zeros((3, threshold_set_overlap.numel()), dtype=torch.float32)

            # success computing for the three result variants
            for row, data in enumerate((iou_default, iou_border, iou_nfov)):
                ave_success_rate_plot_overlap[row] = self._thresholdRates(data, threshold_set_overlap, success=True)

            auc_curve = ave_success_rate_plot_overlap * 100.0
            # compute AUC (area under curve for 3 results/IoU)
            auc = torch.Tensor([auc_curve[i].mean(-1).item() for i in range(3)])

            success_plot_opts = {
                'plot_type': 'success',
                'legend_loc': 'lower left',
                'xlabel': 'Overlap threshold',
                'ylabel': 'Overlap Precision [%]',
                'xlim': (0, 1.0), 'ylim': (0, 100),
                'title': 'Success plot - ' + tracker,
                'font_size_legend': 11,
                'xticks': [0.0, 0.2, 0.4, 0.6, 0.8, 1.0],
                'xticks_string': ['0', '0.2', '0.4', '0.6', '0.8', '1.0'],
                'yticks': [20, 40, 60, 80, 100]
            }
            # tracker(modified) names of lines in plot
            tracker_names = [tracker + "-DEFAULT", tracker + "-BORDER", tracker + "-NFOV"]
            save_path = self._fillTemplate(self.PATH_SUCCESS_PLOT, tracker, seq_number)
            self._plotDrawSave(auc_curve, threshold_set_overlap, auc, tracker_names,
                               self._getPlotDrawStyles(), success_plot_opts, save_path)

    def createSuccessPlotAllSequences(self, tracker: str):
        """Draws and saves success plot for given tracker and all 01-21 video sequences in dataset."""
        plot_bin_gap = 0.05
        threshold_set_overlap = torch.arange(0.0, 1.0 + plot_bin_gap, plot_bin_gap, dtype=torch.float64)

        rates = self._datasetRates((self.PATH_IOU_DEFAULT, self.PATH_IOU_BORDER, self.PATH_IOU_NFOV),
                                   tracker, self.DATASET, threshold_set_overlap, success=True)

        # auc_curve as mean over all sequences
        auc_curve = rates.mean(0) * 100.0
        auc = auc_curve.mean(-1)

        success_plot_opts = {
            'plot_type': 'success',
            'legend_loc': 'lower left',
            'xlabel': 'Overlap threshold',
            'ylabel': 'Overlap Precision [%]',
            'xlim': (0, 1.0), 'ylim': (0, 100),
            'title': 'Success plot - ' + tracker,
            'font_size_legend': 16, 'font_size_axis': 16, 'font_size': 16,
            'xticks': [0.0, 0.2, 0.4, 0.6, 0.8, 1.0],
            'xticks_string': ['0', '0.2', '0.4', '0.6', '0.8', '1.0'],
            'yticks': [20, 40, 60, 80, 100]
        }
        # tracker(modified) names of lines in plot
        tracker_names = [tracker + "-DEFAULT", tracker + "-BORDER", tracker + "-NFOV"]
        save_path = self._fillTemplate(self.PATH_SUCCESS_PLOT_ALLSEQUENCES, tracker)
        self._plotDrawSave(auc_curve, threshold_set_overlap, auc, tracker_names,
                           self._getPlotDrawStyles(), success_plot_opts, save_path)

    def createSuccessPlotAllTrackersSequence(self, seq_number: str, default=False, border=False, nfov=False):
        """Draws and saves success plot for all trackers (default/border/nfov) and for given video sequence only."""
        plot_bin_gap = 0.05
        threshold_set_overlap = torch.arange(0.0, 1.0 + plot_bin_gap, plot_bin_gap, dtype=torch.float64)
        ave_success_rate_plot_overlap = torch.zeros((len(self.TRACKERS), threshold_set_overlap.numel()),
                                                    dtype=torch.float32)

        path, suffix, title_suffix = self._selectSuccessSource(default, border, nfov)
        save_path = self._fillTemplate(self.PATH_SUCCESS_PLOT_ALLTRACKERS_SEQ, seq_number=seq_number) + suffix
        title = "Success plot" + title_suffix

        for i, current_tracker in enumerate(self.TRACKERS):
            # load and parse data, then compute success rates
            iou = self._parseGivenDataFile(self._fillTemplate(path, current_tracker, seq_number))
            ave_success_rate_plot_overlap[i, :] = self._thresholdRates(iou, threshold_set_overlap, success=True)

        auc_curve = ave_success_rate_plot_overlap * 100.0
        # compute AUC (area under curve) per tracker
        auc = torch.Tensor([auc_curve[i].mean(-1).item() for i in range(len(self.TRACKERS))])

        success_plot_opts = {
            'plot_type': 'success',
            'legend_loc': 'upper right',
            'xlabel': 'Overlap threshold',
            'ylabel': 'Overlap Precision [%]',
            'xlim': (0, 1.0), 'ylim': (0, 100),
            'title': title + " (Sequence " + seq_number + ")",
            'font_size_legend': 10,
            'bbox_to_anchor': (1.25, 1.0),
            'xticks': [0.0, 0.2, 0.4, 0.6, 0.8, 1.0],
            'xticks_string': ['0', '0.2', '0.4', '0.6', '0.8', '1.0'],
            'yticks': [20, 40, 60, 80, 100]
        }
        self._plotDrawSave(auc_curve, threshold_set_overlap, auc, self.TRACKERS,
                           self._getPlotDrawStyles(), success_plot_opts, save_path)

    def createSuccessPlotAllTrackers(self, default=False, border=False, nfov=False,
                                     onlyBorderCrossing=False, onlyNotBorderCrossing=False):
        """Draws and saves success plot for all trackers (default/border/nfov) and all 01-21 video sequences in dataset."""
        dataset = self.DATASET
        save_path = self.PATH_SUCCESS_PLOT_ALLTRACKERS
        if onlyBorderCrossing:
            dataset = self.DATASET_CROSSING_BORDER
            save_path = self.PATH_SUCCESS_PLOT_ALLTRACKERS + "-crossing"
        if onlyNotBorderCrossing:
            dataset = self.DATASET_NOT_CROSSING_BORDER
            save_path = self.PATH_SUCCESS_PLOT_ALLTRACKERS + "-not-crossing"

        plot_bin_gap = 0.05
        threshold_set_overlap = torch.arange(0.0, 1.0 + plot_bin_gap, plot_bin_gap, dtype=torch.float64)
        ave_success_rate_plot_overlap = torch.zeros((len(dataset), len(self.TRACKERS),
                                                     threshold_set_overlap.numel()), dtype=torch.float32)

        path, suffix, title_suffix = self._selectSuccessSource(default, border, nfov)
        save_path += suffix
        title = "Success plot" + title_suffix

        for i, seq in enumerate(dataset):
            for j, current_tracker in enumerate(self.TRACKERS):
                # load and parse data, then compute success rates
                iou = self._parseGivenDataFile(self._fillTemplate(path, current_tracker, seq))
                ave_success_rate_plot_overlap[i, j, :] = self._thresholdRates(iou, threshold_set_overlap, success=True)

        # auc_curve as mean over all sequences
        auc_curve = ave_success_rate_plot_overlap.mean(0) * 100.0
        auc = auc_curve.mean(-1)

        success_plot_opts = {
            'plot_type': 'success',
            'legend_loc': 'upper right',
            'xlabel': 'Overlap threshold',
            'ylabel': 'Overlap Precision [%]',
            'xlim': (0, 1.0), 'ylim': (0, 100),
            'title': title,
            'font_size_legend': 10,
            'bbox_to_anchor': (1.25, 1.0),
            'xticks': [0.0, 0.2, 0.4, 0.6, 0.8, 1.0],
            'xticks_string': ['0', '0.2', '0.4', '0.6', '0.8', '1.0'],
            'yticks': [20, 40, 60, 80, 100]
        }
        self._plotDrawSave(auc_curve, threshold_set_overlap, auc, self.TRACKERS,
                           self._getPlotDrawStyles(), success_plot_opts, save_path)

    def createSuccessPlotAllSequencesVariance(self, tracker: str):
        """Draws and saves success plot for given tracker and all 01-21 video sequences in dataset with variance min and max."""
        plot_bin_gap = 0.05
        threshold_set_overlap = torch.arange(0, 1.0 + plot_bin_gap, plot_bin_gap, dtype=torch.float64)

        rates = self._datasetRates((self.PATH_IOU_DEFAULT, self.PATH_IOU_BORDER, self.PATH_IOU_NFOV),
                                   tracker, self.DATASET, threshold_set_overlap, success=True)

        # mean / max / min over sequences define the variance envelope
        auc_curve = rates.mean(0) * 100.0
        auc = auc_curve.mean(-1)
        max_curve = rates.max(0).values * 100.0
        min_curve = rates.min(0).values * 100.0

        plot_opts = {
            'legend_loc': 'upper right',
            'xlabel': 'Overlap threshold',
            'ylabel': 'Overlap Precision [%]',
            'xlim': (0, 1.0), 'ylim': (0, 100),
            'xticks': [0.0, 0.2, 0.4, 0.6, 0.8, 1.0],
            'xticks_string': ['0', '0.2', '0.4', '0.6', '0.8', '1.0'],
            'yticks': [20, 40, 60, 80, 100],
            'title': 'Success plot - ' + tracker
        }
        save_path = self._fillTemplate(self.PATH_SUCCESS_PLOT_ALLSEQUENCES_VAR, tracker)
        self._plotVarianceDrawSave(threshold_set_overlap, auc_curve, min_curve, max_curve,
                                   auc, ["DEFAULT", "BORDER", "NFOV"], plot_opts, save_path)

    ################################################################################
    ############################### Precision plots ################################
    ################################################################################

    def createPrecisionPlot(self, tracker: str, seq_number: str):
        """Draws and saves precision plot for given tracker and sequence."""
        path_default = self._fillTemplate(self.PATH_CENTER_ERROR_DEFAULT, tracker, seq_number)
        path_border = self._fillTemplate(self.PATH_CENTER_ERROR_BORDER, tracker, seq_number)
        path_nfov = self._fillTemplate(self.PATH_CENTER_ERROR_NFOV, tracker, seq_number)

        cerror_default = self._parseGivenDataFile(path_default)
        cerror_border = self._parseGivenDataFile(path_border)
        cerror_nfov = self._parseGivenDataFile(path_nfov)

        if cerror_default and cerror_border and cerror_nfov:
            threshold_set_center = torch.arange(0, 51, dtype=torch.float64)
            ave_success_rate_plot_center = torch.zeros((3, threshold_set_center.numel()), dtype=torch.float32)

            # location error threshold computing for the three result variants
            for row, data in enumerate((cerror_default, cerror_border, cerror_nfov)):
                ave_success_rate_plot_center[row] = self._thresholdRates(data, threshold_set_center, success=False)

            prec_curve = ave_success_rate_plot_center * 100.0
            # score should be counted for max 20 pixel error
            prec_score = prec_curve[:, 20]

            precision_plot_opts = {
                'plot_type': 'precision',
                'legend_loc': 'lower right',
                'xlabel': 'Location error threshold [pixels]',
                'ylabel': 'Distance Precision [%]',
                'xlim': (0, 50), 'ylim': (0, 100),
                'title': 'Precision plot - ' + tracker,
                'font_size_legend': 11,
                'xticks': [0, 10, 20, 30, 40, 50],
                'xticks_string': ['0', '10', '20', '30', '40', '50'],
                'yticks': [20, 40, 60, 80, 100]
            }
            # tracker(modified) names of lines in plot
            tracker_names = [tracker + "-DEFAULT", tracker + "-BORDER", tracker + "-NFOV"]
            save_path = self._fillTemplate(self.PATH_PRECISION_PLOT, tracker, seq_number)
            self._plotDrawSave(prec_curve, threshold_set_center, prec_score, tracker_names,
                               self._getPlotDrawStyles(), precision_plot_opts, save_path)

    def createPrecisionPlotAllSequences(self, tracker: str):
        """Draws and saves precision plot for given tracker and all 01-21 video sequences in dataset."""
        threshold_set_center = torch.arange(0, 51, dtype=torch.float64)

        rates = self._datasetRates((self.PATH_CENTER_ERROR_DEFAULT, self.PATH_CENTER_ERROR_BORDER,
                                    self.PATH_CENTER_ERROR_NFOV),
                                   tracker, self.DATASET, threshold_set_center, success=False)

        prec_curve = rates.mean(0) * 100.0
        # score should be counted for max 20 pixel error
        prec_score = prec_curve[:, 20]

        precision_plot_opts = {
            'plot_type': 'precision',
            'legend_loc': 'lower right',
            'xlabel': 'Location error threshold [pixels]',
            'ylabel': 'Distance Precision [%]',
            'xlim': (0, 50), 'ylim': (0, 100),
            'title': 'Precision plot - ' + tracker,
            'font_size_legend': 16, 'font_size_axis': 16, 'font_size': 16,
            'xticks': [0, 10, 20, 30, 40, 50],
            'xticks_string': ['0', '10', '20', '30', '40', '50'],
            'yticks': [20, 40, 60, 80, 100]
        }
        # tracker(modified) names of lines in plot
        tracker_names = [tracker + "-DEFAULT", tracker + "-BORDER", tracker + "-NFOV"]
        save_path = self._fillTemplate(self.PATH_PRECISION_PLOT_ALLSEQUENCES, tracker)
        self._plotDrawSave(prec_curve, threshold_set_center, prec_score, tracker_names,
                           self._getPlotDrawStyles(), precision_plot_opts, save_path)

    def createPrecisionPlotAllTrackersSequence(self, seq_number: str, default=False, border=False, nfov=False):
        """Draws and saves precision plot for all trackers (default/border/nfov) and for given video sequence only."""
        threshold_set_center = torch.arange(0, 51, dtype=torch.float64)
        ave_success_rate_plot_center = torch.zeros((len(self.TRACKERS), threshold_set_center.numel()),
                                                   dtype=torch.float32)

        path, suffix, title_suffix = self._selectPrecisionSource(default, border, nfov)
        save_path = self._fillTemplate(self.PATH_PRECISION_PLOT_ALLTRACKERS_SEQ, seq_number=seq_number) + suffix
        title = "Precision plot" + title_suffix

        for i, current_tracker in enumerate(self.TRACKERS):
            # load and parse data, then compute precision rates
            cerror = self._parseGivenDataFile(self._fillTemplate(path, current_tracker, seq_number))
            ave_success_rate_plot_center[i, :] = self._thresholdRates(cerror, threshold_set_center, success=False)

        prec_curve = ave_success_rate_plot_center * 100.0
        # score should be counted for max 20 pixel error
        prec_score = prec_curve[:, 20]

        precision_plot_opts = {
            'plot_type': 'precision',
            'legend_loc': 'lower right',
            'xlabel': 'Location error threshold [pixels]',
            'ylabel': 'Distance Precision [%]',
            'xlim': (0, 50), 'ylim': (0, 100),
            'title': title + " (Sequence " + seq_number + ")",
            'font_size_legend': 11,
            'xticks': [0, 10, 20, 30, 40, 50],
            'xticks_string': ['0', '10', '20', '30', '40', '50'],
            'yticks': [20, 40, 60, 80, 100]
        }
        self._plotDrawSave(prec_curve, threshold_set_center, prec_score, self.TRACKERS,
                           self._getPlotDrawStyles(), precision_plot_opts, save_path)

    def createPrecisionPlotAllTrackers(self, default=False, border=False, nfov=False,
                                       onlyBorderCrossing=False, onlyNotBorderCrossing=False):
        """Draws and saves precision plot for all trackers (default/border/nfov) and all 01-21 video sequences in dataset."""
        dataset = self.DATASET
        save_path = self.PATH_PRECISION_PLOT_ALLTRACKERS
        if onlyBorderCrossing:
            dataset = self.DATASET_CROSSING_BORDER
            save_path = save_path + "-crossing"
        if onlyNotBorderCrossing:
            dataset = self.DATASET_NOT_CROSSING_BORDER
            save_path = save_path + "-not-crossing"

        threshold_set_center = torch.arange(0, 51, dtype=torch.float64)
        ave_success_rate_plot_center = torch.zeros((len(dataset), len(self.TRACKERS),
                                                    threshold_set_center.numel()), dtype=torch.float32)

        path, suffix, title_suffix = self._selectPrecisionSource(default, border, nfov)
        save_path += suffix
        title = "Precision plot" + title_suffix

        for i, seq in enumerate(dataset):
            for j, current_tracker in enumerate(self.TRACKERS):
                # load and parse data, then compute precision rates
                cerror = self._parseGivenDataFile(self._fillTemplate(path, current_tracker, seq))
                ave_success_rate_plot_center[i, j, :] = self._thresholdRates(cerror, threshold_set_center, success=False)

        prec_curve = ave_success_rate_plot_center.mean(0) * 100.0
        # score should be counted for max 20 pixel error
        prec_score = prec_curve[:, 20]

        precision_plot_opts = {
            'plot_type': 'precision',
            'legend_loc': 'lower right',
            'xlabel': 'Location error threshold [pixels]',
            'ylabel': 'Distance Precision [%]',
            'xlim': (0, 50), 'ylim': (0, 100),
            'title': title,
            'font_size_legend': 11,
            'xticks': [0, 10, 20, 30, 40, 50],
            'xticks_string': ['0', '10', '20', '30', '40', '50'],
            'yticks': [20, 40, 60, 80, 100]
        }
        self._plotDrawSave(prec_curve, threshold_set_center, prec_score, self.TRACKERS,
                           self._getPlotDrawStyles(), precision_plot_opts, save_path)

    def createPrecisionPlotAllSequencesVariance(self, tracker: str):
        """Draws and saves precision plot for given tracker and all 01-21 video sequences in dataset with variance min and max."""
        threshold_set_center = torch.arange(0, 51, dtype=torch.float64)

        rates = self._datasetRates((self.PATH_CENTER_ERROR_DEFAULT, self.PATH_CENTER_ERROR_BORDER,
                                    self.PATH_CENTER_ERROR_NFOV),
                                   tracker, self.DATASET, threshold_set_center, success=False)

        # mean / max / min over sequences define the variance envelope
        prec_curve = rates.mean(0) * 100.0
        # score should be counted for max 20 pixel error
        prec_score = prec_curve[:, 20]
        max_curve = rates.max(0).values * 100.0
        min_curve = rates.min(0).values * 100.0

        plot_opts = {
            'legend_loc': 'lower right',
            'xlabel': 'Location error threshold [pixels]',
            'ylabel': 'Distance Precision [%]',
            'xlim': (0, 50), 'ylim': (0, 100),
            'xticks': [0, 10, 20, 30, 40, 50],
            'yticks': [20, 40, 60, 80, 100],
            'title': 'Precision plot - ' + tracker
        }
        save_path = self._fillTemplate(self.PATH_PRECISION_PLOT_ALLSEQUENCES_VAR, tracker)
        self._plotVarianceDrawSave(threshold_set_center, prec_curve, min_curve, max_curve,
                                   prec_score, ["DEFAULT", "BORDER", "NFOV"], plot_opts, save_path)
50.380729
219
0.604842
5,248
42,874
4.696646
0.071837
0.026939
0.0284
0.036514
0.88141
0.845951
0.806353
0.776452
0.748864
0.733569
0
0.03429
0.24565
42,874
851
220
50.380729
0.727815
0.122638
0
0.654479
0
0
0.122772
0.029221
0
0
0
0
0
1
0.025594
false
0
0.012797
0
0.043876
0.005484
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
d1a3fa3fc204006dec4f6d362d787db4019ff244
5,887
py
Python
utils.py
FYJNEVERFOLLOWS/hw_NLP
c9d72804128dfed3a53e9df40e94b2d53cccacae
[ "MIT" ]
null
null
null
utils.py
FYJNEVERFOLLOWS/hw_NLP
c9d72804128dfed3a53e9df40e94b2d53cccacae
[ "MIT" ]
null
null
null
utils.py
FYJNEVERFOLLOWS/hw_NLP
c9d72804128dfed3a53e9df40e94b2d53cccacae
[ "MIT" ]
null
null
null
import numpy as np
from collections import Counter
import torch
import torch.nn.functional as F
from torch import nn
from torch.utils.data import Dataset, DataLoader

# torch.set_printoptions(profile="full")


def build_dict(words, max_words=50000):
    """Build vocabulary maps from a list of words.

    Keeps the `max_words` most common words. Index 0 is reserved (indices
    start at 1), hence the vocabulary size is len(kept words) + 1.

    Returns:
        (word2idx, idx2word, num_words)
    """
    # Counter(words) counts in one C-level pass; the original filled the
    # counter with an explicit loop.
    word_count = Counter(words)
    ls = word_count.most_common(max_words)
    num_words = len(ls) + 1
    # return word2idx, idx2word and num_words, respectively
    word2idx = {w[0]: index + 1 for (index, w) in enumerate(ls)}
    idx2word = {index + 1: w[0] for (index, w) in enumerate(ls)}
    return word2idx, idx2word, num_words


def encode(text, word_to_idx):
    """Map each token in `text` to its vocabulary index.

    NOTE(review): out-of-vocabulary tokens map to -1, which is not a valid
    embedding index — presumably OOV tokens never reach the model; confirm
    before reusing this helper elsewhere.
    """
    return [word_to_idx.get(t, -1) for t in text]


# Module-level setup: the vocabulary is loaded once at import time.
# NOTE(review): hard-coded absolute path and debug prints — fine for a
# homework script, but they run on every import of this module.
vocab_path = "/Users/fuyanjie/Desktop/PG/AI/NLP/exp_hw_ZeweiChu/bobsue.voc.txt"
with open(vocab_path, "r") as f:
    text = f.read()
# (the redundant f.close() calls were removed — `with` already closes the file)
vocab = text.split('\n')
dict_word2idx, dict_idx2word, vocab_size = build_dict(vocab[:-1])
print(dict_word2idx)
print(dict_idx2word)


class BobSue_Dataset(Dataset):
    """Language-model dataset: every sentence prefix paired with the same
    prefix shifted by one token, both left-padded to length 20."""

    def __init__(self, data_path):
        super().__init__()
        self.data_path = data_path
        self.total_x, self.total_y = self._read_file()

    def __getitem__(self, index):
        # dtype is torch.long (required for embedding / cross-entropy targets)
        return torch.tensor(self.total_x[index], dtype=torch.long), \
               torch.tensor(self.total_y[index], dtype=torch.long).squeeze()

    def __len__(self):
        return len(self.total_x)

    def _read_file(self):
        """Encode and pad every (prefix, shifted-prefix) pair."""
        total_x = []
        total_y = []
        feats, labels = self.load_data(self.data_path)
        print(f'max_len {self.max_len}')
        for idx, feat in enumerate(feats):
            feat_encoded = encode(feat, word_to_idx=dict_word2idx)
            # left-pad to a fixed sequence length of 20
            feat_encoded = F.pad(torch.tensor(feat_encoded, dtype=torch.long), (20 - len(feat_encoded), 0))
            # feat_one_hot = F.one_hot(feat_encoded, num_classes=vocab_size)
            # feat_one_hot.shape: torch.Size([20, 1499])
            # feat_encoded.shape: torch.Size([20])
            total_x.append(feat_encoded)
            label_encoded = encode(labels[idx], word_to_idx=dict_word2idx)
            # left-pad to a fixed sequence length of 20
            label_encoded = F.pad(torch.tensor(label_encoded, dtype=torch.long), (20 - len(label_encoded), 0))
            # label_one_hot = F.one_hot(torch.tensor(label_encoded, dtype=torch.long), num_classes=vocab_size)
            # label_one_hot.shape: torch.Size([1, 1499])
            # label_encoded.shape: torch.Size([20])
            total_y.append(label_encoded)
        return total_x, total_y

    def load_data(self, path):
        """Read sentences and expand each into all (words[:i], words[1:i+1])
        pairs; also records the longest sentence length in self.max_len."""
        feats = []
        labels = []
        self.max_len = 0
        with open(path, "r") as f:
            text = f.read()
        sentences = text.split('\n')[:-1]
        for sen in sentences:
            words = sen.split(' ')
            self.max_len = max(self.max_len, len(words))
            for i in range(1, len(words)):
                feats.append(words[:i])
                labels.append(words[1:i + 1])
        return feats, labels


class Prevsent_Dataset(Dataset):
    """NOTE(review): byte-for-byte identical to BobSue_Dataset — presumably it
    was meant to condition on the previous sentence but was never finished.
    Kept as-is so existing callers keep working; confirm intent before use."""

    def __init__(self, data_path):
        super().__init__()
        self.data_path = data_path
        self.total_x, self.total_y = self._read_file()

    def __getitem__(self, index):
        # dtype is torch.long
        return torch.tensor(self.total_x[index], dtype=torch.long), \
               torch.tensor(self.total_y[index], dtype=torch.long).squeeze()

    def __len__(self):
        return len(self.total_x)

    def _read_file(self):
        """Encode and pad every (prefix, shifted-prefix) pair."""
        total_x = []
        total_y = []
        feats, labels = self.load_data(self.data_path)
        print(f'max_len {self.max_len}')
        for idx, feat in enumerate(feats):
            feat_encoded = encode(feat, word_to_idx=dict_word2idx)
            # left-pad to a fixed sequence length of 20
            feat_encoded = F.pad(torch.tensor(feat_encoded, dtype=torch.long), (20 - len(feat_encoded), 0))
            # feat_one_hot = F.one_hot(feat_encoded, num_classes=vocab_size)
            # feat_one_hot.shape: torch.Size([20, 1499])
            # feat_encoded.shape: torch.Size([20])
            total_x.append(feat_encoded)
            label_encoded = encode(labels[idx], word_to_idx=dict_word2idx)
            # left-pad to a fixed sequence length of 20
            label_encoded = F.pad(torch.tensor(label_encoded, dtype=torch.long), (20 - len(label_encoded), 0))
            # label_one_hot = F.one_hot(torch.tensor(label_encoded, dtype=torch.long), num_classes=vocab_size)
            # label_one_hot.shape: torch.Size([1, 1499])
            # label_encoded.shape: torch.Size([20])
            total_y.append(label_encoded)
        return total_x, total_y

    def load_data(self, path):
        """Read sentences and expand each into all (words[:i], words[1:i+1])
        pairs; also records the longest sentence length in self.max_len."""
        feats = []
        labels = []
        self.max_len = 0
        with open(path, "r") as f:
            text = f.read()
        sentences = text.split('\n')[:-1]
        for sen in sentences:
            words = sen.split(' ')
            self.max_len = max(self.max_len, len(words))
            for i in range(1, len(words)):
                feats.append(words[:i])
                labels.append(words[1:i + 1])
        return feats, labels


# if __name__ == '__main__':
#     train_txt_path = "/Users/fuyanjie/Desktop/PG/AI/NLP/exp_hw_ZeweiChu/bobsue.lm.train.txt"
#     # feat_one_hot = F.one_hot(torch.tensor([1, 2, 4]), num_classes=5)
#     # print(feat_one_hot)
#     # print(feat_one_hot.shape)
#     # pad = nn.ZeroPad2d((0,0,0,20-feat_one_hot.size(0)))
#     # feat_one_hot = pad(feat_one_hot)
#     # print(feat_one_hot)
#     # print(feat_one_hot.shape)
#
#     train_data = DataLoader(BobSue_Dataset(train_txt_path), batch_size=4, shuffle=True,
#                             num_workers=0)  # train_data.shape (batch_x, batch_y)
#     print(len(train_data))  # len(train_data) is samples / batch_size
#     print(next(iter(train_data))[0].shape, next(iter(train_data))[1].shape)
40.881944
151
0.625955
842
5,887
4.106888
0.146081
0.036437
0.034702
0.036437
0.756796
0.756796
0.742915
0.733083
0.733083
0.706189
0
0.022411
0.242059
5,887
144
152
40.881944
0.752577
0.275692
0
0.762887
0
0
0.028206
0.015169
0
0
0
0
0
1
0.123711
false
0
0.061856
0.051546
0.309278
0.041237
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
d1c09db6a5aaf2b9eb2063434fa1df872fe47fe1
95
py
Python
ztag/test/__init__.py
justinbastress/ztag
137b754dfe22b7d6e0945ae33def372ec67d092b
[ "Apache-2.0" ]
107
2015-10-13T16:03:21.000Z
2021-11-08T10:53:07.000Z
ztag/test/__init__.py
justinbastress/ztag
137b754dfe22b7d6e0945ae33def372ec67d092b
[ "Apache-2.0" ]
73
2015-10-14T17:27:10.000Z
2018-10-01T14:32:44.000Z
ztag/test/__init__.py
justinbastress/ztag
137b754dfe22b7d6e0945ae33def372ec67d092b
[ "Apache-2.0" ]
36
2015-10-14T17:13:20.000Z
2021-10-05T19:41:10.000Z
from protocols_test import ProtocolNameTestCase from encoding_test import CleanBannerTestCase
23.75
47
0.905263
10
95
8.4
0.7
0.238095
0
0
0
0
0
0
0
0
0
0
0.094737
95
3
48
31.666667
0.976744
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
ae1b836c6f434260dafff529dc56f8ac9338d066
453
py
Python
src/tidygraphtool/edgedataframe.py
jstonge/tidygraphtool
6bf0a0e11d667e7cd1cb8f0ff1f61cb930536ce1
[ "MIT" ]
null
null
null
src/tidygraphtool/edgedataframe.py
jstonge/tidygraphtool
6bf0a0e11d667e7cd1cb8f0ff1f61cb930536ce1
[ "MIT" ]
null
null
null
src/tidygraphtool/edgedataframe.py
jstonge/tidygraphtool
6bf0a0e11d667e7cd1cb8f0ff1f61cb930536ce1
[ "MIT" ]
null
null
null
"""Edge extension for dataframe""" from pandas import DataFrame, Series class EdgeDataFrame(DataFrame): @property def _constructor(self): return EdgeDataFrame @property def _constructor_sliced(self): return EdgeSeries class EdgeSeries(Series): @property def _constructor(self): return EdgeSeries @property def _constructor_sliced(self): return EdgeSeries
21.571429
36
0.646799
41
453
7
0.414634
0.15331
0.30662
0.181185
0.557491
0.334495
0.334495
0
0
0
0
0
0.289183
453
21
37
21.571429
0.891304
0.06181
0
0.733333
0
0
0
0
0
0
0
0
0
1
0.266667
false
0
0.066667
0.266667
0.733333
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
6
ae43ffa04c21f7229388664064959a922528b7ae
48
py
Python
common/__init__.py
kainonly/satis-flow
361ff7b78d4a2df47aac8e0af34e95b8f1120d73
[ "MIT" ]
1
2020-04-18T02:42:27.000Z
2020-04-18T02:42:27.000Z
common/__init__.py
kainonly/satis-flow
361ff7b78d4a2df47aac8e0af34e95b8f1120d73
[ "MIT" ]
null
null
null
common/__init__.py
kainonly/satis-flow
361ff7b78d4a2df47aac8e0af34e95b8f1120d73
[ "MIT" ]
null
null
null
from .config import Config from .oss import Oss
16
26
0.791667
8
48
4.75
0.5
0
0
0
0
0
0
0
0
0
0
0
0.166667
48
2
27
24
0.95
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
88ab4c30c355b18f623b73b22019ae998ee219f3
24,160
py
Python
models/AttentionResnet.py
suikei-wang/Towards-Interpretable-Attention-Networks-for-Cervical-Cancer-Analysis
30b69394cc3fe339d2bc9b4c3b17cd345d088dff
[ "MIT" ]
1
2022-01-19T10:01:15.000Z
2022-01-19T10:01:15.000Z
models/AttentionResnet.py
suikei-wang/Towards-Interpretable-Attention-Networks-for-Cervical-Cancer-Analysis
30b69394cc3fe339d2bc9b4c3b17cd345d088dff
[ "MIT" ]
null
null
null
models/AttentionResnet.py
suikei-wang/Towards-Interpretable-Attention-Networks-for-Cervical-Cancer-Analysis
30b69394cc3fe339d2bc9b4c3b17cd345d088dff
[ "MIT" ]
null
null
null
import torch.nn as nn class ResidualBlock(nn.Module): def __init__(self, input_channels, output_channels, stride=1): super(ResidualBlock, self).__init__() self.input_channels = input_channels self.output_channels = output_channels self.stride = stride self.bn1 = nn.BatchNorm2d(input_channels) self.relu = nn.ReLU(inplace=True) self.conv1 = nn.Conv2d(input_channels, int(output_channels/4), 1, 1, bias = False) self.bn2 = nn.BatchNorm2d(int(output_channels/4)) self.relu = nn.ReLU(inplace=True) self.conv2 = nn.Conv2d(int(output_channels/4), int(output_channels/4), 3, stride, padding = 1, bias = False) self.bn3 = nn.BatchNorm2d(int(output_channels/4)) self.relu = nn.ReLU(inplace=True) self.conv3 = nn.Conv2d(int(output_channels/4), output_channels, 1, 1, bias = False) self.conv4 = nn.Conv2d(input_channels, output_channels , 1, stride, bias = False) def forward(self, x): residual = x out = self.bn1(x) out1 = self.relu(out) out = self.conv1(out1) out = self.bn2(out) out = self.relu(out) out = self.conv2(out) out = self.bn3(out) out = self.relu(out) out = self.conv3(out) if (self.input_channels != self.output_channels) or (self.stride !=1 ): residual = self.conv4(out1) out += residual return out class AttentionModule_pre(nn.Module): def __init__(self, in_channels, out_channels, size1, size2, size3): super(AttentionModule_pre, self).__init__() self.first_residual_blocks = ResidualBlock(in_channels, out_channels) self.trunk_branches = nn.Sequential( ResidualBlock(in_channels, out_channels), ResidualBlock(in_channels, out_channels) ) self.mpool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.softmax1_blocks = ResidualBlock(in_channels, out_channels) self.skip1_connection_residual_block = ResidualBlock(in_channels, out_channels) self.mpool2 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.softmax2_blocks = ResidualBlock(in_channels, out_channels) self.skip2_connection_residual_block = ResidualBlock(in_channels, out_channels) self.mpool3 = 
nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.softmax3_blocks = nn.Sequential( ResidualBlock(in_channels, out_channels), ResidualBlock(in_channels, out_channels) ) self.interpolation3 = nn.UpsamplingBilinear2d(size=size3) self.softmax4_blocks = ResidualBlock(in_channels, out_channels) self.interpolation2 = nn.UpsamplingBilinear2d(size=size2) self.softmax5_blocks = ResidualBlock(in_channels, out_channels) self.interpolation1 = nn.UpsamplingBilinear2d(size=size1) self.softmax6_blocks = nn.Sequential( nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True), nn.Conv2d(out_channels, out_channels , kernel_size = 1, stride = 1, bias = False), nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True), nn.Conv2d(out_channels, out_channels , kernel_size = 1, stride = 1, bias = False), nn.Sigmoid() ) self.last_blocks = ResidualBlock(in_channels, out_channels) def forward(self, x): x = self.first_residual_blocks(x) out_trunk = self.trunk_branches(x) out_mpool1 = self.mpool1(x) out_softmax1 = self.softmax1_blocks(out_mpool1) out_skip1_connection = self.skip1_connection_residual_block(out_softmax1) out_mpool2 = self.mpool2(out_softmax1) out_softmax2 = self.softmax2_blocks(out_mpool2) out_skip2_connection = self.skip2_connection_residual_block(out_softmax2) out_mpool3 = self.mpool3(out_softmax2) out_softmax3 = self.softmax3_blocks(out_mpool3) # out_interp3 = self.interpolation3(out_softmax3) # print(out_skip2_connection.data) # print(out_interp3.data) out = out_interp3 + out_skip2_connection out_softmax4 = self.softmax4_blocks(out) out_interp2 = self.interpolation2(out_softmax4) out = out_interp2 + out_skip1_connection out_softmax5 = self.softmax5_blocks(out) out_interp1 = self.interpolation1(out_softmax5) out_softmax6 = self.softmax6_blocks(out_interp1) out = (1 + out_softmax6) * out_trunk out_last = self.last_blocks(out) return out_last class AttentionModule_stage0(nn.Module): # input size is 112*112 def __init__(self, in_channels, out_channels, size1=(112, 112), size2=(56, 56), 
size3=(28, 28), size4=(14, 14)): super(AttentionModule_stage0, self).__init__() self.first_residual_blocks = ResidualBlock(in_channels, out_channels) self.trunk_branches = nn.Sequential( ResidualBlock(in_channels, out_channels), ResidualBlock(in_channels, out_channels) ) self.mpool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) # 56*56 self.softmax1_blocks = ResidualBlock(in_channels, out_channels) self.skip1_connection_residual_block = ResidualBlock(in_channels, out_channels) self.mpool2 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) # 28*28 self.softmax2_blocks = ResidualBlock(in_channels, out_channels) self.skip2_connection_residual_block = ResidualBlock(in_channels, out_channels) self.mpool3 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) # 14*14 self.softmax3_blocks = ResidualBlock(in_channels, out_channels) self.skip3_connection_residual_block = ResidualBlock(in_channels, out_channels) self.mpool4 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) # 7*7 self.softmax4_blocks = nn.Sequential( ResidualBlock(in_channels, out_channels), ResidualBlock(in_channels, out_channels) ) self.interpolation4 = nn.UpsamplingBilinear2d(size=size4) self.softmax5_blocks = ResidualBlock(in_channels, out_channels) self.interpolation3 = nn.UpsamplingBilinear2d(size=size3) self.softmax6_blocks = ResidualBlock(in_channels, out_channels) self.interpolation2 = nn.UpsamplingBilinear2d(size=size2) self.softmax7_blocks = ResidualBlock(in_channels, out_channels) self.interpolation1 = nn.UpsamplingBilinear2d(size=size1) self.softmax8_blocks = nn.Sequential( nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True), nn.Conv2d(out_channels, out_channels, kernel_size=1, stride=1, bias = False), nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True), nn.Conv2d(out_channels, out_channels , kernel_size=1, stride=1, bias = False), nn.Sigmoid() ) self.last_blocks = ResidualBlock(in_channels, out_channels) def forward(self, x): # 112*112 x = self.first_residual_blocks(x) out_trunk = 
self.trunk_branches(x) out_mpool1 = self.mpool1(x) # 56*56 out_softmax1 = self.softmax1_blocks(out_mpool1) out_skip1_connection = self.skip1_connection_residual_block(out_softmax1) out_mpool2 = self.mpool2(out_softmax1) # 28*28 out_softmax2 = self.softmax2_blocks(out_mpool2) out_skip2_connection = self.skip2_connection_residual_block(out_softmax2) out_mpool3 = self.mpool3(out_softmax2) # 14*14 out_softmax3 = self.softmax3_blocks(out_mpool3) out_skip3_connection = self.skip3_connection_residual_block(out_softmax3) out_mpool4 = self.mpool4(out_softmax3) # 7*7 out_softmax4 = self.softmax4_blocks(out_mpool4) out_interp4 = self.interpolation4(out_softmax4) + out_softmax3 out = out_interp4 + out_skip3_connection out_softmax5 = self.softmax5_blocks(out) out_interp3 = self.interpolation3(out_softmax5) + out_softmax2 # print(out_skip2_connection.data) # print(out_interp3.data) out = out_interp3 + out_skip2_connection out_softmax6 = self.softmax6_blocks(out) out_interp2 = self.interpolation2(out_softmax6) + out_softmax1 out = out_interp2 + out_skip1_connection out_softmax7 = self.softmax7_blocks(out) out_interp1 = self.interpolation1(out_softmax7) + out_trunk out_softmax8 = self.softmax8_blocks(out_interp1) out = (1 + out_softmax8) * out_trunk out_last = self.last_blocks(out) return out_last class AttentionModule_stage1(nn.Module): # input size is 56*56 def __init__(self, in_channels, out_channels, size1=(56, 56), size2=(28, 28), size3=(14, 14)): super(AttentionModule_stage1, self).__init__() self.first_residual_blocks = ResidualBlock(in_channels, out_channels) self.trunk_branches = nn.Sequential( ResidualBlock(in_channels, out_channels), ResidualBlock(in_channels, out_channels) ) self.mpool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.softmax1_blocks = ResidualBlock(in_channels, out_channels) self.skip1_connection_residual_block = ResidualBlock(in_channels, out_channels) self.mpool2 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.softmax2_blocks = 
ResidualBlock(in_channels, out_channels) self.skip2_connection_residual_block = ResidualBlock(in_channels, out_channels) self.mpool3 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.softmax3_blocks = nn.Sequential( ResidualBlock(in_channels, out_channels), ResidualBlock(in_channels, out_channels) ) self.interpolation3 = nn.UpsamplingBilinear2d(size=size3) self.softmax4_blocks = ResidualBlock(in_channels, out_channels) self.interpolation2 = nn.UpsamplingBilinear2d(size=size2) self.softmax5_blocks = ResidualBlock(in_channels, out_channels) self.interpolation1 = nn.UpsamplingBilinear2d(size=size1) self.softmax6_blocks = nn.Sequential( nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True), nn.Conv2d(out_channels, out_channels , kernel_size = 1, stride = 1, bias = False), nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True), nn.Conv2d(out_channels, out_channels , kernel_size = 1, stride = 1, bias = False), nn.Sigmoid() ) self.last_blocks = ResidualBlock(in_channels, out_channels) def forward(self, x): x = self.first_residual_blocks(x) out_trunk = self.trunk_branches(x) out_mpool1 = self.mpool1(x) out_softmax1 = self.softmax1_blocks(out_mpool1) out_skip1_connection = self.skip1_connection_residual_block(out_softmax1) out_mpool2 = self.mpool2(out_softmax1) out_softmax2 = self.softmax2_blocks(out_mpool2) out_skip2_connection = self.skip2_connection_residual_block(out_softmax2) out_mpool3 = self.mpool3(out_softmax2) out_softmax3 = self.softmax3_blocks(out_mpool3) # out_interp3 = self.interpolation3(out_softmax3) + out_softmax2 # print(out_skip2_connection.data) # print(out_interp3.data) out = out_interp3 + out_skip2_connection out_softmax4 = self.softmax4_blocks(out) out_interp2 = self.interpolation2(out_softmax4) + out_softmax1 out = out_interp2 + out_skip1_connection out_softmax5 = self.softmax5_blocks(out) out_interp1 = self.interpolation1(out_softmax5) + out_trunk out_softmax6 = self.softmax6_blocks(out_interp1) out = (1 + out_softmax6) * out_trunk out_last = 
self.last_blocks(out) return out_last class AttentionModule_stage2(nn.Module): # input image size is 28*28 def __init__(self, in_channels, out_channels, size1=(28, 28), size2=(14, 14)): super(AttentionModule_stage2, self).__init__() self.first_residual_blocks = ResidualBlock(in_channels, out_channels) self.trunk_branches = nn.Sequential( ResidualBlock(in_channels, out_channels), ResidualBlock(in_channels, out_channels) ) self.mpool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.softmax1_blocks = ResidualBlock(in_channels, out_channels) self.skip1_connection_residual_block = ResidualBlock(in_channels, out_channels) self.mpool2 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.softmax2_blocks = nn.Sequential( ResidualBlock(in_channels, out_channels), ResidualBlock(in_channels, out_channels) ) self.interpolation2 = nn.UpsamplingBilinear2d(size=size2) self.softmax3_blocks = ResidualBlock(in_channels, out_channels) self.interpolation1 = nn.UpsamplingBilinear2d(size=size1) self.softmax4_blocks = nn.Sequential( nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True), nn.Conv2d(out_channels, out_channels, kernel_size=1, stride=1, bias=False), nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True), nn.Conv2d(out_channels, out_channels, kernel_size=1, stride=1, bias=False), nn.Sigmoid() ) self.last_blocks = ResidualBlock(in_channels, out_channels) def forward(self, x): x = self.first_residual_blocks(x) out_trunk = self.trunk_branches(x) out_mpool1 = self.mpool1(x) out_softmax1 = self.softmax1_blocks(out_mpool1) out_skip1_connection = self.skip1_connection_residual_block(out_softmax1) out_mpool2 = self.mpool2(out_softmax1) out_softmax2 = self.softmax2_blocks(out_mpool2) out_interp2 = self.interpolation2(out_softmax2) + out_softmax1 # print(out_skip2_connection.data) # print(out_interp3.data) out = out_interp2 + out_skip1_connection out_softmax3 = self.softmax3_blocks(out) out_interp1 = self.interpolation1(out_softmax3) + out_trunk out_softmax4 = 
self.softmax4_blocks(out_interp1) out = (1 + out_softmax4) * out_trunk out_last = self.last_blocks(out) return out_last class AttentionModule_stage3(nn.Module): # input image size is 14*14 def __init__(self, in_channels, out_channels, size1=(14, 14)): super(AttentionModule_stage3, self).__init__() self.first_residual_blocks = ResidualBlock(in_channels, out_channels) self.trunk_branches = nn.Sequential( ResidualBlock(in_channels, out_channels), ResidualBlock(in_channels, out_channels) ) self.mpool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.softmax1_blocks = nn.Sequential( ResidualBlock(in_channels, out_channels), ResidualBlock(in_channels, out_channels) ) self.interpolation1 = nn.UpsamplingBilinear2d(size=size1) self.softmax2_blocks = nn.Sequential( nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True), nn.Conv2d(out_channels, out_channels, kernel_size=1, stride=1, bias=False), nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True), nn.Conv2d(out_channels, out_channels, kernel_size=1, stride=1, bias=False), nn.Sigmoid() ) self.last_blocks = ResidualBlock(in_channels, out_channels) def forward(self, x): x = self.first_residual_blocks(x) out_trunk = self.trunk_branches(x) out_mpool1 = self.mpool1(x) out_softmax1 = self.softmax1_blocks(out_mpool1) out_interp1 = self.interpolation1(out_softmax1) + out_trunk out_softmax2 = self.softmax2_blocks(out_interp1) out = (1 + out_softmax2) * out_trunk out_last = self.last_blocks(out) return out_last class AttentionModule_stage1_cifar(nn.Module): # input size is 16*16 def __init__(self, in_channels, out_channels, size1=(16, 16), size2=(8, 8)): super(AttentionModule_stage1_cifar, self).__init__() self.first_residual_blocks = ResidualBlock(in_channels, out_channels) self.trunk_branches = nn.Sequential( ResidualBlock(in_channels, out_channels), ResidualBlock(in_channels, out_channels) ) self.mpool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) # 8*8 self.down_residual_blocks1 = ResidualBlock(in_channels, out_channels) 
self.skip1_connection_residual_block = ResidualBlock(in_channels, out_channels) self.mpool2 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) # 4*4 self.middle_2r_blocks = nn.Sequential( ResidualBlock(in_channels, out_channels), ResidualBlock(in_channels, out_channels) ) self.interpolation1 = nn.UpsamplingBilinear2d(size=size2) # 8*8 self.up_residual_blocks1 = ResidualBlock(in_channels, out_channels) self.interpolation2 = nn.UpsamplingBilinear2d(size=size1) # 16*16 self.conv1_1_blocks = nn.Sequential( nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True), nn.Conv2d(out_channels, out_channels, kernel_size=1, stride=1, bias=False), nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True), nn.Conv2d(out_channels, out_channels, kernel_size=1, stride=1, bias = False), nn.Sigmoid() ) self.last_blocks = ResidualBlock(in_channels, out_channels) def forward(self, x): x = self.first_residual_blocks(x) out_trunk = self.trunk_branches(x) out_mpool1 = self.mpool1(x) out_down_residual_blocks1 = self.down_residual_blocks1(out_mpool1) out_skip1_connection = self.skip1_connection_residual_block(out_down_residual_blocks1) out_mpool2 = self.mpool2(out_down_residual_blocks1) out_middle_2r_blocks = self.middle_2r_blocks(out_mpool2) # out_interp = self.interpolation1(out_middle_2r_blocks) + out_down_residual_blocks1 # print(out_skip2_connection.data) # print(out_interp3.data) out = out_interp + out_skip1_connection out_up_residual_blocks1 = self.up_residual_blocks1(out) out_interp2 = self.interpolation2(out_up_residual_blocks1) + out_trunk out_conv1_1_blocks = self.conv1_1_blocks(out_interp2) out = (1 + out_conv1_1_blocks) * out_trunk out_last = self.last_blocks(out) return out_last class AttentionModule_stage2_cifar(nn.Module): # input size is 8*8 def __init__(self, in_channels, out_channels, size=(8, 8)): super(AttentionModule_stage2_cifar, self).__init__() self.first_residual_blocks = ResidualBlock(in_channels, out_channels) self.trunk_branches = nn.Sequential( ResidualBlock(in_channels, 
out_channels), ResidualBlock(in_channels, out_channels) ) self.mpool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) # 4*4 self.middle_2r_blocks = nn.Sequential( ResidualBlock(in_channels, out_channels), ResidualBlock(in_channels, out_channels) ) self.interpolation1 = nn.UpsamplingBilinear2d(size=size) # 8*8 self.conv1_1_blocks = nn.Sequential( nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True), nn.Conv2d(out_channels, out_channels, kernel_size=1, stride=1, bias=False), nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True), nn.Conv2d(out_channels, out_channels, kernel_size=1, stride=1, bias = False), nn.Sigmoid() ) self.last_blocks = ResidualBlock(in_channels, out_channels) def forward(self, x): x = self.first_residual_blocks(x) out_trunk = self.trunk_branches(x) out_mpool1 = self.mpool1(x) out_middle_2r_blocks = self.middle_2r_blocks(out_mpool1) # out_interp = self.interpolation1(out_middle_2r_blocks) + out_trunk # print(out_skip2_connection.data) # print(out_interp3.data) out_conv1_1_blocks = self.conv1_1_blocks(out_interp) out = (1 + out_conv1_1_blocks) * out_trunk out_last = self.last_blocks(out) return out_last class AttentionModule_stage3_cifar(nn.Module): # input size is 4*4 def __init__(self, in_channels, out_channels, size=(8, 8)): super(AttentionModule_stage3_cifar, self).__init__() self.first_residual_blocks = ResidualBlock(in_channels, out_channels) self.trunk_branches = nn.Sequential( ResidualBlock(in_channels, out_channels), ResidualBlock(in_channels, out_channels) ) self.middle_2r_blocks = nn.Sequential( ResidualBlock(in_channels, out_channels), ResidualBlock(in_channels, out_channels) ) self.conv1_1_blocks = nn.Sequential( nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True), nn.Conv2d(out_channels, out_channels, kernel_size=1, stride=1, bias=False), nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True), nn.Conv2d(out_channels, out_channels, kernel_size=1, stride=1, bias = False), nn.Sigmoid() ) self.last_blocks = ResidualBlock(in_channels, 
out_channels) def forward(self, x): x = self.first_residual_blocks(x) out_trunk = self.trunk_branches(x) out_middle_2r_blocks = self.middle_2r_blocks(x) # out_conv1_1_blocks = self.conv1_1_blocks(out_middle_2r_blocks) out = (1 + out_conv1_1_blocks) * out_trunk out_last = self.last_blocks(out) return out_last class ResidualAttentionModel_92(nn.Module): # for input size 224 def __init__(self, num_classes): super(ResidualAttentionModel_92, self).__init__() self.num_classes = num_classes self.conv1 = nn.Sequential( nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias = False), nn.BatchNorm2d(64), nn.ReLU(inplace=True) ) self.mpool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.residual_block1 = ResidualBlock(64, 256) self.attention_module1 = AttentionModule_stage1(256, 256) self.residual_block2 = ResidualBlock(256, 512, 2) self.attention_module2 = AttentionModule_stage2(512, 512) self.attention_module2_2 = AttentionModule_stage2(512, 512) # tbq add self.residual_block3 = ResidualBlock(512, 1024, 2) self.attention_module3 = AttentionModule_stage3(1024, 1024) self.attention_module3_2 = AttentionModule_stage3(1024, 1024) # tbq add self.attention_module3_3 = AttentionModule_stage3(1024, 1024) # tbq add self.residual_block4 = ResidualBlock(1024, 2048, 2) self.residual_block5 = ResidualBlock(2048, 2048) self.residual_block6 = ResidualBlock(2048, 2048) self.mpool2 = nn.Sequential( nn.BatchNorm2d(2048), nn.ReLU(inplace=True), nn.AvgPool2d(kernel_size=7, stride=1) ) self.fc = nn.Linear(2048,self.num_classes) def forward(self, x): out = self.conv1(x) out = self.mpool1(out) out = self.residual_block1(out) out = self.attention_module1(out) out = self.residual_block2(out) out = self.attention_module2(out) out = self.attention_module2_2(out) out = self.residual_block3(out) out = self.attention_module3(out) out = self.attention_module3_2(out) out = self.attention_module3_3(out) out = self.residual_block4(out) out = self.residual_block5(out) out = 
self.residual_block6(out) out = self.mpool2(out) out = out.view(out.size(0), -1) out = self.fc(out) return out
40.536913
116
0.67053
2,926
24,160
5.232399
0.04648
0.094121
0.122861
0.113847
0.838994
0.802155
0.776486
0.756695
0.734618
0.699935
0
0.046705
0.236962
24,160
596
117
40.536913
0.783781
0.025331
0
0.627803
0
0
0
0
0
0
0
0
0
1
0.044843
false
0
0.002242
0
0.091928
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
ee40e0022c38a8d2f4b3e2ac0ed9014142613f04
75
py
Python
autogluon/scheduler/resource/__init__.py
zhanghang1989/autogluon
8bfe6b0da8915020eeb9895fd18d7688c0d604c1
[ "Apache-2.0" ]
62
2020-04-11T01:10:18.000Z
2022-01-20T02:05:58.000Z
autogluon/scheduler/resource/__init__.py
zhanghang1989/autogluon
8bfe6b0da8915020eeb9895fd18d7688c0d604c1
[ "Apache-2.0" ]
14
2020-04-11T01:10:10.000Z
2020-05-13T23:59:30.000Z
autogluon/scheduler/resource/__init__.py
zhanghang1989/autogluon
8bfe6b0da8915020eeb9895fd18d7688c0d604c1
[ "Apache-2.0" ]
7
2020-04-21T13:06:42.000Z
2022-03-14T11:54:39.000Z
from .resource import * from .manager import * from .dist_manager import *
18.75
27
0.76
10
75
5.6
0.5
0.357143
0
0
0
0
0
0
0
0
0
0
0.16
75
3
28
25
0.888889
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
ee48a922ef4b76c8c46765e830e07559af6b45b0
181
py
Python
cosmo/formatters/raw.py
danellis/cosmo
f57ce58b5053012c72b5fad82e226ed1b434ff8c
[ "MIT" ]
null
null
null
cosmo/formatters/raw.py
danellis/cosmo
f57ce58b5053012c72b5fad82e226ed1b434ff8c
[ "MIT" ]
null
null
null
cosmo/formatters/raw.py
danellis/cosmo
f57ce58b5053012c72b5fad82e226ed1b434ff8c
[ "MIT" ]
null
null
null
class RawFormatter(object): def print(self, triples): for page_url, link_type, link_url in triples: print("{} {} {}".format(page_url, link_type, link_url))
30.166667
67
0.635359
24
181
4.541667
0.583333
0.12844
0.201835
0.275229
0.40367
0.40367
0
0
0
0
0
0
0.226519
181
5
68
36.2
0.778571
0
0
0
0
0
0.044444
0
0
0
0
0
0
1
0.25
false
0
0
0
0.5
0.5
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
1
0
6
ee768e5e165808f1cd3db803c112734425c479eb
253
py
Python
footy/src/matches/match.py
bryce-klinker/hello-python
c62ac61f40c1d9fcb77dbde49161da399787d96d
[ "MIT" ]
null
null
null
footy/src/matches/match.py
bryce-klinker/hello-python
c62ac61f40c1d9fcb77dbde49161da399787d96d
[ "MIT" ]
null
null
null
footy/src/matches/match.py
bryce-klinker/hello-python
c62ac61f40c1d9fcb77dbde49161da399787d96d
[ "MIT" ]
null
null
null
class Match: @property def host_name(self): return self.match_values[2] @property def visitor_name(self): return self.match_values[3] def __init__(self, match_line): self.match_values = match_line.split(',')
23
49
0.644269
33
253
4.606061
0.454545
0.236842
0.296053
0.236842
0.381579
0.381579
0
0
0
0
0
0.010526
0.249012
253
11
49
23
0.789474
0
0
0.222222
0
0
0.003937
0
0
0
0
0
0
1
0.333333
false
0
0
0.222222
0.666667
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
6
c995709d1127153c694cc2d13813b3a2b21e4a09
127
py
Python
Models_II_Relaciones/core/erp/tests.py
BrianMarquez3/Python-Django
61f84a01b7f57254f9dcbbad86cc4c88c2acf4d7
[ "MIT" ]
2
2020-09-28T21:23:59.000Z
2021-11-10T15:01:15.000Z
Models_II_Relaciones/core/erp/tests.py
BrianMarquez3/Python-Django
61f84a01b7f57254f9dcbbad86cc4c88c2acf4d7
[ "MIT" ]
21
2021-02-04T01:37:44.000Z
2022-03-12T01:00:55.000Z
Models_II_Relaciones/core/erp/tests.py
BrianMarquez3/Python-Django
61f84a01b7f57254f9dcbbad86cc4c88c2acf4d7
[ "MIT" ]
null
null
null
import os os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Models_II_Relaciones.settings') from core.erp.models import Type
18.142857
80
0.818898
18
127
5.555556
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.086614
127
6
81
21.166667
0.862069
0
0
0
0
0
0.401575
0.401575
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
c99ee321e6cfeba0a07f3b7e61aa652f2a86c9f1
32
py
Python
S4/S4 Library/simulation/performance/__init__.py
NeonOcean/Environment
ca658cf66e8fd6866c22a4a0136d415705b36d26
[ "CC-BY-4.0" ]
1
2021-05-20T19:33:37.000Z
2021-05-20T19:33:37.000Z
S4/S4 Library/simulation/performance/__init__.py
NeonOcean/Environment
ca658cf66e8fd6866c22a4a0136d415705b36d26
[ "CC-BY-4.0" ]
null
null
null
S4/S4 Library/simulation/performance/__init__.py
NeonOcean/Environment
ca658cf66e8fd6866c22a4a0136d415705b36d26
[ "CC-BY-4.0" ]
null
null
null
from native.performance import *
32
32
0.84375
4
32
6.75
1
0
0
0
0
0
0
0
0
0
0
0
0.09375
32
1
32
32
0.931034
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
c9d6aea3a41d91d749a1ebfacb85f8bdb6f1bdd5
11,037
py
Python
k8s_handle/k8s/test_provisioner.py
jetbrains-infra/k8s-handle
5b4a30a719a439dd39ba8cecfd87df6d59e1531a
[ "Apache-2.0" ]
152
2018-08-23T12:41:16.000Z
2022-02-02T15:16:15.000Z
k8s_handle/k8s/test_provisioner.py
jetbrains-infra/k8s-handle
5b4a30a719a439dd39ba8cecfd87df6d59e1531a
[ "Apache-2.0" ]
124
2018-08-20T03:55:18.000Z
2021-09-28T09:01:15.000Z
k8s_handle/k8s/test_provisioner.py
jetbrains-infra/k8s-handle
5b4a30a719a439dd39ba8cecfd87df6d59e1531a
[ "Apache-2.0" ]
32
2018-10-06T00:48:26.000Z
2022-03-24T14:39:44.000Z
import unittest from k8s_handle import settings from k8s_handle.exceptions import ProvisioningError from k8s_handle.templating import get_template_contexts from .adapters import AdapterBuiltinKind from .mocks import K8sClientMock from .provisioner import Provisioner class TestProvisioner(unittest.TestCase): def setUp(self): settings.GET_ENVIRON_STRICT = False def test_deployment_wait_complete_fail(self): client = AdapterBuiltinKind( api=K8sClientMock('test1'), spec={'kind': 'Deployment', 'metadata': {'name': 'test1'}, 'spec': {'replicas': 1}}) with self.assertRaises(RuntimeError) as context: Provisioner('deploy', False, None)._wait_deployment_complete(client, tries=1, timeout=0) self.assertTrue('Deployment not completed for 1 tries' in str(context.exception), context.exception) def test_deployment_wait_complete(self): client = AdapterBuiltinKind( api=K8sClientMock('test2'), spec={'kind': 'Deployment', 'metadata': {'name': 'test1'}, 'spec': {'replicas': 1}}) Provisioner('deploy', False, None)._wait_deployment_complete(client, tries=1, timeout=0) def test_statefulset_wait_complete_fail(self): client = AdapterBuiltinKind(api=K8sClientMock('test1'), spec={'kind': 'StatefulSet', 'metadata': {'name': ''}, 'spec': {'replicas': 1}}) with self.assertRaises(RuntimeError) as context: Provisioner('deploy', False, None)._wait_statefulset_complete(client, tries=1, timeout=0) self.assertTrue('StatefulSet not completed for 1 tries' in str(context.exception), context.exception) def test_statefulset_wait_complete(self): client = AdapterBuiltinKind(api=K8sClientMock('test2'), spec={'kind': 'StatefulSet', 'metadata': {'name': ''}, 'spec': {'replicas': 3}}) Provisioner('deploy', False, None)._wait_statefulset_complete(client, tries=1, timeout=0) def test_daemonset_wait_complete_fail(self): client = AdapterBuiltinKind(api=K8sClientMock('test1'), spec={'kind': 'DaemonSet', 'metadata': {'name': ''}, 'spec': {'replicas': 1}}) with self.assertRaises(RuntimeError) as context: 
Provisioner('deploy', False, None)._wait_daemonset_complete(client, tries=1, timeout=0) self.assertTrue('DaemonSet not completed for 1 tries' in str(context.exception), context.exception) def test_daemonset_wait_complete(self): client = AdapterBuiltinKind(api=K8sClientMock('test2'), spec={'kind': 'DaemonSet', 'metadata': {'name': ''}, 'spec': {'replicas': 1}}) Provisioner('deploy', False, None)._wait_daemonset_complete(client, tries=1, timeout=0) def test_job_wait_complete_fail(self): client = AdapterBuiltinKind(api=K8sClientMock('test1'), spec={'kind': 'Job', 'metadata': {'name': ''}, 'spec': {'replicas': 1}}) with self.assertRaises(RuntimeError) as context: Provisioner('deploy', False, None)._wait_job_complete(client, tries=1, timeout=0) self.assertTrue('Job running failed' in str(context.exception)) def test_job_wait_complete_conditions_fail(self): client = AdapterBuiltinKind(api=K8sClientMock('test2'), spec={'kind': 'Job', 'metadata': {'name': ''}, 'spec': {'replicas': 1}}) with self.assertRaises(RuntimeError) as context: Provisioner('deploy', False, None)._wait_job_complete(client, tries=1, timeout=0) self.assertTrue('Job not completed for 1 tries' in str(context.exception), context.exception) def test_job_wait_complete(self): client = AdapterBuiltinKind(api=K8sClientMock('test3'), spec={'kind': 'Job', 'metadata': {'name': ''}, 'spec': {'replicas': 1}}) Provisioner('deploy', False, None)._wait_job_complete(client, tries=1, timeout=0) def test_ns_from_template(self): client = AdapterBuiltinKind(api=K8sClientMock('test'), spec={'kind': 'Job', 'metadata': {'name': '', 'namespace': 'test'}, 'spec': {'replicas': 1}}) self.assertEqual(client.namespace, 'test') def test_ns_from_config(self): settings.K8S_NAMESPACE = 'namespace' client = AdapterBuiltinKind(api=K8sClientMock('test'), spec={'kind': 'Job', 'metadata': {'name': ''}, 'spec': {'replicas': 1}}) self.assertEqual(client.namespace, 'namespace') def test_deployment_destruction_wait_fail(self): client = 
AdapterBuiltinKind( api=K8sClientMock('test1'), spec={'kind': 'Deployment', 'metadata': {'name': 'test1'}, 'spec': {'replicas': 1}}) with self.assertRaises(RuntimeError) as context: Provisioner('destroy', False, None)._wait_destruction_complete(client, 'Deployment', tries=1, timeout=0) self.assertTrue('Deployment destruction not completed for 1 tries' in str(context.exception), context.exception) def test_deployment_destruction_wait_success(self): client = AdapterBuiltinKind( api=K8sClientMock('404'), spec={'kind': 'Deployment', 'metadata': {'name': 'test1'}, 'spec': {'replicas': 1}}) Provisioner('destroy', False, None)._wait_destruction_complete(client, 'Deployment', tries=1, timeout=0) def test_job_destruction_wait_fail(self): client = AdapterBuiltinKind( api=K8sClientMock('test1'), spec={'kind': 'Job', 'metadata': {'name': 'test1'}, 'spec': {'replicas': 1}}) with self.assertRaises(RuntimeError) as context: Provisioner('deploy', True, None)._wait_destruction_complete(client, 'Job', tries=1, timeout=0) self.assertTrue('Job destruction not completed for 1 tries' in str(context.exception), context.exception) def test_job_destruction_wait_success(self): client = AdapterBuiltinKind( api=K8sClientMock('404'), spec={'kind': 'Job', 'metadata': {'name': 'test1'}, 'spec': {'replicas': 1}}) Provisioner('destroy', False, None)._wait_destruction_complete(client, 'Job', tries=1, timeout=0) def test_deploy_replace(self): settings.CHECK_STATUS_TIMEOUT = 0 Provisioner('deploy', False, None).run("k8s_handle/k8s/fixtures/deployment.yaml") def test_deploy_create(self): Provisioner('deploy', False, None).run("k8s_handle/k8s/fixtures/deployment_404.yaml") def test_deploy_unknown_api(self): with self.assertRaises(RuntimeError) as context: Provisioner('deploy', False, None).run("k8s_handle/k8s/fixtures/deployment_no_api.yaml") self.assertTrue('Unknown apiVersion "test" in template "k8s_handle/k8s/fixtures/deployment_no_api.yaml"' in str(context.exception), context.exception) def 
test_service_replace(self): Provisioner('deploy', False, None).run("k8s_handle/k8s/fixtures/service.yaml") def test_service_replace_no_ports(self): Provisioner('deploy', False, None).run("k8s_handle/k8s/fixtures/service_no_ports.yaml") def test_destroy_unknown_api(self): with self.assertRaises(RuntimeError) as context: Provisioner('destroy', False, None).run("k8s_handle/k8s/fixtures/deployment_no_api.yaml") self.assertTrue('Unknown apiVersion "test" in template "k8s_handle/k8s/fixtures/deployment_no_api.yaml"' in str(context.exception), context.exception) def test_destroy_not_found(self): Provisioner('destroy', False, None).run("k8s_handle/k8s/fixtures/deployment_404.yaml") def test_destroy_fail(self): with self.assertRaises(RuntimeError) as context: Provisioner('destroy', False, None).run("k8s_handle/k8s/fixtures/service.yaml") self.assertTrue('' in str(context.exception), context.exception) def test_destroy_success(self): Provisioner('destroy', False, None).run("k8s_handle/k8s/fixtures/deployment.yaml") def test_pvc_replace_equals(self): Provisioner('deploy', False, None).run("k8s_handle/k8s/fixtures/pvc.yaml") def test_pvc_replace_not_equals(self): with self.assertRaises(ProvisioningError) as context: Provisioner('deploy', False, None).run("k8s_handle/k8s/fixtures/pvc2.yaml") self.assertTrue('Replace persistent volume claim fail' in str(context.exception), context.exception) # https://kubernetes.io/docs/concepts/storage/persistent-volumes/#volume-mode def test_pvc_replace_new_attribute(self): with self.assertRaises(ProvisioningError) as context: Provisioner('deploy', False, None).run("k8s_handle/k8s/fixtures/pvc3.yaml") self.assertTrue('Replace persistent volume claim fail' in str(context.exception)) def test_get_template_contexts(self): with self.assertRaises(StopIteration): next(get_template_contexts('k8s_handle/k8s/fixtures/empty.yaml')) with self.assertRaises(RuntimeError) as context: next(get_template_contexts('k8s_handle/k8s/fixtures/nokind.yaml')) 
self.assertTrue( 'Field "kind" not found (or empty) in file "k8s_handle/k8s/fixtures/nokind.yaml"' in str(context.exception), context.exception) with self.assertRaises(RuntimeError) as context: next(get_template_contexts('k8s_handle/k8s/fixtures/nometadata.yaml')) self.assertTrue( 'Field "metadata" not found (or empty) in file "k8s_handle/k8s/fixtures/nometadata.yaml"' in str(context.exception), context.exception) with self.assertRaises(RuntimeError) as context: next(get_template_contexts('k8s_handle/k8s/fixtures/nometadataname.yaml')) self.assertTrue( 'Field "metadata->name" not found (or empty) in file "k8s_handle/k8s/fixtures/nometadataname.yaml"' in str(context.exception), context.exception) context = next(get_template_contexts('k8s_handle/k8s/fixtures/valid.yaml')) self.assertEqual(context.get('kind'), 'Service') self.assertEqual(context.get('apiVersion'), 'v1') self.assertEqual(context.get('metadata').get('name'), 'my-service') self.assertEqual(context.get('spec').get('selector').get('app'), 'my-app') context = next(get_template_contexts('k8s_handle/k8s/fixtures/deployment_wo_replicas.yaml')) self.assertEqual(context.get('spec').get('replicas'), 1) class TestKubeObject(unittest.TestCase): def test_replicas_equal(self): replicas = (1, 1, 1) self.assertTrue(Provisioner._replicas_count_are_equal(replicas)) def test_replicas_not_equal(self): replicas = (1, 1, 0) self.assertFalse(Provisioner._replicas_count_are_equal(replicas))
53.839024
120
0.662771
1,226
11,037
5.800979
0.101958
0.029528
0.038808
0.064679
0.854893
0.815101
0.779246
0.734533
0.705849
0.681102
0
0.017396
0.203135
11,037
204
121
54.102941
0.791245
0.006705
0
0.411043
0
0.018405
0.216606
0.084489
0
0
0
0
0.245399
1
0.190184
false
0
0.042945
0
0.245399
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
4e4cdc1231653dafbf488679ce73a1b21bf1e6c9
278
py
Python
Tingstarter.tingapp/kickscraper/__init__.py
986-Studio/Tingstarter
00f9997f5e5305a626b9f9efc20d857121c82d28
[ "MIT" ]
null
null
null
Tingstarter.tingapp/kickscraper/__init__.py
986-Studio/Tingstarter
00f9997f5e5305a626b9f9efc20d857121c82d28
[ "MIT" ]
null
null
null
Tingstarter.tingapp/kickscraper/__init__.py
986-Studio/Tingstarter
00f9997f5e5305a626b9f9efc20d857121c82d28
[ "MIT" ]
null
null
null
from .backends.kickstarter.client import KickStarter from .backends.kickstarter.models import KickStarterProject as Project def search_project(terms): return KickStarter().search_project(terms) def search_projects(terms): return KickStarter().search_projects(terms)
25.272727
70
0.81295
32
278
6.9375
0.4375
0.108108
0.207207
0.252252
0
0
0
0
0
0
0
0
0.107914
278
10
71
27.8
0.895161
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
6
14f3455334b4f76f712ddded86afb4fe2d052244
97
py
Python
pychastic/utils.py
RadostW/stochastic
1d437900e0314f18678353fd4794ecefb197761d
[ "MIT" ]
2
2022-03-01T11:48:21.000Z
2022-03-01T11:48:22.000Z
pychastic/utils.py
RadostW/stochastic
1d437900e0314f18678353fd4794ecefb197761d
[ "MIT" ]
null
null
null
pychastic/utils.py
RadostW/stochastic
1d437900e0314f18678353fd4794ecefb197761d
[ "MIT" ]
2
2021-11-16T15:44:39.000Z
2021-12-15T22:59:49.000Z
import jax.numpy as jnp def contract_all(a, b): return jnp.tensordot(a, b, axes=len(b.shape))
19.4
47
0.71134
19
97
3.578947
0.789474
0.058824
0
0
0
0
0
0
0
0
0
0
0.14433
97
4
48
24.25
0.819277
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
6
14fc82cf154d132eab8084aa4511484bfb54fe2f
31,358
py
Python
tests/test_branch.py
jpichon/git_wrapper
4ea59b341a2a2d92102300a6bbd1b2bdc28cffe1
[ "MIT" ]
5
2019-01-18T16:16:54.000Z
2019-06-08T12:12:14.000Z
tests/test_branch.py
jpichon/git_wrapper
4ea59b341a2a2d92102300a6bbd1b2bdc28cffe1
[ "MIT" ]
52
2018-06-20T10:56:57.000Z
2021-09-27T14:34:56.000Z
tests/test_branch.py
jpichon/git_wrapper
4ea59b341a2a2d92102300a6bbd1b2bdc28cffe1
[ "MIT" ]
6
2018-06-12T18:22:16.000Z
2021-06-18T16:28:47.000Z
#! /usr/bin/env python """Tests for GitBranch""" from mock import ANY, Mock, patch import git import pytest from git_wrapper.repo import GitRepo from git_wrapper import exceptions def test_on_head_only_all_new(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN on_head_only method is called with no upstream equivalent changes THEN a dictionary is returned containing two sha1's and commits """ repo = GitRepo('./', mock_repo) lines = '+ sha1 commit1\n+ sha2 commit2\n+ sha3 commit3' attrs = {'cherry.return_value': lines} mock_repo.git.configure_mock(**attrs) expected = {'sha1': 'commit1', 'sha2': 'commit2', 'sha3': 'commit3'} assert expected == repo.branch.cherry_on_head_only('upstream', 'HEAD') def test_on_head_only_with_mixed(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN on_head_only method is called with a mix of upstream equivalent and not equivalent changes THEN a dictionary is returned containing two sha1's and commits """ repo = GitRepo('./', mock_repo) lines = '+ sha1 commit1\n- sha2 commit2\n+ sha3 commit3' attrs = {'cherry.return_value': lines} mock_repo.git.configure_mock(**attrs) expected = {'sha1': 'commit1', 'sha3': 'commit3'} assert expected == repo.branch.cherry_on_head_only('upstream', 'HEAD') def test_on_head_only_no_new(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN on_head_only method is called with a only upstream equivalent changes THEN an empty dictionary is returned """ repo = GitRepo('./', mock_repo) lines = '- sha1 commit1\n- sha2 commit2\n- sha3 commit3' attrs = {'cherry.return_value': lines} mock_repo.git.configure_mock(**attrs) assert {} == repo.branch.cherry_on_head_only('upstream', 'HEAD') def test_on_head_only_empty(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN on_head_only is called with no changes THEN an empty dictionary is returned """ repo = GitRepo('./', mock_repo) lines = '' attrs = {'cherry.return_value': lines} mock_repo.git.configure_mock(**attrs) 
assert {} == repo.branch.cherry_on_head_only('upstream', 'HEAD') def test_all_equivalent_changes(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN equivalent is called with only equivalent upstream/downstream changes. THEN a dictionary is returned with all changes """ repo = GitRepo('./', mock_repo) lines = '- sha1 commit1\n- sha2 commit2\n- sha3 commit3' attrs = {'cherry.return_value': lines} mock_repo.git.configure_mock(**attrs) expected = {'sha1': 'commit1', 'sha2': 'commit2', 'sha3': 'commit3'} assert expected == repo.branch.cherry_equivalent('upstream', 'HEAD') def test_equivalent_mixed_changes(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN equivalent is called with mix equivalent and HEAD changes. THEN a dictionary is returned with only the equivalent changes. """ repo = GitRepo('./', mock_repo) lines = '+ sha1 commit1\n- sha2 commit2\n+ sha3 commit3' attrs = {'cherry.return_value': lines} mock_repo.git.configure_mock(**attrs) expected = {'sha2': 'commit2'} assert expected == repo.branch.cherry_equivalent('upstream', 'HEAD') def test_equivalent_downstream_only(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN equivalent is called with mix HEAD only changes. THEN an empty dictionary is returned. """ repo = GitRepo('./', mock_repo) lines = '+ sha1 commit1\n+ sha2 commit2\n+ sha3 commit3' attrs = {'cherry.return_value': lines} mock_repo.git.configure_mock(**attrs) assert {} == repo.branch.cherry_equivalent('upstream', 'HEAD') def test_equivalent_no_changes(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN equivalent is called with no changes. THEN an empty dictionary is returned. 
""" repo = GitRepo('./', mock_repo) lines = '' attrs = {'cherry.return_value': lines} mock_repo.git.configure_mock(**attrs) assert {} == repo.branch.cherry_equivalent('upstream', 'HEAD') def test_rebase(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN branch.rebase_to_hash is called with a valid branch name and a valid hash THEN git.checkout called AND git.rebase called """ mock_repo.is_dirty.return_value = False repo = GitRepo('./', mock_repo) with patch('git.repo.fun.name_to_object'): repo.branch.rebase_to_hash('test', '12345') assert repo.repo.git.checkout.called is True assert repo.repo.git.rebase.called is True def test_rebase_dirty_repo(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN branch.rebase_to_hash is called on a dirty repository THEN a DirtyRepositoryException is raised """ mock_repo.is_dirty.return_value = True repo = GitRepo('./', mock_repo) with patch('git.repo.fun.name_to_object'): with pytest.raises(exceptions.DirtyRepositoryException): repo.branch.rebase_to_hash('test', '12345') assert mock_repo.is_dirty.called is True def test_rebase_branch_not_found(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN branch.rebase_to_hash is called with an invalid branch name THEN a ReferenceNotFoundException is raised AND the exception message contains branch """ mock_repo.is_dirty.return_value = False repo = GitRepo('./', mock_repo) with patch('git.repo.fun.name_to_object') as mock_name_to_object: with pytest.raises(exceptions.ReferenceNotFoundException) as exc_info: mock_name_to_object.side_effect = git.exc.BadName() repo.branch.rebase_to_hash('doesNotExist', '12345') assert 'branch' in str(exc_info.value) def test_rebase_hash_not_found(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN branch.rebase_to_hash is called with a valid branch name and an invalid hash THEN a ReferenceNotFoundException is raised AND the exception message contains hash """ mock_repo.is_dirty.return_value = 
False repo = GitRepo('./', mock_repo) with patch('git.repo.fun.name_to_object') as mock_name_to_object: with pytest.raises(exceptions.ReferenceNotFoundException) as exc_info: # First name_to_object call is to check the branch, let it succeed def side_effect(mock, ref): if ref != "branchA": raise git.exc.BadName mock_name_to_object.side_effect = side_effect repo.branch.rebase_to_hash('branchA', '12345') assert 'hash' in str(exc_info.value) def test_rebase_error_during_checkout(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN branch.rebase_to_hash is called with a valid branch name and a valid hash AND checkout fails with an exception THEN a CheckoutException is raised """ mock_repo.is_dirty.return_value = False mock_repo.git.checkout.side_effect = git.GitCommandError('checkout', '') repo = GitRepo('./', mock_repo) with patch('git.repo.fun.name_to_object'): with pytest.raises(exceptions.CheckoutException): repo.branch.rebase_to_hash('branchA', '12345') def test_rebase_error_during_rebase(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN branch.rebase_to_hash is called with a valid branch name and a valid hash AND rebase fails with an exception THEN a RebaseException is raised """ mock_repo.is_dirty.return_value = False mock_repo.git.rebase.side_effect = git.GitCommandError('rebase', '') repo = GitRepo('./', mock_repo) with patch('git.repo.fun.name_to_object'): with pytest.raises(exceptions.RebaseException): repo.branch.rebase_to_hash('branchA', '12345') def test_abort_rebase(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN branch.abort_rebase is called THEN git.rebase called """ repo = GitRepo('./', mock_repo) repo.branch.abort_rebase() assert repo.repo.git.rebase.called is True def test_abort_rebase_error(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN abort_rebase is called AND the abort fails with an exception THEN an AbortException is raised """ mock_repo.git.rebase.side_effect = 
git.GitCommandError('rebase', '') repo = GitRepo('./', mock_repo) with pytest.raises(exceptions.AbortException): repo.branch.abort_rebase() def test_apply_patch(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN apply_patch is called with a valid branch_name and valid path THEN git.am is called with only one argument (path) and no options """ repo = GitRepo('./', mock_repo) with patch('git.repo.fun.name_to_object'): repo.branch.apply_patch('test_branch', './requirements.txt') assert repo.git.am.called is True # The path gets translated to a full path which will change on every # system so we only check there was one argument only, with no other flags repo.git.am.assert_called_with(ANY) def test_apply_patch_with_brackets_preserved(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN apply_patch is called with valid parameters AND keep_square_brackets is set to True THEN git.am is called with the --keep-non-patch option """ repo = GitRepo('./', mock_repo) with patch('git.repo.fun.name_to_object'): repo.branch.apply_patch('test_branch', './requirements.txt', keep_square_brackets=True) assert repo.git.am.called is True repo.git.am.assert_called_with('--keep-non-patch', ANY) def test_apply_patch_wrong_branch_name(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN apply_patch is called with an invalid branch_name and valid path THEN ReferenceNotFoundExceptionRaised AND git.am not called """ repo = GitRepo('./', mock_repo) with patch('git.repo.fun.name_to_object') as mock_name_to_object: mock_name_to_object.side_effect = git.exc.BadName() with pytest.raises(exceptions.ReferenceNotFoundException): repo.branch.apply_patch('invalid_branch', './requirements.txt') assert repo.git.am.called is False def test_apply_patch_not_a_file(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN apply_patch is called with a valid branch_name and invalid path THEN FileDoesntExistException raised AND git.am not called """ repo = 
GitRepo('./', mock_repo) with patch('git.repo.fun.name_to_object'): with pytest.raises(exceptions.FileDoesntExistException): repo.branch.apply_patch('test_branch', './git_wrapper') assert repo.git.am.called is False def test_apply_patch_checkout_error(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN apply_patch is called with a valid branch name and a valid path AND checkout fails with an exception THEN a CheckoutException is raised AND git.am not called """ mock_repo.git.checkout.side_effect = git.GitCommandError('checkout', '') repo = GitRepo('./', mock_repo) with patch('git.repo.fun.name_to_object'): with pytest.raises(exceptions.CheckoutException): repo.branch.apply_patch('test_branch', './requirements.txt') assert repo.git.am.called is False def test_apply_patch_apply_error(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN apply_patch is called with a valid branch name and a valid path AND git.am fails with an exception THEN a ChangeNotAppliedException is raised """ mock_repo.git.am.side_effect = git.GitCommandError('am', '') repo = GitRepo('./', mock_repo) with patch('git.repo.fun.name_to_object'): with pytest.raises(exceptions.ChangeNotAppliedException): repo.branch.apply_patch('test_branch', './requirements.txt') def test_apply_diff(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN apply_diff is called with a valid branch_name and valid diff_path and valid message THEN index.commit is called """ mock_repo.is_dirty.return_value = False repo = GitRepo('./', mock_repo) with patch('git.repo.fun.name_to_object'): repo.branch.apply_diff('test_branch', './requirements.txt', 'message', True) assert repo.git.add.called is True assert repo.git.apply.called is True assert repo.git.commit.called is True def test_apply_diff_on_invalid_branch(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN apply_diff is called with an invalid branch_name and valid path THEN ReferenceNotFoundExceptionRaised AND 
git.apply not called """ repo = GitRepo('./', mock_repo) with patch('git.repo.fun.name_to_object') as mock_name_to_object: mock_name_to_object.side_effect = git.exc.BadName() with pytest.raises(exceptions.ReferenceNotFoundException): repo.branch.apply_diff('invalid_branch', './requirements.txt', 'message') assert repo.git.apply.called is False def test_apply_diff_on_dirty_workspace(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN apply_diff is called on a dirty repository THEN a DirtyRepositoryException is raised AND git.apply not called """ mock_repo.is_dirty.return_value = True repo = GitRepo('./', mock_repo) with patch('git.repo.fun.name_to_object'): with pytest.raises(exceptions.DirtyRepositoryException): repo.branch.apply_diff('test_branch', './requirements.txt', 'message') assert mock_repo.is_dirty.called is True assert repo.git.apply.called is False def test_apply_diff_no_commit_message(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN apply_diff is called with valid branch_name, valid diff_path and invalid message THEN CommitMessageMissingException raised AND index.commit not called """ mock_repo.is_dirty.return_value = False repo = GitRepo('./', mock_repo) with patch('git.repo.fun.name_to_object'): with pytest.raises(exceptions.CommitMessageMissingException): repo.branch.apply_diff('test_branch', './requirements.txt', '') assert repo.git.commit.called is False def test_apply_diff_not_a_file(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN apply_diff is called with valid parameters THEN FileDoesntExistException raised AND git.apply not called """ mock_repo.is_dirty.return_value = False repo = GitRepo('./', mock_repo) with patch('git.repo.fun.name_to_object'): with pytest.raises(exceptions.FileDoesntExistException): repo.branch.apply_diff('test_branch', 'doesntexist.txt', 'message') assert repo.git.apply.called is False def test_apply_diff_checkout_error(mock_repo): """ GIVEN GitRepo initialized with a 
path and repo WHEN apply_diff is called with valid parameters AND checkout fails with an exception THEN a CheckoutException is raised AND index.commit not called """ mock_repo.is_dirty.return_value = False mock_repo.git.checkout.side_effect = git.GitCommandError('checkout', '') repo = GitRepo('./', mock_repo) with patch('git.repo.fun.name_to_object'): with pytest.raises(exceptions.CheckoutException): repo.branch.apply_diff('invalid_branch', './requirements.txt', 'my message') assert repo.git.commit.called is False def test_apply_diff_apply_fails(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN apply_diff is called with a valid branch_name and valid diff_path and valid message AND git.apply fails with an exception THEN an ChangeNotAppliedException is raised """ mock_repo.is_dirty.return_value = False mock_repo.git.apply.side_effect = git.GitCommandError('apply', '') repo = GitRepo('./', mock_repo) with patch('git.repo.fun.name_to_object'): with pytest.raises(exceptions.ChangeNotAppliedException): repo.branch.apply_diff('test_branch', './requirements.txt', 'message') assert repo.git.commit.called is False def test_apply_diff_apply_nothing_to_commit(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN apply_diff is called with a valid branch_name and valid diff_path and valid message WHEN commit is called with a valid message AND there are no diff changes THEN git.apply called AND index.commit not called """ mock_repo.is_dirty.return_value = False repo = GitRepo('./', mock_repo) repo.git.diff.return_value = [] with patch('git.repo.fun.name_to_object'): repo.branch.apply_diff('test_branch', './requirements.txt', 'message') assert repo.git.apply.called is True assert repo.git.commit.called is False def test_abort_patch_apply(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN abort_patch_apply is called THEN git.am called """ repo = GitRepo('./', mock_repo) repo.branch.abort_patch_apply() assert repo.git.am.called is 
True def test_abort_patch_apply_error(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN abort_patch_apply is called AND the abort_patch_apply fails with an exception THEN an Abort_Patch_ApplyException is raised """ mock_repo.git.am.side_effect = git.GitCommandError('abort_patch_apply', '') repo = GitRepo('./', mock_repo) with pytest.raises(exceptions.AbortException): repo.branch.abort_patch_apply() def test_reverse_diff(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN reverse_diff is called with a valid diff_path THEN git.am called """ repo = GitRepo('./', mock_repo) repo.branch.reverse_diff('./requirements.txt') assert repo.git.apply.called is True def test_reverse_diff_diff_file_doesnt_exist(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN reverse_diff is called with and invalid diff_path THEN FileDoesntExistException raised AND git.apply not called """ repo = GitRepo('./', mock_repo) with pytest.raises(exceptions.FileDoesntExistException): repo.branch.reverse_diff('./thisdoesntexist') assert repo.git.apply.called is False def test_reverse_diff_error(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN reverse_diff is called with a valid diff_path AND the reverse_diff fails with an exception THEN an RevertException is raised """ mock_repo.git.apply.side_effect = git.GitCommandError('apply', '') repo = GitRepo('./', mock_repo) with pytest.raises(exceptions.RevertException): repo.branch.reverse_diff('./requirements.txt') def test_reset(mock_repo): """ GIVEN GitRepo is initialized with a path and repo WHEN reset is called THEN repo.head.reset is called """ mock_remote = Mock() mock_repo.remote.return_value = mock_remote repo = GitRepo(repo=mock_repo) with patch('git.repo.fun.name_to_object'): repo.branch.hard_reset() assert mock_remote.fetch.called is True # Sync is called assert mock_repo.head.reset.called is True # Reset is called def test_reset_remote_reference_not_found(mock_repo): """ GIVEN 
GitRepo is initialized with a path and repo WHEN reset is called AND the remote + branch reference doesn't exist THEN ReferenceNotFoundException is raised """ repo = GitRepo(repo=mock_repo) with patch('git.repo.fun.name_to_object') as mock_name_to_object: mock_name_to_object.side_effect = git.exc.BadName() with pytest.raises(exceptions.ReferenceNotFoundException): repo.branch.hard_reset(refresh=False, remote="doesntExist") assert mock_repo.head.reset.called is False def test_reset_checkout_failure(mock_repo): """ GIVEN GitRepo is initialized with a path and repo WHEN reset is called AND git.checkout fails THEN CheckoutException is raised """ mock_repo.git.checkout.side_effect = git.GitCommandError('checkout', '') repo = GitRepo(repo=mock_repo) with patch('git.repo.fun.name_to_object'): with pytest.raises(exceptions.CheckoutException): repo.branch.hard_reset(refresh=False) assert mock_repo.head.reset.called is False def test_reset_reset_failure(mock_repo): """ GIVEN GitRepo is initialized with a path and repo WHEN reset is called AND git.reset fails THEN ResetException is raised """ repo = GitRepo(repo=mock_repo) with patch('git.repo.fun.name_to_object'): mock_repo.head.reset.side_effect = git.GitCommandError('reset', '') with pytest.raises(exceptions.ResetException): repo.branch.hard_reset(refresh=False) def test_reset_to_ref_with_checkout(mock_repo): """ GIVEN GitRepo is initialized with a path and repo WHEN reset is called with checkout THEN repo.head.reset is called AND repo.checkout is called once """ repo = GitRepo(repo=mock_repo) with patch('git.repo.fun.name_to_object'): repo.branch.hard_reset_to_ref("main", "origin/main", checkout=True) assert mock_repo.head.reset.called is True assert mock_repo.git.checkout.call_count == 1 def test_reset_to_ref_detached_head_with_checkout(mock_repo, monkeypatch): """ GIVEN GitRepo is initialized with a path and repo WHEN reset is called with checkout AND the current HEAD is detached THEN repo.head.reset is called AND 
repo.checkout is called once """ class MockRef: @property def name(self): # Detached heads don't have a name raise TypeError repo = GitRepo(repo=mock_repo) with patch('git.repo.fun.name_to_object'): mock_repo.head.ref = MockRef() repo.branch.hard_reset_to_ref("main", "origin/main", checkout=True) assert mock_repo.head.reset.called is True assert mock_repo.git.checkout.call_count == 1 def test_reset_to_ref_without_checkout(mock_repo): """ GIVEN GitRepo is initialized with a path and repo WHEN reset_to_ref is called with checkout False THEN repo.head.reset is called AND repo.checkout is called twice to return to the original state """ repo = GitRepo(repo=mock_repo) with patch('git.repo.fun.name_to_object'): repo.branch.hard_reset_to_ref("main", "origin/main", checkout=False) assert mock_repo.head.reset.called is True assert mock_repo.git.checkout.call_count == 2 def test_reset_to_ref_without_checkout_fails(mock_repo): """ GIVEN GitRepo is initialized with a path and repo WHEN reset_to_ref is called with checkout False AND switching back fails THEN checkoutException is raised """ mock_repo.git.checkout.side_effect = [None, git.GitCommandError('checkout', '')] repo = GitRepo(repo=mock_repo) with patch('git.repo.fun.name_to_object'): with pytest.raises(exceptions.CheckoutException): repo.branch.hard_reset_to_ref("main", "origin/main", checkout=False) assert mock_repo.head.reset.called is True assert mock_repo.git.checkout.call_count == 2 def test_local_branch_exists(mock_repo): """ GIVEN GitRepo is initialized with a path and repo WHEN branch.exists is called with a valid branch and None remote THEN True is returned """ repo = GitRepo(repo=mock_repo) mock_repo.branches = ["master", "test"] assert repo.branch.exists("test") is True def test_local_branch_doesnt_exist(mock_repo): """ GIVEN GitRepo is initialized with a path and repo WHEN branch.exists is called with an invalid branch and None remote THEN False is returned """ repo = GitRepo(repo=mock_repo) 
mock_repo.branches = ["master", "test"] assert repo.branch.exists("another-test") is False def test_branch_exists_with_invalid_remote(mock_repo): """ GIVEN GitRepo is initialized with a path and repo WHEN branch.exists is called with a valid branch and invalid remote THEN a RemoteException is raised """ repo = GitRepo(repo=mock_repo) with pytest.raises(exceptions.RemoteException): assert repo.branch.exists("another", "doesntexist") def test_remote_branch_exists(mock_repo): """ GIVEN GitRepo is initialized with a path and repo WHEN branch.exists is called with a valid branch and valid remote THEN True is returned """ repo = GitRepo(repo=mock_repo) remote = Mock(spec=git.Remote) remote.configure_mock(name="testremote", refs=["testbranch"]) mock_repo.remotes.extend([remote]) assert repo.branch.exists("testbranch", "testremote") is True def test_remote_branch_doesnt_exists(mock_repo): """ GIVEN GitRepo is initialized with a path and repo WHEN branch.exists is called with an invalid branch and valid remote THEN True is returned """ repo = GitRepo(repo=mock_repo) remote = Mock(spec=git.Remote) remote.configure_mock(name="testremote", refs=[]) mock_repo.remotes.extend([remote]) assert repo.branch.exists("testbranch", "testremote") is False def test_create_branch(mock_repo): """ GIVEN GitRepo is initialized with a path and repo WHEN branch.create is called with a valid name and start_ref THEN git.branch is called AND git.checkout is not called """ repo = GitRepo(repo=mock_repo) with patch('git.repo.fun.name_to_object'): assert repo.branch.create("test", "123456") is True repo.git.branch.assert_called_with("test", "123456") repo.git.checkout.assert_not_called() def test_create_and_checkout_branch(mock_repo): """ GIVEN GitRepo is initialized with a path and repo WHEN branch.create is called with valid parameters and checkout is True THEN git.branch is called AND git.checkout is called """ repo = GitRepo(repo=mock_repo) with patch('git.repo.fun.name_to_object'): assert 
repo.branch.create("test", "123456", checkout=True) is True repo.git.branch.assert_called_with("test", "123456") repo.git.checkout.assert_called() def test_create_branch_with_bad_start_ref(mock_repo): """ GIVEN GitRepo is initialized with a path and repo WHEN branch.create is called with a valid name and invalid start_ref THEN a ReferenceNotFoundException is raised """ repo = GitRepo(repo=mock_repo) with patch('git.repo.fun.name_to_object') as mock_name_to_object: mock_name_to_object.side_effect = git.exc.BadName() with pytest.raises(exceptions.ReferenceNotFoundException): assert repo.branch.create("test", "badref") def test_create_branch_already_exists(mock_repo): """ GIVEN GitRepo is initialized with a path and repo WHEN branch.create is called with a valid name and start_ref AND the branch already exists THEN git.branch is not called """ repo = GitRepo(repo=mock_repo) mock_repo.branches = ["test", "master"] with patch('git.repo.fun.name_to_object'): repo.branch.create("test", "123456") assert repo.git.branch.called is False assert repo.git.checkout.called is False def test_create_branch_already_exists_and_check_it_out(mock_repo): """ GIVEN GitRepo is initialized with a path and repo WHEN branch.create is called with valid params and checkout is True AND the branch already exists THEN git.branch is not called AND git.checkout is called """ repo = GitRepo(repo=mock_repo) mock_repo.branches = ["test", "master"] with patch('git.repo.fun.name_to_object'): repo.branch.create("test", "123456", checkout=True) assert repo.git.branch.called is False assert repo.git.checkout.called is True def test_create_branch_already_exists_and_reset_it(mock_repo): """ GIVEN GitRepo is initialized with a path and repo WHEN branch.create is called with a valid name and start_ref AND the branch already exists and reset_if_exists is True THEN hard_reset_to_ref is called """ repo = GitRepo(repo=mock_repo) mock_repo.branches = ["test", "master"] mock_hard_reset = Mock() 
repo.branch.hard_reset_to_ref = mock_hard_reset with patch('git.repo.fun.name_to_object'): repo.branch.create("test", "123456", True) assert mock_hard_reset.called is True def test_remote_contains_branch_not_found(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN branch.remote_contains is called with an invalid branch name THEN a ReferenceNotFoundException is raised AND the exception message contains branch """ repo = GitRepo(repo=mock_repo) with patch('git.repo.fun.name_to_object') as mock_name_to_object: with pytest.raises(exceptions.ReferenceNotFoundException) as exc_info: mock_name_to_object.side_effect = git.exc.BadName() repo.branch.remote_contains('doesNotExist', '12345') assert 'branch' in str(exc_info.value) def test_remote_contains_commit_not_found(mock_repo): """ GIVEN GitRepo initialized with a path and repo WHEN branch.remote_contains is called with an invalid commit hash THEN a ReferenceNotFoundException is raised AND the exception message contains hash """ repo = GitRepo(repo=mock_repo) with patch('git.repo.fun.name_to_object') as mock_name_to_object: with pytest.raises(exceptions.ReferenceNotFoundException) as exc_info: # First name_to_object call is to check the branch, let it succeed def side_effect(mock, ref): if ref != "origin/mybranch": raise git.exc.BadName mock_name_to_object.side_effect = side_effect repo.branch.remote_contains('origin/mybranch', 'doesNotExist') assert 'hash' in str(exc_info.value) def test_remote_contains_with_commit_present(mock_repo): """ GIVEN GitRepo is initialized with a path and repo WHEN branch.remote_contains is called with a valid branch and hash AND git_repo.git.branch returns data THEN branch.remote_contains returns True """ remote_branch = "origin/mybranch" mock_repo.git.branch.return_value = remote_branch repo = GitRepo(repo=mock_repo) with patch('git.repo.fun.name_to_object'): assert repo.branch.remote_contains(remote_branch, '12345') is True def 
test_remote_contains_with_commit_absent(mock_repo): """ GIVEN GitRepo is initialized with a path and repo WHEN branch.remote_contains is called with a valid branch and hash AND git_repo.git.branch returns empty string THEN branch.remote_contains returns True """ mock_repo.git.branch.return_value = "" repo = GitRepo(repo=mock_repo) with patch('git.repo.fun.name_to_object'): assert repo.branch.remote_contains("origin/mybranch", '12345') is False
34.611479
95
0.710249
4,411
31,358
4.860803
0.052142
0.064922
0.043282
0.054102
0.889931
0.86843
0.82734
0.807425
0.787277
0.771606
0
0.005855
0.199407
31,358
905
96
34.649724
0.848198
0.325882
0
0.665803
0
0
0.144525
0.052304
0
0
0
0
0.178756
1
0.158031
false
0
0.012953
0
0.173575
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
093e6e804fa8f1aa653143bccccb329f6805f7ce
26
py
Python
archive/nexus-api-v2/Database/Web/Interfaces/__init__.py
cloud-hybrid/delta
402b00ed5aaa32ccef628361e9635879b7ace44f
[ "BSD-3-Clause" ]
null
null
null
archive/nexus-api-v2/Database/Web/Interfaces/__init__.py
cloud-hybrid/delta
402b00ed5aaa32ccef628361e9635879b7ace44f
[ "BSD-3-Clause" ]
null
null
null
archive/nexus-api-v2/Database/Web/Interfaces/__init__.py
cloud-hybrid/delta
402b00ed5aaa32ccef628361e9635879b7ace44f
[ "BSD-3-Clause" ]
1
2022-01-03T05:33:15.000Z
2022-01-03T05:33:15.000Z
from ..Imports import *
6.5
23
0.653846
3
26
5.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.230769
26
3
24
8.666667
0.85
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
096273d46ae329b26fd1c6d94f8a583907266eaf
76
py
Python
0x04-python-more_data_structures/2-uniq_add.py
malu17/alx-higher_level_programming
75a24d98c51116b737f339697c75855e34254d3a
[ "MIT" ]
null
null
null
0x04-python-more_data_structures/2-uniq_add.py
malu17/alx-higher_level_programming
75a24d98c51116b737f339697c75855e34254d3a
[ "MIT" ]
null
null
null
0x04-python-more_data_structures/2-uniq_add.py
malu17/alx-higher_level_programming
75a24d98c51116b737f339697c75855e34254d3a
[ "MIT" ]
null
null
null
#!/usr/bin/python3 def uniq_add(my_list=[]): return (sum(set(my_list)))
19
30
0.657895
13
76
3.615385
0.846154
0.255319
0
0
0
0
0
0
0
0
0
0.014925
0.118421
76
3
31
25.333333
0.686567
0.223684
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
6
117464578ee3f630ece8575dba07ce597e0f7879
118
py
Python
__init__.py
thedatacycle/thedatacycle
cf59cfbb2bb2a8c1de66c04f3f3ddb13b7a6dc82
[ "MIT" ]
null
null
null
__init__.py
thedatacycle/thedatacycle
cf59cfbb2bb2a8c1de66c04f3f3ddb13b7a6dc82
[ "MIT" ]
null
null
null
__init__.py
thedatacycle/thedatacycle
cf59cfbb2bb2a8c1de66c04f3f3ddb13b7a6dc82
[ "MIT" ]
null
null
null
from thedatacycle import getDefinitions, getStateCodes, getStateVarCodes, getUSVarCodes, getStateData, getUSData
29.5
113
0.838983
9
118
11
1
0
0
0
0
0
0
0
0
0
0
0
0.118644
118
3
114
39.333333
0.951923
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
11c72caecb3e5295994dca87e84995e45a776c29
51
py
Python
multilingual_t5/r_pa_en/__init__.py
sumanthd17/mt5
c99b4e3ad1c69908c852c730a1323ccb52d48f58
[ "Apache-2.0" ]
null
null
null
multilingual_t5/r_pa_en/__init__.py
sumanthd17/mt5
c99b4e3ad1c69908c852c730a1323ccb52d48f58
[ "Apache-2.0" ]
null
null
null
multilingual_t5/r_pa_en/__init__.py
sumanthd17/mt5
c99b4e3ad1c69908c852c730a1323ccb52d48f58
[ "Apache-2.0" ]
null
null
null
"""r_pa_en dataset.""" from .r_pa_en import RPaEn
12.75
26
0.705882
10
51
3.2
0.7
0.1875
0.3125
0
0
0
0
0
0
0
0
0
0.137255
51
3
27
17
0.727273
0.313725
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
eeda7483b536faa34003b7ec3d860e7477d80a4b
257
py
Python
fca/algorithms/__init__.py
ksiomelo/cubix
cd9e6dda6696b302a7c0d383259a9d60b15b0d55
[ "Apache-2.0" ]
3
2015-09-07T00:16:16.000Z
2019-01-11T20:27:56.000Z
fca/algorithms/__init__.py
ksiomelo/cubix
cd9e6dda6696b302a7c0d383259a9d60b15b0d55
[ "Apache-2.0" ]
null
null
null
fca/algorithms/__init__.py
ksiomelo/cubix
cd9e6dda6696b302a7c0d383259a9d60b15b0d55
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- """FCA algorithms""" from fca.algorithms.norris import * from fca.algorithms.covering_relation import * from fca.algorithms.scaling import * from fca.algorithms.filtering import * from fca.algorithms.dg_basis import compute_dg_basis
32.125
52
0.782101
35
257
5.628571
0.428571
0.395939
0.431472
0.467005
0
0
0
0
0
0
0
0.004348
0.105058
257
8
52
32.125
0.852174
0.143969
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
eee4fa285229a0e777949818e2733d07f9678912
89
py
Python
proxmin/__init__.py
brianv0/proxmin
244edad59fccc9f233613f9aebb43aa73ef22a85
[ "MIT" ]
71
2018-05-05T11:13:20.000Z
2021-12-12T03:03:58.000Z
proxmin/__init__.py
brianv0/proxmin
244edad59fccc9f233613f9aebb43aa73ef22a85
[ "MIT" ]
9
2018-04-02T15:59:44.000Z
2020-12-28T17:12:58.000Z
proxmin/__init__.py
brianv0/proxmin
244edad59fccc9f233613f9aebb43aa73ef22a85
[ "MIT" ]
19
2018-08-01T12:11:14.000Z
2021-11-12T09:50:43.000Z
from .algorithms import * from .operators import * from . import nmf from . import utils
17.8
25
0.752809
12
89
5.583333
0.5
0.298507
0
0
0
0
0
0
0
0
0
0
0.179775
89
4
26
22.25
0.917808
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
eeee8dc6ff3b58ea3c5ee39070c531208f112dc8
175
py
Python
core/admin.py
georgebcservices/coffeedapp
c0aece544c8493af16fd49ce650f5745564b9adc
[ "MIT" ]
null
null
null
core/admin.py
georgebcservices/coffeedapp
c0aece544c8493af16fd49ce650f5745564b9adc
[ "MIT" ]
null
null
null
core/admin.py
georgebcservices/coffeedapp
c0aece544c8493af16fd49ce650f5745564b9adc
[ "MIT" ]
null
null
null
from django.contrib import admin import core.models as coremodels # Register your models here. admin.site.register(coremodels.Location) admin.site.register(coremodels.Review)
29.166667
40
0.834286
24
175
6.083333
0.625
0.123288
0.232877
0.369863
0
0
0
0
0
0
0
0
0.085714
175
6
41
29.166667
0.9125
0.148571
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
6d743969c39e5a8f0518299d4ace525dcb987743
1,387
py
Python
Euler/naloga8.py
justinraisp/Project-Euler
3894effa441f36d10cbcf4209e4f570647603285
[ "MIT" ]
null
null
null
Euler/naloga8.py
justinraisp/Project-Euler
3894effa441f36d10cbcf4209e4f570647603285
[ "MIT" ]
null
null
null
Euler/naloga8.py
justinraisp/Project-Euler
3894effa441f36d10cbcf4209e4f570647603285
[ "MIT" ]
null
null
null
def najvecji_produkt_n_sosednjih_stevil(n): stevilo = str(7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450) najvecje_do_sedaj = 0 sedajsni = 1 for i in range(len(stevilo) - n): for j in range(n): sedajsni *= int(stevilo[i + j]) if sedajsni > najvecje_do_sedaj: najvecje_do_sedaj = sedajsni sedajsni = 1 print(najvecje_do_sedaj) najvecji_produkt_n_sosednjih_stevil(13)
115.583333
1,019
0.901226
54
1,387
22.851852
0.462963
0.032415
0.048622
0.040519
0.050243
0
0
0
0
0
0
0.783931
0.075703
1,387
12
1,020
115.583333
0.178627
0
0
0.166667
0
0
0
0
0
1
0
0
0
1
0.083333
false
0
0
0
0.083333
0.083333
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
1
1
0
0
0
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
6
6da57edf69e9657226a90e6edf6e3a13dafadecd
94
py
Python
trunk/VyPy/regression/active_subspace/__init__.py
paulcon/VyPy
5acb40e8d19ea76f3cd45f9cf98f252ca15e23f6
[ "BSD-3-Clause" ]
1
2021-12-28T06:39:54.000Z
2021-12-28T06:39:54.000Z
trunk/VyPy/regression/active_subspace/__init__.py
paulcon/VyPy
5acb40e8d19ea76f3cd45f9cf98f252ca15e23f6
[ "BSD-3-Clause" ]
null
null
null
trunk/VyPy/regression/active_subspace/__init__.py
paulcon/VyPy
5acb40e8d19ea76f3cd45f9cf98f252ca15e23f6
[ "BSD-3-Clause" ]
null
null
null
import learn import inject import project from build_surrogate import build_surrogate
13.428571
44
0.808511
12
94
6.166667
0.583333
0.378378
0
0
0
0
0
0
0
0
0
0
0.191489
94
6
45
15.666667
0.973684
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
6db073c1900c6f68ad3fd5aa758bd703e071fea9
25
py
Python
experiments/tfs/image/__init__.py
vishalbelsare/tanda
83ffe22e3ecd4061e9d96e90d8135fd44cddddce
[ "MIT" ]
166
2017-08-10T17:28:49.000Z
2022-03-15T01:49:09.000Z
experiments/tfs/image/__init__.py
vishalbelsare/tanda
83ffe22e3ecd4061e9d96e90d8135fd44cddddce
[ "MIT" ]
25
2017-08-12T17:08:46.000Z
2022-02-09T23:37:53.000Z
experiments/tfs/image/__init__.py
vishalbelsare/tanda
83ffe22e3ecd4061e9d96e90d8135fd44cddddce
[ "MIT" ]
35
2017-08-26T01:54:45.000Z
2021-12-18T07:22:41.000Z
from .image_tfs import *
12.5
24
0.76
4
25
4.5
1
0
0
0
0
0
0
0
0
0
0
0
0.16
25
1
25
25
0.857143
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
6dbf5b9b322bdd70b13a2d723ee9622854d3a90c
42
py
Python
app/fiftycents/game/__init__.py
Cinquiom/fifty-cents-frontend
946f564a87127f5820111321cd48441cc414d277
[ "MIT" ]
null
null
null
app/fiftycents/game/__init__.py
Cinquiom/fifty-cents-frontend
946f564a87127f5820111321cd48441cc414d277
[ "MIT" ]
null
null
null
app/fiftycents/game/__init__.py
Cinquiom/fifty-cents-frontend
946f564a87127f5820111321cd48441cc414d277
[ "MIT" ]
null
null
null
from .fiftycentsgame import FiftyCentsGame
42
42
0.904762
4
42
9.5
0.75
0
0
0
0
0
0
0
0
0
0
0
0.071429
42
1
42
42
0.974359
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
6dc653797bbf1ccd884e4393e9c66411cc5ce754
139
py
Python
snuba/web/wsgi.py
fpacifici/snuba
cf732b71383c948f9387fbe64e9404ca71f8e9c5
[ "Apache-2.0" ]
null
null
null
snuba/web/wsgi.py
fpacifici/snuba
cf732b71383c948f9387fbe64e9404ca71f8e9c5
[ "Apache-2.0" ]
null
null
null
snuba/web/wsgi.py
fpacifici/snuba
cf732b71383c948f9387fbe64e9404ca71f8e9c5
[ "Apache-2.0" ]
null
null
null
from snuba.environment import setup_logging, setup_sentry setup_logging() setup_sentry() from snuba.web.views import application # noqa
19.857143
57
0.820144
19
139
5.789474
0.578947
0.163636
0.309091
0.418182
0
0
0
0
0
0
0
0
0.115108
139
6
58
23.166667
0.894309
0.028777
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
6dd8cf8a8a68a0ff5d7da02d4e85fb2e0ae670e9
143
py
Python
PaxAppProject/settings/prod.py
sandra-platano-zz/shopngo
4cc7d621d4d71b11338471cc3d781f760cb2b3b9
[ "Apache-2.0" ]
null
null
null
PaxAppProject/settings/prod.py
sandra-platano-zz/shopngo
4cc7d621d4d71b11338471cc3d781f760cb2b3b9
[ "Apache-2.0" ]
6
2021-04-30T20:42:08.000Z
2022-03-11T23:37:36.000Z
PaxAppProject/settings/prod.py
sandra-platano-zz/shopngo
4cc7d621d4d71b11338471cc3d781f760cb2b3b9
[ "Apache-2.0" ]
null
null
null
try : from PaxAppProject.PaxAppProject.settings.common import * except: from PaxAppProject.settings.common import * DEBUG = False
23.833333
62
0.741259
15
143
7.066667
0.6
0.320755
0.509434
0.622642
0
0
0
0
0
0
0
0
0.188811
143
6
63
23.833333
0.913793
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
6
6def92159b4ff36528ef48cfaf78302759246474
96
py
Python
venv/lib/python3.8/site-packages/cachy/contracts/factory.py
Retraces/UkraineBot
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
[ "MIT" ]
2
2022-03-13T01:58:52.000Z
2022-03-31T06:07:54.000Z
venv/lib/python3.8/site-packages/cachy/contracts/factory.py
DesmoSearch/Desmobot
b70b45df3485351f471080deb5c785c4bc5c4beb
[ "MIT" ]
19
2021-11-20T04:09:18.000Z
2022-03-23T15:05:55.000Z
venv/lib/python3.8/site-packages/cachy/contracts/factory.py
DesmoSearch/Desmobot
b70b45df3485351f471080deb5c785c4bc5c4beb
[ "MIT" ]
null
null
null
/home/runner/.cache/pip/pool/17/a5/12/276a281a34ce14d4bc82a98ac60f0b1cadd267646071b071408d5062c1
96
96
0.895833
9
96
9.555556
1
0
0
0
0
0
0
0
0
0
0
0.458333
0
96
1
96
96
0.4375
0
0
0
0
0
0
0
0
1
0
0
0
0
null
null
0
0
null
null
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
1
0
0
0
1
0
0
0
0
0
0
0
0
6
099f628c763bd826ada4296ebd88d2f06949259f
23
py
Python
fine/module/sudoku/solvingSudokuV2.py
Nomeleel/fine_service
2081686a8c4202cacb604b0b52b4ca91512ed164
[ "Apache-2.0" ]
1
2020-06-05T02:43:20.000Z
2020-06-05T02:43:20.000Z
fine/module/sudoku/solvingSudokuV2.py
Nomeleel/fine_service
2081686a8c4202cacb604b0b52b4ca91512ed164
[ "Apache-2.0" ]
null
null
null
fine/module/sudoku/solvingSudokuV2.py
Nomeleel/fine_service
2081686a8c4202cacb604b0b52b4ca91512ed164
[ "Apache-2.0" ]
null
null
null
# TODO imp by Nomeleel.
23
23
0.73913
4
23
4.25
1
0
0
0
0
0
0
0
0
0
0
0
0.173913
23
1
23
23
0.894737
0.913043
0
null
0
null
0
0
null
0
0
1
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
1
0
0
0
1
0
0
0
0
0
0
6
09a31119326e0b8c4b9e5fef8398f64fa8117baa
34
py
Python
application/forms/__init__.py
imghack/image_bot
d686342afa1862f7fba718e86e6737a57f828e1e
[ "MIT" ]
3
2018-01-13T11:57:42.000Z
2018-01-14T12:18:05.000Z
application/forms/__init__.py
imghack/image_bot
d686342afa1862f7fba718e86e6737a57f828e1e
[ "MIT" ]
32
2018-01-11T22:15:28.000Z
2018-03-05T17:09:14.000Z
application/forms/__init__.py
imghack/image_bot
d686342afa1862f7fba718e86e6737a57f828e1e
[ "MIT" ]
1
2018-03-13T00:05:57.000Z
2018-03-13T00:05:57.000Z
from .parse_form import ParseForm
17
33
0.852941
5
34
5.6
1
0
0
0
0
0
0
0
0
0
0
0
0.117647
34
1
34
34
0.933333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
110bfd4fdfcaa2e015b14c428a30dc381d425831
124
py
Python
prog_python/bibliotecas/mod_circulo.py
TCGamer123/python
82ad1f84b52d6cc7253fb4c5522ae8389824930a
[ "MIT" ]
1
2022-03-08T13:29:59.000Z
2022-03-08T13:29:59.000Z
prog_python/bibliotecas/mod_circulo.py
TCGamer123/python
82ad1f84b52d6cc7253fb4c5522ae8389824930a
[ "MIT" ]
null
null
null
prog_python/bibliotecas/mod_circulo.py
TCGamer123/python
82ad1f84b52d6cc7253fb4c5522ae8389824930a
[ "MIT" ]
null
null
null
Pi = 3.14159; def area(raio): return Pi * (raio ** 2); def comprimento_circunferencia(raio): return 2 * Pi * raio;
17.714286
37
0.629032
18
124
4.277778
0.555556
0.25974
0
0
0
0
0
0
0
0
0
0.083333
0.225806
124
7
38
17.714286
0.71875
0
0
0
0
0
0
0
0
0
0
0
0
1
0.4
false
0
0
0.4
0.8
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
6
1138aac292aa5383d97ffb86c5a3fe701590f6b4
154
py
Python
tests/test_pi.py
rkawala/math-doodles
3989dee0c0736d1f311bc769145d9dbfb416d9d6
[ "BSD-2-Clause" ]
null
null
null
tests/test_pi.py
rkawala/math-doodles
3989dee0c0736d1f311bc769145d9dbfb416d9d6
[ "BSD-2-Clause" ]
null
null
null
tests/test_pi.py
rkawala/math-doodles
3989dee0c0736d1f311bc769145d9dbfb416d9d6
[ "BSD-2-Clause" ]
null
null
null
from doodles.pi import do_iterate from truth.truth import AssertThat def test_three_iterations(): AssertThat(do_iterate(300)).IsWithin(0.01).Of(3.13)
30.8
55
0.792208
25
154
4.72
0.76
0.152542
0
0
0
0
0
0
0
0
0
0.064748
0.097403
154
5
55
30.8
0.784173
0
0
0
0
0
0
0
0
0
0
0
0.5
1
0.25
true
0
0.5
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
1
0
1
0
0
0
0
6
febd19a1fd473d4cf38480e061203b449203f95b
41,645
py
Python
2/shortest_palindrome.py
IronCore864/leetcode
a62a4cdde9814ae48997176debcaad537f7ad01f
[ "Apache-2.0" ]
4
2018-03-07T02:56:03.000Z
2021-06-15T05:43:31.000Z
2/shortest_palindrome.py
IronCore864/leetcode
a62a4cdde9814ae48997176debcaad537f7ad01f
[ "Apache-2.0" ]
null
null
null
2/shortest_palindrome.py
IronCore864/leetcode
a62a4cdde9814ae48997176debcaad537f7ad01f
[ "Apache-2.0" ]
1
2021-09-02T12:05:15.000Z
2021-09-02T12:05:15.000Z
class Solution: # KMP http://blog.csdn.net/buaa_shang/article/details/9907183 def shortestPalindrome(self, s): """ :type s: str :rtype: str """ tmp = s + "#" + s[::-1] kmp_table = [0] for i in range(1, len(tmp)): index = kmp_table[i - 1] while index > 0 and tmp[index] != tmp[i]: index = kmp_table[index - 1] kmp_table.append(index + (1 if tmp[index] == tmp[i] else 0)) print(kmp_table) return s[kmp_table[-1]:][::-1] + s s = Solution() print(s.shortestPalindrome('aacecaaa')) print(s.shortestPalindrome('aaaaa')) print(s.shortestPalindrome('abcd')) print(s.shortestPalindrome( "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz")) print(s.shortestPalindrome( 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
acdaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaa"))
1,487.321429
40,010
0.991836
97
41,645
425.752577
0.42268
0.001162
0.002906
0.000581
0
0
0
0
0
0
0
0.00041
0.005163
41,645
27
40,011
1,542.407407
0.996404
0.002041
0
0.105263
0
0
0.984974
0.984541
0
1
0
0
0
1
0.052632
false
0
0
0
0.157895
0.315789
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
1
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
6
28a9c42911b7a1646db76d2dd6edf8efd33aabfe
5,210
py
Python
demos/login/qrlogin.py
LXG-Shadow/pyncm
15bff192a72506ac4b0a770ba77d50f4382e1e36
[ "Apache-2.0" ]
null
null
null
demos/login/qrlogin.py
LXG-Shadow/pyncm
15bff192a72506ac4b0a770ba77d50f4382e1e36
[ "Apache-2.0" ]
null
null
null
demos/login/qrlogin.py
LXG-Shadow/pyncm
15bff192a72506ac4b0a770ba77d50f4382e1e36
[ "Apache-2.0" ]
null
null
null
'''二维码登录 Demo - 额外依赖 - qrcode - Pillow ''' from io import BytesIO from pyncm.apis.login import GetCurrentLoginStatus, WriteLoginInfo import pyncm from PIL import Image import qrcode,time,base64 # region getting GUI stuff to work cross-platformly def dot_thingy(): while True: s = list(' ') while s.count('.') < len(s): s[s.count('.')] = '.' yield ''.join(s) dot = dot_thingy() im1 = b'iVBORw0KGgoAAAANSUhEUgAAAMIAAAAJCAIAAADvrGQRAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAAEnQAABJ0Ad5mH3gAAAPPSURBVFhHrViLsdswDHtzeaDM42m8jIdJAYKkSElJ/e4erk1lSQT4k+zrz32dx8+K1/UO3Ner7igr7/f18tnEcd6+loD9OincFD8O52/UfweIHOdpnh6vyzy5z5dcus9ziG4cNVvM3ZcMGfETNy0wIbQFV8C60dzwRBk4DnfpCb56EdzchkGE8Ahpy1xckSUCkpzpzQLHbftP3RlouaV9wXOPZPg6pUw78yTNmWl7RCqvJSejpEWfLk+PM0YOA4wmIrogxNyWCEvvXOfwLhBzzrvbYlh1Az2bCTUmk4Dmhoe/Yrb+kLEDe8Jcgr1smax5mhhFSV+1zYsX63n4ErGfbQTwULwQjBzsgXfhIfkMpQxT2TNvuzzjIMCfyXsNpscVcLgomQwlGJm1kYXbJFWPuBWRBysJA8+bsuJDDrpuYg3PG0M3UQFC3hF05lKOutmzOVbpZMauPf+F7z/CEScNGLkFH6mPDXYbYRL3Fbexp3gNPhEeZwHMJTiFJ8lWBHjHRw++WSS8Qjyq6UW2iy6O6XELkEsGJIjJuBhybyOLADe+ZJJYR11IsWdI3QLPU5+/Ab5scGQ9D6jcajvQmauLQqm4B6fPFV0o5a76DK+Ivcxa4VzLJHSJulqWSC81EKgiUajIrEGrFgdTsr7EMbtPgIfiGAXrmOQAzUSuvA6APfdHj12oXpi1tY63jY3X28gzUkZjJrRwjRUZU/muW+FaNKgr8VILR1L1CbP2sPMS40iZ0/giM0LRlpC+wk4NNvMPHqsZJK036HaUwL7p2KFsI7OhBWMikZMYYA3nYYdfNVnlNnh4y7x75ePdhiCnfr1aMpNGPN0I0+MWIvaxR8Z/4++o3vAqBmONSirGU1TdQNEqUBdhMc5E6YMdktlzM8vI+2x7ztR2/E4uhC+wLm4BpMPtaRLwu76KccA4izbi951YrDw8sRG4uge/1oe+PmVlnBHtrujeTLHkdmd0GYB3BpGKv2ujqZbeHsiCffwxISFJZP7DJgKSjls/wa6HgE0bscIh8qSNFublyqt+uqBm6u9DgNy+bWYz9eXkpsT0UpswAjdLGmIg+6mIJZ6Nyuc2Cr5WUy8ceLRGa/+gSlNRNpWG0g8Oy0VQu2FIcje+ngCbVwjNntaaLtjIr7qBkc2C0auZ6k+V3jGbuVIRGD45Z/Naq0sgCSoE30LtGVEidYsm5PbaRllFwQmHjCiF5pYt9GPSe66ssXs0IOemKn8H+IBODAkIF0kv3fQ/Y0TuX/Lza2wpomlU8l9lYb85QsnFJhtyT7Ft/RlWfWq93/8AexcHtWBkZx4AAAAASUVORK5CYII=' im2 = 
b'iVBORw0KGgoAAAANSUhEUgAAANwAAAAJCAIAAADWy9VKAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAAEnQAABJ0Ad5mH3gAAAZsSURBVFhHjVc7cuJAEB3vWZACyieAE4ATRaRkKBQJGSGZEwghIyVSgnQCfAKXAqS7aF93z6dHwt595QJppr+ve3qw6fu+qip8OlSb2bG1zy8xEmiPmx8W1E57nG20GwLWjJkdK/oym4peIxnvqdrEG1CtKuVyZFwv4PmnjP6Z6yuMg1EIjqsNHl77HhHGCKu/RMzuPcSVXrGYHY+0GOJULsdJy4oyM5uNA/w/riIppgBAPrrOvAeIbNvKOmG22RB91JSyy5oOlBRbtyoQDa6O3q3ksakgw8pw3FYtTB2dZ2qeyDIhys7RNUo6KhKpbYARyxyNOLRLkCbJI5RInJhRARFIgt9dKqpk/4ZQrZNS7IinYZuItxYp2gUGgqQyDLIkc7zG74ETZzNQ8AsGCRNc0qp8DOY9nkuQtUH5RLgh0DL+FdsuYOx5mwPr8oqOUDY97OYR6cIQF0wMc1OKlncisESKqSHLgCeHskVPuxewEfWIqvywM3AitUf7ausgakQZomVNd/BcOBCXTDz8PnRYGIJsjhVUxDaSENqQsNdQNHhjgDUTdsknL+qckfAgXxhREz/ISiEsDw7Kd4CT8ePGno+BX5YFMD5GGQRWWZRfxnxItYJRhrSNdmChyRkHjqDFnFWTJFwtAMPbmyMPu0AZP5Jp7zD4UdoAC8hoFnkE7ijh3Pwh1ERZIDgdsZo4LhOiEXbhk/7spgUMHnn42MtK5CMWXEocvZ/FL+o76IDfoKsoUIxIkrZH7OEPWf0KUmU2oeZ6wpadMHar4LvIhvKCa8IrG7QmhCBOck8R+/FtASEApYzVxZ5uCP8sfaF+Bowa3S7Qlz1RAMf3Z1I8+uvUmEXxuF73bvPrfd/35wU9vpMmRd2d5nnNuwpdm177ft8czLUyh1NnlwWT4ly0MF3nb0Cy3S7p+01Zud8u9onw9f20T2ZxlgTPi8WZ4mi2y21j94DudKpNm6ZFslshtNvtQg0AijLIkq6w067MF6SxXOZ1/ZxeiwlpswAh0PmQnf/DBXnkOSfFWN7sBuG2fJvf708DWjJDn/upZAX+SHh+OkFTPuXdk3Zbrlmt2V62CW8myVpz2nW1GCGgGn5rkqKEryHkM/LSfJe5NxDqIIRkZbJ9p6Z47GJr3amc7pCMWZhnVOLFOSvn89J8TOBGJQJQX/XX1TQFrxx2wknN5ygco0PtsZDf8ZxAWGrBPfeHBQSTttxSDQmXctR/k+I6LQdtZyaL5D5/K7NH+nlwNdeon8/E5uxPrzR7gB3nMcDlckulmc/B4vKCI5SVRCXnPklBD5Fk7ibr96tVSw2QTeHModkm+T3d0xUKiqfTsNOd8mEacDc+cGNwgTmW/nx2nY3eXtEmNx2X9VGk8A5JTsC1LJcJhTKI2H4S5DhAOQH5X6wQHdQGx5pBKdxL6lqBa3ZC97yRZnAW4E8gH5LVdIpxI4jrUOdlxlQN0Z0+zc5WdvFh1hFRi93KfNuH5j5k1WLS8hkl7FdGClF/Nu/UhucPfo0QmrI+nUKSCt+BJ5p8UdtRkehwo12WZj9oSS7SgU7Rj5A6Xc2aJwSe0tbl5bm8XnkLLkouo51pDeYuZtKnGp8WMpGW3xizH89Dgj4ZyfD5jeG7lnKKD30AB/Xi8mdwMq6sU75f+PBLyzp098aWJYLtWEZ8ydrW6Uz6G5NyoQ2dRajzw3RXFCkuDbsSgKTLbDgsCCBzfbMtQBcETg59uSlLDXvdNzTOR83xG7rTMzvT7GU0nzR3ZAQR87Ypm8O8TIuUqSSoIyPX91FNmgAq0r5JuF3QNYh0UhQuNfBcbd5T4y4cf327lPgOxohq8cuBzkvXmeS59reKNNfbZzuhPkGT7TxpdKo5zuFFQ5D6SnXcpFwUir
AWvwU4DEsEP/kJszhT1P+i1wYnGI8nmtPOvN7tTusmGxzsetwjY9T3Z0Jq3B8Cbdd3RJRoAIWL64z26MqNT903ip/1Z5PzVapR55gWjwefl/FRoYalGbrI3v04HwKNBWdPpkMFrdoE/i8Xd0HjqqAGJw/uN6icbPuvi23PGDwlwh696lABrY3nwa6GnSN67gRpqA5CGK8Qwi96cqwE2Dr+y8K/QFjVUb2EBMNPEfkDvLZjlcNm9KPe7sbUsazF0N44Arfyyu4IL2o0csHuxabyJhZZYczBKKqhf0kpEoobi6CVfkig7/8CZR2m6HerSVgAAAAASUVORK5CYII=' # don't mind these.they're base64 encoded images for prompts im1,im2 = base64.b64decode(im1),base64.b64decode(im2) im1,im2 = Image.open(BytesIO(im1)),Image.open(BytesIO(im2)) #endregion uuid = pyncm.login.LoginQrcodeUnikey()['unikey'] url = f'https://music.163.com/login?codekey={uuid}' img = qrcode.make(url) # dimesion will always be 490,490 img.paste(im1,(10,10)) img.paste(im2,(7,430)) img.show() # though tkinter was too expensive to use so here's my repalcement ((( print('[-] UUID:',uuid) while True: rsp = pyncm.login.LoginQrcodeCheck(uuid) if rsp['code'] == 803 or rsp['code'] == 800:break message = f"[!] {rsp['code']} -- {rsp['message']}" print(message,next(dot),end='\r') time.sleep(1) WriteLoginInfo(GetCurrentLoginStatus()) print('[+] Logged in as %s (Last known IP: %s)' % ( pyncm.GetCurrentSession().login_info['content']['profile']['nickname'], pyncm.GetCurrentSession().login_info['content']['profile']['lastLoginIP'] ) )
115.777778
2,345
0.88618
294
5,210
15.690476
0.714286
0.004552
0.006937
0.01344
0.01951
0.01951
0
0
0
0
0
0.123718
0.045873
5,210
45
2,346
115.777778
0.804265
0.04952
0
0.057143
0
0.057143
0.804533
0.764873
0
1
0
0
0
1
0.028571
false
0
0.142857
0
0.171429
0.085714
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
1
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
6
e9375150d646c69792e41462f297468ef12bce3a
38
py
Python
utils/__init__.py
shinji19/sealed-deck-generator
8321d023fbef3a4b58c37fe36ac9b225b22bb4d1
[ "MIT" ]
null
null
null
utils/__init__.py
shinji19/sealed-deck-generator
8321d023fbef3a4b58c37fe36ac9b225b22bb4d1
[ "MIT" ]
null
null
null
utils/__init__.py
shinji19/sealed-deck-generator
8321d023fbef3a4b58c37fe36ac9b225b22bb4d1
[ "MIT" ]
null
null
null
from .deck_builder import DeckBuilder
19
37
0.868421
5
38
6.4
1
0
0
0
0
0
0
0
0
0
0
0
0.105263
38
1
38
38
0.941176
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
3a60431f782f55131638453c9595902736ec0429
7,070
py
Python
schicexplorer/test/test_scHicClusterCompartments.py
joachimwolff/scHiCExplorer
8aebb444f3968d398c260690c89c9cd0e3186f0e
[ "MIT" ]
10
2019-12-09T04:11:18.000Z
2021-03-24T15:29:06.000Z
schicexplorer/test/test_scHicClusterCompartments.py
joachimwolff/scHiCExplorer
8aebb444f3968d398c260690c89c9cd0e3186f0e
[ "MIT" ]
2
2020-12-24T12:32:18.000Z
2021-01-11T09:03:34.000Z
schicexplorer/test/test_scHicClusterCompartments.py
joachimwolff/scHiCExplorer
8aebb444f3968d398c260690c89c9cd0e3186f0e
[ "MIT" ]
2
2019-12-09T04:11:21.000Z
2020-12-24T12:26:46.000Z
import warnings warnings.simplefilter(action="ignore", category=RuntimeWarning) warnings.simplefilter(action="ignore", category=PendingDeprecationWarning) import pytest import os from tempfile import NamedTemporaryFile, mkdtemp from schicexplorer import scHicClusterCompartments import psutil AVAILABLE_MEMORY = psutil.virtual_memory()[0] // (2**30) ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), "test-data/") MEMORY = 2 def are_files_equal(file1, file2, delta=2, skip=0): equal = True if delta: mismatches = 0 with open(file1) as textfile1, open(file2) as textfile2: for i, (x, y) in enumerate(zip(textfile1, textfile2)): if i < skip: continue if x != y: if delta: mismatches += 1 if mismatches > delta: equal = False break else: equal = False break return equal def are_files_equal_clustering(file1, file2, number_of_clusters=3, delta=2, skip=0): equal = True if delta: mismatches = 0 numberOfClusters = set() with open(file1) as textfile1, open(file2) as textfile2: for i, (x, y) in enumerate(zip(textfile1, textfile2)): if i < skip: continue x = x.split(' ') y = y.split(' ') numberOfClusters.add(y[1]) x[0] = x[0].lstrip('/cells/') y[0] = y[0].lstrip('/cells/') if x[0] != y[0]: if delta: mismatches += 1 if mismatches > delta: equal = False break else: equal = False break if len(numberOfClusters) == number_of_clusters: return equal else: return False return equal def test_kmeans_clustering(): outfile = NamedTemporaryFile(suffix='.txt', delete=False) outfile.close() args = "--matrix {} --numberOfClusters {} --clusterMethod {} \ --outFileName {} -t {}".format(ROOT + 'test_matrix.scool', 3, 'kmeans', outfile.name, 4).split() scHicClusterCompartments.main(args) assert are_files_equal_clustering(ROOT + "scHicClusterCompartments/cluster_kmeans.txt", outfile.name, number_of_clusters=3) def test_spectral_clustering(): outfile = NamedTemporaryFile(suffix='.txt', delete=False) outfile.close() args = "--matrix {} --numberOfClusters {} --clusterMethod {} \ --outFileName {} 
-t {} ".format(ROOT + 'test_matrix.scool', 3, 'spectral', outfile.name, 4).split() scHicClusterCompartments.main(args) assert are_files_equal_clustering(ROOT + "scHicClusterCompartments/cluster_spectral.txt", outfile.name) def test_kmeans_binarization_clustering(): outfile = NamedTemporaryFile(suffix='.txt', delete=False) outfile.close() args = "--matrix {} --numberOfClusters {} --clusterMethod {} \ --outFileName {} -t {} --binarization".format(ROOT + 'test_matrix.scool', 3, 'kmeans', outfile.name, 4).split() scHicClusterCompartments.main(args) assert are_files_equal_clustering(ROOT + "scHicClusterCompartments/cluster_kmeans_binarization.txt", outfile.name) def test_kmeans_histonmark_clustering(): outfile = NamedTemporaryFile(suffix='.txt', delete=False) outfile.close() args = "--matrix {} --numberOfClusters {} --clusterMethod {} \ --outFileName {} -t {} --histonMarkType {} --binarization --norm".format(ROOT + 'test_matrix.scool', 3, 'kmeans', outfile.name, 4, ROOT + 'scHicClusterCompartments/mm9_H3K36me3.bed.gz').split() scHicClusterCompartments.main(args) assert are_files_equal_clustering(ROOT + "scHicClusterCompartments/cluster_kmeans_binarization_norm_histon_track.txt", outfile.name) def test_spectral_histonmark_clustering(): outfile = NamedTemporaryFile(suffix='.txt', delete=False) outfile.close() args = "--matrix {} --numberOfClusters {} --clusterMethod {} \ --outFileName {} -t {} --histonMarkType {} --binarization --norm".format(ROOT + 'test_matrix.scool', 3, 'spectral', outfile.name, 4, ROOT + 'scHicClusterCompartments/mm9_H3K36me3.bed.gz').split() scHicClusterCompartments.main(args) assert are_files_equal_clustering(ROOT + "scHicClusterCompartments/cluster_spectral_binarization_norm_histon_track.txt", outfile.name) def test_spectral_extraTrack_clustering(): outfile = NamedTemporaryFile(suffix='.txt', delete=False) outfile.close() args = "--matrix {} --numberOfClusters {} --clusterMethod {} \ --outFileName {} -t {} --binarization --extraTrack {} 
--norm".format(ROOT + 'test_matrix.scool', 3, 'spectral', outfile.name, 4, ROOT + 'scHicClusterCompartments/mm9_gene.bed.gz').split() scHicClusterCompartments.main(args) assert are_files_equal_clustering(ROOT + "scHicClusterCompartments/cluster_spectral_binarization_norm_gene_track.txt", outfile.name) def test_kmeans_extraTrack_clustering(): outfile = NamedTemporaryFile(suffix='.txt', delete=False) outfile.close() args = "--matrix {} --numberOfClusters {} --clusterMethod {} \ --outFileName {} -t {} --binarization --extraTrack {} --norm".format(ROOT + 'test_matrix.scool', 3, 'kmeans', outfile.name, 4, ROOT + 'scHicClusterCompartments/mm9_gene.bed.gz').split() scHicClusterCompartments.main(args) assert are_files_equal_clustering(ROOT + "scHicClusterCompartments/cluster_kmeans_binarization_norm_gene_track.txt", outfile.name) def test_kmeans_norm_clustering(): outfile = NamedTemporaryFile(suffix='.txt', delete=False) outfile.close() args = "--matrix {} --numberOfClusters {} --clusterMethod {} \ --outFileName {} -t {} --binarization --norm".format(ROOT + 'test_matrix.scool', 3, 'kmeans', outfile.name, 4).split() scHicClusterCompartments.main(args) assert are_files_equal_clustering(ROOT + "scHicClusterCompartments/cluster_kmeans_binarization_norm.txt", outfile.name) def test_version(): args = "--version".split() with pytest.raises(SystemExit) as pytest_wrapped_e: scHicClusterCompartments.main(args) assert pytest_wrapped_e.type == SystemExit assert pytest_wrapped_e.value.code == 0 def test_help(): args = "--help".split() with pytest.raises(SystemExit) as pytest_wrapped_e: scHicClusterCompartments.main(args) assert pytest_wrapped_e.type == SystemExit assert pytest_wrapped_e.value.code == 0
41.345029
175
0.617115
688
7,070
6.172965
0.162791
0.041441
0.03061
0.089475
0.848834
0.825053
0.812338
0.812338
0.812338
0.812338
0
0.013074
0.264356
7,070
170
176
41.588235
0.803499
0
0
0.62406
0
0
0.133522
0.094625
0
0
0
0
0.090226
1
0.090226
false
0
0.045113
0
0.165414
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
3ad1c42ca9056616b58a25ae445d692f89d35cb3
126
py
Python
src/mgng/__init__.py
StefanUlbrich/MergeGNG
526215ca4874116e7098292dcf0a6a021e79dcf8
[ "MIT" ]
3
2021-11-25T03:39:58.000Z
2022-02-20T16:27:58.000Z
src/mgng/__init__.py
StefanUlbrich/MergeGNG
526215ca4874116e7098292dcf0a6a021e79dcf8
[ "MIT" ]
null
null
null
src/mgng/__init__.py
StefanUlbrich/MergeGNG
526215ca4874116e7098292dcf0a6a021e79dcf8
[ "MIT" ]
null
null
null
from mgng.helpers import get_dymmy_2D_data, lemniscate from mgng.validators import repr_ndarray from mgng.mgng import MergeGNG
42
54
0.873016
20
126
5.3
0.65
0.226415
0
0
0
0
0
0
0
0
0
0.008772
0.095238
126
3
55
42
0.921053
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
c916b6aa1a6a330c155349922c4324414192c7a3
79
py
Python
step5_inheritance/sensors/temperature.py
pting9y/python
a553b3048143f48ed617916335b13e31e4253eb2
[ "MIT" ]
null
null
null
step5_inheritance/sensors/temperature.py
pting9y/python
a553b3048143f48ed617916335b13e31e4253eb2
[ "MIT" ]
null
null
null
step5_inheritance/sensors/temperature.py
pting9y/python
a553b3048143f48ed617916335b13e31e4253eb2
[ "MIT" ]
null
null
null
"""" """ from .sensor import Sensor class TemperatureSensor(Sensor): pass
11.285714
32
0.683544
8
79
6.75
0.75
0
0
0
0
0
0
0
0
0
0
0
0.177215
79
7
33
11.285714
0.830769
0.012658
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
a31671424b8ea15dac6c47c68b65d58e3ca46d37
45
py
Python
python/loki/scanners/integrators/__init__.py
agu3rra/loki
0c6e30516f087113340d3f396c13650ca0bd095b
[ "MIT" ]
null
null
null
python/loki/scanners/integrators/__init__.py
agu3rra/loki
0c6e30516f087113340d3f396c13650ca0bd095b
[ "MIT" ]
7
2020-05-09T10:48:07.000Z
2020-05-30T14:00:00.000Z
python/loki/scanners/integrators/__init__.py
agu3rra/goss
0c6e30516f087113340d3f396c13650ca0bd095b
[ "MIT" ]
null
null
null
from .github_advisory import GitHubAdvisory
22.5
44
0.866667
5
45
7.6
1
0
0
0
0
0
0
0
0
0
0
0
0.111111
45
1
45
45
0.95
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
a33f8d04755b1c2d6340f6836be88c3427411c5a
2,085
py
Python
kbcqa/method_sp/grounding/grounding_args.py
nju-websoft/SkeletonKBQA
8cf2e697830ef09dca40692e7d254b61f9ffdf8d
[ "Apache-2.0" ]
6
2021-06-05T02:02:13.000Z
2022-03-14T14:03:54.000Z
kbcqa/method_sp/grounding/grounding_args.py
nju-websoft/SkeletonKBQA
8cf2e697830ef09dca40692e7d254b61f9ffdf8d
[ "Apache-2.0" ]
1
2022-03-16T01:53:38.000Z
2022-03-16T01:53:38.000Z
kbcqa/method_sp/grounding/grounding_args.py
nju-websoft/SkeletonKBQA
8cf2e697830ef09dca40692e7d254b61f9ffdf8d
[ "Apache-2.0" ]
2
2021-06-10T09:17:56.000Z
2022-03-15T00:12:12.000Z
from common import globals_args from method_sp.grounding import grounding_utils import os from common import hand_files q_mode = globals_args.argument_parser.q_mode # 2.2 args if q_mode == 'cwq': oracle_file_root = globals_args.fn_cwq_file.grounded_graph_file+'result/' oracle_all_files_path_names = os.listdir(oracle_file_root) literal_to_id_map = grounding_utils.read_literal_to_id_map(file_root=globals_args.fn_cwq_file.grounded_graph_file) kb_relations = hand_files.read_set(globals_args.kb_freebase_latest_file.freebase_relations_file) mediators_instances_set = hand_files.read_set(globals_args.kb_freebase_latest_file.mediators_instances_file) schema_lines_list = hand_files.read_list(globals_args.kb_freebase_latest_file.schema_file) property_reverse_dict = hand_files.read_dict(globals_args.kb_freebase_latest_file.freebase_reverse_property) literal_property_dict = hand_files.read_dict(globals_args.kb_freebase_latest_file.freebase_literal_property) elif q_mode == 'graphq': oracle_file_root = globals_args.fn_graph_file.grounded_graph_file+'result/' oracle_all_files_path_names = os.listdir(oracle_file_root) literal_to_id_map = grounding_utils.read_literal_to_id_map(file_root=globals_args.fn_graph_file.grounded_graph_file) kb_relations = hand_files.read_set(globals_args.kb_freebase_en_2013.freebase_relations_file) mediators_instances_set = hand_files.read_set(globals_args.kb_freebase_en_2013.mediators_instances_file) schema_lines_list = hand_files.read_list(globals_args.kb_freebase_en_2013.schema_file) property_reverse_dict = hand_files.read_dict(globals_args.kb_freebase_en_2013.freebase_reverse_property_file) literal_property_dict = hand_files.read_dict(globals_args.kb_freebase_en_2013.freebase_literal_property) elif q_mode == 'lcquad': oracle_file_root = globals_args.fn_lcquad_file.grounded_graph_file+'result/' oracle_all_files_path_names = os.listdir(oracle_file_root) kb_relations = hand_files.read_list_yuanshi(globals_args.kb_dbpedia_201604_file.dbpedia_relations_file)
56.351351
120
0.852758
323
2,085
4.931889
0.173375
0.124294
0.089768
0.131827
0.833647
0.818581
0.753923
0.753923
0.753923
0.753923
0
0.014644
0.082974
2,085
36
121
57.916667
0.818515
0.003837
0
0.111111
0
0
0.017358
0
0
0
0
0
0
1
0
false
0
0.148148
0
0.148148
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
a3429a1e004afc34bf367a716d44a6988da804a4
2,821
gyp
Python
binding.gyp
Mikhus/node-murmurhash-native
3945e1fa07002d06ee9d9bd6b89ef5b3d538ad37
[ "MIT" ]
null
null
null
binding.gyp
Mikhus/node-murmurhash-native
3945e1fa07002d06ee9d9bd6b89ef5b3d538ad37
[ "MIT" ]
null
null
null
binding.gyp
Mikhus/node-murmurhash-native
3945e1fa07002d06ee9d9bd6b89ef5b3d538ad37
[ "MIT" ]
null
null
null
{ 'targets': [ { 'target_name': 'murmurhash', 'sources': [ 'src/murmurhash/MurmurHash2.cpp', 'src/murmurhash/PMurHash.cpp', 'src/murmurhash/PMurHash128.cpp', 'src/nodemurmurhash.cc' ], 'include_dirs': [ "<!(node -e \"require('nan')\")", 'src/murmurhash', 'src' ], 'defines': [ # 'NODE_MURMURHASH_TEST_BYTESWAP=1', # 'NODE_MURMURHASH_TEST_ALIGNED=1', 'NODE_MURMURHASH_KEY_BUFFER_SIZE=1024' ], 'conditions': [ ['target_arch!="x64"', { 'defines': [ 'NODE_MURMURHASH_DEFAULT_32BIT', ] }], ['OS=="win"', { 'msvs_settings': { 'VCCLCompilerTool': { 'ExceptionHandling': 1, 'AdditionalOptions': ['/EHsc'], # pre 1.0 node compiler complaining 'DisableSpecificWarnings': ['4506', '4996'] } } }], ['OS!="win"', { "cflags": [ "-Wno-deprecated-declarations", ], "xcode_settings": { "OTHER_CFLAGS": [ "-Wno-deprecated-declarations", ], }, }] ] }, { 'target_name': 'murmurhashincremental', 'sources': [ 'src/murmurhash/PMurHash.cpp', 'src/murmurhash/PMurHash128.cpp', 'src/incremental/hasher.cc' ], 'include_dirs': [ "<!(node -e \"require('nan')\")", 'src/murmurhash', 'src/incremental', 'src' ], 'defines': [ # 'NODE_MURMURHASH_TEST_BYTESWAP=1', # 'NODE_MURMURHASH_TEST_ALIGNED=1', 'NODE_MURMURHASH_KEY_BUFFER_SIZE=1024' ], 'conditions': [ ['target_arch!="x64"', { 'defines': [ 'NODE_MURMURHASH_DEFAULT_32BIT', ] }], ['OS=="win"', { 'msvs_settings': { 'VCCLCompilerTool': { 'ExceptionHandling': 1, 'AdditionalOptions': ['/EHsc'], # pre 1.0 node compiler complaining 'DisableSpecificWarnings': ['4506', '4996'] } } }], ['OS!="win"', { "cflags": [ "-Wno-deprecated-declarations", ], "xcode_settings": { "OTHER_CFLAGS": [ "-Wno-deprecated-declarations", ], }, }] ] }, { "target_name": "action_after_build", "type": "none", "dependencies": [ "murmurhash", "murmurhashincremental" ], "copies": [ { "files": [ "<(PRODUCT_DIR)/murmurhash.node", "<(PRODUCT_DIR)/murmurhashincremental.node" ], "destination": "<(module_path)" } ] } ] }
25.880734
81
0.453031
193
2,821
6.393782
0.357513
0.090762
0.068071
0.100486
0.765802
0.765802
0.765802
0.765802
0.765802
0.678282
0
0.028177
0.383552
2,821
108
82
26.12037
0.681426
0.072669
0
0.644231
0
0
0.452281
0.226524
0
0
0
0
0
1
0
true
0
0
0
0
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
6
a34f1f08449895f9b23ff3f78418b1b984d3729e
114
py
Python
framework/errors.py
sensoraCloud/BanditsFramework
d6c0b577f87dd86a7ab4785a52fed4a7ac258c8e
[ "BSD-3-Clause" ]
1
2019-12-01T15:26:06.000Z
2019-12-01T15:26:06.000Z
framework/errors.py
sensoraCloud/BanditsFramework
d6c0b577f87dd86a7ab4785a52fed4a7ac258c8e
[ "BSD-3-Clause" ]
null
null
null
framework/errors.py
sensoraCloud/BanditsFramework
d6c0b577f87dd86a7ab4785a52fed4a7ac258c8e
[ "BSD-3-Clause" ]
null
null
null
class InvalidActionError(BaseException): pass class InvalidCustomArgumentException(BaseException): pass
16.285714
52
0.807018
8
114
11.5
0.625
0.369565
0
0
0
0
0
0
0
0
0
0
0.140351
114
6
53
19
0.938776
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
6
a38ce05f632e87710f0f10f82773196834ee3c6b
114
py
Python
crawling_scraping/chapter02/save_csv_join.py
mmakmo/python
74c577f8d688de62b6e6574ea1457a322450ae64
[ "MIT" ]
null
null
null
crawling_scraping/chapter02/save_csv_join.py
mmakmo/python
74c577f8d688de62b6e6574ea1457a322450ae64
[ "MIT" ]
null
null
null
crawling_scraping/chapter02/save_csv_join.py
mmakmo/python
74c577f8d688de62b6e6574ea1457a322450ae64
[ "MIT" ]
null
null
null
print('rank,city,population') print(','.join(['1', '上海', '24150000'])) print(','.join(['2', 'カラチ', '23500000']))
22.8
41
0.561404
14
114
4.571429
0.785714
0.28125
0
0
0
0
0
0
0
0
0
0.169811
0.070175
114
4
42
28.5
0.433962
0
0
0
0
0
0.394737
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
6
a39ca6b802a99cb1171a619d1f3c02ff249f863c
25
py
Python
picpy/parsers/__init__.py
begeistert/picpy
62e238a0f71d60ecb2fa1434e25c65045b65bda7
[ "MIT" ]
null
null
null
picpy/parsers/__init__.py
begeistert/picpy
62e238a0f71d60ecb2fa1434e25c65045b65bda7
[ "MIT" ]
null
null
null
picpy/parsers/__init__.py
begeistert/picpy
62e238a0f71d60ecb2fa1434e25c65045b65bda7
[ "MIT" ]
null
null
null
from .asmparser import *
12.5
24
0.76
3
25
6.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.16
25
1
25
25
0.904762
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
6e6b6343573535786873bd929100374ffbd9ce03
13,259
py
Python
tests/persistence/database/operator.py
Chisanan232/pyocean
b5710660652ad4abe6845693e0576e99f9155084
[ "Apache-2.0" ]
null
null
null
tests/persistence/database/operator.py
Chisanan232/pyocean
b5710660652ad4abe6845693e0576e99f9155084
[ "Apache-2.0" ]
null
null
null
tests/persistence/database/operator.py
Chisanan232/pyocean
b5710660652ad4abe6845693e0576e99f9155084
[ "Apache-2.0" ]
null
null
null
from multirunnable.persistence.database.strategy import database_connection_pools, get_connection_pool from ...test_config import Test_Pool_Name, Test_Pool_Size,Database_Config, Database_Pool_Config from ._test_db_implement import MySQLSingleConnection, MySQLDriverConnectionPool, MySQLOperator import pytest _Single_Strategy: MySQLSingleConnection _Pool_Strategy: MySQLDriverConnectionPool _Data_Row_Number = 3 _Fetch_Size = 2 _Test_SQL = f"select * from stock_data_2330 limit {_Data_Row_Number};" @pytest.fixture(scope="function") def opts_with_single_conn_strategy() -> MySQLOperator: global _Single_Strategy _Single_Strategy = MySQLSingleConnection(**Database_Config) return MySQLOperator(conn_strategy=_Single_Strategy) @pytest.fixture(scope="function") def opts_with_conn_pool_strategy() -> MySQLOperator: global _Pool_Strategy Database_Pool_Config.update({ "pool_name": Test_Pool_Name, "pool_size": Test_Pool_Size }) _Pool_Strategy = MySQLDriverConnectionPool(**Database_Pool_Config) _Pool_Strategy.current_pool_name = Test_Pool_Name return MySQLOperator(conn_strategy=_Pool_Strategy) class TestPersistenceDatabaseOperatorWithSingleConnection: def test__connection(self, opts_with_single_conn_strategy: MySQLOperator): assert opts_with_single_conn_strategy._connection is _Single_Strategy.connection, f"For SingleConnection strategy, it shuold initial a database connection instance after we instantiate it." def test_initial_cursor(self, opts_with_single_conn_strategy: MySQLOperator): _conn = opts_with_single_conn_strategy._db_connection _cursor = opts_with_single_conn_strategy.initial_cursor(connection=_conn) assert _cursor is not None, f"For SingleConnection strategy, it shuold initial a database cursor instance after we instantiate strategy." 
def test__cursor(self, opts_with_single_conn_strategy: MySQLOperator): assert opts_with_single_conn_strategy._db_cursor is not None, f"For SingleConnection strategy, it shuold initial a database cursor instance when we call the '_cursor' property." @pytest.mark.skip(reason="Not implement testing logic. Consider about the feature's necessary.") def test_column_names(self, opts_with_single_conn_strategy: MySQLOperator): _column_names = opts_with_single_conn_strategy.column_names @pytest.mark.skip(reason="Not implement testing logic. Consider about the feature's necessary.") def test_row_count(self, opts_with_single_conn_strategy: MySQLOperator): _row_count = opts_with_single_conn_strategy.row_count @pytest.mark.skip(reason="Not implement testing logic. Consider about the feature's necessary.") def test_next(self, opts_with_single_conn_strategy: MySQLOperator): opts_with_single_conn_strategy.next() def test_execute(self, opts_with_single_conn_strategy: MySQLOperator): try: opts_with_single_conn_strategy.execute(_Test_SQL) except Exception as e: assert False, f"It should work finely without any issue." else: assert True, f"It work finely!" _data = opts_with_single_conn_strategy.fetch_all() assert _data is not None and len(_data) == _Data_Row_Number, f"It should get the data from the cursor instance with target SQL and the data row number should be '{_Data_Row_Number}'." @pytest.mark.skip(reason="Not finish this feature testing yet.") def test_execute_many(self, opts_with_single_conn_strategy: MySQLOperator): try: opts_with_single_conn_strategy.execute_many(_Test_SQL) except Exception as e: assert False, f"It should work finely without any issue." else: assert True, f"It work finely!" _data = opts_with_single_conn_strategy.fetch_all() assert _data is not None and len(_data) == _Data_Row_Number, f"It should get the data from the cursor instance with target SQL and the data row number should be '{_Data_Row_Number}'." 
@pytest.mark.skip(reason="This feature not support in MySQL of Python library..") def test_fetch(self, opts_with_single_conn_strategy: MySQLOperator): opts_with_single_conn_strategy.execute(_Test_SQL) _data = opts_with_single_conn_strategy.fetch() assert _data is not None, f"" def test_fetch_one(self, opts_with_single_conn_strategy: MySQLOperator): _row_number = 0 opts_with_single_conn_strategy.execute(_Test_SQL) _data = opts_with_single_conn_strategy.fetch_one() assert _data is not None and _data != [], f"It should get the data row (only one) from the cursor instance with target SQL." _row_number += 1 while _data is not None or _data != []: _data = opts_with_single_conn_strategy.fetch_one() if _row_number == _Data_Row_Number and (_data == [] or _data is None): break _row_number += 1 assert _row_number == _Data_Row_Number, f"It should get the data from the cursor instance with target SQL and the data row number should be '{_Data_Row_Number}'." def test_fetch_many(self, opts_with_single_conn_strategy: MySQLOperator): _row_number = 0 opts_with_single_conn_strategy.execute(_Test_SQL) _data = opts_with_single_conn_strategy.fetch_many(size=_Fetch_Size) assert _data is not None and _data != [], f"It should get the data row (row number as '{_Fetch_Size}') from the cursor instance with target SQL." if _Fetch_Size < _Data_Row_Number and _Data_Row_Number > 1: assert len(_data) < _Data_Row_Number and len(_data) == _Fetch_Size, f"The data row number should be equal to fetch size and less than the limit data row number." _row_number += len(_data) while _data is not None or _data != []: _data = opts_with_single_conn_strategy.fetch_many(size=_Fetch_Size) if _row_number == _Data_Row_Number and _data == []: break _row_number += len(_data) assert _row_number == _Data_Row_Number, f"It should get the data from the cursor instance with target SQL and the data row number should be '{_Data_Row_Number}'." 
def test_fetch_all(self, opts_with_single_conn_strategy: MySQLOperator): opts_with_single_conn_strategy.execute(_Test_SQL) _data = opts_with_single_conn_strategy.fetch_all() assert _data is not None and len(_data) == _Data_Row_Number, f"It should get the data from the cursor instance with target SQL and the data row number should be '{_Data_Row_Number}'." @pytest.mark.skip(reason="Not implement testing logic. Consider about the feature's necessary.") def test_reset(self, opts_with_single_conn_strategy: MySQLOperator): opts_with_single_conn_strategy.reset() @pytest.mark.skip(reason="For debug the testing code.") def test_close(self, opts_with_single_conn_strategy: MySQLOperator): try: opts_with_single_conn_strategy.close() except Exception as e: assert False, f"" else: assert True, f"" class TestPersistenceDatabaseOperatorWithConnectionPool: def test_initial(self, opts_with_conn_pool_strategy: MySQLOperator): _all_conn_pools = database_connection_pools() assert _all_conn_pools != {}, f"The database connection pools should not be empty." assert Test_Pool_Name in _all_conn_pools.keys(), f"The pool name should be in the database connection pools." assert _all_conn_pools[Test_Pool_Name] is not None, f"The database connection pool should exist with the pool name (from database_connection_pools)." assert get_connection_pool(pool_name=Test_Pool_Name) is not None, f"The database connection pool should exist with the pool name (from get_connection_pool)." def test__connection(self, opts_with_conn_pool_strategy: MySQLOperator): assert opts_with_conn_pool_strategy._connection is not None, f"The database connection should be instantiate." def test__cursor(self, opts_with_conn_pool_strategy: MySQLOperator): assert opts_with_conn_pool_strategy._cursor is not None, f"The database cursor should be instantiate." @pytest.mark.skip(reason="Not implement testing logic. 
Consider about the feature's necessary.") def test_column_names(self, opts_with_conn_pool_strategy: MySQLOperator): _column_names = opts_with_conn_pool_strategy.column_names @pytest.mark.skip(reason="Not implement testing logic. Consider about the feature's necessary.") def test_row_count(self, opts_with_conn_pool_strategy: MySQLOperator): _row_count = opts_with_conn_pool_strategy.row_count @pytest.mark.skip(reason="Not implement testing logic. Consider about the feature's necessary.") def test_next(self, opts_with_conn_pool_strategy: MySQLOperator): opts_with_conn_pool_strategy.next() def test_execute(self, opts_with_conn_pool_strategy: MySQLOperator): try: opts_with_conn_pool_strategy.execute(_Test_SQL) except Exception as e: assert False, f"It should work finely without any issue." else: assert True, f"It work finely!" _data = opts_with_conn_pool_strategy.fetch_all() assert _data is not None and len(_data) == _Data_Row_Number, f"It should get the data from the cursor instance with target SQL and the data row number should be '{_Data_Row_Number}'." @pytest.mark.skip(reason="Not finish this feature testing yet.") def test_execute_many(self, opts_with_conn_pool_strategy: MySQLOperator): try: opts_with_conn_pool_strategy.execute_many(_Test_SQL) except Exception as e: assert False, f"It should work finely without any issue." else: assert True, f"It work finely!" _data = opts_with_conn_pool_strategy.fetch_all() assert _data is not None and len(_data) == _Data_Row_Number, f"It should get the data from the cursor instance with target SQL and the data row number should be '{_Data_Row_Number}'." 
@pytest.mark.skip(reason="This feature not support in MySQL of Python library..") def test_fetch(self, opts_with_conn_pool_strategy: MySQLOperator): opts_with_conn_pool_strategy.execute(_Test_SQL) _data = opts_with_conn_pool_strategy.fetch() assert _data is not None, f"" def test_fetch_one(self, opts_with_conn_pool_strategy: MySQLOperator): _row_number = 0 opts_with_conn_pool_strategy.execute(_Test_SQL) _data = opts_with_conn_pool_strategy.fetch_one() assert _data is not None and _data != [], f"It should get the data row (only one) from the cursor instance with target SQL." _row_number += 1 while _data is not None or _data != []: _data = opts_with_conn_pool_strategy.fetch_one() if _row_number == _Data_Row_Number and (_data == [] or _data is None): break _row_number += 1 assert _row_number == _Data_Row_Number, f"It should get the data from the cursor instance with target SQL and the data row number should be '{_Data_Row_Number}'." def test_fetch_many(self, opts_with_conn_pool_strategy: MySQLOperator): _row_number = 0 opts_with_conn_pool_strategy.execute(_Test_SQL) _data = opts_with_conn_pool_strategy.fetch_many(size=_Fetch_Size) assert _data is not None and _data != [], f"It should get the data row (row number as '{_Fetch_Size}') from the cursor instance with target SQL." if _Fetch_Size < _Data_Row_Number and _Data_Row_Number > 1: assert len(_data) < _Data_Row_Number and len(_data) == _Fetch_Size, f"The data row number should be equal to fetch size and less than the limit data row number." _row_number += len(_data) while _data is not None or _data != []: _data = opts_with_conn_pool_strategy.fetch_many(size=_Fetch_Size) if _row_number == _Data_Row_Number and _data == []: break _row_number += len(_data) assert _row_number == _Data_Row_Number, f"It should get the data from the cursor instance with target SQL and the data row number should be '{_Data_Row_Number}'." 
def test_fetch_all(self, opts_with_conn_pool_strategy: MySQLOperator): opts_with_conn_pool_strategy.execute(_Test_SQL) _data = opts_with_conn_pool_strategy.fetch_all() assert _data is not None and len(_data) == _Data_Row_Number, f"It should get the data from the cursor instance with target SQL and the data row number should be '{_Data_Row_Number}'." @pytest.mark.skip(reason="Not implement testing logic. Consider about the feature's necessary.") def test_reset(self, opts_with_conn_pool_strategy: MySQLOperator): opts_with_conn_pool_strategy.reset() @pytest.mark.skip(reason="Consider this feature testing logic.") def test_close(self, opts_with_conn_pool_strategy: MySQLOperator): try: opts_with_conn_pool_strategy.close() except Exception as e: assert False, f"" else: assert True, f""
47.353571
197
0.731805
1,869
13,259
4.813269
0.069556
0.065807
0.066474
0.076034
0.855047
0.842152
0.80847
0.776456
0.759004
0.753224
0
0.001509
0.200166
13,259
279
198
47.523297
0.84677
0
0
0.583333
0
0.0625
0.265576
0.01946
0
0
0
0
0.203125
1
0.15625
false
0
0.020833
0
0.197917
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
6ec46cc0eef450cff541ed94045719a4162186ed
27
py
Python
bmtk/simulator/filternet/default_setters/__init__.py
tjbanks/bmtk
52fee3b230ceb14a666c46f57f2031c38f1ac5b1
[ "BSD-3-Clause" ]
1
2019-03-27T12:23:09.000Z
2019-03-27T12:23:09.000Z
bmtk/simulator/filternet/default_setters/__init__.py
tjbanks/bmtk
52fee3b230ceb14a666c46f57f2031c38f1ac5b1
[ "BSD-3-Clause" ]
null
null
null
bmtk/simulator/filternet/default_setters/__init__.py
tjbanks/bmtk
52fee3b230ceb14a666c46f57f2031c38f1ac5b1
[ "BSD-3-Clause" ]
null
null
null
from cell_loaders import *
13.5
26
0.814815
4
27
5.25
1
0
0
0
0
0
0
0
0
0
0
0
0.148148
27
1
27
27
0.913043
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
6ee9d52cd90689ba8f4d7ffc8fc73edef9c3b929
53
py
Python
utils/__init__.py
vietbt/EVRPpp
76aa3549a6a2481fb01725d8d95bfb8c55537534
[ "Apache-2.0" ]
5
2021-07-21T04:14:08.000Z
2022-01-03T14:22:45.000Z
utils/__init__.py
vietbt/EVRPpp
76aa3549a6a2481fb01725d8d95bfb8c55537534
[ "Apache-2.0" ]
null
null
null
utils/__init__.py
vietbt/EVRPpp
76aa3549a6a2481fb01725d8d95bfb8c55537534
[ "Apache-2.0" ]
null
null
null
from utils.utils import * from utils.config import *
26.5
26
0.773585
8
53
5.125
0.5
0.439024
0
0
0
0
0
0
0
0
0
0
0.150943
53
2
27
26.5
0.911111
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
42d5943e6dad2ffe2f2320a1645a1481d6ead4ce
136
py
Python
ravel/ext/sqlalchemy/store/__init__.py
gigaquads/pybiz
e9654592246be06a777934e889e03407c5c1673e
[ "MIT" ]
2
2021-02-26T15:30:44.000Z
2021-05-22T14:06:17.000Z
ravel/ext/sqlalchemy/store/__init__.py
gigaquads/ravel
e9654592246be06a777934e889e03407c5c1673e
[ "MIT" ]
null
null
null
ravel/ext/sqlalchemy/store/__init__.py
gigaquads/ravel
e9654592246be06a777934e889e03407c5c1673e
[ "MIT" ]
null
null
null
from .sqlalchemy_store import SqlalchemyStore from .sqlalchemy_table_builder import SqlalchemyTableBuilder from .dialect import Dialect
34
60
0.889706
15
136
7.866667
0.6
0.237288
0
0
0
0
0
0
0
0
0
0
0.088235
136
3
61
45.333333
0.951613
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
42de8ffc5c4dd550bf226d40a06e466e42884c6c
185
py
Python
teem/commands/__init__.py
Robyo12121/teem
46b2807412f2d96b98e16a483bea7724fb920008
[ "Unlicense" ]
null
null
null
teem/commands/__init__.py
Robyo12121/teem
46b2807412f2d96b98e16a483bea7724fb920008
[ "Unlicense" ]
null
null
null
teem/commands/__init__.py
Robyo12121/teem
46b2807412f2d96b98e16a483bea7724fb920008
[ "Unlicense" ]
null
null
null
from .change import * from .checkin import * from .delete import * from .reservations import * from .reserve import * from .rooms import * from .users import * from .configure import *
20.555556
27
0.740541
24
185
5.708333
0.416667
0.510949
0
0
0
0
0
0
0
0
0
0
0.172973
185
8
28
23.125
0.895425
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
42e6123aab16b63eb07e90fdd3265904e45c081a
70
py
Python
pyplotlm/__init__.py
esmondhkchu/pyplotlm
23de6f133ef792588964aaa45f08e06dee2e9ff8
[ "MIT" ]
null
null
null
pyplotlm/__init__.py
esmondhkchu/pyplotlm
23de6f133ef792588964aaa45f08e06dee2e9ff8
[ "MIT" ]
null
null
null
pyplotlm/__init__.py
esmondhkchu/pyplotlm
23de6f133ef792588964aaa45f08e06dee2e9ff8
[ "MIT" ]
null
null
null
from .tools import * from .influence import * from .pyplotlm import *
17.5
24
0.742857
9
70
5.777778
0.555556
0.384615
0
0
0
0
0
0
0
0
0
0
0.171429
70
3
25
23.333333
0.896552
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
6e09279ba9abab5e55700c3bde687841b8f7d746
65
py
Python
lex/core/__init__.py
daemon/lex
a79c222f0dd3b8fce26a3e5033c53ceb41bbb587
[ "MIT" ]
null
null
null
lex/core/__init__.py
daemon/lex
a79c222f0dd3b8fce26a3e5033c53ceb41bbb587
[ "MIT" ]
null
null
null
lex/core/__init__.py
daemon/lex
a79c222f0dd3b8fce26a3e5033c53ceb41bbb587
[ "MIT" ]
null
null
null
from .bot import * from .settings import * from .intent import *
16.25
23
0.723077
9
65
5.222222
0.555556
0.425532
0
0
0
0
0
0
0
0
0
0
0.184615
65
3
24
21.666667
0.886792
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
2822b93e1a730d544ebcd7cebc8e20dd2078ae75
30
py
Python
src/architectures/readout/__init__.py
isaachenrion/jets
59aeba81788d0741af448192d9dfb764fb97cf8d
[ "BSD-3-Clause" ]
9
2017-10-09T17:01:52.000Z
2018-06-12T18:06:05.000Z
src/architectures/readout/__init__.py
isaachenrion/jets
59aeba81788d0741af448192d9dfb764fb97cf8d
[ "BSD-3-Clause" ]
31
2017-11-01T14:39:02.000Z
2018-04-18T15:34:24.000Z
src/architectures/readout/__init__.py
isaachenrion/jets
59aeba81788d0741af448192d9dfb764fb97cf8d
[ "BSD-3-Clause" ]
10
2017-10-17T19:23:14.000Z
2020-07-05T04:44:45.000Z
from .readout import READOUTS
15
29
0.833333
4
30
6.25
1
0
0
0
0
0
0
0
0
0
0
0
0.133333
30
1
30
30
0.961538
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
954c97a3ad933ebb32fc242937866922f6143e9f
37
py
Python
valuebot/points/__init__.py
ecobogdan/valuebot
e13552c06271f4038cb6e8af774a2fe75254c319
[ "MIT" ]
3
2019-07-08T05:42:20.000Z
2021-10-02T07:59:15.000Z
valuebot/points/__init__.py
ecobogdan/valuebot
e13552c06271f4038cb6e8af774a2fe75254c319
[ "MIT" ]
3
2019-06-04T19:53:16.000Z
2021-10-02T12:45:51.000Z
valuebot/points/__init__.py
ecobogdan/valuebot
e13552c06271f4038cb6e8af774a2fe75254c319
[ "MIT" ]
null
null
null
from .cog import * from .db import *
12.333333
18
0.675676
6
37
4.166667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.216216
37
2
19
18.5
0.862069
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
958d5e7c04fba304be769a00399e3a4c3cf18fbf
241
py
Python
quotebook.py
Johnsoneer/The-Quote-Book
deaf7d8b80524b81b317542c85017691766e91c5
[ "MIT" ]
null
null
null
quotebook.py
Johnsoneer/The-Quote-Book
deaf7d8b80524b81b317542c85017691766e91c5
[ "MIT" ]
2
2020-05-17T03:57:04.000Z
2020-05-25T22:51:45.000Z
quotebook.py
Johnsoneer/The-Quote-Book
deaf7d8b80524b81b317542c85017691766e91c5
[ "MIT" ]
null
null
null
from app import app, db from app.models import users,phrases, quotes,people_quoted @app.shell_context_processor def make_shell_context(): return {'db':db,'users':users, 'phrases':phrases,'quotes':quotes,'people_quoted':people_quoted}
26.777778
99
0.775934
35
241
5.142857
0.457143
0.2
0.2
0
0
0
0
0
0
0
0
0
0.095436
241
8
100
30.125
0.825688
0
0
0
0
0
0.138075
0
0
0
0
0
0
1
0.2
true
0
0.4
0.2
0.8
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
1
1
0
0
6
95b32d955ed5a2f50673c2afc1ff94fae95d9ada
149
py
Python
em/admin.py
iKozzz/MTBRB
cdde6141add15ec629c7dd8c356afa0c10b94f53
[ "MIT" ]
null
null
null
em/admin.py
iKozzz/MTBRB
cdde6141add15ec629c7dd8c356afa0c10b94f53
[ "MIT" ]
null
null
null
em/admin.py
iKozzz/MTBRB
cdde6141add15ec629c7dd8c356afa0c10b94f53
[ "MIT" ]
null
null
null
from django.contrib import admin from em.models import * @admin.register(Rider, Stage, Track, Result) class RiderAdmin(admin.ModelAdmin): pass
18.625
44
0.765101
20
149
5.7
0.8
0.192982
0
0
0
0
0
0
0
0
0
0
0.14094
149
7
45
21.285714
0.890625
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.2
0.4
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
25132cbf67e929e6c31f1ed18381ef1d9538f50d
106
py
Python
george/testing/__init__.py
kastnerkyle/george
8c33a837e8922be142bf7adbe80726dc611c9b25
[ "MIT" ]
1
2019-05-24T02:30:22.000Z
2019-05-24T02:30:22.000Z
george/testing/__init__.py
kastnerkyle/george
8c33a837e8922be142bf7adbe80726dc611c9b25
[ "MIT" ]
null
null
null
george/testing/__init__.py
kastnerkyle/george
8c33a837e8922be142bf7adbe80726dc611c9b25
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- __all__ = ["test_basic", "test_kernels"] from . import test_basic, test_kernels
17.666667
40
0.669811
14
106
4.5
0.642857
0.285714
0.412698
0.634921
0
0
0
0
0
0
0
0.011111
0.150943
106
5
41
21.2
0.688889
0.198113
0
0
0
0
0.26506
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
6
c2f9c75cb485bb39811871bbd00de9e074bbc0ed
39
py
Python
pm4pymdl/visualization/__init__.py
dorian1000/pm4py-mdl
71e0c2425abb183da293a58d31e25e50137c774f
[ "MIT" ]
5
2021-01-31T22:45:29.000Z
2022-02-22T14:26:06.000Z
pm4pymdl/visualization/__init__.py
Javert899/pm4py-mdl
4cc875999100f3f1ad60b925a20e40cf52337757
[ "MIT" ]
3
2021-07-07T15:32:55.000Z
2021-07-07T16:15:36.000Z
pm4pymdl/visualization/__init__.py
dorian1000/pm4py-mdl
71e0c2425abb183da293a58d31e25e50137c774f
[ "MIT" ]
9
2020-09-23T15:34:11.000Z
2022-03-17T09:15:40.000Z
from pm4pymdl.visualization import mvp
19.5
38
0.871795
5
39
6.8
1
0
0
0
0
0
0
0
0
0
0
0.028571
0.102564
39
1
39
39
0.942857
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
6c3cffda9addf549b436b8981191a784c537fc00
2,788
py
Python
platforms/tofino/config/configure_source.py
nhnghia/int-platforms
aa0b7be45ae97b2a1f1d5fea2938f9c75aefc9d7
[ "Apache-2.0" ]
11
2021-10-01T14:22:04.000Z
2022-03-24T21:34:52.000Z
platforms/tofino/config/configure_source.py
nhnghia/int-platforms
aa0b7be45ae97b2a1f1d5fea2938f9c75aefc9d7
[ "Apache-2.0" ]
4
2021-09-26T07:56:40.000Z
2022-03-22T09:08:50.000Z
platforms/tofino/config/configure_source.py
nhnghia/int-platforms
aa0b7be45ae97b2a1f1d5fea2938f9c75aefc9d7
[ "Apache-2.0" ]
3
2021-11-22T18:24:56.000Z
2021-12-22T14:55:46.000Z
p4 = bfrt.int.pipe.Ingress.Int_source.tb_int_source def setUp(): global p4 from ipaddress import ip_address p4.add_with_configure_source(srcAddr=ip_address("10.0.1.1"), srcAddr_mask=0xFFFFFFFF, dstAddr=ip_address("10.0.2.2"), dstAddr_mask=0xFFFFFFFF, l4_src=0x11FF, l4_src_mask=0x0000, l4_dst=0x22FF, l4_dst_mask=0x0000, max_hop = 4, hop_metadata_len = 10, ins_cnt = 8, ins_mask = 0xFF) p4.add_with_configure_source(srcAddr=ip_address("10.0.3.3"), srcAddr_mask=0xFFFFFFFF, dstAddr=ip_address("10.0.4.4"), dstAddr_mask=0xFFFFFFFF, l4_src=0x11FF, l4_src_mask=0x0000, l4_dst=0x4268, l4_dst_mask=0x0000, max_hop = 4, hop_metadata_len = 6, ins_cnt = 4, ins_mask = 0xCC) p4.dump() # modify an existing entry p4.mod_with_configure_source(srcAddr=ip_address("10.0.3.3"), srcAddr_mask=0xFFFFFFFF, dstAddr=ip_address("10.0.5.5"), dstAddr_mask=0xFFFFFFFF, l4_src=0x11FF, l4_src_mask=0x0000, l4_dst=0x4268, l4_dst_mask=0x0000, max_hop = 4, hop_metadata_len = 6, ins_cnt = 4, ins_mask = 0xCC) p4.dump() # clear entries # p4.clear() setUp()
53.615385
83
0.286944
188
2,788
3.946809
0.287234
0.084906
0.088949
0.097035
0.773585
0.773585
0.773585
0.773585
0.719677
0.719677
0
0.128314
0.661765
2,788
51
84
54.666667
0.658537
0.017575
0
0.651163
0
0
0.017576
0
0
0
0.052728
0
0
1
0.023256
false
0
0.023256
0
0.046512
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
6c3e4fde5ae6fda3857a33d82acb0e48abbbd6a2
76
py
Python
csv_importer/models/__init__.py
SpiritualDixit/csv_importer
18776757dda914655e3cf0bafb8348e424d3b22f
[ "MIT" ]
2
2018-08-16T17:35:28.000Z
2019-08-26T01:00:52.000Z
csv_importer/models/__init__.py
SpiritualDixit/csv_importer
18776757dda914655e3cf0bafb8348e424d3b22f
[ "MIT" ]
null
null
null
csv_importer/models/__init__.py
SpiritualDixit/csv_importer
18776757dda914655e3cf0bafb8348e424d3b22f
[ "MIT" ]
3
2017-05-30T07:02:53.000Z
2017-09-11T13:36:37.000Z
# -*- coding: utf-8 -*- from . import installer from . import csv_importer
15.2
26
0.671053
10
76
5
0.8
0.4
0
0
0
0
0
0
0
0
0
0.016129
0.184211
76
4
27
19
0.790323
0.276316
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
6c5778841a684c4d147b651caba4db611414b23f
213
py
Python
Code/config.py
mfineman/Picture_of_Health
da8aba630e9065dc0e17000801a09e98b2922473
[ "MIT" ]
null
null
null
Code/config.py
mfineman/Picture_of_Health
da8aba630e9065dc0e17000801a09e98b2922473
[ "MIT" ]
null
null
null
Code/config.py
mfineman/Picture_of_Health
da8aba630e9065dc0e17000801a09e98b2922473
[ "MIT" ]
null
null
null
# postgresql and mapbox password password = 'Parvin123!!' # (postgresql username = postgres) # mapbox key accessToken: "pk.eyJ1IjoibWZpbmVtYW4iLCJhIjoiY2tpc3pxM29vMHk3dzJ6b3o1OGl2c3N0aSJ9.c48ksBNPsYVcz9wX9eWZ0A"
30.428571
105
0.835681
15
213
11.866667
0.8
0
0
0
0
0
0
0
0
0
0
0.113402
0.089202
213
7
105
30.428571
0.804124
0.347418
0
0
0
0
0.742647
0.661765
0
0
0
0
0
1
0
false
0.5
0
0
0
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
1
0
0
0
0
0
6
6c660c137fcb5eb5d2af52450f1067bdb632250b
2,210
py
Python
django_db_meter/migrations/0002_appwiseaggregatedmetric_dbwiseaggregatedmetric_tablewiseaggregatedmetric.py
djangothon/django-db-meter
2a96b32b5cc1a926832316841afd5da7d90a0b8f
[ "Apache-2.0" ]
null
null
null
django_db_meter/migrations/0002_appwiseaggregatedmetric_dbwiseaggregatedmetric_tablewiseaggregatedmetric.py
djangothon/django-db-meter
2a96b32b5cc1a926832316841afd5da7d90a0b8f
[ "Apache-2.0" ]
null
null
null
django_db_meter/migrations/0002_appwiseaggregatedmetric_dbwiseaggregatedmetric_tablewiseaggregatedmetric.py
djangothon/django-db-meter
2a96b32b5cc1a926832316841afd5da7d90a0b8f
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('django_db_meter', '0001_initial'), ] operations = [ migrations.CreateModel( name='AppWiseAggregatedMetric', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('timestamp', models.DateTimeField()), ('num_queries', models.PositiveIntegerField(default=0)), ('average_query_time', models.FloatField(default=0.0)), ('num_joined_queries', models.PositiveIntegerField(default=0)), ('app_name', models.CharField(max_length=255)), ], options={ 'abstract': False, }, ), migrations.CreateModel( name='DBWiseAggregatedMetric', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('timestamp', models.DateTimeField()), ('num_queries', models.PositiveIntegerField(default=0)), ('average_query_time', models.FloatField(default=0.0)), ('num_joined_queries', models.PositiveIntegerField(default=0)), ('db_name', models.CharField(max_length=255)), ], options={ 'abstract': False, }, ), migrations.CreateModel( name='TableWiseAggregatedMetric', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('timestamp', models.DateTimeField()), ('num_queries', models.PositiveIntegerField(default=0)), ('average_query_time', models.FloatField(default=0.0)), ('num_joined_queries', models.PositiveIntegerField(default=0)), ('table_name', models.CharField(max_length=255)), ], options={ 'abstract': False, }, ), ]
38.77193
114
0.554751
184
2,210
6.456522
0.298913
0.060606
0.166667
0.20202
0.781987
0.781987
0.781987
0.781987
0.781987
0.739057
0
0.017117
0.31267
2,210
56
115
39.464286
0.764977
0.009502
0
0.66
0
0
0.149063
0.032007
0
0
0
0
0
1
0
false
0
0.04
0
0.1
0
0
0
0
null
0
0
1
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
6c7fe5e05cb0870d6e361e63376f034213ef47cb
31
py
Python
util/math/fekete/__init__.py
tchlux/util
eff37464c7e913377398025adf76b057f9630b35
[ "MIT" ]
4
2021-04-22T20:19:40.000Z
2022-01-30T18:57:23.000Z
util/math/fekete/__init__.py
tchlux/util
eff37464c7e913377398025adf76b057f9630b35
[ "MIT" ]
1
2022-01-24T14:10:27.000Z
2022-01-30T16:42:53.000Z
util/math/fekete/__init__.py
tchlux/util
eff37464c7e913377398025adf76b057f9630b35
[ "MIT" ]
2
2019-05-19T07:44:28.000Z
2021-04-22T20:20:40.000Z
from .fekete_from_py import *
10.333333
29
0.774194
5
31
4.4
0.8
0
0
0
0
0
0
0
0
0
0
0
0.16129
31
2
30
15.5
0.846154
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
6c8c647b6ca6e0beaa2332b038c3d967f7b6b647
6,345
py
Python
tests/convert_music/test_parse_n_check.py
Robpol86/general
3bfab875be4b3137a2ed30c8ae8d94302aa3ea72
[ "MIT" ]
1
2016-08-20T10:49:55.000Z
2016-08-20T10:49:55.000Z
tests/convert_music/test_parse_n_check.py
Robpol86/general
3bfab875be4b3137a2ed30c8ae8d94302aa3ea72
[ "MIT" ]
null
null
null
tests/convert_music/test_parse_n_check.py
Robpol86/general
3bfab875be4b3137a2ed30c8ae8d94302aa3ea72
[ "MIT" ]
null
null
null
import pytest from docopt import docopt from convert_music import __doc__ as convert_music__doc__, parse_n_check def test_good_values(capsys, threads): """Test for valid values.""" config_expected = dict( flac_bin='/bin/bash', lame_bin='/bin/bash', ignore_art=False, ignore_lyrics=False, threads=threads, flac_dir='/tmp', mp3_dir='/tmp', quiet=False, ) argv = ['/tmp/', '/tmp', '--flac-bin-path=/bin/bash', '--lame-bin-path=/bin/bash'] cli_config_settings = parse_n_check(docopt(convert_music__doc__, argv=argv)) assert config_expected == cli_config_settings stdout_actual, stderr_actual = capsys.readouterr() stdout_expected = "" stderr_expected = "" assert stdout_expected == stdout_actual assert stderr_expected == stderr_actual def test_alpha_threads(capsys): """Test for non-numeric threads value.""" argv = ['/tmp', '/tmp', '--flac-bin-path=/bin/bash', '--lame-bin-path=/bin/bash', '--threads=abc'] with pytest.raises(ValueError): parse_n_check(docopt(convert_music__doc__, argv=argv)) stdout_actual, stderr_actual = capsys.readouterr() stdout_expected = "" stderr_expected = "--threads is not an integer or is zero: abc\n" assert stdout_expected == stdout_actual assert stderr_expected == stderr_actual def test_zero_threads(capsys): """Test for zero threads value.""" argv = ['/tmp', '/tmp', '--flac-bin-path=/bin/bash', '--lame-bin-path=/bin/bash', '--threads=0'] with pytest.raises(ValueError): parse_n_check(docopt(convert_music__doc__, argv=argv)) stdout_actual, stderr_actual = capsys.readouterr() stdout_expected = "" stderr_expected = "--threads is not an integer or is zero: 0\n" assert stdout_expected == stdout_actual assert stderr_expected == stderr_actual def test_paths_not_exist(capsys): """Makes sure the proper error occurs when specifying a path that doesn't exist.""" argv = ['/does_not_exist', '/tmp', '--flac-bin-path=/bin/bash', '--lame-bin-path=/bin/bash'] with pytest.raises(ValueError): parse_n_check(docopt(convert_music__doc__, argv=argv)) stdout_actual, 
stderr_actual = capsys.readouterr() stdout_expected = "" stderr_expected = "<flac_dir> is not a directory or does not exist: /does_not_exist\n" assert stdout_expected == stdout_actual assert stderr_expected == stderr_actual argv = ['/tmp', '/does_not_exist', '--flac-bin-path=/bin/bash', '--lame-bin-path=/bin/bash'] with pytest.raises(ValueError): parse_n_check(docopt(convert_music__doc__, argv=argv)) stdout_actual, stderr_actual = capsys.readouterr() stdout_expected = "" stderr_expected = "<mp3_dir> is not a directory or does not exist: /does_not_exist\n" assert stdout_expected == stdout_actual assert stderr_expected == stderr_actual argv = ['/tmp', '/tmp', '--flac-bin-path=/does_not_exist', '--lame-bin-path=/bin/bash'] with pytest.raises(ValueError): parse_n_check(docopt(convert_music__doc__, argv=argv)) stdout_actual, stderr_actual = capsys.readouterr() stdout_expected = "" stderr_expected = "--flac-bin-path is not a file or does not exist: /does_not_exist\n" assert stdout_expected == stdout_actual assert stderr_expected == stderr_actual argv = ['/tmp', '/tmp', '--flac-bin-path=/bin/bash', '--lame-bin-path=/does_not_exist'] with pytest.raises(ValueError): parse_n_check(docopt(convert_music__doc__, argv=argv)) stdout_actual, stderr_actual = capsys.readouterr() stdout_expected = "" stderr_expected = "--lame-bin-path is not a file or does not exist: /does_not_exist\n" assert stdout_expected == stdout_actual assert stderr_expected == stderr_actual def test_paths_not_readable(capsys): """Makes sure the proper error occurs when specifying a path that exists but has no read permissions.""" argv = ['/var/db/sudo', '/tmp', '--flac-bin-path=/bin/bash', '--lame-bin-path=/bin/bash'] with pytest.raises(ValueError): parse_n_check(docopt(convert_music__doc__, argv=argv)) stdout_actual, stderr_actual = capsys.readouterr() stdout_expected = "" stderr_expected = "<flac_dir> is not readable or no execute permissions: /var/db/sudo\n" assert stdout_expected == stdout_actual assert 
stderr_expected == stderr_actual argv = ['/tmp', '/var/db/sudo', '--flac-bin-path=/bin/bash', '--lame-bin-path=/bin/bash'] with pytest.raises(ValueError): parse_n_check(docopt(convert_music__doc__, argv=argv)) stdout_actual, stderr_actual = capsys.readouterr() stdout_expected = "" stderr_expected = "<mp3_dir> is not readable, writable, or no execute permissions: /var/db/sudo\n" assert stdout_expected == stdout_actual assert stderr_expected == stderr_actual argv = ['/tmp', '/tmp', '--flac-bin-path=/etc/sudoers', '--lame-bin-path=/bin/bash'] with pytest.raises(ValueError): parse_n_check(docopt(convert_music__doc__, argv=argv)) stdout_actual, stderr_actual = capsys.readouterr() stdout_expected = "" stderr_expected = "--flac-bin-path is not readable or no execute permissions: /etc/sudoers\n" assert stdout_expected == stdout_actual assert stderr_expected == stderr_actual argv = ['/tmp', '/tmp', '--flac-bin-path=/bin/bash', '--lame-bin-path=/etc/sudoers'] with pytest.raises(ValueError): parse_n_check(docopt(convert_music__doc__, argv=argv)) stdout_actual, stderr_actual = capsys.readouterr() stdout_expected = "" stderr_expected = "--lame-bin-path is not readable or no execute permissions: /etc/sudoers\n" assert stdout_expected == stdout_actual assert stderr_expected == stderr_actual def test_paths_not_writable(capsys): """Test for mp3_dir that is readable but not writable.""" argv = ['/tmp', '/etc/pam.d/', '--flac-bin-path=/bin/bash', '--lame-bin-path=/bin/bash'] with pytest.raises(ValueError): parse_n_check(docopt(convert_music__doc__, argv=argv)) stdout_actual, stderr_actual = capsys.readouterr() stdout_expected = "" stderr_expected = "<mp3_dir> is not readable, writable, or no execute permissions: /etc/pam.d\n" assert stdout_expected == stdout_actual assert stderr_expected == stderr_actual
45.321429
108
0.69803
853
6,345
4.90973
0.106682
0.0468
0.047755
0.066858
0.862464
0.856017
0.856017
0.854107
0.854107
0.844556
0
0.001327
0.168479
6,345
139
109
45.647482
0.792456
0.049803
0
0.608696
0
0.017391
0.254918
0.103034
0
0
0
0
0.217391
1
0.052174
false
0
0.026087
0
0.078261
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
665ff09f2d4d3190e333302b7c23497393a787b7
130
py
Python
myvenv/lib/python3.5/site-packages/IPython/utils/pickleutil.py
tuvapp/tuvappcom
5ca2be19f4b0c86a1d4a9553711a4da9d3f32841
[ "MIT" ]
6,989
2017-07-18T06:23:18.000Z
2022-03-31T15:58:36.000Z
SLpackage/private/thirdparty/pythonpkgs/ipython/ipython_4.0.0/lib/python2.7/site-packages/IPython/utils/pickleutil.py
fanglab/6mASCOPE
3f1fdcb7693ff152f17623ce549526ec272698b1
[ "BSD-3-Clause" ]
1,978
2017-07-18T09:17:58.000Z
2022-03-31T14:28:43.000Z
SLpackage/private/thirdparty/pythonpkgs/ipython/ipython_4.0.0/lib/python2.7/site-packages/IPython/utils/pickleutil.py
fanglab/6mASCOPE
3f1fdcb7693ff152f17623ce549526ec272698b1
[ "BSD-3-Clause" ]
1,228
2017-07-18T09:03:13.000Z
2022-03-29T05:57:40.000Z
from warnings import warn warn("IPython.utils.pickleutil has moved to ipykernel.pickleutil") from ipykernel.pickleutil import *
21.666667
66
0.815385
17
130
6.235294
0.647059
0.358491
0
0
0
0
0
0
0
0
0
0
0.115385
130
5
67
26
0.921739
0
0
0
0
0
0.446154
0.184615
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
66777b8b0f1a5f7d3b9cadff18a963574216c51b
59
py
Python
python/dataingest/core/bp/__init__.py
jiportilla/ontology
8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40
[ "MIT" ]
null
null
null
python/dataingest/core/bp/__init__.py
jiportilla/ontology
8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40
[ "MIT" ]
null
null
null
python/dataingest/core/bp/__init__.py
jiportilla/ontology
8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40
[ "MIT" ]
null
null
null
from .ingest_api import IngestAPI, main as call_ingest_api
29.5
58
0.847458
10
59
4.7
0.8
0.382979
0
0
0
0
0
0
0
0
0
0
0.118644
59
1
59
59
0.903846
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
6680b48aba9874dc66558ab60d21bce40f3394b8
41
py
Python
geojson_rewind/__init__.py
chris48s/geojson-rewind
fc0fe3e64cb2228b4aa35788866eb3ab973f6a94
[ "MIT" ]
15
2019-02-22T15:43:35.000Z
2021-12-16T14:31:33.000Z
geojson_rewind/__init__.py
chris48s/geojson-rewind
fc0fe3e64cb2228b4aa35788866eb3ab973f6a94
[ "MIT" ]
18
2019-06-12T08:58:50.000Z
2022-01-31T02:06:17.000Z
geojson_rewind/__init__.py
chris48s/geojson-rewind
fc0fe3e64cb2228b4aa35788866eb3ab973f6a94
[ "MIT" ]
2
2019-07-19T17:29:23.000Z
2021-11-10T16:56:44.000Z
from .rewind import rewind # noqa: F401
20.5
40
0.731707
6
41
5
0.833333
0
0
0
0
0
0
0
0
0
0
0.090909
0.195122
41
1
41
41
0.818182
0.243902
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
6690647c9867aa5fe77e6e61daf230cb5197ac4f
80
py
Python
core/beam/__init__.py
VasilyevEvgeny/self-focusing_3D
c90b4d78d2d72365566f8a49b325bd48127b1e44
[ "MIT" ]
null
null
null
core/beam/__init__.py
VasilyevEvgeny/self-focusing_3D
c90b4d78d2d72365566f8a49b325bd48127b1e44
[ "MIT" ]
null
null
null
core/beam/__init__.py
VasilyevEvgeny/self-focusing_3D
c90b4d78d2d72365566f8a49b325bd48127b1e44
[ "MIT" ]
null
null
null
from .beam_x import BeamX from .beam_r import BeamR from .beam_xy import BeamXY
20
27
0.8125
15
80
4.133333
0.6
0.387097
0
0
0
0
0
0
0
0
0
0
0.15
80
3
28
26.666667
0.911765
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
66abe2a7fbb409d637ee1269733ac274a59cf269
44
py
Python
maui63_postprocessing/cv/__init__.py
Christophe-Foyer/maui63_postprocessing
1b1324d91ddc9469c946adbf8dd1dff74cbb5b76
[ "MIT" ]
null
null
null
maui63_postprocessing/cv/__init__.py
Christophe-Foyer/maui63_postprocessing
1b1324d91ddc9469c946adbf8dd1dff74cbb5b76
[ "MIT" ]
null
null
null
maui63_postprocessing/cv/__init__.py
Christophe-Foyer/maui63_postprocessing
1b1324d91ddc9469c946adbf8dd1dff74cbb5b76
[ "MIT" ]
null
null
null
from .cv import process_video, process_image
44
44
0.863636
7
44
5.142857
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.090909
44
1
44
44
0.9
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
66b9e58f1575a75cd3d2a9fe0011d1492cfdc090
44
py
Python
userbot/modules/testing.py
Zehubiel/Zehubiel-USERBOT-saya
c74980c4427e49344b3c99d1d513f97de98655e7
[ "Naumen", "Condor-1.1", "MS-PL" ]
5
2020-06-07T12:45:21.000Z
2020-10-21T03:37:21.000Z
userbot/modules/testing.py
Zehubiel/Zehubiel-USERBOT-saya
c74980c4427e49344b3c99d1d513f97de98655e7
[ "Naumen", "Condor-1.1", "MS-PL" ]
4
2020-06-10T09:44:34.000Z
2020-07-28T16:17:17.000Z
userbot/modules/testing.py
Fernando2807/PersonalBot
e1b18b6c46dbf2e0ebb1acf2248485591189256e
[ "Naumen", "Condor-1.1", "MS-PL" ]
78
2020-03-11T10:59:44.000Z
2022-01-13T15:50:46.000Z
import datetime from telethon import events
14.666667
27
0.863636
6
44
6.333333
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.136364
44
2
28
22
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
dda0e4ed6b3c13319cd39d89a1e34c2eb2b2c813
232
py
Python
deep-rl/lib/python2.7/site-packages/OpenGL/GLES3/__init__.py
ShujaKhalid/deep-rl
99c6ba6c3095d1bfdab81bd01395ced96bddd611
[ "MIT" ]
210
2016-04-09T14:26:00.000Z
2022-03-25T18:36:19.000Z
deep-rl/lib/python2.7/site-packages/OpenGL/GLES3/__init__.py
ShujaKhalid/deep-rl
99c6ba6c3095d1bfdab81bd01395ced96bddd611
[ "MIT" ]
72
2016-09-04T09:30:19.000Z
2022-03-27T17:06:53.000Z
deep-rl/lib/python2.7/site-packages/OpenGL/GLES3/__init__.py
ShujaKhalid/deep-rl
99c6ba6c3095d1bfdab81bd01395ced96bddd611
[ "MIT" ]
64
2016-04-09T14:26:49.000Z
2022-03-21T11:19:47.000Z
"""OpenGL.EGL the portable interface to GL environments""" from OpenGL.raw.GLES3._types import * from OpenGL.GLES2.VERSION.GLES2_2_0 import * from OpenGL.GLES3.VERSION.GLES3_3_0 import * from OpenGL.GLES3.VERSION.GLES3_3_1 import *
38.666667
58
0.801724
38
232
4.710526
0.5
0.223464
0.268156
0.189944
0.391061
0.391061
0.391061
0.391061
0
0
0
0.062201
0.099138
232
5
59
46.4
0.794258
0.224138
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
06c760330afdac6e0cea5b301fd3279abe1fd961
96
py
Python
simio/handler/__init__.py
RB387/simio
f799a08b0dc8871d6fc5eebe4e8635881721b511
[ "Apache-2.0" ]
null
null
null
simio/handler/__init__.py
RB387/simio
f799a08b0dc8871d6fc5eebe4e8635881721b511
[ "Apache-2.0" ]
null
null
null
simio/handler/__init__.py
RB387/simio
f799a08b0dc8871d6fc5eebe4e8635881721b511
[ "Apache-2.0" ]
null
null
null
from simio.handler.routes import Router from simio.handler.entities import R router = Router()
19.2
39
0.802083
14
96
5.5
0.571429
0.233766
0.415584
0
0
0
0
0
0
0
0
0
0.125
96
4
40
24
0.916667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
6
b081a91df381e982e5f37a27fb3b6e1e3542036c
32
py
Python
repoze.lru/run_test.py
nikicc/anaconda-recipes
9c611a5854bf41bbc5e7ed9853dc71c0851a62ef
[ "BSD-3-Clause" ]
130
2015-07-28T03:41:21.000Z
2022-03-16T03:07:41.000Z
repoze.lru/run_test.py
nikicc/anaconda-recipes
9c611a5854bf41bbc5e7ed9853dc71c0851a62ef
[ "BSD-3-Clause" ]
147
2017-08-13T04:31:27.000Z
2022-03-07T11:22:23.000Z
repoze.lru/run_test.py
nikicc/anaconda-recipes
9c611a5854bf41bbc5e7ed9853dc71c0851a62ef
[ "BSD-3-Clause" ]
72
2015-07-29T02:35:56.000Z
2022-02-26T14:31:15.000Z
from repoze.lru import LRUCache
16
31
0.84375
5
32
5.4
1
0
0
0
0
0
0
0
0
0
0
0
0.125
32
1
32
32
0.964286
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
9fe79f0db1f2a7ea91cfbeae0e09d7cdc72da637
49
py
Python
files/python/gist01.py
ajlopez/PLangRe
d5069967a1bcf7e9c2e10524b1c8cb779b6619fc
[ "MIT" ]
null
null
null
files/python/gist01.py
ajlopez/PLangRe
d5069967a1bcf7e9c2e10524b1c8cb779b6619fc
[ "MIT" ]
null
null
null
files/python/gist01.py
ajlopez/PLangRe
d5069967a1bcf7e9c2e10524b1c8cb779b6619fc
[ "MIT" ]
null
null
null
#!/usr/bin/python print "Hello, Python World!"
16.333333
29
0.673469
7
49
4.714286
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.142857
49
2
30
24.5
0.785714
0.326531
0
0
0
0
0.666667
0
0
0
0
0
0
0
null
null
0
0
null
null
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
6
b0263eb85526afcb81d1a94fd8366ac7c7c5c4ed
144
py
Python
macropodus/summarize/yongzhuo_nlg/__init__.py
leileixiao/Macropodus
9de38c06d332bd26e704fd4afd8f44678de7f44f
[ "MIT" ]
485
2019-12-31T16:53:28.000Z
2022-03-31T08:01:30.000Z
macropodus/summarize/yongzhuo_nlg/__init__.py
xiankaigit/Macropodus
1d7b8f9938cb8b6d7744e9caabc3eb41c8891283
[ "MIT" ]
14
2020-03-07T04:17:47.000Z
2022-03-14T01:08:23.000Z
macropodus/summarize/yongzhuo_nlg/__init__.py
xiankaigit/Macropodus
1d7b8f9938cb8b6d7744e9caabc3eb41c8891283
[ "MIT" ]
85
2020-01-16T05:03:07.000Z
2022-03-03T11:42:07.000Z
# !/usr/bin/python # -*- coding: utf-8 -*- # @time : 2020/5/14 21:11 # @author : Mo # @function: nlg-yongzhuo from nlg_yongzhuo import *
14.4
28
0.597222
21
144
4.047619
0.904762
0.258824
0
0
0
0
0
0
0
0
0
0.105263
0.208333
144
9
29
16
0.640351
0.715278
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
b0598372a83bdbe277a404e547b689252c034d19
66
py
Python
py_tdlib/constructors/file_type_video_note.py
Mr-TelegramBot/python-tdlib
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
[ "MIT" ]
24
2018-10-05T13:04:30.000Z
2020-05-12T08:45:34.000Z
py_tdlib/constructors/file_type_video_note.py
MrMahdi313/python-tdlib
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
[ "MIT" ]
3
2019-06-26T07:20:20.000Z
2021-05-24T13:06:56.000Z
py_tdlib/constructors/file_type_video_note.py
MrMahdi313/python-tdlib
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
[ "MIT" ]
5
2018-10-05T14:29:28.000Z
2020-08-11T15:04:10.000Z
from ..factory import Type class fileTypeVideoNote(Type): pass
11
30
0.772727
8
66
6.375
0.875
0
0
0
0
0
0
0
0
0
0
0
0.151515
66
5
31
13.2
0.910714
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
c6633bbc4cfec91b15549c2dadf4379bbf4c7ea8
129
py
Python
slock/admin.py
arcamens/django-slock
5916f56f7a110dd59c23ee1adef4dd5b3c959274
[ "MIT" ]
null
null
null
slock/admin.py
arcamens/django-slock
5916f56f7a110dd59c23ee1adef4dd5b3c959274
[ "MIT" ]
6
2020-02-12T02:35:25.000Z
2022-02-10T10:01:28.000Z
slock/admin.py
arcamens/django-slock
5916f56f7a110dd59c23ee1adef4dd5b3c959274
[ "MIT" ]
null
null
null
from django.contrib import admin import slock.models # Register your models here. admin.site.register(slock.models.BasicUser)
16.125
43
0.806202
18
129
5.777778
0.666667
0.211538
0
0
0
0
0
0
0
0
0
0
0.116279
129
7
44
18.428571
0.912281
0.20155
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
c6764dc9b04d5c4f5cc8596b6a01f67693211d34
6,600
py
Python
multco_permits_api/models.py
hackoregon/provisional-transportation-api
15a74f82f751e9bc60c00eacbe6f16bb2d8905e0
[ "MIT" ]
2
2018-05-30T18:07:16.000Z
2019-05-06T10:01:54.000Z
multco_permits_api/models.py
hackoregon/provisional-transportation-api
15a74f82f751e9bc60c00eacbe6f16bb2d8905e0
[ "MIT" ]
33
2018-05-24T05:00:58.000Z
2020-06-05T18:16:32.000Z
multco_permits_api/models.py
hackoregon/provisional-transportation-api
15a74f82f751e9bc60c00eacbe6f16bb2d8905e0
[ "MIT" ]
3
2018-05-24T03:38:59.000Z
2019-07-10T18:35:31.000Z
from django.db import models from django.contrib.gis.db import models import django.db.models.options as options options.DEFAULT_NAMES = options.DEFAULT_NAMES + ('in_db',) class ArchivedPermits(models.Model): id = models.TextField(blank=True, null=True) entry_date = models.DateField(blank=True, null=True) issue_date = models.DateField(blank=True, null=True) permit_number = models.TextField(blank=True, null=True) jurisdiction = models.TextField(blank=True, null=True) applic_addr = models.TextField(blank=True, null=True) applic_city = models.TextField(blank=True, null=True) applic_fax = models.TextField(blank=True, null=True) applic_title = models.TextField(blank=True, null=True) location = models.TextField(blank=True, null=True) street = models.TextField(blank=True, null=True) type = models.TextField(blank=True, null=True) comments = models.TextField(blank=True, null=True) expiration_date = models.DateField(blank=True, null=True) final_date = models.DateField(blank=True, null=True) permit_effect_date = models.DateField(blank=True, null=True) effect_date = models.DateField(blank=True, null=True) bond_number = models.TextField(blank=True, null=True) bond_expiration_date = models.DateField(blank=True, null=True) daily_posting_date = models.DateField(blank=True, null=True) commodity = models.TextField(blank=True, null=True) duplicate = models.TextField(blank=True, null=True) trip = models.TextField(blank=True, null=True) st_phrase = models.TextField(blank=True, null=True) excl_road_ole = models.TextField(blank=True, null=True) encr_brk_crb = models.TextField(blank=True, null=True) encr_instl_strm = models.TextField(blank=True, null=True) encr_side_wlk = models.TextField(blank=True, null=True) encr_side_wlk_desc = models.TextField(blank=True, null=True) encr_curb_desc = models.TextField(blank=True, null=True) encr_drwy = models.TextField(blank=True, null=True) encr_drwy_ft = models.TextField(blank=True, null=True) encr_drwy_typ = models.TextField(blank=True, null=True) 
encr_other = models.TextField(blank=True, null=True) encr_other_desc = models.TextField(blank=True, null=True) encr_grvl = models.TextField(blank=True, null=True) encr_grvl_ft = models.TextField(blank=True, null=True) encr_grvl_type = models.TextField(blank=True, null=True) encr_grvl_rck = models.TextField(blank=True, null=True) encr_grvl_grvl = models.TextField(blank=True, null=True) encr_grvl_asphlt = models.TextField(blank=True, null=True) encr_grvl_asphlt_type = models.TextField(blank=True, null=True) encr_pipe = models.TextField(blank=True, null=True) encr_pipe_ft = models.TextField(blank=True, null=True) encr_pipe_inch = models.TextField(blank=True, null=True) encr_gutter = models.TextField(blank=True, null=True) encr_gutter_ft = models.TextField(blank=True, null=True) encr_park = models.TextField(blank=True, null=True) encr_park_desc = models.TextField(blank=True, null=True) encr_other_2 = models.TextField(blank=True, null=True) encr_other_2_desc = models.TextField(blank=True, null=True) util_pole = models.TextField(blank=True, null=True) util_pole_desc = models.TextField(blank=True, null=True) util_cable = models.TextField(blank=True, null=True) util_cable_desc = models.TextField(blank=True, null=True) util_pipe = models.TextField(blank=True, null=True) util_pipe_desc = models.TextField(blank=True, null=True) util_misc = models.TextField(blank=True, null=True) util_sign = models.TextField(blank=True, null=True) util_cnty_main = models.TextField(blank=True, null=True) util_ins_req = models.TextField(blank=True, null=True) util_bond_req = models.TextField(blank=True, null=True) util_depth = models.TextField(blank=True, null=True) util_cut = models.TextField(blank=True, null=True) util_bore = models.TextField(blank=True, null=True) util_tunnel = models.TextField(blank=True, null=True) util_desc = models.TextField(blank=True, null=True) st_eqp_load_length = models.TextField(blank=True, null=True) st_eqp_width = models.TextField(blank=True, null=True) 
st_eqp_height = models.TextField(blank=True, null=True) st_eqp_overall_length = models.TextField(blank=True, null=True) st_eqp_rear_overhang = models.TextField(blank=True, null=True) st_eqp_front_overhang = models.TextField(blank=True, null=True) st_eqp_gross_weight = models.TextField(blank=True, null=True) st_eqp_table = models.TextField(blank=True, null=True) st_eqp_table_note = models.TextField(blank=True, null=True) st_eqp_pilot_two_lane = models.TextField(blank=True, null=True) st_eqp_pilot_four_lane = models.TextField(blank=True, null=True) permit_count = models.TextField(blank=True, null=True) pkey = models.AutoField(primary_key=True) class Meta: managed = False db_table = 'archived_permits' in_db = 'multnomah_county_permits' class CurrentPermits(models.Model): permit_id = models.TextField(primary_key=True) entry_date = models.DateField(blank=True, null=True) issue_date = models.DateField(blank=True, null=True) permit_category = models.TextField(blank=True, null=True) type = models.TextField(blank=True, null=True) district = models.TextField(blank=True, null=True) city_state = models.TextField(blank=True, null=True) location = models.TextField(blank=True, null=True) cross_street = models.TextField(blank=True, null=True) street_number = models.TextField(blank=True, null=True) direction = models.TextField(blank=True, null=True) street = models.TextField(blank=True, null=True) street_type = models.TextField(blank=True, null=True) city = models.TextField(blank=True, null=True) state = models.TextField(blank=True, null=True) zip_code = models.TextField(blank=True, null=True) comments = models.TextField(blank=True, null=True) expiration_date = models.DateField(blank=True, null=True) final_date = models.DateField(blank=True, null=True) effect_date = models.DateField(blank=True, null=True) lat_lng = models.TextField(blank=True, null=True) longitude = models.FloatField(blank=True, null=True) latitude = models.FloatField(blank=True, null=True) geom_point = 
models.GeometryField(blank=True, null=True, srid=4326) class Meta: managed = False db_table = 'current_permits' in_db = 'multnomah_county_permits'
52.8
71
0.742121
927
6,600
5.118662
0.142395
0.193467
0.279452
0.365437
0.874394
0.871444
0.792202
0.61138
0.337197
0.199368
0
0.001059
0.141818
6,600
124
72
53.225806
0.836688
0
0
0.20339
0
0
0.012727
0.007273
0
0
0
0
0
1
0
false
0
0.025424
0
0.940678
0
0
0
0
null
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
6
c67b839ac59724bf034f99cb260328ffbc38f5bb
24
py
Python
biopymlff/__init__.py
saandre15/biopymlff
ec90370a8c03c51426bd24477034c9413bdcdb04
[ "MIT" ]
null
null
null
biopymlff/__init__.py
saandre15/biopymlff
ec90370a8c03c51426bd24477034c9413bdcdb04
[ "MIT" ]
null
null
null
biopymlff/__init__.py
saandre15/biopymlff
ec90370a8c03c51426bd24477034c9413bdcdb04
[ "MIT" ]
null
null
null
"Interface to GEBF_MLFF"
24
24
0.833333
4
24
4.75
1
0
0
0
0
0
0
0
0
0
0
0
0.083333
24
1
24
24
0.863636
0.916667
0
0
0
0
0.88
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
6
c68d24a0496843df918370a239057e62e856b495
4,062
py
Python
tests/test_Contractinator.py
andybroth/RANS
f168792f63ed9e055941eda0869cb09ea30c3cb5
[ "MIT" ]
null
null
null
tests/test_Contractinator.py
andybroth/RANS
f168792f63ed9e055941eda0869cb09ea30c3cb5
[ "MIT" ]
10
2021-11-12T19:39:44.000Z
2021-12-20T19:45:09.000Z
tests/test_Contractinator.py
andybroth/RANS
f168792f63ed9e055941eda0869cb09ea30c3cb5
[ "MIT" ]
1
2022-03-23T02:26:34.000Z
2022-03-23T02:26:34.000Z
""" Description Tests the contractinator. Libraries/Modules -pytest \n -Field \n -numpy \n Notes """ import pytest import numpy as np from bin.Field import Field, array_equal import bin.Contractinator as Ctr def test_simple(): """ Tests the Contractinator.py 'simple' method. Args: Returns : Nothing, but asserts if 'simple' deletes items from the Field as it should. """ # test 2D zero field input_dims = (8,8) input_zeros = np.zeros(input_dims) input_field = Field(input_dims, input_zeros) coarse_dims = (4,4) coarse_field = Field(coarse_dims) output_dims = (4,4) output_zeros = np.zeros(output_dims) output_field = Field(output_dims, output_zeros) assert array_equal(input_field.size(), input_dims) Ctr.simple(input_field, coarse_field) assert array_equal(coarse_field, output_field) # test 3D zero field input_dims = (4,4,4) input_zeros = np.zeros(input_dims) input_field = Field(input_dims, input_zeros) coarse_dims = (2,2,4) coarse_field = Field(coarse_dims) output_dims = (2,2,4) output_zeros = np.zeros(output_dims) output_field = Field(output_dims, output_zeros) Ctr.simple(input_field, coarse_field) assert array_equal(coarse_field, output_field) # test 2D ones field input_dims = (4,4) input_values = np.array([[1,1,1,1],[1,1,1,1],[1,1,1,1],[1,1,1,1]]) input_field = Field(input_dims, input_values) coarse_dims = (2,2) coarse_field = Field(coarse_dims) output_dims = (2,2) output_values = np.array([[1,1],[1,1]]) output_field = Field(output_dims, output_values) Ctr.simple(input_field, coarse_field) assert array_equal(coarse_field, output_field) # test 2D one-two field input_dims = (4,4) input_values = np.array([[1,2,1,2],[1,2,1,2],[1,2,1,2],[1,2,1,2]]) input_field = Field(input_dims, input_values) coarse_dims = (2,2) coarse_field = Field(coarse_dims) output_dims = (2,2) output_values = np.array([[1,1],[1,1]]) output_field = Field(output_dims, output_values) Ctr.simple(input_field, coarse_field) assert array_equal(coarse_field, output_field) # test 2D field input_dims = (4,4) 
input_values = np.array([[1,1,1,1],[2,2,2,2],[3,3,3,3],[4,4,4,4]]) input_field = Field(input_dims, input_values) coarse_dims = (2,2) coarse_field = Field(coarse_dims) output_dims = (2,2) output_values = np.array([[1,1],[3,3]]) output_field = Field(output_dims, output_values) Ctr.simple(input_field, coarse_field) assert array_equal(coarse_field, output_field) def test_sum4way(): """ Tests the Contractinator.py 'sum4way' method. Args: Returns : Nothing, but asserts if 'sum4way' properly sums items from the Field as it should. """ # test 2D array input_dims = (4,4) input_values = np.array([[1,1,1,1],[2,2,2,2],[3,3,3,3],[4,4,4,4]]) input_field = Field(input_dims, input_values) coarse_dims = (2,2) coarse_field = Field(coarse_dims) output_dims = (2,2) output_values = np.array([[6,6],[14,14]]) output_field = Field(output_dims, output_values) Ctr.sum4way(input_field, coarse_field) assert array_equal(coarse_field, output_field) def test_conservative4way(): """ Tests the Contractinator.py 'conservative4way' method. Note: does not test weighted averaging. Args: Returns : Nothing, but asserts if 'conservative4way' properly averages items from the Field as it should. """ # test 2D array input_dims = (4,4) input_values = np.array([[1,3,1,3],[1,3,1,3],[1,3,1,3],[1,3,1,3]]) input_field = Field(input_dims, input_values) coarse_dims = (2,2) coarse_field = Field(coarse_dims) output_dims = (2,2) output_values = np.array([[2,2],[2,2]]) output_field = Field(output_dims, output_values) Ctr.conservative4way(input_field, coarse_field) assert array_equal(coarse_field, output_field)
27.08
103
0.661497
620
4,062
4.112903
0.104839
0.021961
0.025882
0.026667
0.798039
0.791765
0.78
0.751765
0.704706
0.676863
0
0.050373
0.208272
4,062
149
104
27.261745
0.742537
0.179222
0
0.717949
0
0
0
0
0
0
0
0
0.102564
1
0.038462
false
0
0.051282
0
0.089744
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
c6be649c549089e46c70f25ccfafe39358cd9764
99,240
py
Python
rastervision/protos/tf_object_detection/preprocessor_pb2.py
carderne/raster-vision
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
[ "Apache-2.0" ]
4
2019-03-11T12:38:15.000Z
2021-04-06T14:57:52.000Z
rastervision/protos/tf_object_detection/preprocessor_pb2.py
carderne/raster-vision
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
[ "Apache-2.0" ]
null
null
null
rastervision/protos/tf_object_detection/preprocessor_pb2.py
carderne/raster-vision
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
[ "Apache-2.0" ]
1
2021-12-02T08:07:21.000Z
2021-12-02T08:07:21.000Z
# Generated by the protocol buffer compiler. DO NOT EDIT! # source: rastervision/protos/tf_object_detection/preprocessor.proto import sys _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() DESCRIPTOR = _descriptor.FileDescriptor( name='rastervision/protos/tf_object_detection/preprocessor.proto', package='rastervision.protos.tf_object_detection', syntax='proto2', serialized_pb=_b('\n:rastervision/protos/tf_object_detection/preprocessor.proto\x12\'rastervision.protos.tf_object_detection\"\x8b\x15\n\x11PreprocessingStep\x12R\n\x0fnormalize_image\x18\x01 \x01(\x0b\x32\x37.rastervision.protos.tf_object_detection.NormalizeImageH\x00\x12_\n\x16random_horizontal_flip\x18\x02 \x01(\x0b\x32=.rastervision.protos.tf_object_detection.RandomHorizontalFlipH\x00\x12\x62\n\x18random_pixel_value_scale\x18\x03 \x01(\x0b\x32>.rastervision.protos.tf_object_detection.RandomPixelValueScaleH\x00\x12W\n\x12random_image_scale\x18\x04 \x01(\x0b\x32\x39.rastervision.protos.tf_object_detection.RandomImageScaleH\x00\x12V\n\x12random_rgb_to_gray\x18\x05 \x01(\x0b\x32\x38.rastervision.protos.tf_object_detection.RandomRGBtoGrayH\x00\x12\x63\n\x18random_adjust_brightness\x18\x06 \x01(\x0b\x32?.rastervision.protos.tf_object_detection.RandomAdjustBrightnessH\x00\x12_\n\x16random_adjust_contrast\x18\x07 \x01(\x0b\x32=.rastervision.protos.tf_object_detection.RandomAdjustContrastH\x00\x12U\n\x11random_adjust_hue\x18\x08 \x01(\x0b\x32\x38.rastervision.protos.tf_object_detection.RandomAdjustHueH\x00\x12\x63\n\x18random_adjust_saturation\x18\t 
\x01(\x0b\x32?.rastervision.protos.tf_object_detection.RandomAdjustSaturationH\x00\x12[\n\x14random_distort_color\x18\n \x01(\x0b\x32;.rastervision.protos.tf_object_detection.RandomDistortColorH\x00\x12Y\n\x13random_jitter_boxes\x18\x0b \x01(\x0b\x32:.rastervision.protos.tf_object_detection.RandomJitterBoxesH\x00\x12U\n\x11random_crop_image\x18\x0c \x01(\x0b\x32\x38.rastervision.protos.tf_object_detection.RandomCropImageH\x00\x12S\n\x10random_pad_image\x18\r \x01(\x0b\x32\x37.rastervision.protos.tf_object_detection.RandomPadImageH\x00\x12\\\n\x15random_crop_pad_image\x18\x0e \x01(\x0b\x32;.rastervision.protos.tf_object_detection.RandomCropPadImageH\x00\x12g\n\x1brandom_crop_to_aspect_ratio\x18\x0f \x01(\x0b\x32@.rastervision.protos.tf_object_detection.RandomCropToAspectRatioH\x00\x12[\n\x14random_black_patches\x18\x10 \x01(\x0b\x32;.rastervision.protos.tf_object_detection.RandomBlackPatchesH\x00\x12[\n\x14random_resize_method\x18\x11 \x01(\x0b\x32;.rastervision.protos.tf_object_detection.RandomResizeMethodH\x00\x12q\n scale_boxes_to_pixel_coordinates\x18\x12 \x01(\x0b\x32\x45.rastervision.protos.tf_object_detection.ScaleBoxesToPixelCoordinatesH\x00\x12L\n\x0cresize_image\x18\x13 \x01(\x0b\x32\x34.rastervision.protos.tf_object_detection.ResizeImageH\x00\x12]\n\x15subtract_channel_mean\x18\x14 \x01(\x0b\x32<.rastervision.protos.tf_object_detection.SubtractChannelMeanH\x00\x12Q\n\x0fssd_random_crop\x18\x15 \x01(\x0b\x32\x36.rastervision.protos.tf_object_detection.SSDRandomCropH\x00\x12X\n\x13ssd_random_crop_pad\x18\x16 \x01(\x0b\x32\x39.rastervision.protos.tf_object_detection.SSDRandomCropPadH\x00\x12t\n\"ssd_random_crop_fixed_aspect_ratio\x18\x17 \x01(\x0b\x32\x46.rastervision.protos.tf_object_detection.SSDRandomCropFixedAspectRatioH\x00\x12{\n&ssd_random_crop_pad_fixed_aspect_ratio\x18\x18 \x01(\x0b\x32I.rastervision.protos.tf_object_detection.SSDRandomCropPadFixedAspectRatioH\x00\x12[\n\x14random_vertical_flip\x18\x19 
\x01(\x0b\x32;.rastervision.protos.tf_object_detection.RandomVerticalFlipH\x00\x12V\n\x11random_rotation90\x18\x1a \x01(\x0b\x32\x39.rastervision.protos.tf_object_detection.RandomRotation90H\x00\x12I\n\x0brgb_to_gray\x18\x1b \x01(\x0b\x32\x32.rastervision.protos.tf_object_detection.RGBtoGrayH\x00\x12o\n\x1f\x63onvert_class_logits_to_softmax\x18\x1c \x01(\x0b\x32\x44.rastervision.protos.tf_object_detection.ConvertClassLogitsToSoftmaxH\x00\x42\x14\n\x12preprocessing_step\"v\n\x0eNormalizeImage\x12\x17\n\x0foriginal_minval\x18\x01 \x01(\x02\x12\x17\n\x0foriginal_maxval\x18\x02 \x01(\x02\x12\x18\n\rtarget_minval\x18\x03 \x01(\x02:\x01\x30\x12\x18\n\rtarget_maxval\x18\x04 \x01(\x02:\x01\x31\"9\n\x14RandomHorizontalFlip\x12!\n\x19keypoint_flip_permutation\x18\x01 \x03(\x05\"7\n\x12RandomVerticalFlip\x12!\n\x19keypoint_flip_permutation\x18\x01 \x03(\x05\"\x12\n\x10RandomRotation90\"A\n\x15RandomPixelValueScale\x12\x13\n\x06minval\x18\x01 \x01(\x02:\x03\x30.9\x12\x13\n\x06maxval\x18\x02 \x01(\x02:\x03\x31.1\"L\n\x10RandomImageScale\x12\x1c\n\x0fmin_scale_ratio\x18\x01 \x01(\x02:\x03\x30.5\x12\x1a\n\x0fmax_scale_ratio\x18\x02 \x01(\x02:\x01\x32\"+\n\x0fRandomRGBtoGray\x12\x18\n\x0bprobability\x18\x01 \x01(\x02:\x03\x30.1\"0\n\x16RandomAdjustBrightness\x12\x16\n\tmax_delta\x18\x01 \x01(\x02:\x03\x30.2\"G\n\x14RandomAdjustContrast\x12\x16\n\tmin_delta\x18\x01 \x01(\x02:\x03\x30.8\x12\x17\n\tmax_delta\x18\x02 \x01(\x02:\x04\x31.25\"*\n\x0fRandomAdjustHue\x12\x17\n\tmax_delta\x18\x01 \x01(\x02:\x04\x30.02\"I\n\x16RandomAdjustSaturation\x12\x16\n\tmin_delta\x18\x01 \x01(\x02:\x03\x30.8\x12\x17\n\tmax_delta\x18\x02 \x01(\x02:\x04\x31.25\",\n\x12RandomDistortColor\x12\x16\n\x0e\x63olor_ordering\x18\x01 \x01(\x05\"(\n\x11RandomJitterBoxes\x12\x13\n\x05ratio\x18\x01 \x01(\x02:\x04\x30.05\"\xd1\x01\n\x0fRandomCropImage\x12\x1d\n\x12min_object_covered\x18\x01 \x01(\x02:\x01\x31\x12\x1e\n\x10min_aspect_ratio\x18\x02 \x01(\x02:\x04\x30.75\x12\x1e\n\x10max_aspect_ratio\x18\x03 
\x01(\x02:\x04\x31.33\x12\x15\n\x08min_area\x18\x04 \x01(\x02:\x03\x30.1\x12\x13\n\x08max_area\x18\x05 \x01(\x02:\x01\x31\x12\x1b\n\x0eoverlap_thresh\x18\x06 \x01(\x02:\x03\x30.3\x12\x16\n\x0brandom_coef\x18\x07 \x01(\x02:\x01\x30\"\x89\x01\n\x0eRandomPadImage\x12\x18\n\x10min_image_height\x18\x01 \x01(\x02\x12\x17\n\x0fmin_image_width\x18\x02 \x01(\x02\x12\x18\n\x10max_image_height\x18\x03 \x01(\x02\x12\x17\n\x0fmax_image_width\x18\x04 \x01(\x02\x12\x11\n\tpad_color\x18\x05 \x03(\x02\"\xa5\x02\n\x12RandomCropPadImage\x12\x1d\n\x12min_object_covered\x18\x01 \x01(\x02:\x01\x31\x12\x1e\n\x10min_aspect_ratio\x18\x02 \x01(\x02:\x04\x30.75\x12\x1e\n\x10max_aspect_ratio\x18\x03 \x01(\x02:\x04\x31.33\x12\x15\n\x08min_area\x18\x04 \x01(\x02:\x03\x30.1\x12\x13\n\x08max_area\x18\x05 \x01(\x02:\x01\x31\x12\x1b\n\x0eoverlap_thresh\x18\x06 \x01(\x02:\x03\x30.3\x12\x16\n\x0brandom_coef\x18\x07 \x01(\x02:\x01\x30\x12\x1d\n\x15min_padded_size_ratio\x18\x08 \x03(\x02\x12\x1d\n\x15max_padded_size_ratio\x18\t \x03(\x02\x12\x11\n\tpad_color\x18\n \x03(\x02\"O\n\x17RandomCropToAspectRatio\x12\x17\n\x0c\x61spect_ratio\x18\x01 \x01(\x02:\x01\x31\x12\x1b\n\x0eoverlap_thresh\x18\x02 \x01(\x02:\x03\x30.3\"o\n\x12RandomBlackPatches\x12\x1d\n\x11max_black_patches\x18\x01 \x01(\x05:\x02\x31\x30\x12\x18\n\x0bprobability\x18\x02 \x01(\x02:\x03\x30.5\x12 \n\x13size_to_image_ratio\x18\x03 \x01(\x02:\x03\x30.1\"A\n\x12RandomResizeMethod\x12\x15\n\rtarget_height\x18\x01 \x01(\x02\x12\x14\n\x0ctarget_width\x18\x02 \x01(\x02\"\x0b\n\tRGBtoGray\"\x1e\n\x1cScaleBoxesToPixelCoordinates\"\xd0\x01\n\x0bResizeImage\x12\x12\n\nnew_height\x18\x01 \x01(\x05\x12\x11\n\tnew_width\x18\x02 \x01(\x05\x12U\n\x06method\x18\x03 \x01(\x0e\x32;.rastervision.protos.tf_object_detection.ResizeImage.Method:\x08\x42ILINEAR\"C\n\x06Method\x12\x08\n\x04\x41REA\x10\x01\x12\x0b\n\x07\x42ICUBIC\x10\x02\x12\x0c\n\x08\x42ILINEAR\x10\x03\x12\x14\n\x10NEAREST_NEIGHBOR\x10\x04\"$\n\x13SubtractChannelMean\x12\r\n\x05means\x18\x01 
\x03(\x02\"\xb9\x01\n\x16SSDRandomCropOperation\x12\x1a\n\x12min_object_covered\x18\x01 \x01(\x02\x12\x18\n\x10min_aspect_ratio\x18\x02 \x01(\x02\x12\x18\n\x10max_aspect_ratio\x18\x03 \x01(\x02\x12\x10\n\x08min_area\x18\x04 \x01(\x02\x12\x10\n\x08max_area\x18\x05 \x01(\x02\x12\x16\n\x0eoverlap_thresh\x18\x06 \x01(\x02\x12\x13\n\x0brandom_coef\x18\x07 \x01(\x02\"d\n\rSSDRandomCrop\x12S\n\noperations\x18\x01 \x03(\x0b\x32?.rastervision.protos.tf_object_detection.SSDRandomCropOperation\"\xb9\x02\n\x19SSDRandomCropPadOperation\x12\x1a\n\x12min_object_covered\x18\x01 \x01(\x02\x12\x18\n\x10min_aspect_ratio\x18\x02 \x01(\x02\x12\x18\n\x10max_aspect_ratio\x18\x03 \x01(\x02\x12\x10\n\x08min_area\x18\x04 \x01(\x02\x12\x10\n\x08max_area\x18\x05 \x01(\x02\x12\x16\n\x0eoverlap_thresh\x18\x06 \x01(\x02\x12\x13\n\x0brandom_coef\x18\x07 \x01(\x02\x12\x1d\n\x15min_padded_size_ratio\x18\x08 \x03(\x02\x12\x1d\n\x15max_padded_size_ratio\x18\t \x03(\x02\x12\x13\n\x0bpad_color_r\x18\n \x01(\x02\x12\x13\n\x0bpad_color_g\x18\x0b \x01(\x02\x12\x13\n\x0bpad_color_b\x18\x0c \x01(\x02\"j\n\x10SSDRandomCropPad\x12V\n\noperations\x18\x01 \x03(\x0b\x32\x42.rastervision.protos.tf_object_detection.SSDRandomCropPadOperation\"\x95\x01\n&SSDRandomCropFixedAspectRatioOperation\x12\x1a\n\x12min_object_covered\x18\x01 \x01(\x02\x12\x10\n\x08min_area\x18\x04 \x01(\x02\x12\x10\n\x08max_area\x18\x05 \x01(\x02\x12\x16\n\x0eoverlap_thresh\x18\x06 \x01(\x02\x12\x13\n\x0brandom_coef\x18\x07 \x01(\x02\"\x9d\x01\n\x1dSSDRandomCropFixedAspectRatio\x12\x63\n\noperations\x18\x01 \x03(\x0b\x32O.rastervision.protos.tf_object_detection.SSDRandomCropFixedAspectRatioOperation\x12\x17\n\x0c\x61spect_ratio\x18\x02 \x01(\x02:\x01\x31\"\xcc\x01\n)SSDRandomCropPadFixedAspectRatioOperation\x12\x1a\n\x12min_object_covered\x18\x01 \x01(\x02\x12\x18\n\x10min_aspect_ratio\x18\x02 \x01(\x02\x12\x18\n\x10max_aspect_ratio\x18\x03 \x01(\x02\x12\x10\n\x08min_area\x18\x04 \x01(\x02\x12\x10\n\x08max_area\x18\x05 
\x01(\x02\x12\x16\n\x0eoverlap_thresh\x18\x06 \x01(\x02\x12\x13\n\x0brandom_coef\x18\x07 \x01(\x02\"\xe1\x01\n SSDRandomCropPadFixedAspectRatio\x12\x66\n\noperations\x18\x01 \x03(\x0b\x32R.rastervision.protos.tf_object_detection.SSDRandomCropPadFixedAspectRatioOperation\x12\x17\n\x0c\x61spect_ratio\x18\x02 \x01(\x02:\x01\x31\x12\x1d\n\x15min_padded_size_ratio\x18\x03 \x03(\x02\x12\x1d\n\x15max_padded_size_ratio\x18\x04 \x03(\x02\"5\n\x1b\x43onvertClassLogitsToSoftmax\x12\x16\n\x0btemperature\x18\x01 \x01(\x02:\x01\x31') ) _sym_db.RegisterFileDescriptor(DESCRIPTOR) _RESIZEIMAGE_METHOD = _descriptor.EnumDescriptor( name='Method', full_name='rastervision.protos.tf_object_detection.ResizeImage.Method', filename=None, file=DESCRIPTOR, values=[ _descriptor.EnumValueDescriptor( name='AREA', index=0, number=1, options=None, type=None), _descriptor.EnumValueDescriptor( name='BICUBIC', index=1, number=2, options=None, type=None), _descriptor.EnumValueDescriptor( name='BILINEAR', index=2, number=3, options=None, type=None), _descriptor.EnumValueDescriptor( name='NEAREST_NEIGHBOR', index=3, number=4, options=None, type=None), ], containing_type=None, options=None, serialized_start=4677, serialized_end=4744, ) _sym_db.RegisterEnumDescriptor(_RESIZEIMAGE_METHOD) _PREPROCESSINGSTEP = _descriptor.Descriptor( name='PreprocessingStep', full_name='rastervision.protos.tf_object_detection.PreprocessingStep', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='normalize_image', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.normalize_image', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_horizontal_flip', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.random_horizontal_flip', index=1, number=2, 
type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_pixel_value_scale', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.random_pixel_value_scale', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_image_scale', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.random_image_scale', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_rgb_to_gray', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.random_rgb_to_gray', index=4, number=5, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_adjust_brightness', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.random_adjust_brightness', index=5, number=6, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_adjust_contrast', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.random_adjust_contrast', index=6, number=7, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), 
_descriptor.FieldDescriptor( name='random_adjust_hue', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.random_adjust_hue', index=7, number=8, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_adjust_saturation', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.random_adjust_saturation', index=8, number=9, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_distort_color', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.random_distort_color', index=9, number=10, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_jitter_boxes', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.random_jitter_boxes', index=10, number=11, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_crop_image', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.random_crop_image', index=11, number=12, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_pad_image', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.random_pad_image', index=12, number=13, type=11, cpp_type=10, label=1, 
has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_crop_pad_image', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.random_crop_pad_image', index=13, number=14, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_crop_to_aspect_ratio', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.random_crop_to_aspect_ratio', index=14, number=15, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_black_patches', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.random_black_patches', index=15, number=16, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_resize_method', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.random_resize_method', index=16, number=17, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='scale_boxes_to_pixel_coordinates', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.scale_boxes_to_pixel_coordinates', index=17, number=18, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, 
options=None), _descriptor.FieldDescriptor( name='resize_image', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.resize_image', index=18, number=19, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='subtract_channel_mean', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.subtract_channel_mean', index=19, number=20, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='ssd_random_crop', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.ssd_random_crop', index=20, number=21, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='ssd_random_crop_pad', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.ssd_random_crop_pad', index=21, number=22, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='ssd_random_crop_fixed_aspect_ratio', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.ssd_random_crop_fixed_aspect_ratio', index=22, number=23, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='ssd_random_crop_pad_fixed_aspect_ratio', 
full_name='rastervision.protos.tf_object_detection.PreprocessingStep.ssd_random_crop_pad_fixed_aspect_ratio', index=23, number=24, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_vertical_flip', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.random_vertical_flip', index=24, number=25, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_rotation90', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.random_rotation90', index=25, number=26, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='rgb_to_gray', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.rgb_to_gray', index=26, number=27, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='convert_class_logits_to_softmax', full_name='rastervision.protos.tf_object_detection.PreprocessingStep.convert_class_logits_to_softmax', index=27, number=28, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='preprocessing_step', 
full_name='rastervision.protos.tf_object_detection.PreprocessingStep.preprocessing_step', index=0, containing_type=None, fields=[]), ], serialized_start=104, serialized_end=2803, ) _NORMALIZEIMAGE = _descriptor.Descriptor( name='NormalizeImage', full_name='rastervision.protos.tf_object_detection.NormalizeImage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='original_minval', full_name='rastervision.protos.tf_object_detection.NormalizeImage.original_minval', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='original_maxval', full_name='rastervision.protos.tf_object_detection.NormalizeImage.original_maxval', index=1, number=2, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='target_minval', full_name='rastervision.protos.tf_object_detection.NormalizeImage.target_minval', index=2, number=3, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='target_maxval', full_name='rastervision.protos.tf_object_detection.NormalizeImage.target_maxval', index=3, number=4, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(1), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=2805, serialized_end=2923, ) _RANDOMHORIZONTALFLIP = _descriptor.Descriptor( 
name='RandomHorizontalFlip', full_name='rastervision.protos.tf_object_detection.RandomHorizontalFlip', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='keypoint_flip_permutation', full_name='rastervision.protos.tf_object_detection.RandomHorizontalFlip.keypoint_flip_permutation', index=0, number=1, type=5, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=2925, serialized_end=2982, ) _RANDOMVERTICALFLIP = _descriptor.Descriptor( name='RandomVerticalFlip', full_name='rastervision.protos.tf_object_detection.RandomVerticalFlip', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='keypoint_flip_permutation', full_name='rastervision.protos.tf_object_detection.RandomVerticalFlip.keypoint_flip_permutation', index=0, number=1, type=5, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=2984, serialized_end=3039, ) _RANDOMROTATION90 = _descriptor.Descriptor( name='RandomRotation90', full_name='rastervision.protos.tf_object_detection.RandomRotation90', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3041, serialized_end=3059, ) _RANDOMPIXELVALUESCALE = _descriptor.Descriptor( name='RandomPixelValueScale', 
full_name='rastervision.protos.tf_object_detection.RandomPixelValueScale', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='minval', full_name='rastervision.protos.tf_object_detection.RandomPixelValueScale.minval', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0.9), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='maxval', full_name='rastervision.protos.tf_object_detection.RandomPixelValueScale.maxval', index=1, number=2, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(1.1), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3061, serialized_end=3126, ) _RANDOMIMAGESCALE = _descriptor.Descriptor( name='RandomImageScale', full_name='rastervision.protos.tf_object_detection.RandomImageScale', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='min_scale_ratio', full_name='rastervision.protos.tf_object_detection.RandomImageScale.min_scale_ratio', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0.5), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='max_scale_ratio', full_name='rastervision.protos.tf_object_detection.RandomImageScale.max_scale_ratio', index=1, number=2, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(2), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, 
syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3128, serialized_end=3204, ) _RANDOMRGBTOGRAY = _descriptor.Descriptor( name='RandomRGBtoGray', full_name='rastervision.protos.tf_object_detection.RandomRGBtoGray', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='probability', full_name='rastervision.protos.tf_object_detection.RandomRGBtoGray.probability', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0.1), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3206, serialized_end=3249, ) _RANDOMADJUSTBRIGHTNESS = _descriptor.Descriptor( name='RandomAdjustBrightness', full_name='rastervision.protos.tf_object_detection.RandomAdjustBrightness', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='max_delta', full_name='rastervision.protos.tf_object_detection.RandomAdjustBrightness.max_delta', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0.2), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3251, serialized_end=3299, ) _RANDOMADJUSTCONTRAST = _descriptor.Descriptor( name='RandomAdjustContrast', full_name='rastervision.protos.tf_object_detection.RandomAdjustContrast', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='min_delta', full_name='rastervision.protos.tf_object_detection.RandomAdjustContrast.min_delta', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=True, 
default_value=float(0.8), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='max_delta', full_name='rastervision.protos.tf_object_detection.RandomAdjustContrast.max_delta', index=1, number=2, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(1.25), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3301, serialized_end=3372, ) _RANDOMADJUSTHUE = _descriptor.Descriptor( name='RandomAdjustHue', full_name='rastervision.protos.tf_object_detection.RandomAdjustHue', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='max_delta', full_name='rastervision.protos.tf_object_detection.RandomAdjustHue.max_delta', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0.02), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3374, serialized_end=3416, ) _RANDOMADJUSTSATURATION = _descriptor.Descriptor( name='RandomAdjustSaturation', full_name='rastervision.protos.tf_object_detection.RandomAdjustSaturation', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='min_delta', full_name='rastervision.protos.tf_object_detection.RandomAdjustSaturation.min_delta', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0.8), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='max_delta', 
full_name='rastervision.protos.tf_object_detection.RandomAdjustSaturation.max_delta', index=1, number=2, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(1.25), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3418, serialized_end=3491, ) _RANDOMDISTORTCOLOR = _descriptor.Descriptor( name='RandomDistortColor', full_name='rastervision.protos.tf_object_detection.RandomDistortColor', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='color_ordering', full_name='rastervision.protos.tf_object_detection.RandomDistortColor.color_ordering', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3493, serialized_end=3537, ) _RANDOMJITTERBOXES = _descriptor.Descriptor( name='RandomJitterBoxes', full_name='rastervision.protos.tf_object_detection.RandomJitterBoxes', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='ratio', full_name='rastervision.protos.tf_object_detection.RandomJitterBoxes.ratio', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0.05), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3539, serialized_end=3579, ) _RANDOMCROPIMAGE = _descriptor.Descriptor( 
name='RandomCropImage', full_name='rastervision.protos.tf_object_detection.RandomCropImage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='min_object_covered', full_name='rastervision.protos.tf_object_detection.RandomCropImage.min_object_covered', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(1), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='min_aspect_ratio', full_name='rastervision.protos.tf_object_detection.RandomCropImage.min_aspect_ratio', index=1, number=2, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0.75), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='max_aspect_ratio', full_name='rastervision.protos.tf_object_detection.RandomCropImage.max_aspect_ratio', index=2, number=3, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(1.33), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='min_area', full_name='rastervision.protos.tf_object_detection.RandomCropImage.min_area', index=3, number=4, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0.1), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='max_area', full_name='rastervision.protos.tf_object_detection.RandomCropImage.max_area', index=4, number=5, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(1), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='overlap_thresh', 
full_name='rastervision.protos.tf_object_detection.RandomCropImage.overlap_thresh', index=5, number=6, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0.3), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_coef', full_name='rastervision.protos.tf_object_detection.RandomCropImage.random_coef', index=6, number=7, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3582, serialized_end=3791, ) _RANDOMPADIMAGE = _descriptor.Descriptor( name='RandomPadImage', full_name='rastervision.protos.tf_object_detection.RandomPadImage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='min_image_height', full_name='rastervision.protos.tf_object_detection.RandomPadImage.min_image_height', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='min_image_width', full_name='rastervision.protos.tf_object_detection.RandomPadImage.min_image_width', index=1, number=2, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='max_image_height', full_name='rastervision.protos.tf_object_detection.RandomPadImage.max_image_height', index=2, number=3, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, 
containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='max_image_width', full_name='rastervision.protos.tf_object_detection.RandomPadImage.max_image_width', index=3, number=4, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='pad_color', full_name='rastervision.protos.tf_object_detection.RandomPadImage.pad_color', index=4, number=5, type=2, cpp_type=6, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3794, serialized_end=3931, ) _RANDOMCROPPADIMAGE = _descriptor.Descriptor( name='RandomCropPadImage', full_name='rastervision.protos.tf_object_detection.RandomCropPadImage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='min_object_covered', full_name='rastervision.protos.tf_object_detection.RandomCropPadImage.min_object_covered', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(1), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='min_aspect_ratio', full_name='rastervision.protos.tf_object_detection.RandomCropPadImage.min_aspect_ratio', index=1, number=2, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0.75), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='max_aspect_ratio', 
full_name='rastervision.protos.tf_object_detection.RandomCropPadImage.max_aspect_ratio', index=2, number=3, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(1.33), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='min_area', full_name='rastervision.protos.tf_object_detection.RandomCropPadImage.min_area', index=3, number=4, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0.1), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='max_area', full_name='rastervision.protos.tf_object_detection.RandomCropPadImage.max_area', index=4, number=5, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(1), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='overlap_thresh', full_name='rastervision.protos.tf_object_detection.RandomCropPadImage.overlap_thresh', index=5, number=6, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0.3), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_coef', full_name='rastervision.protos.tf_object_detection.RandomCropPadImage.random_coef', index=6, number=7, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='min_padded_size_ratio', full_name='rastervision.protos.tf_object_detection.RandomCropPadImage.min_padded_size_ratio', index=7, number=8, type=2, cpp_type=6, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, 
extension_scope=None, options=None), _descriptor.FieldDescriptor( name='max_padded_size_ratio', full_name='rastervision.protos.tf_object_detection.RandomCropPadImage.max_padded_size_ratio', index=8, number=9, type=2, cpp_type=6, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='pad_color', full_name='rastervision.protos.tf_object_detection.RandomCropPadImage.pad_color', index=9, number=10, type=2, cpp_type=6, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3934, serialized_end=4227, ) _RANDOMCROPTOASPECTRATIO = _descriptor.Descriptor( name='RandomCropToAspectRatio', full_name='rastervision.protos.tf_object_detection.RandomCropToAspectRatio', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='aspect_ratio', full_name='rastervision.protos.tf_object_detection.RandomCropToAspectRatio.aspect_ratio', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(1), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='overlap_thresh', full_name='rastervision.protos.tf_object_detection.RandomCropToAspectRatio.overlap_thresh', index=1, number=2, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0.3), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], 
serialized_start=4229, serialized_end=4308, ) _RANDOMBLACKPATCHES = _descriptor.Descriptor( name='RandomBlackPatches', full_name='rastervision.protos.tf_object_detection.RandomBlackPatches', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='max_black_patches', full_name='rastervision.protos.tf_object_detection.RandomBlackPatches.max_black_patches', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=True, default_value=10, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='probability', full_name='rastervision.protos.tf_object_detection.RandomBlackPatches.probability', index=1, number=2, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0.5), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='size_to_image_ratio', full_name='rastervision.protos.tf_object_detection.RandomBlackPatches.size_to_image_ratio', index=2, number=3, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0.1), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=4310, serialized_end=4421, ) _RANDOMRESIZEMETHOD = _descriptor.Descriptor( name='RandomResizeMethod', full_name='rastervision.protos.tf_object_detection.RandomResizeMethod', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='target_height', full_name='rastervision.protos.tf_object_detection.RandomResizeMethod.target_height', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, 
is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='target_width', full_name='rastervision.protos.tf_object_detection.RandomResizeMethod.target_width', index=1, number=2, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=4423, serialized_end=4488, ) _RGBTOGRAY = _descriptor.Descriptor( name='RGBtoGray', full_name='rastervision.protos.tf_object_detection.RGBtoGray', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=4490, serialized_end=4501, ) _SCALEBOXESTOPIXELCOORDINATES = _descriptor.Descriptor( name='ScaleBoxesToPixelCoordinates', full_name='rastervision.protos.tf_object_detection.ScaleBoxesToPixelCoordinates', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=4503, serialized_end=4533, ) _RESIZEIMAGE = _descriptor.Descriptor( name='ResizeImage', full_name='rastervision.protos.tf_object_detection.ResizeImage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='new_height', full_name='rastervision.protos.tf_object_detection.ResizeImage.new_height', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='new_width', 
full_name='rastervision.protos.tf_object_detection.ResizeImage.new_width', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='method', full_name='rastervision.protos.tf_object_detection.ResizeImage.method', index=2, number=3, type=14, cpp_type=8, label=1, has_default_value=True, default_value=3, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ _RESIZEIMAGE_METHOD, ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=4536, serialized_end=4744, ) _SUBTRACTCHANNELMEAN = _descriptor.Descriptor( name='SubtractChannelMean', full_name='rastervision.protos.tf_object_detection.SubtractChannelMean', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='means', full_name='rastervision.protos.tf_object_detection.SubtractChannelMean.means', index=0, number=1, type=2, cpp_type=6, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=4746, serialized_end=4782, ) _SSDRANDOMCROPOPERATION = _descriptor.Descriptor( name='SSDRandomCropOperation', full_name='rastervision.protos.tf_object_detection.SSDRandomCropOperation', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='min_object_covered', full_name='rastervision.protos.tf_object_detection.SSDRandomCropOperation.min_object_covered', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), 
message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='min_aspect_ratio', full_name='rastervision.protos.tf_object_detection.SSDRandomCropOperation.min_aspect_ratio', index=1, number=2, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='max_aspect_ratio', full_name='rastervision.protos.tf_object_detection.SSDRandomCropOperation.max_aspect_ratio', index=2, number=3, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='min_area', full_name='rastervision.protos.tf_object_detection.SSDRandomCropOperation.min_area', index=3, number=4, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='max_area', full_name='rastervision.protos.tf_object_detection.SSDRandomCropOperation.max_area', index=4, number=5, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='overlap_thresh', full_name='rastervision.protos.tf_object_detection.SSDRandomCropOperation.overlap_thresh', index=5, number=6, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_coef', 
full_name='rastervision.protos.tf_object_detection.SSDRandomCropOperation.random_coef', index=6, number=7, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=4785, serialized_end=4970, ) _SSDRANDOMCROP = _descriptor.Descriptor( name='SSDRandomCrop', full_name='rastervision.protos.tf_object_detection.SSDRandomCrop', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='operations', full_name='rastervision.protos.tf_object_detection.SSDRandomCrop.operations', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=4972, serialized_end=5072, ) _SSDRANDOMCROPPADOPERATION = _descriptor.Descriptor( name='SSDRandomCropPadOperation', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadOperation', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='min_object_covered', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadOperation.min_object_covered', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='min_aspect_ratio', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadOperation.min_aspect_ratio', index=1, number=2, type=2, cpp_type=6, label=1, 
has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='max_aspect_ratio', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadOperation.max_aspect_ratio', index=2, number=3, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='min_area', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadOperation.min_area', index=3, number=4, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='max_area', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadOperation.max_area', index=4, number=5, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='overlap_thresh', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadOperation.overlap_thresh', index=5, number=6, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_coef', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadOperation.random_coef', index=6, number=7, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='min_padded_size_ratio', 
full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadOperation.min_padded_size_ratio', index=7, number=8, type=2, cpp_type=6, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='max_padded_size_ratio', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadOperation.max_padded_size_ratio', index=8, number=9, type=2, cpp_type=6, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='pad_color_r', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadOperation.pad_color_r', index=9, number=10, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='pad_color_g', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadOperation.pad_color_g', index=10, number=11, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='pad_color_b', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadOperation.pad_color_b', index=11, number=12, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=5075, serialized_end=5388, ) _SSDRANDOMCROPPAD = _descriptor.Descriptor( name='SSDRandomCropPad', 
full_name='rastervision.protos.tf_object_detection.SSDRandomCropPad', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='operations', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPad.operations', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=5390, serialized_end=5496, ) _SSDRANDOMCROPFIXEDASPECTRATIOOPERATION = _descriptor.Descriptor( name='SSDRandomCropFixedAspectRatioOperation', full_name='rastervision.protos.tf_object_detection.SSDRandomCropFixedAspectRatioOperation', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='min_object_covered', full_name='rastervision.protos.tf_object_detection.SSDRandomCropFixedAspectRatioOperation.min_object_covered', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='min_area', full_name='rastervision.protos.tf_object_detection.SSDRandomCropFixedAspectRatioOperation.min_area', index=1, number=4, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='max_area', full_name='rastervision.protos.tf_object_detection.SSDRandomCropFixedAspectRatioOperation.max_area', index=2, number=5, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, 
options=None), _descriptor.FieldDescriptor( name='overlap_thresh', full_name='rastervision.protos.tf_object_detection.SSDRandomCropFixedAspectRatioOperation.overlap_thresh', index=3, number=6, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_coef', full_name='rastervision.protos.tf_object_detection.SSDRandomCropFixedAspectRatioOperation.random_coef', index=4, number=7, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=5499, serialized_end=5648, ) _SSDRANDOMCROPFIXEDASPECTRATIO = _descriptor.Descriptor( name='SSDRandomCropFixedAspectRatio', full_name='rastervision.protos.tf_object_detection.SSDRandomCropFixedAspectRatio', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='operations', full_name='rastervision.protos.tf_object_detection.SSDRandomCropFixedAspectRatio.operations', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='aspect_ratio', full_name='rastervision.protos.tf_object_detection.SSDRandomCropFixedAspectRatio.aspect_ratio', index=1, number=2, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(1), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', 
extension_ranges=[], oneofs=[ ], serialized_start=5651, serialized_end=5808, ) _SSDRANDOMCROPPADFIXEDASPECTRATIOOPERATION = _descriptor.Descriptor( name='SSDRandomCropPadFixedAspectRatioOperation', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadFixedAspectRatioOperation', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='min_object_covered', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadFixedAspectRatioOperation.min_object_covered', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='min_aspect_ratio', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadFixedAspectRatioOperation.min_aspect_ratio', index=1, number=2, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='max_aspect_ratio', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadFixedAspectRatioOperation.max_aspect_ratio', index=2, number=3, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='min_area', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadFixedAspectRatioOperation.min_area', index=3, number=4, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='max_area', 
full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadFixedAspectRatioOperation.max_area', index=4, number=5, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='overlap_thresh', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadFixedAspectRatioOperation.overlap_thresh', index=5, number=6, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='random_coef', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadFixedAspectRatioOperation.random_coef', index=6, number=7, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=5811, serialized_end=6015, ) _SSDRANDOMCROPPADFIXEDASPECTRATIO = _descriptor.Descriptor( name='SSDRandomCropPadFixedAspectRatio', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadFixedAspectRatio', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='operations', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadFixedAspectRatio.operations', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='aspect_ratio', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadFixedAspectRatio.aspect_ratio', index=1, 
number=2, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(1), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='min_padded_size_ratio', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadFixedAspectRatio.min_padded_size_ratio', index=2, number=3, type=2, cpp_type=6, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='max_padded_size_ratio', full_name='rastervision.protos.tf_object_detection.SSDRandomCropPadFixedAspectRatio.max_padded_size_ratio', index=3, number=4, type=2, cpp_type=6, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6018, serialized_end=6243, ) _CONVERTCLASSLOGITSTOSOFTMAX = _descriptor.Descriptor( name='ConvertClassLogitsToSoftmax', full_name='rastervision.protos.tf_object_detection.ConvertClassLogitsToSoftmax', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='temperature', full_name='rastervision.protos.tf_object_detection.ConvertClassLogitsToSoftmax.temperature', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(1), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6245, serialized_end=6298, ) _PREPROCESSINGSTEP.fields_by_name['normalize_image'].message_type = _NORMALIZEIMAGE 
# NOTE: protoc-generated descriptor wiring — do not hand-edit; regenerate from
# the .proto source instead. Each assignment below resolves the message_type of
# a PreprocessingStep field (declared earlier with message_type=None) to its
# concrete Descriptor object now that all message Descriptors exist.
_PREPROCESSINGSTEP.fields_by_name['random_horizontal_flip'].message_type = _RANDOMHORIZONTALFLIP
_PREPROCESSINGSTEP.fields_by_name['random_pixel_value_scale'].message_type = _RANDOMPIXELVALUESCALE
_PREPROCESSINGSTEP.fields_by_name['random_image_scale'].message_type = _RANDOMIMAGESCALE
_PREPROCESSINGSTEP.fields_by_name['random_rgb_to_gray'].message_type = _RANDOMRGBTOGRAY
_PREPROCESSINGSTEP.fields_by_name['random_adjust_brightness'].message_type = _RANDOMADJUSTBRIGHTNESS
_PREPROCESSINGSTEP.fields_by_name['random_adjust_contrast'].message_type = _RANDOMADJUSTCONTRAST
_PREPROCESSINGSTEP.fields_by_name['random_adjust_hue'].message_type = _RANDOMADJUSTHUE
_PREPROCESSINGSTEP.fields_by_name['random_adjust_saturation'].message_type = _RANDOMADJUSTSATURATION
_PREPROCESSINGSTEP.fields_by_name['random_distort_color'].message_type = _RANDOMDISTORTCOLOR
_PREPROCESSINGSTEP.fields_by_name['random_jitter_boxes'].message_type = _RANDOMJITTERBOXES
_PREPROCESSINGSTEP.fields_by_name['random_crop_image'].message_type = _RANDOMCROPIMAGE
_PREPROCESSINGSTEP.fields_by_name['random_pad_image'].message_type = _RANDOMPADIMAGE
_PREPROCESSINGSTEP.fields_by_name['random_crop_pad_image'].message_type = _RANDOMCROPPADIMAGE
_PREPROCESSINGSTEP.fields_by_name['random_crop_to_aspect_ratio'].message_type = _RANDOMCROPTOASPECTRATIO
_PREPROCESSINGSTEP.fields_by_name['random_black_patches'].message_type = _RANDOMBLACKPATCHES
_PREPROCESSINGSTEP.fields_by_name['random_resize_method'].message_type = _RANDOMRESIZEMETHOD
_PREPROCESSINGSTEP.fields_by_name['scale_boxes_to_pixel_coordinates'].message_type = _SCALEBOXESTOPIXELCOORDINATES
_PREPROCESSINGSTEP.fields_by_name['resize_image'].message_type = _RESIZEIMAGE
_PREPROCESSINGSTEP.fields_by_name['subtract_channel_mean'].message_type = _SUBTRACTCHANNELMEAN
_PREPROCESSINGSTEP.fields_by_name['ssd_random_crop'].message_type = _SSDRANDOMCROP
_PREPROCESSINGSTEP.fields_by_name['ssd_random_crop_pad'].message_type = _SSDRANDOMCROPPAD
_PREPROCESSINGSTEP.fields_by_name['ssd_random_crop_fixed_aspect_ratio'].message_type = _SSDRANDOMCROPFIXEDASPECTRATIO _PREPROCESSINGSTEP.fields_by_name['ssd_random_crop_pad_fixed_aspect_ratio'].message_type = _SSDRANDOMCROPPADFIXEDASPECTRATIO _PREPROCESSINGSTEP.fields_by_name['random_vertical_flip'].message_type = _RANDOMVERTICALFLIP _PREPROCESSINGSTEP.fields_by_name['random_rotation90'].message_type = _RANDOMROTATION90 _PREPROCESSINGSTEP.fields_by_name['rgb_to_gray'].message_type = _RGBTOGRAY _PREPROCESSINGSTEP.fields_by_name['convert_class_logits_to_softmax'].message_type = _CONVERTCLASSLOGITSTOSOFTMAX _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['normalize_image']) _PREPROCESSINGSTEP.fields_by_name['normalize_image'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['random_horizontal_flip']) _PREPROCESSINGSTEP.fields_by_name['random_horizontal_flip'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['random_pixel_value_scale']) _PREPROCESSINGSTEP.fields_by_name['random_pixel_value_scale'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['random_image_scale']) _PREPROCESSINGSTEP.fields_by_name['random_image_scale'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['random_rgb_to_gray']) _PREPROCESSINGSTEP.fields_by_name['random_rgb_to_gray'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( 
_PREPROCESSINGSTEP.fields_by_name['random_adjust_brightness']) _PREPROCESSINGSTEP.fields_by_name['random_adjust_brightness'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['random_adjust_contrast']) _PREPROCESSINGSTEP.fields_by_name['random_adjust_contrast'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['random_adjust_hue']) _PREPROCESSINGSTEP.fields_by_name['random_adjust_hue'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['random_adjust_saturation']) _PREPROCESSINGSTEP.fields_by_name['random_adjust_saturation'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['random_distort_color']) _PREPROCESSINGSTEP.fields_by_name['random_distort_color'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['random_jitter_boxes']) _PREPROCESSINGSTEP.fields_by_name['random_jitter_boxes'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['random_crop_image']) _PREPROCESSINGSTEP.fields_by_name['random_crop_image'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['random_pad_image']) _PREPROCESSINGSTEP.fields_by_name['random_pad_image'].containing_oneof = 
_PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['random_crop_pad_image']) _PREPROCESSINGSTEP.fields_by_name['random_crop_pad_image'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['random_crop_to_aspect_ratio']) _PREPROCESSINGSTEP.fields_by_name['random_crop_to_aspect_ratio'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['random_black_patches']) _PREPROCESSINGSTEP.fields_by_name['random_black_patches'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['random_resize_method']) _PREPROCESSINGSTEP.fields_by_name['random_resize_method'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['scale_boxes_to_pixel_coordinates']) _PREPROCESSINGSTEP.fields_by_name['scale_boxes_to_pixel_coordinates'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['resize_image']) _PREPROCESSINGSTEP.fields_by_name['resize_image'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['subtract_channel_mean']) _PREPROCESSINGSTEP.fields_by_name['subtract_channel_mean'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( 
_PREPROCESSINGSTEP.fields_by_name['ssd_random_crop']) _PREPROCESSINGSTEP.fields_by_name['ssd_random_crop'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['ssd_random_crop_pad']) _PREPROCESSINGSTEP.fields_by_name['ssd_random_crop_pad'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['ssd_random_crop_fixed_aspect_ratio']) _PREPROCESSINGSTEP.fields_by_name['ssd_random_crop_fixed_aspect_ratio'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['ssd_random_crop_pad_fixed_aspect_ratio']) _PREPROCESSINGSTEP.fields_by_name['ssd_random_crop_pad_fixed_aspect_ratio'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['random_vertical_flip']) _PREPROCESSINGSTEP.fields_by_name['random_vertical_flip'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['random_rotation90']) _PREPROCESSINGSTEP.fields_by_name['random_rotation90'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['rgb_to_gray']) _PREPROCESSINGSTEP.fields_by_name['rgb_to_gray'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'].fields.append( _PREPROCESSINGSTEP.fields_by_name['convert_class_logits_to_softmax']) 
_PREPROCESSINGSTEP.fields_by_name['convert_class_logits_to_softmax'].containing_oneof = _PREPROCESSINGSTEP.oneofs_by_name['preprocessing_step'] _RESIZEIMAGE.fields_by_name['method'].enum_type = _RESIZEIMAGE_METHOD _RESIZEIMAGE_METHOD.containing_type = _RESIZEIMAGE _SSDRANDOMCROP.fields_by_name['operations'].message_type = _SSDRANDOMCROPOPERATION _SSDRANDOMCROPPAD.fields_by_name['operations'].message_type = _SSDRANDOMCROPPADOPERATION _SSDRANDOMCROPFIXEDASPECTRATIO.fields_by_name['operations'].message_type = _SSDRANDOMCROPFIXEDASPECTRATIOOPERATION _SSDRANDOMCROPPADFIXEDASPECTRATIO.fields_by_name['operations'].message_type = _SSDRANDOMCROPPADFIXEDASPECTRATIOOPERATION DESCRIPTOR.message_types_by_name['PreprocessingStep'] = _PREPROCESSINGSTEP DESCRIPTOR.message_types_by_name['NormalizeImage'] = _NORMALIZEIMAGE DESCRIPTOR.message_types_by_name['RandomHorizontalFlip'] = _RANDOMHORIZONTALFLIP DESCRIPTOR.message_types_by_name['RandomVerticalFlip'] = _RANDOMVERTICALFLIP DESCRIPTOR.message_types_by_name['RandomRotation90'] = _RANDOMROTATION90 DESCRIPTOR.message_types_by_name['RandomPixelValueScale'] = _RANDOMPIXELVALUESCALE DESCRIPTOR.message_types_by_name['RandomImageScale'] = _RANDOMIMAGESCALE DESCRIPTOR.message_types_by_name['RandomRGBtoGray'] = _RANDOMRGBTOGRAY DESCRIPTOR.message_types_by_name['RandomAdjustBrightness'] = _RANDOMADJUSTBRIGHTNESS DESCRIPTOR.message_types_by_name['RandomAdjustContrast'] = _RANDOMADJUSTCONTRAST DESCRIPTOR.message_types_by_name['RandomAdjustHue'] = _RANDOMADJUSTHUE DESCRIPTOR.message_types_by_name['RandomAdjustSaturation'] = _RANDOMADJUSTSATURATION DESCRIPTOR.message_types_by_name['RandomDistortColor'] = _RANDOMDISTORTCOLOR DESCRIPTOR.message_types_by_name['RandomJitterBoxes'] = _RANDOMJITTERBOXES DESCRIPTOR.message_types_by_name['RandomCropImage'] = _RANDOMCROPIMAGE DESCRIPTOR.message_types_by_name['RandomPadImage'] = _RANDOMPADIMAGE DESCRIPTOR.message_types_by_name['RandomCropPadImage'] = _RANDOMCROPPADIMAGE 
DESCRIPTOR.message_types_by_name['RandomCropToAspectRatio'] = _RANDOMCROPTOASPECTRATIO DESCRIPTOR.message_types_by_name['RandomBlackPatches'] = _RANDOMBLACKPATCHES DESCRIPTOR.message_types_by_name['RandomResizeMethod'] = _RANDOMRESIZEMETHOD DESCRIPTOR.message_types_by_name['RGBtoGray'] = _RGBTOGRAY DESCRIPTOR.message_types_by_name['ScaleBoxesToPixelCoordinates'] = _SCALEBOXESTOPIXELCOORDINATES DESCRIPTOR.message_types_by_name['ResizeImage'] = _RESIZEIMAGE DESCRIPTOR.message_types_by_name['SubtractChannelMean'] = _SUBTRACTCHANNELMEAN DESCRIPTOR.message_types_by_name['SSDRandomCropOperation'] = _SSDRANDOMCROPOPERATION DESCRIPTOR.message_types_by_name['SSDRandomCrop'] = _SSDRANDOMCROP DESCRIPTOR.message_types_by_name['SSDRandomCropPadOperation'] = _SSDRANDOMCROPPADOPERATION DESCRIPTOR.message_types_by_name['SSDRandomCropPad'] = _SSDRANDOMCROPPAD DESCRIPTOR.message_types_by_name['SSDRandomCropFixedAspectRatioOperation'] = _SSDRANDOMCROPFIXEDASPECTRATIOOPERATION DESCRIPTOR.message_types_by_name['SSDRandomCropFixedAspectRatio'] = _SSDRANDOMCROPFIXEDASPECTRATIO DESCRIPTOR.message_types_by_name['SSDRandomCropPadFixedAspectRatioOperation'] = _SSDRANDOMCROPPADFIXEDASPECTRATIOOPERATION DESCRIPTOR.message_types_by_name['SSDRandomCropPadFixedAspectRatio'] = _SSDRANDOMCROPPADFIXEDASPECTRATIO DESCRIPTOR.message_types_by_name['ConvertClassLogitsToSoftmax'] = _CONVERTCLASSLOGITSTOSOFTMAX PreprocessingStep = _reflection.GeneratedProtocolMessageType('PreprocessingStep', (_message.Message,), dict( DESCRIPTOR = _PREPROCESSINGSTEP, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.PreprocessingStep) )) _sym_db.RegisterMessage(PreprocessingStep) NormalizeImage = _reflection.GeneratedProtocolMessageType('NormalizeImage', (_message.Message,), dict( DESCRIPTOR = _NORMALIZEIMAGE, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # 
@@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.NormalizeImage) )) _sym_db.RegisterMessage(NormalizeImage) RandomHorizontalFlip = _reflection.GeneratedProtocolMessageType('RandomHorizontalFlip', (_message.Message,), dict( DESCRIPTOR = _RANDOMHORIZONTALFLIP, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.RandomHorizontalFlip) )) _sym_db.RegisterMessage(RandomHorizontalFlip) RandomVerticalFlip = _reflection.GeneratedProtocolMessageType('RandomVerticalFlip', (_message.Message,), dict( DESCRIPTOR = _RANDOMVERTICALFLIP, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.RandomVerticalFlip) )) _sym_db.RegisterMessage(RandomVerticalFlip) RandomRotation90 = _reflection.GeneratedProtocolMessageType('RandomRotation90', (_message.Message,), dict( DESCRIPTOR = _RANDOMROTATION90, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.RandomRotation90) )) _sym_db.RegisterMessage(RandomRotation90) RandomPixelValueScale = _reflection.GeneratedProtocolMessageType('RandomPixelValueScale', (_message.Message,), dict( DESCRIPTOR = _RANDOMPIXELVALUESCALE, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.RandomPixelValueScale) )) _sym_db.RegisterMessage(RandomPixelValueScale) RandomImageScale = _reflection.GeneratedProtocolMessageType('RandomImageScale', (_message.Message,), dict( DESCRIPTOR = _RANDOMIMAGESCALE, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.RandomImageScale) )) _sym_db.RegisterMessage(RandomImageScale) RandomRGBtoGray = 
_reflection.GeneratedProtocolMessageType('RandomRGBtoGray', (_message.Message,), dict( DESCRIPTOR = _RANDOMRGBTOGRAY, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.RandomRGBtoGray) )) _sym_db.RegisterMessage(RandomRGBtoGray) RandomAdjustBrightness = _reflection.GeneratedProtocolMessageType('RandomAdjustBrightness', (_message.Message,), dict( DESCRIPTOR = _RANDOMADJUSTBRIGHTNESS, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.RandomAdjustBrightness) )) _sym_db.RegisterMessage(RandomAdjustBrightness) RandomAdjustContrast = _reflection.GeneratedProtocolMessageType('RandomAdjustContrast', (_message.Message,), dict( DESCRIPTOR = _RANDOMADJUSTCONTRAST, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.RandomAdjustContrast) )) _sym_db.RegisterMessage(RandomAdjustContrast) RandomAdjustHue = _reflection.GeneratedProtocolMessageType('RandomAdjustHue', (_message.Message,), dict( DESCRIPTOR = _RANDOMADJUSTHUE, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.RandomAdjustHue) )) _sym_db.RegisterMessage(RandomAdjustHue) RandomAdjustSaturation = _reflection.GeneratedProtocolMessageType('RandomAdjustSaturation', (_message.Message,), dict( DESCRIPTOR = _RANDOMADJUSTSATURATION, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.RandomAdjustSaturation) )) _sym_db.RegisterMessage(RandomAdjustSaturation) RandomDistortColor = _reflection.GeneratedProtocolMessageType('RandomDistortColor', (_message.Message,), dict( DESCRIPTOR = _RANDOMDISTORTCOLOR, __module__ = 
'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.RandomDistortColor) )) _sym_db.RegisterMessage(RandomDistortColor) RandomJitterBoxes = _reflection.GeneratedProtocolMessageType('RandomJitterBoxes', (_message.Message,), dict( DESCRIPTOR = _RANDOMJITTERBOXES, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.RandomJitterBoxes) )) _sym_db.RegisterMessage(RandomJitterBoxes) RandomCropImage = _reflection.GeneratedProtocolMessageType('RandomCropImage', (_message.Message,), dict( DESCRIPTOR = _RANDOMCROPIMAGE, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.RandomCropImage) )) _sym_db.RegisterMessage(RandomCropImage) RandomPadImage = _reflection.GeneratedProtocolMessageType('RandomPadImage', (_message.Message,), dict( DESCRIPTOR = _RANDOMPADIMAGE, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.RandomPadImage) )) _sym_db.RegisterMessage(RandomPadImage) RandomCropPadImage = _reflection.GeneratedProtocolMessageType('RandomCropPadImage', (_message.Message,), dict( DESCRIPTOR = _RANDOMCROPPADIMAGE, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.RandomCropPadImage) )) _sym_db.RegisterMessage(RandomCropPadImage) RandomCropToAspectRatio = _reflection.GeneratedProtocolMessageType('RandomCropToAspectRatio', (_message.Message,), dict( DESCRIPTOR = _RANDOMCROPTOASPECTRATIO, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.RandomCropToAspectRatio) )) 
_sym_db.RegisterMessage(RandomCropToAspectRatio) RandomBlackPatches = _reflection.GeneratedProtocolMessageType('RandomBlackPatches', (_message.Message,), dict( DESCRIPTOR = _RANDOMBLACKPATCHES, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.RandomBlackPatches) )) _sym_db.RegisterMessage(RandomBlackPatches) RandomResizeMethod = _reflection.GeneratedProtocolMessageType('RandomResizeMethod', (_message.Message,), dict( DESCRIPTOR = _RANDOMRESIZEMETHOD, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.RandomResizeMethod) )) _sym_db.RegisterMessage(RandomResizeMethod) RGBtoGray = _reflection.GeneratedProtocolMessageType('RGBtoGray', (_message.Message,), dict( DESCRIPTOR = _RGBTOGRAY, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.RGBtoGray) )) _sym_db.RegisterMessage(RGBtoGray) ScaleBoxesToPixelCoordinates = _reflection.GeneratedProtocolMessageType('ScaleBoxesToPixelCoordinates', (_message.Message,), dict( DESCRIPTOR = _SCALEBOXESTOPIXELCOORDINATES, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.ScaleBoxesToPixelCoordinates) )) _sym_db.RegisterMessage(ScaleBoxesToPixelCoordinates) ResizeImage = _reflection.GeneratedProtocolMessageType('ResizeImage', (_message.Message,), dict( DESCRIPTOR = _RESIZEIMAGE, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.ResizeImage) )) _sym_db.RegisterMessage(ResizeImage) SubtractChannelMean = _reflection.GeneratedProtocolMessageType('SubtractChannelMean', (_message.Message,), dict( DESCRIPTOR = _SUBTRACTCHANNELMEAN, __module__ = 
'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.SubtractChannelMean) )) _sym_db.RegisterMessage(SubtractChannelMean) SSDRandomCropOperation = _reflection.GeneratedProtocolMessageType('SSDRandomCropOperation', (_message.Message,), dict( DESCRIPTOR = _SSDRANDOMCROPOPERATION, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.SSDRandomCropOperation) )) _sym_db.RegisterMessage(SSDRandomCropOperation) SSDRandomCrop = _reflection.GeneratedProtocolMessageType('SSDRandomCrop', (_message.Message,), dict( DESCRIPTOR = _SSDRANDOMCROP, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.SSDRandomCrop) )) _sym_db.RegisterMessage(SSDRandomCrop) SSDRandomCropPadOperation = _reflection.GeneratedProtocolMessageType('SSDRandomCropPadOperation', (_message.Message,), dict( DESCRIPTOR = _SSDRANDOMCROPPADOPERATION, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.SSDRandomCropPadOperation) )) _sym_db.RegisterMessage(SSDRandomCropPadOperation) SSDRandomCropPad = _reflection.GeneratedProtocolMessageType('SSDRandomCropPad', (_message.Message,), dict( DESCRIPTOR = _SSDRANDOMCROPPAD, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.SSDRandomCropPad) )) _sym_db.RegisterMessage(SSDRandomCropPad) SSDRandomCropFixedAspectRatioOperation = _reflection.GeneratedProtocolMessageType('SSDRandomCropFixedAspectRatioOperation', (_message.Message,), dict( DESCRIPTOR = _SSDRANDOMCROPFIXEDASPECTRATIOOPERATION, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # 
@@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.SSDRandomCropFixedAspectRatioOperation) )) _sym_db.RegisterMessage(SSDRandomCropFixedAspectRatioOperation) SSDRandomCropFixedAspectRatio = _reflection.GeneratedProtocolMessageType('SSDRandomCropFixedAspectRatio', (_message.Message,), dict( DESCRIPTOR = _SSDRANDOMCROPFIXEDASPECTRATIO, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.SSDRandomCropFixedAspectRatio) )) _sym_db.RegisterMessage(SSDRandomCropFixedAspectRatio) SSDRandomCropPadFixedAspectRatioOperation = _reflection.GeneratedProtocolMessageType('SSDRandomCropPadFixedAspectRatioOperation', (_message.Message,), dict( DESCRIPTOR = _SSDRANDOMCROPPADFIXEDASPECTRATIOOPERATION, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.SSDRandomCropPadFixedAspectRatioOperation) )) _sym_db.RegisterMessage(SSDRandomCropPadFixedAspectRatioOperation) SSDRandomCropPadFixedAspectRatio = _reflection.GeneratedProtocolMessageType('SSDRandomCropPadFixedAspectRatio', (_message.Message,), dict( DESCRIPTOR = _SSDRANDOMCROPPADFIXEDASPECTRATIO, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.SSDRandomCropPadFixedAspectRatio) )) _sym_db.RegisterMessage(SSDRandomCropPadFixedAspectRatio) ConvertClassLogitsToSoftmax = _reflection.GeneratedProtocolMessageType('ConvertClassLogitsToSoftmax', (_message.Message,), dict( DESCRIPTOR = _CONVERTCLASSLOGITSTOSOFTMAX, __module__ = 'rastervision.protos.tf_object_detection.preprocessor_pb2' # @@protoc_insertion_point(class_scope:rastervision.protos.tf_object_detection.ConvertClassLogitsToSoftmax) )) _sym_db.RegisterMessage(ConvertClassLogitsToSoftmax) # @@protoc_insertion_point(module_scope)
47.734488
9,601
0.782628
11,977
99,240
6.137848
0.040912
0.043421
0.070464
0.091603
0.788894
0.764994
0.737788
0.704488
0.644553
0.610491
0
0.03553
0.105512
99,240
2,078
9,602
47.757459
0.792608
0.034966
0
0.681865
1
0.007772
0.271926
0.223802
0
0
0
0
0
1
0
false
0
0.003109
0
0.003109
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
c6d3dd5c8f2a708a15a39b9c923a8341cd57c40b
41
py
Python
checker/punchy_mc_lochface/__init__.py
fausecteam/faustctf-2019-punchy
c68d80ff2c57e45c8c4ef8d6ed65b73efc41cfe0
[ "0BSD" ]
null
null
null
checker/punchy_mc_lochface/__init__.py
fausecteam/faustctf-2019-punchy
c68d80ff2c57e45c8c4ef8d6ed65b73efc41cfe0
[ "0BSD" ]
null
null
null
checker/punchy_mc_lochface/__init__.py
fausecteam/faustctf-2019-punchy
c68d80ff2c57e45c8c4ef8d6ed65b73efc41cfe0
[ "0BSD" ]
null
null
null
from .punch_checker import PunchyChecker
20.5
40
0.878049
5
41
7
1
0
0
0
0
0
0
0
0
0
0
0
0.097561
41
1
41
41
0.945946
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
c6ee715c1b5964f1d19de8175376af12d2388aa5
7,589
py
Python
evaluation/evaluation.py
georgeepta/BGP-Simulator
3fba8e19da5940b9af5638b3b9109c9473ba9e99
[ "BSD-3-Clause" ]
null
null
null
evaluation/evaluation.py
georgeepta/BGP-Simulator
3fba8e19da5940b9af5638b3b9109c9473ba9e99
[ "BSD-3-Clause" ]
null
null
null
evaluation/evaluation.py
georgeepta/BGP-Simulator
3fba8e19da5940b9af5638b3b9109c9473ba9e99
[ "BSD-3-Clause" ]
1
2021-07-05T00:42:37.000Z
2021-07-05T00:42:37.000Z
import json class NestedDict(dict): def __missing__(self, key): value = self[key] = type(self)() return value def read_evaluation_data(file_path): try: with open(file_path, 'r') as json_file: data = json.load(json_file) return data except FileNotFoundError: print("Sorry, the file, "+ file_path + " ,does not exist.") return 0 def write_evaluation_results(evaluation_results_dict, file_path): with open(file_path, 'w') as json_file: json.dump(evaluation_results_dict, json_file) def compute_avg_impact(eval_data): impact_estimation_after_hijack_list = [] for simulation_result in eval_data: impact_estimation_after_hijack_list.append(simulation_result["after_hijack"]["impact_estimation"]) return sum(impact_estimation_after_hijack_list) / len(impact_estimation_after_hijack_list) def collateral_benefit_prefix_hijacking(num_of_top_isp_rpki_adopters, rpki_adoption_propability_list, evaluation_results_dict): print("#### Collateral benefit Prefix Hijacking ####") print("Number of Top RPKI adopters | RPKI Adoption Propability | Average Impact Estimation") for rpki_adopters_value in num_of_top_isp_rpki_adopters: for rpki_adoption_propability in rpki_adoption_propability_list: file_path = "./evaluation_data/prefix-hijacking-random/top-isps-" + str( rpki_adopters_value) + "/rpki-adoption-prop-" + str(rpki_adoption_propability) + ".json" eval_data = read_evaluation_data(file_path) if eval_data: avg_impact_estimation_after_hijack = compute_avg_impact(eval_data) print(str(rpki_adopters_value) + " " + str(rpki_adoption_propability) + " " + str( avg_impact_estimation_after_hijack)) evaluation_results_dict["collateral_benefit"]["prefix_hijacking"][str(rpki_adopters_value)][str(rpki_adoption_propability)] = avg_impact_estimation_after_hijack def collateral_benefit_subprefix_hijacking(num_of_top_isp_rpki_adopters, rpki_adoption_propability_list, evaluation_results_dict): print("#### Collateral benefit Subprefix Hijacking ####") print("Number of Top RPKI adopters | RPKI Adoption Propability 
| Average Impact Estimation") for rpki_adopters_value in num_of_top_isp_rpki_adopters: for rpki_adoption_propability in rpki_adoption_propability_list: file_path = "./evaluation_data/subprefix-hijacking-random/top-isps-" + str( rpki_adopters_value) + "/rpki-adoption-prop-" + str(rpki_adoption_propability) + ".json" eval_data = read_evaluation_data(file_path) if eval_data: avg_impact_estimation_after_hijack = compute_avg_impact(eval_data) print(str(rpki_adopters_value) + " " + str(rpki_adoption_propability) + " " + str( avg_impact_estimation_after_hijack)) evaluation_results_dict["collateral_benefit"]["subprefix_hijacking"][str(rpki_adopters_value)][str(rpki_adoption_propability)] = avg_impact_estimation_after_hijack def today_rov_status_other_random_prop_prefix_hijacking(other_random_prop_list, evaluation_results_dict): print("### Today ROV status + Other ASes (Prefix Hijacking) ###") print("RPKI Adoption Propability of other ASes | Average Impact Estimation") for prop_value in other_random_prop_list: file_path = "./evaluation_data/prefix-hijacking-random/today-rov-status/other-random-prop-" + str(prop_value) + ".json" eval_data = read_evaluation_data(file_path) if eval_data: avg_impact_estimation_after_hijack = compute_avg_impact(eval_data) print(str(prop_value) + " " + str(avg_impact_estimation_after_hijack)) evaluation_results_dict["today_rov_status_other_random_prop"]["prefix_hijacking"][str(prop_value)] = avg_impact_estimation_after_hijack def today_rov_status_other_random_prop_subprefix_hijacking(other_random_prop_list, evaluation_results_dict): print("### Today ROV status + Other ASes (Subprefix Hijacking) ###") print("RPKI Adoption Propability of other ASes | Average Impact Estimation") for prop_value in other_random_prop_list: file_path = "./evaluation_data/subprefix-hijacking-random/today-rov-status/other-random-prop-" + str(prop_value) + ".json" eval_data = read_evaluation_data(file_path) if eval_data: avg_impact_estimation_after_hijack = 
compute_avg_impact(eval_data) print(str(prop_value) + " " + str(avg_impact_estimation_after_hijack)) evaluation_results_dict["today_rov_status_other_random_prop"]["subprefix_hijacking"][str(prop_value)] = avg_impact_estimation_after_hijack def top_isps_rov_other_random_prop_prefix_hijacking(num_of_top_isp_rpki_adopters, other_random_prop_list, evaluation_results_dict): print("### Top ISPs ROV + Other ASes (Prefix Hijacking) ###") print("RPKI Adoption Propability of other ASes | Average Impact Estimation") for prop_value in other_random_prop_list: file_path = "./evaluation_data/prefix-hijacking-random/top-isps-rov/"+str(num_of_top_isp_rpki_adopters)+"-other-random-prop-" + str(prop_value) + ".json" eval_data = read_evaluation_data(file_path) if eval_data: avg_impact_estimation_after_hijack = compute_avg_impact(eval_data) print(str(prop_value) + " " + str(avg_impact_estimation_after_hijack)) evaluation_results_dict["top_isps_rov_other_random_prop"]["prefix_hijacking"][str(prop_value)] = avg_impact_estimation_after_hijack def top_isps_rov_other_random_prop_subprefix_hijacking(num_of_top_isp_rpki_adopters, other_random_prop_list, evaluation_results_dict): print("### Top ISPs ROV + Other ASes (Subprefix Hijacking) ###") print("RPKI Adoption Propability of other ASes | Average Impact Estimation") for prop_value in other_random_prop_list: file_path = "./evaluation_data/subprefix-hijacking-random/top-isps-rov/"+str(num_of_top_isp_rpki_adopters)+"-other-random-prop-" + str(prop_value) + ".json" eval_data = read_evaluation_data(file_path) if eval_data: avg_impact_estimation_after_hijack = compute_avg_impact(eval_data) print(str(prop_value) + " " + str(avg_impact_estimation_after_hijack)) evaluation_results_dict["top_isps_rov_other_random_prop"]["subprefix_hijacking"][str(prop_value)] = avg_impact_estimation_after_hijack if __name__ == '__main__': rpki_adoption_propability_list = [0.25, 0.50, 0.75, 1.0] num_of_top_isp_rpki_adopters = list(range(0, 101, 10)) 
other_random_prop_list = [v * 0.1 for v in range(0, 11, 1)] print("#### Evaluation Results ####") evaluation_results_dict = NestedDict() collateral_benefit_prefix_hijacking(num_of_top_isp_rpki_adopters, rpki_adoption_propability_list, evaluation_results_dict) collateral_benefit_subprefix_hijacking(num_of_top_isp_rpki_adopters, rpki_adoption_propability_list, evaluation_results_dict) today_rov_status_other_random_prop_prefix_hijacking(other_random_prop_list, evaluation_results_dict) today_rov_status_other_random_prop_subprefix_hijacking(other_random_prop_list, evaluation_results_dict) top_isps_rov_other_random_prop_prefix_hijacking(100, other_random_prop_list, evaluation_results_dict) top_isps_rov_other_random_prop_subprefix_hijacking(100, other_random_prop_list, evaluation_results_dict) write_evaluation_results(evaluation_results_dict, "evaluation_results.json")
63.773109
179
0.75504
989
7,589
5.301314
0.088979
0.088499
0.082968
0.113294
0.899485
0.872401
0.851612
0.835018
0.832157
0.823384
0
0.004699
0.158651
7,589
119
180
63.773109
0.816445
0
0
0.42
0
0.02
0.216206
0.069302
0
0
0
0
0
1
0.1
false
0
0.01
0
0.16
0.2
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
05ce4486589aded18f3c314cd9ea3cbc1cff1232
1,790
py
Python
tests/test_cli.py
octoenergy/terraform-enterprise-client
aad496e39ed77f93ae7da3397e6628279dcff273
[ "MIT" ]
9
2019-10-11T15:14:05.000Z
2021-08-07T19:28:37.000Z
tests/test_cli.py
octoenergy/terraform-cloud-client
aad496e39ed77f93ae7da3397e6628279dcff273
[ "MIT" ]
3
2019-09-23T00:01:48.000Z
2021-02-02T22:16:00.000Z
tests/test_cli.py
octoenergy/terraform-enterprise-client
aad496e39ed77f93ae7da3397e6628279dcff273
[ "MIT" ]
null
null
null
from unittest import mock import pytest import tfc.cli @mock.patch.object(tfc.cli, "client") def test_cli_run_wth_no_variables_or_message(mock_client_module, monkeypatch): monkeypatch.setenv("TERRAFORM_CLOUD_TOKEN", "my_token") client = mock_client_module.TerraformClient.return_value tfc.cli.main(argv=["tfc", "my_org", "my_workspace"]) mock_client_module.TerraformClient.assert_called_once_with( "my_token", "my_org", "my_workspace" ) client.update_variable.assert_not_called() client.create_run.assert_called_once_with(tfc.cli.DEFAULT_RUN_MESSAGE) @mock.patch.object(tfc.cli, "client") def test_cli_run_with_message(mock_client_module, monkeypatch): monkeypatch.setenv("TERRAFORM_CLOUD_TOKEN", "my_token") client = mock_client_module.TerraformClient.return_value tfc.cli.main(argv=["tfc", "my_org", "my_workspace", "--message=my_message"]) mock_client_module.TerraformClient.assert_called_once_with( "my_token", "my_org", "my_workspace" ) client.update_variable.assert_not_called() client.create_run.assert_called_once_with("my_message") @mock.patch.object(tfc.cli, "client") def test_cli_run_with_variable_being_set(mock_client_module, monkeypatch): monkeypatch.setenv("TERRAFORM_CLOUD_TOKEN", "my_token") client = mock_client_module.TerraformClient.return_value client.get_variables.return_value = {"foo": mock.Mock(name="foo", id="foo_id")} tfc.cli.main(argv=["tfc", "my_org", "my_workspace", "foo=bar"]) mock_client_module.TerraformClient.assert_called_once_with( "my_token", "my_org", "my_workspace" ) client.get_variables.assert_called_once_with() client.update_variable.assert_called_once_with("foo_id", "bar") client.create_run.assert_called_once()
35.8
83
0.762011
247
1,790
5.101215
0.194332
0.071429
0.114286
0.111111
0.801587
0.8
0.775397
0.775397
0.775397
0.749206
0
0
0.115642
1,790
49
84
36.530612
0.795957
0
0
0.485714
0
0
0.169832
0.035196
0
0
0
0
0.285714
1
0.085714
false
0
0.085714
0
0.171429
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
af19e5e5aa560f1195d38d0b202c5ad387ff7052
15
py
Python
python/testData/psi/FStringFragmentDuplicateTypeConversion.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/psi/FStringFragmentDuplicateTypeConversion.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/psi/FStringFragmentDuplicateTypeConversion.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
s = f'{42!r!r}'
15
15
0.4
5
15
1.2
0.8
0
0
0
0
0
0
0
0
0
0
0.153846
0.133333
15
1
15
15
0.307692
0
0
0
0
0
0.5
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
6
af5abc814acb8111897a9ea9c78409ca9c89f282
454
py
Python
pyschism/forcing/bctides/__init__.py
SorooshMani-NOAA/pyschism
df803edb53184625b12399f38a8bd26a022abbc1
[ "Apache-2.0" ]
17
2020-02-02T09:48:20.000Z
2022-02-02T19:28:58.000Z
pyschism/forcing/bctides/__init__.py
SorooshMani-NOAA/pyschism
df803edb53184625b12399f38a8bd26a022abbc1
[ "Apache-2.0" ]
20
2020-03-04T13:40:22.000Z
2022-02-10T15:30:42.000Z
pyschism/forcing/bctides/__init__.py
SorooshMani-NOAA/pyschism
df803edb53184625b12399f38a8bd26a022abbc1
[ "Apache-2.0" ]
12
2020-03-04T09:54:57.000Z
2022-02-10T00:14:25.000Z
from pyschism.forcing.bctides.tides import Tides from pyschism.forcing.bctides.iettype import Iettype from pyschism.forcing.bctides.ifltype import Ifltype from pyschism.forcing.bctides.isatype import Isatype from pyschism.forcing.bctides.itetype import Itetype from pyschism.forcing.bctides.itrtype import Itrtype from pyschism.forcing.bctides.bctides import Bctides __all__ = ["Bctides", "Tides", "Iettype", "Ifltype", "Isatype", "Itetype", "Itrtype"]
45.4
85
0.819383
57
454
6.45614
0.210526
0.228261
0.361413
0.494565
0
0
0
0
0
0
0
0
0.0837
454
9
86
50.444444
0.884615
0
0
0
0
0
0.103524
0
0
0
0
0
0
1
0
false
0
0.875
0
0.875
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
6
afcaa16563f923ac7e884f8f4b09ee3e1150f86b
48
py
Python
june/__init__.py
EnergieID/june
c3319d33daf1ac488eec9f0b98fb0af203ff61b4
[ "MIT" ]
1
2022-02-28T04:16:54.000Z
2022-02-28T04:16:54.000Z
june/__init__.py
EnergieID/june
c3319d33daf1ac488eec9f0b98fb0af203ff61b4
[ "MIT" ]
null
null
null
june/__init__.py
EnergieID/june
c3319d33daf1ac488eec9f0b98fb0af203ff61b4
[ "MIT" ]
null
null
null
from .june import June, SimpleJune, __version__
24
47
0.8125
6
48
5.833333
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.125
48
1
48
48
0.833333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
a558ae691d0f27db1a3d829cd46d8431eaa783ba
5,310
py
Python
plotter/get_training_time_traffic.py
giuliapuntoit/RL-framework-iot
1c0961f10f0477415198bbee94b6eb3272973004
[ "MIT" ]
5
2021-01-23T20:47:18.000Z
2021-09-13T14:37:01.000Z
plotter/get_training_time_traffic.py
SmartData-Polito/RL-IoT
d293c8410d6c2e8fcb56f96c346c519dd3a84a28
[ "MIT" ]
null
null
null
plotter/get_training_time_traffic.py
SmartData-Polito/RL-IoT
d293c8410d6c2e8fcb56f96c346c519dd3a84a28
[ "MIT" ]
1
2021-02-09T17:34:47.000Z
2021-02-09T17:34:47.000Z
""" Class to retrieve training time and traffic data about execution of RL algorithms """ import os import csv import pathlib from plotter.support_plotter import read_time_traffic_from_log output_dir = "./tmp/" class GetTrainingTimeTraffic(object): def __init__(self, date_to_retrieve='YY_mm_dd_HH_MM_SS', target_output="algorithm.csv"): if date_to_retrieve != 'YY_mm_dd_HH_MM_SS': self.date_to_retrieve = date_to_retrieve # Date must be in format %Y_%m_%d_%H_%M_%S else: print("Invalid date") exit(1) self.target_output = target_output def run(self): """ Retrieve and save into csv files training time and traffic """ secs, commands = read_time_traffic_from_log(self.date_to_retrieve) if not os.path.isfile(self.target_output): # If file does not exist # Write header with open(self.target_output, mode='w') as output_file: output_writer = csv.writer(output_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) output_writer.writerow(['Date', 'Training_time', 'Sent_commands']) with open(self.target_output, mode="a") as output_file: output_writer = csv.writer(output_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) output_writer.writerow([self.date_to_retrieve, secs, commands]) def get_data_before_tuning_unique_path(): """ Retrieve data for executions made before parameter tuning phase All the executions refer to the same path 2 """ from dates_for_graphs.date_for_graphs_before_tuning_path2 import sarsa from dates_for_graphs.date_for_graphs_before_tuning_path2 import sarsa_lambda from dates_for_graphs.date_for_graphs_before_tuning_path2 import qlearning from dates_for_graphs.date_for_graphs_before_tuning_path2 import qlearning_lambda for dat in sarsa: GetTrainingTimeTraffic(date_to_retrieve=dat, target_output=output_dir+'0_sarsa.csv').run() for dat in sarsa_lambda: GetTrainingTimeTraffic(date_to_retrieve=dat, target_output=output_dir+'0_sarsa_lambda.csv').run() for dat in qlearning: GetTrainingTimeTraffic(date_to_retrieve=dat, 
target_output=output_dir+'0_qlearning.csv').run() for dat in qlearning_lambda: GetTrainingTimeTraffic(date_to_retrieve=dat, target_output=output_dir+'0_qlearning_lambda.csv').run() def get_data_algos_path(sarsa, sarsa_lambda, qlearning, qlearning_lambda, path=None): """ Retrieve training time and traffic for all different algorithms and append into related csv files """ for dat in sarsa: GetTrainingTimeTraffic(date_to_retrieve=dat, target_output=output_dir+'path' + str(path) + '_sarsa.csv').run() for dat in sarsa_lambda: GetTrainingTimeTraffic(date_to_retrieve=dat, target_output=output_dir+'path' + str(path) + '_sarsa_lambda.csv').run() for dat in qlearning: GetTrainingTimeTraffic(date_to_retrieve=dat, target_output=output_dir+'path' + str(path) + '_qlearning.csv').run() for dat in qlearning_lambda: GetTrainingTimeTraffic(date_to_retrieve=dat, target_output=output_dir+'path' + str(path) + '_qlearning_lambda.csv').run() def main(): pathlib.Path(output_dir).mkdir(parents=True, exist_ok=True) # for Python > 3.5 get_data_before_tuning_unique_path() # Plot all paths target_path = 1 print("PATH ", target_path) from dates_for_graphs.date_for_graphs_path1 import sarsa_dates from dates_for_graphs.date_for_graphs_path1 import sarsa_lambda_dates from dates_for_graphs.date_for_graphs_path1 import qlearning_dates from dates_for_graphs.date_for_graphs_path1 import qlearning_lambda_dates get_data_algos_path(sarsa_dates, sarsa_lambda_dates, qlearning_dates, qlearning_lambda_dates, path=target_path) target_path = 2 print("PATH ", target_path) from dates_for_graphs.date_for_graphs_path2 import sarsa_dates from dates_for_graphs.date_for_graphs_path2 import sarsa_lambda_dates from dates_for_graphs.date_for_graphs_path2 import qlearning_dates from dates_for_graphs.date_for_graphs_path2 import qlearning_lambda_dates get_data_algos_path(sarsa_dates, sarsa_lambda_dates, qlearning_dates, qlearning_lambda_dates, path=target_path) target_path = 3 print("PATH ", target_path) from 
dates_for_graphs.date_for_graphs_path3 import sarsa_dates from dates_for_graphs.date_for_graphs_path3 import sarsa_lambda_dates from dates_for_graphs.date_for_graphs_path3 import qlearning_dates from dates_for_graphs.date_for_graphs_path3 import qlearning_lambda_dates get_data_algos_path(sarsa_dates, sarsa_lambda_dates, qlearning_dates, qlearning_lambda_dates, path=target_path) target_path = 4 print("PATH ", target_path) from dates_for_graphs.date_for_graphs_path4 import sarsa_dates from dates_for_graphs.date_for_graphs_path4 import sarsa_lambda_dates from dates_for_graphs.date_for_graphs_path4 import qlearning_dates from dates_for_graphs.date_for_graphs_path4 import qlearning_lambda_dates get_data_algos_path(sarsa_dates, sarsa_lambda_dates, qlearning_dates, qlearning_lambda_dates, path=target_path) if __name__ == '__main__': main()
40.534351
129
0.761582
754
5,310
4.94695
0.164456
0.096515
0.064343
0.096515
0.784182
0.742627
0.712064
0.712064
0.712064
0.698123
0
0.007214
0.164595
5,310
130
130
40.846154
0.833634
0.085876
0
0.233766
0
0
0.057101
0.008994
0
0
0
0
0
1
0.064935
false
0
0.311688
0
0.38961
0.064935
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
6
a574b541d8082b0ca1cf282dd84950f76a23bce1
27
py
Python
src/euler_python_package/euler_python/medium/p392.py
wilsonify/euler
5214b776175e6d76a7c6d8915d0e062d189d9b79
[ "MIT" ]
null
null
null
src/euler_python_package/euler_python/medium/p392.py
wilsonify/euler
5214b776175e6d76a7c6d8915d0e062d189d9b79
[ "MIT" ]
null
null
null
src/euler_python_package/euler_python/medium/p392.py
wilsonify/euler
5214b776175e6d76a7c6d8915d0e062d189d9b79
[ "MIT" ]
null
null
null
def problem392(): pass
9
17
0.62963
3
27
5.666667
1
0
0
0
0
0
0
0
0
0
0
0.15
0.259259
27
2
18
13.5
0.7
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
1
0
0
0
0
0
6
3c66694bcef5d0977a592ccadda1548ba28544da
213
py
Python
admin_toolbox/builders/__init__.py
sarendsen/django-admin-toolbox
9f733d90d0d95924a0f07e5a4e45e56f4de29d85
[ "MIT" ]
12
2017-04-28T18:58:01.000Z
2020-07-16T11:10:00.000Z
admin_toolbox/builders/__init__.py
sarendsen/django-admin-toolbox
9f733d90d0d95924a0f07e5a4e45e56f4de29d85
[ "MIT" ]
7
2019-01-28T13:02:44.000Z
2019-06-16T21:50:23.000Z
admin_toolbox/builders/__init__.py
sarendsen/django-admin-toolbox
9f733d90d0d95924a0f07e5a4e45e56f4de29d85
[ "MIT" ]
4
2019-02-26T06:12:53.000Z
2020-03-08T10:18:05.000Z
from .generic import ListBuilder, ItemBuilder from .models import AppsListBuilder, ModelsListBuilder, ModelBuilder __all__ = ('ListBuilder', 'ItemBuilder', 'AppsListBuilder', 'ModelsListBuilder', 'ModelBuilder')
42.6
96
0.807512
17
213
9.882353
0.588235
0.261905
0.52381
0
0
0
0
0
0
0
0
0
0.089202
213
4
97
53.25
0.865979
0
0
0
0
0
0.309859
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
6
3c966e2dfc0fc216e7b23a9471846eedf27e721c
187
py
Python
hw/sasha_yaroshevich/test_lesson3.py
alexander-sidorov/qap-05
6db7c0a1eeadd15f7d3f826e7f0ac4be3949ec8c
[ "MIT" ]
9
2021-12-10T21:30:07.000Z
2022-02-25T21:32:34.000Z
hw/sasha_yaroshevich/test_lesson3.py
alexander-sidorov/qap-05
6db7c0a1eeadd15f7d3f826e7f0ac4be3949ec8c
[ "MIT" ]
22
2021-12-11T08:46:58.000Z
2022-02-02T15:56:37.000Z
hw/sasha_yaroshevich/test_lesson3.py
alexander-sidorov/qap-05
6db7c0a1eeadd15f7d3f826e7f0ac4be3949ec8c
[ "MIT" ]
8
2021-12-11T09:15:45.000Z
2022-02-02T08:09:09.000Z
from hw.sasha_yaroshevich.lesson3_fink2 import f from hw.sasha_yaroshevich.lesson3_fink2 import g def test_lesson3() -> None: assert f() is None # type: ignore assert g() == 4
23.375
48
0.727273
29
187
4.517241
0.586207
0.091603
0.167939
0.335878
0.610687
0.610687
0.610687
0
0
0
0
0.039216
0.181818
187
7
49
26.714286
0.816993
0.064171
0
0
0
0
0
0
0
0
0
0
0.4
1
0.2
true
0
0.4
0
0.6
0
1
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
b1b82425a9f5ead65e41ec5291e825da14f5b115
54
py
Python
scripts/fetchai_code_quality/__init__.py
cyenyxe/ledger
6b42c3a3a5c78d257a02634437f9e00d1439690b
[ "Apache-2.0" ]
null
null
null
scripts/fetchai_code_quality/__init__.py
cyenyxe/ledger
6b42c3a3a5c78d257a02634437f9e00d1439690b
[ "Apache-2.0" ]
null
null
null
scripts/fetchai_code_quality/__init__.py
cyenyxe/ledger
6b42c3a3a5c78d257a02634437f9e00d1439690b
[ "Apache-2.0" ]
null
null
null
from .internal.static_analysis import static_analysis
27
53
0.888889
7
54
6.571429
0.714286
0.608696
0
0
0
0
0
0
0
0
0
0
0.074074
54
1
54
54
0.92
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
b1e5448e7cfdbe1e42e911ff904bf7c78775c7fe
13,918
py
Python
trvae/plotting.py
gokceneraslan/trVAE
596127b02f4a86ed6a91d5a3f666d6b5d97aff0c
[ "MIT" ]
46
2019-10-07T21:46:16.000Z
2022-02-13T15:30:50.000Z
trvae/plotting.py
gokceneraslan/trVAE
596127b02f4a86ed6a91d5a3f666d6b5d97aff0c
[ "MIT" ]
9
2020-05-15T16:59:32.000Z
2021-09-14T11:35:29.000Z
trvae/plotting.py
gokceneraslan/trVAE
596127b02f4a86ed6a91d5a3f666d6b5d97aff0c
[ "MIT" ]
4
2020-03-04T11:47:01.000Z
2021-01-05T17:48:47.000Z
import matplotlib import numpy as np import pandas as pd import scanpy as sc from adjustText import adjust_text from matplotlib import pyplot from scipy import stats, sparse font = {'family': 'Arial', # 'weight' : 'bold', 'size': 14} matplotlib.rc('font', **font) matplotlib.rc('ytick', labelsize=14) matplotlib.rc('xtick', labelsize=14) def reg_mean_plot(adata, condition_key, axis_keys, labels, path_to_save="./reg_mean.pdf", gene_list=None, top_100_genes=None, show=False, legend=True, title=None, x_coeff=0.30, y_coeff=0.8, fontsize=14, **kwargs): """ Plots mean matching figure for a set of specific genes. # Parameters adata: `~anndata.AnnData` Annotated Data Matrix. condition_key: basestring Condition state to be used. axis_keys: dict dictionary of axes labels. path_to_save: basestring path to save the plot. gene_list: list list of gene names to be plotted. show: bool if `True`: will show to the plot after saving it. # Example ```python import anndata import scgen import scanpy as sc train = sc.read("./tests/data/train.h5ad", backup_url="https://goo.gl/33HtVh") network = scgen.VAEArith(x_dimension=train.shape[1], model_path="../models/test") network.train(train_data=train, n_epochs=0) unperturbed_data = train[((train.obs["cell_type"] == "CD4T") & (train.obs["condition"] == "control"))] condition = {"ctrl": "control", "stim": "stimulated"} pred, delta = network.predict(adata=train, adata_to_predict=unperturbed_data, conditions=condition) pred_adata = anndata.AnnData(pred, obs={"condition": ["pred"] * len(pred)}, var={"var_names": train.var_names}) CD4T = train[train.obs["cell_type"] == "CD4T"] all_adata = CD4T.concatenate(pred_adata) scgen.plotting.reg_mean_plot(all_adata, condition_key="condition", axis_keys={"x": "control", "y": "pred", "y1": "stimulated"}, gene_list=["ISG15", "CD3D"], path_to_save="tests/reg_mean.pdf", show=False) network.sess.close() ``` """ import seaborn as sns sns.set() sns.set(color_codes=True) if sparse.issparse(adata.X): adata.X = adata.X.A 
diff_genes = top_100_genes stim = adata[adata.obs[condition_key] == axis_keys["y"]] ctrl = adata[adata.obs[condition_key] == axis_keys["x"]] if diff_genes is not None: if hasattr(diff_genes, "tolist"): diff_genes = diff_genes.tolist() adata_diff = adata[:, diff_genes] stim_diff = adata_diff[adata_diff.obs[condition_key] == axis_keys["y"]] ctrl_diff = adata_diff[adata_diff.obs[condition_key] == axis_keys["x"]] x_diff = np.average(ctrl_diff.X, axis=0) y_diff = np.average(stim_diff.X, axis=0) m, b, r_value_diff, p_value_diff, std_err_diff = stats.linregress(x_diff, y_diff) print('reg_mean_top100:', r_value_diff ** 2) if "y1" in axis_keys.keys(): real_stim = adata[adata.obs[condition_key] == axis_keys["y1"]] x = np.average(ctrl.X, axis=0) y = np.average(stim.X, axis=0) m, b, r_value, p_value, std_err = stats.linregress(x, y) print('reg_mean_all:', r_value ** 2) df = pd.DataFrame({axis_keys["x"]: x, axis_keys["y"]: y}) ax = sns.regplot(x=axis_keys["x"], y=axis_keys["y"], data=df, scatter_kws={'rasterized': True}) ax.tick_params(labelsize=fontsize) if "range" in kwargs: start, stop, step = kwargs.get("range") ax.set_xticks(np.arange(start, stop, step)) ax.set_yticks(np.arange(start, stop, step)) # _p1 = pyplot.scatter(x, y, marker=".", label=f"{axis_keys['x']}-{axis_keys['y']}") # pyplot.plot(x, m * x + b, "-", color="green") ax.set_xlabel(labels["x"], fontsize=fontsize) ax.set_ylabel(labels["y"], fontsize=fontsize) # if "y1" in axis_keys.keys(): # y1 = np.average(real_stim.X, axis=0) # _p2 = pyplot.scatter(x, y1, marker="*", c="red", alpha=.5, label=f"{axis_keys['x']}-{axis_keys['y1']}") if gene_list is not None: texts = [] for i in gene_list: j = adata.var_names.tolist().index(i) x_bar = x[j] y_bar = y[j] texts.append(pyplot.text(x_bar, y_bar, i, fontsize=11, color="black")) pyplot.plot(x_bar, y_bar, 'o', color="red", markersize=5) # if "y1" in axis_keys.keys(): # y1_bar = y1[j] # pyplot.text(x_bar, y1_bar, i, fontsize=11, color="black") if gene_list is not None: 
adjust_text(texts, x=x, y=y, arrowprops=dict(arrowstyle="->", color='grey', lw=0.5), force_points=(0.0, 0.0)) if legend: pyplot.legend(loc='center left', bbox_to_anchor=(1, 0.5)) if title is None: pyplot.title(f"", fontsize=fontsize) else: pyplot.title(title, fontsize=fontsize) ax.text(max(x) - max(x) * x_coeff, max(y) - y_coeff * max(y), r'$\mathrm{R^2_{\mathrm{\mathsf{all\ genes}}}}$= ' + f"{r_value ** 2:.2f}", fontsize=kwargs.get("textsize", fontsize)) if diff_genes is not None: ax.text(max(x) - max(x) * x_coeff, max(y) - (y_coeff + 0.15) * max(y), r'$\mathrm{R^2_{\mathrm{\mathsf{top\ ' + str(len(top_100_genes)) + '\ DEGs}}}}$= ' + f"{r_value_diff ** 2:.2f}", fontsize=kwargs.get("textsize", fontsize)) pyplot.savefig(f"{path_to_save}", bbox_inches='tight', dpi=100) if show: pyplot.show() pyplot.close() def reg_var_plot(adata, condition_key, axis_keys, labels, path_to_save="./reg_var.pdf", gene_list=None, top_100_genes=None, show=False, legend=True, title=None, x_coeff=0.30, y_coeff=0.8, fontsize=14, **kwargs): """ Plots variance matching figure for a set of specific genes. # Parameters adata: `~anndata.AnnData` Annotated Data Matrix. condition_key: basestring Condition state to be used. axis_keys: dict dictionary of axes labels. path_to_save: basestring path to save the plot. gene_list: list list of gene names to be plotted. show: bool if `True`: will show to the plot after saving it. 
# Example ```python import anndata import scgen import scanpy as sc train = sc.read("./tests/data/train.h5ad", backup_url="https://goo.gl/33HtVh") network = scgen.VAEArith(x_dimension=train.shape[1], model_path="../models/test") network.train(train_data=train, n_epochs=0) unperturbed_data = train[((train.obs["cell_type"] == "CD4T") & (train.obs["condition"] == "control"))] condition = {"ctrl": "control", "stim": "stimulated"} pred, delta = network.predict(adata=train, adata_to_predict=unperturbed_data, conditions=condition) pred_adata = anndata.AnnData(pred, obs={"condition": ["pred"] * len(pred)}, var={"var_names": train.var_names}) CD4T = train[train.obs["cell_type"] == "CD4T"] all_adata = CD4T.concatenate(pred_adata) scgen.plotting.reg_var_plot(all_adata, condition_key="condition", axis_keys={"x": "control", "y": "pred", "y1": "stimulated"}, gene_list=["ISG15", "CD3D"], path_to_save="tests/reg_var4.pdf", show=False) network.sess.close() ``` """ import seaborn as sns sns.set() sns.set(color_codes=True) if sparse.issparse(adata.X): adata.X = adata.X.A diff_genes = top_100_genes stim = adata[adata.obs[condition_key] == axis_keys["y"]] ctrl = adata[adata.obs[condition_key] == axis_keys["x"]] if diff_genes is not None: if hasattr(diff_genes, "tolist"): diff_genes = diff_genes.tolist() adata_diff = adata[:, diff_genes] stim_diff = adata_diff[adata_diff.obs[condition_key] == axis_keys["y"]] ctrl_diff = adata_diff[adata_diff.obs[condition_key] == axis_keys["x"]] x_diff = np.var(ctrl_diff.X, axis=0) y_diff = np.var(stim_diff.X, axis=0) m, b, r_value_diff, p_value_diff, std_err_diff = stats.linregress(x_diff, y_diff) print('reg_var_top100:', r_value_diff ** 2) if "y1" in axis_keys.keys(): real_stim = adata[adata.obs[condition_key] == axis_keys["y1"]] x = np.var(ctrl.X, axis=0) y = np.var(stim.X, axis=0) m, b, r_value, p_value, std_err = stats.linregress(x, y) print('reg_var_all:', r_value ** 2) df = pd.DataFrame({axis_keys["x"]: x, axis_keys["y"]: y}) ax = 
sns.regplot(x=axis_keys["x"], y=axis_keys["y"], data=df, scatter_kws={'rasterized': True}) ax.tick_params(labelsize=fontsize) if "range" in kwargs: start, stop, step = kwargs.get("range") ax.set_xticks(np.arange(start, stop, step)) ax.set_yticks(np.arange(start, stop, step)) # _p1 = pyplot.scatter(x, y, marker=".", label=f"{axis_keys['x']}-{axis_keys['y']}") # pyplot.plot(x, m * x + b, "-", color="green") ax.set_xlabel(labels['x'], fontsize=fontsize) ax.set_ylabel(labels['y'], fontsize=fontsize) if "y1" in axis_keys.keys(): y1 = np.var(real_stim.X, axis=0) _p2 = pyplot.scatter(x, y1, marker="*", c="grey", alpha=.5, label=f"{axis_keys['x']}-{axis_keys['y1']}") if gene_list is not None: for i in gene_list: j = adata.var_names.tolist().index(i) x_bar = x[j] y_bar = y[j] pyplot.text(x_bar, y_bar, i, fontsize=11, color="black") pyplot.plot(x_bar, y_bar, 'o', color="red", markersize=5) if "y1" in axis_keys.keys(): y1_bar = y1[j] pyplot.text(x_bar, y1_bar, '*', color="black", alpha=.5) if legend: pyplot.legend(loc='center left', bbox_to_anchor=(1, 0.5)) if title is None: pyplot.title(f"", fontsize=12) else: pyplot.title(title, fontsize=12) ax.text(max(x) - max(x) * x_coeff, max(y) - y_coeff * max(y), r'$\mathrm{R^2_{\mathrm{\mathsf{all\ genes}}}}$= ' + f"{r_value ** 2:.2f}", fontsize=kwargs.get("textsize", fontsize)) if diff_genes is not None: ax.text(max(x) - max(x) * x_coeff, max(y) - (y_coeff + 0.15) * max(y), r'$\mathrm{R^2_{\mathrm{\mathsf{top\ ' + str(len(top_100_genes)) + '\ DEGs}}}}$= ' + f"{r_value_diff ** 2:.2f}", fontsize=kwargs.get("textsize", fontsize)) pyplot.savefig(f"{path_to_save}", bbox_inches='tight', dpi=100) if show: pyplot.show() pyplot.close() def binary_classifier(scg_object, adata, delta, condition_key, conditions, path_to_save, fontsize=14): """ Builds a linear classifier based on the dot product between the difference vector and the latent representation of each cell and plots the dot product results between delta and latent representation. 
# Parameters scg_object: `~scgen.models.VAEArith` one of scGen models object. adata: `~anndata.AnnData` Annotated Data Matrix. delta: float Difference between stimulated and control cells in latent space condition_key: basestring Condition state to be used. conditions: dict dictionary of conditions. path_to_save: basestring path to save the plot. # Example ```python import anndata import scgen import scanpy as sc train = sc.read("./tests/data/train.h5ad", backup_url="https://goo.gl/33HtVh") network = scgen.VAEArith(x_dimension=train.shape[1], model_path="../models/test") network.train(train_data=train, n_epochs=0) unperturbed_data = train[((train.obs["cell_type"] == "CD4T") & (train.obs["condition"] == "control"))] condition = {"ctrl": "control", "stim": "stimulated"} pred, delta = network.predict(adata=train, adata_to_predict=unperturbed_data, conditions=condition) scgen.plotting.binary_classifier(network, train, delta, condtion_key="condition", conditions={"ctrl": "control", "stim": "stimulated"}, path_to_save="tests/binary_classifier.pdf") network.sess.close() ``` """ # matplotlib.rcParams.update(matplotlib.rcParamsDefault) pyplot.close("all") if sparse.issparse(adata.X): adata.X = adata.X.A cd = adata[adata.obs[condition_key] == conditions["ctrl"], :] stim = adata[adata.obs[condition_key] == conditions["stim"], :] all_latent_cd = scg_object.to_z_latent(cd.X) all_latent_stim = scg_object.to_z_latent(stim.X) dot_cd = np.zeros((len(all_latent_cd))) dot_sal = np.zeros((len(all_latent_stim))) for ind, vec in enumerate(all_latent_cd): dot_cd[ind] = np.dot(delta, vec) for ind, vec in enumerate(all_latent_stim): dot_sal[ind] = np.dot(delta, vec) pyplot.hist(dot_cd, label=conditions["ctrl"], bins=50, ) pyplot.hist(dot_sal, label=conditions["stim"], bins=50) # pyplot.legend(loc=1, prop={'size': 7}) pyplot.axvline(0, color='k', linestyle='dashed', linewidth=1) pyplot.title(" ", fontsize=fontsize) pyplot.xlabel(" ", fontsize=fontsize) pyplot.ylabel(" ", fontsize=fontsize) 
pyplot.xticks(fontsize=fontsize) pyplot.yticks(fontsize=fontsize) ax = pyplot.gca() ax.grid(False) pyplot.savefig(f"{path_to_save}", bbox_inches='tight', dpi=100) pyplot.show()
45.782895
135
0.598362
1,923
13,918
4.155486
0.140926
0.038043
0.018771
0.030034
0.810537
0.790264
0.767989
0.760731
0.749969
0.745339
0
0.017169
0.250898
13,918
303
136
45.933993
0.749281
0.355008
0
0.605882
0
0
0.083497
0.020874
0
0
0
0
0
1
0.017647
false
0
0.052941
0
0.070588
0.023529
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6