hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
529ce14e30da741c312fc393c6a3bc3122276b10
2,283
py
Python
genomics_data_index/test/unit/variant/model/test_QueryFeatureMutationSPDI.py
apetkau/genomics-data-index
d0cc119fd57b8cbd701affb1c84450cf7832fa01
[ "Apache-2.0" ]
12
2021-05-03T20:56:05.000Z
2022-01-04T14:52:19.000Z
genomics_data_index/test/unit/variant/model/test_QueryFeatureMutationSPDI.py
apetkau/thesis-index
6c96e9ed75d8e661437effe62a939727a0b473fc
[ "Apache-2.0" ]
30
2021-04-26T23:03:40.000Z
2022-02-25T18:41:14.000Z
genomics_data_index/test/unit/variant/model/test_QueryFeatureMutationSPDI.py
apetkau/genomics-data-index
d0cc119fd57b8cbd701affb1c84450cf7832fa01
[ "Apache-2.0" ]
null
null
null
from genomics_data_index.storage.model.QueryFeatureMutationSPDI import QueryFeatureMutationSPDI def test_create(): f = QueryFeatureMutationSPDI('ref:1:A:T') assert 'ref:1:A:T' == f.id assert 'ref:1:A:T' == f.id_no_prefix def test_to_unknown(): f = QueryFeatureMutationSPDI('ref:1:A:T') assert f.to_unknown().id == 'ref:1:A:?' f = QueryFeatureMutationSPDI('ref:1:1:T') assert f.to_unknown().id == 'ref:1:1:?' f = QueryFeatureMutationSPDI('ref:10:AT:C') assert f.to_unknown().id == 'ref:10:AT:?' f = QueryFeatureMutationSPDI('ref:1:A:TT') assert f.to_unknown().id == 'ref:1:A:?' f = QueryFeatureMutationSPDI('ref:1:AT:TCG') assert f.to_unknown().id == 'ref:1:AT:?' f = QueryFeatureMutationSPDI('ref:1:2:TCG') assert f.to_unknown().id == 'ref:1:2:?' def test_to_unknown_explode(): f = QueryFeatureMutationSPDI('ref:1:A:T') assert [u.id for u in f.to_unknown_explode()] == ['ref:1:A:?'] f = QueryFeatureMutationSPDI('ref:1:1:T') assert [u.id for u in f.to_unknown_explode()] == ['ref:1:1:?'] f = QueryFeatureMutationSPDI('ref:10:AT:T') assert [u.id for u in f.to_unknown_explode()] == ['ref:10:A:?', 'ref:11:T:?'] f = QueryFeatureMutationSPDI('ref:10:T:TC') assert [u.id for u in f.to_unknown_explode()] == ['ref:10:T:?'] f = QueryFeatureMutationSPDI('ref:10:ATC:TCC') assert [u.id for u in f.to_unknown_explode()] == ['ref:10:A:?', 'ref:11:T:?', 'ref:12:C:?'] f = QueryFeatureMutationSPDI('ref:10:3:TCC') assert [u.id for u in f.to_unknown_explode()] == ['ref:10:1:?', 'ref:11:1:?', 'ref:12:1:?'] def test_has_deletion_sequence(): assert QueryFeatureMutationSPDI('ref:1:A:T').has_deletion_sequence() assert QueryFeatureMutationSPDI('ref:10:ATT:T').has_deletion_sequence() assert not QueryFeatureMutationSPDI('ref:10:1:T').has_deletion_sequence() assert not QueryFeatureMutationSPDI('ref:10:3:T').has_deletion_sequence() def test_deletion_length(): assert 1 == QueryFeatureMutationSPDI('ref:10:A:T').deletion_length() assert 3 == QueryFeatureMutationSPDI('ref:10:ATT:T').deletion_length() assert 1 == 
QueryFeatureMutationSPDI('ref:10:1:T').deletion_length() assert 3 == QueryFeatureMutationSPDI('ref:10:3:T').deletion_length()
36.822581
95
0.670171
339
2,283
4.380531
0.129794
0.381818
0.245118
0.156229
0.824916
0.729293
0.645118
0.496296
0.338047
0.259933
0
0.040837
0.141919
2,283
61
96
37.42623
0.717203
0
0
0.170732
0
0
0.176522
0
0
0
0
0
0.536585
1
0.121951
false
0
0.02439
0
0.146341
0
0
0
0
null
1
1
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
5
52c7b415470f1ee5a26642164d0c0f54c218476f
1,096
py
Python
tests/adapters/test_r_adapter.py
pietervans/viewflow
3123b1e36ca4b9464d4a8cdf13c520384b71c600
[ "MIT" ]
106
2021-03-25T12:38:54.000Z
2022-03-10T02:56:37.000Z
tests/adapters/test_r_adapter.py
pietervans/viewflow
3123b1e36ca4b9464d4a8cdf13c520384b71c600
[ "MIT" ]
null
null
null
tests/adapters/test_r_adapter.py
pietervans/viewflow
3123b1e36ca4b9464d4a8cdf13c520384b71c600
[ "MIT" ]
7
2021-04-03T13:30:36.000Z
2021-08-28T03:11:21.000Z
import viewflow from unittest.mock import patch, ANY, call @patch("viewflow.parsers.dependencies_r_patterns.custom_get_dependencies") @patch("viewflow.parsers.dependencies_r_patterns.get_dependencies_default") def test_default_dependencies_pattern(get_default_mock, get_custom_mock): viewflow.create_dag("./tests/projects/r/pattern_default") # Dependencies must have been retrieved for all possible schema's calls = [call(ANY, "viewflow"), call(ANY, "public")] get_default_mock.assert_has_calls(calls, any_order=True) get_custom_mock.assert_not_called() @patch("viewflow.parsers.dependencies_r_patterns.custom_get_dependencies") @patch("viewflow.parsers.dependencies_r_patterns.get_dependencies_default") def test_custom_dependencies_pattern(get_default_mock, get_custom_mock): viewflow.create_dag("./tests/projects/r/pattern_custom") # Dependencies must have been retrieved for all possible schema's get_default_mock.assert_not_called() calls = [call(ANY, "viewflow"), call(ANY, "public")] get_custom_mock.assert_has_calls(calls, any_order=True)
39.142857
75
0.801095
149
1,096
5.543624
0.261745
0.062954
0.096852
0.154964
0.825666
0.825666
0.825666
0.825666
0.653753
0.653753
0
0
0.097628
1,096
27
76
40.592593
0.835187
0.115876
0
0.375
0
0
0.365424
0.336439
0
0
0
0
0.25
1
0.125
false
0
0.125
0
0.25
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
52e34aa39cddec7a3ff4b010a4ea70f119755c4a
110
py
Python
weatherbotskeleton/__init__.py
andrewmichaud/weatherbotskeleton
37dcc1b6ad337622a6fca064cb156dc1d3ac77e4
[ "BSD-3-Clause" ]
null
null
null
weatherbotskeleton/__init__.py
andrewmichaud/weatherbotskeleton
37dcc1b6ad337622a6fca064cb156dc1d3ac77e4
[ "BSD-3-Clause" ]
null
null
null
weatherbotskeleton/__init__.py
andrewmichaud/weatherbotskeleton
37dcc1b6ad337622a6fca064cb156dc1d3ac77e4
[ "BSD-3-Clause" ]
null
null
null
"""Skeleton for weather twitter bots. Spooky weather.""" from weatherbotskeleton.weatherbot_skeleton import *
36.666667
56
0.809091
12
110
7.333333
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.1
110
2
57
55
0.888889
0.454545
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
5e1db6b33ca47f83924d39e2b27e65a2927b1f53
36
py
Python
smt/utils/__init__.py
jbussemaker/smt
a1d8bc0ebea936148a2534f4e20a1134e6760dca
[ "BSD-3-Clause" ]
354
2017-08-15T22:12:58.000Z
2022-03-31T08:34:19.000Z
smt/utils/__init__.py
yuhaibo2017/smt
ab3bda1e38ca3a785dfc0ed692f1437bf7e78bd3
[ "BSD-3-Clause" ]
258
2017-08-11T15:08:40.000Z
2022-03-30T09:54:26.000Z
smt/utils/__init__.py
yuhaibo2017/smt
ab3bda1e38ca3a785dfc0ed692f1437bf7e78bd3
[ "BSD-3-Clause" ]
184
2017-08-11T14:55:17.000Z
2022-03-17T11:22:50.000Z
from .misc import compute_rms_error
18
35
0.861111
6
36
4.833333
1
0
0
0
0
0
0
0
0
0
0
0
0.111111
36
1
36
36
0.90625
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
eaa6d18cda9171f6fcc95d74b553285aab33299b
19,272
py
Python
aoc18_data.py
zidarsk8/aoc2020
21239a8bfd3cba31f16c91c28a176e1163ba4cf9
[ "Apache-2.0" ]
1
2020-12-02T08:29:50.000Z
2020-12-02T08:29:50.000Z
aoc18_data.py
zidarsk8/aoc2020
21239a8bfd3cba31f16c91c28a176e1163ba4cf9
[ "Apache-2.0" ]
null
null
null
aoc18_data.py
zidarsk8/aoc2020
21239a8bfd3cba31f16c91c28a176e1163ba4cf9
[ "Apache-2.0" ]
null
null
null
data = """ (7 * 5 * 6 + (9 * 8 + 3 * 3 + 5) + 7) * (6 + 3 * 9) + 6 + 7 + (7 * 5) * 4 (4 + 9 + (8 * 2) + 5) * 8 + (3 + 2 * 3 * 7 * (7 * 4 * 5) * 9) * 2 3 + 7 + (9 + 6 + 4 * 7 * 3 + 5) * 9 3 + 3 * (5 + (7 * 5 + 4 * 8 + 9 * 2) + 3) * 8 * 7 (8 + 3 + 7 * 7) + (3 + 8) * 4 + 2 2 + 9 * (7 + 3 * 3 * 8) + 9 + 3 2 * ((5 + 7 + 9 + 7 * 3 * 7) * 2 + 4 * 4 + (2 + 2 + 7) + 3) + 6 8 * 3 * (6 + (6 * 8 * 2)) + 9 + 9 * 3 (9 * 7 + 6) + 5 * (7 + 5 + 4) + 2 (9 + 4 * (5 + 5 + 4 * 2) * 7) * 7 * 9 * 5 * 3 7 * 6 * ((5 + 6 + 8 + 4 * 3) + 2 + 2 * (4 + 6 + 2 + 7) + 8) + 4 6 + 5 + 3 * (4 * (8 + 8 + 7 + 2 * 6) + 3 + (7 * 6 * 3) * (9 + 5)) + 4 9 + 4 + (7 + 3 + 3 + 2 + 8) + (2 * (4 * 2) + 8 + (9 + 9 * 9 * 5 + 2 + 3) * (6 + 4 * 5)) * (4 + 8 * 2) (2 * 3 + 7 * 5 * (2 + 3 * 7) * 3) + 9 9 + (6 * 6 * 3) * 3 ((3 * 6) + (5 + 5 * 9 * 7 + 8) * 7 * 8) + 8 + (6 * (4 * 2 + 6 + 7 + 2) * (2 * 8 + 3) * 5 + 7) + 5 + 7 (2 * 3 + 4 + 9 + 8) + (5 * 4 * (5 + 5 + 3) + 2 * 8 + 2) 7 + 7 + 7 + 4 + (7 + 7 + 8) 9 * (8 + 7) + 2 * 6 6 + 8 + 8 * 6 * 8 + ((5 + 2 * 2 * 6 + 8) + 9 + 3 + (5 * 3 * 8) + 7) 4 * 9 + 8 + (5 * (5 + 6 * 8) + 5 + 4 + (7 * 2 * 6)) * 5 (8 * 5 + 3 * 6 + 8 + 6) + 5 * 2 + 6 + (3 + 8 + 3 + 7 * 7 * 6) 5 + (2 * (6 * 8 * 9 + 3 + 9 * 6) + 8 * (7 + 6 + 8) + 9 + 7) + 6 * 6 7 + 2 * (5 + 4 * 8 + (8 + 9) + 3) * 3 2 * ((8 + 3 + 2) + 9 * (6 * 2 * 5 * 6)) * 7 * 2 * 9 ((6 + 6) + (4 * 4 + 7 * 6 * 3) * (2 + 2 + 5 + 8) + 7 + 5) + 6 * 8 * 7 * 4 * 7 6 + 5 * 8 + (8 + 5 + 4 + 6 * 5) + 8 + 9 2 + (2 + (8 + 8) + 2) 3 * 3 * (8 + 2 + 3) * (2 + 4) + 7 9 * 2 + 9 + (3 + 5 + 5 * 2 * 2 + 7) (7 * (2 + 6 * 7 * 2) + (8 + 4 + 4) + 4 + 5) + 3 (7 * 5 + 7) + (3 * 9 * 4 * (8 * 2 + 9) * 8) * 6 + 9 * 9 9 + (2 * 5 + 2 * 7 * 3 * 7) + 5 + 3 (3 * (5 * 3 * 4) * (5 + 2)) + 7 (7 + 4) + 9 * 2 3 * (5 * 6 * 5 * 8) * 2 * 2 * 7 * 7 (7 + (7 * 2 * 5 * 6 * 7 + 2)) * 5 + 9 * 9 (5 * (3 + 6 + 4 + 4 * 8)) * (8 * 6 * 6 + (4 + 8)) * 2 + 8 + 8 ((9 * 8) * 3 * 7 * (3 + 9 * 8 + 6)) + (4 + 2 + 7 * 9) (6 * 4 + 8 * 2 * 9) + 5 * 5 * 3 5 + 6 * 9 * 7 (2 * 5 * (2 + 8 + 5 + 2 * 5) * 2 
* 9) + (6 + 8 * 2) + 5 7 * ((4 + 8 + 8) * 7 * 3 * 3 * 5 + (5 + 6 + 9 * 6)) (6 + 5) * 2 7 * 2 + 4 + (2 * 4) + 5 + 2 8 * 6 + 9 * (3 + (4 + 2 + 9 + 3 + 8) + (8 + 6 + 3 * 9 * 3 + 7) + 4 + 2) + (5 + 4 + 6) 2 + ((7 * 4) * 9 * 8 + 2 * 8) 6 + 2 * 8 * (5 * 9 * 7 * 9 + (3 + 2 * 9 * 8 * 6 + 7)) * 7 * 6 9 * 6 * 9 * 8 + (2 + (7 * 4 + 6) * 8 + (3 * 7 + 3 + 4 + 2)) * 7 5 * 9 * 8 * 2 + 7 9 + (2 * 6 * 6 * 3) + 7 + 4 + 3 (9 * 6) * ((5 + 3) * 5) + 2 + 3 + 6 * (6 * (8 + 6 + 2 * 2 + 9 + 8)) ((2 + 5 + 6 + 2 + 5 + 9) + 4 * (5 + 7 * 5) + 2) * 3 + 9 * (6 + 9 * 8 * 7) + 7 2 * 9 + (4 * (3 + 7 + 3) * 4) + 5 + 9 ((9 * 4 * 4) + 3 + 2 + 3) + 8 + ((7 * 6 * 5 * 2 * 9) + (9 * 6 + 8 + 3 * 8 + 4) * (6 * 8) * 6 + 8) * 5 + 6 2 * 8 + 5 + 4 6 + (6 + 6 + 6) + 3 * 4 6 + (7 + 4 * 9 * 2 + 7) + 7 + 3 * 2 * 2 (7 * 7 + (3 * 6 + 4 + 9) + (5 * 5 * 7 * 7 + 4 + 6) * (4 + 9 + 7 * 5 * 9)) * 2 + 8 + 3 * 8 4 * (5 + 5) + (8 + 5 * 4) + 7 * (5 * 6) 9 + 4 * 4 * 3 + 6 + 3 5 + 5 * (6 + 6 * 5 + (3 + 5 * 6) + (2 * 9) + 2) * 6 + 8 + 5 5 * 8 * 8 * 5 * 3 + 2 6 * 2 + 3 + 7 + (6 + 4) + 3 (5 + 7 + 6 + 5 + (9 * 5 + 5 + 6) + 8) + 3 * 6 * 9 * 6 (8 + 2 * 7 * 2 * (8 + 9)) + 7 * (4 + 4 + 5 * (8 * 9 * 8 + 4) * (8 + 8 * 5)) * 8 8 + ((4 * 8 * 8 * 7 + 6 + 9) + 5) + 9 * 4 ((4 * 8 * 5) + 6 + (8 + 4 * 5 + 2) * 5 * 8) + 6 * 3 8 * ((2 * 9 * 9 * 7 * 2) + 4 + 8 + 2) 2 * 9 + (9 + 6 + 7) * ((8 * 3 * 3 * 7 * 3 + 9) + 4 * 4 + (8 * 7 * 6 * 3) * (6 * 5 * 2 + 6 + 3)) + (2 + 9 + 9 * 5 * 7) (4 + (9 + 3 * 6) * 8) + 2 7 + 5 * (3 + 7 * 3 + 7 + (4 * 7) + 3) + 4 * 9 (3 * 7 * 9 + 7) + 6 * 6 + 7 + (9 * 3) * 7 6 + (9 + 5 + (9 + 9 * 9 + 8) + 2 + 5 * 9) 9 * 4 3 + 8 + (4 + 2 + 3) + (7 * 9 * 9 + 3 * (4 + 5 + 3 + 4 * 3) + 8) * 8 * 9 7 + (7 + 9 * 2 * 2 * 5) 8 * ((5 * 8 * 7 + 5) * 6) * 7 * 5 * 8 7 + 6 * 6 * 6 * 5 * (4 * (2 + 6 + 6 * 3 * 7)) ((8 + 2 + 2) * 2 * 4) + 5 + (8 + 8 + 2 + 4 * 8 + 9) + 9 (2 + (5 * 3 * 2 + 3 + 5 + 6) * (5 + 8 + 5 + 3 + 2) + (3 + 3 + 9)) + 9 (6 * 2 + (3 + 3 + 3 * 6 + 8)) * (6 * (4 + 9 * 9 * 9) + 6) * 4 * 2 7 + ((9 + 3 * 6 + 4) + 4 * (3 * 9 * 7 * 2) + 
2 + 7) * (2 + (5 + 8 + 7) * 9 * (6 * 2 + 3 * 7) + 3) 9 * (7 + (6 * 7 * 2 * 3 + 7) + 4 + 4) * 9 * 7 + (2 * 6) + 8 6 * (5 + 5 * 5 * (6 * 3) + 6) 7 * 8 * 7 * (9 + (4 + 3 + 9 + 3 + 9)) * 9 (9 + 6) + (2 + 8) * 9 * 3 (3 * 9) * 3 * 6 * 5 * 9 8 * 2 + (7 * 6 * 6) (9 + 4 * 3 * 9 * 4 + 3) * 3 2 * ((4 + 9 * 4 + 3 * 9) * 9) + 2 + 2 8 + 2 * 7 + (4 + 8 * 6) 7 * ((7 + 8 * 9) + 4 + 4 + (7 + 4 + 7 + 5) * 3 + (6 * 9 + 2 * 9)) * 8 + (5 * 7 + (3 + 7 * 3)) 3 * 9 + (3 + (9 + 8 + 4 * 5) + (4 + 2 * 2 * 4)) * 2 + 3 (5 + 3 * 5) * 8 * 9 * 5 + 2 * 2 (3 * 4 + 8 + 2 * (7 * 8 * 8 * 9 * 3 * 3) * 7) + 7 + 4 * 7 * 7 2 * 6 * 6 + (6 * 7 + 8 * 8 * (5 * 9 * 3 + 2 * 2 + 4) + (4 * 5 + 7 + 2 * 8 + 6)) + (9 * (5 + 6 + 4) * 9 * 8 * 8) * (4 + 8 + 7 + 6) (2 * (6 * 2 * 4 * 7 * 4)) + 4 * 7 * 5 9 * 7 + 2 + (4 * (4 * 5) * 3 + (2 * 3 + 4 * 4) * 9 * 6) 6 * 6 + (8 * 3 + 5 + (8 + 6 * 2)) * 6 + (7 * 3 * (8 * 8 * 2 * 3) * 7 * 5 * 8) 5 + 2 + 9 7 * 9 + (8 + 3 + 9) * (3 + 8 * 7) (7 * 9 * 3 + 8) * (8 * 9) + 7 * 8 * 9 + (2 * 3 + 6 * (6 * 2 * 7) * (2 * 7 + 6 + 8 + 4) + 8) 2 + 2 + 4 * (8 + 2 * 8 + 2) + 5 * 8 2 * 6 * 8 + (6 * 7 * 2 + 9 + 9) 4 + (3 * 5 + 6 + (9 + 2 + 9 + 4 * 7) * 8 + 7) 9 + ((5 + 2 * 8) * 6 + 9 * 5) 6 * ((2 * 2 * 7 + 8 * 5) * (7 * 7 + 3 + 5 + 6) + 8 * 3) + 9 2 * 5 + ((5 + 5 * 9) + 5 + 6) * 6 5 * 5 + 7 * (2 * 9 * 2 * 5) 5 + 2 + 3 * (5 * 5 + (3 * 2) + 8) (5 + 7 * (4 + 2 + 5 + 7 + 6)) + 9 * 9 * 5 + 4 * 2 (6 + 8 * 8 + (4 + 5 + 9 * 6 * 9 * 7) + (2 + 7 * 3) + 2) * 5 * 7 9 * 5 + ((3 + 7 * 7 * 3 + 3) * 6 * (5 * 5 + 8 + 9 * 2 + 7)) + 8 (5 + (5 * 3 + 5) + (3 * 8 * 7) + 2 * 5 * (8 + 2 + 5)) + 7 * 5 7 + ((2 * 4) * (3 + 2 * 8 * 3 * 8) * 5 + (9 + 7 + 6 + 3 + 7 * 9) * (4 * 4 + 6 + 9 * 7) * 4) 2 + (8 * 2 + 8 * 4 + 3 + 3) * 9 * 6 + 9 + 2 8 + 3 + ((3 * 9) * 4 + 6 * (2 * 3 * 3) + 2 + 5) 3 * (7 + (2 * 5 + 7 * 6) * 6 + 9 * 3) + 7 * (3 + 2 + 4) + (5 * 4) + 9 ((2 + 6 + 4 + 4 * 9) * 2 * (4 + 8 + 4) * 7 + 2) * 3 * (6 * 6) 5 * 4 * (2 * 9) (3 + 4 * 3 + 4) + ((9 * 2 + 2 + 8 * 8 * 9) * 7 * 7 * (7 * 8 + 3 * 2) * (8 + 2 * 7 * 7 + 7 + 7)) + 8 
+ ((2 * 4) + 8 * 2 + 2 + 3 + 2) 8 + ((8 * 2 * 2 * 5) + 2 * 8) 9 * ((6 * 8 + 9) * 8 * 7 * 2) * 5 * 3 * (7 * 8) + 6 (3 + 5 + 8 + 7 * 2) * 2 + 8 * (5 * (3 + 3 * 9 * 3 * 9) * 9 + 5 * (2 + 5 + 8 * 3)) + 4 * 3 (8 + (6 * 5 * 9 + 3 * 3) * 5) + (4 * 3 + 7) 5 + 6 + 3 + 6 + ((5 + 4 * 2 + 7 * 8 * 3) + 5) 6 * 4 + (2 * 6 * (5 * 4 + 7 + 9 * 5) + 9 * (5 * 7) + 6) * 7 * 8 5 * 7 * ((7 * 6 * 8) + 5 + 8 * (3 * 3 * 4 * 2 + 7 + 4)) * (9 * (5 * 8) + 3) * 3 4 + ((7 * 6 * 4 + 3 + 6 + 8) * 7 * 4 * (6 + 3 * 5)) 9 * 8 ((5 * 5) * 7 * 4 * 2 * 9 + (7 * 6 + 6 + 7 * 7)) + 8 * 7 + 6 3 + (5 * (2 + 4 * 4 + 4) + 5) * 4 + 2 (4 + 3 + 4) * 3 * 7 + (2 * (6 * 3 * 7 * 8) * 8) (4 * 9 + 3 * 2 * 5) * 5 + (5 * 4 + 2) 8 + (2 + (2 + 4 + 2) + 8 + 7 + 6) + 9 + 3 * (7 * 4 * 6 + 7) 9 * 7 9 + 7 * 6 6 + 7 * 3 + 5 + ((4 * 6 + 9 * 6 * 6) * (8 + 8 + 2 + 8 + 6 * 5)) + 9 3 * 6 * 3 * (4 + 8 * 6 + 5 * 7) + 8 7 * (8 + 2 * 6 + 4 * 2) * 7 + 8 * 5 7 + ((6 * 2 * 9 * 7 * 5 + 5) * 2 + 7 + (3 + 3 + 5 * 6) * 2) * (6 + 8 * (8 + 4 + 4 + 7 * 4 + 9) * 6) * 4 * 3 6 + 5 * (9 + 2 * 7 + 7 + 4 * 9) * 2 * 8 3 * 5 + (7 + 5 * 2 + 2 * 8) + 9 * 2 8 * 8 * 4 + (3 + 7) (2 * 8) * 2 + 7 * 6 + 5 * (7 + 4) 8 + 5 + (2 + 3 + 7 * 7 * 8 * 2) + 2 + 8 * 6 (5 * 7) + (7 * 4 + (2 + 9 + 7 * 8) * 7 + 2 * 5) * 2 * (7 + (3 * 7 * 6) + (8 * 6 * 6 + 6)) + 8 * 9 5 + 9 + 6 * 6 * 8 * (5 + 7 * 8 * 2) ((6 + 2) * (6 + 5 + 5 + 6 + 9) + (4 + 2 + 8 + 9) + (4 + 9 + 6 * 3 + 4 * 9) + 6) + (4 + 2 + 2) (7 + (4 + 8 * 4 * 9)) + 6 + 8 9 * 7 + 4 * (6 + 4 + (4 * 4 * 3 + 9) * 9 + 8) + (3 * 5 + (3 * 5 * 6 + 6 * 2 + 4) * 7 * 3) + 8 6 + 9 * 8 * ((4 + 7 * 9 + 8) + 7) + 7 * 7 5 * (9 + 9) (5 + 5) + 2 + 7 * (7 + 5) + 6 4 + 3 * (8 * 7 + 4 * 3 * (2 * 9 * 4)) * 5 ((6 + 5) * 5 * 6) + 8 + 7 + 4 9 * 9 + (6 + 6 + (7 * 6 + 2)) 9 + 4 * 6 * 9 + 9 4 * 9 * 8 + (5 + 5 * 8) + (2 * 6 * 7) 9 + 8 + ((9 + 4 * 6 + 3) * 8 * 8 * 4 + (7 * 5 * 9 * 9 * 4 + 4) * (4 + 6 * 7)) + 5 5 * 7 * (6 * (2 * 2 * 9 * 3)) + 2 * 7 * 5 9 * (6 + 7 * 7) * 3 * 2 + 4 (2 * 3 + (7 * 3 + 7) + 6 * 3 * (4 + 7 + 6 + 2 * 9 * 5)) + 9 * 4 + 2 9 * (4 
* 6 + 8 * (4 + 4)) (8 * 4 * 4 + 6 + (8 + 9 * 4 * 7 * 3) * 4) * 7 * 4 * 8 + (4 * 3 * 6 * 5) + 5 (3 * 4 * 6 * 6 + 3) * 6 9 + (5 * 8 * 8 * (2 + 2 + 7) * 8 * 5) ((5 + 4 + 2 + 9 + 2 * 8) * 2 * 2 + 4 + 7) + 2 + 6 (5 + (9 * 5 * 9 * 9) * 3 * 7 + 2 + 9) + ((5 + 4) + 4 * (8 + 3 + 4 + 8)) * 6 + 4 * (5 * 3 + 9 + 6) + 6 ((8 + 4) * (4 * 9 * 8) * 7 + 4 + 5 * 6) + (8 * 9 * 4 * 2 + 2) (8 * 7 + 8 + (6 + 2 + 4 + 8 * 4 * 8) + 7 * 4) * (6 * 2 + 6 + 2 + 3) + ((5 * 8 * 8 + 7 * 5 + 3) + 8 * (6 + 6 * 4)) 5 * 8 + (4 * (3 * 9 * 5 * 5 + 5)) * 7 + 3 5 * 8 + 3 + 4 3 + ((9 + 3 + 6) * 4 * 9 * (9 + 8 * 3 * 6) + 7 + 2) + 4 * (5 * (9 * 2 * 2) * 9 * (6 * 8 + 4 * 8)) ((2 * 5) + 5 + 4 + 9 + (4 + 5 * 4) * 6) * 8 + 3 5 + 7 * (9 + 6 * 7 * 5) 7 * (5 * 3) + 7 * (7 + 6 + 8) (5 + 7 * (8 + 8 * 3 * 7 * 6) + (6 * 4)) * 2 + 9 8 * (6 + 5 + 6 + (3 + 7 * 3 + 9 * 8 * 6)) (6 + 5) + 9 + (8 * 7 + 8) + 4 6 + 2 + 4 * ((2 + 2 + 5) + (5 + 2 + 6) * 4 + 2) * 5 * 4 8 + 6 (6 + (4 * 5) + 8) * 8 * 6 + 2 * 2 + 7 7 + 4 + ((2 + 7 * 2 + 9 + 9 * 7) + 5) + (9 * 8 + 4 + 7 + 8) + 5 4 + 2 * 2 * 6 + 7 * ((7 * 2 + 3) + 7 + 6 * (8 + 4 + 2 * 6) + (9 + 7 + 3)) (4 + 8 * 7 * 8 + 9 * 8) + 6 * 6 * ((9 + 2 * 2 + 7) * 3 + 3 * 9) 6 + (3 + 2 + 2 + 4 + (5 + 9)) * 8 + 4 + 7 ((5 * 4 * 7 * 9) + 7) + 7 + 4 * (8 * 7 * 3) + 3 5 * (6 * 8) + (3 * 7 * 3 * (8 * 9 + 6 * 2) * 9 + 3) * 7 6 * 5 + (7 * 4 * 2 * 4 * 6) + (5 + (3 + 3 + 9 + 9 * 4 * 6) + 2 + 9 * 9 * (8 + 5)) 4 * 5 * 5 * 3 + 5 2 * 6 + (2 + 7 * 6) * (7 + 3 + 3 * 3 * (6 + 7) * 7) + (4 + (8 * 7 * 6) + 4) * 9 3 * 9 + (8 * 4 + 8 * (6 + 7 * 3 * 3) * (6 + 8)) 2 * 6 * (9 * 6 + 8) + (2 + 3 * 7 * 4 + 3) * 2 * ((8 + 6 * 5 + 8) * 5 + 4 * 8) 6 + 3 * (6 + 3 * 7) + 8 + 7 * 3 8 + 3 * (3 + 4 * 5 + 6 + 8) + 7 + 5 * 2 2 * (2 + 7 + 4 + 6) + (4 * (9 * 9) + 8 * 8 + 5) + 7 * 5 * 7 7 + (5 + (5 * 8 + 2 + 7 * 4) + (9 * 9 + 4 * 3 * 5 + 4) + 4 + 5 * 3) (8 + 2) * 9 * 6 * 7 3 * 5 + 5 * ((4 * 3) + 3 * 3 + 8) + 5 * 5 4 * 7 * 6 + 4 + (4 * (4 * 5) * 3 + 2 + 8 * 4) 2 + 7 * (2 + (7 + 6 + 8 + 3 + 4) * (7 * 9 + 8 * 9) * (2 + 5 * 3 + 9 + 2 + 
9)) 6 * (6 + 2 + (8 * 3 + 7 * 5 + 6 + 6) * (4 + 2 * 6 + 5) + (7 * 8 * 8 + 8) + (9 + 4 * 8 * 3 + 4)) * 3 (6 + 9 + 8 * 6 + 8) + 4 * 7 + 2 + 5 (9 * (2 * 9 + 8 * 5 * 2 + 3) + 9 * 7) + (6 + (7 * 6 + 9 + 9)) 6 + 6 + (2 + 7 * (5 * 3 + 3) * 7 + (2 + 6 + 8 + 3 * 8 + 4) * 2) * 5 + 5 * 4 (6 * 7 * 6) * ((4 + 4) + 2 * 6 + 2 + 3 * 8) * 4 * 2 * 7 * 9 9 + ((3 + 6 + 6 + 2 + 3) * 5 + 6 * 6 + 3 * (4 + 4 * 8 + 4 + 5 * 2)) * 5 * 7 8 + (7 * 4 * 6 * 4 * 8) * ((4 + 6) * 5 + (9 * 6) + (7 + 9 + 3) * 2) * 5 * (2 * 5 * (2 * 4 * 8 * 5 * 4 * 3) * 9 * 3) + 5 (7 + 2 + (6 * 7) * 5) * 3 3 + 5 * 9 + (9 + 9) * 3 9 * 3 + (9 * 4 * 9) * 8 * 3 4 * 4 + 6 + (7 * 8 * 6 * (8 * 6) * 6) (6 + 5 * 7 + 3) + (4 * 8 * (5 * 7 + 9 + 9 + 2)) + 5 2 + (2 + 9 * (4 * 7 * 3 * 3) + 8) + 9 * 9 + (6 + 9 * (4 * 5 + 6 * 6 + 8)) * 5 9 * 7 + 9 7 + 7 * 7 * (5 + 5 + 7 * 7) * 6 2 + 2 + (6 + (8 + 6 * 9 + 9)) (7 + 6 + 3 * 3) * 8 8 + (7 + 9 + 9 * 2 * 5 * 3) + 5 * 7 + 3 5 + 8 * (5 + 2 * 7 + 4 * 4) + ((8 * 8 + 2 + 5 + 5) * 9 * 3 + 9 * 6 * (4 + 7)) * 2 8 * 5 * 3 + 2 * (8 * 8 + 8 * 9) + (5 * (9 + 3 * 2 + 7 + 3) * 2) 8 + 4 + 2 + 4 + 8 * 4 9 + 2 * 8 + 8 * ((2 + 8 * 2) + 2 + (8 * 4 + 7 + 7 + 9)) + 9 7 * (9 + 6 + 8 * 7) + 5 + (8 * 7 + (4 + 3 + 9 * 6) + 9 + (5 * 7 + 5 + 6 * 2 + 2) + 2) * 4 7 + (9 * 7 + 5 * 9 + (4 + 9 * 2 + 8 * 8 + 3) * 9) (5 * 9) + 9 + (7 + 5 * 2 * 2) * (2 + (8 + 4 + 5 * 5 + 2)) * 6 + 6 9 * (9 * 3) * 2 + (2 + 3 * 6 * (8 * 4) * 4) + (9 + 8 * 3 * (7 + 6 * 7 * 6 * 5) + 3) 6 * 9 * (9 + 9 * 9 * (3 + 2 * 9 * 6 * 6) + (4 + 8) * 3) * 4 + 7 (8 * 4 + 8 * 3 * 7) + ((9 + 9) + 3 * 3 * 5 + (3 * 7 * 3 * 9 + 4)) + 9 + 9 + ((9 * 6 * 7) * 9 + 8 + 9 * 8) + 9 (3 * (3 * 2) + 5 + 9) * 6 + 5 * 3 (5 + 5) + 5 * 8 * 3 * 3 * (9 * 9 * (4 + 5 + 9) + 4) 3 * 8 + 9 + 8 + (6 + (9 * 3 + 7) + 9 * 3 * 6 * 2) + 6 3 + 9 + 3 * 5 5 * 7 + 4 * (2 * 7 * 7 * 5 * 7 + 9) * 4 8 * (3 + (8 * 7 + 8 + 6 * 5 + 5) * 4) 9 * 6 * (8 + 6) * 5 + 7 * 6 (8 + 2 + (4 * 9 * 9 + 3) + 5) + 4 (9 * 7 + 3 + 8) * 3 * 5 + 9 + (2 * 9 + 7 + 8 + 8 + 5) 3 * (8 + 9) * 6 4 + 6 + 6 + ((5 * 5 * 2 + 9 + 
7) + 6 * (6 + 4 * 8 * 9 + 8) * 4 + 9 + (9 + 4 * 5)) + 7 8 + 8 * (9 + 8 * (2 * 7)) + 6 * 9 (3 + (8 + 2) + 8 + 8 * 6) * 5 (9 * (2 + 3 + 3 + 5) * 8 * 7) + 6 + 9 + 6 (8 * (7 * 5)) + 6 * 6 * ((3 * 8 + 7 + 2 * 7) + 6 * 3 + 6 + 9 * 9) * 4 (6 * (4 + 3 * 3 * 9 * 3 + 6) + (7 * 7 + 6) + 8 * (3 + 2) * 5) + 4 (5 + 4 * 3 * (8 * 6) * (3 + 6 + 3) + 7) + 8 + ((3 * 7) + 9 + (7 + 4)) * 3 (9 * 5 * 4 * 6) * 6 * 2 + ((8 + 6 * 5) + 4 + 5) * 9 (3 * 8 + 8) * 9 + (8 + 4 * 7 + 8 + 7 + (7 + 8)) 6 + ((8 + 9 + 9 * 9) + 2) * 5 + 7 2 + (8 + (6 * 9 + 6 + 2 + 3 * 8) + 6 * 9 + 3) + 9 + 2 6 + (5 + 3) * 3 + 9 * 3 9 * ((9 + 7 * 2 + 5 * 7 + 2) * (9 * 5 + 6)) + ((9 * 7) + 4) * 3 9 * (6 * 6 + (5 + 7 * 6) * (5 + 3) + 4 + 9) * 5 (9 * 7 * 9) + 5 * 9 * 8 7 * ((8 + 9 + 5 * 2 * 2) * 5 * 5 * 8 + 4) * (5 * 7) + 9 * (7 + (8 + 2)) (3 * 6 * 6) + 7 + 2 + (9 * 2 * 4 + (5 * 8 + 8 * 6) + 8 + 6) * 9 3 + (6 + 7 * 5 + 4 * 6) * 5 * 4 * 2 * ((3 + 3) * 9 + 9 * 5 * 4) 5 * (5 + 2) * 6 + (4 * 6) + 4 + 9 (6 + 9) * 2 * 2 + 2 + (3 + 9 * (3 * 7 + 6 + 7 + 9 * 2) + 9 * 5 * (9 + 2)) 9 + (2 + 3 * 8 * 3 + 8 + 8) + 8 8 * (3 + 4 + 9) + 5 + (2 + 4 + 3 * 4) (2 + 6 + 4 * 9) * 9 * 3 * (5 + 5 * 9 + (6 * 5 + 7 * 6) * 8 + (7 + 7 * 8 * 8 + 8 + 6)) 6 * 6 * 6 + ((3 * 5 * 6 + 8 + 3 + 7) + 5 * (2 + 3 + 5 * 9 * 5) * 2) + (9 * (7 * 9 + 9) + (8 + 3 + 9 + 5 + 6) + 5 + 9) * 7 ((4 * 3 + 9 * 7 + 3 * 9) * 5) * 5 * 6 * 6 + 7 ((6 * 3 + 8 + 6) * 6 + 4 + (6 + 5 + 6 * 5 + 8 * 2) * 7 + 2) * 9 * 6 9 + 5 * 2 * (8 + 8 * 9 + 7 * (4 * 8 + 2)) + 8 * 5 (4 + 7 * 4 * 4) + 5 * 4 * 2 (8 + 3 * 3 * 6 * 4 * 7) * 7 * 3 + 8 3 * 8 + (5 + 7 * 7 + 6 * (4 * 6 * 6 * 9 * 6 + 5) * 2) * 7 + 6 * 7 5 + 2 + (7 + 9 * 3 + 8 * (2 * 4 + 4 * 3 * 9)) * 5 * 9 + (5 * 7 + 6 * 2 + 5 + 9) 9 * 3 * (6 * 7 * 7) * 2 3 + 8 * (2 + 6 + (8 + 8 + 9) * (3 * 5 * 6 + 7 + 2) + 2 * 9) + 4 * 7 4 + (7 + 9 * 8 * 7) * (9 + 7) * 7 + 6 * 3 (5 + 4 * 4) + ((8 + 9 + 4 * 9 * 5) * (3 * 3) * 8 + (6 + 5 + 8 * 7)) * 9 + 8 * 8 6 + (4 * 9 * 2 + 2 * 2 + 6) + (9 * 7 + (6 + 5)) * (7 * (2 + 5) * 9 + 7 + 5) * 4 (2 + 4 * 7 * 7 + (2 * 7) 
+ 8) + 5 * 6 * 9 7 + 7 + (5 + 5) + 7 * 2 4 * (9 * 5) * 5 * (8 + 4 + (4 + 3) + 3) 4 * ((7 + 5) + 3 * 4) * 5 * 2 + 6 + 3 (7 + 3 * 2 + (4 * 2 * 5 + 9 * 5) + (8 * 2 + 5 + 9 + 8) + 7) + 8 + (7 + 9 + (4 + 9) * (9 + 6 + 2 + 8) + (4 + 2 + 8 + 9)) + 3 * (9 * 7 * 3) 5 * (6 * 6 * 8 + 6) * 4 + 9 (4 + (7 * 7 + 2 + 8 * 3 * 3) + 9 + 4 + (9 * 2) + 6) + ((2 + 7 * 2) * 5 + 3) * 8 + 8 2 * 5 * (6 * 6 + 4 + 5 * 2 + 9) + 5 * 7 + (5 + 8) 9 * (3 * 9 + (8 + 5) + 6 + (6 * 7 * 9 + 3 + 8 * 4) * 9) + (6 + 3 + 9 + 9 * (5 * 3 + 8) * 8) (6 * 9 * (6 + 5 * 2 + 8 + 9 + 8) + 5 + 6) + 4 (3 + 5 * 9) + (8 * 2 * 4 + 3 + 9 * 3) * 7 ((3 + 2 * 9 + 7 * 9) * (9 * 7 * 3 + 5 + 6 * 3) + 6 * 8 * (8 + 9 + 9)) * (7 + (7 * 3) * 9 * 3 + 5) * 5 ((9 * 7) * 2 + 5 + 4) + (2 + 5 * 5) (3 + 6 + 2 * 7) + 6 + 6 9 * ((4 * 6 + 4 + 5 * 7 * 9) + 9 + 9) + 5 6 * 8 * 4 + 9 + 4 5 + (9 * 5) * 6 + (3 * 4 + 7 * 7 + 7) + 7 (8 * 6 * 8 * 3 * 8 + 7) + 4 + 4 * (4 * 6 * (9 + 5 * 2) + 5) * (4 + 5 * 6 + (6 * 4 * 6 * 5) * 8 + (4 + 5 * 2)) 3 * 8 + 8 5 * 8 * (9 + (8 + 2 + 4)) 9 + (2 * 5 * 9 + 2) + 3 (2 + 6 + (3 * 6 + 2 * 4 + 4) * 3 * 7 + 6) * 6 * 6 + 6 ((9 + 3 * 7) + (2 + 8)) * ((2 * 3 + 3 + 4) * 8 + 5 + 9 + 3 * 9) (3 * 9 + 3 + 6 * 8 * 7) * 3 * 5 + ((6 + 8 + 4) + 8 * (7 * 4 + 6 + 5) * 3 * 9) 9 * 4 + 7 + (8 + (4 + 6 * 6 * 7 + 9) * 7 + 4 * (5 + 7 + 7) + 2) * 4 * 5 2 * 7 + ((6 + 4 + 3) + (8 * 8 * 7 + 2 + 4) + 3 * 9 * 9 + 7) 4 + 6 * 2 + 8 + (6 * 9 * 6 + 6) * 9 3 + 3 * 8 + 9 * (7 + (2 + 8 + 5 + 5) * 7 + 3 * 2) * (7 * 7 + 7) 9 + 4 * 9 * 2 * (4 * 9) 9 + 4 * 5 + (4 * (2 * 8 * 5 * 7 * 8) * (3 * 8) + 6) + 3 * 4 7 + 3 * (8 + 7 * 9 * (2 + 9 + 4) * (3 + 5) + (7 * 9 * 2 * 8 + 9 + 8)) * (9 * (2 * 7 * 9) + (6 * 8 + 9 + 7) + 3 + (2 + 3 + 4) + (5 + 8 + 4 * 9 + 7)) 3 * 9 + 4 * (7 * (5 + 4 + 9 * 3 + 3 * 4) + 6 * 6) 3 + (8 + 8 * 9) + 9 + 5 * (7 + 4 * 3 * (7 * 4 + 2 + 8 * 4)) (8 * 2 * 4 * 6 + 4) * 5 * 9 * (8 * 5 * 5 + 7 + 8 + 5) 2 + (5 * 2 + (2 * 8 * 3 * 2) * 5 * 5) + 4 + 9 + 2 7 + (5 + 5 * 3) 4 * 6 * (5 + 2 * 3 * (3 + 8 * 3)) + 9 * 4 2 * 7 * (8 * (2 * 3 * 8 * 8 * 5) + 
2 + 2 + 5) * 7 2 + ((5 + 2 * 8 * 3 * 4 * 7) + 4 + 3 + 5 * 5 + (5 * 8 + 6)) 3 * 7 * ((9 + 8) * 8 * 3) + (9 * (8 + 8 + 2)) * 9 * 9 5 + (5 * (7 + 8 * 2 * 9) * (7 + 5 + 2 + 5 + 4 + 2) * 4 * 7) + 2 + ((6 * 7 * 2 + 9) * 6) * 9 6 * 6 + (2 + (6 + 4) + (4 * 8 + 2 + 6) + 8 * 3) 2 * 3 + 5 + ((7 * 8 + 8 * 5 * 8 * 9) + 2 * 4 + 2 + 8) (8 + (9 * 9)) + (6 + 2 * 7) 2 + 5 + 4 + 8 + 4 8 + 2 + ((5 * 5 + 5) + (6 * 8 + 6 * 3) * 8) * 7 + 6 9 * ((2 + 5) + 2 + 8 * (5 + 7 + 5 * 3) * 6) + 9 9 + 6 + ((9 * 7 * 7) + 5 * 9 + 8 * (4 * 9 + 3 + 3) * 5) + 7 * 2 (3 * 3 + 5 + 6 + 5 * 5) + 8 * 4 * 4 + 2 7 * 2 + 3 + 9 * 4 + 4 5 + (2 + 9) + 5 + 3 + 8 * 6 9 * 3 + 5 + 3 * (6 * (4 * 8 + 6 * 9 * 9 + 6) * 7 + (9 * 2 * 3 + 7)) 7 * 8 + 3 + (7 + 3 + 9 + 7 * 2) + (9 + 6 * 3 * 3) ((9 * 7) + 2) + 9 * (4 + 4) ((9 + 5) * 2 + (2 + 4 * 4 * 3 + 9 * 9)) + (7 + (7 * 6 + 4) + (9 + 7) * 9) + 8 + 9 4 + 7 * 3 + 2 * (7 + 2 * 8 + 9 * 3 * 3) 5 + 3 (2 + 3 * 5 * 6 * 2 * 2) + (8 * 8 + 3 * 3 + 7 * (7 + 3 + 9 * 8)) + 7 + 5 (7 + 3 * 5) * 7 4 * 6 + (6 + 7 + 9 * (3 + 4 * 4) + 3 + 2) * (6 + 8 * 9 * 6 * 5 + (2 * 5 * 8 + 8 + 5 * 2)) 8 * (7 + 5 * 8 * 7 + 6) * ((4 + 5) * 9 + 9 * 2 * 7) * 5 + 8 6 + 8 * 5 (8 * (7 * 4) + 5 * 9 * 9 + 3) * 7 8 + 5 + 7 + (8 + (6 + 5) + 7 + (7 * 9 * 2 * 6 + 4) * 8) * 2 ((4 + 3 * 9 + 9) + 4 * (9 + 2 + 4 + 8 + 9) + 5 + 7 * 4) + 6 * 9 9 + (9 * 8 * 3) * (8 * 6 * 7 * 8 * 6) 5 * 7 * (9 * 3 + 6 + 8 + 8 + 5) 2 + (2 + 7 * (5 * 7 * 3 + 2 * 7 * 8) + 7 + (5 + 8 * 5)) * 8 * 3 * 3 + ((6 * 5 * 8 + 8 + 9) + 6 * (8 + 3) * 6) 9 * 5 * 9 * 8 6 + 5 + 7 + 2 + (8 + (5 * 5 * 7 * 3 + 7 + 7) + 2 + 7) 4 + 4 * 2 + 3 * 7 + 6 4 + (5 + 3 + 2 * 8 * 5) + 2 2 * 2 * 8 + (6 + 9 * 4 * 7 * (4 + 7 * 7 * 2) + 2) + 9 4 * 7 * 8 + (3 * (9 * 2 + 4 * 4 * 5 * 5) + 5 * 2 * 7 + 7) + 8 + 5 (8 + (7 + 9)) * 6 9 + ((8 * 7 + 5 * 3) + 7 + (6 * 4 * 6 + 8)) * 5 3 + (6 + 5 * 7 + 4 * 2) * (3 * 3 + 7 * 6 + 4 * (6 + 7 * 3)) * (4 + 8 * 9) 7 * (7 + (4 * 3) + 4 + 5 * 4) 8 * 2 * (9 * 7 + 5 + 8) + 4 * 8 + (3 * 3) 7 * (3 + 2 * 3) + ((2 * 8) * 4) * 8 ((8 + 3 + 4 + 7 * 5 * 6) + (3 
+ 2) * (8 + 6 * 9 * 4 + 9 + 2)) + 3 + 8 * (8 + (4 + 9 * 2 + 8 + 6 + 9) * 9 * 3 * 3 + 3) + 9 4 * (7 * 4 * 8 * 5) + 7 * 8 * (2 * 4 * 5 * 4) (3 + 8 + 6 * 6 * 4) * (4 * 6 + 8 + 3 * 6) * 5 2 + (3 + (6 + 9 + 6 * 3) * 7 + (9 + 7 + 8)) + 9 * 7 9 + 7 * (4 * 8 + 3 * 2 * 9 + 4) + ((6 * 8) + 3 + 7 + 7 * 8 * (9 + 5)) * 9 + ((3 * 9 * 8 * 8) + (5 + 2) + 2 * 4 * 3 + 8) 6 * 3 + 3 * (8 * 7 + (2 * 8 + 4 + 6 + 5 * 6) + 3 + 3 * (9 * 9 * 8 + 2 * 5)) + (3 + (3 + 5 + 6) + 8 + 9 * 4) + 8 9 * ((7 * 6) * 9 * 2) * ((8 * 9 * 3 + 8 * 2) * 6 + 8 + 9 * 5 + 3) + (5 * 4) * (7 + 4) * 7 (4 * 7 + 8 * 9) + (7 + (3 + 9 * 2 * 9 * 4) * 9 * 3 * 4 * 7) * 4 * 6 + 2 6 + 9 * 6 + (2 + 9 * 4) * 8 3 * (6 * 5 + (7 * 6 * 5 * 8) + 7 * 5 * 4) + (3 * (6 * 5 + 7 * 9) + (4 * 9 + 2 * 3) * 7 * 8 * 3) + 9 + 5 + 9 3 + (2 + (6 * 3 * 8 + 3) + 4) * (8 * 9) * 7 + 6 (2 * (7 * 9)) * 6 + 7 * 2 + 9 * 4 ((5 + 4 * 2) + (3 * 2 * 6 + 3 + 5 * 5) * 4 + (2 + 5 * 9 * 8 + 3 * 5) * (7 * 2 * 8 * 2) + (5 * 4)) + 7 * 8 + 9 (8 + 4 + (5 * 4 * 6) * 2 * 8) * (6 + (9 * 4 * 5) * 7 * 5 * 4) + 3 + ((2 + 2) + 7 + 5 * 7 * 2 + 4) * 5 + (9 * 3 * (9 + 3 * 4 * 8 * 7 + 9)) 6 * 7 * 3 * (7 * 2 + (6 * 4 * 5) * (5 * 3 * 8 + 8 + 3 + 9)) * (7 * 9 + 7 * 5) 4 + 2 + (3 * (9 + 7) * 6) (8 + (8 * 4)) + (7 + 3 * 5) + 4 + 4 + 7 (7 + 7 * 4 * 4 * 4) * 4 + 3 """.strip()
50.849604
147
0.234745
4,517
19,272
1.00155
0.002214
0.039346
0.013263
0.007958
0.476348
0.070955
0.015252
0
0
0
0
0.425462
0.449357
19,272
378
148
50.984127
0.000848
0
0
0
0
0.64191
0.998858
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
eabb21b286b72f3459b6347cff4fa98c0d52c7bc
49
py
Python
python/testData/formatter/optionalAlignForMethodParameters_after.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/formatter/optionalAlignForMethodParameters_after.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/formatter/optionalAlignForMethodParameters_after.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
def long_method_name(bar, baz): pass
12.25
25
0.591837
7
49
3.857143
1
0
0
0
0
0
0
0
0
0
0
0
0.326531
49
3
26
16.333333
0.818182
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0.333333
0
0
0.333333
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
5
eac92247cc785c1bbe237d6cdcaee52116efb1a8
767
py
Python
py_hcl/firrtl_ir/expr/literal.py
zhongzc/py-hcl
5a2be0208f915377a1dae12509f1af016df6412b
[ "MIT" ]
null
null
null
py_hcl/firrtl_ir/expr/literal.py
zhongzc/py-hcl
5a2be0208f915377a1dae12509f1af016df6412b
[ "MIT" ]
null
null
null
py_hcl/firrtl_ir/expr/literal.py
zhongzc/py-hcl
5a2be0208f915377a1dae12509f1af016df6412b
[ "MIT" ]
null
null
null
from ..type import UIntType, SIntType from . import Expression from ..utils import serialize_str class UIntLiteral(Expression): def __init__(self, value, width): self.value = value self.tpe = UIntType(width) def serialize(self, output): self.tpe.serialize(output) output.write(b'("') output.write(serialize_str("h" + hex(self.value).replace("0x", ""))) output.write(b'")') class SIntLiteral(Expression): def __init__(self, value, width): self.value = value self.tpe = SIntType(width) def serialize(self, output): self.tpe.serialize(output) output.write(b'("') output.write(serialize_str("h" + hex(self.value).replace("0x", ""))) output.write(b'")')
27.392857
76
0.6206
91
767
5.10989
0.274725
0.116129
0.103226
0.090323
0.731183
0.731183
0.731183
0.731183
0.731183
0.731183
0
0.003378
0.228162
767
27
77
28.407407
0.782095
0
0
0.666667
0
0
0.018253
0
0
0
0
0
0
1
0.190476
false
0
0.142857
0
0.428571
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
dc3c44586a2cd192470454c7b5af0364f5af012f
329
py
Python
src/dx/ams/text/scraper/pickle_utils.py
lmmx/dx
063e8f8cfc24dfdf09a12001b58b4017a75ea3e8
[ "MIT" ]
null
null
null
src/dx/ams/text/scraper/pickle_utils.py
lmmx/dx
063e8f8cfc24dfdf09a12001b58b4017a75ea3e8
[ "MIT" ]
2
2021-01-03T16:22:11.000Z
2021-02-07T08:41:57.000Z
src/dx/ams/text/scraper/pickle_utils.py
lmmx/dx
063e8f8cfc24dfdf09a12001b58b4017a75ea3e8
[ "MIT" ]
null
null
null
from ..store import dir_path as store_dir from ....share.scraper.pickle_utils import retrieve_pickle, store_as_pickle from functools import partial __all__ = ["retrieve_pickle", "store_as_pickle"] retrieve_pickle = partial(retrieve_pickle, pickle_dir=store_dir) store_as_pickle = partial(store_as_pickle, pickle_dir=store_dir)
36.555556
75
0.829787
49
329
5.102041
0.285714
0.224
0.208
0.168
0.4
0
0
0
0
0
0
0
0.088146
329
8
76
41.125
0.833333
0
0
0
0
0
0.091185
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
dc6f845ce0df54c846e98b3105ea9322099cc3ac
20,469
py
Python
test/programytest/storage/stores/sql/store/test_conversations.py
cdoebler1/AIML2
ee692ec5ea3794cd1bc4cc8ec2a6b5e5c20a0d6a
[ "MIT" ]
345
2016-11-23T22:37:04.000Z
2022-03-30T20:44:44.000Z
test/programytest/storage/stores/sql/store/test_conversations.py
MikeyBeez/program-y
00d7a0c7d50062f18f0ab6f4a041068e119ef7f0
[ "MIT" ]
275
2016-12-07T10:30:28.000Z
2022-02-08T21:28:33.000Z
test/programytest/storage/stores/sql/store/test_conversations.py
VProgramMist/modified-program-y
f32efcafafd773683b3fe30054d5485fe9002b7d
[ "MIT" ]
159
2016-11-28T18:59:30.000Z
2022-03-20T18:02:44.000Z
import unittest from unittest.mock import patch import programytest.storage.engines as Engines from programy.dialog.conversation import Conversation from programy.dialog.question import Question from programy.parser.pattern.match import Match from programy.parser.pattern.matchcontext import MatchContext from programy.parser.pattern.nodes.word import PatternWordNode from programy.storage.stores.sql.config import SQLStorageConfiguration from programy.storage.stores.sql.engine import SQLStorageEngine from programy.storage.stores.sql.store.conversations import SQLConversationStore from programytest.client import TestClient from programytest.storage.asserts.store.assert_conversations import ConverstionStoreAsserts from programy.storage.stores.sql.dao.conversation import Conversation as ConversationDAO from programy.storage.stores.sql.dao.conversation import Question as QuestionDAO from programy.storage.stores.sql.dao.conversation import Sentence as SentenceDAO from programy.storage.stores.sql.dao.conversation import ConversationProperty as ConversationPropertyDAO from programy.storage.stores.sql.dao.conversation import Match as MatchDAO from programy.storage.stores.sql.dao.conversation import MatchNode as MatchNodeDAO class SQLConversationStoreTests(ConverstionStoreAsserts): @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_initialise(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) self.assertEqual(store.storage_engine, engine) @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_get_all(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) with self.assertRaises(Exception): store._get_all() @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_read_write_conversation_properties_in_db(self): config = SQLStorageConfiguration() engine = 
SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) client = TestClient() client_context = client.create_client_context("user1") store.empty() properties1 = {"key1": "value1", "key2": "value2"} store._write_properties_to_db(client_context, 1, 2, ConversationPropertyDAO.CONVERSATION, properties1) store.commit() properties2 = {} store._read_properties_from_db(client_context, 1, 2, ConversationPropertyDAO.CONVERSATION, properties2) self.assertEqual({"key1": "value1", "key2": "value2"}, properties2) store.empty() @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_read_write_question_properties_in_db(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) client = TestClient() client_context = client.create_client_context("user1") store.empty() properties1 = {"key1": "value1", "key2": "value2"} store._write_properties_to_db(client_context, 1, 2, ConversationPropertyDAO.QUESTION, properties1) store.commit() properties2 = {} store._read_properties_from_db(client_context, 1, 2, ConversationPropertyDAO.QUESTION, properties2) self.assertEqual({"key1": "value1", "key2": "value2"}, properties2) store.empty() @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_read_write_matches_in_db(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) store.empty() client = TestClient() client_context = client.create_client_context("user1") matched_context1 = MatchContext(100, 100, sentence="Hello", response="Hi There") matched_context1.matched_nodes.append(Match(Match.WORD, PatternWordNode("Hello"), "Hello")) store._write_matches_to_db(client_context, matched_context1, 1) store.commit() matched_context2 = MatchContext(0, 0) store._read_matches_from_db(client_context, matched_context2, 1) self.assertEqual(1, len(matched_context2.matched_nodes)) self.assertEqual(Match.WORD, 
matched_context2.matched_nodes[0].matched_node_type) self.assertEqual("WORD [Hello]", matched_context2.matched_nodes[0].matched_node_str) self.assertFalse(matched_context2.matched_nodes[0].matched_node_multi_word) self.assertFalse(matched_context2.matched_nodes[0].matched_node_wildcard) self.assertEqual(1, len(matched_context2.matched_nodes[0].matched_node_words)) self.assertEqual(["Hello"], matched_context2.matched_nodes[0].matched_node_words) store.empty() @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_match_context_in_db(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) store.empty() client = TestClient() client_context = client.create_client_context("user1") matched_context1 = MatchContext(100, 100, sentence="Hello", response="Hi There") matched_context1._matched_nodes = [] matched_context1._template_node = None store._write_match_context_to_db(client_context, 1, matched_context1) store.commit() matched_context2 = MatchContext(100, 100) store._read_match_context_from_db(client_context, 1, matched_context2) self.assertEqual(100, matched_context2.max_search_timeout) self.assertEqual(100, matched_context2.max_search_depth) self.assertEqual("Hello", matched_context2.sentence) self.assertEqual("Hi There", matched_context2.response) store.empty() @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_sentences_in_db(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) store.empty() client = TestClient() client_context = client.create_client_context("user1") question1 = Question.create_from_text(client_context, "Hello There") question1.sentence(0).response = "Hi" question1.sentence(0)._positivity = 0.5 question1.sentence(0)._subjectivity = 0.6 store._write_sentences_to_db(client_context, 1, question1) store.commit() question2 = Question() 
store._read_sentences_from_db(client_context, 1, question2) self.assertEqual(1, len(question2.sentences)) self.assertEqual(0.5, question2.sentences[0].positivity) self.assertEqual(0.6, question2.sentences[0].subjectivity) self.assertEqual(["Hello", "There"], question2.sentences[0].words) self.assertEqual("Hi", question2.sentences[0].response) store.empty() @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_questions_in_db(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) store.empty() client = TestClient() client_context = client.create_client_context("user1") conversation1 = Conversation(client_context) question1 = Question.create_from_text(client_context, "Hello There") question1.sentence(0).response = "Hi" question1.sentence(0)._positivity = 0.5 question1.sentence(0)._subjectivity = 0.6 conversation1.record_dialog(question1) store._write_questions_to_db(client_context, 1, conversation1) store.commit() conversation2 = Conversation(client_context) store._read_questions_from_db(client_context, 1, conversation2) self.assertEqual(1, len(conversation2.questions)) self.assertEqual(1, len(conversation2.questions[0].sentences)) self.assertEqual(0.5, conversation2.questions[0].sentences[0].positivity) self.assertEqual(0.6, conversation2.questions[0].sentences[0].subjectivity) self.assertEqual(["Hello", "There"], conversation2.questions[0].sentences[0].words) self.assertEqual("Hi", conversation2.questions[0].sentences[0].response) store.empty() @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_conversation_in_db(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) store.empty() client = TestClient() client_context = client.create_client_context("user1") conversation1 = Conversation(client_context) conversation1.properties['ckey1'] = "cvalue1" 
conversation1.properties['ckey2'] ="cvalue2" question1 = Question.create_from_text(client_context, "Hello There") question1.sentence(0).response = "Hi" question1.sentence(0)._positivity = 0.5 question1.sentence(0)._subjectivity = 0.6 question1.properties['qkey1'] = "qvalue1" question1.properties['qkey2'] = "qvalue2" conversation1.record_dialog(question1) store.store_conversation(client_context, conversation1) store.commit () conversation2 = Conversation(client_context) store.load_conversation (client_context, conversation2) self.assertEqual(conversation2.properties['ckey1'], "cvalue1") self.assertEqual(conversation2.properties['ckey2'], "cvalue2") self.assertEqual(conversation2.questions[0].sentence(0).response, "Hi") self.assertEqual(conversation2.questions[0].sentence(0)._positivity, 0.5) self.assertEqual(conversation2.questions[0].sentence(0)._subjectivity, 0.6) self.assertEqual(conversation2.questions[0].properties['qkey1'], "qvalue1") self.assertEqual(conversation2.questions[0].properties['qkey2'], "qvalue2") store.empty() @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_conversation_storage(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) self.assertEqual(store.storage_engine, engine) self.assert_conversation_storage(store) def patch_get_conversation_dao(self, client_context): return ConversationDAO(id=1, clientid="client1", userid="user1", botid="bot1", brainid="brain1", maxhistories=100) @patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_conversation_dao", patch_get_conversation_dao) @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_storage_where_existing_conversation(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) self.assertEqual(store.storage_engine, engine) self.assert_just_conversation_storage(store) def 
patch_get_conversation_dao2(self, client_context): return None @patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_conversation_dao", patch_get_conversation_dao2) @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_storage_where_no_conversation(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) self.assertEqual(store.storage_engine, engine) self.assert_just_conversation_storage(store) def patch_get_question_dao(self, conversationid, question_no): return QuestionDAO(id=1, conversationid=conversationid, questionno=question_no, srai=False) @patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_question_dao", patch_get_question_dao) @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_storage_where_existing_question(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) self.assertEqual(store.storage_engine, engine) self.assert_just_conversation_storage(store) def patch_get_question_dao2(self, conversationid, question_no): return None @patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_question_dao", patch_get_question_dao2) @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_storage_where_noquestion(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) self.assertEqual(store.storage_engine, engine) self.assert_just_conversation_storage(store) def patch_get_sentence_dao(self, questionid, sentence_no): return SentenceDAO(id=1, questionid = questionid, sentenceno = sentence_no, sentence = "Hello", response = "Hi There", positivity = "0.5", subjectivity = "0.5") @patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_sentence_dao", patch_get_sentence_dao) 
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_storage_where_existing_sentence(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) self.assertEqual(store.storage_engine, engine) self.assert_just_conversation_storage(store) def patch_get_sentence_dao2(self, questionid, sentence_no): return None @patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_sentence_dao", patch_get_sentence_dao2) @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_storage_where_no_sentence(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) self.assertEqual(store.storage_engine, engine) self.assert_just_conversation_storage(store) def patch_get_match_dao(self, sentenceid): return MatchDAO(id=1, sentenceid = sentenceid, max_search_depth = 99, max_search_timeout = 99, sentence = "Hello", response = "Hi there", score = "1.0") @patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_match_dao", patch_get_match_dao) @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_storage_where_existing_match(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) self.assertEqual(store.storage_engine, engine) self.assert_just_conversation_storage(store) def patch_get_match_dao2(self, sentenceid): return None @patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_match_dao", patch_get_match_dao2) @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_storage_where_no_match(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) self.assertEqual(store.storage_engine, engine) self.assert_just_conversation_storage(store) def 
patch_get_matchnode_dao(self, matchid, match_count): return MatchNodeDAO(id = 1, matchid = matchid, matchcount = match_count, matchtype = "WORD", matchnode = "WORD", matchstr = "HELLO", wildcard = False, multiword = False) @patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_matchnode_dao", patch_get_matchnode_dao) @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_storage_where_existing_matchnode(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) self.assertEqual(store.storage_engine, engine) self.assert_just_conversation_storage(store) def patch_get_matchnode_dao2(self, matchid, match_count): return None @patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_matchnode_dao", patch_get_matchnode_dao2) @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_storage_where_no_matchnode(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) self.assertEqual(store.storage_engine, engine) self.assert_just_conversation_storage(store) def patch_get_property_dao(self, conversationid, questionid, proptype, name): return ConversationPropertyDAO(id=1, conversationid = conversationid, questionid =questionid, type = proptype, name = name, value = "value") @patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_property_dao", patch_get_property_dao) @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_storage_where_existing_property_unmatched(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) self.assertEqual(store.storage_engine, engine) self.assert_just_conversation_storage(store) def patch_get_property_dao2(self, conversationid, questionid, proptype, name): return ConversationPropertyDAO(id=1, conversationid = 
conversationid, questionid =questionid, type = proptype, name = "topic", value = "*") @patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_property_dao", patch_get_property_dao2) @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_storage_where_existing_property_matched(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) self.assertEqual(store.storage_engine, engine) self.assert_just_conversation_storage(store) def patch_get_property_dao3(self, conversationid, questionid, proptype, name): return None @patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_property_dao", patch_get_property_dao3) @unittest.skipIf(Engines.sql is False, Engines.sql_disabled) def test_storage_where_no_property(self): config = SQLStorageConfiguration() engine = SQLStorageEngine(config) engine.initialise() store = SQLConversationStore(engine) self.assertEqual(store.storage_engine, engine) self.assert_just_conversation_storage(store)
40.7749
133
0.6887
2,054
20,469
6.63778
0.078871
0.033739
0.035426
0.040487
0.828297
0.771234
0.74747
0.708523
0.662315
0.654393
0
0.016647
0.222287
20,469
501
134
40.856287
0.839814
0
0
0.585752
0
0
0.07548
0.054815
0
0
0
0
0.166227
1
0.094987
false
0
0.050132
0.034301
0.182058
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
dc95cafc17ab0c4c6e21da796505c0a067d3adaa
140
py
Python
registry_py/__init__.py
kasun/registry_py
8d6323932ffab84a064f3f4eff1fd4c46c0e6f18
[ "MIT" ]
null
null
null
registry_py/__init__.py
kasun/registry_py
8d6323932ffab84a064f3f4eff1fd4c46c0e6f18
[ "MIT" ]
null
null
null
registry_py/__init__.py
kasun/registry_py
8d6323932ffab84a064f3f4eff1fd4c46c0e6f18
[ "MIT" ]
null
null
null
from .registry import ClassNotRegistered, Registrable, Registry __version__ = "0.1" __all__ = [ClassNotRegistered, Registrable, Registry]
23.333333
63
0.8
13
140
8
0.692308
0.557692
0.711538
0
0
0
0
0
0
0
0
0.016129
0.114286
140
5
64
28
0.822581
0
0
0
0
0
0.021429
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
dc9ceb1308996c16bc45af0ee449ae0102fe7051
45,864
py
Python
nova/network/api.py
bopopescu/nova-token
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
[ "Apache-2.0" ]
null
null
null
nova/network/api.py
bopopescu/nova-token
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
[ "Apache-2.0" ]
null
null
null
nova/network/api.py
bopopescu/nova-token
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
[ "Apache-2.0" ]
2
2017-07-20T17:31:34.000Z
2020-07-24T02:42:19.000Z
begin_unit comment|'# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.' nl|'\n' comment|'# Copyright 2010 United States Government as represented by the' nl|'\n' comment|'# Administrator of the National Aeronautics and Space Administration.' nl|'\n' comment|'# Copyright 2013 IBM Corp.' nl|'\n' comment|'# All Rights Reserved.' nl|'\n' comment|'#' nl|'\n' comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may' nl|'\n' comment|'# not use this file except in compliance with the License. You may obtain' nl|'\n' comment|'# a copy of the License at' nl|'\n' comment|'#' nl|'\n' comment|'# http://www.apache.org/licenses/LICENSE-2.0' nl|'\n' comment|'#' nl|'\n' comment|'# Unless required by applicable law or agreed to in writing, software' nl|'\n' comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT' nl|'\n' comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the' nl|'\n' comment|'# License for the specific language governing permissions and limitations' nl|'\n' comment|'# under the License.' nl|'\n' nl|'\n' name|'import' name|'functools' newline|'\n' nl|'\n' name|'from' name|'oslo_config' name|'import' name|'cfg' newline|'\n' name|'from' name|'oslo_log' name|'import' name|'log' name|'as' name|'logging' newline|'\n' name|'from' name|'oslo_utils' name|'import' name|'strutils' newline|'\n' nl|'\n' name|'from' name|'nova' name|'import' name|'exception' newline|'\n' name|'from' name|'nova' op|'.' name|'i18n' name|'import' name|'_LI' newline|'\n' name|'from' name|'nova' op|'.' name|'network' name|'import' name|'base_api' newline|'\n' name|'from' name|'nova' op|'.' name|'network' name|'import' name|'floating_ips' newline|'\n' name|'from' name|'nova' op|'.' name|'network' name|'import' name|'model' name|'as' name|'network_model' newline|'\n' name|'from' name|'nova' op|'.' 
name|'network' name|'import' name|'rpcapi' name|'as' name|'network_rpcapi' newline|'\n' name|'from' name|'nova' name|'import' name|'objects' newline|'\n' name|'from' name|'nova' op|'.' name|'objects' name|'import' name|'base' name|'as' name|'obj_base' newline|'\n' name|'from' name|'nova' name|'import' name|'policy' newline|'\n' name|'from' name|'nova' name|'import' name|'utils' newline|'\n' nl|'\n' DECL|variable|CONF name|'CONF' op|'=' name|'cfg' op|'.' name|'CONF' newline|'\n' nl|'\n' DECL|variable|LOG name|'LOG' op|'=' name|'logging' op|'.' name|'getLogger' op|'(' name|'__name__' op|')' newline|'\n' nl|'\n' nl|'\n' DECL|function|wrap_check_policy name|'def' name|'wrap_check_policy' op|'(' name|'func' op|')' op|':' newline|'\n' indent|' ' string|'"""Check policy corresponding to the wrapped methods prior to execution."""' newline|'\n' nl|'\n' op|'@' name|'functools' op|'.' name|'wraps' op|'(' name|'func' op|')' newline|'\n' DECL|function|wrapped name|'def' name|'wrapped' op|'(' name|'self' op|',' name|'context' op|',' op|'*' name|'args' op|',' op|'**' name|'kwargs' op|')' op|':' newline|'\n' indent|' ' name|'action' op|'=' name|'func' op|'.' name|'__name__' newline|'\n' name|'if' name|'not' name|'self' op|'.' name|'skip_policy_check' op|':' newline|'\n' indent|' ' name|'check_policy' op|'(' name|'context' op|',' name|'action' op|')' newline|'\n' dedent|'' name|'return' name|'func' op|'(' name|'self' op|',' name|'context' op|',' op|'*' name|'args' op|',' op|'**' name|'kwargs' op|')' newline|'\n' nl|'\n' dedent|'' name|'return' name|'wrapped' newline|'\n' nl|'\n' nl|'\n' DECL|function|check_policy dedent|'' name|'def' name|'check_policy' op|'(' name|'context' op|',' name|'action' op|')' op|':' newline|'\n' indent|' ' name|'target' op|'=' op|'{' nl|'\n' string|"'project_id'" op|':' name|'context' op|'.' name|'project_id' op|',' nl|'\n' string|"'user_id'" op|':' name|'context' op|'.' 
name|'user_id' op|',' nl|'\n' op|'}' newline|'\n' name|'_action' op|'=' string|"'network:%s'" op|'%' name|'action' newline|'\n' name|'policy' op|'.' name|'enforce' op|'(' name|'context' op|',' name|'_action' op|',' name|'target' op|')' newline|'\n' nl|'\n' nl|'\n' DECL|class|API dedent|'' name|'class' name|'API' op|'(' name|'base_api' op|'.' name|'NetworkAPI' op|')' op|':' newline|'\n' indent|' ' string|'"""API for doing networking via the nova-network network manager.\n\n This is a pluggable module - other implementations do networking via\n other services (such as Neutron).\n """' newline|'\n' DECL|member|__init__ name|'def' name|'__init__' op|'(' name|'self' op|',' op|'**' name|'kwargs' op|')' op|':' newline|'\n' indent|' ' name|'self' op|'.' name|'network_rpcapi' op|'=' name|'network_rpcapi' op|'.' name|'NetworkAPI' op|'(' op|')' newline|'\n' name|'helper' op|'=' name|'utils' op|'.' name|'ExceptionHelper' newline|'\n' comment|'# NOTE(vish): this local version of floating_manager has to convert' nl|'\n' comment|"# ClientExceptions back since they aren't going over rpc." nl|'\n' name|'self' op|'.' name|'floating_manager' op|'=' name|'helper' op|'(' name|'floating_ips' op|'.' name|'LocalManager' op|'(' op|')' op|')' newline|'\n' name|'super' op|'(' name|'API' op|',' name|'self' op|')' op|'.' name|'__init__' op|'(' op|'**' name|'kwargs' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|get_all name|'def' name|'get_all' op|'(' name|'self' op|',' name|'context' op|')' op|':' newline|'\n' indent|' ' string|'"""Get all the networks.\n\n If it is an admin user then api will return all the\n networks. If it is a normal user and nova Flat or FlatDHCP\n networking is being used then api will return all\n networks. Otherwise api will only return the networks which\n belong to the user\'s project.\n """' newline|'\n' name|'if' string|'"nova.network.manager.Flat"' name|'in' name|'CONF' op|'.' 
name|'network_manager' op|':' newline|'\n' indent|' ' name|'project_only' op|'=' string|'"allow_none"' newline|'\n' dedent|'' name|'else' op|':' newline|'\n' indent|' ' name|'project_only' op|'=' name|'True' newline|'\n' dedent|'' name|'try' op|':' newline|'\n' indent|' ' name|'return' name|'objects' op|'.' name|'NetworkList' op|'.' name|'get_all' op|'(' name|'context' op|',' nl|'\n' name|'project_only' op|'=' name|'project_only' op|')' newline|'\n' dedent|'' name|'except' name|'exception' op|'.' name|'NoNetworksFound' op|':' newline|'\n' indent|' ' name|'return' op|'[' op|']' newline|'\n' nl|'\n' dedent|'' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|get name|'def' name|'get' op|'(' name|'self' op|',' name|'context' op|',' name|'network_uuid' op|')' op|':' newline|'\n' indent|' ' name|'return' name|'objects' op|'.' name|'Network' op|'.' name|'get_by_uuid' op|'(' name|'context' op|',' name|'network_uuid' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|create name|'def' name|'create' op|'(' name|'self' op|',' name|'context' op|',' op|'**' name|'kwargs' op|')' op|':' newline|'\n' indent|' ' name|'return' name|'self' op|'.' name|'network_rpcapi' op|'.' name|'create_networks' op|'(' name|'context' op|',' op|'**' name|'kwargs' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|delete name|'def' name|'delete' op|'(' name|'self' op|',' name|'context' op|',' name|'network_uuid' op|')' op|':' newline|'\n' indent|' ' name|'network' op|'=' name|'self' op|'.' name|'get' op|'(' name|'context' op|',' name|'network_uuid' op|')' newline|'\n' name|'if' name|'network' op|'.' name|'project_id' name|'is' name|'not' name|'None' op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' name|'NetworkInUse' op|'(' name|'network_id' op|'=' name|'network_uuid' op|')' newline|'\n' dedent|'' name|'return' name|'self' op|'.' name|'network_rpcapi' op|'.' 
name|'delete_network' op|'(' name|'context' op|',' name|'network_uuid' op|',' name|'None' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|disassociate name|'def' name|'disassociate' op|'(' name|'self' op|',' name|'context' op|',' name|'network_uuid' op|')' op|':' newline|'\n' indent|' ' name|'network' op|'=' name|'self' op|'.' name|'get' op|'(' name|'context' op|',' name|'network_uuid' op|')' newline|'\n' name|'objects' op|'.' name|'Network' op|'.' name|'disassociate' op|'(' name|'context' op|',' name|'network' op|'.' name|'id' op|',' nl|'\n' name|'host' op|'=' name|'True' op|',' name|'project' op|'=' name|'True' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|get_fixed_ip name|'def' name|'get_fixed_ip' op|'(' name|'self' op|',' name|'context' op|',' name|'id' op|')' op|':' newline|'\n' indent|' ' name|'return' name|'objects' op|'.' name|'FixedIP' op|'.' name|'get_by_id' op|'(' name|'context' op|',' name|'id' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|get_fixed_ip_by_address name|'def' name|'get_fixed_ip_by_address' op|'(' name|'self' op|',' name|'context' op|',' name|'address' op|')' op|':' newline|'\n' indent|' ' name|'return' name|'objects' op|'.' name|'FixedIP' op|'.' name|'get_by_address' op|'(' name|'context' op|',' name|'address' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|get_floating_ip name|'def' name|'get_floating_ip' op|'(' name|'self' op|',' name|'context' op|',' name|'id' op|')' op|':' newline|'\n' indent|' ' name|'if' name|'not' name|'strutils' op|'.' name|'is_int_like' op|'(' name|'id' op|')' op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' name|'InvalidID' op|'(' name|'id' op|'=' name|'id' op|')' newline|'\n' dedent|'' name|'return' name|'objects' op|'.' name|'FloatingIP' op|'.' 
name|'get_by_id' op|'(' name|'context' op|',' name|'id' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|get_floating_ip_pools name|'def' name|'get_floating_ip_pools' op|'(' name|'self' op|',' name|'context' op|')' op|':' newline|'\n' indent|' ' name|'return' name|'objects' op|'.' name|'FloatingIP' op|'.' name|'get_pool_names' op|'(' name|'context' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|get_floating_ip_by_address name|'def' name|'get_floating_ip_by_address' op|'(' name|'self' op|',' name|'context' op|',' name|'address' op|')' op|':' newline|'\n' indent|' ' name|'return' name|'objects' op|'.' name|'FloatingIP' op|'.' name|'get_by_address' op|'(' name|'context' op|',' name|'address' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|get_floating_ips_by_project name|'def' name|'get_floating_ips_by_project' op|'(' name|'self' op|',' name|'context' op|')' op|':' newline|'\n' indent|' ' name|'return' name|'objects' op|'.' name|'FloatingIPList' op|'.' name|'get_by_project' op|'(' name|'context' op|',' nl|'\n' name|'context' op|'.' name|'project_id' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|get_instance_id_by_floating_address name|'def' name|'get_instance_id_by_floating_address' op|'(' name|'self' op|',' name|'context' op|',' name|'address' op|')' op|':' newline|'\n' indent|' ' name|'fixed_ip' op|'=' name|'objects' op|'.' name|'FixedIP' op|'.' name|'get_by_floating_address' op|'(' name|'context' op|',' name|'address' op|')' newline|'\n' name|'if' name|'fixed_ip' name|'is' name|'None' op|':' newline|'\n' indent|' ' name|'return' name|'None' newline|'\n' dedent|'' name|'else' op|':' newline|'\n' indent|' ' name|'return' name|'fixed_ip' op|'.' 
name|'instance_uuid' newline|'\n' nl|'\n' dedent|'' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|get_vifs_by_instance name|'def' name|'get_vifs_by_instance' op|'(' name|'self' op|',' name|'context' op|',' name|'instance' op|')' op|':' newline|'\n' indent|' ' name|'vifs' op|'=' name|'objects' op|'.' name|'VirtualInterfaceList' op|'.' name|'get_by_instance_uuid' op|'(' name|'context' op|',' nl|'\n' name|'instance' op|'.' name|'uuid' op|')' newline|'\n' name|'for' name|'vif' name|'in' name|'vifs' op|':' newline|'\n' indent|' ' name|'if' name|'vif' op|'.' name|'network_id' name|'is' name|'not' name|'None' op|':' newline|'\n' indent|' ' name|'network' op|'=' name|'objects' op|'.' name|'Network' op|'.' name|'get_by_id' op|'(' name|'context' op|',' name|'vif' op|'.' name|'network_id' op|',' nl|'\n' name|'project_only' op|'=' string|"'allow_none'" op|')' newline|'\n' name|'vif' op|'.' name|'net_uuid' op|'=' name|'network' op|'.' name|'uuid' newline|'\n' dedent|'' dedent|'' name|'return' name|'vifs' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|get_vif_by_mac_address name|'def' name|'get_vif_by_mac_address' op|'(' name|'self' op|',' name|'context' op|',' name|'mac_address' op|')' op|':' newline|'\n' indent|' ' name|'vif' op|'=' name|'objects' op|'.' name|'VirtualInterface' op|'.' name|'get_by_address' op|'(' name|'context' op|',' nl|'\n' name|'mac_address' op|')' newline|'\n' name|'if' name|'vif' op|'.' name|'network_id' name|'is' name|'not' name|'None' op|':' newline|'\n' indent|' ' name|'network' op|'=' name|'objects' op|'.' name|'Network' op|'.' name|'get_by_id' op|'(' name|'context' op|',' name|'vif' op|'.' name|'network_id' op|',' nl|'\n' name|'project_only' op|'=' string|"'allow_none'" op|')' newline|'\n' name|'vif' op|'.' name|'net_uuid' op|'=' name|'network' op|'.' 
name|'uuid' newline|'\n' dedent|'' name|'return' name|'vif' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|allocate_floating_ip name|'def' name|'allocate_floating_ip' op|'(' name|'self' op|',' name|'context' op|',' name|'pool' op|'=' name|'None' op|')' op|':' newline|'\n' indent|' ' string|'"""Adds (allocates) a floating IP to a project from a pool."""' newline|'\n' name|'return' name|'self' op|'.' name|'floating_manager' op|'.' name|'allocate_floating_ip' op|'(' name|'context' op|',' nl|'\n' name|'context' op|'.' name|'project_id' op|',' name|'False' op|',' name|'pool' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|release_floating_ip name|'def' name|'release_floating_ip' op|'(' name|'self' op|',' name|'context' op|',' name|'address' op|',' nl|'\n' name|'affect_auto_assigned' op|'=' name|'False' op|')' op|':' newline|'\n' indent|' ' string|'"""Removes (deallocates) a floating IP with address from a project."""' newline|'\n' name|'return' name|'self' op|'.' name|'floating_manager' op|'.' name|'deallocate_floating_ip' op|'(' name|'context' op|',' name|'address' op|',' nl|'\n' name|'affect_auto_assigned' op|')' newline|'\n' nl|'\n' DECL|member|disassociate_and_release_floating_ip dedent|'' name|'def' name|'disassociate_and_release_floating_ip' op|'(' name|'self' op|',' name|'context' op|',' name|'instance' op|',' nl|'\n' name|'floating_ip' op|')' op|':' newline|'\n' indent|' ' string|'"""Removes (deallocates) and deletes the floating IP.\n\n This api call was added to allow this to be done in one operation\n if using neutron.\n """' newline|'\n' nl|'\n' name|'address' op|'=' name|'floating_ip' op|'[' string|"'address'" op|']' newline|'\n' name|'if' name|'floating_ip' op|'.' name|'get' op|'(' string|"'fixed_ip_id'" op|')' op|':' newline|'\n' indent|' ' name|'try' op|':' newline|'\n' indent|' ' name|'self' op|'.' 
name|'disassociate_floating_ip' op|'(' name|'context' op|',' name|'instance' op|',' name|'address' op|')' newline|'\n' dedent|'' name|'except' name|'exception' op|'.' name|'FloatingIpNotAssociated' op|':' newline|'\n' indent|' ' name|'msg' op|'=' op|'(' string|'"Floating IP %s has already been disassociated, "' nl|'\n' string|'"perhaps by another concurrent action."' op|')' op|'%' name|'address' newline|'\n' name|'LOG' op|'.' name|'debug' op|'(' name|'msg' op|')' newline|'\n' nl|'\n' comment|'# release ip from project' nl|'\n' dedent|'' dedent|'' name|'return' name|'self' op|'.' name|'release_floating_ip' op|'(' name|'context' op|',' name|'address' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' op|'@' name|'base_api' op|'.' name|'refresh_cache' newline|'\n' DECL|member|associate_floating_ip name|'def' name|'associate_floating_ip' op|'(' name|'self' op|',' name|'context' op|',' name|'instance' op|',' nl|'\n' name|'floating_address' op|',' name|'fixed_address' op|',' nl|'\n' name|'affect_auto_assigned' op|'=' name|'False' op|')' op|':' newline|'\n' indent|' ' string|'"""Associates a floating IP with a fixed IP.\n\n Ensures floating IP is allocated to the project in context.\n Does not verify ownership of the fixed IP. Caller is assumed to have\n checked that the instance is properly owned.\n\n """' newline|'\n' name|'orig_instance_uuid' op|'=' name|'self' op|'.' name|'floating_manager' op|'.' name|'associate_floating_ip' op|'(' nl|'\n' name|'context' op|',' name|'floating_address' op|',' name|'fixed_address' op|',' name|'affect_auto_assigned' op|')' newline|'\n' nl|'\n' name|'if' name|'orig_instance_uuid' op|':' newline|'\n' indent|' ' name|'msg_dict' op|'=' name|'dict' op|'(' name|'address' op|'=' name|'floating_address' op|',' nl|'\n' name|'instance_id' op|'=' name|'orig_instance_uuid' op|')' newline|'\n' name|'LOG' op|'.' 
name|'info' op|'(' name|'_LI' op|'(' string|"'re-assign floating IP %(address)s from '" nl|'\n' string|"'instance %(instance_id)s'" op|')' op|',' name|'msg_dict' op|')' newline|'\n' name|'orig_instance' op|'=' name|'objects' op|'.' name|'Instance' op|'.' name|'get_by_uuid' op|'(' nl|'\n' name|'context' op|',' name|'orig_instance_uuid' op|',' name|'expected_attrs' op|'=' op|'[' string|"'flavor'" op|']' op|')' newline|'\n' nl|'\n' comment|'# purge cached nw info for the original instance' nl|'\n' name|'base_api' op|'.' name|'update_instance_cache_with_nw_info' op|'(' name|'self' op|',' name|'context' op|',' nl|'\n' name|'orig_instance' op|')' newline|'\n' nl|'\n' dedent|'' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' op|'@' name|'base_api' op|'.' name|'refresh_cache' newline|'\n' DECL|member|disassociate_floating_ip name|'def' name|'disassociate_floating_ip' op|'(' name|'self' op|',' name|'context' op|',' name|'instance' op|',' name|'address' op|',' nl|'\n' name|'affect_auto_assigned' op|'=' name|'False' op|')' op|':' newline|'\n' indent|' ' string|'"""Disassociates a floating IP from fixed IP it is associated with."""' newline|'\n' name|'return' name|'self' op|'.' name|'floating_manager' op|'.' name|'disassociate_floating_ip' op|'(' name|'context' op|',' name|'address' op|',' nl|'\n' name|'affect_auto_assigned' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' op|'@' name|'base_api' op|'.' 
name|'refresh_cache' newline|'\n' DECL|member|allocate_for_instance name|'def' name|'allocate_for_instance' op|'(' name|'self' op|',' name|'context' op|',' name|'instance' op|',' name|'vpn' op|',' nl|'\n' name|'requested_networks' op|',' name|'macs' op|'=' name|'None' op|',' nl|'\n' name|'security_groups' op|'=' name|'None' op|',' nl|'\n' name|'dhcp_options' op|'=' name|'None' op|',' nl|'\n' name|'bind_host_id' op|'=' name|'None' op|')' op|':' newline|'\n' indent|' ' string|'"""Allocates all network structures for an instance.\n\n :param context: The request context.\n :param instance: nova.objects.instance.Instance object.\n :param vpn: A boolean, if True, indicate a vpn to access the instance.\n :param requested_networks: A dictionary of requested_networks,\n Optional value containing network_id, fixed_ip, and port_id.\n :param macs: None or a set of MAC addresses that the instance\n should use. macs is supplied by the hypervisor driver (contrast\n with requested_networks which is user supplied).\n :param security_groups: None or security groups to allocate for\n instance.\n :param dhcp_options: None or a set of key/value pairs that should\n determine the DHCP BOOTP response, eg. for PXE booting an instance\n configured with the baremetal hypervisor. It is expected that these\n are already formatted for the neutron v2 api.\n See nova/virt/driver.py:dhcp_options_for_instance for an example.\n :param bind_host_id: ignored by this driver.\n :returns: network info as from get_instance_nw_info() below\n """' newline|'\n' comment|"# NOTE(vish): We can't do the floating ip allocation here because" nl|'\n' comment|"# this is called from compute.manager which shouldn't" nl|'\n' comment|'# have db access so we do it on the other side of the' nl|'\n' comment|'# rpc.' nl|'\n' name|'flavor' op|'=' name|'instance' op|'.' 
name|'get_flavor' op|'(' op|')' newline|'\n' name|'args' op|'=' op|'{' op|'}' newline|'\n' name|'args' op|'[' string|"'vpn'" op|']' op|'=' name|'vpn' newline|'\n' name|'args' op|'[' string|"'requested_networks'" op|']' op|'=' name|'requested_networks' newline|'\n' name|'args' op|'[' string|"'instance_id'" op|']' op|'=' name|'instance' op|'.' name|'uuid' newline|'\n' name|'args' op|'[' string|"'project_id'" op|']' op|'=' name|'instance' op|'.' name|'project_id' newline|'\n' name|'args' op|'[' string|"'host'" op|']' op|'=' name|'instance' op|'.' name|'host' newline|'\n' name|'args' op|'[' string|"'rxtx_factor'" op|']' op|'=' name|'flavor' op|'[' string|"'rxtx_factor'" op|']' newline|'\n' name|'args' op|'[' string|"'macs'" op|']' op|'=' name|'macs' newline|'\n' name|'args' op|'[' string|"'dhcp_options'" op|']' op|'=' name|'dhcp_options' newline|'\n' name|'nw_info' op|'=' name|'self' op|'.' name|'network_rpcapi' op|'.' name|'allocate_for_instance' op|'(' name|'context' op|',' op|'**' name|'args' op|')' newline|'\n' nl|'\n' name|'return' name|'network_model' op|'.' name|'NetworkInfo' op|'.' name|'hydrate' op|'(' name|'nw_info' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|deallocate_for_instance name|'def' name|'deallocate_for_instance' op|'(' name|'self' op|',' name|'context' op|',' name|'instance' op|',' nl|'\n' name|'requested_networks' op|'=' name|'None' op|')' op|':' newline|'\n' indent|' ' string|'"""Deallocates all network structures related to instance."""' newline|'\n' comment|"# NOTE(vish): We can't do the floating ip deallocation here because" nl|'\n' comment|"# this is called from compute.manager which shouldn't" nl|'\n' comment|'# have db access so we do it on the other side of the' nl|'\n' comment|'# rpc.' nl|'\n' name|'if' name|'not' name|'isinstance' op|'(' name|'instance' op|',' name|'obj_base' op|'.' name|'NovaObject' op|')' op|':' newline|'\n' indent|' ' name|'instance' op|'=' name|'objects' op|'.' 
name|'Instance' op|'.' name|'_from_db_object' op|'(' name|'context' op|',' nl|'\n' name|'objects' op|'.' name|'Instance' op|'(' op|')' op|',' name|'instance' op|')' newline|'\n' dedent|'' name|'self' op|'.' name|'network_rpcapi' op|'.' name|'deallocate_for_instance' op|'(' name|'context' op|',' name|'instance' op|'=' name|'instance' op|',' nl|'\n' name|'requested_networks' op|'=' name|'requested_networks' op|')' newline|'\n' nl|'\n' comment|'# NOTE(danms): Here for neutron compatibility' nl|'\n' DECL|member|allocate_port_for_instance dedent|'' name|'def' name|'allocate_port_for_instance' op|'(' name|'self' op|',' name|'context' op|',' name|'instance' op|',' name|'port_id' op|',' nl|'\n' name|'network_id' op|'=' name|'None' op|',' name|'requested_ip' op|'=' name|'None' op|',' nl|'\n' name|'bind_host_id' op|'=' name|'None' op|')' op|':' newline|'\n' indent|' ' name|'raise' name|'NotImplementedError' op|'(' op|')' newline|'\n' nl|'\n' comment|'# NOTE(danms): Here for neutron compatibility' nl|'\n' DECL|member|deallocate_port_for_instance dedent|'' name|'def' name|'deallocate_port_for_instance' op|'(' name|'self' op|',' name|'context' op|',' name|'instance' op|',' name|'port_id' op|')' op|':' newline|'\n' indent|' ' name|'raise' name|'NotImplementedError' op|'(' op|')' newline|'\n' nl|'\n' comment|'# NOTE(danms): Here for neutron compatibility' nl|'\n' DECL|member|list_ports dedent|'' name|'def' name|'list_ports' op|'(' name|'self' op|',' op|'*' name|'args' op|',' op|'**' name|'kwargs' op|')' op|':' newline|'\n' indent|' ' name|'raise' name|'NotImplementedError' op|'(' op|')' newline|'\n' nl|'\n' comment|'# NOTE(danms): Here for neutron compatibility' nl|'\n' DECL|member|show_port dedent|'' name|'def' name|'show_port' op|'(' name|'self' op|',' op|'*' name|'args' op|',' op|'**' name|'kwargs' op|')' op|':' newline|'\n' indent|' ' name|'raise' name|'NotImplementedError' op|'(' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' op|'@' 
name|'base_api' op|'.' name|'refresh_cache' newline|'\n' DECL|member|add_fixed_ip_to_instance name|'def' name|'add_fixed_ip_to_instance' op|'(' name|'self' op|',' name|'context' op|',' name|'instance' op|',' name|'network_id' op|')' op|':' newline|'\n' indent|' ' string|'"""Adds a fixed IP to instance from specified network."""' newline|'\n' name|'flavor' op|'=' name|'instance' op|'.' name|'get_flavor' op|'(' op|')' newline|'\n' name|'args' op|'=' op|'{' string|"'instance_id'" op|':' name|'instance' op|'.' name|'uuid' op|',' nl|'\n' string|"'rxtx_factor'" op|':' name|'flavor' op|'[' string|"'rxtx_factor'" op|']' op|',' nl|'\n' string|"'host'" op|':' name|'instance' op|'.' name|'host' op|',' nl|'\n' string|"'network_id'" op|':' name|'network_id' op|'}' newline|'\n' name|'nw_info' op|'=' name|'self' op|'.' name|'network_rpcapi' op|'.' name|'add_fixed_ip_to_instance' op|'(' nl|'\n' name|'context' op|',' op|'**' name|'args' op|')' newline|'\n' name|'return' name|'network_model' op|'.' name|'NetworkInfo' op|'.' name|'hydrate' op|'(' name|'nw_info' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' op|'@' name|'base_api' op|'.' name|'refresh_cache' newline|'\n' DECL|member|remove_fixed_ip_from_instance name|'def' name|'remove_fixed_ip_from_instance' op|'(' name|'self' op|',' name|'context' op|',' name|'instance' op|',' name|'address' op|')' op|':' newline|'\n' indent|' ' string|'"""Removes a fixed IP from instance from specified network."""' newline|'\n' nl|'\n' name|'flavor' op|'=' name|'instance' op|'.' name|'get_flavor' op|'(' op|')' newline|'\n' name|'args' op|'=' op|'{' string|"'instance_id'" op|':' name|'instance' op|'.' name|'uuid' op|',' nl|'\n' string|"'rxtx_factor'" op|':' name|'flavor' op|'[' string|"'rxtx_factor'" op|']' op|',' nl|'\n' string|"'host'" op|':' name|'instance' op|'.' name|'host' op|',' nl|'\n' string|"'address'" op|':' name|'address' op|'}' newline|'\n' name|'nw_info' op|'=' name|'self' op|'.' 
name|'network_rpcapi' op|'.' name|'remove_fixed_ip_from_instance' op|'(' nl|'\n' name|'context' op|',' op|'**' name|'args' op|')' newline|'\n' name|'return' name|'network_model' op|'.' name|'NetworkInfo' op|'.' name|'hydrate' op|'(' name|'nw_info' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|add_network_to_project name|'def' name|'add_network_to_project' op|'(' name|'self' op|',' name|'context' op|',' name|'project_id' op|',' name|'network_uuid' op|'=' name|'None' op|')' op|':' newline|'\n' indent|' ' string|'"""Force adds another network to a project."""' newline|'\n' name|'self' op|'.' name|'network_rpcapi' op|'.' name|'add_network_to_project' op|'(' name|'context' op|',' name|'project_id' op|',' nl|'\n' name|'network_uuid' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|associate name|'def' name|'associate' op|'(' name|'self' op|',' name|'context' op|',' name|'network_uuid' op|',' name|'host' op|'=' name|'base_api' op|'.' name|'SENTINEL' op|',' nl|'\n' name|'project' op|'=' name|'base_api' op|'.' name|'SENTINEL' op|')' op|':' newline|'\n' indent|' ' string|'"""Associate or disassociate host or project to network."""' newline|'\n' name|'network' op|'=' name|'self' op|'.' name|'get' op|'(' name|'context' op|',' name|'network_uuid' op|')' newline|'\n' name|'if' name|'host' name|'is' name|'not' name|'base_api' op|'.' name|'SENTINEL' op|':' newline|'\n' indent|' ' name|'if' name|'host' name|'is' name|'None' op|':' newline|'\n' indent|' ' name|'objects' op|'.' name|'Network' op|'.' name|'disassociate' op|'(' name|'context' op|',' name|'network' op|'.' name|'id' op|',' nl|'\n' name|'host' op|'=' name|'True' op|',' name|'project' op|'=' name|'False' op|')' newline|'\n' dedent|'' name|'else' op|':' newline|'\n' indent|' ' name|'network' op|'.' name|'host' op|'=' name|'host' newline|'\n' name|'network' op|'.' 
name|'save' op|'(' op|')' newline|'\n' dedent|'' dedent|'' name|'if' name|'project' name|'is' name|'not' name|'base_api' op|'.' name|'SENTINEL' op|':' newline|'\n' indent|' ' name|'if' name|'project' name|'is' name|'None' op|':' newline|'\n' indent|' ' name|'objects' op|'.' name|'Network' op|'.' name|'disassociate' op|'(' name|'context' op|',' name|'network' op|'.' name|'id' op|',' nl|'\n' name|'host' op|'=' name|'False' op|',' name|'project' op|'=' name|'True' op|')' newline|'\n' dedent|'' name|'else' op|':' newline|'\n' indent|' ' name|'objects' op|'.' name|'Network' op|'.' name|'associate' op|'(' name|'context' op|',' name|'project' op|',' nl|'\n' name|'network_id' op|'=' name|'network' op|'.' name|'id' op|',' name|'force' op|'=' name|'True' op|')' newline|'\n' nl|'\n' dedent|'' dedent|'' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|get_instance_nw_info name|'def' name|'get_instance_nw_info' op|'(' name|'self' op|',' name|'context' op|',' name|'instance' op|',' op|'**' name|'kwargs' op|')' op|':' newline|'\n' indent|' ' string|'"""Returns all network info related to an instance."""' newline|'\n' name|'return' name|'super' op|'(' name|'API' op|',' name|'self' op|')' op|'.' name|'get_instance_nw_info' op|'(' name|'context' op|',' name|'instance' op|',' nl|'\n' op|'**' name|'kwargs' op|')' newline|'\n' nl|'\n' DECL|member|_get_instance_nw_info dedent|'' name|'def' name|'_get_instance_nw_info' op|'(' name|'self' op|',' name|'context' op|',' name|'instance' op|',' op|'**' name|'kwargs' op|')' op|':' newline|'\n' indent|' ' string|'"""Returns all network info related to an instance."""' newline|'\n' name|'flavor' op|'=' name|'instance' op|'.' name|'get_flavor' op|'(' op|')' newline|'\n' name|'args' op|'=' op|'{' string|"'instance_id'" op|':' name|'instance' op|'.' name|'uuid' op|',' nl|'\n' string|"'rxtx_factor'" op|':' name|'flavor' op|'[' string|"'rxtx_factor'" op|']' op|',' nl|'\n' string|"'host'" op|':' name|'instance' op|'.' 
name|'host' op|',' nl|'\n' string|"'project_id'" op|':' name|'instance' op|'.' name|'project_id' op|'}' newline|'\n' name|'nw_info' op|'=' name|'self' op|'.' name|'network_rpcapi' op|'.' name|'get_instance_nw_info' op|'(' name|'context' op|',' op|'**' name|'args' op|')' newline|'\n' nl|'\n' name|'return' name|'network_model' op|'.' name|'NetworkInfo' op|'.' name|'hydrate' op|'(' name|'nw_info' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|validate_networks name|'def' name|'validate_networks' op|'(' name|'self' op|',' name|'context' op|',' name|'requested_networks' op|',' name|'num_instances' op|')' op|':' newline|'\n' indent|' ' string|'"""validate the networks passed at the time of creating\n the server.\n\n Return the number of instances that can be successfully allocated\n with the requested network configuration.\n """' newline|'\n' name|'if' name|'requested_networks' op|':' newline|'\n' indent|' ' name|'self' op|'.' name|'network_rpcapi' op|'.' name|'validate_networks' op|'(' name|'context' op|',' nl|'\n' name|'requested_networks' op|')' newline|'\n' nl|'\n' comment|'# Neutron validation checks and returns how many of num_instances' nl|'\n' comment|'# instances can be supported by the quota. For Nova network' nl|'\n' comment|'# this is part of the subsequent quota check, so we just return' nl|'\n' comment|'# the requested number in this case.' 
nl|'\n' dedent|'' name|'return' name|'num_instances' newline|'\n' nl|'\n' DECL|member|create_pci_requests_for_sriov_ports dedent|'' name|'def' name|'create_pci_requests_for_sriov_ports' op|'(' name|'self' op|',' name|'context' op|',' nl|'\n' name|'pci_requests' op|',' nl|'\n' name|'requested_networks' op|')' op|':' newline|'\n' indent|' ' string|'"""Check requested networks for any SR-IOV port request.\n\n Create a PCI request object for each SR-IOV port, and add it to the\n pci_requests object that contains a list of PCI request object.\n """' newline|'\n' comment|"# This is NOOP for Nova network since it doesn't support SR-IOV." nl|'\n' name|'pass' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|get_dns_domains name|'def' name|'get_dns_domains' op|'(' name|'self' op|',' name|'context' op|')' op|':' newline|'\n' indent|' ' string|'"""Returns a list of available dns domains.\n These can be used to create DNS entries for floating IPs.\n """' newline|'\n' name|'return' name|'self' op|'.' name|'network_rpcapi' op|'.' name|'get_dns_domains' op|'(' name|'context' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|add_dns_entry name|'def' name|'add_dns_entry' op|'(' name|'self' op|',' name|'context' op|',' name|'address' op|',' name|'name' op|',' name|'dns_type' op|',' name|'domain' op|')' op|':' newline|'\n' indent|' ' string|'"""Create specified DNS entry for address."""' newline|'\n' name|'args' op|'=' op|'{' string|"'address'" op|':' name|'address' op|',' nl|'\n' string|"'name'" op|':' name|'name' op|',' nl|'\n' string|"'dns_type'" op|':' name|'dns_type' op|',' nl|'\n' string|"'domain'" op|':' name|'domain' op|'}' newline|'\n' name|'return' name|'self' op|'.' name|'network_rpcapi' op|'.' 
name|'add_dns_entry' op|'(' name|'context' op|',' op|'**' name|'args' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|modify_dns_entry name|'def' name|'modify_dns_entry' op|'(' name|'self' op|',' name|'context' op|',' name|'name' op|',' name|'address' op|',' name|'domain' op|')' op|':' newline|'\n' indent|' ' string|'"""Create specified DNS entry for address."""' newline|'\n' name|'args' op|'=' op|'{' string|"'address'" op|':' name|'address' op|',' nl|'\n' string|"'name'" op|':' name|'name' op|',' nl|'\n' string|"'domain'" op|':' name|'domain' op|'}' newline|'\n' name|'return' name|'self' op|'.' name|'network_rpcapi' op|'.' name|'modify_dns_entry' op|'(' name|'context' op|',' op|'**' name|'args' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|delete_dns_entry name|'def' name|'delete_dns_entry' op|'(' name|'self' op|',' name|'context' op|',' name|'name' op|',' name|'domain' op|')' op|':' newline|'\n' indent|' ' string|'"""Delete the specified dns entry."""' newline|'\n' name|'args' op|'=' op|'{' string|"'name'" op|':' name|'name' op|',' string|"'domain'" op|':' name|'domain' op|'}' newline|'\n' name|'return' name|'self' op|'.' name|'network_rpcapi' op|'.' name|'delete_dns_entry' op|'(' name|'context' op|',' op|'**' name|'args' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|delete_dns_domain name|'def' name|'delete_dns_domain' op|'(' name|'self' op|',' name|'context' op|',' name|'domain' op|')' op|':' newline|'\n' indent|' ' string|'"""Delete the specified dns domain."""' newline|'\n' name|'return' name|'self' op|'.' name|'network_rpcapi' op|'.' 
name|'delete_dns_domain' op|'(' name|'context' op|',' name|'domain' op|'=' name|'domain' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|get_dns_entries_by_address name|'def' name|'get_dns_entries_by_address' op|'(' name|'self' op|',' name|'context' op|',' name|'address' op|',' name|'domain' op|')' op|':' newline|'\n' indent|' ' string|'"""Get entries for address and domain."""' newline|'\n' name|'args' op|'=' op|'{' string|"'address'" op|':' name|'address' op|',' string|"'domain'" op|':' name|'domain' op|'}' newline|'\n' name|'return' name|'self' op|'.' name|'network_rpcapi' op|'.' name|'get_dns_entries_by_address' op|'(' name|'context' op|',' op|'**' name|'args' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|get_dns_entries_by_name name|'def' name|'get_dns_entries_by_name' op|'(' name|'self' op|',' name|'context' op|',' name|'name' op|',' name|'domain' op|')' op|':' newline|'\n' indent|' ' string|'"""Get entries for name and domain."""' newline|'\n' name|'args' op|'=' op|'{' string|"'name'" op|':' name|'name' op|',' string|"'domain'" op|':' name|'domain' op|'}' newline|'\n' name|'return' name|'self' op|'.' name|'network_rpcapi' op|'.' name|'get_dns_entries_by_name' op|'(' name|'context' op|',' op|'**' name|'args' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|create_private_dns_domain name|'def' name|'create_private_dns_domain' op|'(' name|'self' op|',' name|'context' op|',' name|'domain' op|',' name|'availability_zone' op|')' op|':' newline|'\n' indent|' ' string|'"""Create a private DNS domain with nova availability zone."""' newline|'\n' name|'args' op|'=' op|'{' string|"'domain'" op|':' name|'domain' op|',' string|"'av_zone'" op|':' name|'availability_zone' op|'}' newline|'\n' name|'return' name|'self' op|'.' name|'network_rpcapi' op|'.' 
name|'create_private_dns_domain' op|'(' name|'context' op|',' op|'**' name|'args' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|create_public_dns_domain name|'def' name|'create_public_dns_domain' op|'(' name|'self' op|',' name|'context' op|',' name|'domain' op|',' name|'project' op|'=' name|'None' op|')' op|':' newline|'\n' indent|' ' string|'"""Create a public DNS domain with optional nova project."""' newline|'\n' name|'args' op|'=' op|'{' string|"'domain'" op|':' name|'domain' op|',' string|"'project'" op|':' name|'project' op|'}' newline|'\n' name|'return' name|'self' op|'.' name|'network_rpcapi' op|'.' name|'create_public_dns_domain' op|'(' name|'context' op|',' op|'**' name|'args' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|setup_networks_on_host name|'def' name|'setup_networks_on_host' op|'(' name|'self' op|',' name|'context' op|',' name|'instance' op|',' name|'host' op|'=' name|'None' op|',' nl|'\n' name|'teardown' op|'=' name|'False' op|')' op|':' newline|'\n' indent|' ' string|'"""Setup or teardown the network structures on hosts related to\n instance.\n """' newline|'\n' name|'host' op|'=' name|'host' name|'or' name|'instance' op|'.' name|'host' newline|'\n' comment|'# NOTE(tr3buchet): host is passed in cases where we need to setup' nl|'\n' comment|'# or teardown the networks on a host which has been migrated to/from' nl|'\n' comment|'# and instance.host is not yet or is no longer equal to' nl|'\n' name|'args' op|'=' op|'{' string|"'instance_id'" op|':' name|'instance' op|'.' name|'id' op|',' nl|'\n' string|"'host'" op|':' name|'host' op|',' nl|'\n' string|"'teardown'" op|':' name|'teardown' op|',' nl|'\n' string|"'instance'" op|':' name|'instance' op|'}' newline|'\n' nl|'\n' name|'self' op|'.' name|'network_rpcapi' op|'.' 
name|'setup_networks_on_host' op|'(' name|'context' op|',' op|'**' name|'args' op|')' newline|'\n' nl|'\n' DECL|member|_get_multi_addresses dedent|'' name|'def' name|'_get_multi_addresses' op|'(' name|'self' op|',' name|'context' op|',' name|'instance' op|')' op|':' newline|'\n' indent|' ' name|'try' op|':' newline|'\n' indent|' ' name|'fixed_ips' op|'=' name|'objects' op|'.' name|'FixedIPList' op|'.' name|'get_by_instance_uuid' op|'(' nl|'\n' name|'context' op|',' name|'instance' op|'.' name|'uuid' op|')' newline|'\n' dedent|'' name|'except' name|'exception' op|'.' name|'FixedIpNotFoundForInstance' op|':' newline|'\n' indent|' ' name|'return' name|'False' op|',' op|'[' op|']' newline|'\n' dedent|'' name|'addresses' op|'=' op|'[' op|']' newline|'\n' name|'for' name|'fixed' name|'in' name|'fixed_ips' op|':' newline|'\n' indent|' ' name|'for' name|'floating' name|'in' name|'fixed' op|'.' name|'floating_ips' op|':' newline|'\n' indent|' ' name|'addresses' op|'.' name|'append' op|'(' name|'floating' op|'.' name|'address' op|')' newline|'\n' dedent|'' dedent|'' name|'return' name|'fixed_ips' op|'[' number|'0' op|']' op|'.' name|'network' op|'.' name|'multi_host' op|',' name|'addresses' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|migrate_instance_start name|'def' name|'migrate_instance_start' op|'(' name|'self' op|',' name|'context' op|',' name|'instance' op|',' name|'migration' op|')' op|':' newline|'\n' indent|' ' string|'"""Start to migrate the network of an instance."""' newline|'\n' name|'flavor' op|'=' name|'instance' op|'.' name|'get_flavor' op|'(' op|')' newline|'\n' name|'args' op|'=' name|'dict' op|'(' nl|'\n' name|'instance_uuid' op|'=' name|'instance' op|'.' name|'uuid' op|',' nl|'\n' name|'rxtx_factor' op|'=' name|'flavor' op|'[' string|"'rxtx_factor'" op|']' op|',' nl|'\n' name|'project_id' op|'=' name|'instance' op|'.' 
name|'project_id' op|',' nl|'\n' name|'source_compute' op|'=' name|'migration' op|'[' string|"'source_compute'" op|']' op|',' nl|'\n' name|'dest_compute' op|'=' name|'migration' op|'[' string|"'dest_compute'" op|']' op|',' nl|'\n' name|'floating_addresses' op|'=' name|'None' op|',' nl|'\n' op|')' newline|'\n' nl|'\n' name|'multi_host' op|',' name|'addresses' op|'=' name|'self' op|'.' name|'_get_multi_addresses' op|'(' name|'context' op|',' name|'instance' op|')' newline|'\n' name|'if' name|'multi_host' op|':' newline|'\n' indent|' ' name|'args' op|'[' string|"'floating_addresses'" op|']' op|'=' name|'addresses' newline|'\n' name|'args' op|'[' string|"'host'" op|']' op|'=' name|'migration' op|'[' string|"'source_compute'" op|']' newline|'\n' nl|'\n' dedent|'' name|'self' op|'.' name|'network_rpcapi' op|'.' name|'migrate_instance_start' op|'(' name|'context' op|',' op|'**' name|'args' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'wrap_check_policy' newline|'\n' DECL|member|migrate_instance_finish name|'def' name|'migrate_instance_finish' op|'(' name|'self' op|',' name|'context' op|',' name|'instance' op|',' name|'migration' op|')' op|':' newline|'\n' indent|' ' string|'"""Finish migrating the network of an instance."""' newline|'\n' name|'flavor' op|'=' name|'instance' op|'.' name|'get_flavor' op|'(' op|')' newline|'\n' name|'args' op|'=' name|'dict' op|'(' nl|'\n' name|'instance_uuid' op|'=' name|'instance' op|'.' name|'uuid' op|',' nl|'\n' name|'rxtx_factor' op|'=' name|'flavor' op|'[' string|"'rxtx_factor'" op|']' op|',' nl|'\n' name|'project_id' op|'=' name|'instance' op|'.' name|'project_id' op|',' nl|'\n' name|'source_compute' op|'=' name|'migration' op|'[' string|"'source_compute'" op|']' op|',' nl|'\n' name|'dest_compute' op|'=' name|'migration' op|'[' string|"'dest_compute'" op|']' op|',' nl|'\n' name|'floating_addresses' op|'=' name|'None' op|',' nl|'\n' op|')' newline|'\n' nl|'\n' name|'multi_host' op|',' name|'addresses' op|'=' name|'self' op|'.' 
name|'_get_multi_addresses' op|'(' name|'context' op|',' name|'instance' op|')' newline|'\n' name|'if' name|'multi_host' op|':' newline|'\n' indent|' ' name|'args' op|'[' string|"'floating_addresses'" op|']' op|'=' name|'addresses' newline|'\n' name|'args' op|'[' string|"'host'" op|']' op|'=' name|'migration' op|'[' string|"'dest_compute'" op|']' newline|'\n' nl|'\n' dedent|'' name|'self' op|'.' name|'network_rpcapi' op|'.' name|'migrate_instance_finish' op|'(' name|'context' op|',' op|'**' name|'args' op|')' newline|'\n' nl|'\n' DECL|member|setup_instance_network_on_host dedent|'' name|'def' name|'setup_instance_network_on_host' op|'(' name|'self' op|',' name|'context' op|',' name|'instance' op|',' name|'host' op|')' op|':' newline|'\n' indent|' ' string|'"""Setup network for specified instance on host."""' newline|'\n' name|'self' op|'.' name|'migrate_instance_finish' op|'(' name|'context' op|',' name|'instance' op|',' nl|'\n' op|'{' string|"'source_compute'" op|':' name|'None' op|',' nl|'\n' string|"'dest_compute'" op|':' name|'host' op|'}' op|')' newline|'\n' nl|'\n' DECL|member|cleanup_instance_network_on_host dedent|'' name|'def' name|'cleanup_instance_network_on_host' op|'(' name|'self' op|',' name|'context' op|',' name|'instance' op|',' name|'host' op|')' op|':' newline|'\n' indent|' ' string|'"""Cleanup network for specified instance on host."""' newline|'\n' name|'self' op|'.' name|'migrate_instance_start' op|'(' name|'context' op|',' name|'instance' op|',' nl|'\n' op|'{' string|"'source_compute'" op|':' name|'host' op|',' nl|'\n' string|"'dest_compute'" op|':' name|'None' op|'}' op|')' newline|'\n' dedent|'' dedent|'' endmarker|'' end_unit
14.168675
1,216
0.622449
6,776
45,864
4.104634
0.064492
0.151656
0.068313
0.056089
0.783411
0.734656
0.6749
0.618596
0.576601
0.545033
0
0.000518
0.116344
45,864
3,236
1,217
14.173053
0.685748
0
0
0.93665
0
0.000618
0.854287
0.152625
0
0
0
0
0
0
null
null
0.000927
0.004326
null
null
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
0
0
0
0
1
0
0
1
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
5
dcae96d912559e20b274d32ee80812e93b0b6bab
378
py
Python
bika/lims/browser/fields/__init__.py
hocinebendou/bika.gsoc
85bc0c587de7f52073ae0e89bddbc77bf875f295
[ "MIT" ]
null
null
null
bika/lims/browser/fields/__init__.py
hocinebendou/bika.gsoc
85bc0c587de7f52073ae0e89bddbc77bf875f295
[ "MIT" ]
null
null
null
bika/lims/browser/fields/__init__.py
hocinebendou/bika.gsoc
85bc0c587de7f52073ae0e89bddbc77bf875f295
[ "MIT" ]
null
null
null
from addressfield import AddressField from datetimefield import DateTimeField from durationfield import DurationField from aranalysesfield import ARAnalysesField from interimfieldsfield import InterimFieldsField from referenceresultsfield import ReferenceResultsField from historyawarereferencefield import HistoryAwareReferenceField from coordinatefield import CoordinateField
42
65
0.915344
32
378
10.8125
0.3125
0
0
0
0
0
0
0
0
0
0
0
0.084656
378
8
66
47.25
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
f4be7b1045efb742f732c41b714ecb00ec646734
132
py
Python
AttackPatterns.py
stefanosesia/Cthulu
dc2531d7250a14453b2d83f370d5a37e7f74f49f
[ "BSD-3-Clause-Clear" ]
4
2019-03-23T17:59:20.000Z
2021-05-25T18:03:47.000Z
AttackPatterns.py
stefanosesia/Cthulu
dc2531d7250a14453b2d83f370d5a37e7f74f49f
[ "BSD-3-Clause-Clear" ]
null
null
null
AttackPatterns.py
stefanosesia/Cthulu
dc2531d7250a14453b2d83f370d5a37e7f74f49f
[ "BSD-3-Clause-Clear" ]
null
null
null
exploits = {"445": {"metasploit": ["exploit/linux/samba/is_known_pipename","exploit/multi/samba/usermap_script"]}, } #@TODO add more
44
114
0.734848
17
132
5.529412
0.882353
0
0
0
0
0
0
0
0
0
0
0.024194
0.060606
132
3
115
44
0.733871
0.106061
0
0
0
0
0.711864
0.601695
0
0
0
0.333333
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
1
0
0
0
0
0
0
0
0
0
0
5
f4d2f355b8e36d4aff621ecf3a21feeb0d198f62
94
py
Python
src/rubrix/sdk/__init__.py
drahnreb/rubrix
340e545baf4d65a0d94e3c671ad6c93ff1d59700
[ "Apache-2.0" ]
null
null
null
src/rubrix/sdk/__init__.py
drahnreb/rubrix
340e545baf4d65a0d94e3c671ad6c93ff1d59700
[ "Apache-2.0" ]
null
null
null
src/rubrix/sdk/__init__.py
drahnreb/rubrix
340e545baf4d65a0d94e3c671ad6c93ff1d59700
[ "Apache-2.0" ]
null
null
null
""" A client library for accessing Rubrix """ from .client import AuthenticatedClient, Client
31.333333
47
0.776596
11
94
6.636364
0.818182
0
0
0
0
0
0
0
0
0
0
0
0.138298
94
2
48
47
0.901235
0.393617
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
f4f0b61e0966ba17b82ccb9bead9fa1c6018a32a
183
py
Python
ClassArch/utils/peek_hdf5.py
seungjunlee96/ClassArch
c8a07f5662fbc2bba97bb09055c222e5697482bb
[ "CC0-1.0" ]
null
null
null
ClassArch/utils/peek_hdf5.py
seungjunlee96/ClassArch
c8a07f5662fbc2bba97bb09055c222e5697482bb
[ "CC0-1.0" ]
null
null
null
ClassArch/utils/peek_hdf5.py
seungjunlee96/ClassArch
c8a07f5662fbc2bba97bb09055c222e5697482bb
[ "CC0-1.0" ]
null
null
null
import h5py file_name = '../data/modelnet40/ply_data_labeled0.h5' final_label = h5py.File(file_name, 'r') print(final_label) print(final_label['data']) print(final_label['label'])
18.3
53
0.754098
28
183
4.642857
0.464286
0.307692
0.346154
0
0
0
0
0
0
0
0
0.035714
0.081967
183
9
54
20.333333
0.738095
0
0
0
0
0
0.26776
0.213115
0
0
0
0
0
1
0
false
0
0.166667
0
0.166667
0.5
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
5
520889d86fc73e4f2227dd4c4e9e72efd9c52c62
63
py
Python
rina/__init__.py
ddk50/upfg_rina
c6808ffec6dd8c84ab1c02721ab2b57f29a5283b
[ "MIT" ]
null
null
null
rina/__init__.py
ddk50/upfg_rina
c6808ffec6dd8c84ab1c02721ab2b57f29a5283b
[ "MIT" ]
null
null
null
rina/__init__.py
ddk50/upfg_rina
c6808ffec6dd8c84ab1c02721ab2b57f29a5283b
[ "MIT" ]
null
null
null
from .notifications import Notifications from .msg import Msg
15.75
40
0.825397
8
63
6.5
0.5
0
0
0
0
0
0
0
0
0
0
0
0.142857
63
3
41
21
0.962963
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
520facfab8600a492d46797bc56417563edc70ea
155
py
Python
tests/base.py
sharadmv/deepx
07470e7a579a63427de1d5ff90b9fd00d3f54b61
[ "MIT" ]
74
2015-11-13T02:26:37.000Z
2021-07-29T11:00:45.000Z
tests/base.py
sharadmv/deepx
07470e7a579a63427de1d5ff90b9fd00d3f54b61
[ "MIT" ]
21
2015-12-12T20:33:55.000Z
2019-04-03T02:49:42.000Z
tests/base.py
sharadmv/deepx
07470e7a579a63427de1d5ff90b9fd00d3f54b61
[ "MIT" ]
19
2015-11-23T10:07:01.000Z
2021-08-30T17:06:00.000Z
import unittest class BaseTest(unittest.TestCase): def create_function(self, net): # self.assertTrue(net.is_configured()) return net
19.375
46
0.690323
18
155
5.833333
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.212903
155
7
47
22.142857
0.860656
0.232258
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.25
0.25
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
52106f5f615145c1a366696ccc503c8a0738b1e3
92
py
Python
molecules/utils/__init__.py
yngtodd/molecules
b16281ae3eda64891f603d23472092051c2bc244
[ "MIT" ]
2
2020-03-16T07:47:42.000Z
2021-05-12T11:39:51.000Z
molecules/utils/__init__.py
yngtodd/molecules
b16281ae3eda64891f603d23472092051c2bc244
[ "MIT" ]
55
2018-02-10T23:22:15.000Z
2019-12-22T23:11:13.000Z
molecules/utils/__init__.py
yngtodd/molecules
b16281ae3eda64891f603d23472092051c2bc244
[ "MIT" ]
4
2018-08-06T19:49:35.000Z
2020-06-03T02:07:42.000Z
from matrix_op import triu_to_full from read_file import read_h5_contact_maps, read_h5_RMSD
30.666667
56
0.891304
18
92
4.055556
0.722222
0.164384
0
0
0
0
0
0
0
0
0
0.024096
0.097826
92
2
57
46
0.855422
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
522063b1c67a35bc106ebce4eaa7f9ea1de957f3
17,391
py
Python
reviewboard/search/tests.py
jeyaprabum/Reviewboard
fe4dc611ef961b12b6991a74faa24a22d393fb4a
[ "MIT" ]
1
2019-08-20T03:39:04.000Z
2019-08-20T03:39:04.000Z
reviewboard/search/tests.py
jeyaprabum/Reviewboard
fe4dc611ef961b12b6991a74faa24a22d393fb4a
[ "MIT" ]
1
2019-08-20T03:39:45.000Z
2019-08-20T03:39:45.000Z
reviewboard/search/tests.py
jeyaprabum/Reviewboard
fe4dc611ef961b12b6991a74faa24a22d393fb4a
[ "MIT" ]
null
null
null
from __future__ import unicode_literals from django.contrib.auth.models import User from django.contrib.sites.models import Site from django.core.management import call_command from django.utils import six from djblets.siteconfig.models import SiteConfiguration from djblets.testing.decorators import add_fixtures from haystack import signal_processor from kgb import SpyAgency from reviewboard.admin.server import build_server_url from reviewboard.admin.siteconfig import load_site_config from reviewboard.reviews.models import ReviewRequestDraft from reviewboard.site.urlresolvers import local_site_reverse from reviewboard.testing.testcase import TestCase class SearchTests(SpyAgency, TestCase): """Unit tests for search functionality.""" fixtures = ['test_users'] @classmethod def setUpClass(cls): """Set some initial state for all search-related tests. This will enable search and reset Haystack's configuration based on that, allowing the search tests to run. """ super(SearchTests, cls).setUpClass() siteconfig = SiteConfiguration.objects.get_current() siteconfig.set('search_enable', True) siteconfig.save() load_site_config() def test_search_all(self): """Testing search with review requests and users""" # We already have doc. Now let's create a review request. review_request = self.create_review_request(submitter='doc', publish=True) self.reindex() # Perform the search. response = self.search('doc') context = response.context self.assertEqual(context['hits_returned'], 2) results = context['page'].object_list self.assertEqual(results[0].content_type(), 'auth.user') self.assertEqual(results[0].username, 'doc') self.assertEqual(results[1].content_type(), 'reviews.reviewrequest') self.assertEqual(results[1].summary, review_request.summary) def test_filter_review_requests(self): """Testing search with filtering for review requests""" # We already have doc. Now let's create a review request. 
review_request = self.create_review_request(submitter='doc', publish=True) self.reindex() # Perform the search. response = self.search('doc', filter_by='reviewrequests') context = response.context self.assertEqual(context['hits_returned'], 1) results = context['page'].object_list self.assertEqual(results[0].content_type(), 'reviews.reviewrequest') self.assertEqual(results[0].summary, review_request.summary) def test_filter_users(self): """Testing search with filtering for review requests""" # We already have doc. Now let's create a review request. self.create_review_request(submitter='doc', publish=True) self.reindex() # Perform the search. response = self.search('doc', filter_by='users') context = response.context self.assertEqual(context['hits_returned'], 1) results = context['page'].object_list self.assertEqual(results[0].content_type(), 'auth.user') self.assertEqual(results[0].username, 'doc') @add_fixtures(['test_scmtools']) def test_review_requests_without_private_repo_access(self): """Testing search with private review requests without access to private repositories """ self.client.login(username='grumpy', password='grumpy') user = User.objects.get(username='grumpy') repository = self.create_repository(public=False) review_request = self.create_review_request(repository=repository, publish=True) self.assertFalse(review_request.is_accessible_by(user)) self.reindex() # Perform the search. 
response = self.search(review_request.summary) context = response.context self.assertEqual(context['hits_returned'], 0) @add_fixtures(['test_scmtools']) def test_review_requests_with_private_repo_access(self): """Testing search with private review requests with access to private repositories """ self.client.login(username='grumpy', password='grumpy') user = User.objects.get(username='grumpy') repository = self.create_repository(public=False) repository.users.add(user) review_request = self.create_review_request(repository=repository, publish=True) self.assertTrue(review_request.is_accessible_by(user)) self.reindex() # Perform the search. response = self.search(review_request.summary) context = response.context self.assertEqual(context['hits_returned'], 1) results = context['page'].object_list self.assertEqual(results[0].content_type(), 'reviews.reviewrequest') self.assertEqual(results[0].summary, review_request.summary) @add_fixtures(['test_scmtools']) def test_review_requests_with_private_repo_access_through_group(self): """Testing search with private review requests with access to private repositories through groups """ self.client.login(username='grumpy', password='grumpy') user = User.objects.get(username='grumpy') group = self.create_review_group(invite_only=True) group.users.add(user) repository = self.create_repository(public=False) repository.review_groups.add(group) review_request = self.create_review_request(repository=repository, publish=True) self.assertTrue(review_request.is_accessible_by(user)) self.reindex() # Perform the search. 
response = self.search(review_request.summary) context = response.context self.assertEqual(context['hits_returned'], 1) results = context['page'].object_list self.assertEqual(results[0].content_type(), 'reviews.reviewrequest') self.assertEqual(results[0].summary, review_request.summary) def test_review_requests_without_private_group_access(self): """Testing search with private review requests without access to a private group """ self.client.login(username='grumpy', password='grumpy') user = User.objects.get(username='grumpy') group = self.create_review_group(invite_only=True) review_request = self.create_review_request(publish=True) review_request.target_groups.add(group) self.assertFalse(review_request.is_accessible_by(user)) self.reindex() # Perform the search. response = self.search(review_request.summary) context = response.context self.assertEqual(context['hits_returned'], 0) def test_review_requests_with_private_group_access(self): """Testing search with private review requests with access to a private group """ self.client.login(username='grumpy', password='grumpy') user = User.objects.get(username='grumpy') group = self.create_review_group(invite_only=True) group.users.add(user) review_request = self.create_review_request(publish=True) review_request.target_groups.add(group) self.assertTrue(review_request.is_accessible_by(user)) self.reindex() # Perform the search. 
response = self.search(review_request.summary) context = response.context self.assertEqual(context['hits_returned'], 1) results = context['page'].object_list self.assertEqual(results[0].content_type(), 'reviews.reviewrequest') self.assertEqual(results[0].summary, review_request.summary) @add_fixtures(['test_scmtools']) def test_review_requests_with_private_repo_access_no_private_group(self): """Testing search with private review requests with access to a private repository and without access to a private_group """ self.client.login(username='grumpy', password='grumpy') user = User.objects.get(username='grumpy') group = self.create_review_group(invite_only=True) repository = self.create_repository(public=False) repository.users.add(user) review_request = self.create_review_request(repository=repository, publish=True) review_request.target_groups.add(group) self.assertFalse(review_request.is_accessible_by(user)) self.reindex() # Perform the search. response = self.search(review_request.summary) context = response.context self.assertEqual(context['hits_returned'], 0) @add_fixtures(['test_scmtools']) def test_review_requests_with_private_repository_as_submitter(self): """Testing search with private review requests without access to a private repository as the submitter """ self.client.login(username='grumpy', password='grumpy') user = User.objects.get(username='grumpy') repository = self.create_repository(public=False) repository.users.add(user) review_request = self.create_review_request(repository=repository, submitter=user, publish=True) self.assertTrue(review_request.is_accessible_by(user)) self.reindex() # Perform the search. 
response = self.search(review_request.summary) context = response.context self.assertEqual(context['hits_returned'], 1) results = context['page'].object_list self.assertEqual(results[0].content_type(), 'reviews.reviewrequest') self.assertEqual(results[0].summary, review_request.summary) @add_fixtures(['test_scmtools']) def test_review_requests_with_private_repository_and_target_people(self): """Testing search with private review requests without access to a private repository and user in target_people """ self.client.login(username='grumpy', password='grumpy') user = User.objects.get(username='grumpy') repository = self.create_repository(public=False) review_request = self.create_review_request(repository=repository, publish=True) review_request.target_people.add(user) self.assertFalse(review_request.is_accessible_by(user)) self.reindex() # Perform the search. response = self.search(review_request.summary) context = response.context self.assertEqual(context['hits_returned'], 0) def test_review_requests_with_private_group_and_target_people(self): """Testing search with private review requests without access to a private group and user in target_people """ self.client.login(username='grumpy', password='grumpy') user = User.objects.get(username='grumpy') group = self.create_review_group(invite_only=True) review_request = self.create_review_request(publish=True) review_request.target_groups.add(group) review_request.target_people.add(user) self.assertTrue(review_request.is_accessible_by(user)) self.reindex() # Perform the search. 
response = self.search(review_request.summary) context = response.context self.assertEqual(context['hits_returned'], 1) results = context['page'].object_list self.assertEqual(results[0].content_type(), 'reviews.reviewrequest') self.assertEqual(results[0].summary, review_request.summary) def test_search_review_request_id(self): """Testing search with a review request ID""" site = Site.objects.get_current() site.domain = 'testserver' site.save() self.client.login(username='grumpy', password='grumpy') user = User.objects.get(username='grumpy') review_request = self.create_review_request(publish=True) self.assertTrue(review_request.is_accessible_by(user)) self.reindex() # Perform the search. response = self.search('%d' % review_request.id) self.assertEqual(response.url, build_server_url(review_request.get_absolute_url())) def test_search_numeric_non_id(self): """Testing search with a numeric query that is not a review request ID """ self.client.login(username='grumpy', password='grumpy') user = User.objects.get(username='grumpy') review_request = self.create_review_request(bugs_closed='123,456', publish=True) self.assertTrue(review_request.is_accessible_by(user)) self.reindex() # Perform the search. response = self.search('456') context = response.context self.assertEqual(context['hits_returned'], 1) results = context['page'].object_list self.assertEqual(results[0].content_type(), 'reviews.reviewrequest') self.assertEqual(results[0].summary, review_request.summary) def reindex(self): """Re-index the search database for the unit tests.""" call_command('rebuild_index', interactive=False) def search(self, q, filter_by=None): """Perform a search with the given query and optional filters. The resulting response object is returned. The search results can be inspected by looking at ``response.context``. 
""" options = { 'q': q, } if filter_by: options['filter'] = filter_by return self.client.get(local_site_reverse('search'), options) def test_on_the_fly_indexing_review_requests(self): """Testing on-the-fly indexing for review requests""" self.reindex() siteconfig = SiteConfiguration.objects.get_current() siteconfig.set('search_on_the_fly_indexing', True) siteconfig.save() group = self.create_review_group() invite_only_group = self.create_review_group(name='invite-only-group', invite_only=True) grumpy = User.objects.get(username='grumpy') try: self.spy_on(signal_processor.handle_save) review_request = self.create_review_request(summary='foo', publish=True) self.assertTrue(signal_processor.handle_save.spy.called) draft = ReviewRequestDraft.create(review_request) draft.summary = 'Not foo whatsoever' draft.save() draft.target_people = [grumpy] draft.target_groups = [group, invite_only_group] review_request.publish(review_request.submitter) rsp = self.search('Not foo') finally: siteconfig = SiteConfiguration.objects.get_current() siteconfig.set('search_on_the_fly_indexing', False) siteconfig.save() # There will be one call from each publish. 
self.assertEqual(len(signal_processor.handle_save.spy.calls), 2) self.assertEqual(rsp.context['hits_returned'], 1) result = rsp.context['result'] self.assertEqual(result.summary, 'Not foo whatsoever') self.assertEqual(result.target_users, [six.text_type(grumpy.pk)]) self.assertEqual(result.private_target_groups, [six.text_type(invite_only_group.pk)]) def test_on_the_fly_indexing_users(self): """Testing on-the-fly indexing for users""" self.reindex() siteconfig = SiteConfiguration.objects.get_current() siteconfig.set('search_on_the_fly_indexing', True) siteconfig.save() u = User.objects.get(username='doc') group = self.create_review_group() invite_only_group = self.create_review_group(name='invite-only-group', invite_only=True) try: self.spy_on(signal_processor.handle_save) u.username = 'not_doc' u.first_name = 'Not Doc' u.last_name = 'Dwarf' u.save() u.review_groups = [group, invite_only_group] rsp = self.search('not_doc') finally: siteconfig = SiteConfiguration.objects.get_current() siteconfig.set('search_on_the_fly_indexing', False) siteconfig.save() # There should be three calls: # * one from each of the m2m_changed actions post_clear and post_add; # and # * one from User.save(). self.assertEqual(len(signal_processor.handle_save.spy.calls), 3) self.assertEqual(rsp.context['hits_returned'], 1) result = rsp.context['result'] self.assertEqual(result.groups, 'test-group') self.assertEqual(result.url, '/users/not_doc/') self.assertEqual(result.username, 'not_doc') self.assertEqual(result.full_name, 'Not Doc Dwarf')
38.560976
78
0.651314
1,936
17,391
5.656508
0.110021
0.087846
0.035065
0.037805
0.785225
0.769154
0.753538
0.730253
0.708611
0.699297
0
0.00361
0.251337
17,391
450
79
38.646667
0.837481
0.127997
0
0.678571
0
0
0.073062
0.01835
0
0
0
0
0.203571
1
0.067857
false
0.039286
0.05
0
0.128571
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
5232bc7999efef13019d6f1b2c985c180c87823b
632
py
Python
tests/y2021/test_2021_d3.py
ErikThorsell/advent-of-code-python
8afb3d2dd731b77a421eff9dbd33d1f6a9dfbee3
[ "MIT" ]
2
2021-12-03T16:17:13.000Z
2022-01-27T12:29:45.000Z
tests/y2021/test_2021_d3.py
ErikThorsell/advent-of-code-python
8afb3d2dd731b77a421eff9dbd33d1f6a9dfbee3
[ "MIT" ]
null
null
null
tests/y2021/test_2021_d3.py
ErikThorsell/advent-of-code-python
8afb3d2dd731b77a421eff9dbd33d1f6a9dfbee3
[ "MIT" ]
1
2021-12-29T20:38:38.000Z
2021-12-29T20:38:38.000Z
"""TEST MODULE TEMPLATE""" from advent_of_code.y2021.d3 import solution_1 from advent_of_code.y2021.d3 import solution_2 from advent_of_code.utils.parse import split_str_by_newline def test_solution_1(): example_input = """00100 11110 10110 10111 10101 01111 00111 11100 10000 11001 00010 01010""" example_result = 198 assert solution_1(split_str_by_newline(example_input)) == example_result def test_solution_2(): example_input = """00100 11110 10110 10111 10101 01111 00111 11100 10000 11001 00010 01010""" example_result = 230 assert solution_2(split_str_by_newline(example_input)) == example_result
15.8
76
0.78481
96
632
4.84375
0.395833
0.103226
0.077419
0.103226
0.705376
0.705376
0.705376
0.705376
0.365591
0.365591
0
0.261993
0.142405
632
39
77
16.205128
0.595941
0.031646
0
0.727273
0
0
0.234323
0
0
0
0
0
0.060606
1
0.060606
false
0
0.090909
0
0.151515
0
0
0
0
null
0
0
0
0
1
1
1
0
0
0
1
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
525ffd2065b9ffba4f854faf0944dc840d048dfc
131
py
Python
utl/layout/__init__.py
angellbelger/Electronic-Ballot-Box
2d929c9e42ea326be455f2347f2ee10dcf420ae9
[ "MIT" ]
null
null
null
utl/layout/__init__.py
angellbelger/Electronic-Ballot-Box
2d929c9e42ea326be455f2347f2ee10dcf420ae9
[ "MIT" ]
null
null
null
utl/layout/__init__.py
angellbelger/Electronic-Ballot-Box
2d929c9e42ea326be455f2347f2ee10dcf420ae9
[ "MIT" ]
null
null
null
colour = {'r': '\033[31m', 'g': '\033[32m', 'w': '\033[33m', 'b': '\033[34m', 'p': '\033[35m', 'c': '\033[36m', 'limit': '\033[m'}
65.5
130
0.435115
22
131
2.590909
0.727273
0
0
0
0
0
0
0
0
0
0
0.286957
0.122137
131
1
131
131
0.208696
0
0
0
0
0
0.496183
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
1
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
bff01ceadbd08e1782704d9d69e8bfef5179d8be
21,702
py
Python
tests/test_parser/test_rdfxml.py
cthoyt/pronto
aa1deaf8ed02c0ce8a3cbdb1a4e7dd18436bd017
[ "MIT" ]
182
2016-07-23T00:04:30.000Z
2022-03-31T13:42:03.000Z
tests/test_parser/test_rdfxml.py
cthoyt/pronto
aa1deaf8ed02c0ce8a3cbdb1a4e7dd18436bd017
[ "MIT" ]
160
2016-08-11T19:12:48.000Z
2022-03-28T07:22:42.000Z
tests/test_parser/test_rdfxml.py
cthoyt/pronto
aa1deaf8ed02c0ce8a3cbdb1a4e7dd18436bd017
[ "MIT" ]
44
2016-11-02T10:57:06.000Z
2021-11-05T11:30:23.000Z
import io import os import unittest import warnings import xml.etree.ElementTree as etree import pronto class TestRdfXMLParser(unittest.TestCase): @staticmethod def get_ontology(content): xml = f""" <rdf:RDF xmlns="http://purl.obolibrary.org/obo/TEMP#" xml:base="http://purl.obolibrary.org/obo/TEMP" xmlns:obo="http://purl.obolibrary.org/obo/" xmlns:owl="http://www.w3.org/2002/07/owl#" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:xml="http://www.w3.org/XML/1998/namespace" xmlns:xsd="http://www.w3.org/2001/XMLSchema#" xmlns:doap="http://usefulinc.com/ns/doap#" xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#" xmlns:oboInOwl="http://www.geneontology.org/formats/oboInOwl#"> {content} </rdf:RDF> """ s = io.BytesIO(xml.encode('utf-8')) return pronto.Ontology(s, import_depth=0) def setUp(self): warnings.simplefilter("error") def tearDown(self): warnings.simplefilter(warnings.defaultaction) # --- def test_iao(self): warnings.simplefilter("ignore") path = os.path.join(__file__, "..", "..", "data", "iao.owl") iao = pronto.Ontology(os.path.realpath(path)) self.assertEqual(len(iao.terms()), 245) def test_aeo(self): warnings.simplefilter("ignore") path = os.path.join(__file__, "..", "..", "data", "aeo.owl") aeo = pronto.Ontology(os.path.realpath(path)) self.assertEqual(len(aeo.terms()), 250) self.assertEqual(len(aeo.relationships()), 11) self.assertEqual(aeo["AEO:0000099"].name, "keratin-based structure") self.assertEqual(len(aeo["AEO:0000099"].definition.xrefs), 1) def test_invalid_xml_file(self): self.assertRaises(ValueError, self.get_ontology, "") # ------------------------------------------------------------------------ def test_metadata_auto_generated_by(self): ont = self.get_ontology( """ <owl:Ontology> <oboInOwl:auto-generated-by>pronto</oboInOwl:auto-generated-by> </owl:Ontology> """ ) self.assertEqual(ont.metadata.auto_generated_by, "pronto") def test_metadata_default_namespace(self): ont = self.get_ontology( """ <owl:Ontology> 
<oboInOwl:hasDefaultNamespace rdf:datatype="http://www.w3.org/2001/XMLSchema#string">thing</oboInOwl:hasDefaultNamespace> </owl:Ontology> """ ) self.assertEqual(ont.metadata.default_namespace, "thing") def test_metadata_data_version(self): # owl:versionrIRI ont = self.get_ontology( """ <owl:Ontology rdf:about="http://purl.obolibrary.org/obo/ms.owl"> <owl:versionIRI rdf:resource="http://purl.obolibrary.org/obo/ms/4.1.30/ms.owl"/> </owl:Ontology> """ ) self.assertEqual(ont.metadata.ontology, "ms") self.assertEqual(ont.metadata.data_version, "4.1.30") # doap:Version ont2 = self.get_ontology( "<owl:Ontology><doap:Version>0.1.0</doap:Version></owl:Ontology>" ) self.assertEqual(ont2.metadata.data_version, "0.1.0") def test_metadata_format_version(self): ont = self.get_ontology( """ <owl:Ontology> <oboInOwl:hasOBOFormatVersion>1.2</oboInOwl:hasOBOFormatVersion> </owl:Ontology> """ ) self.assertEqual(ont.metadata.format_version, "1.2") def test_metadata_imports(self): ont = self.get_ontology( """ <owl:Ontology> <owl:imports rdf:resource="http://purl.obolibrary.org/obo/ms.obo"/> </owl:Ontology> """ ) self.assertIn("http://purl.obolibrary.org/obo/ms.obo", ont.metadata.imports) def test_metadata_saved_by(self): ont = self.get_ontology( """ <owl:Ontology> <oboInOwl:savedBy>Martin Larralde</oboInOwl:savedBy> </owl:Ontology> """ ) self.assertEqual(ont.metadata.saved_by, "Martin Larralde") # ------------------------------------------------------------------------ def test_term_consider(self): # Extract from `oboInOwl:consider` text ont = self.get_ontology( """ <owl:Ontology/> <owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001"> <oboInOwl:consider rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:002</oboInOwl:consider> </owl:Class> <owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_002"/> """ ) self.assertIn("TST:001", ont) self.assertIn("TST:002", ont) self.assertIn(ont["TST:002"], ont["TST:001"].consider) # Extract from `oboInOwl:consider` RDF resource 
ont2 = self.get_ontology( """ <owl:Ontology/> <owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001"> <oboInOwl:consider rdf:resource="http://purl.obolibrary.org/obo/TST_002"/> </owl:Class> <owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_002"/> """ ) self.assertIn("TST:001", ont2) self.assertIn("TST:002", ont2) self.assertIn(ont2["TST:002"], ont2["TST:001"].consider) def test_term_definition_as_property(self): ont = self.get_ontology(""" <owl:Ontology/> <owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001"> <obo:IAO_0000115 rdf:datatype="http://www.w3.org/2001/XMLSchema#string">a term</obo:IAO_0000115> <oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id> </owl:Class> """) self.assertIn("TST:001", ont) self.assertEqual(ont["TST:001"].definition, "a term") self.assertEqual(len(ont["TST:001"].definition.xrefs), 0) def test_term_definition_as_axiom(self): ont = self.get_ontology(""" <owl:Ontology/> <owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001"> <obo:IAO_0000115 rdf:datatype="http://www.w3.org/2001/XMLSchema#string">a term</obo:IAO_0000115> <oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id> </owl:Class> <owl:Axiom> <owl:annotatedSource rdf:resource="http://purl.obolibrary.org/obo/TST_001"/> <owl:annotatedProperty rdf:resource="http://purl.obolibrary.org/obo/IAO_0000115"/> <owl:annotatedTarget rdf:datatype="http://www.w3.org/2001/XMLSchema#string">a term</owl:annotatedTarget> <oboInOwl:hasDbXref rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ISBN:1234</oboInOwl:hasDbXref> </owl:Axiom> """) self.assertIn("TST:001", ont) self.assertEqual(ont["TST:001"].definition, "a term") self.assertEqual(list(ont["TST:001"].definition.xrefs)[0], pronto.Xref("ISBN:1234")) def test_term_multiple_labels(self): txt = """ <owl:Ontology/> <owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001"> <rdfs:label>A</rdfs:label> <rdfs:label>B</rdfs:label> </owl:Class> """ 
# check multiple labels is a syntax error in error mode with warnings.catch_warnings(): warnings.simplefilter("error", pronto.warnings.SyntaxWarning) with self.assertRaises(SyntaxError): ont = self.get_ontology(txt) # check multiple labels is fine in ignore mode with warnings.catch_warnings(): warnings.simplefilter("ignore", pronto.warnings.SyntaxWarning) ont = self.get_ontology(txt) self.assertIn(ont['TST:001'].name, ["A", "B"]) def test_term_subclass_of(self): ont = self.get_ontology(""" <owl:Ontology/> <owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001"/> <owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_002"> <rdfs:subClassOf rdf:resource="http://purl.obolibrary.org/obo/TST_001"/> </owl:Class> """) self.assertIn(ont["TST:001"], ont["TST:002"].superclasses().to_set()) self.assertIn(ont["TST:002"], ont["TST:001"].subclasses().to_set()) def test_term_subset(self): ont = self.get_ontology(""" <owl:Ontology rdf:about="http://purl.obolibrary.org/obo/tst.owl"/> <owl:AnnotationProperty rdf:about="http://purl.obolibrary.org/obo/tst#ss"> <rdfs:comment rdf:datatype="http://www.w3.org/2001/XMLSchema#string">a subset</rdfs:comment> <rdfs:subPropertyOf rdf:resource="http://www.geneontology.org/formats/oboInOwl#SubsetProperty"/> </owl:AnnotationProperty> <owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001"> <oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id> <oboInOwl:inSubset rdf:resource="http://purl.obolibrary.org/obo/tst#ss"/> </owl:Class> """) self.assertIn("TST:001", ont) self.assertEqual(ont["TST:001"].subsets, {"ss"}) def test_term_synonym_as_property(self): with warnings.catch_warnings(): warnings.simplefilter("ignore", pronto.warnings.SyntaxWarning) ont = self.get_ontology(""" <owl:Ontology/> <owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001"> <oboInOwl:hasExactSynonym rdf:datatype="http://www.w3.org/2001/XMLSchema#string">stuff</oboInOwl:hasExactSynonym> <oboInOwl:id 
rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id> </owl:Class> """) self.assertIn("TST:001", ont) self.assertEqual(len(ont["TST:001"].synonyms), 1) syn = next(iter(ont["TST:001"].synonyms)) self.assertEqual(syn.description, "stuff") self.assertEqual(syn.scope, "EXACT") self.assertEqual(syn.xrefs, set()) def test_term_synonym_as_axiom(self): with warnings.catch_warnings(): warnings.simplefilter("ignore", pronto.warnings.SyntaxWarning) ont = self.get_ontology(""" <owl:Ontology/> <owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001"> <oboInOwl:hasExactSynonym rdf:datatype="http://www.w3.org/2001/XMLSchema#string">stuff</oboInOwl:hasExactSynonym> <oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id> </owl:Class> <owl:Axiom> <owl:annotatedSource rdf:resource="http://purl.obolibrary.org/obo/TST_001"/> <owl:annotatedProperty rdf:resource="http://www.geneontology.org/formats/oboInOwl#hasExactSynonym"/> <owl:annotatedTarget rdf:datatype="http://www.w3.org/2001/XMLSchema#string">stuff</owl:annotatedTarget> <oboInOwl:hasDbXref rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ISBN:1234</oboInOwl:hasDbXref> </owl:Axiom> """) self.assertIn("TST:001", ont) self.assertEqual(len(ont["TST:001"].synonyms), 1) syn = next(iter(ont["TST:001"].synonyms)) self.assertEqual(syn.description, "stuff") self.assertEqual(syn.scope, "EXACT") self.assertEqual(syn.xrefs, {pronto.Xref("ISBN:1234")}) def test_term_relationship(self): ont = self.get_ontology(""" <owl:Ontology/> <owl:ObjectProperty rdf:about="http://purl.obolibrary.org/obo/RO_0002202"> <rdf:type rdf:resource="http://www.w3.org/2002/07/owl#TransitiveProperty"/> <oboInOwl:hasDbXref rdf:datatype="http://www.w3.org/2001/XMLSchema#string">RO:0002202</oboInOwl:hasDbXref> <oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string"></oboInOwl:id> <oboInOwl:shorthand rdf:datatype="http://www.w3.org/2001/XMLSchema#string">develops_from</oboInOwl:shorthand> 
<rdfs:label rdf:datatype="http://www.w3.org/2001/XMLSchema#string">develops from</rdfs:label> </owl:ObjectProperty> <owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001"/> <owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_002"> <rdfs:subClassOf> <owl:Restriction> <owl:onProperty rdf:resource="http://purl.obolibrary.org/obo/RO_0002202"/> <owl:someValuesFrom rdf:resource="http://purl.obolibrary.org/obo/TST_001"/> </owl:Restriction> </rdfs:subClassOf> </owl:Class> """) self.assertIn("develops_from", [r.id for r in ont.relationships()]) develops_from = ont.get_relationship("develops_from") self.assertIn(ont["TST:001"], ont["TST:002"].relationships[develops_from]) def test_term_xref_as_property_resource(self): with warnings.catch_warnings(): warnings.simplefilter("ignore", pronto.warnings.SyntaxWarning) ont = self.get_ontology(""" <owl:Ontology/> <owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001"> <oboInOwl:hasDbXref rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ISBN:1234</oboInOwl:hasDbXref> <oboInOwl:id rdf:resource="http://purl.obolibrary.org/obo/ISBN_1234"/> </owl:Class> """) self.assertEqual(len(ont["TST:001"].xrefs), 1) self.assertEqual(list(ont["TST:001"].xrefs)[0].id, "ISBN:1234") def test_term_xref_as_property_text(self): with warnings.catch_warnings(): warnings.simplefilter("ignore", pronto.warnings.SyntaxWarning) ont = self.get_ontology(""" <owl:Ontology/> <owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001"> <oboInOwl:hasDbXref rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ISBN:1234</oboInOwl:hasDbXref> <oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id> </owl:Class> """) self.assertEqual(len(ont["TST:001"].xrefs), 1) self.assertEqual(list(ont["TST:001"].xrefs)[0].id, "ISBN:1234") def test_term_xref_as_axiom_without_description(self): with warnings.catch_warnings(): warnings.simplefilter("ignore", pronto.warnings.SyntaxWarning) ont = self.get_ontology(""" 
<owl:Ontology/> <owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001"> <oboInOwl:hasDbXref rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ISBN:1234</oboInOwl:hasDbXref> <oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id> </owl:Class> <owl:Axiom> <owl:annotatedSource rdf:resource="http://purl.obolibrary.org/obo/TST_001"/> <owl:annotatedProperty rdf:resource="http://www.geneontology.org/formats/oboInOwl#hasDbXref"/> <owl:annotatedTarget rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ISBN:1234</owl:annotatedTarget> </owl:Axiom> """) self.assertEqual(len(ont["TST:001"].xrefs), 1) self.assertEqual(list(ont["TST:001"].xrefs)[0].id, "ISBN:1234") self.assertEqual(list(ont["TST:001"].xrefs)[0].description, None) def test_term_xref_as_axiom_with_description(self): with warnings.catch_warnings(): warnings.simplefilter("ignore", pronto.warnings.SyntaxWarning) ont = self.get_ontology(""" <owl:Ontology/> <owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001"> <oboInOwl:hasDbXref rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ISBN:1234</oboInOwl:hasDbXref> <oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id> </owl:Class> <owl:Axiom> <owl:annotatedSource rdf:resource="http://purl.obolibrary.org/obo/TST_001"/> <owl:annotatedProperty rdf:resource="http://www.geneontology.org/formats/oboInOwl#hasDbXref"/> <owl:annotatedTarget rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ISBN:1234</owl:annotatedTarget> <rdfs:label rdf:datatype="http://www.w3.org/2001/XMLSchema#string">a great book</rdfs:label> </owl:Axiom> """) self.assertEqual(len(ont["TST:001"].xrefs), 1) self.assertEqual(list(ont["TST:001"].xrefs)[0].id, "ISBN:1234") self.assertEqual(list(ont["TST:001"].xrefs)[0].description, "a great book") # ------------------------------------------------------------------------ def test_relationship_cyclic(self): ont = self.get_ontology( """ <owl:Ontology/> 
<owl:ObjectProperty rdf:about="http://purl.obolibrary.org/obo/TST_001"> <oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id> <oboInOwl:is_cyclic rdf:datatype="http://www.w3.org/2001/XMLSchema#boolean">true</oboInOwl:is_cyclic> </owl:ObjectProperty> """ ) self.assertIn("TST:001", ont.relationships()) self.assertTrue(ont.get_relationship("TST:001").cyclic) def test_relationship_functional(self): ont = self.get_ontology( """ <owl:Ontology/> <owl:ObjectProperty rdf:about="http://purl.obolibrary.org/obo/TST_001"> <oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id> <rdf:type rdf:resource="http://www.w3.org/2002/07/owl#FunctionalProperty"/> </owl:ObjectProperty> """ ) self.assertIn("TST:001", ont.relationships()) self.assertTrue(ont.get_relationship("TST:001").functional) def test_relationship_multiple_labels(self): txt = """ <owl:Ontology/> <owl:ObjectProperty rdf:about="http://purl.obolibrary.org/obo/TST_001"> <rdfs:label>A</rdfs:label> <rdfs:label>B</rdfs:label> </owl:ObjectProperty> """ # check multiple labels is a syntax error in error mode with warnings.catch_warnings(): warnings.simplefilter("error", pronto.warnings.SyntaxWarning) with self.assertRaises(SyntaxError): ont = self.get_ontology(txt) # check multiple labels is fine in ignore mode with warnings.catch_warnings(): warnings.simplefilter("ignore", pronto.warnings.SyntaxWarning) ont = self.get_ontology(txt) self.assertIn(ont.get_relationship('TST:001').name, ["A", "B"]) def test_relationship_reflexive(self): ont = self.get_ontology( """ <owl:Ontology/> <owl:ObjectProperty rdf:about="http://purl.obolibrary.org/obo/TST_001"> <oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id> <rdf:type rdf:resource="http://www.w3.org/2002/07/owl#ReflexiveProperty"/> </owl:ObjectProperty> """ ) self.assertIn("TST:001", ont.relationships()) self.assertTrue(ont.get_relationship("TST:001").reflexive) def 
test_relationship_subset(self): ont = self.get_ontology(""" <owl:Ontology rdf:about="http://purl.obolibrary.org/obo/tst.owl"/> <owl:AnnotationProperty rdf:about="http://purl.obolibrary.org/obo/tst#ss"> <rdfs:comment rdf:datatype="http://www.w3.org/2001/XMLSchema#string">a subset</rdfs:comment> <rdfs:subPropertyOf rdf:resource="http://www.geneontology.org/formats/oboInOwl#SubsetProperty"/> </owl:AnnotationProperty> <owl:ObjectProperty rdf:about="http://purl.obolibrary.org/obo/tst#friend_of"> <oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">friend_of</oboInOwl:id> <oboInOwl:inSubset rdf:resource="http://purl.obolibrary.org/obo/tst#ss"/> </owl:ObjectProperty> """) self.assertIn("friend_of", ont.relationships()) self.assertEqual(ont.get_relationship("friend_of").subsets, {"ss"}) def test_relationship_symmetric(self): ont = self.get_ontology( """ <owl:Ontology/> <owl:ObjectProperty rdf:about="http://purl.obolibrary.org/obo/TST_001"> <oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id> <rdf:type rdf:resource="http://www.w3.org/2002/07/owl#SymmetricProperty"/> </owl:ObjectProperty> """ ) self.assertIn("TST:001", ont.relationships()) self.assertTrue(ont.get_relationship("TST:001").symmetric)
48.013274
137
0.589531
2,476
21,702
5.086026
0.084814
0.037164
0.06861
0.080044
0.800762
0.790836
0.75407
0.730326
0.699913
0.68141
0
0.0427
0.245692
21,702
451
138
48.119734
0.726573
0.02456
0
0.583333
0
0.138889
0.550384
0.032032
0
0
0
0
0.203704
1
0.095679
false
0
0.027778
0
0.12963
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
874de35dfed9fa1e42f40b3c1cf4355d5c87ef70
95
py
Python
app/parking/__init__.py
joannekoye/parking-system
b70c48dc3e648e69dc3902572a7457e57dfc4f0b
[ "MIT" ]
null
null
null
app/parking/__init__.py
joannekoye/parking-system
b70c48dc3e648e69dc3902572a7457e57dfc4f0b
[ "MIT" ]
1
2020-05-14T08:43:41.000Z
2020-05-14T08:43:41.000Z
app/parking/__init__.py
joannekoye/parking-system
b70c48dc3e648e69dc3902572a7457e57dfc4f0b
[ "MIT" ]
null
null
null
# Blueprint module for the "parking" feature of the application.
from flask import Blueprint

# Blueprint instance registered by the application factory under 'parking'.
parking = Blueprint('parking', __name__)

# Imported at the bottom so that views/forms can themselves import
# ``parking`` without a circular import; presumably they attach routes and
# forms to this blueprint — verify against views.py / forms.py.
from . import views,forms
23.75
40
0.789474
12
95
5.916667
0.666667
0.450704
0
0
0
0
0
0
0
0
0
0
0.126316
95
4
41
23.75
0.855422
0
0
0
0
0
0.072917
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0.666667
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
1
0
5
8759ba42917de9b8c83de47fe9fc973f6bcc177d
1,802
py
Python
tests/test_grammar.py
bwhmather/python-lalr
8a98db00c05ee7c13a5e5ec7b580d28e04cb43ca
[ "BSD-3-Clause" ]
1
2018-04-11T14:45:58.000Z
2018-04-11T14:45:58.000Z
tests/test_grammar.py
bwhmather/python-lalr
8a98db00c05ee7c13a5e5ec7b580d28e04cb43ca
[ "BSD-3-Clause" ]
null
null
null
tests/test_grammar.py
bwhmather/python-lalr
8a98db00c05ee7c13a5e5ec7b580d28e04cb43ca
[ "BSD-3-Clause" ]
null
null
null
import pytest

from lalr.grammar import Grammar, Production


def test_production_name_immutable():
    """A production's name cannot be reassigned after construction."""
    prod = Production("A", ("B",))
    with pytest.raises(AttributeError):
        prod.name = "a"


def test_production_symbols_immutable():
    """A production's symbol tuple cannot be reassigned after construction."""
    prod = Production("A", ("B",))
    with pytest.raises(AttributeError):
        prod.symbols = ("b",)


def test_terminals_loop():
    """The terminal set is found even when nonterminals form a cycle."""
    productions = [
        Production("A", ("B",)),
        Production("A", ("x",)),
        Production("B", ("A",)),
    ]
    grammar = Grammar(productions)
    assert grammar.terminals() == {"x"}


def test_terminals_example():
    """Terminals of the classic pointer-assignment example grammar."""
    productions = [
        Production("N", ("V", "=", "E")),
        Production("N", ("E",)),
        Production("E", ("V",)),
        Production("V", ("x",)),
        Production("V", ("*", "E")),
    ]
    grammar = Grammar(productions)
    assert grammar.terminals() == {"x", "=", "*"}


def test_first_set_loop():
    """FIRST sets terminate and are correct on a cyclic grammar."""
    grammar = Grammar([
        Production("A", ("B",)),
        Production("A", ("x",)),
        Production("B", ("A",)),
    ])
    for symbol in ("x", "A", "B"):
        assert grammar.first_set(symbol) == {"x"}


def test_first_set_example():
    """FIRST sets for every symbol of the example grammar."""
    grammar = Grammar([
        Production("N", ("V", "=", "E")),
        Production("N", ("E",)),
        Production("E", ("V",)),
        Production("V", ("x",)),
        Production("V", ("*", "E")),
    ])
    expected = {
        "x": {"x"},
        "=": {"="},
        "*": {"*"},
        "N": {"*", "x"},
        "E": {"*", "x"},
        "V": {"*", "x"},
    }
    for symbol, first in expected.items():
        assert grammar.first_set(symbol) == first
24.684932
49
0.476138
174
1,802
4.787356
0.149425
0.171669
0.194478
0.226891
0.833133
0.72509
0.684274
0.684274
0.633854
0.633854
0
0
0.292453
1,802
72
50
25.027778
0.653333
0
0
0.45614
0
0
0.038291
0
0
0
0
0
0.192982
1
0.105263
false
0
0.035088
0
0.140351
0
0
0
0
null
0
1
1
1
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
5e468d2413b70f1452a5d871c307e98ee2f6ee09
19
py
Python
checkov/version.py
mourlos/checkov
9d74c43eebd1d88f2af85ce16647c9ac0d575117
[ "Apache-2.0" ]
null
null
null
checkov/version.py
mourlos/checkov
9d74c43eebd1d88f2af85ce16647c9ac0d575117
[ "Apache-2.0" ]
null
null
null
checkov/version.py
mourlos/checkov
9d74c43eebd1d88f2af85ce16647c9ac0d575117
[ "Apache-2.0" ]
null
null
null
# Package version string.
version = '2.0.27'
9.5
18
0.578947
4
19
2.75
1
0
0
0
0
0
0
0
0
0
0
0.25
0.157895
19
1
19
19
0.4375
0
0
0
0
0
0.315789
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
5e4bc06e2b522ca4dddf996ce6b9548e74976792
157
py
Python
tests/SampleApps/python/flask-setup-py-requirement-txt/app.py
samruddhikhandale/Oryx
9031b36c02967bb4000645950680572a8a76fa56
[ "MIT" ]
403
2019-05-07T23:40:45.000Z
2022-03-31T11:14:07.000Z
tests/SampleApps/python/flask-setup-py-requirement-txt/app.py
samruddhikhandale/Oryx
9031b36c02967bb4000645950680572a8a76fa56
[ "MIT" ]
514
2019-05-07T17:00:14.000Z
2022-03-31T20:09:16.000Z
tests/SampleApps/python/flask-setup-py-requirement-txt/app.py
samruddhikhandale/Oryx
9031b36c02967bb4000645950680572a8a76fa56
[ "MIT" ]
108
2019-05-07T23:40:47.000Z
2022-03-30T00:15:19.000Z
from flask import Flask
from datetime import datetime

# WSGI application object picked up by the hosting environment.
app = Flask(__name__)


@app.route("/")
def hello():
    """Root endpoint: return a greeting followed by the current server time."""
    now = datetime.now()
    return f"Hello World! {now}"
15.7
48
0.687898
21
157
4.952381
0.619048
0
0
0
0
0
0
0
0
0
0
0
0.171975
157
9
49
17.444444
0.8
0
0
0
0
0
0.089744
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0.166667
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
5
5ea028f7100c4cd8651a24292deb65d77ff55a0e
8,628
py
Python
segments_helpers.py
NateThom/partial_blackout
58116cec821f94358f6bbb2c9d8873dea2d62a58
[ "MIT" ]
null
null
null
segments_helpers.py
NateThom/partial_blackout
58116cec821f94358f6bbb2c9d8873dea2d62a58
[ "MIT" ]
null
null
null
segments_helpers.py
NateThom/partial_blackout
58116cec821f94358f6bbb2c9d8873dea2d62a58
[ "MIT" ]
null
null
null
import statistics import numpy as np def landmark_points(image_name, landmarks): ########## Hardcoded / Predefined Landmarks ######### # Eye Brow brow_points = np.array([ [landmarks.loc[image_name, f'x_{17}'], landmarks.loc[image_name, f'y_{17}']], [landmarks.loc[image_name, f'x_{18}'], landmarks.loc[image_name, f'y_{18}']], [landmarks.loc[image_name, f'x_{19}'], landmarks.loc[image_name, f'y_{19}']], [landmarks.loc[image_name, f'x_{20}'], landmarks.loc[image_name, f'y_{20}']], [landmarks.loc[image_name, f'x_{21}'], landmarks.loc[image_name, f'y_{21}']], [landmarks.loc[image_name, f'x_{22}'], landmarks.loc[image_name, f'y_{22}']], [landmarks.loc[image_name, f'x_{23}'], landmarks.loc[image_name, f'y_{23}']], [landmarks.loc[image_name, f'x_{24}'], landmarks.loc[image_name, f'y_{24}']], [landmarks.loc[image_name, f'x_{25}'], landmarks.loc[image_name, f'y_{25}']], [landmarks.loc[image_name, f'x_{26}'], landmarks.loc[image_name, f'y_{26}']], ], dtype=np.int32) # Eye eye_points = np.array([ [landmarks.loc[image_name, f'x_{36}'], landmarks.loc[image_name, f'y_{36}']], [landmarks.loc[image_name, f'x_{37}'], landmarks.loc[image_name, f'y_{37}']], [landmarks.loc[image_name, f'x_{38}'], landmarks.loc[image_name, f'y_{38}']], [landmarks.loc[image_name, f'x_{39}'], landmarks.loc[image_name, f'y_{39}']], [landmarks.loc[image_name, f'x_{40}'], landmarks.loc[image_name, f'y_{40}']], [landmarks.loc[image_name, f'x_{41}'], landmarks.loc[image_name, f'y_{41}']], [landmarks.loc[image_name, f'x_{36}'], landmarks.loc[image_name, f'y_{36}']], [landmarks.loc[image_name, f'x_{42}'], landmarks.loc[image_name, f'y_{42}']], [landmarks.loc[image_name, f'x_{43}'], landmarks.loc[image_name, f'y_{43}']], [landmarks.loc[image_name, f'x_{44}'], landmarks.loc[image_name, f'y_{44}']], [landmarks.loc[image_name, f'x_{45}'], landmarks.loc[image_name, f'y_{45}']], [landmarks.loc[image_name, f'x_{46}'], landmarks.loc[image_name, f'y_{46}']], [landmarks.loc[image_name, f'x_{47}'], 
landmarks.loc[image_name, f'y_{47}']], [landmarks.loc[image_name, f'x_{42}'], landmarks.loc[image_name, f'y_{42}']], ], dtype=np.int32) # Nose nose_points = np.array([ [landmarks.loc[image_name, f'x_{31}'] - ( landmarks.loc[image_name, f'x_{32}'] - landmarks.loc[ image_name, f'x_{31}']), landmarks.loc[image_name, f'y_{31}']], [landmarks.loc[image_name, f'x_{32}'], landmarks.loc[image_name, f'y_{32}']], [landmarks.loc[image_name, f'x_{33}'], landmarks.loc[image_name, f'y_{33}']], [landmarks.loc[image_name, f'x_{34}'], landmarks.loc[image_name, f'y_{34}']], [landmarks.loc[image_name, f'x_{35}'] + ( landmarks.loc[image_name, f'x_{35}'] - landmarks.loc[ image_name, f'x_{34}']), landmarks.loc[image_name, f'y_{35}']], [landmarks.loc[image_name, f'x_{42}'], landmarks.loc[image_name, f'y_{42}']], [landmarks.loc[image_name, f'x_{27}'], landmarks.loc[image_name, f'y_{27}']], [landmarks.loc[image_name, f'x_{39}'], landmarks.loc[image_name, f'y_{39}']], [landmarks.loc[image_name, f'x_{31}'] - ( landmarks.loc[image_name, f'x_{32}'] - landmarks.loc[ image_name, f'x_{31}']), landmarks.loc[image_name, f'y_{31}']], ], dtype=np.int32) # Mouth mouth_points = np.array([ [landmarks.loc[image_name, f'x_{48}'], landmarks.loc[image_name, f'y_{48}']], [landmarks.loc[image_name, f'x_{49}'], landmarks.loc[image_name, f'y_{49}']], [landmarks.loc[image_name, f'x_{50}'], landmarks.loc[image_name, f'y_{50}']], [landmarks.loc[image_name, f'x_{51}'], landmarks.loc[image_name, f'y_{51}']], [landmarks.loc[image_name, f'x_{52}'], landmarks.loc[image_name, f'y_{52}']], [landmarks.loc[image_name, f'x_{53}'], landmarks.loc[image_name, f'y_{53}']], [landmarks.loc[image_name, f'x_{54}'], landmarks.loc[image_name, f'y_{54}']], [landmarks.loc[image_name, f'x_{55}'], landmarks.loc[image_name, f'y_{55}']], [landmarks.loc[image_name, f'x_{56}'], landmarks.loc[image_name, f'y_{56}']], [landmarks.loc[image_name, f'x_{57}'], landmarks.loc[image_name, f'y_{57}']], [landmarks.loc[image_name, f'x_{58}'], 
landmarks.loc[image_name, f'y_{58}']], [landmarks.loc[image_name, f'x_{59}'], landmarks.loc[image_name, f'y_{59}']], [landmarks.loc[image_name, f'x_{48}'], landmarks.loc[image_name, f'y_{48}']] ], dtype=np.int32) # Neck neck_points = np.array([ [landmarks.loc[image_name, f'x_{5}'], landmarks.loc[image_name, f'y_{5}']], [landmarks.loc[image_name, f'x_{6}'], landmarks.loc[image_name, f'y_{6}']], [landmarks.loc[image_name, f'x_{7}'], landmarks.loc[image_name, f'y_{7}']], [landmarks.loc[image_name, f'x_{8}'], landmarks.loc[image_name, f'y_{8}']], [landmarks.loc[image_name, f'x_{9}'], landmarks.loc[image_name, f'y_{9}']], [landmarks.loc[image_name, f'x_{10}'], landmarks.loc[image_name, f'y_{10}']], [landmarks.loc[image_name, f'x_{11}'], landmarks.loc[image_name, f'y_{11}']], [landmarks.loc[image_name, f'x_{11}'], landmarks.loc[image_name, f'y_{11}'] - ( landmarks.loc[image_name, f'y_{33}'] - landmarks.loc[ image_name, f'y_{58}'])], [landmarks.loc[image_name, f'x_{10}'], landmarks.loc[image_name, f'y_{10}'] - ( landmarks.loc[image_name, f'y_{33}'] - landmarks.loc[ image_name, f'y_{58}'])], [landmarks.loc[image_name, f'x_{9}'], landmarks.loc[image_name, f'y_{9}'] - ( landmarks.loc[image_name, f'y_{33}'] - landmarks.loc[ image_name, f'y_{58}'])], [landmarks.loc[image_name, f'x_{8}'], landmarks.loc[image_name, f'y_{8}'] - ( landmarks.loc[image_name, f'y_{33}'] - landmarks.loc[ image_name, f'y_{58}'])], [landmarks.loc[image_name, f'x_{7}'], landmarks.loc[image_name, f'y_{7}'] - ( landmarks.loc[image_name, f'y_{33}'] - landmarks.loc[ image_name, f'y_{58}'])], [landmarks.loc[image_name, f'x_{6}'], landmarks.loc[image_name, f'y_{6}'] - ( landmarks.loc[image_name, f'y_{33}'] - landmarks.loc[ image_name, f'y_{58}'])], [landmarks.loc[image_name, f'x_{5}'], landmarks.loc[image_name, f'y_{5}'] - ( landmarks.loc[image_name, f'y_{33}'] - landmarks.loc[ image_name, f'y_{58}'])], ], dtype=np.int32) ##################################################### return brow_points, eye_points, 
nose_points, mouth_points, neck_points def top_to_bottom_segments(path_to_celeba_image, landmarks): eye_brow_points, eye_points, nose_points, mouth_points, chin_points = landmark_points(path_to_celeba_image[-10:], landmarks) # list of segments, only considering vertical (y) axis segments = [min(eye_brow_points[0:len(eye_brow_points), 1]), min(eye_points[0:len(eye_points), 1]), max(eye_points[0:len(eye_points),1]), max(nose_points[0:len(nose_points),1]), max(mouth_points[0:len(mouth_points),1]), statistics.mean(chin_points[0:len(chin_points),1])] return segments def bottom_to_top_segments(path_to_celeba_image, path_to_landmarks): #landmarks = pd.read_csv(path_to_landmarks, sep=',') #landmarks.set_index('image_name', inplace=True) #image = cv2.imread(path_to_celeba_image) eye_brow_points, eye_points, nose_points, mouth_points, chin_points = landmark_points(path_to_celeba_image[-10:], landmarks) # list of segments, only considering vertical (y) axis segments = [max(chin_points[0:len(chin_points), 1]), max(mouth_points[0:len(mouth_points), 1]), min(mouth_points[0:len(mouth_points), 1]), max(nose_points[0:len(nose_points), 1]), max(eye_points[0:len(eye_points), 1]), max(eye_brow_points[0:len(eye_brow_points), 1]) ] return segments
61.191489
128
0.584492
1,252
8,628
3.738818
0.08147
0.273019
0.508438
0.628071
0.90878
0.898099
0.602008
0.582354
0.573809
0.538347
0
0.044303
0.217779
8,628
141
129
61.191489
0.649281
0.035466
0
0.37931
0
0
0.099515
0
0
0
0
0
0
1
0.025862
false
0
0.017241
0
0.068966
0
0
0
0
null
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
5ea7485a195bd839653b8d2c62bb55fdbe019d1b
29
py
Python
runner.py
DiZiNnEs/OLX-parser
731ca4a8d32a7bba12ffa2ff0e332c56f16e24be
[ "MIT" ]
1
2020-09-19T18:39:39.000Z
2020-09-19T18:39:39.000Z
runner.py
Putri777/OLX-parser
5c855533c83341bfa79cbe522a50a7463da61590
[ "MIT" ]
null
null
null
runner.py
Putri777/OLX-parser
5c855533c83341bfa79cbe522a50a7463da61590
[ "MIT" ]
1
2020-09-18T20:55:07.000Z
2020-09-18T20:55:07.000Z
# Entry-point script: delegate to the package's main().
from src import main

# NOTE(review): main() runs on import as well as on direct execution;
# consider an ``if __name__ == "__main__":`` guard if this module is ever
# imported rather than run as a script.
main()
7.25
20
0.724138
5
29
4.2
0.8
0
0
0
0
0
0
0
0
0
0
0
0.206897
29
3
21
9.666667
0.913043
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
0d5f54faefa8e8879faecfe3b9b74073edc0eb43
105
py
Python
7_Sarven_Desert/357-Harrowland/harrowland.py
katitek/Code-Combat
fbda1ac0ae4a2e2cbfce21492a2caec8098f1bef
[ "MIT" ]
null
null
null
7_Sarven_Desert/357-Harrowland/harrowland.py
katitek/Code-Combat
fbda1ac0ae4a2e2cbfce21492a2caec8098f1bef
[ "MIT" ]
null
null
null
7_Sarven_Desert/357-Harrowland/harrowland.py
katitek/Code-Combat
fbda1ac0ae4a2e2cbfce21492a2caec8098f1bef
[ "MIT" ]
null
null
null
# Advanced solutions for multiplayer battles and brawls will be ready once all levels are finished
52.5
104
0.828571
16
105
5.4375
0.875
0.137931
0
0
0
0
0
0
0
0
0
0
0.161905
105
1
105
105
0.988636
0.971429
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
1
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
0d6198281f5001c84ecdecab4794751e06e52c89
75
py
Python
neo4jdriver/views.py
absalon-james/cloud_snitch_web
85e00b68b7cd267ce1173a01f565a86c14d5b087
[ "Apache-2.0" ]
1
2021-05-21T01:01:16.000Z
2021-05-21T01:01:16.000Z
neo4jdriver/views.py
absalon-james/cloud_snitch_web
85e00b68b7cd267ce1173a01f565a86c14d5b087
[ "Apache-2.0" ]
50
2021-05-20T21:00:55.000Z
2022-03-12T00:59:18.000Z
neo4jdriver/views.py
absalon-james/cloud_snitch_web
85e00b68b7cd267ce1173a01f565a86c14d5b087
[ "Apache-2.0" ]
null
null
null
from django.shortcuts import render # noqa F401 # Create your views here.
18.75
47
0.773333
11
75
5.272727
1
0
0
0
0
0
0
0
0
0
0
0.048387
0.173333
75
3
48
25
0.887097
0.44
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
0d8edf7bcdef5842f1bfd01f58ac8e4ef8b29ff5
1,096
py
Python
test/question.py
guoweikuang/zhihu-api
84d15bc7fc6fbf14fe4a170fb18aa0a132c213b3
[ "MIT" ]
1,065
2017-04-16T14:22:15.000Z
2022-03-17T13:10:29.000Z
test/question.py
guoweikuang/zhihu-api
84d15bc7fc6fbf14fe4a170fb18aa0a132c213b3
[ "MIT" ]
44
2017-04-17T07:07:45.000Z
2021-04-13T03:32:30.000Z
test/question.py
guoweikuang/zhihu-api
84d15bc7fc6fbf14fe4a170fb18aa0a132c213b3
[ "MIT" ]
378
2017-04-17T06:51:35.000Z
2022-02-23T03:17:29.000Z
# encoding: utf-8 import unittest from zhihu import Question class QuestionTestCase(unittest.TestCase): def test_follow_question_with_id(self): data = Question(id=32096743).follow_question() self.assertEqual({"is_following": True}, data) def test_unfollow_question_with_id(self): data = Question(id=32096743).unfollow_question() self.assertEqual({"is_following": False}, data) def test_follow_question_with_url(self): data = Question(url='https://www.zhihu.com/question/58684385').follow_question() self.assertEqual({"is_following": True}, data) def test_follow_question_with_answer_url(self): """ 也支持回答的URL,因为从回答中也能找到问题的ID :return: """ data = Question(url='https://www.zhihu.com/question/59001738/answer/160832685').follow_question() self.assertEqual({"is_following": True}, data) def test_unfollow_question_with_url(self): data = Question(url='https://www.zhihu.com/question/58684385').unfollow_question() self.assertEqual({"is_following": False}, data)
34.25
105
0.694343
129
1,096
5.658915
0.27907
0.115068
0.157534
0.171233
0.791781
0.773973
0.731507
0.731507
0.445205
0.445205
0
0.055494
0.17792
1,096
31
106
35.354839
0.754717
0.046533
0
0.277778
0
0
0.1917
0
0
0
0
0
0.277778
1
0.277778
false
0
0.111111
0
0.444444
0
0
0
0
null
0
0
1
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
5
0d94830753bf4d5cb72b3ee2d29b788adf71448f
88
py
Python
src/ansible_navigator/image_manager/__init__.py
ekmixon/ansible-navigator
9903d82ac76a4aee61a64c2e5f19f5ccca3cf136
[ "Apache-2.0", "MIT" ]
134
2021-03-26T17:44:49.000Z
2022-03-31T13:15:52.000Z
src/ansible_navigator/image_manager/__init__.py
cidrblock/ansible-navigator
674e5edce4d4181e6f79b6f24b590a347156665d
[ "Apache-2.0", "MIT" ]
631
2021-03-26T19:38:32.000Z
2022-03-31T22:57:36.000Z
src/ansible_navigator/image_manager/__init__.py
cidrblock/ansible-navigator
674e5edce4d4181e6f79b6f24b590a347156665d
[ "Apache-2.0", "MIT" ]
48
2021-03-26T17:44:29.000Z
2022-03-08T21:12:26.000Z
"""image manager""" from .puller import ImagePuller from .inspector import inspect_all
17.6
34
0.784091
11
88
6.181818
0.818182
0
0
0
0
0
0
0
0
0
0
0
0.125
88
4
35
22
0.883117
0.147727
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
0da381b34f4d59767ef4e6edfda8eedf7b5a92fd
135
py
Python
pic_parser/utils.py
onakanob/ehd_exsitu
28dcc3366f09f149b2d45fe51146d51c9be65c2f
[ "MIT" ]
null
null
null
pic_parser/utils.py
onakanob/ehd_exsitu
28dcc3366f09f149b2d45fe51146d51c9be65c2f
[ "MIT" ]
null
null
null
pic_parser/utils.py
onakanob/ehd_exsitu
28dcc3366f09f149b2d45fe51146d51c9be65c2f
[ "MIT" ]
null
null
null
import numpy as np def cell_to_array(string): split_string = string[1:-1].split() return np.array(split_string).astype(float)
22.5
47
0.725926
22
135
4.272727
0.636364
0.234043
0
0
0
0
0
0
0
0
0
0.017391
0.148148
135
6
47
22.5
0.8
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.25
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
0de32b91134ada897cd65028115f0fd17b1bfc96
69
py
Python
asaplib/kernel/__init__.py
BingqingCheng/ASAP
a92dd34eaa092dcbe46163e000ebd2ccee22f8ae
[ "MIT" ]
74
2020-01-09T10:38:39.000Z
2022-03-04T15:09:05.000Z
asaplib/kernel/__init__.py
FelixWodaczek/ASAP
d34a064cd7e409ad8b5ae0dec4f1c0a621717773
[ "MIT" ]
31
2020-01-30T13:15:42.000Z
2022-03-03T05:42:51.000Z
asaplib/kernel/__init__.py
FelixWodaczek/ASAP
d34a064cd7e409ad8b5ae0dec4f1c0a621717773
[ "MIT" ]
14
2020-02-23T15:03:31.000Z
2022-03-04T15:04:04.000Z
from .ml_kernel_operations import * from .kernel_transforms import *
23
35
0.826087
9
69
6
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.115942
69
2
36
34.5
0.885246
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
219a2fe56ae20108818f8f0d4c45f6c94472b2e4
18
py
Python
apps/users/views.py
vuonghv/django_rocket
d0dacddb9a2464327b4bbfb03cb199e7dafe8f28
[ "MIT" ]
2
2018-11-19T07:43:31.000Z
2018-11-20T03:36:00.000Z
apps/users/views.py
vuonghv/django_rocket
d0dacddb9a2464327b4bbfb03cb199e7dafe8f28
[ "MIT" ]
null
null
null
apps/users/views.py
vuonghv/django_rocket
d0dacddb9a2464327b4bbfb03cb199e7dafe8f28
[ "MIT" ]
null
null
null
# Input code here
9
17
0.722222
3
18
4.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.222222
18
1
18
18
0.928571
0.833333
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
219b43c413d778cf4e16e7e0475d4ded0a55cd10
158
py
Python
starfish/spots/__init__.py
ttung/starfish
1bd8abf55a335620e4b20abb041f478334714081
[ "MIT" ]
3
2020-09-01T12:18:20.000Z
2021-05-18T03:50:31.000Z
starfish/spots/__init__.py
ttung/starfish
1bd8abf55a335620e4b20abb041f478334714081
[ "MIT" ]
null
null
null
starfish/spots/__init__.py
ttung/starfish
1bd8abf55a335620e4b20abb041f478334714081
[ "MIT" ]
null
null
null
from ._decoder import Decoder from ._detector import SpotFinder from ._pixel_decoder import PixelSpotDecoder from ._target_assignment import TargetAssignment
31.6
48
0.873418
18
158
7.333333
0.555556
0.19697
0
0
0
0
0
0
0
0
0
0
0.101266
158
4
49
39.5
0.929577
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
21a1a68f314ffeaa8e6060d63c9efd42ecfb02d2
107
py
Python
__init__.py
chenkovsky/recpy
4e9387785021670862f73831d159bfc0b55ec5ba
[ "MIT" ]
3
2015-08-18T16:16:18.000Z
2016-02-29T12:13:20.000Z
__init__.py
chenkovsky/recpy
4e9387785021670862f73831d159bfc0b55ec5ba
[ "MIT" ]
null
null
null
__init__.py
chenkovsky/recpy
4e9387785021670862f73831d159bfc0b55ec5ba
[ "MIT" ]
null
null
null
__author__ = 'chenkovsky' from .knn import UserBasedKNNRecommender from .knn import ItemBasedKNNRecommender
35.666667
40
0.859813
10
107
8.8
0.7
0.159091
0.295455
0
0
0
0
0
0
0
0
0
0.093458
107
3
41
35.666667
0.907216
0
0
0
0
0
0.092593
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
21a737df32f9598ed98747c0a2d9e8decbd44971
163
py
Python
gym_simplifiedtetris/_utils/__init__.py
OliverOverend/gym-simplifiedtetristemp
832a0e99b52ec0c13bad1badc0dc1ba6453a6981
[ "MIT" ]
3
2021-10-04T19:38:14.000Z
2022-03-15T09:15:09.000Z
gym_simplifiedtetris/_utils/__init__.py
OliverOverend/gym-simplifiedtetristemp
832a0e99b52ec0c13bad1badc0dc1ba6453a6981
[ "MIT" ]
2
2021-10-05T18:19:29.000Z
2021-10-05T18:29:37.000Z
gym_simplifiedtetris/_utils/__init__.py
OliverOverend/gym-simplifiedtetristemp
832a0e99b52ec0c13bad1badc0dc1ba6453a6981
[ "MIT" ]
3
2021-11-19T20:50:07.000Z
2022-03-24T16:37:37.000Z
"""Initialise the utils package.""" from gym_simplifiedtetris._utils._piece import _Piece from gym_simplifiedtetris._utils._colours import _Colours __all__ = []
23.285714
57
0.809816
19
163
6.315789
0.578947
0.116667
0.383333
0.466667
0
0
0
0
0
0
0
0
0.104294
163
6
58
27.166667
0.821918
0.177914
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
21e754beffe5138bc7fda84796744b4e816a124a
59
py
Python
deep_compare/__init__.py
lavanyavijayk/deep-compare
c7943b883948795d5f7ca86af0ada5dc888db2b6
[ "MIT" ]
null
null
null
deep_compare/__init__.py
lavanyavijayk/deep-compare
c7943b883948795d5f7ca86af0ada5dc888db2b6
[ "MIT" ]
null
null
null
deep_compare/__init__.py
lavanyavijayk/deep-compare
c7943b883948795d5f7ca86af0ada5dc888db2b6
[ "MIT" ]
1
2020-10-08T06:20:24.000Z
2020-10-08T06:20:24.000Z
from deep_compare.compare_variables import CompareVariables
59
59
0.932203
7
59
7.571429
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.050847
59
1
59
59
0.946429
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
df20cce046d4a2e34e19d27317efb766ffb1c3ed
105
py
Python
game/citygen/__init__.py
Moguri/owg
2acadef7d3307251f60820846e845a39595e36b5
[ "Apache-2.0" ]
null
null
null
game/citygen/__init__.py
Moguri/owg
2acadef7d3307251f60820846e845a39595e36b5
[ "Apache-2.0" ]
null
null
null
game/citygen/__init__.py
Moguri/owg
2acadef7d3307251f60820846e845a39595e36b5
[ "Apache-2.0" ]
null
null
null
from citygen import CityGen from util import mesh_to_p3d_node from map import CityMap from data import *
21
33
0.838095
18
105
4.722222
0.611111
0
0
0
0
0
0
0
0
0
0
0.011236
0.152381
105
4
34
26.25
0.94382
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
df4cfd1ccbe7c3a3b74141fad506db41b0bc72ce
28
py
Python
py_cf/__init__.py
bopopescu/local_scda
40fa4a586f140dc00b8d3f53c732e22e022be338
[ "MIT" ]
null
null
null
py_cf/__init__.py
bopopescu/local_scda
40fa4a586f140dc00b8d3f53c732e22e022be338
[ "MIT" ]
null
null
null
py_cf/__init__.py
bopopescu/local_scda
40fa4a586f140dc00b8d3f53c732e22e022be338
[ "MIT" ]
2
2020-07-23T21:55:21.000Z
2021-01-14T12:27:19.000Z
from cf_interpreter import *
28
28
0.857143
4
28
5.75
1
0
0
0
0
0
0
0
0
0
0
0
0.107143
28
1
28
28
0.92
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
df57ecf05730ff3e5c40fda2bddc313b7083ed73
940
py
Python
session01_Decorators/test_assert_raises.py
morales-gregorio/Python-Module-of-the-Week
2c68e20be3e174be9b91c92ac872806dd982e7d2
[ "MIT" ]
15
2017-06-22T11:57:38.000Z
2022-03-31T13:34:07.000Z
session01_Decorators/test_assert_raises.py
morales-gregorio/Python-Module-of-the-Week
2c68e20be3e174be9b91c92ac872806dd982e7d2
[ "MIT" ]
3
2019-10-16T10:32:55.000Z
2020-01-09T09:24:48.000Z
session01_Decorators/test_assert_raises.py
morales-gregorio/Python-Module-of-the-Week
2c68e20be3e174be9b91c92ac872806dd982e7d2
[ "MIT" ]
6
2016-10-07T12:50:24.000Z
2019-11-28T11:15:04.000Z
#!/usr/bin/env python3 import sys def test_we_can_import_module(): import assert_raises def test_context_manager_exists(): import assert_raises assert_raises.assert_raises def test_context_manager_raises_exception(): import assert_raises with assert_raises.assert_raises(Exception): 1 / 0 def test_unexpected_success(): import assert_raises try: with assert_raises.assert_raises(ZeroDivisionError): pass except AssertionError: pass else: raise AssertionError('AssertionError was not raised') def test_cm_checks_exception_type(): import assert_raises try: with assert_raises.assert_raises(ZeroDivisionError): raise ValueError except AssertionError: pass else: raise AssertionError('AssertionError was not raised') if __name__ == '__main__': import pytest pytest.main([__file__] + sys.argv[1:])
23.5
61
0.706383
108
940
5.759259
0.388889
0.250804
0.144695
0.192926
0.598071
0.553055
0.446945
0.446945
0.446945
0.446945
0
0.005502
0.226596
940
39
62
24.102564
0.850069
0.02234
0
0.580645
0
0
0.071895
0
0
0
0
0
0.419355
1
0.16129
true
0.096774
0.258065
0
0.419355
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
1
1
0
0
0
0
0
5
10c425aa5985bbd6ffcaf18085d8b73c1391169f
1,524
py
Python
src/sugar/logo.py
rahlk/MOOSE
e45b64cf625bb90aa8c1c24ab1c8f52ab485a316
[ "MIT" ]
null
null
null
src/sugar/logo.py
rahlk/MOOSE
e45b64cf625bb90aa8c1c24ab1c8f52ab485a316
[ "MIT" ]
1
2016-11-27T20:18:00.000Z
2016-11-27T20:18:00.000Z
src/sugar/logo.py
rahlk/MAPGen
25bc1a84f07e30ab0dbb638cd2aa1ce416c510ff
[ "MIT" ]
null
null
null
from __future__ import print_function print(r""" __ __ ____ ____ _____ ______ | \/ |/ __ \ / __ \ / ____| ____| | \ / | | | | | | | (___ | |__ | |\/| | | | | | | |\___ \| __| | | | | |__| | |__| |____) | |____ |_| |_|\____/ \____/|_____/|______| _.--'''--, .' `\ .-''''''-. .' | / '. / .-._/ | `. | | \ \ .-._ | _ \ `""'-. \_.-. \ ` ( \__/ | ) '=. ., \ / ( \ / \ / /` `\ | / `' '..-`\ _.-. `\ _.__/ .=. | _ / \ '.-` `-.' / \_/ | | './ _ _ \.' '-' | / \ | | .-. .-. | \ / o| |o \ / | / \ | / `"` `"` \ / \ | '._.' \ | / | \ | | || _ _ / /|\ (_\ /_) / RAISE \ \'._ ` '_.' `''` `'''` """)
38.1
51
0.096457
10
1,524
4.2
0.8
0
0
0
0
0
0
0
0
0
0
0
0.727034
1,524
40
52
38.1
0.100962
0
0
0
0
0
0.949508
0.01377
0
0
0
0
0
1
0
true
0
0.028571
0
0.028571
0.057143
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
1
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
10c452a323c1ec92b133eec28963164ad92e2f44
156
py
Python
PasswordGenerationService/password_generation/util/_logging.py
zmcneilly/coral_barnacle
9e24016db9363ed1e7923aaec0bbe3a407175fd1
[ "Apache-2.0" ]
null
null
null
PasswordGenerationService/password_generation/util/_logging.py
zmcneilly/coral_barnacle
9e24016db9363ed1e7923aaec0bbe3a407175fd1
[ "Apache-2.0" ]
null
null
null
PasswordGenerationService/password_generation/util/_logging.py
zmcneilly/coral_barnacle
9e24016db9363ed1e7923aaec0bbe3a407175fd1
[ "Apache-2.0" ]
null
null
null
import logging import os logger = logging.getLogger() logger.setLevel(logging.INFO) if os.getenv("STAGE", "") == "dev": logger.setLevel(logging.DEBUG)
19.5
35
0.717949
20
156
5.6
0.6
0.25
0.375
0
0
0
0
0
0
0
0
0
0.121795
156
7
36
22.285714
0.817518
0
0
0
0
0
0.051282
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
10c7ea1e4bcd397e6f106977ed323a359d752d4f
126
py
Python
tests/test_application.py
chriswhalen/junior
f49074fe4e890849b3aceb787c53b0e6f210b8e9
[ "MIT" ]
4
2021-03-13T09:09:56.000Z
2022-02-20T03:59:36.000Z
tests/test_application.py
chriswhalen/junior
f49074fe4e890849b3aceb787c53b0e6f210b8e9
[ "MIT" ]
33
2021-05-14T21:36:14.000Z
2022-03-25T03:10:36.000Z
tests/test_application.py
chriswhalen/junior
f49074fe4e890849b3aceb787c53b0e6f210b8e9
[ "MIT" ]
null
null
null
class TestApplication: # Will the application start? def test_start(self, application): application.start()
18
38
0.690476
13
126
6.615385
0.692308
0.372093
0
0
0
0
0
0
0
0
0
0
0.230159
126
6
39
21
0.886598
0.214286
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
800beb22c40693a920aa7946cf703d8af0ab54fe
28
py
Python
cj/scraper/novels/ww.py
james-garfield/Captain-Japan
8edc63f138f956fe9187b6eda3bffdd871fba839
[ "Unlicense" ]
null
null
null
cj/scraper/novels/ww.py
james-garfield/Captain-Japan
8edc63f138f956fe9187b6eda3bffdd871fba839
[ "Unlicense" ]
null
null
null
cj/scraper/novels/ww.py
james-garfield/Captain-Japan
8edc63f138f956fe9187b6eda3bffdd871fba839
[ "Unlicense" ]
null
null
null
# Scraper for WuxiaWorld.com
28
28
0.821429
4
28
5.75
1
0
0
0
0
0
0
0
0
0
0
0
0.107143
28
1
28
28
0.92
0.928571
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
33788adcdfbca85b0549c00274a0d0511db9dc68
196
py
Python
tests/plugins/test_lyx_filters.py
dsoto/dexy
0f2090250040c3c54c8481a16de8e476b559e87c
[ "MIT" ]
136
2015-01-06T15:04:47.000Z
2021-12-21T22:52:41.000Z
tests/plugins/test_lyx_filters.py
dsoto/dexy
0f2090250040c3c54c8481a16de8e476b559e87c
[ "MIT" ]
13
2015-01-26T14:06:58.000Z
2020-03-27T21:16:10.000Z
tests/plugins/test_lyx_filters.py
dsoto/dexy
0f2090250040c3c54c8481a16de8e476b559e87c
[ "MIT" ]
34
2015-01-02T16:24:53.000Z
2021-11-27T05:38:30.000Z
from tests.utils import assert_output def test_lyx(): assert_output("lyxjinja", "dexy:foo.py|idio:multiply", "<< d['foo.py|idio']['multiply'] >>", ".tex")
24.5
49
0.545918
23
196
4.521739
0.73913
0.230769
0.173077
0.326923
0
0
0
0
0
0
0
0
0.270408
196
7
50
28
0.727273
0
0
0
0
0
0.362245
0.270408
0
0
0
0
0.333333
1
0.166667
true
0
0.166667
0
0.333333
0
1
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
33d07e55fe17455851006bcb513488dcf3540674
638
py
Python
account_ux/models/__init__.py
odoo-mastercore/odoo-argentina
58cdfe8610bae42f69ddb9d652a28eb3245f6a04
[ "MIT" ]
1
2021-01-25T15:57:58.000Z
2021-01-25T15:57:58.000Z
account_ux/models/__init__.py
odoo-mastercore/odoo-argentina
58cdfe8610bae42f69ddb9d652a28eb3245f6a04
[ "MIT" ]
null
null
null
account_ux/models/__init__.py
odoo-mastercore/odoo-argentina
58cdfe8610bae42f69ddb9d652a28eb3245f6a04
[ "MIT" ]
2
2020-10-17T16:36:02.000Z
2021-01-24T10:20:05.000Z
############################################################################## # For copyright and license notices, see __manifest__.py file in module root # directory ############################################################################## from . import account_group from . import account_account from . import account_journal from . import account_payment from . import account_bank_statements from . import account_partial_reconcile from . import account_move_line from . import res_partner from . import res_currency_rate from . import account_bank_statement_line from . import account_account_type from . import account_move
35.444444
78
0.628527
68
638
5.558824
0.455882
0.31746
0.449735
0.126984
0
0
0
0
0
0
0
0
0.101881
638
17
79
37.529412
0.659686
0.131661
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
33d0fffc2a410473b9d687a42eaa5f057a492ec7
126
py
Python
info/moduels/news/__init__.py
haczfdx/information
7364fb6eea54bb1d17b70c626cdceda129f99902
[ "MIT" ]
null
null
null
info/moduels/news/__init__.py
haczfdx/information
7364fb6eea54bb1d17b70c626cdceda129f99902
[ "MIT" ]
null
null
null
info/moduels/news/__init__.py
haczfdx/information
7364fb6eea54bb1d17b70c626cdceda129f99902
[ "MIT" ]
null
null
null
"""这个蓝图对应的是新闻页面的相关的操作""" from flask import Blueprint news_blue = Blueprint('news', __name__) import info.moduels.news.views
18
39
0.777778
15
126
6.2
0.733333
0.27957
0
0
0
0
0
0
0
0
0
0
0.103175
126
6
40
21
0.823009
0.142857
0
0
0
0
0.039216
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0.666667
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
1
0
5
33fff26f2bda55d6f746e8012701fab417fba8ec
407
py
Python
apprest/utils/exceptions.py
dsanchez-cells/calipsoplus-backend
7eaa6904ec59d88052644b31041b92ee20e54354
[ "MIT" ]
4
2018-12-04T15:08:27.000Z
2019-04-11T09:49:41.000Z
apprest/utils/exceptions.py
dsanchez-cells/calipsoplus-backend
7eaa6904ec59d88052644b31041b92ee20e54354
[ "MIT" ]
63
2018-11-22T13:07:56.000Z
2021-06-10T20:55:58.000Z
apprest/utils/exceptions.py
dsanchez-cells/calipsoplus-backend
7eaa6904ec59d88052644b31041b92ee20e54354
[ "MIT" ]
10
2018-11-23T08:17:28.000Z
2022-01-15T23:41:59.000Z
class QuotaExceptionExceeded(Exception): pass class QuotaMaxSimultaneousExceeded(QuotaExceptionExceeded): pass class QuotaCpuExceeded(QuotaExceptionExceeded): pass class QuotaMemoryExceeded(QuotaExceptionExceeded): pass class QuotaHddExceeded(QuotaExceptionExceeded): pass class ResourceAlreadyLaunched(Exception): pass class DockerExceptionNotFound(Exception): pass
15.074074
59
0.798526
28
407
11.607143
0.357143
0.166154
0.381538
0
0
0
0
0
0
0
0
0
0.149877
407
26
60
15.653846
0.939306
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
1
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
1d299e71eceb5d1a7696dd7e38c963da446e0e4e
9,902
py
Python
inference.py
cemfi/BachNet
81931952d96f36f45819a5ad77cc45106883a961
[ "MIT" ]
2
2019-09-16T09:35:48.000Z
2022-01-11T10:09:38.000Z
inference.py
cemfi/BachNet
81931952d96f36f45819a5ad77cc45106883a961
[ "MIT" ]
1
2020-09-24T14:25:51.000Z
2020-09-24T16:05:34.000Z
inference.py
cemfi/BachNet
81931952d96f36f45819a5ad77cc45106883a961
[ "MIT" ]
null
null
null
from argparse import ArgumentParser import utils import torch from torch.nn.functional import one_hot from tqdm import tqdm from data import pitch_sizes_parts, generate_data_inference, indices_parts from model import BachNetInferenceContinuo, BachNetInferenceMiddleParts # from music21 import environment # environment.set('musicxmlPath', 'C:\\Program Files\\MuseScore 3\\bin\\MuseScore3.exe') def predict_middle_parts(bass, soprano_path, checkpoint_path, num_candidates=1): bass_for_playback = bass checkpoint = torch.load(checkpoint_path, map_location='cpu') config = checkpoint['config'] state = checkpoint['state_middle_parts'] model = BachNetInferenceMiddleParts( hidden_size=config.hidden_size, context_radius=config.context_radius, num_candidates=num_candidates ) model.load_state_dict(state) model.set_part_weights( # loss_bass=checkpoint['loss_bass'], loss_alto=checkpoint['loss_alto'], loss_tenor=checkpoint['loss_tenor'] ) model.eval() sample = generate_data_inference( time_grid=config.time_grid, soprano_path=soprano_path ) inputs = sample['data'] metadata = sample['metadata'] soprano = inputs['soprano'].clone() extra = inputs['extra'].clone() length = inputs['soprano'].shape[0] # Zero padding for input data for part in ['soprano', 'extra']: inputs[part] = torch.cat([ torch.zeros((config.context_radius, inputs[part].shape[1])), inputs[part], torch.zeros((config.context_radius, inputs[part].shape[1])) ], dim=0) bass = torch.cat([ torch.zeros((config.context_radius, bass.shape[1])), bass, torch.zeros((config.context_radius, bass.shape[1])) ], dim=0) predictions = model({ 'soprano': inputs['soprano'][:2 * config.context_radius + 1], 'alto': torch.zeros((config.context_radius, pitch_sizes_parts['alto'] + len(indices_parts))), 'tenor': torch.zeros((config.context_radius, pitch_sizes_parts['tenor'] + len(indices_parts))), 'bass_with_context': bass[:2 * config.context_radius + 1], 'extra': inputs['extra'][:2 * config.context_radius + 1] }) probabilities = 
[torch.max(predictions[:, 0]).item()] acc_probabilities = predictions[:, 0] history_pitches = [predictions[:, 1:]] for step in tqdm(range(1, length), unit='time steps'): candidates = [] padding_size = max(0, config.context_radius - len(history_pitches)) history_size = min(config.context_radius, len(history_pitches)) cur_history = torch.stack(history_pitches[-history_size:], dim=0).long() for candidate_idx in range(num_candidates): predictions = model({ 'soprano': inputs['soprano'][step:step + 2 * config.context_radius + 1], 'bass_with_context': bass[step:step + 2 * config.context_radius + 1], 'alto': torch.cat([ torch.zeros((padding_size, pitch_sizes_parts['alto'] + len(indices_parts))), one_hot(cur_history[:, candidate_idx, 0], pitch_sizes_parts['alto'] + len(indices_parts)).float() ], dim=0), 'tenor': torch.cat([ torch.zeros((padding_size, pitch_sizes_parts['tenor'] + len(indices_parts))), one_hot(cur_history[:, candidate_idx, 1], pitch_sizes_parts['tenor'] + len(indices_parts)).float() ], dim=0), 'extra': inputs['extra'][step:step + 2 * config.context_radius + 1] }) candidates.append(predictions) candidates = torch.stack(candidates, dim=0) # 0: Index for chord in history_pitches[-1] # 1: Candidate idx # 2: Probability Pitches # Add log probabilities of candidates to current probabilities candidates[:, :, 0] = (acc_probabilities + candidates[:, :, 0].t()).t() candidate_indices = torch.argsort(candidates.view(-1, 4)[:, 0], dim=0, descending=True) best_indices = torch.stack([candidate_indices // num_candidates, candidate_indices % num_candidates], dim=1)[ :num_candidates] # [[last_chord_idx, new_chord_idx]] history_pitches.append(torch.empty_like(history_pitches[-1])) for i in range(num_candidates): for k in range(len(history_pitches) - 2, -1, -1): history_pitches[k][i] = history_pitches[k][best_indices[i, 0]] history_pitches[-1][i] = candidates[best_indices[i, 0], best_indices[i, 1], 1:] acc_probabilities[i] = candidates[best_indices[i, 0], best_indices[i, 1], 0] 
probabilities.append(torch.max(acc_probabilities, dim=0)[0].item()) winner = torch.stack(history_pitches, dim=1)[torch.argmax(acc_probabilities)].long().t() score = utils.tensors_to_stream({ 'soprano': soprano, 'extra': extra, 'bass': bass_for_playback, 'alto': one_hot(winner[0], pitch_sizes_parts['alto'] + len(indices_parts)), 'tenor': one_hot(winner[1], pitch_sizes_parts['tenor'] + len(indices_parts)), }, config, metadata) # for e in zip(winner.t(), probabilities): # print(e) return score # score.write('musicxml', f'beam_{num_candidates}.musicxml') # score.show('musicxml') def predict_bass(soprano_path, checkpoint_path, num_candidates=1): checkpoint = torch.load(checkpoint_path, map_location='cpu') config = checkpoint['config'] state = checkpoint['state_continuo'] model = BachNetInferenceContinuo( hidden_size=config.hidden_size, context_radius=config.context_radius, num_candidates=num_candidates, ) model.load_state_dict(state) model.eval() sample = generate_data_inference( time_grid=config.time_grid, soprano_path=soprano_path ) inputs = sample['data'] length = inputs['soprano'].shape[0] # Zero padding for input data for part in ['soprano', 'extra']: inputs[part] = torch.cat([ torch.zeros((config.context_radius, inputs[part].shape[1])), inputs[part], torch.zeros((config.context_radius, inputs[part].shape[1])) ], dim=0) predictions = model({ 'soprano': inputs['soprano'][:2 * config.context_radius + 1], 'bass': torch.zeros((config.context_radius, pitch_sizes_parts['bass'] + len(indices_parts))), 'extra': inputs['extra'][:2 * config.context_radius + 1] }) probabilities = [torch.max(predictions[:, 0]).item()] acc_probabilities = predictions[:, 0] history_pitches = [predictions[:, 1:]] for step in tqdm(range(1, length), unit='time steps'): candidates = [] history_size = min(config.context_radius, len(history_pitches)) padding_size = config.context_radius - history_size cur_history = torch.stack(history_pitches[-history_size:], dim=0).long() for candidate_idx in 
range(num_candidates): predictions = model({ 'soprano': inputs['soprano'][step:step + 2 * config.context_radius + 1], 'bass': torch.cat([ torch.zeros((padding_size, pitch_sizes_parts['bass'] + len(indices_parts))), one_hot(cur_history[:, candidate_idx, 0], pitch_sizes_parts['bass'] + len(indices_parts)).float() ], dim=0), 'extra': inputs['extra'][step:step + 2 * config.context_radius + 1] }) candidates.append(predictions) candidates = torch.stack(candidates, dim=0) # 0: Index for chord in history_pitches[-1] # 1: Candidate idx # 2: Probability Pitches # Add log probabilities of candidates to current probabilities candidates[:, :, 0] = (acc_probabilities + candidates[:, :, 0].t()).t() candidate_indices = torch.argsort(candidates.view(-1, 4)[:, 0], dim=0, descending=True) best_indices = torch.stack([candidate_indices // num_candidates, candidate_indices % num_candidates], dim=1)[ :num_candidates] # [[last_chord_idx, new_chord_idx]] history_pitches.append(torch.empty_like(history_pitches[-1])) for i in range(num_candidates): for k in range(len(history_pitches) - 2, -1, -1): history_pitches[k][i] = history_pitches[k][best_indices[i, 0]] history_pitches[-1][i] = candidates[best_indices[i, 0], best_indices[i, 1], 1:] acc_probabilities[i] = candidates[best_indices[i, 0], best_indices[i, 1], 0] probabilities.append(torch.max(acc_probabilities, dim=0)[0].item()) winner = torch.stack(history_pitches, dim=1)[torch.argmax(acc_probabilities)].long().t() # for e in zip(winner.t(), probabilities): # print(e) bass_predicted = one_hot(winner[0], pitch_sizes_parts['bass'] + len(indices_parts)) return bass_predicted def compose_score(checkpoint_path, soprano_path): torch.set_grad_enabled(False) bass = predict_bass( soprano_path=soprano_path, checkpoint_path=checkpoint_path, num_candidates=1 ).clone().detach().float() score = predict_middle_parts( bass, soprano_path=soprano_path, checkpoint_path=checkpoint_path, num_candidates=1 ) return score if __name__ == '__main__': parser = 
ArgumentParser() parser.add_argument("-c", "--checkpoint", help="path to checkpoint .pt file", metavar="PATH") parser.add_argument("-m", "--musicxml", help="path to musicxml file") args = parser.parse_args() # python inference.py -c checkpoints/2021-04-06_18-18-01/2021-04-06_18-18-01_epoch=0142.pt -m data/musicxml/001_soprano.xml score = compose_score(args.checkpoint, args.musicxml) score.show('musicxml')
39.767068
127
0.639063
1,193
9,902
5.084661
0.139983
0.057864
0.078305
0.032971
0.78701
0.782888
0.764919
0.743653
0.666337
0.609463
0
0.019286
0.225005
9,902
248
128
39.927419
0.771175
0.088366
0
0.613636
1
0
0.053532
0
0
0
0
0
0
1
0.017045
false
0
0.039773
0
0.073864
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
d51a63df1b0db1765947945e831546018c1e0252
79
py
Python
python/mph.py/mph/__init__.py
MBender/muphasa
7d0887d7f6c6b0f673510a6a70fa9ae1f2473b89
[ "MIT" ]
3
2022-01-08T13:45:16.000Z
2022-03-14T12:11:34.000Z
python/mph.py/mph/__init__.py
MBender/muphasa
7d0887d7f6c6b0f673510a6a70fa9ae1f2473b89
[ "MIT" ]
null
null
null
python/mph.py/mph/__init__.py
MBender/muphasa
7d0887d7f6c6b0f673510a6a70fa9ae1f2473b89
[ "MIT" ]
1
2022-01-11T16:57:36.000Z
2022-01-11T16:57:36.000Z
from .mph import * from ._version import __version__ from .examples import *
13.166667
33
0.759494
10
79
5.5
0.5
0
0
0
0
0
0
0
0
0
0
0
0.177215
79
5
34
15.8
0.846154
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
d51c2f45c00a7845151973696c1d118b8214d486
159
py
Python
tests/conftest.py
metno/py-mms
ccc970f00d4b4a3fb8989fd5679ab42ecaaae2de
[ "Apache-2.0" ]
null
null
null
tests/conftest.py
metno/py-mms
ccc970f00d4b4a3fb8989fd5679ab42ecaaae2de
[ "Apache-2.0" ]
20
2020-08-20T07:37:26.000Z
2021-11-02T09:49:50.000Z
tests/conftest.py
metno/py-mms
ccc970f00d4b4a3fb8989fd5679ab42ecaaae2de
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- """Test Config """ import os import sys sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir)))
17.666667
92
0.672956
26
159
3.961538
0.615385
0.23301
0
0
0
0
0
0
0
0
0
0.014085
0.106918
159
8
93
19.875
0.711268
0.213836
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
d52ceb6566eab5f6af94d8e00b8ad592007b1e21
42
py
Python
tests/test_forms.py
pmrowla/p101stat
8fa5ab63d13e395295b974294c6ec4c7c933bb8b
[ "BSD-3-Clause" ]
1
2016-02-22T06:34:52.000Z
2016-02-22T06:34:52.000Z
tests/test_forms.py
pmrowla/p101stat
8fa5ab63d13e395295b974294c6ec4c7c933bb8b
[ "BSD-3-Clause" ]
null
null
null
tests/test_forms.py
pmrowla/p101stat
8fa5ab63d13e395295b974294c6ec4c7c933bb8b
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- """Test forms."""
14
23
0.452381
5
42
3.8
1
0
0
0
0
0
0
0
0
0
0
0.028571
0.166667
42
2
24
21
0.514286
0.809524
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
d543036f8026c68ba584e01bfd26d8cfc9d2fae6
99
py
Python
twobuntu/ads/admin.py
muhiza/originarities
ca0a67363579e6237127386f13baa2ab7a7c2717
[ "Apache-2.0" ]
null
null
null
twobuntu/ads/admin.py
muhiza/originarities
ca0a67363579e6237127386f13baa2ab7a7c2717
[ "Apache-2.0" ]
null
null
null
twobuntu/ads/admin.py
muhiza/originarities
ca0a67363579e6237127386f13baa2ab7a7c2717
[ "Apache-2.0" ]
null
null
null
from django.contrib import admin from twobuntu.ads.models import Ad admin.site.register(Ad)
16.5
35
0.767677
15
99
5.066667
0.733333
0
0
0
0
0
0
0
0
0
0
0
0.161616
99
5
36
19.8
0.915663
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
d54836fa33a38982a420f819e48a85d1992ef47b
47
py
Python
cpyquickhelper/algorithms/__init__.py
sdpython/cpyquickhelper
c2bdebad2201c7e10a5999a836bbf53e27b963c7
[ "MIT" ]
2
2017-10-03T20:39:13.000Z
2019-02-06T15:24:04.000Z
cpyquickhelper/algorithms/__init__.py
sdpython/cpyquickhelper
c2bdebad2201c7e10a5999a836bbf53e27b963c7
[ "MIT" ]
21
2017-09-17T11:14:04.000Z
2021-01-01T13:24:20.000Z
cpyquickhelper/algorithms/__init__.py
sdpython/cpyquickhelper
c2bdebad2201c7e10a5999a836bbf53e27b963c7
[ "MIT" ]
null
null
null
""" @file @brief Shortcut to *algorithms*. """
9.4
32
0.617021
5
47
5.8
1
0
0
0
0
0
0
0
0
0
0
0
0.148936
47
4
33
11.75
0.725
0.808511
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
d54f88f0b671db792bdab815d6938dcbb3e88e66
134
py
Python
work/admin.py
lazypunky/makeportfolio
010ed282d5b954e16b225d8c61737a0e744aaef1
[ "MIT" ]
null
null
null
work/admin.py
lazypunky/makeportfolio
010ed282d5b954e16b225d8c61737a0e744aaef1
[ "MIT" ]
null
null
null
work/admin.py
lazypunky/makeportfolio
010ed282d5b954e16b225d8c61737a0e744aaef1
[ "MIT" ]
1
2020-09-15T09:30:13.000Z
2020-09-15T09:30:13.000Z
from django.contrib import admin from .models import WorkCategory, Work admin.site.register(WorkCategory) admin.site.register(Work)
19.142857
38
0.820896
18
134
6.111111
0.555556
0.163636
0.309091
0
0
0
0
0
0
0
0
0
0.097015
134
6
39
22.333333
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
d58af46ed562d6ef28e777c217c613c436a49650
79
py
Python
pyobis/taxa/__init__.py
ayushanand18/pyobis
e4f37f47a0fe87eb91bbce63cd06aa68dbb0a882
[ "MIT" ]
null
null
null
pyobis/taxa/__init__.py
ayushanand18/pyobis
e4f37f47a0fe87eb91bbce63cd06aa68dbb0a882
[ "MIT" ]
null
null
null
pyobis/taxa/__init__.py
ayushanand18/pyobis
e4f37f47a0fe87eb91bbce63cd06aa68dbb0a882
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from .taxa import search, taxon, common, taxon_search
19.75
53
0.670886
11
79
4.727273
0.818182
0
0
0
0
0
0
0
0
0
0
0.015152
0.164557
79
3
54
26.333333
0.772727
0.265823
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
d599f3d7fe3a07c471a8a6593ef05cb05a974a56
67
py
Python
ilastikrag/accumulators/edgeregion/__init__.py
ilastik/ilastikrag
e6cd157b25eaffed5283c48471f7b58a0e4889d7
[ "BSD-3-Clause" ]
null
null
null
ilastikrag/accumulators/edgeregion/__init__.py
ilastik/ilastikrag
e6cd157b25eaffed5283c48471f7b58a0e4889d7
[ "BSD-3-Clause" ]
6
2017-10-21T00:03:33.000Z
2021-12-21T17:30:17.000Z
ilastikrag/accumulators/edgeregion/__init__.py
ilastik/ilastikrag
e6cd157b25eaffed5283c48471f7b58a0e4889d7
[ "BSD-3-Clause" ]
null
null
null
from .edgeregion_edge_accumulator import EdgeRegionEdgeAccumulator
33.5
66
0.925373
6
67
10
1
0
0
0
0
0
0
0
0
0
0
0
0.059701
67
1
67
67
0.952381
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
d5af01f515de0588bea08387a9f6681f63b5f33e
58
py
Python
test_twitchtracker.py
adegenaar/Twitch-Tracker
d6ccd984ad43e27e41dd1838e0d588961ddf209f
[ "MIT" ]
null
null
null
test_twitchtracker.py
adegenaar/Twitch-Tracker
d6ccd984ad43e27e41dd1838e0d588961ddf209f
[ "MIT" ]
null
null
null
test_twitchtracker.py
adegenaar/Twitch-Tracker
d6ccd984ad43e27e41dd1838e0d588961ddf209f
[ "MIT" ]
null
null
null
import pytest def test_pass(): assert True pass
8.285714
16
0.655172
8
58
4.625
0.875
0
0
0
0
0
0
0
0
0
0
0
0.293103
58
6
17
9.666667
0.902439
0
0
0
0
0
0
0
0
0
0
0
0.25
1
0.25
true
0.5
0.25
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
1
0
0
0
0
0
5
6358a2bff87f6ccafeefb7392d357c6bc582f414
49
py
Python
src/backend/common/queries/exceptions.py
guineawheek/ftc-data-take-2
337bff2077eadb3bd6bbebd153cbb6181c99516f
[ "MIT" ]
266
2015-01-04T00:10:48.000Z
2022-03-28T18:42:05.000Z
src/backend/common/queries/exceptions.py
guineawheek/ftc-data-take-2
337bff2077eadb3bd6bbebd153cbb6181c99516f
[ "MIT" ]
2,673
2015-01-01T20:14:33.000Z
2022-03-31T18:17:16.000Z
src/backend/common/queries/exceptions.py
guineawheek/ftc-data-take-2
337bff2077eadb3bd6bbebd153cbb6181c99516f
[ "MIT" ]
230
2015-01-04T00:10:48.000Z
2022-03-26T18:12:04.000Z
class DoesNotExistException(Exception): pass
16.333333
39
0.795918
4
49
9.75
1
0
0
0
0
0
0
0
0
0
0
0
0.142857
49
2
40
24.5
0.928571
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
636e17cd678a9464158739e9de84b7e53d6688ed
403
py
Python
vvlogin/templatetags/vvlogin_tags.py
synw/django-vvlogin
f261bb73d39ef5a742c9e6d30cfea0c9fa8ba6f8
[ "MIT" ]
2
2017-04-22T11:13:57.000Z
2021-07-05T17:52:59.000Z
vvlogin/templatetags/vvlogin_tags.py
synw/django-vvlogin
f261bb73d39ef5a742c9e6d30cfea0c9fa8ba6f8
[ "MIT" ]
null
null
null
vvlogin/templatetags/vvlogin_tags.py
synw/django-vvlogin
f261bb73d39ef5a742c9e6d30cfea0c9fa8ba6f8
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from django import template from vvlogin.conf import REGISTRATION_URL, LOGIN_REDIRECT_URL, LOGOUT_REDIRECT_URL register = template.Library() @register.simple_tag def registration_url(): return REGISTRATION_URL @register.simple_tag def login_redirect_url(): return LOGIN_REDIRECT_URL @register.simple_tag def logout_redirect_url(): return LOGOUT_REDIRECT_URL
17.521739
82
0.789082
53
403
5.660377
0.377358
0.22
0.16
0.2
0.153333
0
0
0
0
0
0
0.002865
0.133995
403
22
83
18.318182
0.856734
0.052109
0
0.25
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.166667
0.25
0.666667
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
63728c8980ad6e25bca988d0e0e323d2a5cceea7
529
py
Python
Grade 7/Lesson 3/dot.py
arm-university/ASP_International-Intro-to-Computing
2aae65503187c839cd1f39629c51ca4cf2f1b551
[ "RSA-MD" ]
null
null
null
Grade 7/Lesson 3/dot.py
arm-university/ASP_International-Intro-to-Computing
2aae65503187c839cd1f39629c51ca4cf2f1b551
[ "RSA-MD" ]
null
null
null
Grade 7/Lesson 3/dot.py
arm-university/ASP_International-Intro-to-Computing
2aae65503187c839cd1f39629c51ca4cf2f1b551
[ "RSA-MD" ]
1
2022-01-25T19:35:15.000Z
2022-01-25T19:35:15.000Z
# Add your Python code here. E.g. from microbit import * from time import sleep dot1 = Image("00000:" "00000:" "00000:" "00000:" "50000") dot2 = Image("00000:" "00000:" "00000:" "00000:" "05000") dot3 = Image("00000:" "00000:" "00000:" "00000:" "00500") display.show(dot1) sleep(0.5) display.show(dot2) sleep(0.5) display.show(dot3) sleep(0.5)
18.892857
34
0.431002
53
529
4.301887
0.45283
0.394737
0.394737
0.263158
0.486842
0
0
0
0
0
0
0.288079
0.429112
529
28
35
18.892857
0.466887
0.058601
0
0.521739
0
0
0.17505
0
0
0
0
0
0
1
0
false
0
0.086957
0
0.086957
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
63955b5328608a167e1e656dd1d099cc5089cada
9,120
py
Python
tests/test_api.py
phracek/container-ci-suite
6a1ec6228cc2c0344d32386e26474e6596d4ea4d
[ "MIT" ]
null
null
null
tests/test_api.py
phracek/container-ci-suite
6a1ec6228cc2c0344d32386e26474e6596d4ea4d
[ "MIT" ]
null
null
null
tests/test_api.py
phracek/container-ci-suite
6a1ec6228cc2c0344d32386e26474e6596d4ea4d
[ "MIT" ]
null
null
null
#!/bin/env python3 # MIT License # # Copyright (c) 2018-2019 Red Hat, Inc. # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
import pytest import flexmock from tempfile import mkdtemp from pathlib import Path from container_ci_suite.api import ContainerCISuite from container_ci_suite.container import DockerCLIWrapper from tests.conftest import s2i_build_as_df_fedora_test_app from tests.spellbook import DATA_DIR class TestContainerCISuiteAPI(object): @pytest.mark.parametrize( "app_path,s2i_args,src_image,dest_image,df", [ ( f"file://{DATA_DIR}/test-app", "--pull-policy=never -e NODE_ENV=development", "f32/nodejs:12", "f32/nodejs:12-testapp", s2i_build_as_df_fedora_test_app(), ) ], ) def test_s2i_build_from_df(self, app_path, s2i_args, src_image, dest_image, df): ccs = ContainerCISuite(image_name=src_image) flexmock(DockerCLIWrapper).should_receive("docker_image_exists").with_args( src_image ).and_return(True) flexmock(DockerCLIWrapper).should_receive("docker_inspect").and_return(1001) flexmock(DockerCLIWrapper).should_receive("docker_get_user_id").and_return(1001) tmp_dir = Path(mkdtemp()) generated_df = ccs.s2i_create_df( tmp_dir=tmp_dir, app_path=app_path, s2i_args=s2i_args, src_image=src_image, dst_image=dest_image, ) assert generated_df == df def test_check_envs_set(self): run_envs = """MANPATH=/opt/rh/rh-ruby26/root/usr/local/share/man:/opt/rh/rh-ruby26/root/usr/share/man:/opt/rh/rh-nodejs14/root/usr/share/man: APP_ROOT=/opt/app-root NODEJS_SCL=rh-nodejs14 X_SCLS=rh-nodejs14 rh-ruby26 LD_LIBRARY_PATH=/opt/rh/rh-ruby26/root/usr/local/lib64:/opt/rh/rh-ruby26/root/usr/lib64:/opt/rh/rh-nodejs14/root/usr/lib64 PATH=/opt/rh/rh-ruby26/root/usr/local/bin:/opt/rh/rh-ruby26/root/usr/bin:/opt/rh/rh-nodejs14/root/usr/bin:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin STI_SCRIPTS_URL=image:///usr/libexec/s2i PWD=/opt/app-root/src STI_SCRIPTS_PATH=/usr/libexec/s2i IMAGE_NAME=rhscl/ruby-26-rhel7 HOME=/opt/app-root/src RUBY_SCL=rh-ruby26 
XDG_DATA_DIRS=/opt/rh/rh-ruby26/root/usr/local/share:/opt/rh/rh-ruby26/root/usr/share:/usr/local/share:/usr/share PKG_CONFIG_PATH=/opt/rh/rh-ruby26/root/usr/local/lib64/pkgconfig:/opt/rh/rh-ruby26/root/usr/lib64/pkgconfig RUBY_VERSION=2.6""" exec_envs = """PATH=/opt/rh/rh-ruby26/root/usr/local/bin:/opt/rh/rh-ruby26/root/usr/bin:/opt/rh/rh-nodejs14/root/usr/bin:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin SUMMARY=Platform for building and running Ruby 2.6 applications STI_SCRIPTS_URL=image:///usr/libexec/s2i STI_SCRIPTS_PATH=/usr/libexec/s2i APP_ROOT=/opt/app-root HOME=/opt/app-root/src BASH_ENV=/opt/app-root/etc/scl_enable ENV=/opt/app-root/etc/scl_enable PROMPT_COMMAND=. /opt/app-root/etc/scl_enable NODEJS_SCL=rh-nodejs14 RUBY_SCL=rh-ruby26 IMAGE_NAME=rhscl/ruby-26-rhel7 LD_LIBRARY_PATH=/opt/rh/rh-ruby26/root/usr/local/lib64:/opt/rh/rh-ruby26/root/usr/lib64:/opt/rh/rh-nodejs14/root/usr/lib64 X_SCLS=rh-nodejs14 rh-ruby26 MANPATH=/opt/rh/rh-ruby26/root/usr/local/share/man:/opt/rh/rh-ruby26/root/usr/share/man:/opt/rh/rh-nodejs14/root/usr/share/man: XDG_DATA_DIRS=/opt/rh/rh-ruby26/root/usr/local/share:/opt/rh/rh-ruby26/root/usr/share:/usr/local/share:/usr/share PKG_CONFIG_PATH=/opt/rh/rh-ruby26/root/usr/local/lib64/pkgconfig:/opt/rh/rh-ruby26/root/usr/lib64/pkgconfig """ ccs = ContainerCISuite(image_name="f32/nodejs:12") ccs.test_check_envs_set(env_filter="^X_SCLS=|/opt/rh|/opt/app-root", check_envs=exec_envs, loop_envs=run_envs) def test_check_envs_set_home_not_in_docker_exec(self): run_envs = """MANPATH=/opt/rh/rh-ruby26/root/usr/local/share/man:/opt/rh/rh-ruby26/root/usr/share/man:/opt/rh/rh-nodejs14/root/usr/share/man: APP_ROOT=/opt/app-root X_SCLS=rh-nodejs14 rh-ruby26 LD_LIBRARY_PATH=/opt/rh/rh-ruby26/root/usr/local/lib64:/opt/rh/rh-ruby26/root/usr/lib64:/opt/rh/rh-nodejs14/root/usr/lib64 
PATH=/opt/rh/rh-ruby26/root/usr/local/bin:/opt/rh/rh-ruby26/root/usr/bin:/opt/rh/rh-nodejs14/root/usr/bin:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin STI_SCRIPTS_URL=image:///usr/libexec/s2i STI_SCRIPTS_PATH=/usr/libexec/s2i HOME=/opt/app-root/src XDG_DATA_DIRS=/opt/rh/rh-ruby26/root/usr/local/share:/opt/rh/rh-ruby26/root/usr/share:/usr/local/share:/usr/share PKG_CONFIG_PATH=/opt/rh/rh-ruby26/root/usr/local/lib64/pkgconfig:/opt/rh/rh-ruby26/root/usr/lib64/pkgconfig RUBY_VERSION=2.6""" exec_envs = """PATH=/opt/rh/rh-ruby26/root/usr/local/bin:/opt/rh/rh-ruby26/root/usr/bin:/opt/rh/rh-nodejs14/root/usr/bin:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin STI_SCRIPTS_URL=image:///usr/libexec/s2i STI_SCRIPTS_PATH=/usr/libexec/s2i APP_ROOT=/opt/app-root BASH_ENV=/opt/app-root/etc/scl_enable ENV=/opt/app-root/etc/scl_enable PROMPT_COMMAND=. /opt/app-root/etc/scl_enable LD_LIBRARY_PATH=/opt/rh/rh-ruby26/root/usr/local/lib64:/opt/rh/rh-ruby26/root/usr/lib64:/opt/rh/rh-nodejs14/root/usr/lib64 X_SCLS=rh-nodejs14 rh-ruby26 MANPATH=/opt/rh/rh-ruby26/root/usr/local/share/man:/opt/rh/rh-ruby26/root/usr/share/man:/opt/rh/rh-nodejs14/root/usr/share/man: XDG_DATA_DIRS=/opt/rh/rh-ruby26/root/usr/local/share:/opt/rh/rh-ruby26/root/usr/share:/usr/local/share:/usr/share PKG_CONFIG_PATH=/opt/rh/rh-ruby26/root/usr/local/lib64/pkgconfig:/opt/rh/rh-ruby26/root/usr/lib64/pkgconfig """ ccs = ContainerCISuite(image_name="f32/nodejs:12") ret = ccs.test_check_envs_set( env_filter="^X_SCLS=|/opt/rh|/opt/app-root", check_envs=exec_envs, loop_envs=run_envs ) assert not ret def test_check_envs_set_not_in_run_envs_not_path(self): run_envs = """MANPATH=/opt/rh/rh-ruby26/root/usr/local/share/man:/opt/rh/rh-ruby26/root/usr/share/man:/opt/rh/rh-nodejs14/root/usr/share/man: APP_ROOT=/opt/app-root X_SCLS=rh-nodejs14 rh-ruby26 
LD_LIBRARY_PATH=/opt/rh/rh-ruby26/root/usr/local/lib64:/opt/rh/rh-ruby26/root/usr/lib64:/opt/rh/rh-nodejs14/root/usr/lib64 PATH=/opt/rh/rh-ruby26/root/usr/local/bin:/opt/rh/rh-ruby26/root/usr/bin:/opt/rh/rh-nodejs14/root/usr/bin:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin STI_SCRIPTS_URL=image:///usr/libexec/s2i STI_SCRIPTS_PATH=/usr/libexec/s2i HOME=/opt/app-root/src XDG_DATA_DIRS=/opt/rh/rh-ruby26/root/usr/local/share:/opt/rh/rh-ruby26/root/usr/share:/usr/local/share:/usr/share PKG_CONFIG_PATH=/opt/rh/rh-ruby26/root/usr/local/lib64/pkgconfig:/opt/rh/rh-ruby26/root/usr/lib64/pkgconfig RUBY_VERSION=2.6""" exec_envs = """PATH=/opt/rh/rh-ruby26/root/usr/local/bin:/opt/rh/rh-nodejs14/root/usr/bin:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin STI_SCRIPTS_URL=image:///usr/libexec/s2i STI_SCRIPTS_PATH=/usr/libexec/s2i APP_ROOT=/opt/app-root BASH_ENV=/opt/app-root/etc/scl_enable ENV=/opt/app-root/etc/scl_enable PROMPT_COMMAND=. /opt/app-root/etc/scl_enable LD_LIBRARY_PATH=/opt/rh/rh-ruby26/root/usr/local/lib64:/opt/rh/rh-ruby26/root/usr/lib64:/opt/rh/rh-nodejs14/root/usr/lib64 X_SCLS=rh-nodejs14 rh-ruby26 HOME=/opt/app-root/src MANPATH=/opt/rh/rh-ruby26/root/usr/local/share/man:/opt/rh/rh-ruby26/root/usr/share/man:/opt/rh/rh-nodejs14/root/usr/share/man: XDG_DATA_DIRS=/opt/rh/rh-ruby26/root/usr/local/share:/opt/rh/rh-ruby26/root/usr/share:/usr/local/share:/usr/share PKG_CONFIG_PATH=/opt/rh/rh-ruby26/root/usr/local/lib64/pkgconfig:/opt/rh/rh-ruby26/root/usr/lib64/pkgconfig """ ccs = ContainerCISuite(image_name="f32/nodejs:12") ret = ccs.test_check_envs_set( env_filter="^X_SCLS=|/opt/rh|/opt/app-root", check_envs=exec_envs, loop_envs=run_envs ) assert not ret
53.333333
229
0.741447
1,600
9,120
4.0925
0.1375
0.061087
0.082315
0.117135
0.742211
0.706017
0.694258
0.686622
0.686622
0.676542
0
0.038668
0.101096
9,120
170
230
53.647059
0.760063
0.119298
0
0.666667
0
0.217391
0.734116
0.660717
0
0
0
0
0.021739
1
0.028986
false
0
0.057971
0
0.094203
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
89200de0b5c0bc87babd0eee0f07388f6c0b43d5
1,413
py
Python
lib/flows/tests.py
nahidupa/grr
100a9d85ef2abb234e12e3ac2623caffb4116be7
[ "Apache-2.0" ]
1
2016-02-13T15:40:20.000Z
2016-02-13T15:40:20.000Z
lib/flows/tests.py
nahidupa/grr
100a9d85ef2abb234e12e3ac2623caffb4116be7
[ "Apache-2.0" ]
3
2020-09-11T12:54:50.000Z
2020-09-11T12:55:01.000Z
lib/flows/tests.py
nahidupa/grr
100a9d85ef2abb234e12e3ac2623caffb4116be7
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python """Loads up all flow tests.""" # pylint: disable=unused-import # These import populate the Flow test registry from grr.lib.flows.console import debugging_test # Cron tests. from grr.lib.flows.cron import compactors_test from grr.lib.flows.cron import filestore_stats_test from grr.lib.flows.cron import system_test from grr.lib.flows.general import administrative_test from grr.lib.flows.general import artifact_fallbacks_test from grr.lib.flows.general import audit_test from grr.lib.flows.general import checks_test from grr.lib.flows.general import collectors_test from grr.lib.flows.general import discovery_test from grr.lib.flows.general import endtoend_test from grr.lib.flows.general import export_test from grr.lib.flows.general import file_finder_test from grr.lib.flows.general import filesystem_test from grr.lib.flows.general import filetypes_test from grr.lib.flows.general import find_test from grr.lib.flows.general import fingerprint_test from grr.lib.flows.general import grep_test from grr.lib.flows.general import memory_test from grr.lib.flows.general import network_test from grr.lib.flows.general import processes_test from grr.lib.flows.general import registry_test from grr.lib.flows.general import timelines_test from grr.lib.flows.general import transfer_test from grr.lib.flows.general import webhistory_test from grr.lib.flows.general import windows_vsc_test
40.371429
57
0.840764
234
1,413
4.948718
0.230769
0.157168
0.224525
0.336788
0.67962
0.67962
0.658031
0
0
0
0
0
0.090587
1,413
34
58
41.558824
0.901167
0.093418
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0.038462
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
8940583c3ee56773c6cb38364d729d4de4246522
148
py
Python
students/groups_context.py
ovod88/studentsdb
ade2cff0eea7a13644a0f708133901457352ff5c
[ "MIT" ]
null
null
null
students/groups_context.py
ovod88/studentsdb
ade2cff0eea7a13644a0f708133901457352ff5c
[ "MIT" ]
null
null
null
students/groups_context.py
ovod88/studentsdb
ade2cff0eea7a13644a0f708133901457352ff5c
[ "MIT" ]
null
null
null
from .models.groups import Group from .utils import get_groups def groups_context_processor(request): return {'GROUPS_ALL' : get_groups(request)}
24.666667
44
0.804054
21
148
5.428571
0.619048
0.157895
0
0
0
0
0
0
0
0
0
0
0.108108
148
6
44
24.666667
0.863636
0
0
0
0
0
0.067114
0
0
0
0
0
0
1
0.25
false
0
0.5
0.25
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
0
0
0
5
894837e000fcacf851767951a474a713927ae7bc
155
py
Python
Exercicios do curso em video/pythonProject/pythonexercicios/ex006.py
HiCosta/Exercicios-de-Python
1dabbe8764f6742aba1d62a9ea14705534aaf6f7
[ "MIT" ]
null
null
null
Exercicios do curso em video/pythonProject/pythonexercicios/ex006.py
HiCosta/Exercicios-de-Python
1dabbe8764f6742aba1d62a9ea14705534aaf6f7
[ "MIT" ]
null
null
null
Exercicios do curso em video/pythonProject/pythonexercicios/ex006.py
HiCosta/Exercicios-de-Python
1dabbe8764f6742aba1d62a9ea14705534aaf6f7
[ "MIT" ]
null
null
null
num = int(input('Digite um numero: ')) print('O dobro de', num, 'é {}'.format(num*2), 'seu triplo é {}'.format(num*3), 'e sua raiz é {}'.format(num**0.5))
51.666667
115
0.593548
29
155
3.172414
0.724138
0.228261
0.326087
0
0
0
0
0
0
0
0
0.029851
0.135484
155
2
116
77.5
0.656716
0
0
0
0
0
0.4
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
5
89709e56ef0d260a28aaa3c1cdfbd6a255ff909e
5,799
py
Python
backend/apps/workorder/views/workorder.py
bopopescu/Journey
654eb66e0e2df59e916eff4c75b68b183f9b58b5
[ "MIT" ]
41
2019-01-02T09:36:54.000Z
2022-02-20T13:13:05.000Z
backend/apps/workorder/views/workorder.py
bopopescu/Journey
654eb66e0e2df59e916eff4c75b68b183f9b58b5
[ "MIT" ]
15
2019-09-30T05:40:20.000Z
2022-02-17T19:28:41.000Z
backend/apps/workorder/views/workorder.py
bopopescu/Journey
654eb66e0e2df59e916eff4c75b68b183f9b58b5
[ "MIT" ]
23
2019-02-18T10:50:10.000Z
2022-01-06T07:53:18.000Z
# -*- coding: UTF-8 -*- from django.db.models import Q from rest_framework.response import Response from rest_framework.views import APIView from workorder.models.sqlorder import * from workorder.models.autoorder import * from workorder.serializers.workorder import * from user.permissions import CustomerPremission from rest_framework.permissions import IsAuthenticated, IsAdminUser, IsAuthenticatedOrReadOnly, AllowAny import time,datetime import pytz import logging # 生成一个以当前文件名为名字的logger实例 logger = logging.getLogger('default') class AllWorkOrderViewSet(APIView): # 权限相关标识 permission_classes = [CustomerPremission,IsAuthenticated] module_perms = ['workorder:allworkorder'] def get(self,request,format=None): userinfo = self.request.user username = userinfo.username results = [] # 获取最近7天内的工单 laste_time = (datetime.date.today() - datetime.timedelta(days=7)).strftime('%Y-%m-%d %X') laste_sqlorders = SqlOrder.objects.filter(Q(create_time__gt=laste_time)) laste_autoorders = AutoOrder.objects.filter(Q(create_time__gt=laste_time)) # //获取sql工单列表 for sqlorder in laste_sqlorders: sqlorder_serializer = AllWorkOrderSerializer(sqlorder) results.append(sqlorder_serializer.data) # 获取自助工单列表 for autoorder in laste_autoorders: autoorder_serializer = AllWorkOrderSerializer(autoorder) results.append(autoorder_serializer.data) sort_results = sorted(results, key=lambda k: k['create_time'],reverse=True) re = { 'results': '',} re['results'] = sort_results return Response(re) def post(self,request,format=None): userinfo = self.request.user username = userinfo.username results = [] # 获取7天内的初始时间 laste_time = (datetime.date.today() - datetime.timedelta(days=7)).strftime('%Y-%m-%d %X') searchtype = request.data['searchtype'] timerange = request.data['timerange'] if (searchtype == 'my'): if (timerange): date_from = timerange[0] date_to = timerange[1] searchsqlorders = SqlOrder.objects.filter(Q(creator=self.request.user.username),Q(create_time__range=(date_from,date_to))).order_by('-id') 
searchautoorders = AutoOrder.objects.filter(Q(creator=self.request.user.username),Q(create_time__range=(date_from,date_to))).order_by('-id') else: searchsqlorders = SqlOrder.objects.filter(Q(creator=self.request.user.username),Q(create_time__gt=laste_time)).order_by('-id') searchautoorders = AutoOrder.objects.filter(Q(creator=self.request.user.username),Q(create_time__gt=laste_time)).order_by('-id') elif (searchtype == 'time'): if (timerange): date_from = timerange[0] date_to = timerange[1] searchsqlorders = SqlOrder.objects.filter(Q(create_time__range=(date_from,date_to))).order_by('-id') searchautoorders = AutoOrder.objects.filter(Q(create_time__range=(date_from,date_to))).order_by('-id') # 获取待办的工单 elif (searchtype == 'todo'): todo_order_status = [-1,1,2] user_approvalgroup_list = [] for i in userinfo.approver_user.all(): user_approvalgroup_list.append(i.id) if (timerange): date_from = timerange[0] date_to = timerange[1] searchsqlorders = SqlOrder.objects.filter(~Q(status__in=todo_order_status),Q(approver_group_id__in=user_approvalgroup_list),Q(create_time__range=(date_from,date_to))).order_by('-id') searchautoorders = AutoOrder.objects.filter(~Q(status__in=todo_order_status),Q(approver_group_id__in=user_approvalgroup_list),Q(create_time__range=(date_from,date_to))).order_by('-id') else: searchsqlorders = SqlOrder.objects.filter(~Q(status__in=todo_order_status),Q(approver_group_id__in=user_approvalgroup_list),Q(create_time__gt=laste_time)).order_by('-id') searchautoorders = AutoOrder.objects.filter(~Q(status__in=todo_order_status),Q(approver_group_id__in=user_approvalgroup_list),Q(create_time__gt=laste_time)).order_by('-id') elif (searchtype == 'search'): searchcontent = request.data['searchcontent'] if (timerange): date_from = timerange[0] date_to = timerange[1] searchsqlorders = SqlOrder.objects.filter(Q(title__contains=searchcontent)|Q(creator__contains=searchcontent),Q(create_time__range=(date_from,date_to))).order_by('-id') searchautoorders = 
AutoOrder.objects.filter(Q(title__contains=searchcontent)|Q(creator__contains=searchcontent),Q(create_time__range=(date_from,date_to))).order_by('-id') else: searchsqlorders = SqlOrder.objects.filter(Q(title__contains=searchcontent)|Q(creator__contains=searchcontent),Q(create_time__gt=laste_time)).order_by('-id') searchautoorders = AutoOrder.objects.filter(Q(title__contains=searchcontent)|Q(creator__contains=searchcontent),Q(create_time__gt=laste_time)).order_by('-id') # //获取sql工单列表 for sqlorder in searchsqlorders: sqlorder_serializer = AllWorkOrderSerializer(sqlorder) results.append(sqlorder_serializer.data) # 获取自助工单列表 for autoorder in searchautoorders: autoorder_serializer = AllWorkOrderSerializer(autoorder) results.append(autoorder_serializer.data) sort_results = sorted(results, key=lambda k: k['create_time'],reverse=True) re = { 'results': '',} re['results'] = sort_results return Response(re)
55.228571
200
0.677875
654
5,799
5.732416
0.178899
0.048013
0.059749
0.046946
0.740731
0.740731
0.736196
0.736196
0.717525
0.717525
0
0.003487
0.208829
5,799
105
201
55.228571
0.813644
0.021211
0
0.448276
0
0
0.03371
0.003883
0
0
0
0
0
1
0.022989
false
0
0.126437
0
0.206897
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
8979c04dff2c16dfdeb45d5fcfa7969314bc19fa
25
py
Python
python/testData/inspections/PyArgumentListInspection/InitializingImportedTypingNamedTupleInheritor/a.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2018-12-29T09:53:39.000Z
2018-12-29T09:53:42.000Z
python/testData/inspections/PyArgumentListInspection/InitializingImportedTypingNamedTupleInheritor/a.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/inspections/PyArgumentListInspection/InitializingImportedTypingNamedTupleInheritor/a.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
import b b.MyTup(5, "5")
8.333333
15
0.6
6
25
2.5
0.666667
0
0
0
0
0
0
0
0
0
0
0.095238
0.16
25
3
15
8.333333
0.619048
0
0
0
0
0
0.038462
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
89894b4c0e7aa909a8182b5cb7b00664994882dd
8,375
py
Python
test/test_statistics.py
dataware-tools/pydtk
1da61fb8ca90de6c39a371a9b2b65f4473932991
[ "Apache-2.0" ]
11
2020-10-09T01:29:18.000Z
2022-01-21T13:21:40.000Z
test/test_statistics.py
dataware-tools/pydtk
1da61fb8ca90de6c39a371a9b2b65f4473932991
[ "Apache-2.0" ]
64
2020-10-20T04:55:22.000Z
2022-01-24T15:52:32.000Z
test/test_statistics.py
dataware-tools/pydtk
1da61fb8ca90de6c39a371a9b2b65f4473932991
[ "Apache-2.0" ]
1
2021-07-30T04:52:38.000Z
2021-07-30T04:52:38.000Z
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # Copyright Toolkit Authors """Test base statistic calculation script with Pytest.""" import pytest @pytest.mark.extra @pytest.mark.ros def test_base_statistic_calculation(): """Run the base statistic calculation test.""" import pandas as pd from pydtk.io import BaseFileReader from pydtk.statistics import BaseStatisticCalculation path = 'test/records/rosbag_model_test/data/records.bag' reader = BaseFileReader() timestamps, data, columns = reader.read(path=path, contents='/vehicle/acceleration') target_span = 0.3 calculator = BaseStatisticCalculation(target_span) stat_df = calculator.statistic_tables(timestamps, data, columns) assert isinstance(stat_df, pd.core.frame.DataFrame) def _test_v2_db_statistic(): """Run the v2 DB statistics test.""" import pandas as pd from pydtk.io import BaseFileReader from pydtk.statistics import BaseStatisticCalculation path = 'test/records/rosbag_model_test/data/records.bag' reader = BaseFileReader() timestamps, data, columns = reader.read(path=path, contents='/vehicle/acceleration') target_span = 0.3 calculator = BaseStatisticCalculation(target_span) stat_df = calculator.statistic_tables(timestamps, data, columns) assert isinstance(stat_df, pd.core.frame.DataFrame) from pydtk.db.v2 import TimeSeriesDBHandler from requests.exceptions import ConnectionError try: db_handler = TimeSeriesDBHandler( df_name='test_statistics_span_0.3' ) db_handler.df = stat_df db_handler.save() except ConnectionError as e: print(str(e)) def _test_v2_db_statistic_search(): """Run the v2 DB statistics search test.""" from pydtk.db import V2TimeSeriesDBHandler from requests.exceptions import ConnectionError from pydtk.db import V2TimeSeriesDBSearchEngine span = 60 content = '/vehicle/acceleration' try: db_handler = V2TimeSeriesDBHandler(db_name='statistics', read_on_init=False) engine = V2TimeSeriesDBSearchEngine(db_handler=db_handler, content=content, span=span) 
engine.add_condition('"/vehicle/acceleration/accel_linear_x/min" > 0') candidates = engine.search() print(candidates) except ConnectionError as e: print(str(e)) def _test_v3_db_statistic(): """Run the v3 DB statistics test.""" from cassandra.cluster import NoHostAvailable import pandas as pd from pydtk.io import BaseFileReader from pydtk.statistics import BaseStatisticCalculation path = 'test/records/rosbag_model_test/data/records.bag' reader = BaseFileReader() timestamps, data, columns = reader.read(path=path, contents='/vehicle/acceleration') target_span = 0.3 calculator = BaseStatisticCalculation(target_span) stat_df = calculator.statistic_tables(timestamps, data, columns) assert isinstance(stat_df, pd.core.frame.DataFrame) from pydtk.db import V3TimeSeriesCassandraDBHandler try: db_handler = V3TimeSeriesCassandraDBHandler( df_name='test_statistics_span_0' ) db_handler.df = stat_df db_handler.save() except NoHostAvailable as e: print(str(e)) def _test_v3_db_statistic_search(): """Run the v3 DB statistics search test.""" import time from cassandra.cluster import NoHostAvailable from pydtk.db import V3TimeSeriesCassandraDBHandler from pydtk.db import V3TimeSeriesCassandraDBSearchEngine try: db_handler = V3TimeSeriesCassandraDBHandler(read_on_init=False) engine = V3TimeSeriesCassandraDBSearchEngine(db_handler=db_handler) engine.add_condition('"/vehicle/acceleration/accel_linear_x/min" > 0') t1 = time.time() candidates = engine.search() t2 = time.time() print(candidates) print('Search execution time: {0:.03f} sec.'.format(t2 - t1)) except NoHostAvailable as e: print(str(e)) @pytest.mark.extra @pytest.mark.ros def test_v3_db_statistic_sqlite(): """Run the v3 DB statistics test.""" import pandas as pd from pydtk.io import BaseFileReader from pydtk.statistics import BaseStatisticCalculation from pydtk.db import V3DBHandler as DBHandler path = 'test/records/rosbag_model_test/data/records.bag' reader = BaseFileReader() timestamps, data, columns = reader.read(path=path, 
contents='/vehicle/acceleration') # timestamps, data, columns = reader.read(path=path, contents='/vehicle/analog/speed_pulse') target_span = 0.3 calculator = BaseStatisticCalculation(target_span) stat_df = calculator.statistic_tables(timestamps, data, columns) stat_df['record_id'] = 'B05_17000000010000000829' assert isinstance(stat_df, pd.core.frame.DataFrame) db_handler = DBHandler( db_class='time_series', db_engine='sqlite', db_host='test/test_statistics.db', db_username='', db_password='', db_name='', read_on_init=False, ) db_handler.df = stat_df db_handler.save(remove_duplicates=True) @pytest.mark.extra @pytest.mark.ros def test_v3_db_statistic_sqlite_2(): """Run the v3 DB statistics test.""" import pandas as pd from pydtk.io import BaseFileReader from pydtk.statistics import BaseStatisticCalculation from pydtk.db import V3DBHandler as DBHandler path = 'test/records/rosbag_model_test/data/records.bag' reader = BaseFileReader() timestamps, data, columns = reader.read(path=path, contents='/vehicle/acceleration') # timestamps, data, columns = reader.read(path=path, contents='/vehicle/analog/speed_pulse') target_span = 0.3 calculator = BaseStatisticCalculation(target_span) stat_df = calculator.statistic_tables(timestamps, data, columns) stat_df['record_id'] = 'B05_17000000010000000829' assert isinstance(stat_df, pd.core.frame.DataFrame) db_handler = DBHandler( database_id='test', span=0.3, db_class='statistics', db_engine='sqlite', db_host='test/test_statistics.db', db_username='', db_password='', db_name='', read_on_init=False, ) db_handler.df = stat_df db_handler.save(remove_duplicates=True) @pytest.mark.extra @pytest.mark.ros def test_v3_db_statistic_search_sqlite(): """Run the v3 DB statistics search test.""" import time from pydtk.db import V3DBHandler as DBHandler from pydtk.db import V3DBSearchEngine as DBSearchEngine db_handler = DBHandler( database_id='test', span=0.3, db_class='statistics', db_engine='sqlite', db_host='test/test_statistics.db', 
db_username='', db_password='', db_name='', read_on_init=False, ) engine = DBSearchEngine(db_handler=db_handler) engine.add_condition('"/vehicle/acceleration/accel_linear_x/min" < 0') t1 = time.time() candidates = engine.search() t2 = time.time() print(candidates) print('Search execution time: {0:.03f} sec.'.format(t2 - t1)) @pytest.mark.extra @pytest.mark.ros def test_base_statistic_calculation_with_sync_timestamp(): """Run the base statistic calculation test.""" from pydtk.io import BaseFileReader from pydtk.statistics import BaseStatisticCalculation path = 'test/records/rosbag_model_test/data/records.bag' reader = BaseFileReader() timestamps, data, columns = reader.read(path=path, contents='/vehicle/acceleration') target_span = 0.3 calculator = BaseStatisticCalculation(target_span, sync_timestamps=False) stat_df = calculator.statistic_tables(timestamps, data, columns) result = stat_df.to_dict(orient='record') calculator_sync = BaseStatisticCalculation(target_span, sync_timestamps=True) stat_df_sync = calculator_sync.statistic_tables(timestamps, data, columns) result_sync = stat_df_sync.to_dict(orient='record') for without_sync, with_sync in zip(result, result_sync): assert without_sync['timestamp'] // target_span * target_span == with_sync['timestamp'] if __name__ == '__main__': # test_v3_db_statistic_sqlite() # test_v3_db_statistic_sqlite_2() # test_v3_db_statistic_search_sqlite() test_base_statistic_calculation_with_sync_timestamp()
32.714844
96
0.713075
1,005
8,375
5.725373
0.135323
0.034411
0.054745
0.02659
0.842023
0.764338
0.719499
0.6943
0.685262
0.663191
0
0.017394
0.18997
8,375
255
97
32.843137
0.830778
0.086209
0
0.770492
0
0
0.121791
0.094536
0
0
0
0
0.032787
1
0.04918
false
0.016393
0.185792
0
0.234973
0.04918
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
8999cd4f579c539cbfca7c1b60bb7c9cacf27566
77
py
Python
asyncrai/arai/__init__.py
Z-Map/async-rai
9aa59f1f87959bf249088485e253c41c95c4e578
[ "MIT" ]
null
null
null
asyncrai/arai/__init__.py
Z-Map/async-rai
9aa59f1f87959bf249088485e253c41c95c4e578
[ "MIT" ]
null
null
null
asyncrai/arai/__init__.py
Z-Map/async-rai
9aa59f1f87959bf249088485e253c41c95c4e578
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Async RAI module """ from . import errors, arai
12.833333
26
0.571429
10
77
4.4
1
0
0
0
0
0
0
0
0
0
0
0.016393
0.207792
77
5
27
15.4
0.704918
0.506494
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
89a744fb04f740285b9a1b36e33671802f2a3f1e
2,403
py
Python
ibmdbpy/tests/test_frame_private.py
marc-mclean1/ibmdbpy
46d885e793da52c58424885d74ab1a6668c391b3
[ "BSD-3-Clause" ]
21
2016-02-18T13:10:48.000Z
2020-11-09T00:09:07.000Z
ibmdbpy/tests/test_frame_private.py
marc-mclean1/ibmdbpy
46d885e793da52c58424885d74ab1a6668c391b3
[ "BSD-3-Clause" ]
57
2016-02-29T15:14:05.000Z
2021-07-23T07:19:41.000Z
ibmdbpy/tests/test_frame_private.py
marc-mclean1/ibmdbpy
46d885e793da52c58424885d74ab1a6668c391b3
[ "BSD-3-Clause" ]
17
2016-01-04T07:11:37.000Z
2021-11-05T12:45:41.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- #----------------------------------------------------------------------------- # Copyright (c) 2015, IBM Corp. # All rights reserved. # # Distributed under the terms of the BSD Simplified License. # # The full license is in the LICENSE file, distributed with this software. #----------------------------------------------------------------------------- """ Test module for private methods of IdaDataFrame """ from __future__ import unicode_literals from __future__ import print_function from __future__ import division from __future__ import absolute_import from future import standard_library standard_library.install_aliases() import pandas class Test_IdaDataFrame_PrivateMethods(object): def test_idadf_clone(self, idadf): pass def test_idadf_clone_as_series(self, idadf): pass def test_idadf_get_type(self, idadf): pass def test_idadf_get_columns(self, idadf): pass def test_idadf_get_all_columns_in_table(self, idadf): pass def test_idadf_get_index(self, idadf): pass def test_idadf_get_shape(self, idadf): pass def test_idadf_get_columns_dtypes(self, idadf): pass def test_idadf_table_def(self, idadf): to_assert = idadf._table_def() assert isinstance(to_assert, pandas.core.frame.DataFrame) assert to_assert.shape[0] == len(idadf.columns) assert to_assert.shape[1] == 3 def test_idadf_get_numerical_columns(self, idadf): pass def test_idadf_reset_attributes(self, idadf): pass def test_combine_check(self, idadf): pass def test_idadf_prepare_and_execute(self, idadf): pass def test_idadf_autocommit(self, idadf): pass def test_idadf_check_connection(self, idadf): pass def test_idadf_numeric_stats(self, idadf): pass def test_idadf_get_percentiles(self, idadf): pass def test_idadf_categorical_stats(self, idadf): pass def test_idadf_get_number_of_nas(self, idadf): pass def test_idadf_count_level(self, idadf): pass def test_idadf_counbt_level_groupby(self, idadf): pass def test_idadf_factors_count(self, idadf): pass def 
test_idadf_factors_sum(self, idadf): pass def test_idadf_factors_avg(self, idadf): pass
24.272727
78
0.650437
303
2,403
4.79868
0.330033
0.115543
0.189821
0.242091
0.427098
0.413343
0.258597
0.093535
0
0
0
0.00426
0.218477
2,403
99
79
24.272727
0.769968
0.177695
0
0.389831
0
0
0
0
0
0
0
0
0.067797
1
0.40678
false
0.389831
0.101695
0
0.525424
0.016949
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
5
89abe9e14b2d472a6c222b7f4e4e9a4157836ed8
39
py
Python
chapter4/4_7_2.py
kungbob/Machine_Learning_In_Action
007db9d2a6c957d314ecd0b4322cad5b04da7113
[ "MIT" ]
null
null
null
chapter4/4_7_2.py
kungbob/Machine_Learning_In_Action
007db9d2a6c957d314ecd0b4322cad5b04da7113
[ "MIT" ]
1
2018-01-05T15:48:33.000Z
2018-01-05T15:54:22.000Z
chapter4/4_7_2.py
kungbob/Machine_Learning_In_Action
007db9d2a6c957d314ecd0b4322cad5b04da7113
[ "MIT" ]
2
2019-02-12T01:35:20.000Z
2019-03-24T03:00:51.000Z
import bayes bayes.getTopWords(ny,sf)
9.75
24
0.794872
6
39
5.166667
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.102564
39
3
25
13
0.885714
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
98236e394645af4e13012ee612636e97073553a1
205
py
Python
Validation/RecoEgamma/python/egammaPostValidationMiniAOD_cff.py
ckamtsikis/cmssw
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
[ "Apache-2.0" ]
852
2015-01-11T21:03:51.000Z
2022-03-25T21:14:00.000Z
Validation/RecoEgamma/python/egammaPostValidationMiniAOD_cff.py
ckamtsikis/cmssw
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
[ "Apache-2.0" ]
30,371
2015-01-02T00:14:40.000Z
2022-03-31T23:26:05.000Z
Validation/RecoEgamma/python/egammaPostValidationMiniAOD_cff.py
ckamtsikis/cmssw
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
[ "Apache-2.0" ]
3,240
2015-01-02T05:53:18.000Z
2022-03-31T17:24:21.000Z
import FWCore.ParameterSet.Config as cms from Validation.RecoEgamma.electronPostValidationSequenceMiniAOD_cff import * egammaPostValidationMiniAOD = cms.Sequence( electronPostValidationSequenceMiniAOD )
34.166667
83
0.882927
16
205
11.25
0.8125
0
0
0
0
0
0
0
0
0
0
0
0.073171
205
5
84
41
0.947368
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
9879cf6b9b29119169ce5fd2e733fdf88c94c5a9
53
py
Python
resolvere/__init__.py
dukhniav/reolvere
34a8b3d8a63da9615f47e9c608535cca18ace6ff
[ "MIT" ]
null
null
null
resolvere/__init__.py
dukhniav/reolvere
34a8b3d8a63da9615f47e9c608535cca18ace6ff
[ "MIT" ]
null
null
null
resolvere/__init__.py
dukhniav/reolvere
34a8b3d8a63da9615f47e9c608535cca18ace6ff
[ "MIT" ]
null
null
null
import buy import sell from .config import coin_list
13.25
29
0.830189
9
53
4.777778
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.150943
53
3
30
17.666667
0.955556
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
98a58ef8fabfff2f39936546a04e564d0c9be221
357
py
Python
src/109_swap_even_and_odd_bits.py
redfast00/daily-algorithm-challenge
3507164d5ec58abe68a6e820120625e100dee96c
[ "MIT" ]
null
null
null
src/109_swap_even_and_odd_bits.py
redfast00/daily-algorithm-challenge
3507164d5ec58abe68a6e820120625e100dee96c
[ "MIT" ]
null
null
null
src/109_swap_even_and_odd_bits.py
redfast00/daily-algorithm-challenge
3507164d5ec58abe68a6e820120625e100dee96c
[ "MIT" ]
null
null
null
def swap_even_and_odd_bits(n): '''Swaps the even and odd bits of an unsigned, 8-bits number. >>> swap_even_and_odd_bits(0b10101010) == 0b01010101 True >>> swap_even_and_odd_bits(0b11100010) == 0b11010001 True ''' assert 0 <= n <= 0xFF, 'Not an 8-bit unsigned number' return ((n & 0b10101010) >> 1) | ((n & 0b01010101) << 1)
32.454545
65
0.638655
52
357
4.153846
0.5
0.12963
0.185185
0.259259
0.25
0
0
0
0
0
0
0.216607
0.22409
357
10
66
35.7
0.563177
0.490196
0
0
0
0
0.180645
0
0
0
0.025806
0
0.333333
1
0.333333
false
0
0
0
0.666667
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
7f7cf03f04f2ce7e8bd8da2035501b8faa3af38c
59
py
Python
beginning-game-development/Chapter 12/12-3.py
CrtomirJuren/pygame-projects
f710f36050bfe3ece866bbda7d570caa1e037d7a
[ "MIT" ]
43
2015-09-20T02:05:48.000Z
2022-03-01T22:00:43.000Z
beginning-game-development/Chapter 12/12-3.py
CrtomirJuren/pygame-projects
f710f36050bfe3ece866bbda7d570caa1e037d7a
[ "MIT" ]
null
null
null
beginning-game-development/Chapter 12/12-3.py
CrtomirJuren/pygame-projects
f710f36050bfe3ece866bbda7d570caa1e037d7a
[ "MIT" ]
40
2015-05-19T06:51:13.000Z
2022-03-27T18:11:16.000Z
def additive_blend(src, dst): return src * src.a + dst
19.666667
29
0.661017
10
59
3.8
0.7
0
0
0
0
0
0
0
0
0
0
0
0.220339
59
2
30
29.5
0.826087
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
7f834f710e04553b1f9e56644c1eda2eb184a9e1
151
py
Python
adelie/core/__init__.py
JRBCH/adelie
02a12fcb424866635d248c356cc236f4ae5d12af
[ "MIT" ]
null
null
null
adelie/core/__init__.py
JRBCH/adelie
02a12fcb424866635d248c356cc236f4ae5d12af
[ "MIT" ]
null
null
null
adelie/core/__init__.py
JRBCH/adelie
02a12fcb424866635d248c356cc236f4ae5d12af
[ "MIT" ]
null
null
null
from .module import AdelieModule as Module from .neurons import Neurons from .connection import AdelieConnection as Connection from .input import Input
37.75
54
0.847682
20
151
6.4
0.45
0
0
0
0
0
0
0
0
0
0
0
0.125828
151
4
55
37.75
0.969697
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
7f855138f28cf7e810d1b05210a9b14dbfa750f5
289
py
Python
rex/exploit/cgc/__init__.py
tiedaoxiaotubie/rex
049bbce3ab2717cbb4d2f0fc10fe8c0433b39c1d
[ "BSD-2-Clause" ]
471
2016-08-20T19:25:33.000Z
2019-12-06T04:46:55.000Z
rex/exploit/cgc/__init__.py
tiedaoxiaotubie/rex
049bbce3ab2717cbb4d2f0fc10fe8c0433b39c1d
[ "BSD-2-Clause" ]
42
2016-08-21T04:26:43.000Z
2019-12-18T02:02:58.000Z
rex/exploit/cgc/__init__.py
tiedaoxiaotubie/rex
049bbce3ab2717cbb4d2f0fc10fe8c0433b39c1d
[ "BSD-2-Clause" ]
101
2016-08-21T02:21:26.000Z
2019-09-12T07:22:24.000Z
from .cgc_type1_exploit import CGCType1Exploit from .cgc_type2_exploit import CGCType2Exploit from .cgc_exploit import CGCExploit from .type1 import CGCType1RopExploit, CGCType1ShellcodeExploit, CGCType1CircumstantialExploit from .type2 import CGCType2RopExploit, CGCType2ShellcodeExploit
48.166667
94
0.892734
28
289
9.035714
0.5
0.083004
0
0
0
0
0
0
0
0
0
0.041353
0.079585
289
5
95
57.8
0.909774
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
f6c7b454eb1e0172b5b5ab8241c30bd60292ac58
13,476
py
Python
tests/test_ascii_art.py
JustinHBird/ascii_art
b34684a706863d38e3e1cbb6b45ac868b8d9c528
[ "MIT" ]
null
null
null
tests/test_ascii_art.py
JustinHBird/ascii_art
b34684a706863d38e3e1cbb6b45ac868b8d9c528
[ "MIT" ]
4
2021-06-08T20:33:30.000Z
2022-03-12T00:03:56.000Z
tests/test_ascii_art.py
JustinHBird/ascii_art
b34684a706863d38e3e1cbb6b45ac868b8d9c528
[ "MIT" ]
null
null
null
import os import pytest from PIL import Image import ascii_art from ascii_art import Brightness, AsciiArt @pytest.fixture(scope='function') def test_image_path(): """Test image pixel array for testing basic image functions.""" # Setup: Create PIL Image object from pixel array test_pixel_matrix = [ [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 
0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 
0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 
0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 
254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)], [(254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (254, 0, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 254, 0), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254), (0, 0, 254)] ] im_width = len(test_pixel_matrix[0]) im_height = len(test_pixel_matrix) im = Image.new('RGB', (im_width , im_height)) px = im.load() for row in range(im_height): for col in range(im_width): px[col, row] = test_pixel_matrix[row][col] # Save test image in test file while testing test_image_path = os.path.join(os.path.dirname(os.path.join(os.path.abspath(__file__))), 'test_img.png') print(test_image_path) im.save(test_image_path) yield test_image_path # Teardown : Delete image file os.remove(test_image_path) def test_construct_AsciiArt_class(test_image_path): # GIVEN a path to an image # WHEN the AsciiArt class is instantiated with the image path as an argument # THEN an AsciiArt object will be created a = AsciiArt(test_image_path) assert type(a).__name__ is 'AsciiArt' @pytest.mark.parametrize('dest_width, dest_height, scale', [ (500, 200, 1), (10, 30, 2), (30, 6, 3) ]) def test_scale_image(test_image_path, dest_width, dest_height, scale): # GIVEN a dest_width and dest_height # WHEN the scale_image method is called # THEN the scale factor will be returned to fit the image on the destination medium a = AsciiArt(test_image_path) assert a.scale_image(dest_width, dest_height) == scale def 
test_image_info(test_image_path, capsys): # GIVEN a PIL Image object # WHEN the image_info method is called on the PIL Image object. # THEN the width and height of the image will be printed img = Image.open(test_image_path) a = AsciiArt(test_image_path) a.image_info() captured = capsys.readouterr() assert captured.out == f'Image size: {img.size[0]} x {img.size[1]}\n' @pytest.mark.parametrize('brightness, brightness_range, inverse, char', [ (0, 255, False, '0' ), (0, 255, True, '5'), (135, 255, False, '3'), (135, 255, True, '2'), (225, 255, False, '4'), (225, 255, True, '1'), (255, 255, False, '5'), (255, 255, True, '0'), ]) def test_brightness_to_char(test_image_path, brightness, brightness_range, inverse, char): # GIVEN a brightness and a brightness_range # WHEN the brightness_to_char method is called # THEN a ascii_char will be returned as maped by the function according to it's # relative brightness in the brightness in the range a = AsciiArt(test_image_path) a.ascii_chars = '012345' a.inverse = inverse assert a.brightness_to_char(brightness, brightness_range) == char @pytest.mark.parametrize('mode, pixel, result', [ ('average', (123, 34, 211), 122), ('ave', (123, 34, 211), 122), ('', (123, 34, 211), 122), ('lightness', (123, 34, 211), 122), ('luminosity', (123, 34, 211), 65), ]) def test_brightness_calc(mode, pixel, result): # GIVEN a brightness calculation # WHEN the brightness object is passed a pixel tuple # THEN the calc method of the brightness class will calculate # the brightness based on the set calculation mode attribute bc = Brightness(mode) assert bc.calc(pixel) == result
94.237762
263
0.413327
2,456
13,476
2.231678
0.058225
0.249954
0.558292
0.630542
0.641854
0.609743
0.591133
0.591133
0.591133
0.591133
0
0.364157
0.313075
13,476
142
264
94.901408
0.227936
0.076952
0
0.411765
0
0
0.01684
0
0
0
0
0
0.04902
1
0.058824
false
0
0.04902
0
0.107843
0.009804
0
0
0
null
1
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
63ff20b575e4317946a0d6af93f33ad65b9c403a
40
py
Python
tests/__init__.py
leslieDLcy/eqstochsim
305607f358622529376f8115f6b75c43813457ad
[ "MIT" ]
null
null
null
tests/__init__.py
leslieDLcy/eqstochsim
305607f358622529376f8115f6b75c43813457ad
[ "MIT" ]
null
null
null
tests/__init__.py
leslieDLcy/eqstochsim
305607f358622529376f8115f6b75c43813457ad
[ "MIT" ]
1
2021-02-19T16:32:47.000Z
2021-02-19T16:32:47.000Z
"""Unit test package for eqstochsim."""
20
39
0.7
5
40
5.6
1
0
0
0
0
0
0
0
0
0
0
0
0.125
40
1
40
40
0.8
0.825
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
63fffe0ab563a2ed9ee4c028701a1897a3292567
44
py
Python
src/bdbdatastore/src/google/protobuf/internal/__init__.py
sprymak/typhoonae
fe31bcc7b21fc14f8aa97b36d66cd7671974543b
[ "Apache-2.0" ]
1
2018-12-02T10:36:07.000Z
2018-12-02T10:36:07.000Z
src/bdbdatastore/src/google/protobuf/internal/__init__.py
sprymak/typhoonae
fe31bcc7b21fc14f8aa97b36d66cd7671974543b
[ "Apache-2.0" ]
null
null
null
src/bdbdatastore/src/google/protobuf/internal/__init__.py
sprymak/typhoonae
fe31bcc7b21fc14f8aa97b36d66cd7671974543b
[ "Apache-2.0" ]
1
2018-12-02T10:36:08.000Z
2018-12-02T10:36:08.000Z
# Empty __init.py__ for package recognition
22
43
0.818182
6
44
5.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.136364
44
1
44
44
0.842105
0.931818
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
120aca77865065c70a1f69b043d2035053d4ad18
1,877
py
Python
dalila/tests/penalty_tests.py
veronicatozzo/DaLiLa
9d8ed4a1adb0a57316e6ca41852da97ffeb23996
[ "BSD-2-Clause" ]
5
2017-06-14T16:30:48.000Z
2018-05-14T07:10:06.000Z
dalila/tests/penalty_tests.py
veronicatozzo/DaLiLa
9d8ed4a1adb0a57316e6ca41852da97ffeb23996
[ "BSD-2-Clause" ]
1
2017-06-25T14:38:48.000Z
2017-06-25T14:38:48.000Z
dalila/tests/penalty_tests.py
veronicatozzo/DaLiLa
9d8ed4a1adb0a57316e6ca41852da97ffeb23996
[ "BSD-2-Clause" ]
2
2017-09-12T06:18:37.000Z
2018-05-03T03:00:32.000Z
from nose.tools import * import numpy as np from dalila.dl.penalty import * @raises(ValueError) def negative_parameter_l1_test(): p = L1Penalty(-1) x = np.random.rand(100,100) p.apply_prox_operator(x, 0.1) @raises(ValueError) def negative_parameter_l2_test(): p = L2Penalty(-1) x = np.random.rand(100,100) p.apply_prox_operator(x, 0.1) @raises(ValueError) def negative_parameter_elasticnet_test(): p = ElasticNetPenalty(-1, -1, .5) x = np.random.rand(100,100) p.apply_prox_operator(x, 0.1) @raises(ValueError) def negative_parameter_l0_test(): p = L0Penalty(-1) x = np.random.rand(100,100) p.apply_prox_operator(x, 0.1) @raises(ValueError) def negative_parameter_grouplasso_test(): p = GroupLassoPenalty([np.arange(0, 50), np.arange(50, 100)], -1) x = np.random.rand(100,100) p.apply_prox_operator(x, 0.1) @raises(ValueError) def negative_parameter_linf_test(): p = LInfPenalty(-1) x = np.random.rand(100,100) p.apply_prox_operator(x, 0.1) @raises(ValueError) def wrong_alpha1_elasticnet_test(): p = ElasticNetPenalty(1, 1, -.5) x = np.random.rand(100,100) p.apply_prox_operator(x, 0.1) @raises(ValueError) def wrong_alpha2_elasticnet_test(): p = ElasticNetPenalty(1, 1, 2) x = np.random.rand(100,100) p.apply_prox_operator(x, 0.1) @raises(ValueError) def too_high_s__l0_test(): p = L0Penalty(10) x = np.random.rand(5,5) p.apply_prox_operator(x, 0.1) @raises(ValueError) def overlapping_groups_grouplasso_test(): p = GroupLassoPenalty([np.arange(0, 55), np.arange(50, 100)], 1) x = np.random.rand(100,100) p.apply_prox_operator(x, 0.1) @raises(ValueError) def non_coverage_groups_grouplasso_test(): p = GroupLassoPenalty([np.arange(0, 30), np.arange(50, 100)], 1) x = np.random.rand(100,100) p.apply_prox_operator(x, 0.1)
22.890244
69
0.689398
298
1,877
4.147651
0.184564
0.142395
0.169094
0.115696
0.830906
0.80178
0.774272
0.7411
0.665049
0.665049
0
0.086569
0.163026
1,877
81
70
23.17284
0.700191
0
0
0.551724
0
0
0
0
0
0
0
0
0
1
0.189655
false
0
0.051724
0
0.241379
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
121dbe22279131c102f34bc0fc68dfd09a529be5
234
py
Python
twitter_watcher/initiatives.py
azubieta/cmtrends
31b98e0e6ceed72f58cf831715ec56468c90386a
[ "Apache-2.0" ]
null
null
null
twitter_watcher/initiatives.py
azubieta/cmtrends
31b98e0e6ceed72f58cf831715ec56468c90386a
[ "Apache-2.0" ]
null
null
null
twitter_watcher/initiatives.py
azubieta/cmtrends
31b98e0e6ceed72f58cf831715ec56468c90386a
[ "Apache-2.0" ]
null
null
null
from twitter_api import api from twitter_api.models import User from twitter_api.models import Status def discover_tweets(since_id): tweets = api.api_call('search', '#Initiative @CMobileTrends', since_id = since_id) return tweets
26
83
0.803419
35
234
5.142857
0.485714
0.183333
0.233333
0.222222
0.288889
0
0
0
0
0
0
0
0.119658
234
8
84
29.25
0.873786
0
0
0
0
0
0.137339
0
0
0
0
0
0
1
0.166667
false
0
0.5
0
0.833333
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
12235c913c206facb6871cda7fbed78248387730
31
py
Python
arvestust/admin/permissions/__init__.py
lehvitus/arvestust
2d508317b744eaf12a643a398ff95723893a046a
[ "BSD-3-Clause" ]
1
2021-09-17T23:45:27.000Z
2021-09-17T23:45:27.000Z
arvestust/admin/permissions/__init__.py
lehvitus/arvestust
2d508317b744eaf12a643a398ff95723893a046a
[ "BSD-3-Clause" ]
3
2020-07-25T05:40:54.000Z
2020-08-11T04:01:19.000Z
arvestust/admin/permissions/__init__.py
lehvitus/arvestust
2d508317b744eaf12a643a398ff95723893a046a
[ "BSD-3-Clause" ]
null
null
null
# lua:file_cabinet:permissions
15.5
30
0.83871
4
31
6.25
1
0
0
0
0
0
0
0
0
0
0
0
0.064516
31
1
31
31
0.862069
0.903226
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
89f1c85b3efd45e5779d5706fd5e5ff2cdb04745
179
py
Python
imagelib/image/segmentation/image_segmenter.py
susautw/imagelib
e582ed2139a15b0aefe16c71b4b476a5b30b7b99
[ "MIT" ]
null
null
null
imagelib/image/segmentation/image_segmenter.py
susautw/imagelib
e582ed2139a15b0aefe16c71b4b476a5b30b7b99
[ "MIT" ]
null
null
null
imagelib/image/segmentation/image_segmenter.py
susautw/imagelib
e582ed2139a15b0aefe16c71b4b476a5b30b7b99
[ "MIT" ]
null
null
null
from abc import ABC, abstractmethod from imagelib.image import Image class ImageSegmenter(ABC): @abstractmethod def segment(self, image: Image) -> Image: pass
16.272727
45
0.709497
21
179
6.047619
0.571429
0.267717
0
0
0
0
0
0
0
0
0
0
0.217877
179
10
46
17.9
0.907143
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0.166667
0.333333
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
5
d6250ce8cbb3a1e4979d3055c2a50d80500f7bf2
41
py
Python
calculator/main.py
ine-rmotr-projects/pyp-w1-gw-demo-project
58c58e5c317be1ee69d0a6ea84cbe687f70dc6ed
[ "MIT" ]
null
null
null
calculator/main.py
ine-rmotr-projects/pyp-w1-gw-demo-project
58c58e5c317be1ee69d0a6ea84cbe687f70dc6ed
[ "MIT" ]
null
null
null
calculator/main.py
ine-rmotr-projects/pyp-w1-gw-demo-project
58c58e5c317be1ee69d0a6ea84cbe687f70dc6ed
[ "MIT" ]
null
null
null
def calculate(x, y, operation): pass
13.666667
31
0.658537
6
41
4.5
1
0
0
0
0
0
0
0
0
0
0
0
0.219512
41
2
32
20.5
0.84375
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
5
d64584a3c79712573058c7b854ccc1cafb087538
314
py
Python
grow/extensions/hooks/deployment_register_hook_test.py
tabulon-ext/grow
2929c3e9b467a7768d5b5055fe965fbb5106603f
[ "MIT" ]
335
2016-04-02T20:12:21.000Z
2022-03-28T18:55:26.000Z
grow/extensions/hooks/deployment_register_hook_test.py
tabulon-ext/grow
2929c3e9b467a7768d5b5055fe965fbb5106603f
[ "MIT" ]
784
2016-04-01T16:56:41.000Z
2022-03-05T01:25:34.000Z
grow/extensions/hooks/deployment_register_hook_test.py
tabulon-ext/grow
2929c3e9b467a7768d5b5055fe965fbb5106603f
[ "MIT" ]
54
2016-05-03T13:06:15.000Z
2021-09-24T04:46:23.000Z
"""Tests for deployment destination registration hook.""" import unittest from grow.extensions.hooks import deployment_register_hook class DeploymentRegisterHookTestCase(unittest.TestCase): """Test the deployment destination registration hook.""" def test_something(self): """?""" pass
24.153846
60
0.738854
31
314
7.387097
0.709677
0.183406
0.28821
0.323144
0
0
0
0
0
0
0
0
0.165605
314
12
61
26.166667
0.874046
0.33121
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0.2
0.4
0
0.8
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
5
d650bf4ca4c457be75bc1b726d4f4f86a394a372
38
py
Python
yaas/task.py
dmtucker/yaas
5487312459326cd5b4218122ebe26e20ead80ece
[ "MIT" ]
null
null
null
yaas/task.py
dmtucker/yaas
5487312459326cd5b4218122ebe26e20ead80ece
[ "MIT" ]
28
2015-09-10T07:56:15.000Z
2016-04-25T16:01:03.000Z
yaas/task.py
tucked/yaas
5487312459326cd5b4218122ebe26e20ead80ece
[ "MIT" ]
4
2015-09-09T22:03:35.000Z
2017-02-14T23:59:27.000Z
# coding: utf-8 class Task: pass
7.6
15
0.605263
6
38
3.833333
1
0
0
0
0
0
0
0
0
0
0
0.037037
0.289474
38
4
16
9.5
0.814815
0.342105
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
c3c992aa250529ab774c5a6774098db710224abd
132
py
Python
samcli/lib/iac/constants.py
torresxb1/aws-sam-cli
d307f2eb6e1a91a476a5e2ca6070f974b0c913f1
[ "BSD-2-Clause", "Apache-2.0" ]
2,959
2018-05-08T21:48:56.000Z
2020-08-24T14:35:39.000Z
samcli/lib/iac/constants.py
torresxb1/aws-sam-cli
d307f2eb6e1a91a476a5e2ca6070f974b0c913f1
[ "BSD-2-Clause", "Apache-2.0" ]
1,469
2018-05-08T22:44:28.000Z
2020-08-24T20:19:24.000Z
samcli/lib/iac/constants.py
torresxb1/aws-sam-cli
d307f2eb6e1a91a476a5e2ca6070f974b0c913f1
[ "BSD-2-Clause", "Apache-2.0" ]
642
2018-05-08T22:09:19.000Z
2020-08-17T09:04:37.000Z
""" General IaC constants """ PARAMETER_OVERRIDES = "parameter_overrides" GLOBAL_PARAMETER_OVERRIDES = "global_parameter_overrides"
22
57
0.825758
13
132
7.923077
0.461538
0.699029
0.466019
0.640777
0.640777
0
0
0
0
0
0
0
0.083333
132
5
58
26.4
0.85124
0.159091
0
0
0
0
0.436893
0.252427
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
c3e81790f8991db85284d9d66cdd0701d61168a2
147
py
Python
bin/helpers/yaml2json.py
mschlenstedt/LoxBerry-Plugin-MultiIO
01adf6599ffa16fa6afd90c3f75eb261dc2d7918
[ "Apache-2.0" ]
null
null
null
bin/helpers/yaml2json.py
mschlenstedt/LoxBerry-Plugin-MultiIO
01adf6599ffa16fa6afd90c3f75eb261dc2d7918
[ "Apache-2.0" ]
null
null
null
bin/helpers/yaml2json.py
mschlenstedt/LoxBerry-Plugin-MultiIO
01adf6599ffa16fa6afd90c3f75eb261dc2d7918
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python3 import json import sys import yaml sys.stdout.write( json.dumps( yaml.safe_load( sys.stdin ), indent=4, sort_keys=True ) )
21
87
0.741497
25
147
4.28
0.76
0
0
0
0
0
0
0
0
0
0
0.015504
0.122449
147
6
88
24.5
0.813953
0.142857
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.75
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
7f01bed492c8a551b0f5c214c43a7d74c1a06657
162
py
Python
tp_screening/models/__init__.py
MoffatMore/tp-screening
20a98ba6fc57118968e507bdf5861cbdcada1848
[ "MIT" ]
null
null
null
tp_screening/models/__init__.py
MoffatMore/tp-screening
20a98ba6fc57118968e507bdf5861cbdcada1848
[ "MIT" ]
null
null
null
tp_screening/models/__init__.py
MoffatMore/tp-screening
20a98ba6fc57118968e507bdf5861cbdcada1848
[ "MIT" ]
null
null
null
from .subject_screening import SubjectScreening from .in_eligible_subject import IneligibleSubject from .subject_rejections import subject_screening_on_post_save
40.5
62
0.907407
20
162
6.95
0.6
0.158273
0
0
0
0
0
0
0
0
0
0
0.074074
162
3
63
54
0.926667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
7f1649f0c3e9f4e3723087c6cbb5ad82edb8a3f0
53
py
Python
bioimageio/spec/model/v0_4/utils.py
bioimage-io/spec-bioimage-io
b534001c67b3903dbd47f80a8b3ff027cd249119
[ "MIT" ]
6
2021-05-10T09:08:12.000Z
2022-03-27T17:44:08.000Z
bioimageio/spec/model/v0_4/utils.py
bioimage-io/spec-bioimage-io
b534001c67b3903dbd47f80a8b3ff027cd249119
[ "MIT" ]
227
2021-03-16T23:40:13.000Z
2022-03-17T14:30:53.000Z
bioimageio/spec/model/v0_4/utils.py
bioimage-io/spec-bioimage-io
b534001c67b3903dbd47f80a8b3ff027cd249119
[ "MIT" ]
5
2021-03-26T14:16:49.000Z
2021-11-28T11:24:40.000Z
from ..v0_3.utils import filter_resource_description
26.5
52
0.867925
8
53
5.375
1
0
0
0
0
0
0
0
0
0
0
0.040816
0.075472
53
1
53
53
0.836735
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
615b3c36968aec36501fb6dc870469d7433b1a98
15,049
py
Python
hiphip/http/client.py
nikitanovosibirsk/hiphip
7ffac0e9e18368c847af873a2638ec13f4bccb28
[ "MIT" ]
null
null
null
hiphip/http/client.py
nikitanovosibirsk/hiphip
7ffac0e9e18368c847af873a2638ec13f4bccb28
[ "MIT" ]
null
null
null
hiphip/http/client.py
nikitanovosibirsk/hiphip
7ffac0e9e18368c847af873a2638ec13f4bccb28
[ "MIT" ]
1
2015-12-01T07:37:10.000Z
2015-12-01T07:37:10.000Z
import errno from json import loads as json_loads, dumps as json_dumps try: import requests from requests.cookies import RequestsCookieJar except ImportError: print("Python module requests is required. Run this in your terminal:") print("$ pip3 install requests") exit(errno.ENOPKG) try: from requests_toolbelt import MultipartEncoder except ImportError: print("Python module requests is required. Run this in your terminal:") print("$ pip3 install requests_toolbelt") exit(errno.ENOPKG) class Client: params = None headers = None cookies = None timeout = None def __merge_params(self, request_params): params = request_params if type(self.params) is dict: for name, value in self.params.items(): params[name] = value return params def __merge_headers(self, request_headers): headers = request_headers if type(self.headers) is dict: for name, value in self.headers.items(): headers[name] = value return headers def __merge_cookies(self, request_cookies): if type(request_cookies) is RequestsCookieJar: cookies = request_cookies else: cookies = RequestsCookieJar() for name, value in request_cookies: cookies.set(name, value) if type(self.cookies) is RequestsCookieJar: for cookie in iter(self.cookies): cookies.set_cookie(cookie) elif type(self.cookies) is dict: for name, value in self.cookies.items(): cookies.set(name, value) return cookies def __patch_data(self, data): patched_data = {} for key, val in data.items(): if type(val) in [type(None), bool, int, float, list, dict]: patched_data[key] = json_dumps(val) else: patched_data[key] = val return patched_data def request(self, method, url, params=None, headers=None, cookies=None, data=None, form=None, json=None, raw=None, timeout=None, allow_redirects=False, **kwargs): """ :param method: HTTP method to use. :type method: str :param url: URL to send. :type url: str :param params: Dictionary of URL parameters to append to the URL. :type params: dict | None :param headers: Dictionary of HTTP Headers to send. 
:type headers: dict | None :param cookies: Dictionary or CookieJar of cookies to attach to this request. :type cookies: dict | None :param data: The body to attach to the application/x-www-form-urlencoded request. :type data: dict | None :param form: The body to attach to the multipart/form-data request. :type form: dict | None :param json: JSON for the body to attach to the request. :type json: dict | None :param timeout: How long to wait for the server to send data before giving up. :type timeout: float | None :param allow_redirects: Set to False by default. :type allow_redirects: bool :return requests.Response """ params = self.__merge_params(params or {}) headers = self.__merge_headers(headers or {}) cookies = self.__merge_cookies(cookies or {}) timeout = self.timeout if (self.timeout is not None) else timeout if form is not None: data = MultipartEncoder(self.__patch_data(form)) headers["content-type"] = data.content_type elif json is not None: data = json_dumps(json) headers["content-type"] = "application/json" elif data is not None: data = self.__patch_data(data) elif raw is not None: data = raw response = requests.request(method=method, url=url, params=params, headers=headers, cookies=cookies, data=data, timeout=timeout, allow_redirects=allow_redirects, **kwargs) response.body = response.text content_type = response.headers.get("content-type") if content_type and "json" in content_type: try: response.body = json_loads(response.text) except ValueError: pass return response def head(self, url, params=None, headers=None, cookies=None, timeout=None, allow_redirects=False): """ :param url: URL to send. :type url: str :param params: Dictionary of URL parameters to append to the URL. :type params: dict | None :param headers: Dictionary of HTTP Headers to send. :type headers: dict | None :param cookies: Dictionary or CookieJar of cookies to attach to this request. 
:type cookies: dict | None :param timeout: How long to wait for the server to send data before giving up. :type timeout: float | None :param allow_redirects: Set to False by default. :type allow_redirects: bool :return requests.Response """ return self.request("HEAD", url=url, params=params, headers=headers, cookies=cookies, timeout=timeout, allow_redirects=allow_redirects) def options(self, url, params=None, headers=None, cookies=None, timeout=None, allow_redirects=False): """ :param url: URL to send. :type url: str :param params: Dictionary of URL parameters to append to the URL. :type params: dict | None :param headers: Dictionary of HTTP Headers to send. :type headers: dict | None :param cookies: Dictionary or CookieJar of cookies to attach to this request. :type cookies: dict | None :param timeout: How long to wait for the server to send data before giving up. :type timeout: float | None :param allow_redirects: Set to False by default. :type allow_redirects: bool :return requests.Response """ return self.request("OPTIONS", url=url, params=params, headers=headers, cookies=cookies, timeout=timeout, allow_redirects=allow_redirects) def get(self, url, params=None, headers=None, cookies=None, timeout=None, allow_redirects=False): """ :param url: URL to send. :type url: str :param params: Dictionary of URL parameters to append to the URL. :type params: dict | None :param headers: Dictionary of HTTP Headers to send. :type headers: dict | None :param cookies: Dictionary or CookieJar of cookies to attach to this request. :type cookies: dict | None :param timeout: How long to wait for the server to send data before giving up. :type timeout: float | None :param allow_redirects: Set to False by default. 
:type allow_redirects: bool :return requests.Response """ return self.request("GET", url, params=params, headers=headers, cookies=cookies, timeout=timeout, allow_redirects=allow_redirects) def post(self, url, params=None, headers=None, cookies=None, data=None, form=None, json=None, raw=None, timeout=None, allow_redirects=False): """ :param url: URL to send. :type url: str :param params: Dictionary of URL parameters to append to the URL. :type params: dict | None :param headers: Dictionary of HTTP Headers to send. :type headers: dict | None :param cookies: Dictionary or CookieJar of cookies to attach to this request. :type cookies: dict | None :param data: The body to attach to the application/x-www-form-urlencoded request. :type data: dict | None :param form: The body to attach to the multipart/form-data request. :type form: dict | None :param json: JSON for the body to attach to the request. :type json: dict | None :param timeout: How long to wait for the server to send data before giving up. :type timeout: float | None :param allow_redirects: Set to False by default. :type allow_redirects: bool :return requests.Response """ return self.request("POST", url=url, params=params, headers=headers, cookies=cookies, data=data, form=form, json=json, raw=raw, timeout=timeout, allow_redirects=allow_redirects) def put(self, url, params=None, headers=None, cookies=None, data=None, form=None, json=None, timeout=None, allow_redirects=False): """ :param url: URL to send. :type url: str :param params: Dictionary of URL parameters to append to the URL. :type params: dict | None :param headers: Dictionary of HTTP Headers to send. :type headers: dict | None :param cookies: Dictionary or CookieJar of cookies to attach to this request. :type cookies: dict | None :param data: The body to attach to the application/x-www-form-urlencoded request. :type data: dict | None :param form: The body to attach to the multipart/form-data request. 
:type form: dict | None :param json: JSON for the body to attach to the request. :type json: dict | None :param timeout: How long to wait for the server to send data before giving up. :type timeout: float | None :param allow_redirects: Set to False by default. :type allow_redirects: bool :return requests.Response """ return self.request("PUT", url=url, params=params, headers=headers, cookies=cookies, data=data, form=form, json=json, timeout=timeout, allow_redirects=allow_redirects) def patch(self, url, params=None, headers=None, cookies=None, data=None, form=None, json=None, timeout=None, allow_redirects=False): """ :param url: URL to send. :type url: str :param params: Dictionary of URL parameters to append to the URL. :type params: dict | None :param headers: Dictionary of HTTP Headers to send. :type headers: dict | None :param cookies: Dictionary or CookieJar of cookies to attach to this request. :type cookies: dict | None :param data: The body to attach to the application/x-www-form-urlencoded request. :type data: dict | None :param form: The body to attach to the multipart/form-data request. :type form: dict | None :param json: JSON for the body to attach to the request. :type json: dict | None :param timeout: How long to wait for the server to send data before giving up. :type timeout: float | None :param allow_redirects: Set to False by default. :type allow_redirects: bool :return requests.Response """ return self.request("PATCH", url=url, params=params, headers=headers, cookies=cookies, data=data, form=form, json=json, timeout=timeout, allow_redirects=allow_redirects) def delete(self, url, params=None, headers=None, cookies=None, data=None, form=None, json=None, timeout=None, allow_redirects=False): """ :param url: URL to send. :type url: str :param params: Dictionary of URL parameters to append to the URL. :type params: dict | None :param headers: Dictionary of HTTP Headers to send. 
:type headers: dict | None :param cookies: Dictionary or CookieJar of cookies to attach to this request. :type cookies: dict | None :param data: The body to attach to the application/x-www-form-urlencoded request. :type data: dict | None :param form: The body to attach to the multipart/form-data request. :type form: dict | None :param json: JSON for the body to attach to the request. :type json: dict | None :param timeout: How long to wait for the server to send data before giving up. :type timeout: float | None :param allow_redirects: Set to False by default. :type allow_redirects: bool :return requests.Response """ return self.request("DELETE", url=url, params=params, headers=headers, cookies=cookies, data=data, form=form, json=json, timeout=timeout, allow_redirects=allow_redirects)
32.644252
89
0.523822
1,604
15,049
4.857855
0.069202
0.054286
0.065067
0.028876
0.794918
0.794918
0.789528
0.774512
0.768994
0.768994
0
0.000225
0.40933
15,049
460
90
32.715217
0.876364
0.380025
0
0.617117
0
0
0.032878
0
0
0
0
0
0
1
0.054054
false
0.004505
0.031532
0
0.162162
0.018018
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
61a98570edcf4511eddc0f4ccf57abc9ff642b4f
41
py
Python
rumor/exceptions.py
SudoQ/rumor
a02a0b8b4ee929d9cd41c33816b1533c24b9cdb3
[ "MIT" ]
1
2019-09-16T13:57:27.000Z
2019-09-16T13:57:27.000Z
rumor/exceptions.py
SudoQ/rumor
a02a0b8b4ee929d9cd41c33816b1533c24b9cdb3
[ "MIT" ]
null
null
null
rumor/exceptions.py
SudoQ/rumor
a02a0b8b4ee929d9cd41c33816b1533c24b9cdb3
[ "MIT" ]
null
null
null
class UpstreamError(Exception): pass
13.666667
31
0.756098
4
41
7.75
1
0
0
0
0
0
0
0
0
0
0
0
0.170732
41
2
32
20.5
0.911765
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
f634f3a5d93b92ad784c7334f82881b94b090000
224
py
Python
criticalityMaps/__init__.py
terrahaxton/criticalityMaps
533490e1bc0f178cbce94814602caa545e438dcf
[ "MIT" ]
null
null
null
criticalityMaps/__init__.py
terrahaxton/criticalityMaps
533490e1bc0f178cbce94814602caa545e438dcf
[ "MIT" ]
null
null
null
criticalityMaps/__init__.py
terrahaxton/criticalityMaps
533490e1bc0f178cbce94814602caa545e438dcf
[ "MIT" ]
1
2020-03-12T12:36:06.000Z
2020-03-12T12:36:06.000Z
from criticalityMaps.mapping import inp_to_geojson, make_criticality_map, wn_dataframe from criticalityMaps.criticality import fire_criticality_analysis, pipe_criticality_analysis, process_criticality __version__ = '0.0.2'
44.8
113
0.879464
28
224
6.535714
0.678571
0.20765
0
0
0
0
0
0
0
0
0
0.014423
0.071429
224
4
114
56
0.865385
0
0
0
0
0
0.022321
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
f658c7d101365715042ebb3013aea7ecff99231b
156
py
Python
addons/auth_signup_verify_email/tests/__init__.py
marionumza/vocal_v12
480990e919c9410903e06e7813ee92800bd6a569
[ "Unlicense" ]
null
null
null
addons/auth_signup_verify_email/tests/__init__.py
marionumza/vocal_v12
480990e919c9410903e06e7813ee92800bd6a569
[ "Unlicense" ]
null
null
null
addons/auth_signup_verify_email/tests/__init__.py
marionumza/vocal_v12
480990e919c9410903e06e7813ee92800bd6a569
[ "Unlicense" ]
1
2021-05-05T07:59:08.000Z
2021-05-05T07:59:08.000Z
# Copyright 2016 Jairo Llopis <jairo.llopis@tecnativa.com> # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). from . import test_verify_email
31.2
63
0.762821
25
156
4.68
0.88
0.188034
0
0
0
0
0
0
0
0
0
0.043165
0.108974
156
4
64
39
0.798561
0.75641
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
9c9101be8d08013e4401071f963d40a8648eed0f
462
py
Python
web/0-basic_dev_tools/server.py
b01lers/bootcamp-training-2020
5efa86f45df66541b1e4ca7340c689aeda95b99d
[ "MIT" ]
9
2020-10-07T11:21:30.000Z
2022-02-04T05:08:46.000Z
web/0-basic_dev_tools/server.py
b01lers/bootcamp-training-2020
5efa86f45df66541b1e4ca7340c689aeda95b99d
[ "MIT" ]
1
2020-10-04T22:19:53.000Z
2020-10-04T22:19:53.000Z
web/0-basic_dev_tools/server.py
b01lers/bootcamp-training-2020
5efa86f45df66541b1e4ca7340c689aeda95b99d
[ "MIT" ]
5
2020-10-02T04:18:58.000Z
2021-06-11T16:18:26.000Z
#!/usr/bin/env python3 from flask import Flask, render_template app = Flask(__name__) @app.route('/') def index(): return render_template('index.html') @app.route('/flag2.5') def flag2(): return render_template('flag2.5.html') @app.route('/flag3') def flag3(): return render_template('flag3.html') @app.route('/flag4') def flag4(): return render_template('flag4.html') if __name__ == '__main__': app.run(host='0.0.0.0', port=5000)
15.931034
42
0.666667
66
462
4.409091
0.409091
0.24055
0.274914
0
0
0
0
0
0
0
0
0.050633
0.145022
462
28
43
16.5
0.686076
0.045455
0
0
0
0
0.177273
0
0
0
0
0
0
1
0.25
false
0
0.0625
0.25
0.5625
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
9cf80feafc424c2f87fee099f9644344a0cbe437
54
py
Python
d-series/d827.py
TheLurkingCat/ZeroJudge
6fc49c54a45e2b4b3a8d04b7a5a1fc81a2ff4eee
[ "MIT" ]
1
2018-10-21T10:03:42.000Z
2018-10-21T10:03:42.000Z
d-series/d827.py
TheLurkingCat/ZeroJudge
6fc49c54a45e2b4b3a8d04b7a5a1fc81a2ff4eee
[ "MIT" ]
null
null
null
d-series/d827.py
TheLurkingCat/ZeroJudge
6fc49c54a45e2b4b3a8d04b7a5a1fc81a2ff4eee
[ "MIT" ]
2
2018-10-12T16:40:11.000Z
2021-04-05T12:05:36.000Z
a = int(input()) print((a % 12) * 5 + (a // 12) * 50)
18
36
0.425926
10
54
2.3
0.7
0.26087
0
0
0
0
0
0
0
0
0
0.175
0.259259
54
2
37
27
0.4
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
5
1405a5a579465ed26ce0fff2e31bb497d28af882
68
py
Python
retina/optim/lr_schedulers/__init__.py
mike112223/retina
cdad3af8240471619f42e9edd1caf68a0241bea6
[ "Apache-2.0" ]
null
null
null
retina/optim/lr_schedulers/__init__.py
mike112223/retina
cdad3af8240471619f42e9edd1caf68a0241bea6
[ "Apache-2.0" ]
null
null
null
retina/optim/lr_schedulers/__init__.py
mike112223/retina
cdad3af8240471619f42e9edd1caf68a0241bea6
[ "Apache-2.0" ]
null
null
null
from .builder import build_lr_scheduler from .step_lr import StepLR
22.666667
39
0.852941
11
68
5
0.727273
0
0
0
0
0
0
0
0
0
0
0
0.117647
68
2
40
34
0.916667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5