hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
fb94459a5d69e26c4187afa63b0b6378f2161b76
| 46
|
py
|
Python
|
tests/__init__.py
|
kevihiiin/investopedia_simulator_api
|
ffb5f89e6d56abdded882aa6926e985da86b4931
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
kevihiiin/investopedia_simulator_api
|
ffb5f89e6d56abdded882aa6926e985da86b4931
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
kevihiiin/investopedia_simulator_api
|
ffb5f89e6d56abdded882aa6926e985da86b4931
|
[
"MIT"
] | null | null | null |
"""Unit test package for investopedia_api."""
| 23
| 45
| 0.73913
| 6
| 46
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108696
| 46
| 1
| 46
| 46
| 0.804878
| 0.847826
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fbcd1d663fbdaf767d25d44ece6ecc012e63d980
| 21
|
py
|
Python
|
language-python-test/test/features/ifthenelse/if_true.py
|
wbadart/language-python
|
6c048c215ff7fe4a5d5cc36ba3c17a666af74821
|
[
"BSD-3-Clause"
] | 137
|
2015-02-13T21:03:23.000Z
|
2021-11-24T03:53:55.000Z
|
language-python-test/test/features/ifthenelse/if_true.py
|
wbadart/language-python
|
6c048c215ff7fe4a5d5cc36ba3c17a666af74821
|
[
"BSD-3-Clause"
] | 4
|
2015-04-01T13:49:13.000Z
|
2019-07-09T19:28:56.000Z
|
language-python-test/test/features/ifthenelse/if_true.py
|
wbadart/language-python
|
6c048c215ff7fe4a5d5cc36ba3c17a666af74821
|
[
"BSD-3-Clause"
] | 8
|
2015-04-25T03:47:52.000Z
|
2019-07-27T06:33:56.000Z
|
if True:
print(1)
| 7
| 11
| 0.571429
| 4
| 21
| 3
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 0.285714
| 21
| 2
| 12
| 10.5
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
83c2565798ea3bc11ddfca57a3a89ffc70c52904
| 923
|
py
|
Python
|
py/test_api.py
|
JinhuaSu/RMusicDown
|
0f30381f4976c4b6d5a716804a47c8c905c75b39
|
[
"MIT"
] | 3
|
2022-01-01T12:59:45.000Z
|
2022-01-06T08:54:25.000Z
|
py/test_api.py
|
JinhuaSu/RMusicDown
|
0f30381f4976c4b6d5a716804a47c8c905c75b39
|
[
"MIT"
] | null | null | null |
py/test_api.py
|
JinhuaSu/RMusicDown
|
0f30381f4976c4b6d5a716804a47c8c905c75b39
|
[
"MIT"
] | null | null | null |
#%%
import requests
# "https://music.163.com/weapi/cloudsearch/get/web?csrf_token=", body=list()
resp = requests.post(
"https://music.163.com/weapi/cloudsearch/get/web?csrf_token=",
data={
"params": "e+f7PY7zVaw3ANYt8nggFYipZCnr6/dU+Wo9/2S9VyvR4a1fqwijrBaFfGyEOljPJa1jYsZKER5VnNXYnZ0OhgILwTFgK0hG31rf/DeltLSY9GoZTS6CXQL2w8VYom17YydwyoeQuAWgAdUVQqWMaBClVvpFqCuGVVWUkWSxFKRygwt32u6M+gkiaJjdduFDQBVCyzaNp0m4aAC1jq/+jAR04SP+rKNxXQAsFc5FyIKp5sCUvd5lrelWOXToClKZx3/xd9nHyYUKbc95OVNk02OfQLxp6eCmng8hHpAHe+pqmutlFzRXk45bfRqO2Fn1jOBxQdvbtp44Xmhda6h4WVxmjURfnPig/IrIB25mk9A=",
"encSecKey": "43f8e5540b855f2022014d5ea66c632f93141186061531016cb91c46a1663256ea7665455961e2417b3c73124b6bcb3dd9da54539eda0575cb51e971ec37c3a4d99719818e94e3fa7c19a7fc1ebc2e7acb6171839a1781696d5cbee460fe6131e6b94c2a62e45e8714b4ac92ec94ba4963da7efc7f0ca04323813e280865f9e1",
},
)
resp
# %%
resp.text
# %%
resp.request.body
# %%
| 48.578947
| 385
| 0.852654
| 49
| 923
| 16.020408
| 0.693878
| 0.025478
| 0.033121
| 0.040764
| 0.119745
| 0.119745
| 0.119745
| 0.119745
| 0.119745
| 0.119745
| 0
| 0.265283
| 0.060672
| 923
| 18
| 386
| 51.277778
| 0.640138
| 0.093174
| 0
| 0
| 0
| 0.090909
| 0.834135
| 0.745192
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.090909
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f7d5b124c6df93a7ab1914a4c846270c8b79bc28
| 273
|
py
|
Python
|
python/ql/test/library-tests/stmts/try_stmt/test.py
|
vadi2/codeql
|
a806a4f08696d241ab295a286999251b56a6860c
|
[
"MIT"
] | 4,036
|
2020-04-29T00:09:57.000Z
|
2022-03-31T14:16:38.000Z
|
python/ql/test/library-tests/stmts/try_stmt/test.py
|
vadi2/codeql
|
a806a4f08696d241ab295a286999251b56a6860c
|
[
"MIT"
] | 2,970
|
2020-04-28T17:24:18.000Z
|
2022-03-31T22:40:46.000Z
|
python/ql/test/library-tests/stmts/try_stmt/test.py
|
ScriptBox99/github-codeql
|
2ecf0d3264db8fb4904b2056964da469372a235c
|
[
"MIT"
] | 794
|
2020-04-29T00:28:25.000Z
|
2022-03-30T08:21:46.000Z
|
def f():
try:
call()
try:
nested()
except Exception:
return 1
except:
return 2
try:
call2()
except Exception:
return 2
try:
call3a()
finally:
call3b()
| 13.65
| 25
| 0.391941
| 23
| 273
| 4.652174
| 0.608696
| 0.280374
| 0.392523
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046875
| 0.531136
| 273
| 20
| 26
| 13.65
| 0.789063
| 0
| 0
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| true
| 0
| 0
| 0
| 0.235294
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f7dc629c32938e11d8619c8fa67296b698a7379f
| 88
|
py
|
Python
|
fast_dataset_cleaner/back/services/__init__.py
|
PhotoRoom/fast-dataset-cleaner
|
a18648866a17801492608bb910fc98c7f7e7a372
|
[
"MIT"
] | 4
|
2020-12-17T17:44:01.000Z
|
2022-02-18T10:41:19.000Z
|
fast_dataset_cleaner/back/services/__init__.py
|
PhotoRoom/fast-dataset-cleaner
|
a18648866a17801492608bb910fc98c7f7e7a372
|
[
"MIT"
] | null | null | null |
fast_dataset_cleaner/back/services/__init__.py
|
PhotoRoom/fast-dataset-cleaner
|
a18648866a17801492608bb910fc98c7f7e7a372
|
[
"MIT"
] | 1
|
2020-12-16T16:41:17.000Z
|
2020-12-16T16:41:17.000Z
|
from .AnnotationService import AnnotationService
from .ImageService import ImageService
| 29.333333
| 48
| 0.886364
| 8
| 88
| 9.75
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 88
| 2
| 49
| 44
| 0.975
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f7e8ce17f4d0a95077d8c0dce81834445f56494f
| 62
|
py
|
Python
|
api/citizens/tests.py
|
jaconsta/soat_cnpx
|
3c5a05a334e44158b0aa9ab57c309771953b2950
|
[
"MIT"
] | null | null | null |
api/citizens/tests.py
|
jaconsta/soat_cnpx
|
3c5a05a334e44158b0aa9ab57c309771953b2950
|
[
"MIT"
] | null | null | null |
api/citizens/tests.py
|
jaconsta/soat_cnpx
|
3c5a05a334e44158b0aa9ab57c309771953b2950
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
from .tests_cases import *
| 12.4
| 32
| 0.790323
| 9
| 62
| 5.333333
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 62
| 4
| 33
| 15.5
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f7edd0a3e4ec250aee5513a085a7ad6844cc1a2f
| 47
|
py
|
Python
|
sherlock/exceptions.py
|
bhirsz/robotframework-sherlock
|
53edb5f15517d8fbdf05eb0c84eb34332dcbf308
|
[
"Apache-2.0"
] | 2
|
2022-03-17T07:55:37.000Z
|
2022-03-17T08:18:44.000Z
|
sherlock/exceptions.py
|
bhirsz/robotframework-sherlock
|
53edb5f15517d8fbdf05eb0c84eb34332dcbf308
|
[
"Apache-2.0"
] | 16
|
2022-03-09T09:29:34.000Z
|
2022-03-14T20:29:38.000Z
|
sherlock/exceptions.py
|
bhirsz/robotframework-sherlock
|
53edb5f15517d8fbdf05eb0c84eb34332dcbf308
|
[
"Apache-2.0"
] | null | null | null |
class SherlockFatalError(ValueError):
pass
| 15.666667
| 37
| 0.787234
| 4
| 47
| 9.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148936
| 47
| 2
| 38
| 23.5
| 0.925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
f7ee5cc5c4b49851a33403aff50bda125a108390
| 182
|
py
|
Python
|
PYTHON-CURSO EM VIDEO/Desafio 005.py
|
JaumVitor/HOMEWORK-PYTHON
|
aff564ac61802c7417d7280a73c1ed4a98978ed3
|
[
"Apache-2.0"
] | null | null | null |
PYTHON-CURSO EM VIDEO/Desafio 005.py
|
JaumVitor/HOMEWORK-PYTHON
|
aff564ac61802c7417d7280a73c1ed4a98978ed3
|
[
"Apache-2.0"
] | null | null | null |
PYTHON-CURSO EM VIDEO/Desafio 005.py
|
JaumVitor/HOMEWORK-PYTHON
|
aff564ac61802c7417d7280a73c1ed4a98978ed3
|
[
"Apache-2.0"
] | null | null | null |
num = int ( input ('Digite um valor: '))
print ('O número antecessor de {} = {} '.format (num, num - 1), end='>>> ')
print ('O número posterior de {} = {} '.format (num, num + 1))
| 45.5
| 76
| 0.549451
| 25
| 182
| 4
| 0.6
| 0.12
| 0.24
| 0.28
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013986
| 0.214286
| 182
| 3
| 77
| 60.666667
| 0.685315
| 0
| 0
| 0
| 0
| 0
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
790bbaf8e6b6e3e030475b1cf8154b1be53ed58b
| 21
|
py
|
Python
|
examples/permissionsexample/models.py
|
max-arnold/django-rest-framework
|
ce5eb85082dd775bb5079ae7af91840fba7f9a6e
|
[
"BSD-2-Clause"
] | 2
|
2017-12-05T15:32:58.000Z
|
2017-12-05T15:33:02.000Z
|
examples/permissionsexample/models.py
|
upgrade-drf/django-rest-framework-0.4
|
ce5eb85082dd775bb5079ae7af91840fba7f9a6e
|
[
"BSD-2-Clause"
] | null | null | null |
examples/permissionsexample/models.py
|
upgrade-drf/django-rest-framework-0.4
|
ce5eb85082dd775bb5079ae7af91840fba7f9a6e
|
[
"BSD-2-Clause"
] | 1
|
2020-12-18T11:24:55.000Z
|
2020-12-18T11:24:55.000Z
|
#for fixture loading
| 10.5
| 20
| 0.809524
| 3
| 21
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 21
| 1
| 21
| 21
| 0.944444
| 0.904762
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
791b7cef6a9aeef0e465c0156eb1cb9e9a1d2317
| 117
|
py
|
Python
|
April 2021/Global and Local Inversions.py
|
parikshitgupta1/leetcode
|
eba6c11740dc7597204af127c0f4c2163376294f
|
[
"MIT"
] | null | null | null |
April 2021/Global and Local Inversions.py
|
parikshitgupta1/leetcode
|
eba6c11740dc7597204af127c0f4c2163376294f
|
[
"MIT"
] | null | null | null |
April 2021/Global and Local Inversions.py
|
parikshitgupta1/leetcode
|
eba6c11740dc7597204af127c0f4c2163376294f
|
[
"MIT"
] | null | null | null |
class Solution(object):
def isIdealPermutation(self, A):
return all(abs(v-i) <= 1 for i,v in enumerate(A))
| 29.25
| 55
| 0.65812
| 19
| 117
| 4.052632
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010638
| 0.196581
| 117
| 3
| 56
| 39
| 0.808511
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
792b482a0b0054303f33c96146ece387d8107116
| 50
|
py
|
Python
|
zaailabcorelib/zconfig/__init__.py
|
chiutuanbinh/zaailabcorelib
|
1967ebcd6e12f0c38ea03c827e16745cb8e0569c
|
[
"MIT"
] | 1
|
2019-11-26T07:12:31.000Z
|
2019-11-26T07:12:31.000Z
|
zaailabcorelib/zconfig/__init__.py
|
chiutuanbinh/zaailabcorelib
|
1967ebcd6e12f0c38ea03c827e16745cb8e0569c
|
[
"MIT"
] | null | null | null |
zaailabcorelib/zconfig/__init__.py
|
chiutuanbinh/zaailabcorelib
|
1967ebcd6e12f0c38ea03c827e16745cb8e0569c
|
[
"MIT"
] | 3
|
2019-12-05T14:54:03.000Z
|
2021-05-15T15:03:48.000Z
|
from zaailabcorelib.zconfig.zconfig import ZConfig
| 50
| 50
| 0.9
| 6
| 50
| 7.5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06
| 50
| 1
| 50
| 50
| 0.957447
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f70b061d2606ca0be36e23f56f65b717929eb470
| 104
|
py
|
Python
|
calculator/__init__.py
|
goncalovalverde/seshat
|
deff5cdd985f81ac2b4ebd077eea11f7c4f4118f
|
[
"MIT"
] | 1
|
2020-12-22T13:23:00.000Z
|
2020-12-22T13:23:00.000Z
|
calculator/__init__.py
|
goncalovalverde/seshat
|
deff5cdd985f81ac2b4ebd077eea11f7c4f4118f
|
[
"MIT"
] | 5
|
2020-12-22T13:36:30.000Z
|
2021-02-27T05:42:18.000Z
|
calculator/__init__.py
|
goncalovalverde/seshat
|
deff5cdd985f81ac2b4ebd077eea11f7c4f4118f
|
[
"MIT"
] | null | null | null |
import logging
class Calculator(object):
def __init__(self, config):
self.config = config
| 14.857143
| 31
| 0.682692
| 12
| 104
| 5.583333
| 0.75
| 0.298507
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 104
| 6
| 32
| 17.333333
| 0.8375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
f72cccf7222dd5cd1076530b8dcc103fb3bb156d
| 246
|
py
|
Python
|
board/amebaz_dev/ucube.py
|
jinlongliu/AliOS-Things
|
ce051172a775f987183e7aca88bb6f3b809ea7b0
|
[
"Apache-2.0"
] | 4
|
2019-03-12T11:04:48.000Z
|
2019-10-22T06:06:53.000Z
|
board/amebaz_dev/ucube.py
|
IamBaoMouMou/AliOS-Things
|
195a9160b871b3d78de6f8cf6c2ab09a71977527
|
[
"Apache-2.0"
] | 3
|
2018-12-17T13:06:46.000Z
|
2018-12-28T01:40:59.000Z
|
board/amebaz_dev/ucube.py
|
IamBaoMouMou/AliOS-Things
|
195a9160b871b3d78de6f8cf6c2ab09a71977527
|
[
"Apache-2.0"
] | 2
|
2018-01-23T07:54:08.000Z
|
2018-01-23T11:38:59.000Z
|
linux_only_targets="linuxapp helloworld helloworld_nocli linkkitapp alinkapp networkapp tls uDataapp hdlcapp.hdlcserver wifihalapp coapapp nano linkkit_gateway blink linkkit_sched meshapp acapp netmgrapp mqttapp wifimonitor vflashdemo athostapp"
| 123
| 245
| 0.894309
| 29
| 246
| 7.413793
| 0.931034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089431
| 246
| 1
| 246
| 246
| 0.959821
| 0
| 0
| 0
| 0
| 1
| 0.910569
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f72f9494e109e2041d0b65a04cdffcdfc754e555
| 127
|
py
|
Python
|
pysparkrpc/server/__init__.py
|
abronte/PysparkAPI
|
894f9a550109b7d3fc10573fb1f080972ed13d8d
|
[
"MIT"
] | 1
|
2020-07-31T17:50:50.000Z
|
2020-07-31T17:50:50.000Z
|
pysparkrpc/server/__init__.py
|
abronte/PysparkAPI
|
894f9a550109b7d3fc10573fb1f080972ed13d8d
|
[
"MIT"
] | 3
|
2021-06-06T18:25:43.000Z
|
2021-06-07T00:26:44.000Z
|
pysparkrpc/server/__init__.py
|
abronte/PysparkAPI
|
894f9a550109b7d3fc10573fb1f080972ed13d8d
|
[
"MIT"
] | null | null | null |
from pysparkrpc.server.server import run
from pysparkrpc.server.capture import Capture
__all__ = [
'run', 'Capture'
]
| 18.142857
| 45
| 0.724409
| 15
| 127
| 5.866667
| 0.466667
| 0.318182
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181102
| 127
| 6
| 46
| 21.166667
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0.07874
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
f754b876057a1c17f486b6ee6718ef24463529f7
| 21
|
py
|
Python
|
model/__init__.py
|
brendan-bassett/Tiny-Ticker
|
955e2bf93c660dcd20a7c7f2e7fcd624fc4221d1
|
[
"MIT"
] | 1
|
2020-05-22T00:51:38.000Z
|
2020-05-22T00:51:38.000Z
|
model/__init__.py
|
brendan-bassett/Tiny-Ticker
|
955e2bf93c660dcd20a7c7f2e7fcd624fc4221d1
|
[
"MIT"
] | 17
|
2020-03-01T01:23:50.000Z
|
2021-03-31T19:47:33.000Z
|
model/__init__.py
|
brendan-bassett/Tiny-Ticker
|
955e2bf93c660dcd20a7c7f2e7fcd624fc4221d1
|
[
"MIT"
] | 2
|
2020-04-26T18:55:13.000Z
|
2020-05-04T21:51:34.000Z
|
"""model.__init__."""
| 21
| 21
| 0.619048
| 2
| 21
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 21
| 1
| 21
| 21
| 0.428571
| 0.714286
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f7675f53f86e2b6eec00126125a4d6d225526d00
| 88
|
py
|
Python
|
powerline_taskwarrior/__init__.py
|
Zebradil/powerline-taskwarrior
|
3fe86666a70199993e53050044d0a846e74d7dee
|
[
"MIT"
] | 66
|
2016-09-07T20:48:36.000Z
|
2021-12-09T20:01:08.000Z
|
powerline_taskwarrior/__init__.py
|
Zebradil/powerline-taskwarrior
|
3fe86666a70199993e53050044d0a846e74d7dee
|
[
"MIT"
] | 10
|
2016-09-07T13:05:08.000Z
|
2021-10-14T00:22:57.000Z
|
powerline_taskwarrior/__init__.py
|
Zebradil/powerline-taskwarrior
|
3fe86666a70199993e53050044d0a846e74d7dee
|
[
"MIT"
] | 7
|
2016-09-07T23:41:30.000Z
|
2021-02-13T23:23:01.000Z
|
from .segments import active_task, context, next_task, pending_tasks_count, taskwarrior
| 44
| 87
| 0.852273
| 12
| 88
| 5.916667
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 88
| 1
| 88
| 88
| 0.8875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f77fc871b57f3226958985a11111b890ab38dcdb
| 110
|
py
|
Python
|
monday/api/aula04/projeto/imdb_books/src/database/__init__.py
|
Synapse-CIAg/synapse-2019H2
|
3a3635c59b9d30ee26344972ce6730dc1fd2ec33
|
[
"Apache-2.0"
] | null | null | null |
monday/api/aula04/projeto/imdb_books/src/database/__init__.py
|
Synapse-CIAg/synapse-2019H2
|
3a3635c59b9d30ee26344972ce6730dc1fd2ec33
|
[
"Apache-2.0"
] | null | null | null |
monday/api/aula04/projeto/imdb_books/src/database/__init__.py
|
Synapse-CIAg/synapse-2019H2
|
3a3635c59b9d30ee26344972ce6730dc1fd2ec33
|
[
"Apache-2.0"
] | 3
|
2019-09-19T10:49:42.000Z
|
2019-11-19T11:48:54.000Z
|
import mysql.connector
db = mysql.connector.connect(host="127.0.0.1", user="root", passwd="root", db="books")
| 36.666667
| 86
| 0.709091
| 18
| 110
| 4.333333
| 0.722222
| 0.358974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0.072727
| 110
| 3
| 86
| 36.666667
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0.198198
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.5
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
f78631546c8676414d6201b3c6d1d0e110ad99d4
| 207
|
py
|
Python
|
glue/clients/__init__.py
|
bsipocz/glue
|
7b7e4879b4c746b2419a0eca2a17c2d07a3fded3
|
[
"BSD-3-Clause"
] | null | null | null |
glue/clients/__init__.py
|
bsipocz/glue
|
7b7e4879b4c746b2419a0eca2a17c2d07a3fded3
|
[
"BSD-3-Clause"
] | null | null | null |
glue/clients/__init__.py
|
bsipocz/glue
|
7b7e4879b4c746b2419a0eca2a17c2d07a3fded3
|
[
"BSD-3-Clause"
] | null | null | null |
from matplotlib import rcParams, rcdefaults
#standardize mpl setup
rcdefaults()
from .histogram_client import HistogramClient
from .image_client import ImageClient
from .scatter_client import ScatterClient
| 25.875
| 45
| 0.859903
| 24
| 207
| 7.291667
| 0.625
| 0.205714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10628
| 207
| 7
| 46
| 29.571429
| 0.945946
| 0.101449
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
f789424c2a5c2ead75b6c8ba3343edc5c5fb3f90
| 186
|
py
|
Python
|
python/PyAlembic/Tests/testAbcGeomBinding.py
|
ryu-sw/alembic
|
395450bad88f9d5ed6d20612e9201aac93a5eb54
|
[
"MIT"
] | 7
|
2019-11-23T16:29:26.000Z
|
2020-06-30T20:24:15.000Z
|
python/PyAlembic/Tests/testAbcGeomBinding.py
|
ryu-sw/alembic
|
395450bad88f9d5ed6d20612e9201aac93a5eb54
|
[
"MIT"
] | null | null | null |
python/PyAlembic/Tests/testAbcGeomBinding.py
|
ryu-sw/alembic
|
395450bad88f9d5ed6d20612e9201aac93a5eb54
|
[
"MIT"
] | 1
|
2022-01-30T04:16:01.000Z
|
2022-01-30T04:16:01.000Z
|
import testGeomParam
import testXform
import testPolyMesh
import testCurves
import testNurbs
import testPoints
import testXform
import testCamera
import testSubDFaceSet
import testLight
| 16.909091
| 22
| 0.892473
| 20
| 186
| 8.3
| 0.5
| 0.180723
| 0.253012
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107527
| 186
| 10
| 23
| 18.6
| 1
| 0
| 0
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f791bdc64b0a83199712609a59f99bca6798734f
| 225
|
py
|
Python
|
bot_tests/data/__init__.py
|
alex13th/WakeBot
|
dd920d3bdcd82437191688fa361571e50cd9fc3f
|
[
"MIT"
] | null | null | null |
bot_tests/data/__init__.py
|
alex13th/WakeBot
|
dd920d3bdcd82437191688fa361571e50cd9fc3f
|
[
"MIT"
] | 19
|
2020-07-23T04:30:44.000Z
|
2020-08-05T08:37:32.000Z
|
bot_tests/data/__init__.py
|
alex13th/WakeBot
|
dd920d3bdcd82437191688fa361571e50cd9fc3f
|
[
"MIT"
] | 1
|
2020-07-27T08:19:09.000Z
|
2020-07-27T08:19:09.000Z
|
from .t_adapters import MemoryDataAdapterTestCase
from .t_state import StateManagerTestCase, StateProviderTestCase
if __name__ == "__main__":
MemoryDataAdapterTestCase
StateManagerTestCase
StateProviderTestCase
| 25
| 64
| 0.831111
| 17
| 225
| 10.411765
| 0.647059
| 0.056497
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 225
| 8
| 65
| 28.125
| 0.907692
| 0
| 0
| 0
| 0
| 0
| 0.035556
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
e392424a923b0f5ccd6bbc400a1da04a39508684
| 249
|
py
|
Python
|
rst2rst/__init__.py
|
masklinn/rst2rst
|
a87cdd22b2471fc4a1face4b89a6a9cc31572a5c
|
[
"BSD-3-Clause"
] | 5
|
2015-03-12T19:08:00.000Z
|
2021-01-17T09:26:16.000Z
|
rst2rst/__init__.py
|
masklinn/rst2rst
|
a87cdd22b2471fc4a1face4b89a6a9cc31572a5c
|
[
"BSD-3-Clause"
] | 3
|
2016-10-29T14:49:23.000Z
|
2018-02-23T06:59:00.000Z
|
rst2rst/__init__.py
|
masklinn/rst2rst
|
a87cdd22b2471fc4a1face4b89a6a9cc31572a5c
|
[
"BSD-3-Clause"
] | 5
|
2016-10-29T14:26:02.000Z
|
2020-07-08T12:19:17.000Z
|
"""rst2rst package."""
from utils import LazyString, read_version
from writer import Writer # Required for docutils.core.publish_cmdline() to
# find the writer matching "rst2rst".
__version__ = LazyString(read_version)
| 31.125
| 76
| 0.698795
| 28
| 249
| 5.964286
| 0.678571
| 0.167665
| 0.251497
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010309
| 0.220884
| 249
| 7
| 77
| 35.571429
| 0.850515
| 0.405622
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e3db504b2a219d226de1c6a94d47d36bfb65c516
| 32
|
py
|
Python
|
login.py
|
374961988/www123
|
2b2daba08c2edf507cff13d47de41372c66fd252
|
[
"MIT"
] | null | null | null |
login.py
|
374961988/www123
|
2b2daba08c2edf507cff13d47de41372c66fd252
|
[
"MIT"
] | null | null | null |
login.py
|
374961988/www123
|
2b2daba08c2edf507cff13d47de41372c66fd252
|
[
"MIT"
] | null | null | null |
num=111
num=222
num333333333333
| 8
| 15
| 0.84375
| 5
| 32
| 5.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.62069
| 0.09375
| 32
| 3
| 16
| 10.666667
| 0.310345
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e3e3463363099ce2edf112b00d3db4227a9dcfbb
| 786
|
py
|
Python
|
noisekit/levels.py
|
ggueret/noisekit
|
50421ebe04c5bb28f21bfeaa685ab14581efac24
|
[
"MIT"
] | null | null | null |
noisekit/levels.py
|
ggueret/noisekit
|
50421ebe04c5bb28f21bfeaa685ab14581efac24
|
[
"MIT"
] | 8
|
2019-04-02T11:24:17.000Z
|
2021-01-16T13:27:40.000Z
|
noisekit/levels.py
|
ggueret/noisekit
|
50421ebe04c5bb28f21bfeaa685ab14581efac24
|
[
"MIT"
] | null | null | null |
class level(str):
def __gt__(self, other):
return PRECEDENCE_LOOKUP[self] > PRECEDENCE_LOOKUP[other]
def __ge__(self, other):
print("ge", PRECEDENCE_LOOKUP[self], PRECEDENCE_LOOKUP[other])
return PRECEDENCE_LOOKUP[self] >= PRECEDENCE_LOOKUP[other]
def __lt__(self, other):
print("lt", PRECEDENCE_LOOKUP[self], PRECEDENCE_LOOKUP[other])
return PRECEDENCE_LOOKUP[self] < PRECEDENCE_LOOKUP[other]
def __le__(self, other):
print("le", PRECEDENCE_LOOKUP[self], PRECEDENCE_LOOKUP[other])
return PRECEDENCE_LOOKUP[self] <= PRECEDENCE_LOOKUP[other]
PRECEDENCE = ("QUIET", "LOW", "MEDIUM", "HIGH")
PRECEDENCE_LOOKUP = dict(zip(PRECEDENCE, range(0, len(PRECEDENCE))))
QUIET, LOW, MEDIUM, HIGH = map(level, PRECEDENCE)
| 35.727273
| 70
| 0.697201
| 92
| 786
| 5.619565
| 0.26087
| 0.464217
| 0.270793
| 0.40619
| 0.736944
| 0.628627
| 0.628627
| 0.628627
| 0.628627
| 0.522244
| 0
| 0.001536
| 0.171756
| 786
| 21
| 71
| 37.428571
| 0.792627
| 0
| 0
| 0
| 0
| 0
| 0.030534
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.266667
| false
| 0
| 0
| 0.066667
| 0.6
| 0.2
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
e3fa5772c37b63de8848f72aafdbea0e3e14f818
| 32,657
|
py
|
Python
|
retirement/tests/tests_viewset_Reservation_update.py
|
Jerome-Celle/Blitz-API
|
7dfb7b837ed47b11afcfaa5f5aee831c1aa4e5e0
|
[
"MIT"
] | null | null | null |
retirement/tests/tests_viewset_Reservation_update.py
|
Jerome-Celle/Blitz-API
|
7dfb7b837ed47b11afcfaa5f5aee831c1aa4e5e0
|
[
"MIT"
] | null | null | null |
retirement/tests/tests_viewset_Reservation_update.py
|
Jerome-Celle/Blitz-API
|
7dfb7b837ed47b11afcfaa5f5aee831c1aa4e5e0
|
[
"MIT"
] | null | null | null |
import json
import pytz
import responses
from datetime import datetime, timedelta
from decimal import Decimal, ROUND_HALF_UP
from rest_framework import status
from rest_framework.test import APIClient, APITestCase
from django.urls import reverse
from django.utils import timezone
from django.conf import settings
from django.core import mail
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth import get_user_model
from django.test.utils import override_settings
from unittest import mock
from blitz_api.factories import UserFactory, AdminFactory
from blitz_api.services import remove_translation_fields
from store.models import Order, OrderLine, Refund
from store.tests.paysafe_sample_responses import (SAMPLE_REFUND_RESPONSE,
SAMPLE_NO_AMOUNT_TO_REFUND,
SAMPLE_PAYMENT_RESPONSE,
SAMPLE_PROFILE_RESPONSE,
SAMPLE_CARD_RESPONSE,
UNKNOWN_EXCEPTION, )
from ..models import Retirement, Reservation
User = get_user_model()
LOCAL_TIMEZONE = pytz.timezone(settings.TIME_ZONE)
TAX_RATE = settings.LOCAL_SETTINGS['SELLING_TAX']
@override_settings(
    PAYSAFE={
        'ACCOUNT_NUMBER': "0123456789",
        'USER': "user",
        'PASSWORD': "password",
        'BASE_URL': "http://example.com/",
        'VAULT_URL': "customervault/v1/",
        'CARD_URL': "cardpayments/v1/"
    }
)
class ReservationTests(APITestCase):
    """End-to-end tests for PUT/PATCH on the retirement reservation endpoint.

    PUT is disallowed entirely; PATCH may change only ``is_present`` and
    ``retirement`` (an exchange), subject to seat availability, the
    ``min_day_exchange`` deadline, the price difference (extra charge or
    refund) and the reservation's ``exchangeable`` flag.  External Paysafe
    HTTP calls are faked via the ``responses`` library using the sample
    payloads imported above.
    """
    @classmethod
    def setUpClass(cls):
        """Create shared fixtures: three retirements, one order + order
        line, one active reservation (with its expected serialized payload)
        and one non-exchangeable reservation."""
        super(ReservationTests, cls).setUpClass()
        cls.client = APIClient()
        cls.user = UserFactory()
        cls.admin = AdminFactory()
        cls.retirement_type = ContentType.objects.get_for_model(Retirement)
        # Retirement currently held by ``cls.reservation``.
        cls.retirement = Retirement.objects.create(
            name="mega_retirement",
            details="This is a description of the mega retirement.",
            seats=400,
            address_line1="123 random street",
            postal_code="123 456",
            state_province="Random state",
            country="Random country",
            price=199,
            start_time=LOCAL_TIMEZONE.localize(datetime(2130, 1, 15, 8)),
            end_time=LOCAL_TIMEZONE.localize(datetime(2130, 1, 17, 12)),
            min_day_refund=7,
            min_day_exchange=7,
            refund_rate=50,
            is_active=True,
            reserved_seats=1,
            accessibility=True,
            form_url="example.com",
            carpool_url='example2.com',
            review_url='example3.com',
            has_shared_rooms=True,
        )
        # Exchange target used by most tests; same price, later dates.
        cls.retirement2 = Retirement.objects.create(
            name="random_retirement",
            details="This is a description of the retirement.",
            seats=40,
            address_line1="123 random street",
            postal_code="123 456",
            state_province="Random state",
            country="Random country",
            price=199,
            start_time=LOCAL_TIMEZONE.localize(datetime(2130, 2, 15, 8)),
            end_time=LOCAL_TIMEZONE.localize(datetime(2130, 2, 17, 12)),
            min_day_refund=7,
            min_day_exchange=7,
            refund_rate=100,
            is_active=False,
            accessibility=True,
            form_url="example.com",
            carpool_url='example2.com',
            review_url='example3.com',
            has_shared_rooms=True,
        )
        # Shares start/end dates with ``cls.retirement`` — used by the
        # overlapping-reservation test.
        cls.retirement_overlap = Retirement.objects.create(
            name="ultra_retirement",
            details="This is a description of the ultra retirement.",
            seats=400,
            address_line1="1234 random street",
            postal_code="654 321",
            state_province="Random state 2",
            country="Random country 2",
            price=199,
            start_time=LOCAL_TIMEZONE.localize(datetime(2130, 1, 15, 8)),
            end_time=LOCAL_TIMEZONE.localize(datetime(2130, 1, 17, 12)),
            min_day_refund=7,
            min_day_exchange=7,
            refund_rate=50,
            is_active=True,
            accessibility=True,
            form_url="example.com",
            carpool_url='example2.com',
            review_url='example3.com',
            has_shared_rooms=True,
        )
        cls.order = Order.objects.create(
            user=cls.user,
            transaction_date=timezone.now(),
            authorization_id=1,
            settlement_id=1,
        )
        cls.order_line = OrderLine.objects.create(
            order=cls.order,
            quantity=1,
            content_type=cls.retirement_type,
            object_id=cls.retirement.id,
            cost=cls.retirement.price,
        )
        cls.reservation = Reservation.objects.create(
            user=cls.user,
            retirement=cls.retirement,
            order_line=cls.order_line,
            is_active=True,
        )
        # Serialized form of ``cls.reservation`` as the API is expected to
        # return it; tests copy and tweak this dict per scenario.
        cls.reservation_expected_payload = {
            'id': cls.reservation.id,
            'is_active': True,
            'is_present': False,
            'retirement': 'http://testserver/retirement/retirements/' +
                          str(cls.reservation.retirement.id),
            'url': 'http://testserver/retirement/reservations/' +
                   str(cls.reservation.id),
            'user': 'http://testserver/users/' +
                    str(cls.user.id),
            'order_line': 'http://testserver/order_lines/' +
                          str(cls.order_line.id),
            'cancelation_date': None,
            'cancelation_action': None,
            'cancelation_reason': None,
            'refundable': True,
            'exchangeable': True,
        }
        cls.reservation_non_exchangeable = Reservation.objects.create(
            user=cls.admin,
            retirement=cls.retirement,
            order_line=cls.order_line,
            is_active=True,
            exchangeable=False,
        )
    def test_update(self):
        """
        Ensure we can't update a reservation.
        """
        self.client.force_authenticate(user=self.admin)
        data = {
            'retirement': reverse(
                'retirement:retirement-detail', args=[self.retirement.id]
            ),
            'user': reverse('user-detail', args=[self.user.id]),
            'is_active': False,
        }
        response = self.client.put(
            reverse(
                'retirement:reservation-detail',
                # NOTE(review): hard-coded pk — presumably meant
                # self.reservation.id; PUT returns 405 regardless of pk.
                kwargs={'pk': 1},
            ),
            data,
            format='json',
        )
        self.assertEqual(
            response.status_code,
            status.HTTP_405_METHOD_NOT_ALLOWED
        )
    def test_update_partial(self):
        """
        Ensure we can partially update a reservation (is_present field and
        retirement field only).
        The retirement can be changed if we're at least 'min_day_exchange' days
        before the event and if the new retirement is cheaper/same price.
        Otherwise, only 'is_present' can be updated.
        Sends an email to the user with the new retirement's info.
        """
        self.client.force_authenticate(user=self.admin)
        FIXED_TIME = datetime(2030, 1, 10, tzinfo=LOCAL_TIMEZONE)
        data = {
            'is_present': True,
            'retirement': reverse(
                'retirement:retirement-detail',
                kwargs={'pk': self.retirement2.id},
            ),
        }
        with mock.patch(
                'django.utils.timezone.now', return_value=FIXED_TIME):
            response = self.client.patch(
                reverse(
                    'retirement:reservation-detail',
                    kwargs={'pk': self.reservation.id},
                ),
                data,
                format='json',
            )
        self.assertEqual(
            response.status_code,
            status.HTTP_200_OK,
            response.content
        )
        response_data = json.loads(response.content)
        del response_data['user_details']
        del response_data['retirement_details']
        content = self.reservation_expected_payload.copy()
        content['is_present'] = True
        content['retirement'] = 'http://testserver' + reverse(
            'retirement:retirement-detail',
            kwargs={'pk': self.retirement2.id},
        )
        self.assertEqual(response_data, content)
        # The exchange cancels the old reservation and records why/when.
        canceled_reservation = Reservation.objects.filter(is_active=False)[0]
        self.assertTrue(canceled_reservation)
        self.assertEqual(canceled_reservation.cancelation_action, 'E')
        self.assertEqual(canceled_reservation.cancelation_reason, 'U')
        self.assertEqual(canceled_reservation.cancelation_date, FIXED_TIME)
        self.assertEqual(canceled_reservation.retirement, self.retirement)
        self.assertEqual(canceled_reservation.order_line, self.order_line)
        # 1 email confirming the exchange
        # 1 email confirming participation to the new retirement
        self.assertEqual(len(mail.outbox), 2)
    def test_update_partial_is_present(self):
        """
        Ensure we can partially update a reservation (is_present).
        """
        self.client.force_authenticate(user=self.admin)
        FIXED_TIME = datetime(2030, 1, 10, tzinfo=LOCAL_TIMEZONE)
        data = {
            'is_present': True,
        }
        with mock.patch(
                'django.utils.timezone.now', return_value=FIXED_TIME):
            response = self.client.patch(
                reverse(
                    'retirement:reservation-detail',
                    kwargs={'pk': self.reservation.id},
                ),
                data,
                format='json',
            )
        self.assertEqual(
            response.status_code,
            status.HTTP_200_OK,
            response.content
        )
        response_data = json.loads(response.content)
        del response_data['user_details']
        del response_data['retirement_details']
        content = self.reservation_expected_payload.copy()
        content['is_present'] = True
        self.assertEqual(response_data, content)
        # No exchange happened, so no notification email is sent.
        self.assertEqual(len(mail.outbox), 0)
    def test_update_partial_ordered_more_than_1(self):
        """
        Ensure we can't update a reservation if it has more implication than
        predicted. (order.quantity > 1)
        """
        self.client.force_authenticate(user=self.admin)
        self.order_line.quantity = 2
        self.order_line.save()
        FIXED_TIME = datetime(2030, 1, 10, tzinfo=LOCAL_TIMEZONE)
        data = {
            'is_present': True,
            'retirement': reverse(
                'retirement:retirement-detail',
                kwargs={'pk': self.retirement2.id},
            ),
        }
        with mock.patch(
                'django.utils.timezone.now', return_value=FIXED_TIME):
            response = self.client.patch(
                reverse(
                    'retirement:reservation-detail',
                    kwargs={'pk': self.reservation.id},
                ),
                data,
                format='json',
            )
        self.assertEqual(
            response.status_code,
            status.HTTP_400_BAD_REQUEST,
            response.content
        )
        response_data = json.loads(response.content)
        content = {
            'non_field_errors': [
                "The order containing this reservation has a quantity "
                "bigger than 1. Please contact the support team."
            ]
        }
        self.assertEqual(response_data, content)
    def test_update_partial_no_place_left(self):
        """
        Ensure we can't update a reservation if the new retirement has no free
        place left.
        """
        self.client.force_authenticate(user=self.admin)
        self.retirement2.seats = 0
        self.retirement2.save()
        FIXED_TIME = datetime(2030, 1, 10, tzinfo=LOCAL_TIMEZONE)
        data = {
            'is_present': True,
            'retirement': reverse(
                'retirement:retirement-detail',
                kwargs={'pk': self.retirement2.id},
            ),
        }
        with mock.patch(
                'django.utils.timezone.now', return_value=FIXED_TIME):
            response = self.client.patch(
                reverse(
                    'retirement:reservation-detail',
                    kwargs={'pk': self.reservation.id},
                ),
                data,
                format='json',
            )
        self.assertEqual(
            response.status_code,
            status.HTTP_400_BAD_REQUEST,
            response.content
        )
        response_data = json.loads(response.content)
        content = {
            'non_field_errors': [
                "There are no places left in the requested retirement."
            ]
        }
        self.assertEqual(response_data, content)
    def test_update_partial_same_retirement(self):
        """
        Ensure we can't exchange a reservation for the retirement it is
        already assigned to.
        """
        self.client.force_authenticate(user=self.admin)
        FIXED_TIME = datetime(2030, 1, 10, tzinfo=LOCAL_TIMEZONE)
        data = {
            'is_present': True,
            'retirement': reverse(
                'retirement:retirement-detail',
                kwargs={'pk': self.retirement.id},
            ),
        }
        with mock.patch(
                'django.utils.timezone.now', return_value=FIXED_TIME):
            response = self.client.patch(
                reverse(
                    'retirement:reservation-detail',
                    kwargs={'pk': self.reservation.id},
                ),
                data,
                format='json',
            )
        self.assertEqual(
            response.status_code,
            status.HTTP_400_BAD_REQUEST,
            response.content
        )
        response_data = json.loads(response.content)
        content = {
            'retirement': [
                "That retirement is already assigned to this object."
            ]
        }
        self.assertEqual(response_data, content)
    def test_update_partial_without_proper_fields(self):
        """
        Ensure we can't partially update a reservation (other fields).
        """
        self.client.force_authenticate(user=self.admin)
        data = {
            'is_active': False,
        }
        response = self.client.patch(
            reverse(
                'retirement:reservation-detail',
                kwargs={'pk': self.reservation.id},
            ),
            data,
            format='json',
        )
        response_data = json.loads(response.content)
        content = {
            'non_field_errors': [
                "Only is_present and retirement can be updated. To change "
                "other fields, delete this reservation and create a new one."
            ]
        }
        self.assertEqual(response_data, content)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    def test_update_partial_not_in_min_day_exchange(self):
        """
        Ensure we can't change retirement if not respecting min_day_exchange.
        """
        self.client.force_authenticate(user=self.admin)
        # 2130-01-10 is within 7 days of the 2130-01-15 start date.
        FIXED_TIME = datetime(2130, 1, 10, tzinfo=LOCAL_TIMEZONE)
        data = {
            'retirement': reverse(
                'retirement:retirement-detail',
                kwargs={'pk': self.retirement2.id},
            ),
        }
        with mock.patch(
                'django.utils.timezone.now', return_value=FIXED_TIME):
            response = self.client.patch(
                reverse(
                    'retirement:reservation-detail',
                    kwargs={'pk': self.reservation.id},
                ),
                data,
                format='json',
            )
        self.assertEqual(
            response.status_code,
            status.HTTP_400_BAD_REQUEST,
            response.content
        )
        response_data = json.loads(response.content)
        content = {
            'non_field_errors': [
                "Maximum exchange date exceeded."
            ]
        }
        self.assertEqual(response_data, content)
    def test_update_partial_overlapping(self):
        """
        Ensure we can't change retirement if it overlaps with another
        reservation.
        """
        self.client.force_authenticate(user=self.admin)
        reservation_user = Reservation.objects.create(
            user=self.user,
            retirement=self.retirement2,
            order_line=self.order_line,
            is_active=True,
        )
        data = {
            'retirement': reverse(
                'retirement:retirement-detail',
                kwargs={'pk': self.retirement_overlap.id},
            ),
        }
        response = self.client.patch(
            reverse(
                'retirement:reservation-detail',
                kwargs={'pk': reservation_user.id},
            ),
            data,
            format='json',
        )
        self.assertEqual(
            response.status_code,
            status.HTTP_400_BAD_REQUEST,
            response.content
        )
        response_data = json.loads(response.content)
        content = {
            'non_field_errors': [
                "This reservation overlaps with another active "
                "reservations for this user."
            ]
        }
        self.assertEqual(response_data, content)
    def test_update_partial_more_expensive_retirement_missing_info(self):
        """
        Ensure we can't change retirement if the new one is more expensive and
        no payment_token or single_use_token is provided.
        """
        self.client.force_authenticate(user=self.admin)
        self.retirement2.price = 999
        self.retirement2.save()
        data = {
            'retirement': reverse(
                'retirement:retirement-detail',
                kwargs={'pk': self.retirement2.id},
            ),
        }
        response = self.client.patch(
            reverse(
                'retirement:reservation-detail',
                kwargs={'pk': self.reservation.id},
            ),
            data,
            format='json',
        )
        self.assertEqual(
            response.status_code,
            status.HTTP_400_BAD_REQUEST,
            response.content
        )
        response_data = json.loads(response.content)
        content = {
            'non_field_errors': [
                "The new retirement is more expensive than the current one. "
                "Provide a payment_token or single_use_token to charge the "
                "balance."
            ]
        }
        self.assertEqual(response_data, content)
        # Restore the fixture price mutated above (setUpClass data is
        # shared across the whole test class).
        self.retirement2.price = 199
        self.retirement2.save()
    @responses.activate
    def test_update_partial_more_expensive_retirement(self):
        """
        Ensure we can change retirement if the new one is more expensive and
        a payment_token or single_use_token is provided.
        """
        self.client.force_authenticate(user=self.user)
        self.retirement2.price = 999
        self.retirement2.save()
        responses.add(
            responses.POST,
            "http://example.com/cardpayments/v1/accounts/0123456789/auths/",
            json=SAMPLE_PAYMENT_RESPONSE,
            status=200
        )
        responses.add(
            responses.POST,
            "http://example.com/cardpayments/v1/accounts/0123456789/"
            "settlements/1/refunds",
            json=SAMPLE_REFUND_RESPONSE,
            status=200
        )
        FIXED_TIME = datetime(2018, 1, 1, tzinfo=LOCAL_TIMEZONE)
        data = {
            'retirement': reverse(
                'retirement:retirement-detail',
                kwargs={'pk': self.retirement2.id},
            ),
            'payment_token': "valid_token"
        }
        with mock.patch(
                'django.utils.timezone.now', return_value=FIXED_TIME):
            response = self.client.patch(
                reverse(
                    'retirement:reservation-detail',
                    kwargs={'pk': self.reservation.id},
                ),
                data,
                format='json',
            )
        # Validate the new order
        new_order = Order.objects.filter(transaction_date=FIXED_TIME)[0]
        self.assertTrue(new_order)
        self.assertEqual(new_order.transaction_date, FIXED_TIME)
        self.assertEqual(new_order.user, self.user)
        # Validate the new orderline
        self.reservation.refresh_from_db()
        new_orderline = self.reservation.order_line
        self.assertFalse(self.order_line == new_orderline)
        self.assertEqual(new_orderline.order, new_order)
        self.assertEqual(new_orderline.object_id, self.retirement2.id)
        self.assertEqual(new_orderline.content_type.model, "retirement")
        self.assertEqual(new_orderline.quantity, 1)
        # Validate the response
        self.assertEqual(
            response.status_code,
            status.HTTP_200_OK,
            response.content
        )
        response_data = json.loads(response.content)
        del response_data['user_details']
        del response_data['retirement_details']
        content = self.reservation_expected_payload.copy()
        content['retirement'] = 'http://testserver' + reverse(
            'retirement:retirement-detail',
            kwargs={'pk': self.retirement2.id},
        )
        content['order_line'] = 'http://testserver/order_lines/' + \
                                str(new_orderline.id)
        self.assertEqual(response_data, content)
        # Validate the canceled reservation
        canceled_reservation = Reservation.objects.filter(is_active=False)[0]
        self.assertTrue(canceled_reservation)
        self.assertEqual(canceled_reservation.cancelation_action, 'E')
        self.assertEqual(canceled_reservation.cancelation_reason, 'U')
        self.assertEqual(canceled_reservation.cancelation_date, FIXED_TIME)
        self.assertEqual(canceled_reservation.retirement, self.retirement)
        self.assertEqual(canceled_reservation.order_line, self.order_line)
        # Validate the full refund on old orderline
        refund = Refund.objects.filter(orderline=self.order_line)[0]
        self.assertTrue(refund)
        refund_amount = self.retirement.price * (Decimal(TAX_RATE) + 1)
        self.assertEqual(
            refund.amount,
            refund_amount.quantize(Decimal('.01'), rounding=ROUND_HALF_UP)
        )
        self.assertEqual(refund.refund_date, FIXED_TIME)
        # 1 mail confirming the exchange
        # 1 mail confirming the participation to the new retirement
        # 1 mail confirming the new order
        self.assertEqual(len(mail.outbox), 3)
        self.retirement2.price = 199
        self.retirement2.save()
    @responses.activate
    def test_update_partial_more_expensive_retirement_single_use_token(self):
        """
        Ensure we can change retirement if the new one is more expensive and
        a payment_token or single_use_token is provided.
        """
        self.client.force_authenticate(user=self.user)
        self.retirement2.price = 999
        self.retirement2.save()
        responses.add(
            responses.POST,
            "http://example.com/customervault/v1/profiles/",
            json=SAMPLE_PROFILE_RESPONSE,
            status=201
        )
        responses.add(
            responses.POST,
            "http://example.com/customervault/v1/profiles/123/cards/",
            json=SAMPLE_CARD_RESPONSE,
            status=201
        )
        responses.add(
            responses.POST,
            "http://example.com/cardpayments/v1/accounts/0123456789/auths/",
            json=SAMPLE_PAYMENT_RESPONSE,
            status=200
        )
        responses.add(
            responses.POST,
            "http://example.com/cardpayments/v1/accounts/0123456789/"
            "settlements/1/refunds",
            json=SAMPLE_REFUND_RESPONSE,
            status=200
        )
        FIXED_TIME = datetime(2018, 1, 1, tzinfo=LOCAL_TIMEZONE)
        data = {
            'retirement': reverse(
                'retirement:retirement-detail',
                kwargs={'pk': self.retirement2.id},
            ),
            'single_use_token': "valid_token"
        }
        with mock.patch(
                'django.utils.timezone.now', return_value=FIXED_TIME):
            response = self.client.patch(
                reverse(
                    'retirement:reservation-detail',
                    kwargs={'pk': self.reservation.id},
                ),
                data,
                format='json',
            )
        # Validate the new order
        new_order = Order.objects.filter(transaction_date=FIXED_TIME)[0]
        self.assertTrue(new_order)
        self.assertEqual(new_order.transaction_date, FIXED_TIME)
        self.assertEqual(new_order.user, self.user)
        # Validate the new orderline
        self.reservation.refresh_from_db()
        new_orderline = self.reservation.order_line
        self.assertFalse(self.order_line == new_orderline)
        self.assertEqual(new_orderline.order, new_order)
        self.assertEqual(new_orderline.object_id, self.retirement2.id)
        self.assertEqual(new_orderline.content_type.model, "retirement")
        self.assertEqual(new_orderline.quantity, 1)
        # Validate response
        self.assertEqual(
            response.status_code,
            status.HTTP_200_OK,
            response.content
        )
        response_data = json.loads(response.content)
        del response_data['user_details']
        del response_data['retirement_details']
        content = self.reservation_expected_payload.copy()
        content['retirement'] = 'http://testserver' + reverse(
            'retirement:retirement-detail',
            kwargs={'pk': self.retirement2.id},
        )
        content['order_line'] = 'http://testserver/order_lines/' + \
                                str(new_orderline.id)
        # NOTE(review): ``content`` is built but never asserted against
        # ``response_data`` here — cf. the payment_token variant above,
        # which does ``self.assertEqual(response_data, content)``.
        # Validate the canceled reservation
        canceled_reservation = Reservation.objects.filter(is_active=False)[0]
        self.assertTrue(canceled_reservation)
        self.assertEqual(canceled_reservation.cancelation_action, 'E')
        self.assertEqual(canceled_reservation.cancelation_reason, 'U')
        self.assertEqual(canceled_reservation.cancelation_date, FIXED_TIME)
        self.assertEqual(canceled_reservation.retirement, self.retirement)
        self.assertEqual(canceled_reservation.order_line, self.order_line)
        # Validate the full refund on old orderline
        refund = Refund.objects.filter(orderline=self.order_line)[0]
        self.assertTrue(refund)
        refund_amount = self.retirement.price * (Decimal(TAX_RATE) + 1)
        self.assertEqual(
            refund.amount,
            refund_amount.quantize(Decimal('.01'), rounding=ROUND_HALF_UP)
        )
        self.assertEqual(refund.refund_date, FIXED_TIME)
        # 1 mail confirming the exchange
        # 1 mail confirming the participation to the new retirement
        # 1 mail confirming the new order
        self.assertEqual(len(mail.outbox), 3)
        self.retirement2.price = 199
        self.retirement2.save()
    @responses.activate
    def test_update_partial_less_expensive_retirement(self):
        """
        Ensure we can change retirement if the new one is less expensive. A
        refund will be issued.
        """
        self.client.force_authenticate(user=self.user)
        self.retirement2.price = 99
        self.retirement2.save()
        responses.add(
            responses.POST,
            "http://example.com/cardpayments/v1/accounts/0123456789/"
            "settlements/1/refunds",
            json=SAMPLE_REFUND_RESPONSE,
            status=200
        )
        FIXED_TIME = datetime(2018, 1, 1, tzinfo=LOCAL_TIMEZONE)
        data = {
            'retirement': reverse(
                'retirement:retirement-detail',
                kwargs={'pk': self.retirement2.id},
            ),
        }
        with mock.patch(
                'django.utils.timezone.now', return_value=FIXED_TIME):
            response = self.client.patch(
                reverse(
                    'retirement:reservation-detail',
                    kwargs={'pk': self.reservation.id},
                ),
                data,
                format='json',
            )
        self.assertEqual(
            response.status_code,
            status.HTTP_200_OK,
            response.content
        )
        response_data = json.loads(response.content)
        del response_data['user_details']
        del response_data['retirement_details']
        content = self.reservation_expected_payload.copy()
        content['retirement'] = 'http://testserver' + reverse(
            'retirement:retirement-detail',
            kwargs={'pk': self.retirement2.id},
        )
        self.assertEqual(response_data, content)
        # Validate that a canceled reservation is created
        canceled_reservation = Reservation.objects.filter(is_active=False)[0]
        self.assertTrue(canceled_reservation)
        self.assertEqual(canceled_reservation.cancelation_action, 'E')
        self.assertEqual(canceled_reservation.cancelation_reason, 'U')
        self.assertEqual(canceled_reservation.cancelation_date, FIXED_TIME)
        self.assertEqual(canceled_reservation.retirement, self.retirement)
        self.assertEqual(canceled_reservation.order_line, self.order_line)
        # Validate that the refund object has been created
        refund = Refund.objects.filter(
            orderline=self.reservation.order_line
        )[0]
        self.assertTrue(refund)
        # Refund covers the (tax-inclusive) price difference only.
        tax_rate = Decimal(TAX_RATE + 1)
        amount = (self.retirement.price - self.retirement2.price) * tax_rate
        self.assertEqual(
            refund.amount,
            amount.quantize(Decimal('.01'), rounding=ROUND_HALF_UP)
        )
        self.assertEqual(refund.refund_date, FIXED_TIME)
        # 1 mail confirming the exchange
        # 1 mail confirming the participation to the new retirement
        # 1 mail confirming the refund
        self.assertEqual(len(mail.outbox), 3)
        self.retirement2.price = 199
        self.retirement2.save()
    def test_update_partial_with_forbidden_fields(self):
        """
        Ensure we can't partially update a reservation (other fields).
        """
        self.client.force_authenticate(user=self.admin)
        data = {
            'is_active': False,
            'is_present': True,
        }
        response = self.client.patch(
            reverse(
                'retirement:reservation-detail',
                kwargs={'pk': self.reservation.id},
            ),
            data,
            format='json',
        )
        response_data = json.loads(response.content)
        content = {
            'non_field_errors': [
                "Only is_present and retirement can be updated. To change "
                "other fields, delete this reservation and create a new one."
            ]
        }
        self.assertEqual(response_data, content)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    def test_update_partial_non_exchangeable(self):
        """
        Ensure we can't change a reservation's retirement if the reservation
        is marked as non-exchangeable.
        """
        self.client.force_authenticate(user=self.admin)
        data = {
            'retirement': reverse(
                'retirement:retirement-detail',
                kwargs={'pk': self.retirement2.id},
            ),
        }
        response = self.client.patch(
            reverse(
                'retirement:reservation-detail',
                kwargs={'pk': self.reservation_non_exchangeable.pk},
            ),
            data,
            format='json',
        )
        response_data = json.loads(response.content)
        content = {
            'non_field_errors': [
                "This reservation is not exchangeable. Please contact us "
                "to make any changes to this reservation."
            ]
        }
        self.assertEqual(response_data, content)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
| 32.237907
| 79
| 0.574884
| 3,221
| 32,657
| 5.653213
| 0.104626
| 0.058488
| 0.023066
| 0.027679
| 0.799165
| 0.784612
| 0.773628
| 0.762206
| 0.743918
| 0.718491
| 0
| 0.019537
| 0.332302
| 32,657
| 1,012
| 80
| 32.269763
| 0.815547
| 0.07196
| 0
| 0.683024
| 0
| 0
| 0.136755
| 0.03926
| 0
| 0
| 0
| 0
| 0.108753
| 1
| 0.02122
| false
| 0.001326
| 0.026525
| 0
| 0.049072
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5412089747b57e6535ada48b744e1113c7cf5967
| 442
|
py
|
Python
|
addons/l10n_cl/models/__init__.py
|
SHIVJITH/Odoo_Machine_Test
|
310497a9872db7844b521e6dab5f7a9f61d365a4
|
[
"Apache-2.0"
] | null | null | null |
addons/l10n_cl/models/__init__.py
|
SHIVJITH/Odoo_Machine_Test
|
310497a9872db7844b521e6dab5f7a9f61d365a4
|
[
"Apache-2.0"
] | null | null | null |
addons/l10n_cl/models/__init__.py
|
SHIVJITH/Odoo_Machine_Test
|
310497a9872db7844b521e6dab5f7a9f61d365a4
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from . import account_chart_template
from . import account_journal
from . import account_move
from . import account_tax
from . import ir_sequence
from . import l10n_latam_document_type
from . import res_company
from . import res_country
from . import res_currency
from . import res_partner
from . import res_partner_bank
from . import uom_uom
| 29.466667
| 74
| 0.79638
| 67
| 442
| 5.014925
| 0.537313
| 0.357143
| 0.193452
| 0.119048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007979
| 0.149321
| 442
| 14
| 75
| 31.571429
| 0.885638
| 0.21267
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
580e446b45752bc21b41be1bfaf4594058652a12
| 13
|
py
|
Python
|
test_data/parse_retree/unexpected/unterminated/group/source.py
|
aas-core-works/aas-core-codegen
|
afec2cf363b6cb69816e7724a2b58626e2165869
|
[
"MIT"
] | 5
|
2021-12-29T12:55:34.000Z
|
2022-03-01T17:57:21.000Z
|
test_data/parse_retree/unexpected/unterminated/group/source.py
|
aas-core-works/aas-core-codegen
|
afec2cf363b6cb69816e7724a2b58626e2165869
|
[
"MIT"
] | 10
|
2021-12-29T02:15:55.000Z
|
2022-03-09T11:04:22.000Z
|
test_data/parse_retree/unexpected/unterminated/group/source.py
|
aas-core-works/aas-core-codegen
|
afec2cf363b6cb69816e7724a2b58626e2165869
|
[
"MIT"
] | 2
|
2021-12-29T01:42:12.000Z
|
2022-02-15T13:46:33.000Z
|
"(something"
| 6.5
| 12
| 0.692308
| 1
| 13
| 9
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 13
| 1
| 13
| 13
| 0.75
| 0.769231
| 0
| 0
| 0
| 0
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5831745302c9ad2496063c133c1b3ca72842fd80
| 63
|
py
|
Python
|
airflow_looker/operators/__init__.py
|
gocardless/airflow-looker
|
4152752054423bd0eeaf21860b5de20bc950d19b
|
[
"MIT"
] | 4
|
2020-11-17T23:42:20.000Z
|
2022-02-08T19:31:12.000Z
|
airflow_looker/operators/__init__.py
|
gocardless/airflow-looker
|
4152752054423bd0eeaf21860b5de20bc950d19b
|
[
"MIT"
] | 2
|
2020-09-25T06:52:54.000Z
|
2021-06-25T15:40:53.000Z
|
airflow_looker/operators/__init__.py
|
gocardless/airflow-looker
|
4152752054423bd0eeaf21860b5de20bc950d19b
|
[
"MIT"
] | null | null | null |
from .looker_operator import LookerUpdateDataGroupByIDOperator
| 31.5
| 62
| 0.920635
| 5
| 63
| 11.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063492
| 63
| 1
| 63
| 63
| 0.966102
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
58384f7a592cf8ca306adec8d5559b449e2da424
| 140
|
py
|
Python
|
src/jazzit/play.py
|
gtaylor/jazzit
|
52256d5d9a477dc95da5c41daaf31020a662cb90
|
[
"Apache-2.0"
] | null | null | null |
src/jazzit/play.py
|
gtaylor/jazzit
|
52256d5d9a477dc95da5c41daaf31020a662cb90
|
[
"Apache-2.0"
] | null | null | null |
src/jazzit/play.py
|
gtaylor/jazzit
|
52256d5d9a477dc95da5c41daaf31020a662cb90
|
[
"Apache-2.0"
] | null | null | null |
import sys
import time
from playsound import playsound

# Loop a single audio track forever; stop with Ctrl-C.
# Usage: python play.py TRACK_PATH
#
# Fix: guard the argv access — the original crashed with a bare
# IndexError when no track path was supplied.
if len(sys.argv) < 2:
    sys.exit("usage: play.py TRACK_PATH")

track_path = sys.argv[1]
while True:
    playsound(track_path)
    # Short pause between repeats before restarting playback.
    time.sleep(0.1)
| 14
| 31
| 0.742857
| 22
| 140
| 4.636364
| 0.590909
| 0.27451
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026087
| 0.178571
| 140
| 9
| 32
| 15.555556
| 0.86087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.428571
| 0
| 0.428571
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
583a52d92fcb33ca2864afe4d2c2a7ca0dce6630
| 48
|
py
|
Python
|
runserver.py
|
mdaniline/skysimulator-web
|
18c805dc20d5c4de3cba2a814ccda26108a2ff0b
|
[
"MIT"
] | null | null | null |
runserver.py
|
mdaniline/skysimulator-web
|
18c805dc20d5c4de3cba2a814ccda26108a2ff0b
|
[
"MIT"
] | null | null | null |
runserver.py
|
mdaniline/skysimulator-web
|
18c805dc20d5c4de3cba2a814ccda26108a2ff0b
|
[
"MIT"
] | null | null | null |
from skysimulator import app

# Fix: start the Flask development server only when this file is executed
# directly — the original ran app.run() at import time, so merely importing
# runserver (e.g. from a WSGI entry point or a test) started a server.
# NOTE: debug=True enables the interactive debugger; development use only.
if __name__ == "__main__":
    app.run(debug=True)
| 24
| 28
| 0.833333
| 8
| 48
| 5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 48
| 2
| 29
| 24
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
544d620318af7a0186038ed8ce9fc53b161deabb
| 69
|
py
|
Python
|
handroll/commands/__init__.py
|
iter8ve/handroll
|
55a97080b82ac5dcec1e974379b22fbaacf18c99
|
[
"BSD-2-Clause"
] | 17
|
2015-02-28T07:36:13.000Z
|
2022-01-22T13:49:22.000Z
|
handroll/commands/__init__.py
|
iter8ve/handroll
|
55a97080b82ac5dcec1e974379b22fbaacf18c99
|
[
"BSD-2-Clause"
] | 61
|
2015-02-24T01:12:56.000Z
|
2017-10-21T18:53:26.000Z
|
handroll/commands/__init__.py
|
iter8ve/handroll
|
55a97080b82ac5dcec1e974379b22fbaacf18c99
|
[
"BSD-2-Clause"
] | 3
|
2019-02-04T16:33:08.000Z
|
2021-04-20T08:58:56.000Z
|
# Copyright (c) 2017, Matt Layman
from .base import Command # NOQA
| 17.25
| 33
| 0.710145
| 10
| 69
| 4.9
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072727
| 0.202899
| 69
| 3
| 34
| 23
| 0.818182
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5452e94358ef65c6fda8ea5d554e7c662239416a
| 235
|
py
|
Python
|
thread-renderer/src/generating/exceptions.py
|
FToovvr/adnmb-quests-tools
|
eb3c594cb94ff803edde4705ab67e8de060c7efb
|
[
"MIT"
] | null | null | null |
thread-renderer/src/generating/exceptions.py
|
FToovvr/adnmb-quests-tools
|
eb3c594cb94ff803edde4705ab67e8de060c7efb
|
[
"MIT"
] | null | null | null |
thread-renderer/src/generating/exceptions.py
|
FToovvr/adnmb-quests-tools
|
eb3c594cb94ff803edde4705ab67e8de060c7efb
|
[
"MIT"
] | null | null | null |
from typing import Optional
from dataclasses import dataclass
from ..configloader import DivisionType
@dataclass
class UnexpectedDivisionTypeException(Exception):
    """Raised when an encountered division type differs from the expected one.

    ``None`` on either field means "no division type at all" for that side.
    """

    # The division type actually encountered (None if absent).
    got: Optional[DivisionType]
    # The division type that was expected instead (None if none expected).
    expected: Optional[DivisionType]
| 21.363636
| 49
| 0.821277
| 22
| 235
| 8.772727
| 0.590909
| 0.207254
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12766
| 235
| 10
| 50
| 23.5
| 0.941463
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.428571
| 0
| 0.857143
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
547ceb9f7020c304d6e89c5d16ad1bd2d142d34c
| 77
|
py
|
Python
|
nt-worker/newGetByline/__init__.py
|
KPFBERT/Newstrust
|
db1ca6454ce9f421f9c4006f8cd00bade06b17b5
|
[
"MIT"
] | 1
|
2022-02-25T02:35:09.000Z
|
2022-02-25T02:35:09.000Z
|
nt-worker/newGetByline/__init__.py
|
KPFBERT/Newstrust
|
db1ca6454ce9f421f9c4006f8cd00bade06b17b5
|
[
"MIT"
] | null | null | null |
nt-worker/newGetByline/__init__.py
|
KPFBERT/Newstrust
|
db1ca6454ce9f421f9c4006f8cd00bade06b17b5
|
[
"MIT"
] | null | null | null |
from .byLineClass import MainBylineParser
from .byLineParserSelector import *
| 38.5
| 41
| 0.87013
| 7
| 77
| 9.571429
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 77
| 2
| 42
| 38.5
| 0.957143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
54810b2f4abc97995e0ba11ac40fba2b40bc5055
| 188
|
py
|
Python
|
_notebooks/myfuns.py
|
guebin/IP2022
|
8eac5980a089ebdf9a8da3444d674cda34bb9677
|
[
"Apache-2.0"
] | null | null | null |
_notebooks/myfuns.py
|
guebin/IP2022
|
8eac5980a089ebdf9a8da3444d674cda34bb9677
|
[
"Apache-2.0"
] | 1
|
2022-03-02T00:49:21.000Z
|
2022-03-02T00:49:21.000Z
|
_notebooks/myfuns.py
|
guebin/IP2022
|
8eac5980a089ebdf9a8da3444d674cda34bb9677
|
[
"Apache-2.0"
] | null | null | null |
"""이것은 길이가 2인 벡터의 합 혹은 차를 구하는 모듈입니다."""
def vec2_add(a, b):
    """Return the component-wise sum of two length-2 vectors a and b."""
    print("이것은 myfuns.py에 정의된 함수입니다")
    first = a[0] + b[0]
    second = a[1] + b[1]
    return [first, second]
def vec2_sub(a, b):
    """Return the component-wise difference a - b of two length-2 vectors."""
    return [a[i] - b[i] for i in range(2)]
| 26.857143
| 40
| 0.547872
| 43
| 188
| 2.348837
| 0.534884
| 0.138614
| 0.158416
| 0.178218
| 0.277228
| 0.277228
| 0.277228
| 0.277228
| 0.277228
| 0
| 0
| 0.073826
| 0.207447
| 188
| 6
| 41
| 31.333333
| 0.604027
| 0.175532
| 0
| 0
| 0
| 0
| 0.161074
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.2
| 0.8
| 0.2
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
49c3ef558e3461ae4925036944b068ed6e9fe598
| 525
|
py
|
Python
|
django_app/app/tests/__init__.py
|
kiryanenko/django-tarantool
|
9448e2acceedd3b99bcb3fc3ef3c13b5db3f29e8
|
[
"MIT"
] | 15
|
2020-06-26T16:27:50.000Z
|
2021-11-04T11:45:42.000Z
|
django_app/app/tests/__init__.py
|
kiryanenko/django-tarantool
|
9448e2acceedd3b99bcb3fc3ef3c13b5db3f29e8
|
[
"MIT"
] | 1
|
2021-01-15T13:05:34.000Z
|
2021-01-16T22:37:18.000Z
|
django_app/app/tests/__init__.py
|
kiryanenko/django-tarantool
|
9448e2acceedd3b99bcb3fc3ef3c13b5db3f29e8
|
[
"MIT"
] | 1
|
2020-12-21T12:57:53.000Z
|
2020-12-21T12:57:53.000Z
|
from .boolean import *
from .null_boolean import *
from .datetime import *
from .time import *
from .date import *
from .float import *
from .decimal import *
from .integer import *
from .big_integer import *
from .positive_integer import *
from .small_integer import *
from .positive_small_integer import *
from .char import *
from .text import *
from .uuid import *
from .slug import *
from .one_to_one import *
from .foreign_key import *
from .binary import *
from .many_to_many import *
from .generic_ip_address import *
| 23.863636
| 37
| 0.76
| 76
| 525
| 5.078947
| 0.342105
| 0.518135
| 0.220207
| 0.129534
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 525
| 21
| 38
| 25
| 0.875283
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
49db7799bb419ba15acc442f801f02f8a4db3140
| 67
|
py
|
Python
|
sensors/__init__.py
|
igrowing/RaspberryPi
|
adf60ac17bbdb0e77a8619e2b6f54a2f6b1232c5
|
[
"MIT"
] | 5
|
2018-12-29T08:08:04.000Z
|
2020-10-12T10:09:01.000Z
|
sensors/__init__.py
|
igrowing/RaspberryPi
|
adf60ac17bbdb0e77a8619e2b6f54a2f6b1232c5
|
[
"MIT"
] | 1
|
2019-07-30T12:59:03.000Z
|
2019-07-30T12:59:03.000Z
|
sensors/__init__.py
|
igrowing/RaspberryPi
|
adf60ac17bbdb0e77a8619e2b6f54a2f6b1232c5
|
[
"MIT"
] | 1
|
2019-02-14T22:45:40.000Z
|
2019-02-14T22:45:40.000Z
|
import bme280
import mlx90614
import max44009
import read4channels
| 13.4
| 20
| 0.880597
| 8
| 67
| 7.375
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.237288
| 0.119403
| 67
| 4
| 21
| 16.75
| 0.762712
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
49eaca31e2c18266b32666fc186841882635e415
| 47
|
py
|
Python
|
lib/__init__.py
|
vinod-iref/awfleet
|
5cf86eea025e69b74d5aa3793d74f73284292fc3
|
[
"MIT"
] | null | null | null |
lib/__init__.py
|
vinod-iref/awfleet
|
5cf86eea025e69b74d5aa3793d74f73284292fc3
|
[
"MIT"
] | null | null | null |
lib/__init__.py
|
vinod-iref/awfleet
|
5cf86eea025e69b74d5aa3793d74f73284292fc3
|
[
"MIT"
] | null | null | null |
from demand import Demand
from spot import Spot
| 23.5
| 25
| 0.851064
| 8
| 47
| 5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148936
| 47
| 2
| 26
| 23.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
49fbb9bce17b068bba775c7cf3caaaef49666f8a
| 22
|
py
|
Python
|
pb.py
|
Morsladio/prueba
|
34740039970cceb06aea5c28d3b1557507e20c75
|
[
"MIT"
] | 1
|
2021-05-09T06:39:13.000Z
|
2021-05-09T06:39:13.000Z
|
pb.py
|
Vulgrihm/prueba
|
34740039970cceb06aea5c28d3b1557507e20c75
|
[
"MIT"
] | null | null | null |
pb.py
|
Vulgrihm/prueba
|
34740039970cceb06aea5c28d3b1557507e20c75
|
[
"MIT"
] | null | null | null |
# Created by: Morsladio
| 22
| 22
| 0.818182
| 3
| 22
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 22
| 1
| 22
| 22
| 0.9
| 0.954545
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b70365862431e4d404c605775a64dad8ee2c055d
| 125
|
py
|
Python
|
python/testData/intentions/PyInvertIfConditionIntentionTest/conditionAssignmentMultiple_after.py
|
Tasemo/intellij-community
|
50aeaf729b7073e91c7c77487a1f155e0dfe3fcd
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/intentions/PyInvertIfConditionIntentionTest/conditionAssignmentMultiple_after.py
|
Tasemo/intellij-community
|
50aeaf729b7073e91c7c77487a1f155e0dfe3fcd
|
[
"Apache-2.0"
] | null | null | null |
python/testData/intentions/PyInvertIfConditionIntentionTest/conditionAssignmentMultiple_after.py
|
Tasemo/intellij-community
|
50aeaf729b7073e91c7c77487a1f155e0dfe3fcd
|
[
"Apache-2.0"
] | null | null | null |
# Test fixture for the PyInvertIfCondition intention ("..._after.py"): this is
# the expected *result* after inverting the condition, so the exact statement
# shapes — including the walrus assignment inside the condition — are the
# content under test and must not be restructured.
def get_value():
    return 1


if (value := get_value()) and value > 1:
    print("Greater")
else:
    print("Less or equal")
| 17.857143
| 40
| 0.608
| 19
| 125
| 3.894737
| 0.684211
| 0.216216
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020833
| 0.232
| 125
| 7
| 41
| 17.857143
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.15873
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0
| 0.166667
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
b71e29f62bc0312336a1f068b0323f77a73686a7
| 5,313
|
py
|
Python
|
test/unit/tools/test_rc.py
|
thomasrockhu/bfg9000
|
1cd1226eab9bed2fc2ec6acccf7864fdcf2ed31a
|
[
"BSD-3-Clause"
] | null | null | null |
test/unit/tools/test_rc.py
|
thomasrockhu/bfg9000
|
1cd1226eab9bed2fc2ec6acccf7864fdcf2ed31a
|
[
"BSD-3-Clause"
] | 1
|
2020-11-14T10:30:15.000Z
|
2020-11-14T10:30:15.000Z
|
test/unit/tools/test_rc.py
|
thomasrockhu/bfg9000
|
1cd1226eab9bed2fc2ec6acccf7864fdcf2ed31a
|
[
"BSD-3-Clause"
] | null | null | null |
from collections import namedtuple
from unittest import mock
from .. import *
from bfg9000.languages import known_langs
from bfg9000.tools import rc
# Minimal stand-in for a platform object exposing only a 'family' attribute.
# NOTE(review): not referenced elsewhere in this visible module — confirm use.
MockPlatform = namedtuple('MockPlatform', 'family')
def mock_execute(args, **kwargs):
    """Canned stand-in for shell execution, keyed on the final argument.

    Returns the pre-recorded output for known probe flags and (implicitly)
    None for anything unrecognized, exactly like the original if/elif chain.
    """
    canned = {
        '--version': 'version\n',
        '-Wl,--version': ('', 'COLLECT_GCC=g++\n/usr/bin/collect2 --version\n'
                              '/usr/bin/ld --version\n'),
        '-print-search-dirs': 'libraries: =/lib/search/dir1:/lib/search/dir2\n',
        '-print-sysroot': '/',
        '--verbose': 'SEARCH_DIR("/usr")\n',
    }
    return canned.get(args[-1])
class TestRcBuilder(TestCase):
    """Tests for rc.rc_builder's choice of resource-compiler candidates.

    bfg9000.tools.rc.choose_builder is always patched out, so each test only
    asserts on the candidate names (and strict flag) that rc_builder computes;
    no real tools are executed.
    """

    def test_implicit_linux(self):
        env = make_env(platform='linux', clear_variables=True)
        with mock.patch('bfg9000.tools.rc.choose_builder') as m, \
                mock.patch('bfg9000.shell.which', return_value=['cmd']), \
                mock.patch('bfg9000.shell.execute', mock_execute):  # noqa
            rc.rc_builder(env)
        # With no variables set on linux, only 'windres' is a candidate.
        m.assert_called_once_with(env, known_langs['rc'], rc._builders,
                                  candidates=['windres'])

    def test_implicit_windows(self):
        env = make_env(platform='winnt', clear_variables=True)
        with mock.patch('bfg9000.tools.rc.choose_builder') as m, \
                mock.patch('bfg9000.shell.which', return_value=['cmd']), \
                mock.patch('bfg9000.shell.execute', mock_execute):  # noqa
            rc.rc_builder(env)
        # On winnt the candidate list is ['rc', 'windres'].
        m.assert_called_once_with(env, known_langs['rc'], rc._builders,
                                  candidates=['rc', 'windres'])

    def test_explicit(self):
        env = make_env(platform='linux', clear_variables=True,
                       variables={'RC': 'gcc-windres'})
        with mock.patch('bfg9000.tools.rc.choose_builder') as m:
            rc.rc_builder(env)
        # An explicit RC environment variable is passed through verbatim.
        m.assert_called_once_with(env, known_langs['rc'], rc._builders,
                                  candidates='gcc-windres')

    def test_guess_sibling(self):
        env = make_env(platform='linux', clear_variables=True,
                       variables={'CC': 'gcc-99'})
        with mock.patch('bfg9000.tools.rc.choose_builder') as m, \
                mock.patch('bfg9000.shell.which', return_value=['gcc-99']), \
                mock.patch('bfg9000.shell.execute', mock_execute), \
                mock.patch('bfg9000.log.info'):  # noqa
            rc.rc_builder(env)
        # CC=gcc-99 leads to the sibling guess 'windres-99', tried strictly.
        m.assert_called_once_with(env, known_langs['rc'], rc._builders,
                                  candidates='windres-99', strict=True)

    def test_guess_sibling_nonexist(self):
        env = make_env(platform='linux', clear_variables=True,
                       variables={'CC': 'gcc-99'})
        with mock.patch('bfg9000.tools.rc.choose_builder') as m, \
                mock.patch('bfg9000.shell.which', side_effect=IOError()), \
                mock.patch('bfg9000.log.info'), \
                mock.patch('warnings.warn'):  # noqa
            rc.rc_builder(env)
        # Even when `which` fails, the sibling guess is still attempted.
        m.assert_called_once_with(env, known_langs['rc'], rc._builders,
                                  candidates='windres-99', strict=True)

    def test_guess_sibling_indirect(self):
        env = make_env(platform='linux', clear_variables=True,
                       variables={'CXX': 'g++-99'})
        with mock.patch('bfg9000.tools.rc.choose_builder') as m, \
                mock.patch('bfg9000.shell.which', return_value=['gcc-99']), \
                mock.patch('bfg9000.shell.execute', mock_execute), \
                mock.patch('bfg9000.log.info'):  # noqa
            rc.rc_builder(env)
        # CXX=g++-99 also produces the 'windres-99' sibling guess.
        m.assert_called_once_with(env, known_langs['rc'], rc._builders,
                                  candidates='windres-99', strict=True)

    def test_guess_sibling_matches_default(self):
        env = make_env(platform='linux', clear_variables=True,
                       variables={'CC': 'gcc'})
        with mock.patch('bfg9000.tools.rc.choose_builder') as m, \
                mock.patch('bfg9000.shell.which', return_value=['gcc']), \
                mock.patch('bfg9000.shell.execute', mock_execute), \
                mock.patch('bfg9000.log.info'):  # noqa
            rc.rc_builder(env)
        # An unversioned CC=gcc yields the plain default candidate list.
        m.assert_called_once_with(env, known_langs['rc'], rc._builders,
                                  candidates=['windres'])

    def test_guess_sibling_error(self):
        # Simulate the strict sibling attempt failing so rc_builder falls
        # back to the default candidates on a second call.
        def mock_choose_builder(*args, strict=False, **kwargs):
            if strict:
                raise IOError('bad')
            return mock.MagicMock()

        env = make_env(platform='linux', clear_variables=True,
                       variables={'CC': 'gcc-99'})
        with mock.patch('bfg9000.tools.rc.choose_builder',
                        side_effect=mock_choose_builder) as m, \
                mock.patch('bfg9000.log.info'), \
                mock.patch('warnings.warn'):  # noqa
            rc.rc_builder(env)
        # First the strict sibling guess, then the non-strict default list.
        self.assertEqual(m.mock_calls, [
            mock.call(env, known_langs['rc'], rc._builders,
                      candidates='windres-99', strict=True),
            mock.call(env, known_langs['rc'], rc._builders,
                      candidates=['windres']),
        ])
| 45.801724
| 75
| 0.566911
| 609
| 5,313
| 4.761905
| 0.164204
| 0.08069
| 0.132414
| 0.079655
| 0.775862
| 0.736552
| 0.736552
| 0.731034
| 0.731034
| 0.721724
| 0
| 0.034995
| 0.290043
| 5,313
| 115
| 76
| 46.2
| 0.733828
| 0.006399
| 0
| 0.53
| 0
| 0
| 0.192373
| 0.08025
| 0
| 0
| 0
| 0
| 0.08
| 1
| 0.1
| false
| 0
| 0.05
| 0
| 0.22
| 0.02
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b735b6597368edc72a093033f3df5d0a0f252ea5
| 143
|
py
|
Python
|
gambolputty/Py2Compat.py
|
lukebeer/GambolPutty
|
d642433bb2a6a54be6b4cfaa12a507994af8445a
|
[
"Apache-2.0"
] | null | null | null |
gambolputty/Py2Compat.py
|
lukebeer/GambolPutty
|
d642433bb2a6a54be6b4cfaa12a507994af8445a
|
[
"Apache-2.0"
] | null | null | null |
gambolputty/Py2Compat.py
|
lukebeer/GambolPutty
|
d642433bb2a6a54be6b4cfaa12a507994af8445a
|
[
"Apache-2.0"
] | 1
|
2019-12-03T11:36:32.000Z
|
2019-12-03T11:36:32.000Z
|
# -*- coding: utf-8 -*-
def exec_function(string_to_execute, global_env={}, local_env={}):
exec string_to_execute in global_env, local_env
| 35.75
| 66
| 0.727273
| 22
| 143
| 4.318182
| 0.590909
| 0.168421
| 0.315789
| 0.357895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008
| 0.125874
| 143
| 4
| 67
| 35.75
| 0.752
| 0.146853
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b73e3ab6c414d1d273651aa64b14316db59b0bb5
| 366
|
py
|
Python
|
moto/cloudformation/__init__.py
|
alexsult/moto
|
ed861ecae1039a048a6350a4ff832ef094cdf2c2
|
[
"Apache-2.0"
] | 2
|
2019-07-10T14:44:12.000Z
|
2020-06-08T17:26:29.000Z
|
moto/cloudformation/__init__.py
|
alexsult/moto
|
ed861ecae1039a048a6350a4ff832ef094cdf2c2
|
[
"Apache-2.0"
] | 5
|
2018-04-25T21:04:20.000Z
|
2018-11-02T19:59:27.000Z
|
moto/cloudformation/__init__.py
|
alexsult/moto
|
ed861ecae1039a048a6350a4ff832ef094cdf2c2
|
[
"Apache-2.0"
] | 12
|
2017-09-06T22:11:15.000Z
|
2021-05-28T17:22:31.000Z
|
from __future__ import unicode_literals
from .models import cloudformation_backends
from ..core.models import base_decorator, deprecated_base_decorator
# Module-level convenience backend; NOTE(review): 'us-east-1' appears to be
# the default region used for the singleton backend — confirm against moto's
# other service packages.
cloudformation_backend = cloudformation_backends['us-east-1']
# Decorators that activate the mocked CloudFormation backends in tests; the
# *_deprecated variant wraps them with deprecated_base_decorator (presumably
# the legacy decorator protocol — verify in ..core.models).
mock_cloudformation = base_decorator(cloudformation_backends)
mock_cloudformation_deprecated = deprecated_base_decorator(
    cloudformation_backends)
| 40.666667
| 67
| 0.874317
| 40
| 366
| 7.525
| 0.425
| 0.292359
| 0.269103
| 0.245847
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002959
| 0.076503
| 366
| 8
| 68
| 45.75
| 0.887574
| 0
| 0
| 0
| 0
| 0
| 0.02459
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.428571
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
b73f322cbc9837cf747280334496f008628b5025
| 30
|
py
|
Python
|
mypack/subpackage/functions.py
|
caam37830/python-packages
|
471965214d8a518cb12ffd22de229287a0204a4c
|
[
"Unlicense"
] | null | null | null |
mypack/subpackage/functions.py
|
caam37830/python-packages
|
471965214d8a518cb12ffd22de229287a0204a4c
|
[
"Unlicense"
] | null | null | null |
mypack/subpackage/functions.py
|
caam37830/python-packages
|
471965214d8a518cb12ffd22de229287a0204a4c
|
[
"Unlicense"
] | null | null | null |
def plus2(x):
    """Return *x* incremented by two."""
    offset = 2
    return x + offset
| 15
| 16
| 0.566667
| 6
| 30
| 2.833333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 0.3
| 30
| 2
| 16
| 15
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
3fd03b7c36443ee75be191036a081c4e47237b67
| 77
|
py
|
Python
|
spec/fonts/OpenBaskerville-0.0.53/tools/ufo2otf/__init__.py
|
Julusian/node-freetype2
|
be52fd5c8530bbb5d2486bccac5d698c578ef722
|
[
"MIT"
] | 29
|
2015-03-16T14:59:48.000Z
|
2022-02-06T20:28:16.000Z
|
spec/fonts/OpenBaskerville-0.0.53/tools/ufo2otf/__init__.py
|
Julusian/node-freetype2
|
be52fd5c8530bbb5d2486bccac5d698c578ef722
|
[
"MIT"
] | 34
|
2015-03-16T01:37:22.000Z
|
2022-02-19T10:51:03.000Z
|
spec/fonts/OpenBaskerville-0.0.53/tools/ufo2otf/__init__.py
|
isabella232/node-freetype2
|
c22d51f7c9d351e5968c87e4fe57c51d3b0eebaf
|
[
"MIT"
] | 16
|
2015-02-11T14:11:37.000Z
|
2021-11-19T12:04:38.000Z
|
from compilers import Compiler
from diagnostics import diagnostics, FontError
| 38.5
| 46
| 0.883117
| 9
| 77
| 7.555556
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103896
| 77
| 2
| 46
| 38.5
| 0.985507
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3fd4c8faf2858518dfa328f7bc75e1cf32f0d47e
| 227
|
py
|
Python
|
python/sierrapy/commands/__init__.py
|
hivdb/sierra-client
|
627f42eff737d5f378b24e4ad9e7f0b8bf3fa110
|
[
"MIT"
] | 6
|
2016-09-26T12:41:13.000Z
|
2022-01-22T03:28:58.000Z
|
python/sierrapy/commands/__init__.py
|
hivdb/sierra-client
|
627f42eff737d5f378b24e4ad9e7f0b8bf3fa110
|
[
"MIT"
] | 10
|
2016-10-26T16:42:50.000Z
|
2021-10-31T21:40:12.000Z
|
python/sierrapy/commands/__init__.py
|
hivdb/sierra-client
|
627f42eff737d5f378b24e4ad9e7f0b8bf3fa110
|
[
"MIT"
] | 7
|
2017-11-16T14:51:58.000Z
|
2021-05-01T19:21:11.000Z
|
from .cli import cli
from . import fasta # noqa
from . import mutations # noqa
from . import patterns # noqa
from . import seqreads # noqa
from . import introspection # noqa
from . import recipe # noqa
# Only `cli` is the public API; the submodule imports above are presumably
# kept for their side effects (registering subcommands on `cli`), hence the
# `# noqa` markers — confirm in the individual command modules.
__all__ = ['cli']
| 22.7
| 35
| 0.700441
| 30
| 227
| 5.166667
| 0.366667
| 0.387097
| 0.451613
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.220264
| 227
| 9
| 36
| 25.222222
| 0.875706
| 0.127753
| 0
| 0
| 0
| 0
| 0.015707
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.875
| 0
| 0.875
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3fe62118ed485a1b100b3eaed158027798585e58
| 68
|
py
|
Python
|
cloudwatchiter/__init__.py
|
jniedrauer/cloudwatchiter
|
223e58b5a00f424e0ae4000b09f7741a65cbb314
|
[
"MIT"
] | null | null | null |
cloudwatchiter/__init__.py
|
jniedrauer/cloudwatchiter
|
223e58b5a00f424e0ae4000b09f7741a65cbb314
|
[
"MIT"
] | null | null | null |
cloudwatchiter/__init__.py
|
jniedrauer/cloudwatchiter
|
223e58b5a00f424e0ae4000b09f7741a65cbb314
|
[
"MIT"
] | null | null | null |
"""Module namespace"""
from .cloudwatchiter import cloudwatchiter
| 13.6
| 42
| 0.779412
| 6
| 68
| 8.833333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 68
| 4
| 43
| 17
| 0.883333
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3ff36771949d4282522c2a60b3e7a251b5f7aa5d
| 130
|
py
|
Python
|
BaseKnowledge/base/complex.py
|
Kose-i/python_test
|
d7b031aa33d699aeb9fe196fe0a6d216aa006f0d
|
[
"Unlicense"
] | null | null | null |
BaseKnowledge/base/complex.py
|
Kose-i/python_test
|
d7b031aa33d699aeb9fe196fe0a6d216aa006f0d
|
[
"Unlicense"
] | null | null | null |
BaseKnowledge/base/complex.py
|
Kose-i/python_test
|
d7b031aa33d699aeb9fe196fe0a6d216aa006f0d
|
[
"Unlicense"
] | null | null | null |
#! /usr/bin/env python3
# Demo of Python's built-in complex numbers: the 'j' suffix marks the
# imaginary part, and all arithmetic operators below work on them natively.
x = 3 + 2j
y = 2 + 5j
print(type(x))  # <class 'complex'>
print(x + y)
print(x - y)
print(x * y)
print(x ** y)  # complex exponentiation is supported too
print(x / y)
| 10
| 23
| 0.530769
| 28
| 130
| 2.464286
| 0.428571
| 0.434783
| 0.507246
| 0.695652
| 0.507246
| 0.507246
| 0.507246
| 0.507246
| 0.507246
| 0.507246
| 0
| 0.05102
| 0.246154
| 130
| 12
| 24
| 10.833333
| 0.653061
| 0.169231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.75
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
3fffde957ea8f5320f2f05a00efd64de07da93c7
| 211
|
py
|
Python
|
src/main.py
|
daniel3108-pl/FastApi_NoteStoringAPI
|
b8f58af28f37c0867f11616aa7adfbd60ff494c5
|
[
"MIT"
] | null | null | null |
src/main.py
|
daniel3108-pl/FastApi_NoteStoringAPI
|
b8f58af28f37c0867f11616aa7adfbd60ff494c5
|
[
"MIT"
] | null | null | null |
src/main.py
|
daniel3108-pl/FastApi_NoteStoringAPI
|
b8f58af28f37c0867f11616aa7adfbd60ff494c5
|
[
"MIT"
] | null | null | null |
from DA.database import initDataBase
from fastapi import FastAPI
from routers import notes, users
# Application wiring: create the FastAPI instance and mount the note and
# user routers.
app = FastAPI()
app.include_router(router=notes.router)
app.include_router(router=users.router)
# Runs at import time, before the server starts serving requests.
# NOTE(review): presumably initializes the database schema/connection —
# confirm in DA.database.
initDataBase()
| 21.1
| 39
| 0.819905
| 29
| 211
| 5.896552
| 0.413793
| 0.116959
| 0.187135
| 0.25731
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099526
| 211
| 9
| 40
| 23.444444
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.428571
| 0
| 0.428571
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
b74a21a3a4baee8e1ace1d4298da7847fa61cc93
| 68
|
py
|
Python
|
src/obfuscapk/obfuscators/random_manifest/__init__.py
|
Elyorbe/Obfuscapk
|
2ba18df4b5efe44b4fab271ab5ccfe51488fa693
|
[
"MIT"
] | 688
|
2019-08-23T16:43:10.000Z
|
2022-03-29T19:35:07.000Z
|
src/obfuscapk/obfuscators/random_manifest/__init__.py
|
Ktm2590/Obfuscapk
|
4072b6dea88e91b75064a84c7d3adebe005e8207
|
[
"MIT"
] | 124
|
2019-09-07T13:10:59.000Z
|
2022-03-17T16:47:29.000Z
|
src/obfuscapk/obfuscators/random_manifest/__init__.py
|
Ktm2590/Obfuscapk
|
4072b6dea88e91b75064a84c7d3adebe005e8207
|
[
"MIT"
] | 204
|
2019-08-23T14:43:52.000Z
|
2022-03-30T21:04:50.000Z
|
#!/usr/bin/env python3
from .random_manifest import RandomManifest
| 17
| 43
| 0.808824
| 9
| 68
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016393
| 0.102941
| 68
| 3
| 44
| 22.666667
| 0.868852
| 0.308824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b756d885c5a14014ced5aa2387286b47f84fe426
| 119
|
py
|
Python
|
src/invoice/admin.py
|
vandana0608/Pharmacy-Managament
|
f99bdec11c24027a432858daa19247a21cecc092
|
[
"bzip2-1.0.6"
] | null | null | null |
src/invoice/admin.py
|
vandana0608/Pharmacy-Managament
|
f99bdec11c24027a432858daa19247a21cecc092
|
[
"bzip2-1.0.6"
] | null | null | null |
src/invoice/admin.py
|
vandana0608/Pharmacy-Managament
|
f99bdec11c24027a432858daa19247a21cecc092
|
[
"bzip2-1.0.6"
] | null | null | null |
from django.contrib import admin
from .models import invoice
# Expose the invoice model in Django's admin site with default options.
admin.site.register(invoice)
# Register your models here.
| 23.8
| 32
| 0.815126
| 17
| 119
| 5.705882
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 119
| 4
| 33
| 29.75
| 0.92381
| 0.218487
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b761c8f6a1bb69891c054946f1ee26236a447e8d
| 20
|
py
|
Python
|
pconway/render/__init__.py
|
calfcalfcalfd/pconway
|
785ea6a324e7ee96c0ff07247878a680c0555e7c
|
[
"MIT"
] | 2
|
2020-11-16T06:15:09.000Z
|
2021-09-07T09:32:55.000Z
|
pconway/render/__init__.py
|
calfcalfcalfd/pconway
|
785ea6a324e7ee96c0ff07247878a680c0555e7c
|
[
"MIT"
] | 8
|
2021-02-22T03:25:34.000Z
|
2021-05-05T19:40:37.000Z
|
pconway/render/__init__.py
|
calfcalfcalfd/pconway
|
785ea6a324e7ee96c0ff07247878a680c0555e7c
|
[
"MIT"
] | 1
|
2021-02-21T21:26:04.000Z
|
2021-02-21T21:26:04.000Z
|
from .play import *
| 10
| 19
| 0.7
| 3
| 20
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 20
| 1
| 20
| 20
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
b779ef5a92986c4cbabc930d2e419dc06cb58e7f
| 38,739
|
py
|
Python
|
deckhand/tests/unit/engine/test_document_layering.py
|
att-comdev/test-submit
|
5961c0a91d348d531c1cbfc729e6d9b40cb28df7
|
[
"Apache-2.0"
] | null | null | null |
deckhand/tests/unit/engine/test_document_layering.py
|
att-comdev/test-submit
|
5961c0a91d348d531c1cbfc729e6d9b40cb28df7
|
[
"Apache-2.0"
] | null | null | null |
deckhand/tests/unit/engine/test_document_layering.py
|
att-comdev/test-submit
|
5961c0a91d348d531c1cbfc729e6d9b40cb28df7
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import yaml
from deckhand.engine import layering
from deckhand import errors
from deckhand import factories
from deckhand.tests.unit import base as test_base
from deckhand import types
class TestDocumentLayering(test_base.DeckhandTestCase):
    """Shared base for layering tests: renders documents and checks results."""

    def _test_layering(self, documents, site_expected=None,
                       region_expected=None, global_expected=None,
                       substitution_sources=None, validate=False, **kwargs):
        """Render *documents* and compare per-layer data with expectations.

        Each of site/region/global expected may be a single dict, a list of
        dicts, or None; None asserts that the corresponding layer produced
        no concrete documents at all.
        """
        document_layering = layering.DocumentLayering(
            documents, substitution_sources, validate=validate, **kwargs)

        site_docs = []
        region_docs = []
        global_docs = []
        # The layering policy is not returned as it is immutable. So all docs
        # should have a metadata.layeringDefinition.layer section.
        rendered_documents = document_layering.render()
        for doc in rendered_documents:
            # No need to validate the LayeringPolicy: it remains unchanged.
            if doc['schema'].startswith(types.LAYERING_POLICY_SCHEMA):
                continue
            layer = doc['metadata']['layeringDefinition']['layer']
            if layer == 'site':
                site_docs.append(doc.get('data'))
            if layer == 'region':
                region_docs.append(doc.get('data'))
            if layer == 'global':
                global_docs.append(doc.get('data'))

        if site_expected is not None:
            if not isinstance(site_expected, list):
                site_expected = [site_expected]

            # Remove each matched doc so duplicates must be matched 1:1.
            for expected in site_expected:
                self.assertIn(expected, site_docs)
                idx = site_docs.index(expected)
                self.assertEqual(expected, site_docs[idx],
                                 'Actual site data does not match expected.')
                site_docs.remove(expected)
        else:
            self.assertEmpty(site_docs)
        if region_expected is not None:
            if not isinstance(region_expected, list):
                region_expected = [region_expected]

            for expected in region_expected:
                self.assertIn(expected, region_docs)
                idx = region_docs.index(expected)
                self.assertEqual(expected, region_docs[idx],
                                 'Actual region data does not match expected.')
                region_docs.remove(expected)
        else:
            self.assertEmpty(region_docs)
        if global_expected is not None:
            if not isinstance(global_expected, list):
                global_expected = [global_expected]

            for expected in global_expected:
                self.assertIn(expected, global_docs)
                idx = global_docs.index(expected)
                self.assertEqual(expected, global_docs[idx],
                                 'Actual global data does not match expected.')
                global_docs.remove(expected)
        else:
            self.assertEmpty(global_docs)
class TestDocumentLayering2Layers(TestDocumentLayering):
    """Layering tests with only two layers (global and site; no region)."""

    def test_layering_default_scenario(self):
        # Default scenario mentioned in design document for 2 layers (region
        # data is removed).
        mapping = {
            "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]}
        }
        doc_factory = factories.DocumentFactory(2, [1, 1])
        documents = doc_factory.gen_test(mapping, site_abstract=False)

        site_expected = {'a': {'x': 1, 'y': 2}, 'b': 4}
        self._test_layering(documents, site_expected)

    def test_layering_default_scenario_multi_parentselector(self):
        mapping = {
            "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]}
        }
        doc_factory = factories.DocumentFactory(2, [1, 1])
        documents = doc_factory.gen_test(mapping, site_abstract=False)

        # Test case where the same number of labels are found in parent
        # labels and child's parentSelector.
        labels = {'foo': 'bar', 'baz': 'qux'}
        documents[1]['metadata']['labels'] = labels
        documents[-1]['metadata']['layeringDefinition']['parentSelector'] = (
            labels)
        site_expected = {'a': {'x': 1, 'y': 2}, 'b': 4}
        self._test_layering(documents, site_expected)

        # Test case where child's parentSelector is a subset of parent's
        # labels.
        documents[-1]['metadata']['layeringDefinition']['parentSelector'] = {
            'foo': 'bar'}
        site_expected = {'a': {'x': 1, 'y': 2}, 'b': 4}
        self._test_layering(documents, site_expected)

        documents[-1]['metadata']['layeringDefinition']['parentSelector'] = {
            'baz': 'qux'}
        site_expected = {'a': {'x': 1, 'y': 2}, 'b': 4}
        self._test_layering(documents, site_expected)

    def test_layering_default_scenario_multi_parentselector_no_match(self):
        mapping = {
            "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
            "_SITE_DATA_1_": {},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]}
        }
        doc_factory = factories.DocumentFactory(2, [1, 1])
        documents = doc_factory.gen_test(mapping, site_abstract=False)

        labels = {'a': 'b', 'c': 'd'}
        documents[1]['metadata']['labels'] = labels

        # Test case where none of the labels in parentSelector match.
        documents[-1]['metadata']['layeringDefinition']['parentSelector'] = {
            'w': 'x', 'y': 'z'
        }
        self._test_layering(documents, site_expected={})

        # Test case where parentSelector has one too many labels to be a match.
        documents[-1]['metadata']['layeringDefinition']['parentSelector'] = {
            'a': 'b', 'c': 'd', 'e': 'f'
        }
        self._test_layering(documents, site_expected={})

        # Test case where parentSelector keys match (but not values).
        documents[-1]['metadata']['layeringDefinition']['parentSelector'] = {
            'a': 'x', 'c': 'y'
        }
        self._test_layering(documents, site_expected={})

        # Test case where parentSelector values match (but not keys).
        documents[-1]['metadata']['layeringDefinition']['parentSelector'] = {
            'x': 'b', 'y': 'd'
        }
        self._test_layering(documents, site_expected={})

    def test_layering_method_delete(self):
        # Expected site data for delete paths '.', '.a', '.c' respectively.
        site_expected = [{}, {'c': 9}, {"a": {"x": 1, "y": 2}}]
        doc_factory = factories.DocumentFactory(2, [1, 1])

        for idx, path in enumerate(['.', '.a', '.c']):
            mapping = {
                "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}, "c": 9}},
                "_SITE_DATA_1_": {"data": {"a": {"x": 7, "z": 3}, "b": 4}},
                "_SITE_ACTIONS_1_": {
                    "actions": [{"method": "delete", "path": path}]}
            }
            documents = doc_factory.gen_test(mapping, site_abstract=False)
            self._test_layering(documents, site_expected[idx])

    def test_layering_method_merge(self):
        # Expected site data for merge paths '.', '.a', '.b' respectively.
        site_expected = [
            {'a': {'x': 7, 'y': 2, 'z': 3}, 'b': 4, 'c': 9},
            {'a': {'x': 7, 'y': 2, 'z': 3}, 'c': 9},
            {'a': {'x': 1, 'y': 2}, 'b': 4, 'c': 9}
        ]
        doc_factory = factories.DocumentFactory(2, [1, 1])

        for idx, path in enumerate(['.', '.a', '.b']):
            mapping = {
                "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}, "c": 9}},
                "_SITE_DATA_1_": {"data": {"a": {"x": 7, "z": 3}, "b": 4}},
                "_SITE_ACTIONS_1_": {
                    "actions": [{"method": "merge", "path": path}]}
            }
            documents = doc_factory.gen_test(mapping, site_abstract=False)
            self._test_layering(documents, site_expected[idx])

    def test_layering_method_replace(self):
        # Expected site data for replace paths '.', '.a', '.b' respectively.
        site_expected = [
            {'a': {'x': 7, 'z': 3}, 'b': 4},
            {'a': {'x': 7, 'z': 3}, 'c': 9},
            {'a': {'x': 1, 'y': 2}, 'b': 4, 'c': 9}
        ]
        doc_factory = factories.DocumentFactory(2, [1, 1])

        for idx, path in enumerate(['.', '.a', '.b']):
            mapping = {
                "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}, "c": 9}},
                "_SITE_DATA_1_": {"data": {"a": {"x": 7, "z": 3}, "b": 4}},
                "_SITE_ACTIONS_1_": {
                    "actions": [{"method": "replace", "path": path}]}
            }
            documents = doc_factory.gen_test(mapping, site_abstract=False)
            self._test_layering(documents, site_expected[idx])
class TestDocumentLayering2LayersAbstractConcrete(TestDocumentLayering):
    """Test the 2-layer payload with site/global layers concrete.

    Both the site and global data should be updated as they're both
    concrete docs. (2-layer has no region layer.)
    """

    def test_layering_site_and_global_concrete(self):
        # Both layers concrete: the site renders with the delete applied
        # on top of the global data.
        mapping = {
            "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}, "c": 9}},
            "_SITE_DATA_1_": {"data": {"a": {"x": 7, "z": 3}, "b": 4}},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "delete", "path": '.a'}]}
        }
        doc_factory = factories.DocumentFactory(2, [1, 1])
        documents = doc_factory.gen_test(mapping, site_abstract=False,
                                         global_abstract=False)
        site_expected = {'c': 9}
        # The global layer has no parent, so it renders unchanged.
        global_expected = {'a': {'x': 1, 'y': 2}, 'c': 9}
        self._test_layering(documents, site_expected,
                            global_expected=global_expected)

    def test_layering_site_and_global_abstract(self):
        # Abstract documents are not rendered, hence the None expectations.
        mapping = {
            "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}, "c": 9}},
            "_SITE_DATA_1_": {"data": {"a": {"x": 7, "z": 3}, "b": 4}},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "delete", "path": '.a'}]}
        }
        doc_factory = factories.DocumentFactory(2, [1, 1])
        documents = doc_factory.gen_test(mapping, site_abstract=True,
                                         global_abstract=True)
        site_expected = None
        global_expected = None
        self._test_layering(documents, site_expected,
                            global_expected=global_expected)
class TestDocumentLayering2Layers2Sites(TestDocumentLayering):
    """Layering tests with one global document and two site documents."""

    def test_layering_default_scenario(self):
        """Two sites layer against the same global parent; the second site
        additionally deletes ``.a``.
        """
        payload = {
            "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]},
            "_SITE_DATA_2_": {"data": {"b": 3}},
            "_SITE_ACTIONS_2_": {
                "actions": [{"method": "merge", "path": "."},
                            {"method": "delete", "path": ".a"}]}
        }
        factory = factories.DocumentFactory(2, [1, 2])
        docs = factory.gen_test(payload, site_abstract=False)
        expected = [{'a': {'x': 1, 'y': 2}, 'b': 4}, {'b': 3}]
        self._test_layering(docs, expected)

    def test_layering_alternate_scenario(self):
        """Same as the default scenario, with a trailing ``merge .b`` action
        on the second site, which does not change the rendered result.
        """
        payload = {
            "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]},
            "_SITE_DATA_2_": {"data": {"b": 3}},
            "_SITE_ACTIONS_2_": {
                "actions": [{"method": "merge", "path": "."},
                            {"method": "delete", "path": ".a"},
                            {"method": "merge", "path": ".b"}]}
        }
        factory = factories.DocumentFactory(2, [1, 2])
        docs = factory.gen_test(payload, site_abstract=False)
        expected = [{'a': {'x': 1, 'y': 2}, 'b': 4}, {'b': 3}]
        self._test_layering(docs, expected)
class TestDocumentLayering2Layers2Sites2Globals(TestDocumentLayering):
    """Layering tests with two global parents and two site children."""

    def test_layering_two_parents_only_one_with_child(self):
        # NOTE(review): this body is identical to
        # test_layering_two_parents_one_child_each_1 below, and BOTH site
        # documents select a parent -- yet the name says only one global
        # should have a child. Looks like a copy-paste; confirm intent.
        mapping = {
            "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
            "_GLOBAL_DATA_2_": {"data": {"a": {"x": 1, "y": 2}}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]},
            "_SITE_DATA_2_": {"data": {"b": 3}},
            "_SITE_ACTIONS_2_": {
                "actions": [{"method": "merge", "path": "."}]}
        }
        doc_factory = factories.DocumentFactory(2, [2, 2])
        documents = doc_factory.gen_test(
            mapping, site_abstract=False, site_parent_selectors=[
                {'global': 'global1'}, {'global': 'global2'}])
        site_expected = [{'a': {'x': 1, 'y': 2}, 'b': 3},
                         {'a': {'x': 1, 'y': 2}, 'b': 4}]
        self._test_layering(documents, site_expected)

    def test_layering_two_parents_one_child_each_1(self):
        # Each site selects a distinct global parent; both merge at ".".
        mapping = {
            "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
            "_GLOBAL_DATA_2_": {"data": {"a": {"x": 1, "y": 2}}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]},
            "_SITE_DATA_2_": {"data": {"b": 3}},
            "_SITE_ACTIONS_2_": {
                "actions": [{"method": "merge", "path": "."}]}
        }
        doc_factory = factories.DocumentFactory(2, [2, 2])
        documents = doc_factory.gen_test(
            mapping, site_abstract=False, site_parent_selectors=[
                {'global': 'global1'}, {'global': 'global2'}])
        site_expected = [{'a': {'x': 1, 'y': 2}, 'b': 3},
                         {'a': {'x': 1, 'y': 2}, 'b': 4}]
        self._test_layering(documents, site_expected)

    def test_layering_two_parents_one_child_each_2(self):
        """Scenario:

        Initially: p1: {"a": {"x": 1, "y": 2}}, p2: {"b": {"f": -9, "g": 71}}
        Where: c1 references p1 and c2 references p2
        Merge "." (p1 -> c1): {"a": {"x": 1, "y": 2}, "b": 4}
        Merge "." (p2 -> c2): {"b": {"f": -9, "g": 71}, "c": 3}
        Delete ".c" (p2 -> c2): {"b": {"f": -9, "g": 71}}
        """
        mapping = {
            "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
            "_GLOBAL_DATA_2_": {"data": {"b": {"f": -9, "g": 71}}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]},
            "_SITE_DATA_2_": {"data": {"c": 3}},
            "_SITE_ACTIONS_2_": {
                "actions": [{"method": "merge", "path": "."},
                            {"method": "delete", "path": ".c"}]}
        }
        doc_factory = factories.DocumentFactory(2, [2, 2])
        documents = doc_factory.gen_test(
            mapping, site_abstract=False, site_parent_selectors=[
                {'global': 'global1'}, {'global': 'global2'}])
        site_expected = [{"b": {"f": -9, "g": 71}},
                         {'a': {'x': 1, 'y': 2}, 'b': 4}]
        self._test_layering(documents, site_expected)
class TestDocumentLayering3Layers(TestDocumentLayering):
    """Layering tests for the 3-layer (global/region/site) payload."""

    def test_layering_default_scenario(self):
        # Default scenario mentioned in design document for 3 layers.
        mapping = {
            "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
            "_REGION_DATA_1_": {"data": {"a": {"z": 3}}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_REGION_ACTIONS_1_": {
                "actions": [{"method": "replace", "path": ".a"}]},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]}
        }
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test(mapping, site_abstract=False)
        site_expected = {'a': {'z': 3}, 'b': 4}
        region_expected = None  # Region is abstract.
        self._test_layering(documents, site_expected, region_expected)

    def test_layering_delete_everything(self):
        # Region deletes ".a" and site deletes ".b", leaving nothing.
        mapping = {
            "_GLOBAL_DATA_1_": {"data": {"a": {"x": 3, "y": 4}, "b": 99}},
            "_REGION_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_REGION_ACTIONS_1_": {
                "actions": [{"path": ".a", "method": "delete"}]},
            "_SITE_ACTIONS_1_": {"actions": [
                {"method": "delete", "path": ".b"}]}
        }
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test(mapping, site_abstract=False)
        site_expected = {}
        self._test_layering(documents, site_expected)

    def test_layering_delete_everything_missing_path(self):
        """Scenario:

        Initially: {"a": {"x": 3, "y": 4}, "b": 99}
        Delete ".": {}
        Delete ".b": MissingDocumentKey
        """
        mapping = {
            "_GLOBAL_DATA_1_": {"data": {"a": {"x": 3, "y": 4}, "b": 99}},
            "_REGION_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_REGION_ACTIONS_1_": {
                "actions": [{"path": ".", "method": "delete"}]},
            "_SITE_ACTIONS_1_": {"actions": [
                {"method": "delete", "path": ".b"}]}
        }
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test(mapping, site_abstract=False)
        # Deleting ".b" after "." wiped everything must raise.
        self.assertRaises(errors.MissingDocumentKey, self._test_layering,
                          documents)

    def test_layering_delete_path_a(self):
        # Region deletes ".a"; the site merge then only contributes "b".
        mapping = {
            "_GLOBAL_DATA_1_": {
                "data": {'a': {'x': 1, 'y': 2}, 'b': {'v': 3, 'w': 4}}},
            "_REGION_DATA_1_": {"data": {"a": {"z": 3}}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_REGION_ACTIONS_1_": {
                "actions": [{'path': '.a', 'method': 'delete'}]},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]}
        }
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test(mapping, site_abstract=False)
        site_expected = {'b': 4}
        self._test_layering(documents, site_expected)

    def test_layering_merge_and_replace(self):
        # Region replaces everything at "."; site merges its data on top.
        mapping = {
            "_GLOBAL_DATA_1_": {
                "data": {'a': {'x': 1, 'y': 2}, 'b': {'v': 3, 'w': 4}}},
            "_REGION_DATA_1_": {"data": {"a": {"z": 3}}},
            "_SITE_DATA_1_": {"data": {'a': {'z': 5}}},
            "_REGION_ACTIONS_1_": {
                "actions": [{'path': '.', 'method': 'replace'}]},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]}
        }
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test(mapping, site_abstract=False)
        site_expected = {'a': {'z': 5}}
        self._test_layering(documents, site_expected)

    def test_layering_double_merge(self):
        # Merge at "." at both the region and site layers.
        mapping = {
            "_GLOBAL_DATA_1_": {"data": {"c": {"e": 55}}},
            "_REGION_DATA_1_": {
                "data": {'a': {'x': 1, 'y': 2}, 'b': {'v': 3, 'w': 4}}},
            "_SITE_DATA_1_": {"data": {"a": {"z": 5}}},
            "_REGION_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]},
            "_SITE_ACTIONS_1_": {"actions": [{"method": "merge", "path": "."}]}
        }
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test(mapping, site_abstract=False)
        site_expected = {'a': {'x': 1, 'y': 2, 'z': 5},
                         'b': {'v': 3, 'w': 4}, 'c': {'e': 55}}
        self._test_layering(documents, site_expected)

    def test_layering_double_merge_2(self):
        # Region merges at ".a" only; site merges everything at ".".
        mapping = {
            "_GLOBAL_DATA_1_": {
                "data": {'a': {'x': 1, 'y': 2}, 'b': {'v': 3, 'w': 4}}},
            "_REGION_DATA_1_": {"data": {'a': {'e': 55}}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_REGION_ACTIONS_1_": {
                "actions": [{'path': '.a', 'method': 'merge'}]},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]}
        }
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test(mapping, site_abstract=False)
        site_expected = {'a': {'x': 1, 'y': 2, 'e': 55}, 'b': 4}
        self._test_layering(documents, site_expected)
class TestDocumentLayering3LayersAbstractConcrete(TestDocumentLayering):
    """Test the 3-layer payload with site/region layers concrete.

    Both the site and region data should be updated as they're both concrete
    docs.
    """

    def test_layering_site_and_region_concrete(self):
        """Scenario:

        Initially: {"a": {"x": 1, "y": 2}}
        Merge ".": {"a": {"x": 1, "y": 2, "z": 3}, "b": 5, "c": 11}
        (Region updated.)
        Delete ".c": {"a": {"x": 1, "y": 2, "z": 3}, "b": 5} (Region updated.)
        Replace ".b": {"a": {"x": 1, "y": 2, "z": 3}, "b": 4} (Site updated.)
        """
        mapping = {
            "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
            "_REGION_DATA_1_": {"data": {"a": {"z": 3}, "b": 5, "c": 11}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_REGION_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."},
                            {"method": "delete", "path": ".c"}]},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "replace", "path": ".b"}]}
        }
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test(mapping, site_abstract=False,
                                         region_abstract=False)
        site_expected = {"a": {"x": 1, "y": 2, "z": 3}, "b": 4}
        region_expected = {"a": {"x": 1, "y": 2, "z": 3}, "b": 5}
        self._test_layering(documents, site_expected, region_expected)

    def test_layering_site_concrete_and_region_abstract(self):
        """Scenario:

        Initially: {"a": {"x": 1, "y": 2}}
        Merge ".": {"a": {"x": 1, "y": 2, "z": 3}, "b": 5, "c": 11}
        Delete ".c": {"a": {"x": 1, "y": 2, "z": 3}, "b": 5}
        Replace ".b": {"a": {"x": 1, "y": 2, "z": 3}, "b": 4} (Site updated.)
        """
        mapping = {
            "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
            "_REGION_DATA_1_": {"data": {"a": {"z": 3}, "b": 5, "c": 11}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_REGION_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."},
                            {"method": "delete", "path": ".c"}]},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "replace", "path": ".b"}]}
        }
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test(
            mapping, site_abstract=False, region_abstract=True)
        site_expected = {"a": {"x": 1, "y": 2, "z": 3}, "b": 4}
        # Abstract region documents are not rendered.
        region_expected = None
        self._test_layering(documents, site_expected, region_expected)

    def test_layering_site_region_and_global_concrete(self):
        # Both the site and region data should be updated as they're both
        # concrete docs.
        mapping = {
            "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
            "_REGION_DATA_1_": {"data": {"a": {"z": 3}, "b": 5}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_REGION_ACTIONS_1_": {
                "actions": [{"method": "replace", "path": ".a"}]},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]}
        }
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test(
            mapping, site_abstract=False, region_abstract=False,
            global_abstract=False)
        site_expected = {'a': {'z': 3}, 'b': 4}
        region_expected = {'a': {'z': 3}}
        # Global data remains unchanged as there's no layer higher than it in
        # this example.
        global_expected = {'a': {'x': 1, 'y': 2}}
        self._test_layering(documents, site_expected, region_expected,
                            global_expected)
class TestDocumentLayering3LayersScenario(TestDocumentLayering):
    """Multi-action and YAML-based scenarios across 3 layers."""

    def test_layering_multiple_delete(self):
        """Scenario:

        Initially: {'a': {'x': 1, 'y': 2}, 'b': {'v': 3, 'w': 4}}
        Delete ".": {}
        Delete ".": {}
        Merge ".": {'b': 4}
        """
        mapping = {
            "_GLOBAL_DATA_1_": {
                "data": {'a': {'x': 1, 'y': 2}, 'b': {'v': 3, 'w': 4}}},
            "_REGION_DATA_1_": {"data": {"a": {"z": 3}}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_REGION_ACTIONS_1_": {
                "actions": [{'path': '.', 'method': 'delete'},
                            {'path': '.', 'method': 'delete'}]},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]}
        }
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test(mapping, site_abstract=False)
        site_expected = {'b': 4}
        self._test_layering(documents, site_expected)

    def test_layering_multiple_replace_1(self):
        """Scenario:

        Initially: {'a': {'x': 1, 'y': 2}, 'b': {'v': 3, 'w': 4}}
        Replace ".a": {'a': {'z': 5}, 'b': {'v': 3, 'w': 4}}
        Replace ".a": {'a': {'z': 5}, 'b': {'v': 3, 'w': 4}}
        Merge ".": {'a': {'z': 5}, 'b': 4}
        """
        mapping = {
            "_GLOBAL_DATA_1_": {
                "data": {'a': {'x': 1, 'y': 2}, 'b': {'v': 3, 'w': 4}}},
            "_REGION_DATA_1_": {"data": {'a': {'z': 5}}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_REGION_ACTIONS_1_": {
                "actions": [{'path': '.a', 'method': 'replace'},
                            {'path': '.a', 'method': 'replace'}]},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]}
        }
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test(mapping, site_abstract=False)
        site_expected = {'a': {'z': 5}, 'b': 4}
        self._test_layering(documents, site_expected)

    def test_layering_multiple_replace_2(self):
        """Scenario:

        Initially: {'a': {'x': 1, 'y': 2}, 'b': {'v': 3, 'w': 4}}
        Replace ".a": {'a': {'z': 5}, 'b': {'v': 3, 'w': 4}}
        Replace ".b": {'a': {'z': 5}, 'b': [109]}
        Merge ".": {'a': {'z': 5}, 'b': [32]}
        """
        mapping = {
            "_GLOBAL_DATA_1_": {
                "data": {'a': {'x': 1, 'y': 2}, 'b': {'v': 3, 'w': 4}}},
            "_REGION_DATA_1_": {"data": {'a': {'z': 5}, 'b': [109]}},
            "_SITE_DATA_1_": {"data": {"b": [32]}},
            "_REGION_ACTIONS_1_": {
                "actions": [{'path': '.a', 'method': 'replace'},
                            {'path': '.b', 'method': 'replace'}]},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]}
        }
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test(mapping, site_abstract=False)
        site_expected = {'a': {'z': 5}, 'b': [32]}
        self._test_layering(documents, site_expected)

    def test_layering_multiple_replace_3(self):
        """Scenario:

        Initially: {'a': {'x': 1, 'y': 2}, 'b': {'v': 3, 'w': 4}, 'c': [123]}
        Replace ".a": {'a': {'z': 5}, 'b': {'v': 3, 'w': 4}, 'c': [123]}
        Replace ".b": {'a': {'z': 5}, 'b': -2, 'c': [123]}
        Merge ".": {'a': {'z': 5}, 'b': 4, 'c': [123]}
        """
        mapping = {
            "_GLOBAL_DATA_1_": {
                "data": {'a': {'x': 1, 'y': 2}, 'b': {'v': 3, 'w': 4},
                         'c': [123]}},
            "_REGION_DATA_1_": {"data": {'a': {'z': 5}, 'b': -2, 'c': '_'}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_REGION_ACTIONS_1_": {
                "actions": [{'path': '.a', 'method': 'replace'},
                            {'path': '.b', 'method': 'replace'}]},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]}
        }
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test(mapping, site_abstract=False)
        site_expected = {'a': {'z': 5}, 'b': 4, 'c': [123]}
        self._test_layering(documents, site_expected)

    def test_layering_multiple_replace_4(self):
        """Scenario:

        Initially: {'a': {'x': 1, 'y': 2}, 'b': {'v': 3, 'w': 4}, 'c': [123]}
        Replace ".a": {'a': {'z': 5}, 'b': {'v': 3, 'w': 4}, 'c': [123]}
        Replace ".b": {'a': {'z': 5}, 'b': -2, 'c': [123]}
        Replace ".c": {'a': {'z': 5}, 'b': -2, 'c': '_'}
        Merge ".": {'a': {'z': 5}, 'b': 4, 'c': '_'}
        """
        mapping = {
            "_GLOBAL_DATA_1_": {
                "data": {'a': {'x': 1, 'y': 2}, 'b': {'v': 3, 'w': 4},
                         'c': [123]}},
            "_REGION_DATA_1_": {"data": {'a': {'z': 5}, 'b': -2, 'c': '_'}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_REGION_ACTIONS_1_": {
                "actions": [{'path': '.a', 'method': 'replace'},
                            {'path': '.b', 'method': 'replace'},
                            {'path': '.c', 'method': 'replace'}]},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]}
        }
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test(mapping, site_abstract=False)
        site_expected = {'a': {'z': 5}, 'b': 4, 'c': '_'}
        self._test_layering(documents, site_expected)

    def test_layering_multiple_delete_replace(self):
        """Scenario:

        Initially: {'a': {'x': 1, 'y': 2}, 'b': {'v': 3, 'w': 4}}
        Delete ".a": {'b': {'v': 3, 'w': 4}}
        Replace ".b": {'b': {'z': 3}}
        Delete ".b": {}
        Merge ".": {'b': 4}
        """
        mapping = {
            "_GLOBAL_DATA_1_": {
                "data": {'a': {'x': 1, 'y': 2}, 'b': {'v': 3, 'w': 4}}},
            "_REGION_DATA_1_": {"data": {"b": {"z": 3}}},
            "_SITE_DATA_1_": {"data": {"b": 4}},
            "_REGION_ACTIONS_1_": {
                "actions": [{'path': '.a', 'method': 'delete'},
                            {'path': '.b', 'method': 'replace'},
                            {'path': '.b', 'method': 'delete'}]},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]}
        }
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test(mapping, site_abstract=False)
        site_expected = {'b': 4}
        self._test_layering(documents, site_expected)

    def test_layering_using_grandparent_as_parent(self):
        """Test that layering works when a child document has layer N and its
        parent document has layer N+2. In other words, given layerOrder of
        'global', 'region' and 'site', check that a document with 'layer' site
        can be layered with a parent with layer 'global'.
        """
        # NOTE(review): the original YAML indentation was lost in transit;
        # this is a standard-indentation reconstruction -- verify against VCS.
        test_yaml = """
---
metadata:
  labels: {name: kubernetes-etcd-global}
  layeringDefinition: {abstract: false, layer: global}
  name: kubernetes-etcd-global
  schema: metadata/Document/v1
  storagePolicy: cleartext
schema: armada/Chart/v1
data:
  chart_name: global-etcd
---
# This document is included so that this middle layer isn't stripped away.
metadata:
  layeringDefinition:
    abstract: false
    actions:
      - {method: merge, path: .}
    layer: region
  name: kubernetes-etcd-region
  schema: metadata/Document/v1
  storagePolicy: cleartext
schema: armada/Chart/v1
data: {}
---
metadata:
  layeringDefinition:
    abstract: false
    actions:
      - {method: merge, path: .}
    layer: site
    parentSelector: {name: kubernetes-etcd-global}
  name: kubernetes-etcd
  schema: metadata/Document/v1
  storagePolicy: cleartext
schema: armada/Chart/v1
data: {}
---
schema: deckhand/LayeringPolicy/v1
metadata:
  schema: metadata/Control/v1
  name: layering-policy
data:
  layerOrder:
    - global
    - region
    - site
...
"""
        documents = list(yaml.safe_load_all(test_yaml))
        self._test_layering(
            documents, site_expected={'chart_name': 'global-etcd'},
            region_expected={}, global_expected={'chart_name': 'global-etcd'})

    def test_layering_using_first_parent_as_actual_parent(self):
        """Test that layering works when a child document has layer N and has
        a parent in layer N+1 and another parent in layer N+2 but selects
        "younger" parent in layer N+1.
        """
        # NOTE(review): YAML indentation reconstructed -- verify against VCS.
        test_yaml = """
---
metadata:
  labels: {name: kubernetes-etcd}
  layeringDefinition:
    abstract: true
    layer: global
  name: kubernetes-etcd-global
  schema: metadata/Document/v1
  storagePolicy: cleartext
schema: armada/Chart/v1
data:
  chart_name: global-etcd
---
metadata:
  labels: {name: kubernetes-etcd}
  layeringDefinition:
    abstract: false
    actions:
      - {method: merge, path: .}
    layer: region
    parentSelector: {name: kubernetes-etcd}
  name: kubernetes-etcd-region
  schema: metadata/Document/v1
  storagePolicy: cleartext
schema: armada/Chart/v1
data:
  chart_name: region-etcd
---
metadata:
  layeringDefinition:
    abstract: false
    actions:
      - {method: merge, path: .}
    layer: site
    parentSelector: {name: kubernetes-etcd}
  name: kubernetes-etcd
  schema: metadata/Document/v1
  storagePolicy: cleartext
schema: armada/Chart/v1
data: {}
---
schema: deckhand/LayeringPolicy/v1
metadata:
  schema: metadata/Control/v1
  name: layering-policy
data:
  layerOrder:
    - global
    - region
    - site
...
"""
        documents = list(yaml.safe_load_all(test_yaml))
        self._test_layering(
            documents, site_expected={'chart_name': 'region-etcd'},
            region_expected={'chart_name': 'region-etcd'})
class TestDocumentLayering3Layers2Regions2Sites(TestDocumentLayering):
    """Layering with one global, two region and two site documents."""

    def test_layering_two_abstract_regions_one_child_each(self):
        """Scenario:

        Initially: r1: {"c": 3, "d": 4}, r2: {"e": 5, "f": 6}
        Merge "." (g -> r1): {"a": 1, "b": 2, "c": 3, "d": 4}
        Merge "." (r1 -> s1): {"a": 1, "b": 2, "c": 3, "d": 4, "g": 7, "h": 8}
        Merge "." (g -> r2): {"a": 1, "b": 2, "e": 5, "f": 6}
        Merge "." (r2 -> s2): {"a": 1, "b": 2, "e": 5, "f": 6, "i": 9, "j": 10}
        """
        mapping = {
            "_GLOBAL_DATA_1_": {"data": {"a": 1, "b": 2}},
            "_REGION_DATA_1_": {"data": {"c": 3, "d": 4}},
            "_REGION_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]},
            "_REGION_DATA_2_": {"data": {"e": 5, "f": 6}},
            "_REGION_ACTIONS_2_": {
                "actions": [{"method": "merge", "path": "."}]},
            "_SITE_DATA_1_": {"data": {"g": 7, "h": 8}},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]},
            "_SITE_DATA_2_": {"data": {"i": 9, "j": 10}},
            "_SITE_ACTIONS_2_": {
                "actions": [{"method": "merge", "path": "."}]}
        }
        doc_factory = factories.DocumentFactory(3, [1, 2, 2])
        documents = doc_factory.gen_test(
            mapping, region_abstract=True, site_abstract=False,
            site_parent_selectors=[
                {'region': 'region1'}, {'region': 'region2'}])
        site_expected = [{"a": 1, "b": 2, "c": 3, "d": 4, "g": 7, "h": 8},
                         {"a": 1, "b": 2, "e": 5, "f": 6, "i": 9, "j": 10}]
        # Abstract regions (and the global layer here) are not rendered.
        region_expected = None
        global_expected = None
        self._test_layering(documents, site_expected, region_expected,
                            global_expected)

    def test_layering_two_concrete_regions_one_child_each(self):
        """Scenario:

        Initially: r1: {"c": 3, "d": 4}, r2: {"e": 5, "f": 6}
        Merge "." (g -> r1): {"a": 1, "b": 2, "c": 3, "d": 4}
        Merge "." (r1 -> s1): {"a": 1, "b": 2, "c": 3, "d": 4, "g": 7, "h": 8}
        Merge "." (g -> r2): {"a": 1, "b": 2, "e": 5, "f": 6}
        Merge "." (r2 -> s2): {"a": 1, "b": 2, "e": 5, "f": 6, "i": 9, "j": 10}
        """
        mapping = {
            "_GLOBAL_DATA_1_": {"data": {"a": 1, "b": 2}},
            "_REGION_DATA_1_": {"data": {"c": 3, "d": 4}},
            "_REGION_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]},
            "_REGION_DATA_2_": {"data": {"e": 5, "f": 6}},
            "_REGION_ACTIONS_2_": {
                "actions": [{"method": "merge", "path": "."}]},
            "_SITE_DATA_1_": {"data": {"g": 7, "h": 8}},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "merge", "path": "."}]},
            "_SITE_DATA_2_": {"data": {"i": 9, "j": 10}},
            "_SITE_ACTIONS_2_": {
                "actions": [{"method": "merge", "path": "."}]}
        }
        doc_factory = factories.DocumentFactory(3, [1, 2, 2])
        documents = doc_factory.gen_test(
            mapping, region_abstract=False, site_abstract=False,
            site_parent_selectors=[
                {'region': 'region1'}, {'region': 'region2'}])
        site_expected = [{"a": 1, "b": 2, "c": 3, "d": 4, "g": 7, "h": 8},
                         {"a": 1, "b": 2, "e": 5, "f": 6, "i": 9, "j": 10}]
        region_expected = [{"a": 1, "b": 2, "c": 3, "d": 4},
                           {"a": 1, "b": 2, "e": 5, "f": 6}]
        global_expected = None
        self._test_layering(documents, site_expected, region_expected,
                            global_expected)
| 40.820864
| 79
| 0.499497
| 4,350
| 38,739
| 4.186897
| 0.066897
| 0.021962
| 0.039038
| 0.014934
| 0.807884
| 0.776039
| 0.732389
| 0.700928
| 0.699555
| 0.680613
| 0
| 0.033043
| 0.302383
| 38,739
| 948
| 80
| 40.863924
| 0.640888
| 0.129017
| 0
| 0.690141
| 0
| 0
| 0.22791
| 0.005634
| 0
| 0
| 0
| 0
| 0.014085
| 1
| 0.047887
| false
| 0
| 0.008451
| 0
| 0.069014
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b78c38e9c9839490ed4fdc4e128e940c5432daf6
| 19
|
py
|
Python
|
pypy/module/_cffi_backend/__init__.py
|
pymtl/pypy-pymtl3
|
d2f66f87686e48aeb1eecabeaa3de1381a149f2c
|
[
"Apache-2.0",
"OpenSSL"
] | 1
|
2021-06-02T23:02:09.000Z
|
2021-06-02T23:02:09.000Z
|
pypy/module/_cffi_backend/__init__.py
|
pymtl/pypy-pymtl3
|
d2f66f87686e48aeb1eecabeaa3de1381a149f2c
|
[
"Apache-2.0",
"OpenSSL"
] | 1
|
2021-03-30T18:08:41.000Z
|
2021-03-30T18:08:41.000Z
|
pypy/module/_cffi_backend/__init__.py
|
pymtl/pypy-pymtl3
|
d2f66f87686e48aeb1eecabeaa3de1381a149f2c
|
[
"Apache-2.0",
"OpenSSL"
] | null | null | null |
VERSION = "1.14.3"  # module version string
| 9.5
| 18
| 0.578947
| 4
| 19
| 2.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.157895
| 19
| 1
| 19
| 19
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b79372e8ccd5e365309a016ec91d02bc516957e7
| 284
|
py
|
Python
|
home/admin.py
|
kotedadgi/uniqna
|
2f89daae611558a38cca8f12633733cfb727b7ab
|
[
"BSD-3-Clause"
] | 12
|
2017-06-28T14:14:40.000Z
|
2020-09-15T19:04:35.000Z
|
home/admin.py
|
kotedadgi/uniqna
|
2f89daae611558a38cca8f12633733cfb727b7ab
|
[
"BSD-3-Clause"
] | 294
|
2017-06-12T09:45:12.000Z
|
2022-03-20T13:20:11.000Z
|
home/admin.py
|
kotedadgi/uniqna
|
2f89daae611558a38cca8f12633733cfb727b7ab
|
[
"BSD-3-Clause"
] | 14
|
2017-06-12T09:02:56.000Z
|
2021-09-12T09:52:50.000Z
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
# Show activity and staff columns in the admin user list view.
UserAdmin.list_display = (
    'username', 'email', 'is_active', 'date_joined', 'is_staff')
# Re-register the User model so it picks up the customized UserAdmin.
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
| 28.4
| 64
| 0.778169
| 39
| 284
| 5.564103
| 0.538462
| 0.138249
| 0.235023
| 0.193548
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102113
| 284
| 9
| 65
| 31.555556
| 0.85098
| 0
| 0
| 0
| 0
| 0
| 0.144366
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.428571
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
b7abe4074beaf1b7d01ab4236af57dddbcebc4ad
| 580
|
py
|
Python
|
drone_api_client/drone_api_client.py
|
Slamnlc/drone-api-client
|
ffd4cee4b118144f89621a2be1ee3a75938851e7
|
[
"MIT"
] | null | null | null |
drone_api_client/drone_api_client.py
|
Slamnlc/drone-api-client
|
ffd4cee4b118144f89621a2be1ee3a75938851e7
|
[
"MIT"
] | null | null | null |
drone_api_client/drone_api_client.py
|
Slamnlc/drone-api-client
|
ffd4cee4b118144f89621a2be1ee3a75938851e7
|
[
"MIT"
] | null | null | null |
from .category import Cron, Secrets, User, Users, Repos, Template, Builds
from .session import Session
class DroneApi(Session):
    """Entry point for the Drone API client.

    Each property returns a category helper constructed with this session,
    grouping related endpoints (cron, secrets, users, repos, templates,
    builds) behind a single authenticated client.
    """

    @property
    def cron(self):
        """Return a ``Cron`` category helper bound to this session."""
        return Cron(self)

    @property
    def secrets(self):
        """Return a ``Secrets`` category helper bound to this session."""
        return Secrets(self)

    @property
    def user(self):
        """Return a ``User`` category helper bound to this session."""
        return User(self)

    @property
    def users(self):
        """Return a ``Users`` category helper bound to this session."""
        return Users(self)

    @property
    def repos(self):
        """Return a ``Repos`` category helper bound to this session."""
        return Repos(self)

    @property
    def templates(self):
        """Return a ``Template`` category helper bound to this session."""
        return Template(self)

    @property
    def builds(self):
        """Return a ``Builds`` category helper bound to this session."""
        return Builds(self)
| 17.058824
| 73
| 0.612069
| 66
| 580
| 5.378788
| 0.272727
| 0.216901
| 0.253521
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.296552
| 580
| 33
| 74
| 17.575758
| 0.870098
| 0
| 0
| 0.291667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.291667
| false
| 0
| 0.083333
| 0.291667
| 0.708333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
4d3b094b8d2fadda2afff79f45d760cd0f70b877
| 45
|
py
|
Python
|
detect_language/__init__.py
|
wiktorowski-dev/detect_lang_of_sentence
|
abf326a77499d4992f2fc43730fb6ac631549fe0
|
[
"MIT"
] | null | null | null |
detect_language/__init__.py
|
wiktorowski-dev/detect_lang_of_sentence
|
abf326a77499d4992f2fc43730fb6ac631549fe0
|
[
"MIT"
] | null | null | null |
detect_language/__init__.py
|
wiktorowski-dev/detect_lang_of_sentence
|
abf326a77499d4992f2fc43730fb6ac631549fe0
|
[
"MIT"
] | null | null | null |
from .detect_language import LanguageChecker
| 22.5
| 44
| 0.888889
| 5
| 45
| 7.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 45
| 1
| 45
| 45
| 0.95122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4d3d1558784b9deb77c2b7f794538adbc1a38768
| 17,201
|
py
|
Python
|
pycode/feat_seg.py
|
Skielex/InSegt
|
42a6c597f15f0a48112a7f11e7307ff9796b0ebf
|
[
"MIT"
] | 6
|
2020-06-13T11:52:33.000Z
|
2021-11-29T08:34:20.000Z
|
pycode/feat_seg.py
|
Skielex/InSegt
|
42a6c597f15f0a48112a7f11e7307ff9796b0ebf
|
[
"MIT"
] | null | null | null |
pycode/feat_seg.py
|
Skielex/InSegt
|
42a6c597f15f0a48112a7f11e7307ff9796b0ebf
|
[
"MIT"
] | 2
|
2021-05-28T04:17:29.000Z
|
2021-11-04T16:38:46.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Jan 9 00:06:22 2021
@author: abda
"""
import numpy as np
import numpy.random
import scipy.ndimage
import ctypes
import numpy.ctypeslib as ctl
import os
libfile = os.path.dirname(__file__) + '/image_feat_lib.so'
lib = ctypes.cdll.LoadLibrary(libfile)
def get_feat_vec(I, patch_size, n_train, n_keep=10):
    '''
    Function to compute PCA features from image patches. Pixels in the image
    patches are used as feature vectors in a patch_size x patch_size
    dimensional space, and the PCA is used for reducing the dimensionality of
    this space.

    Parameters
    ----------
    I : numpy array or list of numpy arrays
        2D image(s) with variable number of channels.
    patch_size : integer
        Side length of patch.
    n_train : integer
        Number of patches used for training. If the number exceeds
        the total number of patches in the image, then the training is done on
        all possible patches from the image.
    n_keep : integer or float, optional
        Number of features to keep. Values <= 1 are treated as a fraction of
        the summed square-root eigenvalue spectrum to retain. The default
        is 10.

    Returns
    -------
    vec : numpy array
        PCA vectors, one principal direction per column, ordered by
        decreasing eigenvalue.
    mean_patch : numpy array
        Mean patch to be used for computing PCA features.
    '''
    if isinstance(I, list):
        # A list of single-channel images becomes one multi-channel image.
        image = np.array(I).transpose(1, 2, 0)
    else:
        image = I
    # Extract patches and put a random subset of n_train into an array.
    r, c = image.shape[:2]
    l = 1
    if image.ndim == 3:
        l = image.shape[2]
    patch_size_h = int(np.floor(patch_size / 2))
    n_tot = (r - patch_size + 1) * (c - patch_size + 1)
    n_train = np.min([n_tot, n_train])
    idx = np.random.permutation(n_tot)[:n_train]
    # BUG FIX: np.int was deprecated in NumPy 1.20 and removed in 1.24;
    # cast with the builtin int instead.
    c_id = (np.floor(idx / (r - patch_size + 1)) + patch_size_h).astype(int)
    r_id = ((idx - (c_id - patch_size_h) * (r - patch_size + 1))
            + patch_size_h).astype(int)
    P = np.zeros((l * patch_size ** 2, n_train))
    for i in range(n_train):
        tmp = image[r_id[i] - patch_size_h:r_id[i] + patch_size_h + 1,
                    c_id[i] - patch_size_h:c_id[i] + patch_size_h + 1]
        P[:, i] = tmp.ravel()
    # Eigen decomposition of the covariance matrix of the patches.
    cv = np.cov(P)
    val, vec = np.linalg.eig(cv)
    val = np.real(val)
    vec = np.real(vec)
    # BUG FIX: np.linalg.eig does not guarantee any eigenvalue ordering, so
    # sort descending to make the leading columns the principal components.
    order = np.argsort(val)[::-1]
    val = val[order]
    vec = vec[:, order]
    if n_keep > 1:
        n_keep = np.minimum(n_keep, l * patch_size ** 2)
    else:
        # Fractional n_keep: keep enough components to reach that fraction
        # of the summed square-root eigenvalue spectrum.
        v = np.sqrt(np.maximum(val, 0))  # clip tiny negative round-off
        vp = v / np.sum(v)
        i = 0
        vs = 0
        while i < vp.shape[0] and vs < n_keep:
            vs += vp[i]
            i += 1
        n_keep = i
    # Eigen vectors for computing PCA on new image patches.
    vec = vec[:, 0:n_keep]
    # Mean patch to subtract when PCA is applied to a new image.
    mean_patch = np.mean(P, axis=1)
    return vec, mean_patch
def get_im_dev(image, order_keep = (True, True, True), sigma = -1, norm_fac = None):
    '''
    Computes a list of image derivatives, normalized to a common scale.

    Parameters
    ----------
    image : numpy array
        2D image with a variable number of channels.
    order_keep : tuple of bool, optional
        Selects which derivative orders are appended:
        order_keep[0] - zeroth order (the image itself),
        order_keep[1] - first order (dI/dx, dI/dy),
        order_keep[2] - second order (d2I/dx2, d2I/(dxdy), d2I/dy2).
        The default is (True, True, True).
    sigma : float, optional
        Standard deviation of the Gaussian derivative filters. A value
        <= 0 selects simple difference kernels instead. The default is -1.
    norm_fac : numpy array, optional
        Per-derivative scale factors. When None they are computed so every
        derivative image gets the standard deviation of the first one.

    Returns
    -------
    I : list of numpy arrays
        Image derivatives, scaled in place by norm_fac.
    norm_fac : numpy array
        The normalization factors that were applied.
    '''
    # Pick the filter bank: plain difference kernels or Gaussian derivatives.
    if sigma <= 0:
        g = np.array([[1, 1, 1]])
        dg = np.array([[1, 0, -1]])
        ddg = np.array([[1, -2, 1]])
    else:
        g, dg, ddg = get_gauss_dev(sigma)
    # Accumulate derivative images, one channel at a time.
    I = []
    if image.ndim == 2:
        channel_views = [image]
    else:
        channel_views = [image[:, :, k] for k in range(image.shape[2])]
    for channel in channel_views:
        convolve_append(channel, g, dg, ddg, order_keep, I)
    # Bring every derivative image to the standard deviation of the first.
    if norm_fac is None:
        ref_std = np.std(I[0])
        norm_fac = np.ones(len(I))
        for k, dev_im in enumerate(I):
            norm_fac[k] = ref_std / np.std(dev_im)
    for dev_im, fac in zip(I, norm_fac):
        dev_im *= fac  # in-place scaling, as callers may hold these arrays
    return I, norm_fac
def convolve_append(image, g, dg, ddg, order_keep, I):
    '''
    Appends the requested derivative images of a single-channel image to I.

    Parameters
    ----------
    image : numpy array
        2D single-channel image.
    g : numpy array
        0th order derivative 1D filter (row vector).
    dg : numpy array
        1st order derivative 1D filter (row vector).
    ddg : numpy array
        2nd order derivative 1D filter (row vector).
    order_keep : tuple of bool
        order_keep[0] - append the image itself,
        order_keep[1] - append Ix and Iy,
        order_keep[2] - append Ixx, Ixy and Iyy.
    I : list of numpy arrays
        Output list; derivative images are appended in place.

    Returns
    -------
    None.
    '''
    def separable(row_kernel, col_kernel):
        # Separable 2D filtering: first along rows, then along columns.
        partial = scipy.ndimage.convolve(image, row_kernel)
        return np.asarray(scipy.ndimage.convolve(partial, col_kernel.T), order='C')

    if order_keep[0]:
        I.append(np.asarray(image, order='C'))
    if order_keep[1]:
        I.append(separable(dg, g))    # Ix
        I.append(separable(g, dg))    # Iy
    if order_keep[2]:
        I.append(separable(ddg, g))   # Ixx
        I.append(separable(dg, dg))   # Ixy
        I.append(separable(g, ddg))   # Iyy
    return None
def get_gauss_dev(sigma, size=4):
    '''
    Builds a 1D Gaussian filter and its first two derivatives.

    Parameters
    ----------
    sigma : float
        Standard deviation of the Gaussian.
    size : integer, optional
        The kernel half-width is ceil(max(sigma*size, size)), so the kernel
        is never smaller than 2*size + 1 taps. The default is 4.

    Returns
    -------
    g : numpy array
        2D row array with the area-normalized Gaussian.
    dg : numpy array
        2D row array of the first derivative, scaled so its maximum is 1.
    ddg : numpy array
        2D row array of the second derivative, scaled so its minimum is -1.
    '''
    half = np.ceil(max(sigma * size, size))
    x = np.arange(-half, half + 1).reshape(1, -1)
    variance = sigma * sigma
    g = np.exp(-x ** 2 / (2 * variance))
    g /= np.sum(g)
    dg = -x / variance * g
    # ddg must be formed from the raw (unscaled) dg, so this line has to
    # come before dg is normalized below.
    ddg = -1 / variance * g - x / variance * dg
    dg = dg / np.max(dg)
    ddg = -ddg / np.min(ddg)
    return g, dg, ddg
def get_pca_feat_im(I, vec, mean_patch, order_keep = (True, True, True)):
    '''
    Computes PCA features for an image by calling the compiled C routine
    vec_to_feat_im once per derivative image.

    Parameters
    ----------
    I : list of numpy arrays
        List of image derivatives.
    vec : list of numpy arrays
        PCA (eigen) vectors, one array per derivative image.
    mean_patch : list of numpy arrays
        Mean patches used for centering patches before projection.
    order_keep : tuple of bool, optional
        Kept for interface compatibility with the callers; not used here.

    Returns
    -------
    feat_im : numpy array
        Feature image of size rows x cols x total number of features.
    '''
    # python function for building km_tree
    py_vec_to_feat_im = lib.vec_to_feat_im
    # extern "C" void vec_to_feat_im(const double *I, int rows, int cols, int channels, const double *vec, int n_keep, int patch_size, const double *mean_patch, double *feat_im)
    # say which inputs the function expects
    py_vec_to_feat_im.argtypes = [ctl.ndpointer(ctypes.c_double, flags="C_CONTIGUOUS"),  # Image
                                  ctypes.c_int, ctypes.c_int, ctypes.c_int,              # rows, cols, channels
                                  ctl.ndpointer(ctypes.c_double, flags="C_CONTIGUOUS"),  # eigen vectors
                                  ctypes.c_int, ctypes.c_int,                            # n_keep, patch_size
                                  ctl.ndpointer(ctypes.c_double, flags="C_CONTIGUOUS"),  # mean_patch
                                  ctl.ndpointer(ctypes.c_double, flags="C_CONTIGUOUS")]  # feat_image
    # say which output the function gives
    py_vec_to_feat_im.restype = None
    rows, cols = I[0].shape[:2]
    channels = 1
    if I[0].ndim == 3:
        channels = I[0].shape[2]
    # Recover the patch side length from the flattened mean patch.
    patch_size = int(np.sqrt(mean_patch[0].shape[0]/channels))
    # Compute feature vectors
    feat_im = np.array([])
    for i in range(0, len(I)):
        # The C code expects eigenvectors as rows and C-contiguous buffers.
        vec_in = np.asarray(vec[i].transpose(), order='C')
        mean_patch_in = np.asarray(mean_patch[i], order='C')
        n_keep = vec_in.shape[0]
        # np.float was removed from NumPy (1.24+); use the concrete float64
        # dtype, which matches the C "double" pointers declared above.
        feat_im_cpp = np.empty((rows, cols, n_keep), dtype=np.float64)  # will be overwritten
        py_vec_to_feat_im(I[i], rows, cols, channels, vec_in, n_keep, patch_size, mean_patch_in, feat_im_cpp)
        if feat_im.size == 0:
            feat_im = feat_im_cpp
        else:
            feat_im = np.append(feat_im, feat_im_cpp, axis=2)
    return feat_im
def get_pca_feat(im, patch_size = None, n_train = None, n_keep = None, order_keep = (True, True, True), vec = None, mean_patch = None, sigma = -1, norm_fac = None):
    '''
    Computes PCA features of an image, training the PCA basis on demand.

    When vec and mean_patch are both supplied they are reused; otherwise a
    basis is estimated per derivative image via get_feat_vec.

    Parameters
    ----------
    im : numpy array
        2D image with a variable number of channels.
    patch_size : integer, optional
        Side length of the square patch.
    n_train : integer, optional
        Number of patches used for estimating the PCA basis; capped at the
        number of patches available in the image.
    n_keep : integer, optional
        Number of features kept per derivative image.
    order_keep : tuple of bool, optional
        Selects which derivative orders are computed (zeroth, first,
        second). The default is (True, True, True).
    vec : list of numpy arrays, optional
        Precomputed PCA vectors; computed here when None.
    mean_patch : list of numpy arrays, optional
        Precomputed mean patches; computed here when None.
    sigma : float, optional
        Standard deviation of the derivative filters; non-positive values
        select simple difference kernels. The default is -1.
    norm_fac : numpy array, optional
        Per-derivative normalization factors; computed here when None.

    Returns
    -------
    feat_im : numpy array
        Feature image of size rows x cols x number of features.
    vec : list of numpy arrays
        PCA vectors, one entry per derivative image.
    mean_patch : list of numpy arrays
        Mean patches matching vec.
    norm_fac : numpy array
        Normalization factors used for the derivative images.
    '''
    I, norm_fac = get_im_dev(im, order_keep = order_keep, sigma = sigma, norm_fac = norm_fac)
    if vec is None or mean_patch is None:
        # Train a PCA basis for every derivative image (distinct loop name
        # so the input argument `im` is not shadowed).
        trained = [get_feat_vec(dev_im, patch_size, n_train, n_keep) for dev_im in I]
        vec = [v for v, _ in trained]
        mean_patch = [m for _, m in trained]
    feat_im = get_pca_feat_im(I, vec, mean_patch, order_keep = order_keep)
    return feat_im, vec, mean_patch, norm_fac
def get_uni_pca_feat(im, patch_size = None, n_train = None, n_keep = None, order_keep = (True, True, True), vec = None, mean_patch = None, sigma = -1, norm_fac = None):
    '''
    Computes PCA features over all derivative images jointly.

    Unlike get_pca_feat, the derivative images are stacked into a single
    multi-channel image before a single PCA basis is estimated/applied.

    Parameters
    ----------
    im : numpy array
        2D image with a variable number of channels.
    patch_size : integer, optional
        Side length of the square patch.
    n_train : integer, optional
        Number of patches used for estimating the PCA basis; capped at the
        number of patches available in the image.
    n_keep : integer, optional
        Number of features kept.
    order_keep : tuple of bool, optional
        Selects which derivative orders are computed (zeroth, first,
        second). The default is (True, True, True).
    vec : list of numpy arrays, optional
        Precomputed PCA vectors; computed here when None.
    mean_patch : list of numpy arrays, optional
        Precomputed mean patches; computed here when None.
    sigma : float, optional
        Standard deviation of the derivative filters; non-positive values
        select simple difference kernels. The default is -1.
    norm_fac : numpy array, optional
        Per-derivative normalization factors; computed here when None.

    Returns
    -------
    feat_im : numpy array
        Feature image of size rows x cols x number of features.
    vec : list of numpy arrays
        PCA vectors (single entry for the stacked image).
    mean_patch : list of numpy arrays
        Mean patches matching vec.
    norm_fac : numpy array
        Normalization factors used for the derivative images.
    '''
    I, norm_fac = get_im_dev(im, order_keep = order_keep, sigma = sigma, norm_fac = norm_fac)
    # Stack all derivative images into one (rows, cols, n_derivatives) image.
    stacked = np.asarray(np.asarray(I).transpose(1, 2, 0), order='C')
    I = [stacked]
    if vec is None or mean_patch is None:
        trained = [get_feat_vec(dev_im, patch_size, n_train, n_keep) for dev_im in I]
        vec = [v for v, _ in trained]
        mean_patch = [m for _, m in trained]
    feat_im = get_pca_feat_im(I, vec, mean_patch, order_keep = order_keep)
    return feat_im, vec, mean_patch, norm_fac
# def get_pca_feat_slow(I, vec, mean_patch, order_keep = (True, True, True), sigma = 1):
# '''
# Computes PCA features for an image.
# Parameters
# ----------
# I : list of numpy arrays
# List of image derivatives.
# vec : numpy array
# PCA vectors.
# mean_patch : numpy array
# Mean patch to be used for computing PCA features.
# order_keep : tuple of bool, optional
# Determines if image derivatives should be computed. Derivatives are
# computed according to:
# if order_keep[0] == True then zeroth order image included (original)
# if order_keep[1] == True then first order images included (dI/dx and dI/dy)
# if order_keep[2] == True then second order image included (d2I/dx2, d2I/(dxdy), d2I/dy2)
# The default is (True, True, True).
# sigma : float
# Standard deviation of Gaussian.
# Returns
# -------
# feat_im : numpy array
# feature image of size rows x cols x number of features.
# '''
# # python function for building km_tree
# py_vec_to_feat_im = lib.vec_to_feat_im_slow
# # say which inputs the function expects
# py_vec_to_feat_im.argtypes = [ctl.ndpointer(ctypes.c_double, flags="C_CONTIGUOUS"), # Image
# ctypes.c_int, ctypes.c_int, ctypes.c_int, # rows, cols, channels
# ctl.ndpointer(ctypes.c_double, flags="C_CONTIGUOUS"), # eigen vectors
# ctypes.c_int, ctypes.c_int, # n_keep, patch_size
# ctl.ndpointer(ctypes.c_double, flags="C_CONTIGUOUS"), # mean_patch
# ctl.ndpointer(ctypes.c_double, flags="C_CONTIGUOUS")] # feat_image
# # say which output the function gives
# py_vec_to_feat_im.restype = None
# patch_size = int(np.sqrt(mean_patch[0].shape[0]))
# rows, cols = I[0].shape[:2]
# channels = 1
# if ( I[0].ndim == 3 ):
# channels = I[0].shape[2]
# # Compute feature vectors
# feat_im = np.array([])
# for i in range(0,len(I)):
# # Get right order for vector and mean patch
# vec_in = np.asarray(vec[i].transpose(), order = 'C')
# mean_patch_in = np.asarray(mean_patch[i], order='C')
# n_keep = vec_in.shape[0]
# feat_im_cpp = np.empty((n_keep, rows, cols), dtype=np.float) # will be overwritten
# py_vec_to_feat_im(I[i], rows, cols, channels, vec_in, n_keep, patch_size, mean_patch_in, feat_im_cpp)
# if (feat_im.size == 0):
# feat_im = feat_im_cpp.transpose(1,2,0)
# else:
# feat_im = np.append(feat_im, feat_im_cpp.transpose(1,2,0), axis = 2)
# return feat_im
| 36.754274
| 178
| 0.617348
| 2,555
| 17,201
| 4.014873
| 0.106458
| 0.044746
| 0.022519
| 0.011796
| 0.754338
| 0.739715
| 0.717391
| 0.711055
| 0.711055
| 0.693508
| 0
| 0.01417
| 0.277891
| 17,201
| 468
| 179
| 36.754274
| 0.81169
| 0.584326
| 0
| 0.214815
| 0
| 0
| 0.011939
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051852
| false
| 0
| 0.044444
| 0
| 0.148148
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4d7f5ea4baf003e12267bc0418695bd6f2098c34
| 2,575
|
py
|
Python
|
tests/new/test_ips_main.py
|
HPC-SimTools/IPS-framework
|
4911bcc97a64bb31503009de75f24d3cba263bec
|
[
"BSD-3-Clause"
] | 9
|
2019-05-06T17:30:35.000Z
|
2021-12-10T16:05:00.000Z
|
tests/new/test_ips_main.py
|
HPC-SimTools/IPS-framework
|
4911bcc97a64bb31503009de75f24d3cba263bec
|
[
"BSD-3-Clause"
] | 80
|
2018-04-13T20:25:04.000Z
|
2022-03-10T21:59:58.000Z
|
tests/new/test_ips_main.py
|
HPC-SimTools/IPS-framework
|
4911bcc97a64bb31503009de75f24d3cba263bec
|
[
"BSD-3-Clause"
] | 4
|
2020-08-07T17:58:13.000Z
|
2021-11-12T17:17:42.000Z
|
from ipsframework import ips
import pytest
from unittest import mock
import sys
import os
@mock.patch('ipsframework.ips.Framework')
def test_ips_main(MockFramework):
    """Exercise ips.main() argument parsing against a mocked Framework."""

    def expect_usage_error(argv):
        # Incomplete command lines must exit with code 2 before the
        # Framework is ever constructed.
        sys.argv = argv
        with pytest.raises(SystemExit) as excinfo:
            ips.main()
        assert excinfo.value.code == 2
        MockFramework.assert_not_called()
        MockFramework.reset_mock()

    expect_usage_error(["ips.py"])
    expect_usage_error(["ips.py", "--simulation=sim.cfg"])
    expect_usage_error(["ips.py", "--platform=platform.conf"])

    # Simulation and platform file both given explicitly.
    sys.argv = ["ips.py", "--simulation=sim.cfg", "--platform=platform.conf"]
    ips.main()
    MockFramework.assert_called_with(['sim.cfg'], 'sys.stdout', 'platform.conf', False, False, 0, 0)

    # From here on the platform file is picked up from the environment.
    os.environ['IPS_PLATFORM_FILE'] = 'platform.conf'

    success_cases = [
        (["ips.py", "--config=sim.cfg"],
         (['sim.cfg'], 'sys.stdout', 'platform.conf', False, False, 0, 0)),
        (["ips.py", "--config=sim1.cfg,sim2.cfg"],
         (['sim1.cfg', 'sim2.cfg'], 'sys.stdout', 'platform.conf', False, False, 0, 0)),
        (["ips.py", "--config=sim.cfg", "--log=file.log"],
         (['sim.cfg'], 'file.log', 'platform.conf', False, False, 0, 0)),
        (["ips.py", "--config=sim.cfg", "--nodes=5", "--ppn=32"],
         (['sim.cfg'], 'sys.stdout', 'platform.conf', False, False, 5, 32)),
        (["ips.py", "--config=sim.cfg", "--debug"],
         (['sim.cfg'], 'sys.stdout', 'platform.conf', True, False, 0, 0)),
        (["ips.py", "--config=sim.cfg", "--verbose"],
         (['sim.cfg'], 'sys.stdout', 'platform.conf', False, True, 0, 0)),
        (["ips.py", "--config=sim.cfg", "--platform=workstation.conf"],
         (['sim.cfg'], 'sys.stdout', 'workstation.conf', False, False, 0, 0)),
    ]
    for argv, expected_call in success_cases:
        MockFramework.reset_mock()
        sys.argv = argv
        ips.main()
        MockFramework.assert_called_with(*expected_call)
| 35.763889
| 113
| 0.657087
| 338
| 2,575
| 4.899408
| 0.159763
| 0.054348
| 0.066425
| 0.07971
| 0.789855
| 0.780193
| 0.780193
| 0.756643
| 0.73128
| 0.73128
| 0
| 0.012465
| 0.158835
| 2,575
| 71
| 114
| 36.267606
| 0.752078
| 0.011262
| 0
| 0.561404
| 0
| 0
| 0.257862
| 0.049921
| 0
| 0
| 0
| 0
| 0.245614
| 1
| 0.017544
| false
| 0
| 0.087719
| 0
| 0.105263
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4d90e4437d7cda9c9c74070c424b6b1c7ace19ed
| 8,763
|
py
|
Python
|
heat/tests/test_validate.py
|
devcamcar/heat
|
0f1bd5d29102318e62b5a10281d809807bd3b163
|
[
"Apache-2.0"
] | 1
|
2015-05-11T04:54:30.000Z
|
2015-05-11T04:54:30.000Z
|
heat/tests/test_validate.py
|
devcamcar/heat
|
0f1bd5d29102318e62b5a10281d809807bd3b163
|
[
"Apache-2.0"
] | null | null | null |
heat/tests/test_validate.py
|
devcamcar/heat
|
0f1bd5d29102318e62b5a10281d809807bd3b163
|
[
"Apache-2.0"
] | null | null | null |
import sys
import os
import nose
import unittest
import mox
import json
import sqlalchemy
from nose.plugins.attrib import attr
from nose import with_setup
from heat.tests.v1_1 import fakes
from heat.engine import instance as instances
from heat.engine import volume as volumes
from heat.engine import manager as managers
import heat.db as db_api
from heat.engine import parser
test_template_volumeattach = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "test.",
"Resources" : {
"WikiDatabase": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId": "image_name",
"InstanceType": "m1.large",
"KeyName": "test_KeyName"
}
},
"DataVolume" : {
"Type" : "AWS::EC2::Volume",
"Properties" : {
"Size" : "6",
"AvailabilityZone" : "nova"
}
},
"MountPoint" : {
"Type" : "AWS::EC2::VolumeAttachment",
"Properties" : {
"InstanceId" : { "Ref" : "WikiDatabase" },
"VolumeId" : { "Ref" : "DataVolume" },
"Device" : "/dev/%s"
}
}
}
}
'''
test_template_ref = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "test.",
"Parameters" : {
"KeyName" : {
''' + \
'"Description" : "Name of an existing EC2' + \
'KeyPair to enable SSH access to the instances",' + \
'''
"Type" : "String"
}
},
"Resources" : {
"WikiDatabase": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId": "image_name",
"InstanceType": "m1.large",
"KeyName": "test_KeyName"
}
},
"DataVolume" : {
"Type" : "AWS::EC2::Volume",
"Properties" : {
"Size" : "6",
"AvailabilityZone" : "nova"
}
},
"MountPoint" : {
"Type" : "AWS::EC2::VolumeAttachment",
"Properties" : {
"InstanceId" : { "Ref" : "%s" },
"VolumeId" : { "Ref" : "DataVolume" },
"Device" : "/dev/vdb"
}
}
}
}
'''
test_template_findinmap_valid = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "test.",
"Parameters" : {
"KeyName" : {
''' + \
'"Description" : "Name of an existing EC2 KeyPair to' + \
'enable SSH access to the instances",' + \
'''
"Type" : "String"
}
},
"Resources" : {
"WikiDatabase": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId": "image_name",
"InstanceType": "m1.large",
"KeyName": "test_KeyName"
}
},
"DataVolume" : {
"Type" : "AWS::EC2::Volume",
"Properties" : {
"Size" : "6",
"AvailabilityZone" : "nova"
}
},
"MountPoint" : {
"Type" : "AWS::EC2::VolumeAttachment",
"Properties" : {
"InstanceId" : { "Ref" : "WikiDatabase" },
"VolumeId" : { "Ref" : "DataVolume" },
"Device" : "/dev/vdb"
}
}
}
}
'''
test_template_findinmap_invalid = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "test.",
"Parameters" : {
"KeyName" : {
''' + \
'"Description" : "Name of an existing EC2 KeyPair to enable SSH ' + \
'access to the instances",' + \
''' "Type" : "String"
}
},
"Mappings" : {
"AWSInstanceType2Arch" : {
"t1.micro" : { "Arch" : "64" },
"m1.small" : { "Arch" : "64" },
"m1.medium" : { "Arch" : "64" },
"m1.large" : { "Arch" : "64" },
"m1.xlarge" : { "Arch" : "64" },
"m2.xlarge" : { "Arch" : "64" },
"m2.2xlarge" : { "Arch" : "64" },
"m2.4xlarge" : { "Arch" : "64" },
"c1.medium" : { "Arch" : "64" },
"c1.xlarge" : { "Arch" : "64" },
"cc1.4xlarge" : { "Arch" : "64HVM" },
"cc2.8xlarge" : { "Arch" : "64HVM" },
"cg1.4xlarge" : { "Arch" : "64HVM" }
}
},
"Resources" : {
"WikiDatabase": {
"Type": "AWS::EC2::Instance",
"Properties": {
''' + \
'"ImageId" : { "Fn::FindInMap" : [ "DistroArch2AMI", { "Ref" : ' + \
'"LinuxDistribution" },' + \
'{ "Fn::FindInMap" : [ "AWSInstanceType2Arch", { "Ref" : ' + \
'"InstanceType" }, "Arch" ] } ] },' + \
'''
"InstanceType": "m1.large",
"KeyName": "test_KeyName"
}
},
"DataVolume" : {
"Type" : "AWS::EC2::Volume",
"Properties" : {
"Size" : "6",
"AvailabilityZone" : "nova"
}
},
"MountPoint" : {
"Type" : "AWS::EC2::VolumeAttachment",
"Properties" : {
"InstanceId" : { "Ref" : "WikiDatabase" },
"VolumeId" : { "Ref" : "DataVolume" },
"Device" : "/dev/vdb"
}
}
}
}
'''
@attr(tag=['unit', 'validate'])
@attr(speed='fast')
class validateTest(unittest.TestCase):
    """Unit tests for heat template validation (Python 2, mox, nose)."""

    def setUp(self):
        # Fresh mox instance and fake nova client for every test.
        self.m = mox.Mox()
        self.fc = fakes.FakeClient()

    def tearDown(self):
        self.m.UnsetStubs()
        print "volumeTest teardown complete"

    def test_validate_volumeattach_valid(self):
        # 'vdq' is an acceptable virtio-style device name.
        t = json.loads(test_template_volumeattach % 'vdq')
        params = {}
        params['KeyStoneCreds'] = None
        stack = parser.Stack('test_stack', t, 0, params)
        # Stub the DB lookup so no real database is needed.
        self.m.StubOutWithMock(db_api, 'resource_get_by_name_and_stack')
        db_api.resource_get_by_name_and_stack(None, 'test_resource_name',\
            stack).AndReturn(None)
        self.m.ReplayAll()
        volumeattach = stack.resources['MountPoint']
        stack.resolve_attributes(volumeattach.t)
        stack.resolve_joins(volumeattach.t)
        stack.resolve_base64(volumeattach.t)
        # validate() returns None on success.
        assert(volumeattach.validate() == None)

    def test_validate_volumeattach_invalid(self):
        # 'sda' is rejected by the device-name check.
        t = json.loads(test_template_volumeattach % 'sda')
        params = {}
        params['KeyStoneCreds'] = None
        stack = parser.Stack('test_stack', t, 0, params)
        self.m.StubOutWithMock(db_api, 'resource_get_by_name_and_stack')
        db_api.resource_get_by_name_and_stack(None, 'test_resource_name',\
            stack).AndReturn(None)
        self.m.ReplayAll()
        volumeattach = stack.resources['MountPoint']
        stack.resolve_attributes(volumeattach.t)
        stack.resolve_joins(volumeattach.t)
        stack.resolve_base64(volumeattach.t)
        # A truthy return value signals a validation error.
        assert(volumeattach.validate())

    def test_validate_ref_valid(self):
        # Reference an existing resource name -> validation succeeds.
        t = json.loads(test_template_ref % 'WikiDatabase')
        t['Parameters']['KeyName']['Value'] = 'test'
        params = {}
        params['KeyStoneCreds'] = None
        self.m.StubOutWithMock(instances.Instance, 'nova')
        instances.Instance.nova().AndReturn(self.fc)
        self.m.ReplayAll()
        manager = managers.EngineManager()
        res = dict(manager.\
            validate_template(None, t, params)['ValidateTemplateResult'])
        print 'res %s' % res
        assert (res['Description'] == 'Successfully validated')

    def test_validate_ref_invalid(self):
        # Reference a non-existent resource name -> validation fails.
        t = json.loads(test_template_ref % 'WikiDatabasez')
        t['Parameters']['KeyName']['Value'] = 'test'
        params = {}
        params['KeyStoneCreds'] = None
        self.m.StubOutWithMock(instances.Instance, 'nova')
        instances.Instance.nova().AndReturn(self.fc)
        self.m.ReplayAll()
        manager = managers.EngineManager()
        res = dict(manager.\
            validate_template(None, t, params)['ValidateTemplateResult'])
        assert (res['Description'] != 'Successfully validated')

    def test_validate_findinmap_valid(self):
        t = json.loads(test_template_findinmap_valid)
        t['Parameters']['KeyName']['Value'] = 'test'
        params = {}
        params['KeyStoneCreds'] = None
        self.m.StubOutWithMock(instances.Instance, 'nova')
        instances.Instance.nova().AndReturn(self.fc)
        self.m.ReplayAll()
        manager = managers.EngineManager()
        res = dict(manager.\
            validate_template(None, t, params)['ValidateTemplateResult'])
        assert (res['Description'] == 'Successfully validated')

    def test_validate_findinmap_invalid(self):
        # Template references undeclared mapping/parameter via Fn::FindInMap.
        t = json.loads(test_template_findinmap_invalid)
        t['Parameters']['KeyName']['Value'] = 'test'
        params = {}
        params['KeyStoneCreds'] = None
        self.m.StubOutWithMock(instances.Instance, 'nova')
        instances.Instance.nova().AndReturn(self.fc)
        self.m.ReplayAll()
        manager = managers.EngineManager()
        res = dict(manager.\
            validate_template(None, t, params)['ValidateTemplateResult'])
        assert (res['Description'] != 'Successfully validated')
# Allow running this test module directly: hand the file to the nose runner.
if __name__ == '__main__':
    sys.argv.append(__file__)
    nose.main()
| 28.086538
| 76
| 0.553463
| 805
| 8,763
| 5.895652
| 0.193789
| 0.014749
| 0.025284
| 0.017699
| 0.775811
| 0.775811
| 0.765908
| 0.715339
| 0.69174
| 0.675938
| 0
| 0.0174
| 0.278558
| 8,763
| 311
| 77
| 28.176849
| 0.733312
| 0.005478
| 0
| 0.491018
| 0
| 0
| 0.33801
| 0.052411
| 0
| 0
| 0
| 0
| 0.035928
| 0
| null | null | 0
| 0.08982
| null | null | 0.011976
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4d9b5f8d5330b43955192c0eef119f082f7bc6a5
| 98
|
py
|
Python
|
strategies/ghosts/static.py
|
math2001/pacman
|
01c998bed3ca278e8dded30122e569a213a7b45a
|
[
"MIT"
] | null | null | null |
strategies/ghosts/static.py
|
math2001/pacman
|
01c998bed3ca278e8dded30122e569a213a7b45a
|
[
"MIT"
] | null | null | null |
strategies/ghosts/static.py
|
math2001/pacman
|
01c998bed3ca278e8dded30122e569a213a7b45a
|
[
"MIT"
] | null | null | null |
from strategies.strategy import Strategy
class Static(Strategy):
    """ Ghost strategy whose ghosts never move. """
| 14
| 40
| 0.704082
| 12
| 98
| 5.75
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183673
| 98
| 6
| 41
| 16.333333
| 0.8625
| 0.173469
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4dc1a559f7e948a8458e9d01c370f6b825f8a592
| 107
|
py
|
Python
|
sqip/api/__init__.py
|
gaomeng1900/SQIP-py
|
0b5e38b5bceda70e2deb9a89ad50d6c419b6768d
|
[
"CC0-1.0"
] | 1
|
2016-08-02T13:07:38.000Z
|
2016-08-02T13:07:38.000Z
|
sqip/api/__init__.py
|
gaomeng1900/SQIP-py
|
0b5e38b5bceda70e2deb9a89ad50d6c419b6768d
|
[
"CC0-1.0"
] | null | null | null |
sqip/api/__init__.py
|
gaomeng1900/SQIP-py
|
0b5e38b5bceda70e2deb9a89ad50d6c419b6768d
|
[
"CC0-1.0"
] | null | null | null |
#!/usr/bin/env python
#-*-coding:utf-8-*-
#
# @author Meng G.
# 2016-03-28 restructed
from api import api
| 15.285714
| 23
| 0.654206
| 18
| 107
| 3.888889
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098901
| 0.149533
| 107
| 7
| 24
| 15.285714
| 0.67033
| 0.719626
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
15193b8a0a76e2526cb2013a96149e5b54b3dc1d
| 164
|
py
|
Python
|
10_custom_filter/filter_plugins/filter.py
|
tvsaru/ansible-class
|
b3c0902c99664a4fb7fb0a4b72305c27004a7030
|
[
"Apache-2.0"
] | null | null | null |
10_custom_filter/filter_plugins/filter.py
|
tvsaru/ansible-class
|
b3c0902c99664a4fb7fb0a4b72305c27004a7030
|
[
"Apache-2.0"
] | null | null | null |
10_custom_filter/filter_plugins/filter.py
|
tvsaru/ansible-class
|
b3c0902c99664a4fb7fb0a4b72305c27004a7030
|
[
"Apache-2.0"
] | null | null | null |
import codecs
def rot13(text):
    """Return *text* with each letter rotated 13 places (ROT13)."""
    rotated = codecs.encode(text, 'rot_13')
    return rotated
class FilterModule(object):
    """Ansible filter plugin exposing the rot13 filter."""

    def filters(self):
        # Map filter names usable in playbooks to their callables.
        return {'rot13': rot13}
| 16.4
| 40
| 0.640244
| 20
| 164
| 5.2
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064
| 0.237805
| 164
| 9
| 41
| 18.222222
| 0.768
| 0
| 0
| 0
| 0
| 0
| 0.067073
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
12b8c4879e0d28fc469cffefb29598f3db53f08d
| 186
|
py
|
Python
|
src/poms/msserver.py
|
lauro-cesar/poms
|
bbaa3c875d07ef041f429af074b4be9414abc8cb
|
[
"Apache-2.0"
] | 1
|
2021-07-31T21:12:03.000Z
|
2021-07-31T21:12:03.000Z
|
src/poms/msserver.py
|
lauro-cesar/poms
|
bbaa3c875d07ef041f429af074b4be9414abc8cb
|
[
"Apache-2.0"
] | null | null | null |
src/poms/msserver.py
|
lauro-cesar/poms
|
bbaa3c875d07ef041f429af074b4be9414abc8cb
|
[
"Apache-2.0"
] | null | null | null |
import asyncio
import os
import ast
import signal
import sys
import threading
import websockets
class MsServer():
    """Placeholder websocket-server class; the constructor currently
    accepts and ignores any arguments."""

    def __init__(self, *args, **kwargs):
        # Idiom fix: the original signature omitted an explicit ``self``
        # (it was silently captured in *args). Behavior is unchanged —
        # any positional/keyword arguments are still accepted and ignored.
        pass
def autotest():
    """Trivial self-test hook; always reports success."""
    return True
| 12.4
| 30
| 0.758065
| 25
| 186
| 5.48
| 0.72
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172043
| 186
| 15
| 31
| 12.4
| 0.88961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0.083333
| 0.583333
| 0.083333
| 0.916667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
12bd32718fdbe5239b455b3fce59f243ebb1f3b4
| 113
|
py
|
Python
|
configs/HDR/bestexp/retinanet_r50_fpn_scratch_bestexp.py
|
ismailkocdemir/mmdetection
|
4ac7e76dc66be7c97a8ca2c5f8a8e71434e3d823
|
[
"Apache-2.0"
] | null | null | null |
configs/HDR/bestexp/retinanet_r50_fpn_scratch_bestexp.py
|
ismailkocdemir/mmdetection
|
4ac7e76dc66be7c97a8ca2c5f8a8e71434e3d823
|
[
"Apache-2.0"
] | null | null | null |
configs/HDR/bestexp/retinanet_r50_fpn_scratch_bestexp.py
|
ismailkocdemir/mmdetection
|
4ac7e76dc66be7c97a8ca2c5f8a8e71434e3d823
|
[
"Apache-2.0"
] | null | null | null |
# mmdetection config inheritance: reuse the from-scratch RetinaNet setup and
# the best-exposure HDR detection dataset settings.
_base_ = [
    '../retinanet_r50_fpn_scratch.py',
    '../../_base_/datasets/hdr_detection_bestexp_scratch.py',
]
| 28.25
| 61
| 0.690265
| 13
| 113
| 5.230769
| 0.769231
| 0.264706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02
| 0.115044
| 113
| 4
| 62
| 28.25
| 0.66
| 0
| 0
| 0
| 0
| 0
| 0.745614
| 0.745614
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
12cecefbe32b918a5182d86d15703511e8b39093
| 87
|
py
|
Python
|
__init__.py
|
dfroger/closedcontour
|
9dbaeaec10794f27f0ab6536f2846323951c4059
|
[
"BSD-3-Clause"
] | null | null | null |
__init__.py
|
dfroger/closedcontour
|
9dbaeaec10794f27f0ab6536f2846323951c4059
|
[
"BSD-3-Clause"
] | null | null | null |
__init__.py
|
dfroger/closedcontour
|
9dbaeaec10794f27f0ab6536f2846323951c4059
|
[
"BSD-3-Clause"
] | null | null | null |
from .arc import *
from .assemble import *
from .point import *
from .segment import *
| 17.4
| 23
| 0.724138
| 12
| 87
| 5.25
| 0.5
| 0.47619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183908
| 87
| 4
| 24
| 21.75
| 0.887324
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
421348be019d7885b343843a6200101b7c3947bb
| 196
|
py
|
Python
|
groww_chatbot/chatbot/admin.py
|
Crio-Winter-of-Doing-2021/GROWW-T8
|
f42ddeed4ce6ac175ffb4fa5781ea1cc498d3b3f
|
[
"MIT"
] | null | null | null |
groww_chatbot/chatbot/admin.py
|
Crio-Winter-of-Doing-2021/GROWW-T8
|
f42ddeed4ce6ac175ffb4fa5781ea1cc498d3b3f
|
[
"MIT"
] | null | null | null |
groww_chatbot/chatbot/admin.py
|
Crio-Winter-of-Doing-2021/GROWW-T8
|
f42ddeed4ce6ac175ffb4fa5781ea1cc498d3b3f
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import FAQ, Category, CategoryMap
# Expose the chatbot models in the Django admin interface.
admin.site.register(FAQ)
admin.site.register(Category)
admin.site.register(CategoryMap)
| 32.666667
| 46
| 0.821429
| 27
| 196
| 5.962963
| 0.481481
| 0.167702
| 0.31677
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086735
| 196
| 6
| 47
| 32.666667
| 0.899441
| 0.132653
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4227e528c12d524691c8a31cf1f934808d465e7d
| 93
|
wsgi
|
Python
|
packit_dashboard/packit_dashboard.wsgi
|
ritwickraj78/dashboard
|
97e0f258a87e9ec7cc637086ac47510ff2268295
|
[
"MIT"
] | null | null | null |
packit_dashboard/packit_dashboard.wsgi
|
ritwickraj78/dashboard
|
97e0f258a87e9ec7cc637086ac47510ff2268295
|
[
"MIT"
] | 1
|
2020-07-31T22:18:34.000Z
|
2020-07-31T22:18:34.000Z
|
packit_dashboard/packit_dashboard.wsgi
|
ritwickraj78/dashboard
|
97e0f258a87e9ec7cc637086ac47510ff2268295
|
[
"MIT"
] | null | null | null |
import sys, os
sys.path.append("/src/")
from packit_dashboard.app import app as application
| 18.6
| 51
| 0.774194
| 15
| 93
| 4.733333
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11828
| 93
| 4
| 52
| 23.25
| 0.865854
| 0
| 0
| 0
| 0
| 0
| 0.053763
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4259d6631917a54b55471a5a5c40b91b2c321e01
| 13,502
|
py
|
Python
|
cm/app/api_v1/my_calculation_module_directory/excess_heat/graph/test/test_graph.py
|
HotMaps/excess_heat_cm
|
3c022c1fdfb4b85e9cdc55adcb7dbb112d303289
|
[
"Apache-2.0"
] | null | null | null |
cm/app/api_v1/my_calculation_module_directory/excess_heat/graph/test/test_graph.py
|
HotMaps/excess_heat_cm
|
3c022c1fdfb4b85e9cdc55adcb7dbb112d303289
|
[
"Apache-2.0"
] | 3
|
2021-02-08T10:57:59.000Z
|
2022-03-08T21:23:16.000Z
|
cm/app/api_v1/my_calculation_module_directory/excess_heat/graph/test/test_graph.py
|
HotMaps/excess_heat_cm
|
3c022c1fdfb4b85e9cdc55adcb7dbb112d303289
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from ..graph import NetworkGraph
import numpy as np
class TestNetworkGraph(unittest.TestCase):
def test_initiation(self):
source_sink_edges = [[0], [0, 1], [1], [2], [3], []]
source_source_edges = [[], [2], [1], [], []]
sink_sink_edges = [[], [], [], []]
source_correspondence = [0, 1, 2, 3, 4]
sink_correspondence = [0, 0, 1, 2]
graph = NetworkGraph(source_sink_edges, source_source_edges, sink_sink_edges, source_correspondence,
sink_correspondence)
self.assertIsInstance(graph, NetworkGraph)
self.assertEqual(graph.number_of_sources, 5)
self.assertEqual(graph.number_of_sinks, 4)
with self.assertRaises(ValueError):
graph = NetworkGraph(source_sink_edges, source_source_edges, sink_sink_edges, source_correspondence,
[])
def test_return_adjacency_lists(self):
source_sink_edges = [[0], [0, 1], [1], [2], [3]]
source_source_edges = [[], [2], [], [], []]
sink_sink_edges = [[], [], [], []]
source_correspondence = [0, 1, 2, 3, 4]
sink_correspondence = [0, 0, 1, 2]
graph = NetworkGraph(source_sink_edges, source_source_edges, sink_sink_edges, source_correspondence,
sink_correspondence)
adj = graph.return_adjacency_lists()
# Note that the whole network graph class is based on undirected graphs, hence it will not return bidirectional
# defined adjacencies above, but rather return a single direction.
self.assertSequenceEqual((source_sink_edges, source_source_edges, sink_sink_edges), adj)
def test_add_edge_attribute(self):
source_sink_edges = [[0], [0, 1], [1], [2], [3]]
source_source_edges = [[], [2], [], [], []]
sink_sink_edges = [[], [], [], []]
source_correspondence = [0, 1, 2, 3, 4]
sink_correspondence = [0, 0, 1, 2]
graph = NetworkGraph(source_sink_edges, source_source_edges, sink_sink_edges, source_correspondence,
sink_correspondence)
source_sink_distances = [[5], [2, 3], [2], [6], [7]]
source_source_distances = [[], [1], [], [], []]
sink_sink_distances = [[], [], [], []]
try:
graph.add_edge_attribute("distance", source_sink_distances, source_source_distances, sink_sink_distances)
except:
self.fail("could not set edge_attribute")
# wrong shape of attribute
with self.assertRaises(ValueError):
graph.add_edge_attribute("distance", [], source_source_distances, sink_sink_distances)
def test_get_edge_attribute(self):
source_sink_edges = [[0], [0, 1], [1], [2], [3]]
source_source_edges = [[], [2], [], [], []]
sink_sink_edges = [[], [], [], []]
source_correspondence = [0, 1, 2, 3, 4]
sink_correspondence = [0, 0, 1, 2]
graph = NetworkGraph(source_sink_edges, source_source_edges, sink_sink_edges, source_correspondence,
sink_correspondence)
source_sink_distances = [[5], [2, 3], [2], [6], [7]]
source_source_distances = [[], [1], [], [], []]
sink_sink_distances = [[], [], [], []]
graph.add_edge_attribute("distance", source_sink_distances, source_source_distances, sink_sink_distances)
self.assertSequenceEqual([5, 2, 3, 2, 6, 7, 1], graph.get_edge_attribute("distance"))
def test_reduce_to_minimum_spanning_tree(self):
source_sink_edges = [[0], [0, 1], [1], [2], [3]]
source_source_edges = [[], [2], [], [], []]
sink_sink_edges = [[], [], [], []]
source_correspondence = [0, 1, 2, 3, 4]
sink_correspondence = [0, 0, 1, 2]
graph = NetworkGraph(source_sink_edges, source_source_edges, sink_sink_edges, source_correspondence,
sink_correspondence)
source_sink_distances = [[5], [2, 3], [2], [6], [7]]
source_source_distances = [[], [1], [], [], []]
sink_sink_distances = [[], [], [], []]
graph.add_edge_attribute("distance", source_sink_distances, source_source_distances, sink_sink_distances)
graph.reduce_to_minimum_spanning_tree("distance")
self.assertSequenceEqual(([[0], [], [1], [2], [3]], [[], [2], [], [], []], [[], [], [], []]),
graph.return_adjacency_lists())
def test_maximum_flow(self):
source_sink_edges = [[0], [0, 1], [1], [2], [3]]
source_source_edges = [[], [2], [], [], []]
sink_sink_edges = [[], [], [], []]
source_correspondence = [0, 1, 2, 3, 4]
sink_correspondence = [0, 0, 1, 2]
graph = NetworkGraph(source_sink_edges, source_source_edges, sink_sink_edges, source_correspondence,
sink_correspondence)
# sink limited
source_capacities = [100, 100, 100, 100, 100]
sink_capacities = [1, 4, 2, 3]
flow = graph.maximum_flow(source_capacities, sink_capacities)
self.assertAlmostEqual(np.sum(flow[1]), np.sum(sink_capacities), 3)
# check continuity
self.assertAlmostEqual(np.sum(flow[0]), np.sum(flow[1]), 3)
# source limited
source_capacities = [5, 2, 3, 1, 1]
sink_capacities = [100, 100, 100, 100]
flow = graph.maximum_flow(source_capacities, sink_capacities)
self.assertAlmostEqual(np.sum(flow[1]), np.sum(source_capacities), 3)
# check continuity
self.assertAlmostEqual(np.sum(flow[0]), np.sum(flow[1]), 3)
def test_edge_source_target_vertices(self):
source_sink_edges = [[0], [0, 1], [1], [2], [3]]
source_source_edges = [[], [2], [], [], []]
sink_sink_edges = [[], [], [], []]
source_correspondence = [0, 1, 2, 3, 4]
sink_correspondence = [0, 0, 1, 2]
graph = NetworkGraph(source_sink_edges, source_source_edges, sink_sink_edges, source_correspondence,
sink_correspondence)
self.assertSequenceEqual([(('source', 0), ('sink', 0)), (('source', 1), ('sink', 0)), (('source', 1),
('sink', 1)), (('source', 2), ('sink', 1)), (('source', 3), ('sink', 2)),
(('source', 4), ('sink', 3)), (('source', 1), ('source', 2))],
graph.edge_source_target_vertices())
def test_delete_edges(self):
source_sink_edges = [[0], [0, 1], [1], [2], [3]]
source_source_edges = [[], [2], [], [], []]
sink_sink_edges = [[], [], [], []]
source_correspondence = [0, 1, 2, 3, 4]
sink_correspondence = [0, 0, 1, 2]
graph = NetworkGraph(source_sink_edges, source_source_edges, sink_sink_edges, source_correspondence,
sink_correspondence)
graph.delete_edges([(('source', 3), ('sink', 2))])
self.assertSequenceEqual([(('source', 0), ('sink', 0)), (('source', 1), ('sink', 0)), (('source', 1),
('sink', 1)), (('source', 2), ('sink', 1)), (('source', 4), ('sink', 3)),
(('source', 1), ('source', 2))],
graph.edge_source_target_vertices())
def test_number_of_edges(self):
source_sink_edges = [[0], [0, 1], [1], [2], [3]]
source_source_edges = [[], [2], [], [], []]
sink_sink_edges = [[], [], [], []]
source_correspondence = [0, 1, 2, 3, 4]
sink_correspondence = [0, 0, 1, 2]
graph = NetworkGraph(source_sink_edges, source_source_edges, sink_sink_edges, source_correspondence,
sink_correspondence)
self.assertEqual(7, graph.number_of_edges())
def test_number_of_vertices(self):
source_sink_edges = [[0], [0, 1], [1], [2], [3]]
source_source_edges = [[], [2], [], [], []]
sink_sink_edges = [[], [], [], []]
source_correspondence = [0, 1, 2, 3, 4]
sink_correspondence = [0, 0, 1, 2]
graph = NetworkGraph(source_sink_edges, source_source_edges, sink_sink_edges, source_correspondence,
sink_correspondence)
self.assertEqual(9, graph.number_of_vertices())
def test_vertices(self):
source_sink_edges = [[0], [0, 1], [1], [2], [3]]
source_source_edges = [[], [2], [], [], []]
sink_sink_edges = [[], [], [], []]
source_correspondence = [0, 1, 2, 3, 4]
sink_correspondence = [0, 0, 1, 2]
graph = NetworkGraph(source_sink_edges, source_source_edges, sink_sink_edges, source_correspondence,
sink_correspondence)
self.assertSequenceEqual([('source', 0), ('source', 1), ('source', 2), ('source', 3), ('source', 4),
('sink', 0), ('sink', 1), ('sink', 2), ('sink', 3)],
graph.vertices())
def test_contains_vertices(self):
source_sink_edges = [[0], [0, 1], [1], [2], [3]]
source_source_edges = [[], [2], [], [], []]
sink_sink_edges = [[], [], [], []]
source_correspondence = [0, 1, 2, 3, 4]
sink_correspondence = [0, 0, 1, 2]
graph = NetworkGraph(source_sink_edges, source_source_edges, sink_sink_edges, source_correspondence,
sink_correspondence)
self.assertTrue(graph.contains_vertices([('source', 0)]))
self.assertTrue(graph.contains_vertices([('source', 0), ('sink', 2)]))
self.assertFalse(graph.contains_vertices([('source', 7), ('sink', 2)]))
def test_decomposition(self):
source_sink_edges = [[0], [0, 1], [1], [2], []]
source_source_edges = [[], [2], [1], [], []]
sink_sink_edges = [[], [], [], []]
source_correspondence = [0, 1, 2, 3, 4]
sink_correspondence = [0, 0, 1, 2]
graph = NetworkGraph(source_sink_edges, source_source_edges, sink_sink_edges, source_correspondence,
sink_correspondence)
source_sink_distances = [[5], [2, 3], [2], [6], []]
source_source_distances = [[], [1], [1], [], []]
sink_sink_distances = [[], [], [], []]
graph.add_edge_attribute("distance", source_sink_distances, source_source_distances, sink_sink_distances)
components = graph.decompose_to_connected()
self.assertEqual(2, len(components))
self.assertEqual(3, components[0].number_of_sources)
self.assertEqual(1, components[1].number_of_sources)
self.assertEqual(2, components[0].number_of_sinks)
self.assertEqual(1, components[1].number_of_sinks)
self.assertEqual(5, components[0].number_of_edges())
self.assertEqual(1, components[1].number_of_edges())
self.assertEqual(5, components[0].number_of_vertices())
self.assertEqual(2, components[1].number_of_vertices())
self.assertSequenceEqual([(('source', 0), ('sink', 0)), (('source', 1), ('source', 2)),
(('source', 1), ('sink', 0)), (('source', 1), ('sink', 1)),
(('source', 2), ('sink', 1))],
components[0].edge_source_target_vertices())
self.assertSequenceEqual([(('source', 3), ('sink', 2))],
components[1].edge_source_target_vertices())
self.assertSequenceEqual([('source', 0), ('source', 1), ('source', 2), ('sink', 0), ('sink', 1)],
components[0].vertices())
self.assertSequenceEqual([('source', 3), ('sink', 2)],
components[1].vertices())
components[0].reduce_to_minimum_spanning_tree("distance")
self.assertSequenceEqual([(('source', 0), ('sink', 0)), (('source', 1), ('source', 2)),
(('source', 2), ('sink', 1))], components[0].edge_source_target_vertices())
components[1].reduce_to_minimum_spanning_tree("distance")
self.assertSequenceEqual([(('source', 3), ('sink', 2))], components[1].edge_source_target_vertices())
source_capacities = [100, 100, 100]
sink_capacities = [1, 4]
flow = components[0].maximum_flow(source_capacities, sink_capacities)
self.assertAlmostEqual(np.sum(flow[1]), np.sum(sink_capacities), 3)
# check continuity
self.assertAlmostEqual(np.sum(flow[0]), np.sum(flow[1]), 3)
# source limited
source_capacities = [5, 2, 3]
sink_capacities = [100, 100]
flow = components[0].maximum_flow(source_capacities, sink_capacities)
self.assertAlmostEqual(np.sum(flow[1]), np.sum(source_capacities), 3)
# check continuity
self.assertAlmostEqual(np.sum(flow[0]), np.sum(flow[1]), 3)
self.assertTrue(components[0].contains_vertices([('source', 0)]))
self.assertTrue(components[0].contains_vertices([('source', 0), ('sink', 1)]))
self.assertFalse(components[0].contains_vertices([('source', 7), ('sink', 1)]))
self.assertTrue(components[1].contains_vertices([('source', 3)]))
self.assertTrue(components[1].contains_vertices([('source', 3), ('sink', 2)]))
self.assertFalse(components[0].contains_vertices([('source', 7), ('sink', 2)]))
component = components[0].decompose_to_connected()
self.assertEqual(component[0].edge_source_target_vertices(), components[0].edge_source_target_vertices())
| 51.143939
| 119
| 0.568434
| 1,494
| 13,502
| 4.855422
| 0.069612
| 0.069479
| 0.086849
| 0.07072
| 0.858699
| 0.805349
| 0.780811
| 0.738765
| 0.696305
| 0.655087
| 0
| 0.044462
| 0.260406
| 13,502
| 263
| 120
| 51.338403
| 0.681955
| 0.02296
| 0
| 0.587678
| 0
| 0
| 0.038546
| 0
| 0
| 0
| 0
| 0
| 0.218009
| 1
| 0.061611
| false
| 0
| 0.014218
| 0
| 0.080569
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
429429e6833ec072f320c43109e938e17210db26
| 247
|
py
|
Python
|
whatsapp_tracker/bases/selenium_bases/base_selenium_shutdown.py
|
itay-bardugo/whatsapp_tracker
|
c53a309b08bf47597c8191ec0a155a1fe1536842
|
[
"MIT"
] | 1
|
2021-09-25T12:22:35.000Z
|
2021-09-25T12:22:35.000Z
|
whatsapp_tracker/bases/selenium_bases/base_selenium_shutdown.py
|
itay-bardugo/whatsapp_tracker
|
c53a309b08bf47597c8191ec0a155a1fe1536842
|
[
"MIT"
] | null | null | null |
whatsapp_tracker/bases/selenium_bases/base_selenium_shutdown.py
|
itay-bardugo/whatsapp_tracker
|
c53a309b08bf47597c8191ec0a155a1fe1536842
|
[
"MIT"
] | null | null | null |
from abc import ABCMeta, abstractmethod
from whatsapp_tracker.bases.selenium_bases.base_selenium_kit import BaseSeleniumKit
class BaseSeleniumShutDown(BaseSeleniumKit, metaclass=ABCMeta):
@abstractmethod
def shutdown(self):
...
| 24.7
| 83
| 0.793522
| 25
| 247
| 7.68
| 0.72
| 0.21875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1417
| 247
| 9
| 84
| 27.444444
| 0.90566
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c41423263f094f6041e263886c2eea522f6dfdbb
| 111
|
py
|
Python
|
job/admin.py
|
IGHIF71/Portfolio
|
4336162baf648405dbdb36ad6ce2e10f383009ee
|
[
"MIT"
] | null | null | null |
job/admin.py
|
IGHIF71/Portfolio
|
4336162baf648405dbdb36ad6ce2e10f383009ee
|
[
"MIT"
] | null | null | null |
job/admin.py
|
IGHIF71/Portfolio
|
4336162baf648405dbdb36ad6ce2e10f383009ee
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import job
# Register your models here.
admin.site.register(job)
| 22.2
| 32
| 0.801802
| 17
| 111
| 5.235294
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126126
| 111
| 5
| 33
| 22.2
| 0.917526
| 0.234234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c47f6c9cddf76f2b764114ffb922f584e6a5ef60
| 1,708
|
py
|
Python
|
contrato/decorators.py
|
ricardosmbr/smartcon
|
6f6090b586e717b38066c20a7d620c4abae0a915
|
[
"Apache-2.0"
] | null | null | null |
contrato/decorators.py
|
ricardosmbr/smartcon
|
6f6090b586e717b38066c20a7d620c4abae0a915
|
[
"Apache-2.0"
] | 1
|
2021-06-02T00:21:09.000Z
|
2021-06-02T00:21:09.000Z
|
contrato/decorators.py
|
ricardosmbr/smartcon
|
6f6090b586e717b38066c20a7d620c4abae0a915
|
[
"Apache-2.0"
] | null | null | null |
from django.shortcuts import redirect, get_object_or_404
from django.contrib import messages
from cliente.models import Cliente
from .models import Contrato,ContratToken
def permition_conrequired(view_func):
def _wrapper(request, *args,**kwargs):
pk = kwargs['pk']
contrato = get_object_or_404(Contrato ,pk=pk)
pkcon = contrato.id_cliente.id
cliente = get_object_or_404(Cliente,pk=contrato.id_cliente.id)
has_permition = request.user.is_staff
if not has_permition:
try:
contrato = Contrato.objects.get(
id_cliente=pkcon,
pk=pk
)
except cliente.DoesNotExist:
messages.success(request,'Desculpe, mas voce não tem permissão',extra_tags='text-denger')
else:
has_permition = True
if not has_permition:
messages.error(request,message)
return redirect('con:contrato')
request.contrato = contrato
return view_func(request, *args,**kwargs)
return _wrapper
def permition_tokenrequired(view_func):
def _wrapper(request, *args,**kwargs):
pk = kwargs['pk']
cont = get_object_or_404(ContratToken,pk=pk)
contrato = get_object_or_404(Contrato ,pk=cont.id_contrato.id)
pkcon = contrato.id_cliente.id
cliente = get_object_or_404(Cliente,pk=contrato.id_cliente.id)
has_permition = request.user.is_staff
if not has_permition:
try:
contrato = Contrato.objects.get(
id_cliente=pkcon,
pk=pk
)
except cliente.DoesNotExist:
messages.success(request,'Desculpe, mas voce não tem permissão',extra_tags='text-denger')
else:
has_permition = True
if not has_permition:
messages.error(request,message)
return redirect('con:contrato')
request.token = cont
return view_func(request, *args,**kwargs)
return _wrapper
| 31.054545
| 93
| 0.747073
| 235
| 1,708
| 5.229787
| 0.242553
| 0.058584
| 0.053702
| 0.068348
| 0.78275
| 0.78275
| 0.78275
| 0.78275
| 0.657445
| 0.657445
| 0
| 0.01244
| 0.15281
| 1,708
| 55
| 94
| 31.054545
| 0.836904
| 0
| 0
| 0.745098
| 0
| 0
| 0.071387
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078431
| false
| 0
| 0.078431
| 0
| 0.27451
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c48add9eeb73741454fc92b917b52e62979f4c59
| 18,695
|
py
|
Python
|
mavsdk/tracking_server_pb2_grpc.py
|
danylevskyi/MAVSDK-Python
|
50a811e7c3fc60d039397e296dd4cbf166e69164
|
[
"BSD-3-Clause"
] | 137
|
2019-06-22T16:42:49.000Z
|
2022-03-29T18:40:49.000Z
|
mavsdk/tracking_server_pb2_grpc.py
|
danylevskyi/MAVSDK-Python
|
50a811e7c3fc60d039397e296dd4cbf166e69164
|
[
"BSD-3-Clause"
] | 297
|
2019-06-21T20:20:21.000Z
|
2022-03-31T20:36:54.000Z
|
mavsdk/tracking_server_pb2_grpc.py
|
danylevskyi/MAVSDK-Python
|
50a811e7c3fc60d039397e296dd4cbf166e69164
|
[
"BSD-3-Clause"
] | 142
|
2019-06-11T11:45:03.000Z
|
2022-03-27T10:02:25.000Z
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from . import tracking_server_pb2 as tracking__server_dot_tracking__server__pb2
class TrackingServerServiceStub(object):
"""API for an onboard image tracking software.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.SetTrackingPointStatus = channel.unary_unary(
'/mavsdk.rpc.tracking_server.TrackingServerService/SetTrackingPointStatus',
request_serializer=tracking__server_dot_tracking__server__pb2.SetTrackingPointStatusRequest.SerializeToString,
response_deserializer=tracking__server_dot_tracking__server__pb2.SetTrackingPointStatusResponse.FromString,
)
self.SetTrackingRectangleStatus = channel.unary_unary(
'/mavsdk.rpc.tracking_server.TrackingServerService/SetTrackingRectangleStatus',
request_serializer=tracking__server_dot_tracking__server__pb2.SetTrackingRectangleStatusRequest.SerializeToString,
response_deserializer=tracking__server_dot_tracking__server__pb2.SetTrackingRectangleStatusResponse.FromString,
)
self.SetTrackingOffStatus = channel.unary_unary(
'/mavsdk.rpc.tracking_server.TrackingServerService/SetTrackingOffStatus',
request_serializer=tracking__server_dot_tracking__server__pb2.SetTrackingOffStatusRequest.SerializeToString,
response_deserializer=tracking__server_dot_tracking__server__pb2.SetTrackingOffStatusResponse.FromString,
)
self.SubscribeTrackingPointCommand = channel.unary_stream(
'/mavsdk.rpc.tracking_server.TrackingServerService/SubscribeTrackingPointCommand',
request_serializer=tracking__server_dot_tracking__server__pb2.SubscribeTrackingPointCommandRequest.SerializeToString,
response_deserializer=tracking__server_dot_tracking__server__pb2.TrackingPointCommandResponse.FromString,
)
self.SubscribeTrackingRectangleCommand = channel.unary_stream(
'/mavsdk.rpc.tracking_server.TrackingServerService/SubscribeTrackingRectangleCommand',
request_serializer=tracking__server_dot_tracking__server__pb2.SubscribeTrackingRectangleCommandRequest.SerializeToString,
response_deserializer=tracking__server_dot_tracking__server__pb2.TrackingRectangleCommandResponse.FromString,
)
self.SubscribeTrackingOffCommand = channel.unary_stream(
'/mavsdk.rpc.tracking_server.TrackingServerService/SubscribeTrackingOffCommand',
request_serializer=tracking__server_dot_tracking__server__pb2.SubscribeTrackingOffCommandRequest.SerializeToString,
response_deserializer=tracking__server_dot_tracking__server__pb2.TrackingOffCommandResponse.FromString,
)
self.RespondTrackingPointCommand = channel.unary_unary(
'/mavsdk.rpc.tracking_server.TrackingServerService/RespondTrackingPointCommand',
request_serializer=tracking__server_dot_tracking__server__pb2.RespondTrackingPointCommandRequest.SerializeToString,
response_deserializer=tracking__server_dot_tracking__server__pb2.RespondTrackingPointCommandResponse.FromString,
)
self.RespondTrackingRectangleCommand = channel.unary_unary(
'/mavsdk.rpc.tracking_server.TrackingServerService/RespondTrackingRectangleCommand',
request_serializer=tracking__server_dot_tracking__server__pb2.RespondTrackingRectangleCommandRequest.SerializeToString,
response_deserializer=tracking__server_dot_tracking__server__pb2.RespondTrackingRectangleCommandResponse.FromString,
)
self.RespondTrackingOffCommand = channel.unary_unary(
'/mavsdk.rpc.tracking_server.TrackingServerService/RespondTrackingOffCommand',
request_serializer=tracking__server_dot_tracking__server__pb2.RespondTrackingOffCommandRequest.SerializeToString,
response_deserializer=tracking__server_dot_tracking__server__pb2.RespondTrackingOffCommandResponse.FromString,
)
class TrackingServerServiceServicer(object):
"""API for an onboard image tracking software.
"""
def SetTrackingPointStatus(self, request, context):
"""Set/update the current point tracking status.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetTrackingRectangleStatus(self, request, context):
"""Set/update the current rectangle tracking status.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetTrackingOffStatus(self, request, context):
"""Set the current tracking status to off.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SubscribeTrackingPointCommand(self, request, context):
"""Subscribe to incoming tracking point command.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SubscribeTrackingRectangleCommand(self, request, context):
"""Subscribe to incoming tracking rectangle command.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SubscribeTrackingOffCommand(self, request, context):
"""Subscribe to incoming tracking off command.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RespondTrackingPointCommand(self, request, context):
"""Respond to an incoming tracking point command.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RespondTrackingRectangleCommand(self, request, context):
"""Respond to an incoming tracking rectangle command.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RespondTrackingOffCommand(self, request, context):
"""Respond to an incoming tracking off command.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_TrackingServerServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'SetTrackingPointStatus': grpc.unary_unary_rpc_method_handler(
servicer.SetTrackingPointStatus,
request_deserializer=tracking__server_dot_tracking__server__pb2.SetTrackingPointStatusRequest.FromString,
response_serializer=tracking__server_dot_tracking__server__pb2.SetTrackingPointStatusResponse.SerializeToString,
),
'SetTrackingRectangleStatus': grpc.unary_unary_rpc_method_handler(
servicer.SetTrackingRectangleStatus,
request_deserializer=tracking__server_dot_tracking__server__pb2.SetTrackingRectangleStatusRequest.FromString,
response_serializer=tracking__server_dot_tracking__server__pb2.SetTrackingRectangleStatusResponse.SerializeToString,
),
'SetTrackingOffStatus': grpc.unary_unary_rpc_method_handler(
servicer.SetTrackingOffStatus,
request_deserializer=tracking__server_dot_tracking__server__pb2.SetTrackingOffStatusRequest.FromString,
response_serializer=tracking__server_dot_tracking__server__pb2.SetTrackingOffStatusResponse.SerializeToString,
),
'SubscribeTrackingPointCommand': grpc.unary_stream_rpc_method_handler(
servicer.SubscribeTrackingPointCommand,
request_deserializer=tracking__server_dot_tracking__server__pb2.SubscribeTrackingPointCommandRequest.FromString,
response_serializer=tracking__server_dot_tracking__server__pb2.TrackingPointCommandResponse.SerializeToString,
),
'SubscribeTrackingRectangleCommand': grpc.unary_stream_rpc_method_handler(
servicer.SubscribeTrackingRectangleCommand,
request_deserializer=tracking__server_dot_tracking__server__pb2.SubscribeTrackingRectangleCommandRequest.FromString,
response_serializer=tracking__server_dot_tracking__server__pb2.TrackingRectangleCommandResponse.SerializeToString,
),
'SubscribeTrackingOffCommand': grpc.unary_stream_rpc_method_handler(
servicer.SubscribeTrackingOffCommand,
request_deserializer=tracking__server_dot_tracking__server__pb2.SubscribeTrackingOffCommandRequest.FromString,
response_serializer=tracking__server_dot_tracking__server__pb2.TrackingOffCommandResponse.SerializeToString,
),
'RespondTrackingPointCommand': grpc.unary_unary_rpc_method_handler(
servicer.RespondTrackingPointCommand,
request_deserializer=tracking__server_dot_tracking__server__pb2.RespondTrackingPointCommandRequest.FromString,
response_serializer=tracking__server_dot_tracking__server__pb2.RespondTrackingPointCommandResponse.SerializeToString,
),
'RespondTrackingRectangleCommand': grpc.unary_unary_rpc_method_handler(
servicer.RespondTrackingRectangleCommand,
request_deserializer=tracking__server_dot_tracking__server__pb2.RespondTrackingRectangleCommandRequest.FromString,
response_serializer=tracking__server_dot_tracking__server__pb2.RespondTrackingRectangleCommandResponse.SerializeToString,
),
'RespondTrackingOffCommand': grpc.unary_unary_rpc_method_handler(
servicer.RespondTrackingOffCommand,
request_deserializer=tracking__server_dot_tracking__server__pb2.RespondTrackingOffCommandRequest.FromString,
response_serializer=tracking__server_dot_tracking__server__pb2.RespondTrackingOffCommandResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'mavsdk.rpc.tracking_server.TrackingServerService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class TrackingServerService(object):
"""API for an onboard image tracking software.
"""
@staticmethod
def SetTrackingPointStatus(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/mavsdk.rpc.tracking_server.TrackingServerService/SetTrackingPointStatus',
tracking__server_dot_tracking__server__pb2.SetTrackingPointStatusRequest.SerializeToString,
tracking__server_dot_tracking__server__pb2.SetTrackingPointStatusResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def SetTrackingRectangleStatus(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/mavsdk.rpc.tracking_server.TrackingServerService/SetTrackingRectangleStatus',
tracking__server_dot_tracking__server__pb2.SetTrackingRectangleStatusRequest.SerializeToString,
tracking__server_dot_tracking__server__pb2.SetTrackingRectangleStatusResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def SetTrackingOffStatus(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/mavsdk.rpc.tracking_server.TrackingServerService/SetTrackingOffStatus',
tracking__server_dot_tracking__server__pb2.SetTrackingOffStatusRequest.SerializeToString,
tracking__server_dot_tracking__server__pb2.SetTrackingOffStatusResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def SubscribeTrackingPointCommand(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_stream(request, target, '/mavsdk.rpc.tracking_server.TrackingServerService/SubscribeTrackingPointCommand',
tracking__server_dot_tracking__server__pb2.SubscribeTrackingPointCommandRequest.SerializeToString,
tracking__server_dot_tracking__server__pb2.TrackingPointCommandResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def SubscribeTrackingRectangleCommand(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_stream(request, target, '/mavsdk.rpc.tracking_server.TrackingServerService/SubscribeTrackingRectangleCommand',
tracking__server_dot_tracking__server__pb2.SubscribeTrackingRectangleCommandRequest.SerializeToString,
tracking__server_dot_tracking__server__pb2.TrackingRectangleCommandResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def SubscribeTrackingOffCommand(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_stream(request, target, '/mavsdk.rpc.tracking_server.TrackingServerService/SubscribeTrackingOffCommand',
tracking__server_dot_tracking__server__pb2.SubscribeTrackingOffCommandRequest.SerializeToString,
tracking__server_dot_tracking__server__pb2.TrackingOffCommandResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def RespondTrackingPointCommand(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/mavsdk.rpc.tracking_server.TrackingServerService/RespondTrackingPointCommand',
tracking__server_dot_tracking__server__pb2.RespondTrackingPointCommandRequest.SerializeToString,
tracking__server_dot_tracking__server__pb2.RespondTrackingPointCommandResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def RespondTrackingRectangleCommand(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the RespondTrackingRectangleCommand unary RPC on *target*.

        gRPC-generated helper using the experimental single-call API. Returns a
        single RespondTrackingRectangleCommandResponse.
        """
        return grpc.experimental.unary_unary(request, target, '/mavsdk.rpc.tracking_server.TrackingServerService/RespondTrackingRectangleCommand',
            tracking__server_dot_tracking__server__pb2.RespondTrackingRectangleCommandRequest.SerializeToString,
            tracking__server_dot_tracking__server__pb2.RespondTrackingRectangleCommandResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def RespondTrackingOffCommand(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the RespondTrackingOffCommand unary RPC on *target*.

        gRPC-generated helper using the experimental single-call API. Returns a
        single RespondTrackingOffCommandResponse.
        """
        return grpc.experimental.unary_unary(request, target, '/mavsdk.rpc.tracking_server.TrackingServerService/RespondTrackingOffCommand',
            tracking__server_dot_tracking__server__pb2.RespondTrackingOffCommandRequest.SerializeToString,
            tracking__server_dot_tracking__server__pb2.RespondTrackingOffCommandResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 54.504373
| 149
| 0.721155
| 1,497
| 18,695
| 8.56513
| 0.0835
| 0.141944
| 0.074247
| 0.107238
| 0.846358
| 0.842926
| 0.802839
| 0.717517
| 0.433006
| 0.332631
| 0
| 0.003833
| 0.218508
| 18,695
| 342
| 150
| 54.663743
| 0.873785
| 0.046911
| 0
| 0.510638
| 1
| 0
| 0.117488
| 0.092997
| 0
| 0
| 0
| 0
| 0
| 1
| 0.070922
| false
| 0
| 0.007092
| 0.031915
| 0.120567
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
670c501a85e2bde56be6ebef0cec02e145cf142d
| 233
|
py
|
Python
|
3rdparty/huawei-lte-api/huawei_lte_api/api/Redirection.py
|
tux1c0/plugin-huawei4g
|
af83ce6321db23fcfd51eb204e00c287d3ea2325
|
[
"MIT"
] | null | null | null |
3rdparty/huawei-lte-api/huawei_lte_api/api/Redirection.py
|
tux1c0/plugin-huawei4g
|
af83ce6321db23fcfd51eb204e00c287d3ea2325
|
[
"MIT"
] | 1
|
2020-06-21T20:35:27.000Z
|
2020-07-14T20:08:55.000Z
|
3rdparty/huawei-lte-api/huawei_lte_api/api/Redirection.py
|
tux1c0/plugin-huawei4g
|
af83ce6321db23fcfd51eb204e00c287d3ea2325
|
[
"MIT"
] | 3
|
2020-02-27T11:35:55.000Z
|
2021-03-25T08:51:51.000Z
|
from huawei_lte_api.ApiGroup import ApiGroup
from huawei_lte_api.Connection import GetResponseType
class Redirection(ApiGroup):
    """API group wrapping the router's ``redirection`` endpoints."""

    def homepage(self) -> GetResponseType:
        """Fetch the redirection homepage resource from the device."""
        endpoint = 'redirection/homepage'
        return self._connection.get(endpoint)
| 29.125
| 59
| 0.798283
| 27
| 233
| 6.703704
| 0.555556
| 0.110497
| 0.143646
| 0.176796
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128755
| 233
| 7
| 60
| 33.285714
| 0.891626
| 0
| 0
| 0
| 0
| 0
| 0.085837
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0.2
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
671795600f7a37e4201bc64bbdec0d4efa6f2815
| 234
|
py
|
Python
|
mlops/components/__init__.py
|
colingwuyu/mlops
|
56ba9b7169c183c1f89ead64670ebfe873f9694c
|
[
"Apache-2.0"
] | 1
|
2022-01-11T01:17:08.000Z
|
2022-01-11T01:17:08.000Z
|
mlops/components/__init__.py
|
colingwuyu/mlops
|
56ba9b7169c183c1f89ead64670ebfe873f9694c
|
[
"Apache-2.0"
] | null | null | null |
mlops/components/__init__.py
|
colingwuyu/mlops
|
56ba9b7169c183c1f89ead64670ebfe873f9694c
|
[
"Apache-2.0"
] | null | null | null |
from mlops.components.base_component import base_component, ARG_MLFLOW_RUN_ID
from mlops.components.pipeline_init_component import pipeline_init_component
from mlops.components.model_training_component import model_training_component
| 58.5
| 78
| 0.91453
| 32
| 234
| 6.28125
| 0.4375
| 0.134328
| 0.283582
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 234
| 3
| 79
| 78
| 0.909502
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
674dfdb2a05c40ced1f5e54c36153915ea6ba26a
| 54
|
py
|
Python
|
main.py
|
twambolt/UEFA-Aggregator
|
51c8bf877d703d8372c800536af68e4f109f633a
|
[
"MIT"
] | null | null | null |
main.py
|
twambolt/UEFA-Aggregator
|
51c8bf877d703d8372c800536af68e4f109f633a
|
[
"MIT"
] | null | null | null |
main.py
|
twambolt/UEFA-Aggregator
|
51c8bf877d703d8372c800536af68e4f109f633a
|
[
"MIT"
] | null | null | null |
import config
# NOTE(review): `twilio` is imported but never used in this snippet —
# presumably the client setup lives elsewhere; confirm before removing.
import twilio

# Echo the Twilio account SID from local config. NOTE(review): this is an
# account identifier — avoid printing it in shared logs.
print(config.ACCOUNT_SID)
| 13.5
| 25
| 0.851852
| 8
| 54
| 5.625
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092593
| 54
| 4
| 25
| 13.5
| 0.918367
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
675938cb27190891cf62bb598c5ec3900f497580
| 1,429
|
py
|
Python
|
population_genomics_scripts/filter_bams/make_sync.py
|
peterthorpe5/DROS_kitchenMadness_DNAseq_20211207
|
d4820edf5a71f9455c62fee875a67973d6ed9eaa
|
[
"MIT"
] | null | null | null |
population_genomics_scripts/filter_bams/make_sync.py
|
peterthorpe5/DROS_kitchenMadness_DNAseq_20211207
|
d4820edf5a71f9455c62fee875a67973d6ed9eaa
|
[
"MIT"
] | null | null | null |
population_genomics_scripts/filter_bams/make_sync.py
|
peterthorpe5/DROS_kitchenMadness_DNAseq_20211207
|
d4820edf5a71f9455c62fee875a67973d6ed9eaa
|
[
"MIT"
] | null | null | null |
# Pairwise sync-job generator: for every ordered pair of distinct BAM files,
# write a small shell script that runs popoolation2's mpileup2sync on the
# corresponding mpileup file.
# NOTE(review): the list below contains "make_pileup.py", which is a script,
# not a BAM — presumably a paste artifact; confirm before regenerating jobs.
bam = """F2014_female_fall_DrosEU_match_sorted_marked_RG.bam MR19female2_sorted_marked_RG.bam
F2015_female_fall_DrosEU_match_sorted_marked_RG.bam MR19male1_sorted_marked_RG.bam
Fly_femaleA_sorted_marked_RG.bam MR19male2_sorted_marked_RG.bam
Fly_femaleB_sorted_marked_RG.bam MR20female1_sorted_marked_RG.bam
Fly_Female_sorted_marked_RG.bam MR20female2_sorted_marked_RG.bam
Fly_MaleA_sorted_marked_RG.bam MR20male1_sorted_marked_RG.bam
Fly_MaleB_sorted_marked_RG.bam MR20male2_sorted_marked_RG.bam
Fly_Male_sorted_marked_RG.bam MR_female_sorted_marked_RG.bam
make_pileup.py MR_male_sorted_marked_RG.bam
MR19female1_sorted_marked_RG.bam""".split()

count = 0
for i in bam:
    for j in bam:
        if i == j:
            continue
        # Hoist the name-prefix computation (was recomputed four times per pair).
        left = i.split("_sor")[0]
        right = j.split("_sor")[0]
        out = "cd /storage/home/users/pjt6/kitchen_flies/bams/unique_mapped/\njava -ea -Xmx9g -jar /storage/home/users/pjt6/popoolation2/mpileup2sync.jar --input ./mpileup/%s_%s.mpileup --output ./sync/%s_%s_java.sync --fastq-type sanger --min-qual 30 --threads 4\n" % (left, right, left, right)
        # Context manager guarantees the handle is closed even if the write fails
        # (original used a bare open()/close() pair).
        with open("sync%s.sh" % (count), "w") as f_out:
            f_out.write(out)
        count += 1
| 57.16
| 350
| 0.657803
| 209
| 1,429
| 4.100478
| 0.382775
| 0.252042
| 0.294049
| 0.35706
| 0.378063
| 0.135356
| 0.135356
| 0.135356
| 0
| 0
| 0
| 0.042318
| 0.239328
| 1,429
| 25
| 351
| 57.16
| 0.74609
| 0.017495
| 0
| 0
| 0
| 0.05
| 0.773913
| 0.547101
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6762b018f0a2624b66513582347834a7865df658
| 52
|
py
|
Python
|
testing/vcs/test_vcs_mkscale_zero_range.py
|
xylar/cdat
|
8a5080cb18febfde365efc96147e25f51494a2bf
|
[
"BSD-3-Clause"
] | 62
|
2018-03-30T15:46:56.000Z
|
2021-12-08T23:30:24.000Z
|
testing/vcs/test_vcs_mkscale_zero_range.py
|
xylar/cdat
|
8a5080cb18febfde365efc96147e25f51494a2bf
|
[
"BSD-3-Clause"
] | 114
|
2018-03-21T01:12:43.000Z
|
2021-07-05T12:29:54.000Z
|
testing/vcs/test_vcs_mkscale_zero_range.py
|
CDAT/uvcdat
|
5133560c0c049b5c93ee321ba0af494253b44f91
|
[
"BSD-3-Clause"
] | 14
|
2018-06-06T02:42:47.000Z
|
2021-11-26T03:27:00.000Z
|
import vcs

# Regression check: mkscale with a zero-width range (min == max == 3.5) must
# return just that single value instead of inventing a scale around it.
assert vcs.mkscale(3.5,3.5,16)==[3.5,]
| 10.4
| 38
| 0.634615
| 12
| 52
| 2.75
| 0.583333
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 0.115385
| 52
| 4
| 39
| 13
| 0.543478
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
67a3e76894d377061e48118370c156acc3392f0b
| 54
|
py
|
Python
|
problems 001 - 020/015_lattice_paths.py
|
max-97/ProjectEuler
|
5eab8b2e199f3253696c4f671b395f2f2773d7f1
|
[
"MIT"
] | null | null | null |
problems 001 - 020/015_lattice_paths.py
|
max-97/ProjectEuler
|
5eab8b2e199f3253696c4f671b395f2f2773d7f1
|
[
"MIT"
] | null | null | null |
problems 001 - 020/015_lattice_paths.py
|
max-97/ProjectEuler
|
5eab8b2e199f3253696c4f671b395f2f2773d7f1
|
[
"MIT"
] | null | null | null |
"""Project Euler 15 — lattice paths.

The number of monotonic (right/down) paths through a 20x20 grid is the
central binomial coefficient C(40, 20).
"""
from math import comb


def lattice_paths(grid_size=20):
    """Return the exact number of lattice paths through a grid_size x grid_size grid.

    A path is 2n steps, n of which go right, so the count is C(2n, n).
    The original used scipy.special.binom, which returns a float; math.comb
    gives the exact integer.
    """
    return comb(2 * grid_size, grid_size)


print(lattice_paths())
| 13.5
| 31
| 0.759259
| 9
| 54
| 4.555556
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 0.12963
| 54
| 3
| 32
| 18
| 0.787234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
67bc263f950d07d7d66b7f1d4c61063cedbf1849
| 19
|
py
|
Python
|
crepe/version.py
|
entn-at/crepe
|
1b834eaddff2f5048d198ec3a7466e2f0953985c
|
[
"MIT"
] | 1
|
2022-01-18T21:50:56.000Z
|
2022-01-18T21:50:56.000Z
|
crepe/version.py
|
entn-at/crepe
|
1b834eaddff2f5048d198ec3a7466e2f0953985c
|
[
"MIT"
] | 2
|
2021-09-10T11:58:36.000Z
|
2021-09-10T12:34:08.000Z
|
crepe/version.py
|
entn-at/crepe
|
1b834eaddff2f5048d198ec3a7466e2f0953985c
|
[
"MIT"
] | 1
|
2020-04-07T16:59:01.000Z
|
2020-04-07T16:59:01.000Z
|
# Package version string (PEP 440); bump here when cutting a release.
version = '0.0.11'
| 9.5
| 18
| 0.578947
| 4
| 19
| 2.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.157895
| 19
| 1
| 19
| 19
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
67ed34dd104d8b39d7f62aa4a35e6cc38e46324d
| 178
|
py
|
Python
|
rofl/functions/__init__.py
|
Bobobert/RoLas
|
75d601e7906e31c95ff3c574ebbed1142e220992
|
[
"Apache-2.0"
] | 2
|
2021-11-02T16:33:08.000Z
|
2021-11-05T23:57:31.000Z
|
rofl/functions/__init__.py
|
Bobobert/RoLas
|
75d601e7906e31c95ff3c574ebbed1142e220992
|
[
"Apache-2.0"
] | null | null | null |
rofl/functions/__init__.py
|
Bobobert/RoLas
|
75d601e7906e31c95ff3c574ebbed1142e220992
|
[
"Apache-2.0"
] | null | null | null |
from .vars import linearSchedule, runningStat, updateVar
from .torch import getDevice
from .stop import testEvaluation
from .dicts import obsDict, mergeDicts, initResultDict
| 35.6
| 57
| 0.820225
| 20
| 178
| 7.3
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134831
| 178
| 4
| 58
| 44.5
| 0.948052
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
db15069cf9545b43cd3fa067387aaf6fe2c32a5d
| 202
|
py
|
Python
|
invest_test_all.py
|
phargogh/invest-natcap.invest-3
|
ee96055a4fa034d9a95fa8ccc6259ab03264e6c1
|
[
"BSD-3-Clause"
] | null | null | null |
invest_test_all.py
|
phargogh/invest-natcap.invest-3
|
ee96055a4fa034d9a95fa8ccc6259ab03264e6c1
|
[
"BSD-3-Clause"
] | null | null | null |
invest_test_all.py
|
phargogh/invest-natcap.invest-3
|
ee96055a4fa034d9a95fa8ccc6259ab03264e6c1
|
[
"BSD-3-Clause"
] | null | null | null |
# Run the InVEST IUI modelui tests directly, without test discovery.
import unittest
import invest_natcap.iui.modelui_test

# Collect every TestCase defined in the modelui_test module into one suite
# and run it with per-test (verbosity=2) output.
loader = unittest.TestLoader()
suite = loader.loadTestsFromModule(invest_natcap.iui.modelui_test)
unittest.TextTestRunner(verbosity=2).run(suite)
| 25.25
| 66
| 0.841584
| 25
| 202
| 6.64
| 0.6
| 0.144578
| 0.180723
| 0.26506
| 0.313253
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005291
| 0.064356
| 202
| 7
| 67
| 28.857143
| 0.873016
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
db298d005c1a7596a02f24ea146555fdb72f7290
| 78
|
py
|
Python
|
spacy-textdescriptives/__init__.py
|
HLasse/spacy-textdescriptives
|
c079c6617ef266b54f28c51e619d2429a5dafb83
|
[
"Apache-2.0"
] | null | null | null |
spacy-textdescriptives/__init__.py
|
HLasse/spacy-textdescriptives
|
c079c6617ef266b54f28c51e619d2429a5dafb83
|
[
"Apache-2.0"
] | null | null | null |
spacy-textdescriptives/__init__.py
|
HLasse/spacy-textdescriptives
|
c079c6617ef266b54f28c51e619d2429a5dafb83
|
[
"Apache-2.0"
] | null | null | null |
from .load_components import add_components
from .extractor import extract_df
| 26
| 43
| 0.871795
| 11
| 78
| 5.909091
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 78
| 2
| 44
| 39
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c0157ba08d665280eacbf84c5cc5a88b2b445a21
| 158
|
py
|
Python
|
app/constants.py
|
Zhibade/easy-file-renamer
|
d9a14bdf7acaf438ec6ae1428c71fd07fb9e4231
|
[
"MIT"
] | null | null | null |
app/constants.py
|
Zhibade/easy-file-renamer
|
d9a14bdf7acaf438ec6ae1428c71fd07fb9e4231
|
[
"MIT"
] | null | null | null |
app/constants.py
|
Zhibade/easy-file-renamer
|
d9a14bdf7acaf438ec6ae1428c71fd07fb9e4231
|
[
"MIT"
] | null | null | null |
"""Shared constants module"""
import os

# Log directory under the user's roaming profile.
# NOTE(review): os.environ['APPDATA'] raises KeyError outside Windows —
# presumably this app is Windows-only; confirm before reusing elsewhere.
LOG_PATH = os.path.join(os.environ['APPDATA'], 'EasyFileRenamer')
# Full path of the single log file inside LOG_PATH.
LOG_FILE_PATH = os.path.join(LOG_PATH, 'log.txt')
| 22.571429
| 65
| 0.727848
| 24
| 158
| 4.625
| 0.541667
| 0.126126
| 0.18018
| 0.252252
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094937
| 158
| 6
| 66
| 26.333333
| 0.776224
| 0.14557
| 0
| 0
| 0
| 0
| 0.224806
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c04c584f5cc8da2efc2ac4e6a56c28e35bd07f4f
| 89
|
py
|
Python
|
tccli/services/bmeip/__init__.py
|
zqfan/tencentcloud-cli
|
b6ad9fced2a2b340087e4e5522121d405f68b615
|
[
"Apache-2.0"
] | 47
|
2018-05-31T11:26:25.000Z
|
2022-03-08T02:12:45.000Z
|
tccli/services/bmeip/__init__.py
|
zqfan/tencentcloud-cli
|
b6ad9fced2a2b340087e4e5522121d405f68b615
|
[
"Apache-2.0"
] | 23
|
2018-06-14T10:46:30.000Z
|
2022-02-28T02:53:09.000Z
|
tccli/services/bmeip/__init__.py
|
zqfan/tencentcloud-cli
|
b6ad9fced2a2b340087e4e5522121d405f68b615
|
[
"Apache-2.0"
] | 22
|
2018-10-22T09:49:45.000Z
|
2022-03-30T08:06:04.000Z
|
# -*- coding: utf-8 -*-
from tccli.services.bmeip.bmeip_client import action_caller
| 22.25
| 59
| 0.707865
| 12
| 89
| 5.083333
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013333
| 0.157303
| 89
| 4
| 60
| 22.25
| 0.8
| 0.235955
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
221963e95e778a7e6aaa27ae8a4be25435842995
| 3,409
|
py
|
Python
|
flask_sketch/handlers/commands_handler.py
|
ericsouza/flask-sketch
|
65625a567e5492b3787c5da3ba5e12b1473783c4
|
[
"MIT"
] | 11
|
2020-08-19T14:33:00.000Z
|
2021-05-29T17:12:47.000Z
|
flask_sketch/handlers/commands_handler.py
|
ericsouza/flask-sketch
|
65625a567e5492b3787c5da3ba5e12b1473783c4
|
[
"MIT"
] | 3
|
2021-03-16T19:33:39.000Z
|
2021-03-18T18:53:55.000Z
|
flask_sketch/handlers/commands_handler.py
|
ericsouza/flask-sketch
|
65625a567e5492b3787c5da3ba5e12b1473783c4
|
[
"MIT"
] | 2
|
2020-08-19T14:33:11.000Z
|
2020-09-16T19:34:16.000Z
|
from os.path import join as pjoin
from flask_sketch.utils import GenericHandler
from flask_sketch.sketch import Sketch
from flask_sketch import templates
def handle_mongo_default(sketch: Sketch):
    """Write the default commands template for MongoDB projects whose auth
    framework is neither "security" nor "none"; returns True when handled."""
    uses_mongo = sketch.database == "mongodb"
    default_auth = sketch.auth_framework not in ["security", "none"]
    if not (uses_mongo and default_auth):
        return None
    destination = pjoin(sketch.app_folder, "commands", "__init__.py",)
    sketch.write_template("mongo_default_tpl", templates.commands, destination)
    return True
def handle_mongo_jwt_default(sketch: Sketch):
    """Write the default commands template for MongoDB projects using
    JWT-only auth (no session auth framework, but an API auth framework set).

    Returns True when the template was written, like the sibling handlers.
    Removed a leftover debug print ("é vdd esse bilete") that leaked into
    the CLI output.
    """
    if (
        sketch.database == "mongodb"
        and sketch.auth_framework == "none"
        and sketch.api_auth_framework != "none"
    ):
        sketch.write_template(
            "mongo_default_tpl",
            templates.commands,
            pjoin(sketch.app_folder, "commands", "__init__.py",),
        )
        return True
def handle_sql_default(sketch: Sketch):
    """Write the default commands template for SQL-backed projects whose auth
    framework is neither "security" nor "none"; returns True when handled."""
    sql_backends = ["sqlite", "postgres", "mysql"]
    if sketch.database in sql_backends and sketch.auth_framework not in ["security", "none"]:
        target = pjoin(sketch.app_folder, "commands", "__init__.py",)
        sketch.write_template("sql_default_tpl", templates.commands, target)
        return True
def handle_sql_jwt_default(sketch: Sketch):
    """Write the default commands template for SQL projects using JWT-only
    auth; returns True when handled."""
    is_sql = sketch.database in ["sqlite", "postgres", "mysql"]
    jwt_only = (
        sketch.auth_framework == "none"
        and sketch.api_auth_framework != "none"
    )
    if is_sql and jwt_only:
        sketch.write_template(
            "sql_default_tpl",
            templates.commands,
            pjoin(sketch.app_folder, "commands", "__init__.py",),
        )
        return True
def handle_mongo_security(sketch: Sketch):
    """Write the Flask-Security commands template for MongoDB projects;
    returns True when handled."""
    if not (sketch.database == "mongodb" and sketch.auth_framework == "security"):
        return None
    destination = pjoin(sketch.app_folder, "commands", "__init__.py",)
    sketch.write_template("mongo_security_tpl", templates.commands, destination)
    return True
def handle_sql_security(sketch: Sketch):
    """Write the Flask-Security commands template for SQL-backed projects;
    returns True when handled."""
    is_sql = sketch.database in ["sqlite", "postgres", "mysql"]
    if not (is_sql and sketch.auth_framework == "security"):
        return None
    destination = pjoin(sketch.app_folder, "commands", "__init__.py",)
    sketch.write_template("sql_security_tpl", templates.commands, destination)
    return True
def handle_sql_noauth(sketch: Sketch):
    """Write the no-auth commands template for SQL-backed projects.

    Returns True when the template was written. Previously this handler fell
    through and returned None even on success, unlike every sibling handler;
    made consistent so the dispatching CommandsHandler can rely on a truthy
    "handled" result.
    """
    if (
        sketch.database in ["sqlite", "postgres", "mysql"]
        and sketch.auth_framework == "none"
        and sketch.api_auth_framework == "none"
    ):
        sketch.write_template(
            "sql_noauth_tpl",
            templates.commands,
            pjoin(sketch.app_folder, "commands", "__init__.py",),
        )
        return True
def handle_mongo_noauth(sketch: Sketch):
    """Write the no-auth commands template for MongoDB projects.

    Returns True when the template was written. Previously this handler fell
    through and returned None even on success, unlike every sibling handler;
    made consistent so the dispatching CommandsHandler can rely on a truthy
    "handled" result.
    """
    if (
        sketch.database == "mongodb"
        and sketch.auth_framework == "none"
        and sketch.api_auth_framework == "none"
    ):
        sketch.write_template(
            "mongo_noauth_tpl",
            templates.commands,
            pjoin(sketch.app_folder, "commands", "__init__.py",),
        )
        return True
class CommandsHandler(GenericHandler):
    # Behavior is inherited wholesale from GenericHandler; the subclass only
    # gives the commands handler chain its own type/name.
    ...
# Module-level handler chain: each handle_* callable is tried against the
# sketch; their conditions are disjoint, so at most one writes a template.
commands_handler = CommandsHandler(
    handle_mongo_default,
    handle_sql_default,
    handle_mongo_jwt_default,
    handle_sql_jwt_default,
    handle_mongo_security,
    handle_sql_security,
    handle_sql_noauth,
    handle_mongo_noauth,
)
| 26.84252
| 76
| 0.606336
| 357
| 3,409
| 5.453782
| 0.142857
| 0.05547
| 0.057524
| 0.082178
| 0.799178
| 0.79661
| 0.782229
| 0.779147
| 0.751412
| 0.751412
| 0
| 0
| 0.286301
| 3,409
| 126
| 77
| 27.055556
| 0.800247
| 0
| 0
| 0.542857
| 0
| 0
| 0.13875
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.07619
| false
| 0
| 0.038095
| 0
| 0.180952
| 0.009524
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
22315632732061c9a8ee9df2bdf7c3b676b7da96
| 83
|
py
|
Python
|
backend/handlers/graphql/resolvers/host.py
|
al-indigo/vmemperor
|
80eb6d47d839a4736eb6f9d2fcfad35f0a7b3bb1
|
[
"Apache-2.0"
] | null | null | null |
backend/handlers/graphql/resolvers/host.py
|
al-indigo/vmemperor
|
80eb6d47d839a4736eb6f9d2fcfad35f0a7b3bb1
|
[
"Apache-2.0"
] | 8
|
2017-10-11T13:26:10.000Z
|
2021-12-13T20:27:52.000Z
|
backend/handlers/graphql/resolvers/host.py
|
ispras/vmemperor
|
80eb6d47d839a4736eb6f9d2fcfad35f0a7b3bb1
|
[
"Apache-2.0"
] | 4
|
2017-07-27T12:25:42.000Z
|
2018-01-28T02:06:26.000Z
|
def hostType():
    """Return the GHost GraphQL type.

    The import is deferred to call time — presumably to break a circular
    import between resolvers and the types module; confirm before hoisting
    it to module level.
    """
    from handlers.graphql.types.host import GHost
    return GHost
| 20.75
| 49
| 0.73494
| 11
| 83
| 5.545455
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192771
| 83
| 3
| 50
| 27.666667
| 0.910448
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
2258d4b4826d1457893b826bedbb1e850e630711
| 8,155
|
py
|
Python
|
src/azure-cli/azure/cli/command_modules/netappfiles/tests/latest/test_subvolume_commands.py
|
ZengTaoxu/azure-cli
|
6be96de450da5ac9f07aafb22dd69880bea04792
|
[
"MIT"
] | 4
|
2022-01-25T07:33:15.000Z
|
2022-03-24T05:15:13.000Z
|
src/azure-cli/azure/cli/command_modules/netappfiles/tests/latest/test_subvolume_commands.py
|
ZengTaoxu/azure-cli
|
6be96de450da5ac9f07aafb22dd69880bea04792
|
[
"MIT"
] | null | null | null |
src/azure-cli/azure/cli/command_modules/netappfiles/tests/latest/test_subvolume_commands.py
|
ZengTaoxu/azure-cli
|
6be96de450da5ac9f07aafb22dd69880bea04792
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.testsdk import ScenarioTest, ResourceGroupPreparer
# Region used for the NetApp Files resources under test (staging region).
LOCATION = "southcentralusstage"
# Region used for the vnet/subnet. NOTE(review): networking is created in the
# non-stage region — presumably stage regions don't host vnets; confirm.
VNET_LOCATION = "southcentralus"
class AzureNetAppFilesSubvolumeServiceScenarioTest(ScenarioTest):
    """Scenario tests for `az netappfiles subvolume` create/update/show/delete,
    list, and metadata commands."""

    def create_volume(self, volume_only=False):
        """Create the ANF volume the subvolume tests run against.

        Unless *volume_only* is True, first provisions the prerequisites:
        vnet + delegated subnet (in VNET_LOCATION), account, and pool.
        Relies on self.kwargs having been populated by the calling test.
        """
        if not volume_only:
            # create vnet, account and pool
            self.cmd("az network vnet create -n {vnet} -g {rg} -l {vnet_loc} --address-prefix 10.5.0.0/16")
            self.cmd("az network vnet subnet create -n {subnet} --vnet-name {vnet} --address-prefixes '10.5.0.0/24' "
                     "--delegations 'Microsoft.Netapp/volumes' -g {rg}")
            self.cmd("netappfiles account create -g {rg} -a {acc_name} -l {loc}")
            self.cmd("netappfiles pool create -g {rg} -a {acc_name} -p {pool_name} -l {loc} --service-level 'Premium' "
                     "--size 4")
        # create volume
        return self.cmd("netappfiles volume create -g {rg} -a {acc_name} -p {pool_name} -v {vol_name} -l {loc} "
                        "--vnet {vnet} --subnet {subnet} --file-path {vol_name} --usage-threshold 100 "
                        "--enable-subvolumes {enable_subvolumes}")

    @ResourceGroupPreparer(name_prefix='cli_netappfiles_test_subvolume_crud_', additional_tags={'owner': 'cli_test'})
    def test_subvolume_crud(self):
        """Create, update, show, and delete a single subvolume."""
        self.kwargs.update({
            'loc': LOCATION,
            'acc_name': self.create_random_name(prefix='cli-acc-', length=24),
            'pool_name': self.create_random_name(prefix='cli-pool-', length=24),
            'vol_name': self.create_random_name(prefix='cli-vol-', length=24),
            'sub_vol_name': self.create_random_name(prefix='cli-sub-vol-', length=24),
            'path': "/sub_vol_1.txt",
            'vnet': self.create_random_name(prefix='cli-vnet-', length=24),
            'subnet': self.create_random_name(prefix='cli-subnet-', length=24),
            'vnet_loc': VNET_LOCATION,
            'enable_subvolumes': 'Enabled'
        })
        self.create_volume()
        # create
        self.cmd("az netappfiles subvolume create -g {rg} -a {acc_name} -p {pool_name} -v {vol_name} "
                 "--subvolume-name {sub_vol_name} --path {path}", checks=[
                 self.check('name', '{acc_name}' + '/' + '{pool_name}' + '/' + '{vol_name}' + '/' + '{sub_vol_name}'),
                 self.check('path', '{path}')])
        # update
        self.kwargs.update({
            'path': "/sub_vol_update.txt"
        })
        self.cmd("az netappfiles subvolume update -g {rg} -a {acc_name} -p {pool_name} -v {vol_name} "
                 "--subvolume-name {sub_vol_name} --path {path}", checks=[
                 self.check('name', '{acc_name}' + '/' + '{pool_name}' + '/' + '{vol_name}' + '/' + '{sub_vol_name}'),
                 self.check('path', '{path}')])
        # get
        self.cmd("az netappfiles subvolume show -g {rg} -a {acc_name} -p {pool_name} -v {vol_name} "
                 "--subvolume-name {sub_vol_name}", checks=[
                 self.check('name', '{acc_name}' + '/' + '{pool_name}' + '/' + '{vol_name}' + '/' + '{sub_vol_name}'),
                 self.check('path', '{path}')])
        # delete
        self.cmd("az netappfiles subvolume delete -g {rg} -a {acc_name} -p {pool_name} -v {vol_name} "
                 "--subvolume-name {sub_vol_name} -y")
        self.cmd("az netappfiles subvolume list -g {rg} -a {acc_name} -p {pool_name} -v {vol_name}", checks=[
            self.check('length(@)', 0)])

    @ResourceGroupPreparer(name_prefix='cli_netappfiles_test_subvolume_list_', additional_tags={'owner': 'cli_test'})
    def test_subvolume_list(self):
        """List subvolumes, checking the count after each create and delete."""
        self.kwargs.update({
            'loc': LOCATION,
            'acc_name': self.create_random_name(prefix='cli-acc-', length=24),
            'pool_name': self.create_random_name(prefix='cli-pool-', length=24),
            'vol_name': self.create_random_name(prefix='cli-vol-', length=24),
            'sub_vol_name1': self.create_random_name(prefix='cli-sub-vol-', length=24),
            'sub_vol_name2': self.create_random_name(prefix='cli-sub-vol-', length=24),
            'path1': "/sub_vol_1.txt",
            'path2': "/sub_vol_2.txt",
            'vnet': self.create_random_name(prefix='cli-vnet-', length=24),
            'subnet': self.create_random_name(prefix='cli-subnet-', length=24),
            'vnet_loc': VNET_LOCATION,
            'enable_subvolumes': 'Enabled'
        })
        self.create_volume()
        # create
        self.cmd("az netappfiles subvolume create -g {rg} -a {acc_name} -p {pool_name} -v {vol_name} "
                 "--subvolume-name {sub_vol_name1} --path {path1}", checks=[
                 self.check('name', '{acc_name}' + '/' + '{pool_name}' + '/' + '{vol_name}' + '/' + '{sub_vol_name1}'),
                 self.check('path', '{path1}')])
        # list
        self.cmd("az netappfiles subvolume list -g {rg} -a {acc_name} -p {pool_name} -v {vol_name}", checks=[
            self.check('length(@)', 1)])
        # create
        self.cmd("az netappfiles subvolume create -g {rg} -a {acc_name} -p {pool_name} -v {vol_name} "
                 "--subvolume-name {sub_vol_name2} --path {path2}", checks=[
                 self.check('name', '{acc_name}' + '/' + '{pool_name}' + '/' + '{vol_name}' + '/' + '{sub_vol_name2}'),
                 self.check('path', '{path2}')])
        # list
        self.cmd("az netappfiles subvolume list -g {rg} -a {acc_name} -p {pool_name} -v {vol_name}", checks=[
            self.check('length(@)', 2)])
        # delete
        self.cmd("az netappfiles subvolume delete -g {rg} -a {acc_name} -p {pool_name} -v {vol_name} "
                 "--subvolume-name {sub_vol_name1} -y")
        # list
        self.cmd("az netappfiles subvolume list -g {rg} -a {acc_name} -p {pool_name} -v {vol_name}", checks=[
            self.check('length(@)', 1)])
        # delete
        self.cmd("az netappfiles subvolume delete -g {rg} -a {acc_name} -p {pool_name} -v {vol_name} "
                 "--subvolume-name {sub_vol_name2} -y")
        # list
        self.cmd("az netappfiles subvolume list -g {rg} -a {acc_name} -p {pool_name} -v {vol_name}", checks=[
            self.check('length(@)', 0)])

    @ResourceGroupPreparer(name_prefix='cli_netappfiles_test_subvolume_metadata_', additional_tags={'owner': 'cli_test'})
    def test_subvolume_get_metadata(self):
        """Create a subvolume and read its metadata via `subvolume metadata show`."""
        self.kwargs.update({
            'loc': LOCATION,
            'acc_name': self.create_random_name(prefix='cli-acc-', length=24),
            'pool_name': self.create_random_name(prefix='cli-pool-', length=24),
            'vol_name': self.create_random_name(prefix='cli-vol-', length=24),
            'sub_vol_name': self.create_random_name(prefix='cli-sub-vol-', length=24),
            'path': "/sub_vol_1.txt",
            'vnet': self.create_random_name(prefix='cli-vnet-', length=24),
            'subnet': self.create_random_name(prefix='cli-subnet-', length=24),
            'vnet_loc': VNET_LOCATION,
            'enable_subvolumes': 'Enabled'
        })
        self.create_volume()
        # create
        self.cmd("az netappfiles subvolume create -g {rg} -a {acc_name} -p {pool_name} -v {vol_name} "
                 "--subvolume-name {sub_vol_name} --path {path}", checks=[
                 self.check('name', '{acc_name}' + '/' + '{pool_name}' + '/' + '{vol_name}' + '/' + '{sub_vol_name}'),
                 self.check('path', '{path}')])
        # get metadata
        self.cmd("az netappfiles subvolume metadata show -g {rg} -a {acc_name} -p {pool_name} -v {vol_name} "
                 "--subvolume-name {sub_vol_name}", checks=[
                 self.check('path', '{path}')])
        self.cmd("az netappfiles subvolume delete -g {rg} -a {acc_name} -p {pool_name} -v {vol_name} "
                 "--subvolume-name {sub_vol_name} -y")
| 53.300654
| 121
| 0.563335
| 994
| 8,155
| 4.399396
| 0.109658
| 0.064029
| 0.065401
| 0.030414
| 0.790533
| 0.760119
| 0.756231
| 0.742968
| 0.714155
| 0.708209
| 0
| 0.012821
| 0.24439
| 8,155
| 153
| 122
| 53.300654
| 0.696852
| 0.058001
| 0
| 0.654545
| 0
| 0.190909
| 0.455495
| 0.018011
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036364
| false
| 0
| 0.009091
| 0
| 0.063636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
226a96432c487c02fe34fcd8d6f2d505407cc9d4
| 116
|
py
|
Python
|
src/reports.py
|
ThomasMeli/FeatureSelectionMaster
|
5bd972cc2ffeae815d5a3562b8fb9351d3a6428c
|
[
"MIT"
] | 1
|
2021-09-09T11:36:33.000Z
|
2021-09-09T11:36:33.000Z
|
src/reports.py
|
ThomasMeli/FeatureSelectionMaster
|
5bd972cc2ffeae815d5a3562b8fb9351d3a6428c
|
[
"MIT"
] | null | null | null |
src/reports.py
|
ThomasMeli/FeatureSelectionMaster
|
5bd972cc2ffeae815d5a3562b8fb9351d3a6428c
|
[
"MIT"
] | null | null | null |
class ReportsManager:
    """Skeleton manager for accumulating per-run reports (not yet implemented)."""

    def __init__(self):
        # No state yet; placeholder constructor.
        pass

    def _add_to_overall_report(self, df):
        # TODO: fold *df* (presumably a pandas DataFrame — confirm) into an
        # overall report; currently a stub.
        pass
| 14.5
| 41
| 0.62931
| 14
| 116
| 4.642857
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.301724
| 116
| 7
| 42
| 16.571429
| 0.802469
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0.4
| 0
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
3f2f3df73ebb4a8660f505af4ce55ac9369cfdd3
| 28,156
|
py
|
Python
|
doc/aliyun-tablestore-python-sdk-2.0.9/tests/parameter_validation_test.py
|
wanghq/goots
|
3493be0edb6b9a085e4fcd370dee0e354447897b
|
[
"MIT"
] | 1
|
2016-12-07T02:10:03.000Z
|
2016-12-07T02:10:03.000Z
|
doc/aliyun-tablestore-python-sdk-2.0.8/tests/parameter_validation_test.py
|
wanghq/goots
|
3493be0edb6b9a085e4fcd370dee0e354447897b
|
[
"MIT"
] | 2
|
2017-05-09T05:42:13.000Z
|
2017-05-09T13:42:09.000Z
|
doc/aliyun-tablestore-python-sdk-2.0.8/tests/parameter_validation_test.py
|
wanghq/goots
|
3493be0edb6b9a085e4fcd370dee0e354447897b
|
[
"MIT"
] | 2
|
2017-05-19T09:19:30.000Z
|
2018-02-06T12:09:43.000Z
|
# -*- coding: utf8 -*-
import unittest
from lib.ots2_api_test_base import *
from ots2 import *
import lib.restriction as restriction
import lib.test_config as test_config
import copy
import time
class ParameterValidationTest(OTS2APITestBase):
"""参数合法性检查测试(不包含限制项)"""
    def _get_client(self, instance_name):
        """Build an OTSClient for *instance_name* using the shared test
        endpoint and credentials from lib.test_config."""
        client = OTSClient(
            test_config.OTS_ENDPOINT,
            test_config.OTS_ID,
            test_config.OTS_SECRET,
            instance_name
        )
        return client
def _invalid_instance_or_table_name_op(self, client, table = None, error_code="", error_message=""):
table_name = table if table != None else "table_test"
table_meta = TableMeta(table_name, [("PK", "STRING")])
reserved_throughput = ReservedThroughput(CapacityUnit(restriction.MaxReadWriteCapacityUnit, restriction.MaxReadWriteCapacityUnit))
#create_table
try:
client.create_table(table_meta, reserved_throughput)
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#delete_table
try:
client.delete_table(table_name)
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#list_table
if table == None:
try:
client.list_table()
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#update_table
try:
client.update_table(table_name, ReservedThroughput(CapacityUnit(1, 1)))
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#describe_table
try:
client.describe_table(table_name)
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#get_row
try:
client.get_row(table_name, {"PK": "x"})
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#put_row
try:
client.put_row(table_name, Condition("IGNORE"), {"PK": "x"}, {"COL": "x"})
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#update_row
try:
client.update_row(table_name, Condition("IGNORE"), {"PK": "x"}, {'put':{"COL": "x"}})
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#delete_row
try:
client.delete_row(table_name, Condition("IGNORE"), {"PK": "x"})
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#bacth_get_row
batches = [(table_name, [{"PK": "x"}], [])]
try:
client.batch_get_row(batches)
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#batch_write_row
put_row_item = {"table_name": table_name, "put": [PutRowItem(Condition("IGNORE"), {"PK": "x"}, {"COL": "x"})]}
update_row_item = {"table_name": table_name, "update": [UpdateRowItem(Condition("IGNORE"), {"PK": "x"}, {'put':{"COL": "x"}})]}
delete_row_item = {"table_name": table_name, "delete": [DeleteRowItem(Condition("IGNORE"), {"PK": "x"})]}
batch_write_items = [put_row_item, update_row_item, delete_row_item]
for item in batch_write_items:
batches = [item]
try:
client.batch_write_row(batches)
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#get_range
try:
client.get_range(table_name, "FORWARD", {"PK": INF_MIN},{"PK": INF_MAX})
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
    def test_update_row_columns_empty(self):
        """UpdateRow (and BatchWriteRow's update) with zero columns must fail with OTSParameterInvalid."""
        # Create the test table first so the failure is caused by the empty
        # column set, not by a missing table.
        table_name = "table_test"
        table_meta = TableMeta(table_name, [('PK0', 'STRING')])
        reserved_throughput = ReservedThroughput(CapacityUnit(restriction.MaxReadWriteCapacityUnit,restriction.MaxReadWriteCapacityUnit))
        self.client_test.create_table(table_meta, reserved_throughput)
        self.wait_for_partition_load(table_name)
        primary_keys = {"PK0": "x"}
        # update_row: an empty update dict must be rejected.
        try:
            self.client_test.update_row(table_name, Condition("IGNORE"), primary_keys, {})
            self.assert_false()
        except OTSServiceError as e:
            self.assert_error(e, 400, "OTSParameterInvalid", "No column specified while updating row.")
        # batch_write_row: the same empty update inside a batch; note the
        # expected error message differs from the single-row API's.
        update_row_item = {"table_name": table_name, "update": [UpdateRowItem(Condition("IGNORE"), primary_keys, {})]}
        write_batches = [update_row_item]
        try:
            self.client_test.batch_write_row(write_batches)
            self.assert_false()
        except OTSServiceError as e:
            self.assert_error(e, 400, "OTSParameterInvalid", "No attribute column specified to update row #%d in table: '%s'." % (0, table_name))
def test_invalid_instance_name(self):
"""对于每个API,测试instance name为空,'*', '#', '中文', 'aa'的情形,期望返回ErrorCode: OTSParameterInvalid"""
for instance_name in ["", "*", "#", "中文", "aa", "_aaa", "a###", "h***", "-aa", "aa-"]:
client = self._get_client(instance_name)
self._invalid_instance_or_table_name_op(client, error_code="OTSParameterInvalid", error_message="Invalid instance name: '%s'." % instance_name)
    def _valid_instance_or_table_name_op(self, client, table = None):
        """Run the full API lifecycle against *client*; every call must succeed.

        Creates a table, reads and writes rows through every API, then drops
        the table.  When *table* is None a default name is used and list_table
        is expected to be empty afterwards.
        """
        table_name = table if table != None else "table_test"
        table_meta = TableMeta(table_name, [("PK", "STRING")])
        reserved_throughput = ReservedThroughput(CapacityUnit(1, 2))
        #create_table
        expect_increase_time = int(time.time())
        client.create_table(table_meta, reserved_throughput)
        self.wait_for_partition_load(table_name)
        #describe_table: must report the schema and CU the table was created with.
        response = client.describe_table(table_name)
        self.assert_DescribeTableResponse(response, CapacityUnit(1, 2), table_meta)
        #get_row: the row does not exist yet, so an empty result is expected.
        consumed, primary_keys, columns = client.get_row(table_name, {"PK": "x"})
        self.assert_consumed(consumed, CapacityUnit(1, 0))
        self.assert_equal(primary_keys, {})
        self.assert_equal(columns, {})
        #batch_get_row: the same empty row through the batch API.
        batches = [(table_name, [{"PK": "x"}], [])]
        response = client.batch_get_row(batches)
        expect_row_data_item = RowDataItem(True, "", "", CapacityUnit(1, 0), {}, {})
        expect_response = [[expect_row_data_item]]
        self.assert_RowDataItem_equal(response, expect_response)
        #get_range: empty table, so no rows and no continuation token.
        consumed, next_start_primary_keys, rows = client.get_range(table_name, 'FORWARD', {"PK": INF_MIN}, {"PK": INF_MAX})
        self.assert_consumed(consumed, CapacityUnit(1, 0))
        self.assert_equal(next_start_primary_keys, None)
        self.assert_equal(rows, [])
        #put_row: each single-row write is expected to cost one write CU here.
        consumed = client.put_row(table_name, Condition("IGNORE"), {"PK": "x"}, {'COL': 'x'})
        self.assert_consumed(consumed, CapacityUnit(0, 1))
        #update_row
        consumed = client.update_row(table_name, Condition("IGNORE"), {"PK": "x"}, {'put':{"COL": "x1"}})
        self.assert_consumed(consumed, CapacityUnit(0, 1))
        #delete_row
        consumed = client.delete_row(table_name, Condition("IGNORE"), {"PK": "x"})
        self.assert_consumed(consumed, CapacityUnit(0, 1))
        #batch_write_row: one single-item batch per write type, each must succeed.
        put_row_item = {"table_name": table_name, "put": [PutRowItem(Condition("IGNORE"), {"PK": "x"}, {'COL': 'x'})]}
        update_row_item = {"table_name": table_name, "update": [UpdateRowItem(Condition("IGNORE"), {"PK": "x"}, {'put':{'COL': 'x1'}})]}
        delete_row_item = {"table_name": table_name, "delete": [DeleteRowItem(Condition("IGNORE"), {"PK": "x"})]}
        op_list = [('put', put_row_item), ('update', update_row_item), ('delete', delete_row_item)]
        for op_type, item in op_list:
            write_batches = [item]
            response = client.batch_write_row(write_batches)
            expect_write_data_item = BatchWriteRowResponseItem(True, "", "", CapacityUnit(0, 1))
            expect_response = [{op_type : [expect_write_data_item]}]
            self.assert_BatchWriteRowResponseItem(response, expect_response)
        #delete_table
        client.delete_table(table_name)
        #list_table: after dropping the default table the instance is empty.
        if table == None:
            table_list = client.list_table()
            self.assert_equal(table_list, ())
def test_instance_name_of_huge_size(self):
"""用一个长度为2K的instance name去访问OTS,期望返回OTSServiceError"""
instance_name = 'X' * (2 * 1024)
client = self._get_client(instance_name)
try:
client.list_table()
self.assert_false()
except OTSServiceError:
# i don't care what kind of error
pass
def test_invalid_table_name(self):
"""测试所有相关API中表名为空,'0', '#', '中文', 'T#', 'T中文', '3t', '-'的情况,期望返回ErrorCode: OTSParameterInvalid"""
table_list = ['', '0', '#', '中文', 'T#', 'T中文', '3t', '-']
for table_name in table_list:
time.sleep(2) # to avoid too frequently table operation
self._invalid_instance_or_table_name_op(self.client_test, table_name, error_code="OTSParameterInvalid", error_message="Invalid table name: '%s'." % table_name)
def test_valid_table_name(self):
"""测试所有相关API中表名为'_0', '_T', 'A0'的情况,期望操作成功"""
table_list = ['_0', '_T', 'A0']
for table_name in table_list:
self._valid_instance_or_table_name_op(self.client_test, table_name)
def _invalid_column_name_test(self,table_name, primary_keys, columns_name, error_code="", error_message=""):
#get_row
try:
self.client_test.get_row(table_name, primary_keys, [columns_name])
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#put_row
try:
self.client_test.put_row(table_name, Condition("IGNORE"), primary_keys, {columns_name: "x"})
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#update_row
try:
self.client_test.update_row(table_name, Condition("IGNORE"), primary_keys, {'put':{columns_name: "x1"}})
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#batch_get_row
try:
get_batches = [(table_name, [primary_keys], [columns_name])]
self.client_test.batch_get_row(get_batches)
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
put_row_item = {"table_name": table_name, "put":[PutRowItem(Condition("IGNORE"), primary_keys, {columns_name: "x"})]}
update_row_item = {"table_name": table_name, "update": [UpdateRowItem(Condition("IGNORE"), primary_keys, {'put':{columns_name: "x"}})]}
batches_list = [put_row_item, update_row_item]
for item in batches_list:
write_batches = [item]
try:
self.client_test.batch_write_row(write_batches)
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#get_range
try:
self.client_test.get_range(table_name, 'FORWARD', {"PK0": INF_MIN}, {"PK0": INF_MAX}, [columns_name], None)
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
def test_invalid_column_name(self):
"""测试所有相关API中column name为空,'0', '#', '中文', 'T#', 'T中文'的情况,期望返回ErrorCode: OTSParameterInvalid"""
#create test table
table_name = "table_test"
table_meta = TableMeta(table_name, [('PK0', 'STRING')])
reserved_throughput = ReservedThroughput(CapacityUnit(1, 1))
self.client_test.create_table(table_meta, reserved_throughput)
self.wait_for_partition_load('table_test')
primary_keys = {"PK0": "x"}
column_name_list = ['', '"', '"abc', '!', '!abc', '#abc', '#', '中文', 'T中文']
for column_name in column_name_list:
self._invalid_column_name_test(table_name, primary_keys, column_name, error_code="OTSParameterInvalid", error_message="Invalid column name: '%s'." % column_name)
    def _valid_column_name_test(self, table_name, columns_name):
        """Exercise every API with a column named *columns_name*; all calls must succeed."""
        # The column name doubles as the PK value, so each case works on its own row.
        pk = {"PK": columns_name}
        #get_row: row absent, so an empty result at one read CU.
        consumed, primary_keys, columns = self.client_test.get_row(table_name, pk, [columns_name])
        self.assert_consumed(consumed, CapacityUnit(1, 0))
        self.assert_equal(primary_keys, {})
        self.assert_equal(columns, {})
        #batch_get_row
        batches = [(table_name, [pk], [columns_name])]
        response = self.client_test.batch_get_row(batches)
        expect_row_data_item = RowDataItem(True, "", "", CapacityUnit(1, 0), {}, {})
        expect_response = [[expect_row_data_item]]
        self.assert_RowDataItem_equal(response, expect_response)
        #get_range
        consumed, next_start_primary_keys, rows = self.client_test.get_range(table_name, 'FORWARD', {"PK": INF_MIN}, {"PK": INF_MAX}, [columns_name], None)
        self.assert_consumed(consumed, CapacityUnit(1, 0))
        self.assert_equal(next_start_primary_keys, None)
        self.assert_equal(rows, [])
        #put_row: the expected write CU depends on row size via sum_CU_from_row.
        consumed = self.client_test.put_row(table_name, Condition("IGNORE"), pk, {columns_name: 'x'})
        self.assert_consumed(consumed, CapacityUnit(0, self.sum_CU_from_row(pk, {columns_name: 'x'})))
        #update_row
        consumed = self.client_test.update_row(table_name, Condition("IGNORE"), pk, {'put':{columns_name: "x1"}})
        self.assert_consumed(consumed, CapacityUnit(0, self.sum_CU_from_row(pk, {columns_name: "x1"})))
        #batch_write_row: a put batch and an update batch, checked one at a time.
        put_row_item = {"table_name": table_name, "put": [PutRowItem(Condition("IGNORE"), pk, {columns_name: 'x'})]}
        update_row_item = {"table_name": table_name, "update": [UpdateRowItem(Condition("IGNORE"), pk, {'put':{columns_name: 'x1'}})]}
        write_batches = [put_row_item]
        response = self.client_test.batch_write_row(write_batches)
        expect_write_data_item = {"put": [BatchWriteRowResponseItem(True, "", "", CapacityUnit(0, self.sum_CU_from_row(pk, {columns_name: "x"})))]}
        expect_response = [expect_write_data_item]
        self.assert_BatchWriteRowResponseItem(response, expect_response)
        write_batches = [update_row_item]
        response = self.client_test.batch_write_row(write_batches)
        expect_write_data_item = {"update": [BatchWriteRowResponseItem(True, "", "", CapacityUnit(0, self.sum_CU_from_row(pk, {columns_name: "x1"})))]}
        expect_response = [expect_write_data_item]
        self.assert_BatchWriteRowResponseItem(response, expect_response)
def test_valid_column_name(self):
"""测试所有相关API中column name为'_0', '_T', 'A0'的情况,期望操作成功"""
table_name = "table_test"
table_meta = TableMeta(table_name, [("PK", "STRING")])
reserved_throughput = ReservedThroughput(CapacityUnit(restriction.MaxReadWriteCapacityUnit, restriction.MaxReadWriteCapacityUnit))
#create_table
self.client_test.create_table(table_meta, reserved_throughput)
self.wait_for_partition_load(table_name)
column_name_list = ['_0', '_T', 'A0', '_a-b', 'a-b-c', 'waef-', '_--', '_%', '_a%b', 'abc%def', '/', '\\', '|', '&', '$', '~']
for column_name in column_name_list:
self._valid_column_name_test(table_name, column_name)
def _invalid_primary_key_type_create_table(self, _type, error_code='', error_message=''):
table_name = "table_test"
PK_schema = []
for i in range(restriction.MaxPKColumnNum):
PK_schema.append(("PK%d" % i, "STRING"))
for i in range(restriction.MaxPKColumnNum):
PK = copy.copy(PK_schema)
PK[i] = ("PK%d" % i, _type)
table_meta = TableMeta(table_name, PK)
try:
self.client_test.create_table(table_meta, ReservedThroughput(CapacityUnit(10, 10)))
self.assert_false()
except Exception as e:
self.assert_error(e, 400, error_code, error_message)
def _invalid_pk_type_test(self, invalid_pk, is_range=None, error_code="", error_message="", error_message_for_range=""):
table_name = "table_test"
pk_schema, valid_primary_keys = self.get_primary_keys(restriction.MaxPKColumnNum, "STRING")
for pk in valid_primary_keys.keys():
primary_keys = copy.copy(valid_primary_keys)
primary_keys[pk] = invalid_pk
#get row
try:
self.client_test.get_row(table_name, primary_keys, [])
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#put_row
try:
self.client_test.put_row(table_name, Condition("IGNORE"), primary_keys, {'COL': 'x'})
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#update_row
try:
self.client_test.update_row(table_name, Condition("IGNORE"), primary_keys, {'put':{'COL': 'x'}})
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#delete_row
try:
self.client_test.delete_row(table_name, Condition("IGNORE"), primary_keys)
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#batch_get_row
get_batches = [(table_name, [primary_keys], [])]
try:
self.client_test.batch_get_row(get_batches)
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#batch_write_row
put_row_item = {"table_name": table_name, "put":[PutRowItem(Condition("IGNORE"), primary_keys, {'COL': 'x'})]}
update_row_item = {"table_name": table_name, "update": [UpdateRowItem(Condition("IGNORE"), primary_keys, {'put':{'COL': 'x'}})]}
delete_row_item = {"table_name": table_name, "delete": [DeleteRowItem(Condition("IGNORE"), primary_keys)]}
batches_list = [put_row_item, update_row_item, delete_row_item]
for item in batches_list:
write_batches = [item]
try:
self.client_test.batch_write_row(write_batches)
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#get_range
def assert_get_range(table_name, inclusive_start_primary_keys, exclusive_end_primary_keys):
try:
self.client_test.get_range(table_name, 'FORWARD', inclusive_start_primary_keys, exclusive_end_primary_keys, [], None)
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message_for_range)
if is_range == None:
pk_schema, exclusive_primary_keys = self.get_primary_keys(restriction.MaxPKColumnNum, "STRING", "PK", INF_MAX)
pk_schema, inclusive_primary_keys = self.get_primary_keys(restriction.MaxPKColumnNum, "STRING", "PK", INF_MIN)
for pk in valid_primary_keys.keys():
primary_keys = copy.copy(valid_primary_keys)
primary_keys[pk] = invalid_pk
assert_get_range(table_name, inclusive_primary_keys, primary_keys)
assert_get_range(table_name, primary_keys, exclusive_primary_keys)
def test_PK_double(self):
"""测试所有相关API中PK为double的情况,期望返回OTSParameterInvalid"""
double_value = 5.6
self._invalid_primary_key_type_create_table("DOUBLE", error_code="OTSParameterInvalid", error_message="DOUBLE is an invalid type for the primary key.")
self._invalid_pk_type_test(double_value, error_code="OTSParameterInvalid", error_message="DOUBLE is an invalid type for the primary key.", error_message_for_range="DOUBLE is an invalid type for the primary key in GetRange.")
def test_PK_boolean(self):
"""测试所有相关API中 PK为boolean的情况,期望返回OTSParameterInvalid"""
boolean_value = True
self._invalid_primary_key_type_create_table("BOOLEAN", error_code="OTSParameterInvalid", error_message="BOOLEAN is an invalid type for the primary key.")
self._invalid_pk_type_test(boolean_value, error_code="OTSParameterInvalid", error_message="BOOLEAN is an invalid type for the primary key.", error_message_for_range="BOOLEAN is an invalid type for the primary key in GetRange.")
def test_table_not_exist(self):
"""测试除CreateTable以外所有API,操作的表不存在的情况,期望返回OTSObjectNotExist"""
table_name = "table_name_for_table_not_exist_test"
#delete_table
try:
self.client_test.delete_table(table_name)
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 404, "OTSObjectNotExist", "Requested table does not exist.")
#update_table
try:
self.client_test.update_table(table_name, ReservedThroughput(CapacityUnit(10, 10)))
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 404, "OTSObjectNotExist", "Requested table does not exist.")
#describe_table
try:
self.client_test.describe_table(table_name)
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 404, "OTSObjectNotExist", "Requested table does not exist.")
#get_row
try:
self.client_test.get_row(table_name, {"PK": "x"})
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 404, "OTSObjectNotExist", "Requested table does not exist.")
#put_row
try:
self.client_test.put_row(table_name, Condition("IGNORE"), {"PK": "x"}, {"COL": "x"})
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 404, "OTSObjectNotExist", "Requested table does not exist.")
#update_row
try:
self.client_test.update_row(table_name, Condition("IGNORE"), {"PK": "x"}, {'put':{"COL": "x"}})
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 404, "OTSObjectNotExist", "Requested table does not exist.")
#delete_row
try:
self.client_test.delete_row(table_name, Condition("IGNORE"), {"PK": "x"})
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 404, "OTSObjectNotExist", "Requested table does not exist.")
#bacth_get_row
batches = [(table_name, [{"PK": "x"}], [])]
response = self.client_test.batch_get_row(batches)
expect_row_data_item = RowDataItem(False, "OTSObjectNotExist", "Requested table does not exist.", None, None, None)
expect_response = [[expect_row_data_item]]
self.assert_RowDataItem_equal(response, expect_response)
#batch_write_row
put_row_item = {"table_name": table_name, "put": [PutRowItem(Condition("IGNORE"), {"PK": "x"}, {"COL": "x"})]}
update_row_item = {"table_name": table_name, "update": [UpdateRowItem(Condition("IGNORE"), {"PK": "x"}, {'put':{"COL": "x"}})]}
delete_row_item = {"table_name": table_name, "delete": [DeleteRowItem(Condition("IGNORE"), {"PK": "x"})]}
batch_write_items = [put_row_item, update_row_item, delete_row_item]
resp_key = ["put", "update", "delete"]
for item, key in zip(batch_write_items, resp_key):
batches = [item]
response = self.client_test.batch_write_row(batches)
expect_write_data_item = BatchWriteRowResponseItem(False, "OTSObjectNotExist", "Requested table does not exist.", None)
expect_response = [{key : [expect_write_data_item]}]
self.assert_BatchWriteRowResponseItem(response, expect_response)
#get_range
try:
self.client_test.get_range(table_name, "FORWARD", {"PK": INF_MIN},{"PK": INF_MAX})
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 404, "OTSObjectNotExist", "Requested table does not exist.")
def _column_restriction_test_except(self, columns, error_code="", error_message=""):
#create test table
table_name = "table_test"
table_meta = TableMeta(table_name, [('PK0', 'STRING')])
reserved_throughput = ReservedThroughput(CapacityUnit(restriction.MaxReadWriteCapacityUnit,restriction.MaxReadWriteCapacityUnit))
self.client_test.create_table(table_meta, reserved_throughput)
self.wait_for_partition_load('table_test')
primary_keys = {"PK0": "x"}
#put_row
try:
self.client_test.put_row(table_name, Condition("IGNORE"), primary_keys, columns)
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
#update_row
try:
self.client_test.update_row(table_name, Condition("IGNORE"), primary_keys, {'put':columns})
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
put_row_item = {"table_name": table_name, "put": [PutRowItem(Condition("IGNORE"), primary_keys, columns)]}
update_row_item = {"table_name": table_name, "update": [UpdateRowItem(Condition("IGNORE"), primary_keys, {'put':columns})]}
batches_list = [put_row_item, update_row_item]
for item in batches_list:
write_batches = [item]
try:
self.client_test.batch_write_row(write_batches)
self.assert_false()
except OTSServiceError as e:
self.assert_error(e, 400, error_code, error_message)
def test_column_type_INF_MIN(self):
"""测试所有API中,主键和列的类型(get_range的start和end除外)为INF_MIN的情况,期望返回OTSParameterInvalid"""
self._invalid_pk_type_test(INF_MIN, True, error_code="OTSParameterInvalid", error_message="INF_MIN is an invalid type for the primary key.")
self._column_restriction_test_except({"COL": INF_MIN}, "OTSParameterInvalid", "INF_MIN is an invalid type for the attribute column.")
def test_column_type_INF_MAX(self):
"""测试所有API中,主键和列的类型(get_range的start和end除外)为INF_MAX的情况,期望返回OTSParameterInvalid"""
self._invalid_pk_type_test(INF_MAX, True, error_code="OTSParameterInvalid", error_message="INF_MAX is an invalid type for the primary key.")
self._column_restriction_test_except({"COL": INF_MAX}, "OTSParameterInvalid", "INF_MAX is an invalid type for the attribute column.")
# Allow running this module directly as a test script.
if __name__ == '__main__':
    unittest.main()
| 50.823105
| 235
| 0.638514
| 3,303
| 28,156
| 5.1099
| 0.063276
| 0.066122
| 0.034838
| 0.049769
| 0.850338
| 0.826638
| 0.779891
| 0.73344
| 0.689892
| 0.669214
| 0
| 0.009361
| 0.241192
| 28,156
| 553
| 236
| 50.915009
| 0.780623
| 0.054873
| 0
| 0.629371
| 0
| 0
| 0.097757
| 0.001322
| 0
| 0
| 0
| 0
| 0.251748
| 1
| 0.048951
| false
| 0.002331
| 0.016317
| 0
| 0.06993
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
58b15d60e2e598955273f607600af363f59de2c4
| 126
|
py
|
Python
|
ksher/payment/admin.py
|
atthana/ksher_test
|
7aa599cb338b30b6cdab2b6dcd2672aa55aa31b9
|
[
"bzip2-1.0.6"
] | null | null | null |
ksher/payment/admin.py
|
atthana/ksher_test
|
7aa599cb338b30b6cdab2b6dcd2672aa55aa31b9
|
[
"bzip2-1.0.6"
] | null | null | null |
ksher/payment/admin.py
|
atthana/ksher_test
|
7aa599cb338b30b6cdab2b6dcd2672aa55aa31b9
|
[
"bzip2-1.0.6"
] | null | null | null |
from django.contrib import admin
from .models import Allproduct
# Expose Allproduct in the Django admin using the default ModelAdmin.
admin.site.register(Allproduct)
| 21
| 32
| 0.81746
| 17
| 126
| 6.058824
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119048
| 126
| 5
| 33
| 25.2
| 0.927928
| 0.206349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
58c37740c73d2eb1a85c635fbd0327a0f85ff2cf
| 57
|
py
|
Python
|
MISSIONS/collective_assult_debug/envs/__init__.py
|
binary-husky/hmp2g
|
1a4f4093cd296f07348f4db4c7503aca6e1fb05c
|
[
"MIT"
] | 2
|
2022-02-25T12:04:55.000Z
|
2022-03-15T02:37:59.000Z
|
MISSIONS/collective_assult_debug/envs/__init__.py
|
binary-husky/hmp2g
|
1a4f4093cd296f07348f4db4c7503aca6e1fb05c
|
[
"MIT"
] | null | null | null |
MISSIONS/collective_assult_debug/envs/__init__.py
|
binary-husky/hmp2g
|
1a4f4093cd296f07348f4db4c7503aca6e1fb05c
|
[
"MIT"
] | null | null | null |
from .collective_assult_env import collective_assultEnvV1
| 57
| 57
| 0.929825
| 7
| 57
| 7.142857
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018519
| 0.052632
| 57
| 1
| 57
| 57
| 0.907407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
58ee53c32810fee48f79d73f798ddf8f5129b8ad
| 92
|
py
|
Python
|
stacks/mutations/__init__.py
|
david5010/sheliak
|
39d0cef25d4df0dd4cb8c5153b18be4eab1a0029
|
[
"MIT"
] | 6
|
2022-02-24T00:04:24.000Z
|
2022-03-21T08:46:46.000Z
|
stacks/mutations/__init__.py
|
david5010/sheliak
|
39d0cef25d4df0dd4cb8c5153b18be4eab1a0029
|
[
"MIT"
] | null | null | null |
stacks/mutations/__init__.py
|
david5010/sheliak
|
39d0cef25d4df0dd4cb8c5153b18be4eab1a0029
|
[
"MIT"
] | 8
|
2022-02-24T00:07:32.000Z
|
2022-03-21T01:59:46.000Z
|
from .stack import CreateStackMutation, PatchStackMutation
from .email import SendTestEmail
| 30.666667
| 58
| 0.869565
| 9
| 92
| 8.888889
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097826
| 92
| 2
| 59
| 46
| 0.963855
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
189325089f8bfa6c355a71f0c6b1c9060029a4be
| 203
|
py
|
Python
|
main/admin.py
|
Mohsen7640/PicoSchool
|
69d8658111b27e843928f2458f9a7108ab8bd4e0
|
[
"Apache-2.0"
] | 48
|
2022-02-05T10:07:33.000Z
|
2022-03-30T18:14:45.000Z
|
main/admin.py
|
Mohsen7640/PicoSchool
|
69d8658111b27e843928f2458f9a7108ab8bd4e0
|
[
"Apache-2.0"
] | 2
|
2022-02-10T11:56:42.000Z
|
2022-02-16T13:27:26.000Z
|
main/admin.py
|
Mohsen7640/PicoSchool
|
69d8658111b27e843928f2458f9a7108ab8bd4e0
|
[
"Apache-2.0"
] | 14
|
2022-02-05T13:46:17.000Z
|
2022-02-23T16:23:12.000Z
|
from django.contrib import admin
from main.models import SiteSetting

# Expose SiteSetting in the Django admin using the default ModelAdmin.
# (Removed the commented-out SiteSettingAdmin stub: dead code with an empty
# list_display; reintroduce a real ModelAdmin subclass when customization
# is actually needed.)
admin.site.register(SiteSetting)
| 22.555556
| 43
| 0.773399
| 24
| 203
| 6.5
| 0.708333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133005
| 203
| 8
| 44
| 25.375
| 0.886364
| 0.453202
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
18a1507671f51058e53f4d9b618f2cb204ff2eff
| 122
|
py
|
Python
|
Python3/LambdasAndBuilt-InFunctions/all.py
|
norbertosanchezdichi/TIL
|
2e9719ddd288022f53b094a42679e849bdbcc625
|
[
"MIT"
] | null | null | null |
Python3/LambdasAndBuilt-InFunctions/all.py
|
norbertosanchezdichi/TIL
|
2e9719ddd288022f53b094a42679e849bdbcc625
|
[
"MIT"
] | null | null | null |
Python3/LambdasAndBuilt-InFunctions/all.py
|
norbertosanchezdichi/TIL
|
2e9719ddd288022f53b094a42679e849bdbcc625
|
[
"MIT"
] | null | null | null |
# Demonstrations of the built-in all(): True only when every element is truthy.
print(all([1, 2, 3, 4]))
# Filters 'eco' down to its vowels; each yielded character is a non-empty
# (truthy) string, so all() returns True.
print(all(c for c in 'eco' if c in 'aeiou'))
# Not every number in the list is even, so this prints False.
print(all(n % 2 == 0 for n in [2, 3, 4, 5]))
| 40.666667
| 53
| 0.57377
| 29
| 122
| 2.413793
| 0.517241
| 0.342857
| 0.085714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09901
| 0.172131
| 122
| 3
| 54
| 40.666667
| 0.594059
| 0
| 0
| 0
| 0
| 0
| 0.065041
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
18bf31427d30b06e6cdb94c0df0f49733d1e6712
| 61
|
py
|
Python
|
users/my_scripts/my_utils.py
|
LeonardoCruzx/Projeto_rede_social
|
5f6fc9fe4d1fbc1cfea696a4ff1d19fd98697bc0
|
[
"CC0-1.0"
] | null | null | null |
users/my_scripts/my_utils.py
|
LeonardoCruzx/Projeto_rede_social
|
5f6fc9fe4d1fbc1cfea696a4ff1d19fd98697bc0
|
[
"CC0-1.0"
] | 7
|
2020-06-05T20:31:01.000Z
|
2021-09-22T18:22:45.000Z
|
users/my_scripts/my_utils.py
|
LeonardoCruzx/Projeto_rede_social
|
5f6fc9fe4d1fbc1cfea696a4ff1d19fd98697bc0
|
[
"CC0-1.0"
] | null | null | null |
def make_nickname(email):
    """Return the local part of *email* — everything before the first '@'.

    If there is no '@', the whole string is returned unchanged.
    """
    local_part, _, _ = email.partition("@")
    return local_part
| 8.714286
| 30
| 0.622951
| 8
| 61
| 4.625
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020408
| 0.196721
| 61
| 6
| 31
| 10.166667
| 0.734694
| 0
| 0
| 0
| 0
| 0
| 0.017544
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
18c64718cb7b0f5c323e451bf2dc438161785ce9
| 1,808
|
py
|
Python
|
test/test_support_project_ticket_api.py
|
hyperonecom/h1-client-python
|
4ce355852ba3120ec1b8f509ab5894a5c08da730
|
[
"MIT"
] | null | null | null |
test/test_support_project_ticket_api.py
|
hyperonecom/h1-client-python
|
4ce355852ba3120ec1b8f509ab5894a5c08da730
|
[
"MIT"
] | null | null | null |
test/test_support_project_ticket_api.py
|
hyperonecom/h1-client-python
|
4ce355852ba3120ec1b8f509ab5894a5c08da730
|
[
"MIT"
] | null | null | null |
"""
HyperOne
HyperOne API # noqa: E501
The version of the OpenAPI document: 0.1.0
Generated by: https://openapi-generator.tech
"""
import unittest
import h1
from h1.api.support_project_ticket_api import SupportProjectTicketApi # noqa: E501
class TestSupportProjectTicketApi(unittest.TestCase):
    """SupportProjectTicketApi unit test stubs"""
    # Auto-generated by openapi-generator; each test body is a TODO stub.

    def setUp(self):
        # A fresh API client for every test case.
        self.api = SupportProjectTicketApi()  # noqa: E501

    def tearDown(self):
        pass

    def test_support_project_ticket_close(self):
        """Test case for support_project_ticket_close

        Close support/ticket  # noqa: E501
        """
        pass

    def test_support_project_ticket_create(self):
        """Test case for support_project_ticket_create

        Create support/ticket  # noqa: E501
        """
        pass

    def test_support_project_ticket_get(self):
        """Test case for support_project_ticket_get

        Get support/ticket  # noqa: E501
        """
        pass

    def test_support_project_ticket_list(self):
        """Test case for support_project_ticket_list

        List support/ticket  # noqa: E501
        """
        pass

    def test_support_project_ticket_message_create(self):
        """Test case for support_project_ticket_message_create

        Create support/ticket.message  # noqa: E501
        """
        pass

    def test_support_project_ticket_message_get(self):
        """Test case for support_project_ticket_message_get

        Get support/ticket.message  # noqa: E501
        """
        pass

    def test_support_project_ticket_message_list(self):
        """Test case for support_project_ticket_message_list

        List support/ticket.message  # noqa: E501
        """
        pass
# Allow running the generated stubs directly as a test script.
if __name__ == '__main__':
    unittest.main()
| 23.179487
| 83
| 0.659845
| 209
| 1,808
| 5.392345
| 0.215311
| 0.186335
| 0.266193
| 0.111801
| 0.634428
| 0.622893
| 0.566992
| 0.535936
| 0.307897
| 0.301686
| 0
| 0.026296
| 0.263827
| 1,808
| 77
| 84
| 23.480519
| 0.820436
| 0.431969
| 0
| 0.333333
| 1
| 0
| 0.00949
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0.333333
| 0.125
| 0
| 0.541667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
e15440a4f90711592e5a70f43941ed6aa49eef18
| 65
|
py
|
Python
|
24. entrada estandar (raw_input).py
|
JSNavas/CursoPython2.7
|
d1f9170dbf897b6eb729f9696a208880e33c550b
|
[
"MIT"
] | null | null | null |
24. entrada estandar (raw_input).py
|
JSNavas/CursoPython2.7
|
d1f9170dbf897b6eb729f9696a208880e33c550b
|
[
"MIT"
] | null | null | null |
24. entrada estandar (raw_input).py
|
JSNavas/CursoPython2.7
|
d1f9170dbf897b6eb729f9696a208880e33c550b
|
[
"MIT"
] | null | null | null |
edad = raw_input("Intruduce tu edad: ")
print "Tu edad es:",edad
| 21.666667
| 39
| 0.692308
| 11
| 65
| 4
| 0.636364
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 65
| 3
| 40
| 21.666667
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
e15bdd3bac2181c09df8388f244e0e4c6f6ece77
| 30
|
py
|
Python
|
text/_elisp/window/bar/mode.py
|
jedhsu/text
|
8525b602d304ac571a629104c48703443244545c
|
[
"Apache-2.0"
] | null | null | null |
text/_elisp/window/bar/mode.py
|
jedhsu/text
|
8525b602d304ac571a629104c48703443244545c
|
[
"Apache-2.0"
] | null | null | null |
text/_elisp/window/bar/mode.py
|
jedhsu/text
|
8525b602d304ac571a629104c48703443244545c
|
[
"Apache-2.0"
] | null | null | null |
class WindowModeBar:
    """Placeholder for the window mode bar; carries no behaviour yet."""

    pass
| 10
| 20
| 0.733333
| 3
| 30
| 7.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.233333
| 30
| 2
| 21
| 15
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
e198089b4026f13d542d7bdcc0a9702bf0b879b0
| 210
|
py
|
Python
|
gamegym/algorithms/__init__.py
|
spirali/gamegym
|
8c2dbb7969cabae9ca86c0dab74c6ddc5fbd21bf
|
[
"MIT"
] | 49
|
2018-10-05T20:52:31.000Z
|
2021-12-29T05:59:27.000Z
|
gamegym/algorithms/__init__.py
|
spirali/gamegym
|
8c2dbb7969cabae9ca86c0dab74c6ddc5fbd21bf
|
[
"MIT"
] | 8
|
2018-10-07T12:11:20.000Z
|
2019-02-03T10:47:25.000Z
|
gamegym/algorithms/__init__.py
|
spirali/gamegym
|
8c2dbb7969cabae9ca86c0dab74c6ddc5fbd21bf
|
[
"MIT"
] | 4
|
2018-10-07T10:27:43.000Z
|
2020-02-13T18:47:37.000Z
|
#!/usr/bin/python3
from .bestresponse import BestResponse, exploitability, ApproxBestResponse, approx_exploitability
from .mccfr import OutcomeMCCFR, RegretStrategy
from .infosets import InformationSetSampler
| 35
| 97
| 0.857143
| 20
| 210
| 8.95
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005208
| 0.085714
| 210
| 5
| 98
| 42
| 0.927083
| 0.080952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e1a58f73114794dc5df5ca81f9b12e77ef6979e2
| 12,140
|
py
|
Python
|
castle/test/client_test.py
|
castle/castle-python
|
a3732f975192fd2f8a9e840d86da3f0f978874b3
|
[
"MIT"
] | 3
|
2021-01-08T16:44:16.000Z
|
2021-06-13T14:57:15.000Z
|
castle/test/client_test.py
|
castle/castle-python
|
a3732f975192fd2f8a9e840d86da3f0f978874b3
|
[
"MIT"
] | 20
|
2018-01-12T14:33:46.000Z
|
2022-03-14T20:21:07.000Z
|
castle/test/client_test.py
|
castle/castle-python
|
a3732f975192fd2f8a9e840d86da3f0f978874b3
|
[
"MIT"
] | 6
|
2018-01-30T16:42:03.000Z
|
2022-03-09T12:37:21.000Z
|
from collections import namedtuple
import responses
from castle.api_request import APIRequest
from castle.client import Client
from castle.configuration import configuration
from castle.errors import ImpersonationFailed
from castle.failover.strategy import FailoverStrategy
from castle.test import unittest
from castle.verdict import Verdict
from castle.version import VERSION
def request():
    """Build a fake framework request object for the client tests.

    NOTE(review): the attributes are assigned on the namedtuple *class*
    itself rather than on an instance — the tests only read them, so this
    works, but it is unusual; confirm before reusing this helper elsewhere.
    """
    fake = namedtuple('Request', ['ip', 'environ', 'COOKIES'])
    headers = {
        'HTTP_X_FORWARDED_FOR': '217.144.192.112',
        'HTTP-User-Agent': 'test',
        'HTTP_X_CASTLE_CLIENT_ID': '1234',
    }
    fake.COOKIES = {}
    fake.environ = headers
    fake.ip = '217.144.192.112'
    return fake
class ClientTestCase(unittest.TestCase):
    """Tests for castle.client.Client.

    Each HTTP-backed test registers a mocked endpoint with the `responses`
    library, builds a Client from the fake request() helper, and asserts on
    the value the client method returns (including the failover dict merged
    in on errors or when tracking is disabled).
    """

    def setUp(self):
        # Every Client call requires an API secret on the shared config.
        configuration.api_secret = 'test'

    def tearDown(self):
        # Reset so other test modules see an unconfigured state.
        configuration.api_secret = None

    def test_init(self):
        # from_request should normalize the fake request's environ into
        # this context dict and default do_not_track to False.
        context = {
            'active': True,
            'client_id': '1234',
            'headers': {
                'User-Agent': 'test',
                'X-Forwarded-For': '217.144.192.112',
                'X-Castle-Client-Id': '1234'
            },
            'ip': '217.144.192.112',
            'library': {'name': 'castle-python', 'version': VERSION},
            'user_agent': 'test'
        }
        client = Client.from_request(request(), {})
        self.assertEqual(client.do_not_track, False)
        self.assertEqual(client.context, context)
        self.assertIsInstance(client.api, APIRequest)

    @responses.activate
    def test_start_impersonation(self):
        # A 200 with {'success': True} is passed straight through.
        response_text = {'success': True}
        responses.add(
            responses.POST,
            'https://api.castle.io/v1/impersonate',
            json=response_text,
            status=200
        )
        client = Client.from_request(request(), {})
        options = {'properties': {'impersonator': 'admin'}, 'user_id': '1234'}
        self.assertEqual(client.start_impersonation(options), response_text)

    @responses.activate
    def test_end_impersonation(self):
        # Same as start, but the API call is a DELETE.
        response_text = {'success': True}
        responses.add(
            responses.DELETE,
            'https://api.castle.io/v1/impersonate',
            json=response_text,
            status=200
        )
        client = Client.from_request(request(), {})
        options = {'properties': {'impersonator': 'admin'}, 'user_id': '1234'}
        self.assertEqual(client.end_impersonation(options), response_text)

    @responses.activate
    def test_start_impersonation_failed(self):
        # A 200 whose body lacks 'success' raises ImpersonationFailed.
        response_text = {}
        responses.add(
            responses.POST,
            'https://api.castle.io/v1/impersonate',
            json=response_text,
            status=200
        )
        client = Client.from_request(request(), {})
        options = {'properties': {'impersonator': 'admin'}, 'user_id': '1234'}
        with self.assertRaises(ImpersonationFailed):
            client.start_impersonation(options)

    @responses.activate
    def test_end_impersonation_failed(self):
        # DELETE variant of the failure case above.
        response_text = {}
        responses.add(
            responses.DELETE,
            'https://api.castle.io/v1/impersonate',
            json=response_text,
            status=200
        )
        client = Client.from_request(request(), {})
        options = {'properties': {'impersonator': 'admin'}, 'user_id': '1234'}
        with self.assertRaises(ImpersonationFailed):
            client.end_impersonation(options)

    @responses.activate
    def test_authenticate_tracked_true(self):
        # Successful authenticate: API body is returned with
        # failover=False / failover_reason=None merged in.
        response_text = {'action': Verdict.ALLOW.value, 'user_id': '1234'}
        responses.add(
            responses.POST,
            'https://api.castle.io/v1/authenticate',
            json=response_text,
            status=200
        )
        client = Client.from_request(request(), {})
        options = {'event': '$login.succeeded', 'user_id': '1234'}
        # Mutate the same dict the mock serves; the client's merged result
        # must equal it.
        response_text.update(failover=False, failover_reason=None)
        self.assertEqual(client.authenticate(options), response_text)

    @responses.activate
    def test_authenticate_tracked_true_status_500(self):
        # A 500 triggers the failover strategy: allow verdict plus
        # failover metadata naming the server error.
        response_text = {
            'action': Verdict.ALLOW.value,
            'user_id': '1234',
            'failover': True,
            'failover_reason': 'InternalServerError'
        }
        responses.add(
            responses.POST,
            'https://api.castle.io/v1/authenticate',
            json='authenticate',
            status=500
        )
        client = Client.from_request(request(), {})
        options = {'event': '$login.succeeded', 'user_id': '1234'}
        self.assertEqual(client.authenticate(options), response_text)

    def test_authenticate_tracked_false(self):
        # With tracking disabled no HTTP call is made; a synthetic
        # failover response is returned instead.
        response_text = {
            'action': Verdict.ALLOW.value,
            'user_id': '1234',
            'failover': True,
            'failover_reason': 'Castle set to do not track.'
        }
        client = Client.from_request(request(), {})
        client.disable_tracking()
        options = {'event': '$login.succeeded', 'user_id': '1234'}
        self.assertEqual(client.authenticate(options), response_text)

    @responses.activate
    def test_track_tracked_true(self):
        # track() returns the API body as-is on success.
        response_text = 'track'
        responses.add(
            responses.POST,
            'https://api.castle.io/v1/track',
            json=response_text,
            status=200
        )
        client = Client.from_request(request(), {})
        options = {'event': '$login.succeeded', 'user_id': '1234'}
        self.assertEqual(client.track(options), response_text)

    def test_track_tracked_false(self):
        # track() is a no-op (returns None) when tracking is disabled.
        client = Client.from_request(request(), {})
        client.disable_tracking()
        self.assertEqual(client.track({}), None)

    @responses.activate
    def test_filter_tracked_true(self):
        # filter() mirrors authenticate(): body plus failover=False merge.
        response_text = {'action': Verdict.ALLOW.value, 'user_id': '1234'}
        responses.add(
            responses.POST,
            'https://api.castle.io/v1/filter',
            json=response_text,
            status=200
        )
        client = Client.from_request(request(), {})
        options = {
            'request_token': '7e51335b-f4bc-4bc7-875d-b713fb61eb23-bf021a3022a1a302',
            'event': '$login',
            'status': '$succeeded',
            'user': {'id': '1234'}
        }
        response_text.update(failover=False, failover_reason=None)
        self.assertEqual(client.filter(options), response_text)

    @responses.activate
    def test_filter_tracked_true_status_500(self):
        # filter() failover on server error.
        response_text = {
            'action': Verdict.ALLOW.value,
            'user_id': '1234',
            'failover': True,
            'failover_reason': 'InternalServerError'
        }
        responses.add(
            responses.POST,
            'https://api.castle.io/v1/filter',
            json='filter',
            status=500
        )
        client = Client.from_request(request(), {})
        options = {
            'request_token': '7e51335b-f4bc-4bc7-875d-b713fb61eb23-bf021a3022a1a302',
            'event': '$login',
            'status': '$succeeded',
            'user': {'id': '1234'}
        }
        self.assertEqual(client.filter(options), response_text)

    def test_filter_tracked_false(self):
        # filter() synthesizes a failover response when tracking is off.
        response_text = {
            'action': Verdict.ALLOW.value,
            'user_id': '1234',
            'failover': True,
            'failover_reason': 'Castle set to do not track.'
        }
        client = Client.from_request(request(), {})
        client.disable_tracking()
        options = {
            'request_token': '7e51335b-f4bc-4bc7-875d-b713fb61eb23-bf021a3022a1a302',
            'event': '$login',
            'status': '$succeeded',
            'user': {'id': '1234'}
        }
        self.assertEqual(client.filter(options), response_text)

    @responses.activate
    def test_log_tracked_true(self):
        # log() returns the API body as-is on success.
        response_text = 'log'
        responses.add(
            responses.POST,
            'https://api.castle.io/v1/log',
            json=response_text,
            status=200
        )
        client = Client.from_request(request(), {})
        options = {
            'request_token': '7e51335b-f4bc-4bc7-875d-b713fb61eb23-bf021a3022a1a302',
            'event': '$login',
            'status': '$succeeded',
            'user': {'id': '1234'}
        }
        self.assertEqual(client.log(options), response_text)

    def test_log_tracked_false(self):
        # log() is a no-op (returns None) when tracking is disabled.
        client = Client.from_request(request(), {})
        client.disable_tracking()
        self.assertEqual(client.log({}), None)

    @responses.activate
    def test_risk_tracked_true(self):
        # risk() mirrors filter()/authenticate() on success.
        response_text = {'action': Verdict.ALLOW.value, 'user_id': '1234'}
        responses.add(
            responses.POST,
            'https://api.castle.io/v1/risk',
            json=response_text,
            status=200
        )
        client = Client.from_request(request(), {})
        options = {
            'request_token': '7e51335b-f4bc-4bc7-875d-b713fb61eb23-bf021a3022a1a302',
            'event': '$login',
            'status': '$succeeded',
            'user': {'id': '1234'}
        }
        response_text.update(failover=False, failover_reason=None)
        self.assertEqual(client.risk(options), response_text)

    @responses.activate
    def test_risk_tracked_true_status_500(self):
        # risk() failover on server error.
        response_text = {
            'action': Verdict.ALLOW.value,
            'user_id': '1234',
            'failover': True,
            'failover_reason': 'InternalServerError'
        }
        responses.add(
            responses.POST,
            'https://api.castle.io/v1/risk',
            json='risk',
            status=500
        )
        client = Client.from_request(request(), {})
        options = {
            'request_token': '7e51335b-f4bc-4bc7-875d-b713fb61eb23-bf021a3022a1a302',
            'event': '$login',
            'status': '$succeeded',
            'user': {'id': '1234'}
        }
        self.assertEqual(client.risk(options), response_text)

    def test_risk_tracked_false(self):
        # risk() synthesizes a failover response when tracking is off.
        response_text = {
            'action': Verdict.ALLOW.value,
            'user_id': '1234',
            'failover': True,
            'failover_reason': 'Castle set to do not track.'
        }
        client = Client.from_request(request(), {})
        client.disable_tracking()
        options = {
            'request_token': '7e51335b-f4bc-4bc7-875d-b713fb61eb23-bf021a3022a1a302',
            'event': '$login',
            'status': '$succeeded',
            'user': {'id': '1234'}
        }
        self.assertEqual(client.risk(options), response_text)

    def test_disable_tracking(self):
        # disable_tracking flips do_not_track to True.
        client = Client.from_request(request(), {})
        client.disable_tracking()
        self.assertEqual(client.do_not_track, True)

    def test_enable_tracking(self):
        # enable_tracking restores do_not_track to False after a disable.
        client = Client.from_request(request(), {})
        client.disable_tracking()
        self.assertEqual(client.do_not_track, True)
        client.enable_tracking()
        self.assertEqual(client.do_not_track, False)

    def test_tracked_when_do_not_track_false(self):
        client = Client.from_request(request(), {})
        self.assertEqual(client.tracked(), True)

    def test_tracked_when_do_not_track_true(self):
        # The do_not_track flag can also be set via the options dict.
        client = Client.from_request(request(), {'do_not_track': True})
        self.assertEqual(client.tracked(), False)

    def test_failover_strategy_not_throw(self):
        # Default strategy: build an allow-verdict failover dict naming the
        # exception class instead of raising.
        options = {'user_id': '1234'}
        self.assertEqual(
            Client.failover_response_or_raise(options.get('user_id'), Exception()),
            {
                'action': Verdict.ALLOW.value,
                'user_id': '1234',
                'failover': True,
                'failover_reason': 'Exception'
            }
        )

    def test_failover_strategy_throw(self):
        # With the THROW strategy the original exception propagates.
        # NOTE(review): here the whole options dict is passed where the
        # sibling test passes options.get('user_id') — harmless since the
        # call raises before using it, but worth confirming intent.
        options = {'user_id': '1234'}
        configuration.failover_strategy = FailoverStrategy.THROW.value
        with self.assertRaises(Exception):
            Client.failover_response_or_raise(options, Exception())
        # Restore the default so later tests are unaffected.
        configuration.failover_strategy = FailoverStrategy.ALLOW.value
| 35.393586
| 85
| 0.583443
| 1,188
| 12,140
| 5.776094
| 0.098485
| 0.06995
| 0.039347
| 0.073739
| 0.834742
| 0.802681
| 0.762314
| 0.713203
| 0.670504
| 0.646313
| 0
| 0.050243
| 0.28682
| 12,140
| 342
| 86
| 35.497076
| 0.742319
| 0
| 0
| 0.632588
| 0
| 0
| 0.184267
| 0.032455
| 0
| 0
| 0
| 0
| 0.086262
| 1
| 0.086262
| false
| 0
| 0.031949
| 0
| 0.124601
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
bef36c3a88acdeb24125d1e296bb628b8b2cc5e9
| 57
|
py
|
Python
|
src/framework/__init__.py
|
KBraham/homehub
|
501b3a02d48453f1a444d1fa7d931ee8ac8202fd
|
[
"MIT"
] | null | null | null |
src/framework/__init__.py
|
KBraham/homehub
|
501b3a02d48453f1a444d1fa7d931ee8ac8202fd
|
[
"MIT"
] | null | null | null |
src/framework/__init__.py
|
KBraham/homehub
|
501b3a02d48453f1a444d1fa7d931ee8ac8202fd
|
[
"MIT"
] | null | null | null |
from .Router import Router
from .Service import Service
| 14.25
| 28
| 0.807018
| 8
| 57
| 5.75
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 57
| 3
| 29
| 19
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.