hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3f779a009a1f6373c0902f9a24aa1608d5bdf5ee
| 156
|
py
|
Python
|
tests/web_platform/CSS2/linebox/test_line_height_largest.py
|
fletchgraham/colosseum
|
77be4896ee52b8f5956a3d77b5f2ccd2c8608e8f
|
[
"BSD-3-Clause"
] | null | null | null |
tests/web_platform/CSS2/linebox/test_line_height_largest.py
|
fletchgraham/colosseum
|
77be4896ee52b8f5956a3d77b5f2ccd2c8608e8f
|
[
"BSD-3-Clause"
] | null | null | null |
tests/web_platform/CSS2/linebox/test_line_height_largest.py
|
fletchgraham/colosseum
|
77be4896ee52b8f5956a3d77b5f2ccd2c8608e8f
|
[
"BSD-3-Clause"
] | 1
|
2020-01-16T01:56:41.000Z
|
2020-01-16T01:56:41.000Z
|
from tests.utils import W3CTestCase
class TestLineHeightLargest(W3CTestCase):
vars().update(W3CTestCase.find_tests(__file__, 'line-height-largest-'))
| 26
| 75
| 0.794872
| 17
| 156
| 7
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021127
| 0.089744
| 156
| 5
| 76
| 31.2
| 0.816901
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3f801e53fda9242f539b0124c1d1e55369d9aa56
| 22
|
py
|
Python
|
testDir1/testPy.py
|
nokn0k/Net_AutomationScripts
|
06664d984c5be64a7fe080009b09c4fcaa6ec210
|
[
"Apache-2.0"
] | null | null | null |
testDir1/testPy.py
|
nokn0k/Net_AutomationScripts
|
06664d984c5be64a7fe080009b09c4fcaa6ec210
|
[
"Apache-2.0"
] | null | null | null |
testDir1/testPy.py
|
nokn0k/Net_AutomationScripts
|
06664d984c5be64a7fe080009b09c4fcaa6ec210
|
[
"Apache-2.0"
] | null | null | null |
print("print every")
| 7.333333
| 20
| 0.681818
| 3
| 22
| 5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 22
| 2
| 21
| 11
| 0.789474
| 0
| 0
| 0
| 0
| 0
| 0.52381
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
3fbf7289e794bbd1a256f47eb03cbbf9f7c961ec
| 157
|
py
|
Python
|
zoom_cart/zoom_cart/doctype/otp_password_verification/test_otp_password_verification.py
|
fadilsid/zoom_cart
|
df57531c685f4e7243baf871d94b496e7a397745
|
[
"MIT"
] | null | null | null |
zoom_cart/zoom_cart/doctype/otp_password_verification/test_otp_password_verification.py
|
fadilsid/zoom_cart
|
df57531c685f4e7243baf871d94b496e7a397745
|
[
"MIT"
] | null | null | null |
zoom_cart/zoom_cart/doctype/otp_password_verification/test_otp_password_verification.py
|
fadilsid/zoom_cart
|
df57531c685f4e7243baf871d94b496e7a397745
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2021, Tridz and Contributors
# See license.txt
# import frappe
import unittest
class TestOTPPasswordVerification(unittest.TestCase):
pass
| 17.444444
| 53
| 0.796178
| 18
| 157
| 6.944444
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029412
| 0.133758
| 157
| 8
| 54
| 19.625
| 0.889706
| 0.458599
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.666667
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
3fd004653aad104125fb5eac98aa642905b5f2d5
| 195
|
py
|
Python
|
proconfig/utils/__init__.py
|
astrong19/gs-pro-config
|
f43bcd78c1f44b424b308336bfd54150b6b6c17f
|
[
"MIT"
] | null | null | null |
proconfig/utils/__init__.py
|
astrong19/gs-pro-config
|
f43bcd78c1f44b424b308336bfd54150b6b6c17f
|
[
"MIT"
] | 4
|
2020-03-03T17:26:19.000Z
|
2020-03-04T09:58:05.000Z
|
proconfig/utils/__init__.py
|
gfw-api/gs-pro-config
|
cf43fb977189a025a0b5b6e1fe38616845021b03
|
[
"MIT"
] | null | null | null |
"""GS Utilities Config Init"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from proconfig.utils.google_sheet import GoogleSheet
| 24.375
| 52
| 0.846154
| 25
| 195
| 6
| 0.64
| 0.2
| 0.32
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112821
| 195
| 7
| 53
| 27.857143
| 0.867052
| 0.123077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.25
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
3fd8282abcbe89ab7cec165c818d8df485fb3393
| 23
|
py
|
Python
|
python/testData/joinLines/BinaryOpBelow.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/joinLines/BinaryOpBelow.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/joinLines/BinaryOpBelow.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
a = <caret>1 \
+ 2
| 7.666667
| 14
| 0.347826
| 4
| 23
| 2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 0.434783
| 23
| 2
| 15
| 11.5
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3fde2231ebccde26c6ce06c454e6ef4adf5aed35
| 94
|
py
|
Python
|
code/abc083_b_05.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | 3
|
2019-08-16T16:55:48.000Z
|
2021-04-11T10:21:40.000Z
|
code/abc083_b_05.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | null | null | null |
code/abc083_b_05.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | null | null | null |
n,a,b=map(int,input().split())
print(sum(i for i in range(n+1) if a<=sum(map(int,str(i)))<=b))
| 47
| 63
| 0.617021
| 24
| 94
| 2.416667
| 0.666667
| 0.206897
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011494
| 0.074468
| 94
| 2
| 63
| 47
| 0.655172
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
3fe375cc97e82ee25611fd73f202af8d35318c06
| 104
|
py
|
Python
|
heat/tests/examples/__init__.py
|
devcamcar/heat
|
0f1bd5d29102318e62b5a10281d809807bd3b163
|
[
"Apache-2.0"
] | 1
|
2015-05-11T04:54:30.000Z
|
2015-05-11T04:54:30.000Z
|
heat/tests/examples/__init__.py
|
CiscoSystems/heat
|
1b609f3c0621c44e4988a166a38f36c2b57eb4c6
|
[
"Apache-2.0"
] | null | null | null |
heat/tests/examples/__init__.py
|
CiscoSystems/heat
|
1b609f3c0621c44e4988a166a38f36c2b57eb4c6
|
[
"Apache-2.0"
] | null | null | null |
def setup():
print "package setup complete"
def teardown():
print "package teardown complete"
| 14.857143
| 37
| 0.692308
| 12
| 104
| 6
| 0.5
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.211538
| 104
| 6
| 38
| 17.333333
| 0.878049
| 0
| 0
| 0
| 0
| 0
| 0.451923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
3fecece9f0a43696ee5d7ee00a6490342cfb92ae
| 301
|
py
|
Python
|
src/samplics/sae/__init__.py
|
samplics-org/samplics
|
b5f49d075194cc24208f567e6a00e86aa24bec26
|
[
"MIT"
] | 14
|
2021-05-03T19:59:58.000Z
|
2022-03-27T18:58:36.000Z
|
src/samplics/sae/__init__.py
|
survey-methods/samplics
|
4a0f6ea6168afb74c2ea2c958fb76c7d27dfba83
|
[
"MIT"
] | 8
|
2020-06-04T16:39:50.000Z
|
2021-04-22T22:29:41.000Z
|
src/samplics/sae/__init__.py
|
survey-methods/samplics
|
4a0f6ea6168afb74c2ea2c958fb76c7d27dfba83
|
[
"MIT"
] | 2
|
2020-05-28T20:09:48.000Z
|
2021-01-19T17:34:22.000Z
|
from samplics.sae.eb_unit_model import EbUnitModel
from samplics.sae.eblup_area_model import EblupAreaModel
from samplics.sae.eblup_unit_model import EblupUnitModel
from samplics.sae.robust_unit_model import EllUnitModel
__all__ = ["EblupAreaModel", "EblupUnitModel", "EbUnitModel", "EllUnitModel"]
| 37.625
| 77
| 0.847176
| 37
| 301
| 6.567568
| 0.405405
| 0.197531
| 0.246914
| 0.164609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.079734
| 301
| 7
| 78
| 43
| 0.877256
| 0
| 0
| 0
| 0
| 0
| 0.169435
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3ff69e899e591a9d31a930b40001b0d1ad542afe
| 7,330
|
py
|
Python
|
code/Test/test_BaseAttack.py
|
thrimbor/ID2T
|
bcf7b3aa302acef02c724ef422d7d16707971fdf
|
[
"MIT"
] | 1
|
2022-02-15T06:41:35.000Z
|
2022-02-15T06:41:35.000Z
|
code/Test/test_BaseAttack.py
|
thrimbor/ID2T
|
bcf7b3aa302acef02c724ef422d7d16707971fdf
|
[
"MIT"
] | 24
|
2018-11-08T16:33:06.000Z
|
2018-11-08T16:36:02.000Z
|
code/Test/test_BaseAttack.py
|
Trace-Share/ID2T
|
ada96c6ba06bc1e52516ada7f7447eb3ea2791c7
|
[
"MIT"
] | null | null | null |
import unittest
import Attack.BaseAttack as BAtk
# TODO: improve coverage
class TestBaseAttack(unittest.TestCase):
def test_is_mac_address_valid(self):
self.assertTrue(BAtk.BaseAttack._is_mac_address("00:80:41:ae:fd:7e"))
def test_is_mac_address_invalid(self):
self.assertFalse(BAtk.BaseAttack._is_mac_address("00:80:41:aec:fd:7e"))
def test_is_mac_address_empty(self):
self.assertFalse(BAtk.BaseAttack._is_mac_address(""))
def test_is_mac_address_minus_valid(self):
self.assertTrue(BAtk.BaseAttack._is_mac_address("00-80-41-ae-fd-7e"))
def test_is_mac_address_minus_invalid(self):
self.assertFalse(BAtk.BaseAttack._is_mac_address("00-80-41-aec-fd-7e"))
def test_is_mac_address_list_valid(self):
self.assertTrue(BAtk.BaseAttack._is_mac_address(["00:80:41:ae:fd:7e", "00-80-41-ae-fd-7e"]))
def test_is_mac_address_list_invalid(self):
self.assertFalse(BAtk.BaseAttack._is_mac_address(["00:80:41:aec:fd:7e", "00-80-41-aec-fd-7e"]))
def test_is_ip_address_empty(self):
self.assertFalse(BAtk.BaseAttack._is_ip_address("")[0])
def test_is_ip_address_v4_valid(self):
self.assertTrue(BAtk.BaseAttack._is_ip_address("192.168.178.1")[0])
def test_is_ip_address_v4_invalid(self):
self.assertFalse(BAtk.BaseAttack._is_ip_address("192.1689.178.1")[0])
def test_is_ip_address_v6_valid(self):
self.assertTrue(BAtk.BaseAttack._is_ip_address("2001:0db8:85a3:08d3:1319:8a2e:0370:7344")[0])
def test_is_ip_address_v6_invalid(self):
self.assertFalse(BAtk.BaseAttack._is_ip_address("2001:0db8:85a3:08d3X:1319:8a2e:0370:7344")[0])
def test_is_ip_address_v6_shortened_valid(self):
self.assertTrue(BAtk.BaseAttack._is_ip_address("2001:0db8:85a3:08d3:1319::0370:7344")[0])
def test_is_ip_address_v6_shortened_invalid(self):
self.assertFalse(BAtk.BaseAttack._is_ip_address("2001::85a3:08d3X::8a2e:0370:7344")[0])
def test_is_ip_address_list_valid(self):
self.assertTrue(BAtk.BaseAttack._is_ip_address(["192.168.178.1", "192.168.178.10"])[0])
def test_is_ip_address_list_invalid(self):
self.assertFalse(BAtk.BaseAttack._is_ip_address(["192.1689.178.1", "192.168.178.10"])[0])
def test_is_ip_address_comma_list_valid(self):
self.assertTrue(BAtk.BaseAttack._is_ip_address("192.168.178.1,192.168.178.10")[0])
def test_is_ip_address_comma_list_invalid(self):
self.assertFalse(BAtk.BaseAttack._is_ip_address("192.168.178.1,192.1689.178.10")[0])
def test_is_port_none(self):
self.assertFalse(BAtk.BaseAttack._is_port(None))
def test_is_port_empty(self):
self.assertFalse(BAtk.BaseAttack._is_port(""))
def test_is_port_empty_list(self):
self.assertFalse(BAtk.BaseAttack._is_port([]))
def test_is_port_valid(self):
self.assertTrue(BAtk.BaseAttack._is_port(5000))
def test_is_port_invalid(self):
self.assertFalse(BAtk.BaseAttack._is_port(70000))
def test_is_port_string_valid(self):
self.assertTrue(BAtk.BaseAttack._is_port("5000"))
def test_is_port_string_invalid(self):
self.assertFalse(BAtk.BaseAttack._is_port("70000"))
def test_is_port_string_comma_valid(self):
self.assertTrue(BAtk.BaseAttack._is_port("5000, 4000, 3000"))
def test_is_port_string_comma_ivalid(self):
self.assertFalse(BAtk.BaseAttack._is_port("5000, 70000, 3000"))
def test_is_port_valid_list(self):
self.assertTrue(BAtk.BaseAttack._is_port([5000, 4000, 3000]))
def test_is_port_invalid_list(self):
self.assertFalse(BAtk.BaseAttack._is_port([5000, 70000, 0]))
def test_is_port_valid_string_list(self):
self.assertTrue(BAtk.BaseAttack._is_port(["5000", "4000", "3000"]))
def test_is_port_invalid_string_list(self):
self.assertFalse(BAtk.BaseAttack._is_port(["5000", "70000", "0"]))
def test_is_port_range_valid(self):
self.assertTrue(BAtk.BaseAttack._is_port("3000-5000"))
def test_is_port_range_invalid(self):
self.assertFalse(BAtk.BaseAttack._is_port("0-70000"))
def test_is_port_range_dots_valid(self):
self.assertTrue(BAtk.BaseAttack._is_port("3000...5000"))
def test_is_port_range_dots_invalid(self):
self.assertFalse(BAtk.BaseAttack._is_port("0...70000"))
def test_is_port_range_list_valid(self):
self.assertTrue(BAtk.BaseAttack._is_port(["3000-5000", "6000-7000"]))
def test_is_port_range_list_invalid(self):
self.assertFalse(BAtk.BaseAttack._is_port(["0-70000", "6000-7000"]))
def test_is_timestamp_valid(self):
self.assertTrue(BAtk.BaseAttack._is_timestamp("2018-01-25 23:54:00"))
def test_is_timestamp_invalid(self):
self.assertFalse(BAtk.BaseAttack._is_timestamp("20-0100-125 23c:54x:00a"))
def test_is_boolean_invalid(self):
self.assertFalse(BAtk.BaseAttack._is_boolean("42")[0])
def test_is_boolean_valid(self):
self.assertTrue(BAtk.BaseAttack._is_boolean(True))
self.assertTrue(BAtk.BaseAttack._is_boolean(False))
def test_is_boolean_valid_strings(self):
for value in {"y", "yes", "t", "true", "on", "1", "n", "no", "f", "false", "off", "0"}:
with self.subTest(value=value):
self.assertTrue(BAtk.BaseAttack._is_boolean(value))
def test_is_float_valid(self):
self.assertTrue(BAtk.BaseAttack._is_float(50.67)[0])
def test_is_float_invalid(self):
self.assertFalse(BAtk.BaseAttack._is_float("invalid")[0])
def test_is_domain_valid(self):
self.assertTrue(BAtk.BaseAttack._is_domain("foo://example.com:8042/over/there?name=ferret"))
def test_is_domain_invalid(self):
self.assertFalse(BAtk.BaseAttack._is_domain("this is not a valid domain, I guess, maybe, let's find out."))
def test_is_valid_ipaddress_valid(self):
self.assertTrue(BAtk.BaseAttack.is_valid_ip_address("192.168.178.42"))
def test_is_valid_ipaddress_invalid(self):
self.assertFalse(BAtk.BaseAttack.is_valid_ip_address("192.168.1789.42"))
def test_ip_src_dst_equal_check_equal(self):
with self.assertRaises(SystemExit):
BAtk.BaseAttack.ip_src_dst_equal_check("192.168.178.42", "192.168.178.42")
def test_ip_src_dst_equal_check_unequal(self):
BAtk.BaseAttack.ip_src_dst_equal_check("192.168.178.42", "192.168.178.43")
def test_clean_whitespaces(self):
self.assertEqual("a\nb\rc\td\'e", BAtk.BaseAttack.clean_white_spaces("a\\nb\\rc\\td\\\'e"))
def test_generate_random_ipv4_address(self):
ip_list = BAtk.BaseAttack.generate_random_ipv4_address("Unknown", 10)
for ip in ip_list:
with self.subTest(ip=ip):
self.assertTrue(BAtk.BaseAttack._is_ip_address(ip))
def test_generate_random_ipv6_address(self):
ip_list = BAtk.BaseAttack.generate_random_ipv6_address(10)
for ip in ip_list:
with self.subTest(ip=ip):
self.assertTrue(BAtk.BaseAttack._is_ip_address(ip))
def test_generate_random_mac_address(self):
mac_list = BAtk.BaseAttack.generate_random_mac_address(10)
for mac in mac_list:
with self.subTest(mac=mac):
self.assertTrue(BAtk.BaseAttack._is_mac_address(mac))
| 40.054645
| 115
| 0.713915
| 1,095
| 7,330
| 4.428311
| 0.128767
| 0.167457
| 0.171582
| 0.150134
| 0.844504
| 0.797072
| 0.760157
| 0.660755
| 0.582388
| 0.558878
| 0
| 0.087236
| 0.152387
| 7,330
| 182
| 116
| 40.274725
| 0.693224
| 0.003001
| 0
| 0.048387
| 0
| 0
| 0.117985
| 0.033945
| 0
| 0
| 0
| 0.005495
| 0.435484
| 1
| 0.435484
| false
| 0
| 0.016129
| 0
| 0.459677
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b762f1de3c9fd7f45ea9c42760a6fd356a999739
| 150
|
py
|
Python
|
pyapp_ext/aiosmtplib/checks.py
|
pyapp-org/pyapp.aiosmtplib
|
f928a7eb838b041d279d974f7cb555964764a410
|
[
"BSD-3-Clause"
] | null | null | null |
pyapp_ext/aiosmtplib/checks.py
|
pyapp-org/pyapp.aiosmtplib
|
f928a7eb838b041d279d974f7cb555964764a410
|
[
"BSD-3-Clause"
] | 20
|
2020-07-31T05:07:07.000Z
|
2022-02-11T19:02:03.000Z
|
pyapp_ext/aiosmtplib/checks.py
|
pyapp-org/pyapp.aiosmtplib
|
f928a7eb838b041d279d974f7cb555964764a410
|
[
"BSD-3-Clause"
] | null | null | null |
from pyapp.checks.registry import register
from .factory import factory
from .helpers import email_factory
register(factory)
register(email_factory)
| 21.428571
| 42
| 0.846667
| 20
| 150
| 6.25
| 0.45
| 0.192
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 150
| 6
| 43
| 25
| 0.925926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b7780b704ffbec8e4d70babf917bdc637fca5af6
| 177
|
py
|
Python
|
leetcode/python/383_ransom_note.py
|
VVKot/leetcode-solutions
|
7d6e599b223d89a7861929190be715d3b3604fa4
|
[
"MIT"
] | 4
|
2019-04-22T11:57:36.000Z
|
2019-10-29T09:12:56.000Z
|
leetcode/python/383_ransom_note.py
|
VVKot/coding-competitions
|
7d6e599b223d89a7861929190be715d3b3604fa4
|
[
"MIT"
] | null | null | null |
leetcode/python/383_ransom_note.py
|
VVKot/coding-competitions
|
7d6e599b223d89a7861929190be715d3b3604fa4
|
[
"MIT"
] | null | null | null |
from collections import Counter
class Solution:
def canConstruct(self, ransomNote: str, magazine: str) -> bool:
return not Counter(ransomNote) - Counter(magazine)
| 25.285714
| 67
| 0.728814
| 20
| 177
| 6.45
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186441
| 177
| 6
| 68
| 29.5
| 0.895833
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
b7a2b4abcefed7a6b977d99ad7c9545162ee2541
| 127
|
py
|
Python
|
titan/tools_pkg/vandelay/resources.py
|
mnieber/gen
|
65f8aa4fb671c4f90d5cbcb1a0e10290647a31d9
|
[
"MIT"
] | null | null | null |
titan/tools_pkg/vandelay/resources.py
|
mnieber/gen
|
65f8aa4fb671c4f90d5cbcb1a0e10290647a31d9
|
[
"MIT"
] | null | null | null |
titan/tools_pkg/vandelay/resources.py
|
mnieber/gen
|
65f8aa4fb671c4f90d5cbcb1a0e10290647a31d9
|
[
"MIT"
] | null | null | null |
from dataclasses import dataclass
from titan.project_pkg.service import Tool
@dataclass
class Vandelay(Tool):
type: str
| 14.111111
| 42
| 0.787402
| 17
| 127
| 5.823529
| 0.764706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15748
| 127
| 8
| 43
| 15.875
| 0.925234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b7a74d56831243f0d0664bdb032402cf46ca0d17
| 132
|
py
|
Python
|
biothings_resolver/__init__.py
|
zcqian/biothings_idlookup
|
870141c2f36b5cc52fd6f34bb4db8d501b85c879
|
[
"Apache-2.0"
] | 1
|
2020-12-18T01:16:27.000Z
|
2020-12-18T01:16:27.000Z
|
biothings_resolver/__init__.py
|
zcqian/biothings_idlookup
|
870141c2f36b5cc52fd6f34bb4db8d501b85c879
|
[
"Apache-2.0"
] | 5
|
2020-09-22T03:25:58.000Z
|
2020-10-27T05:51:06.000Z
|
biothings_resolver/__init__.py
|
zcqian/biothings_idlookup
|
870141c2f36b5cc52fd6f34bb4db8d501b85c879
|
[
"Apache-2.0"
] | 1
|
2020-11-24T18:07:18.000Z
|
2020-11-24T18:07:18.000Z
|
from .Resolver import Resolver
from .agents import IDLookupAgent, BioThingsAPIAgent
from .predefined_resolvers import ChemResolver
| 26.4
| 52
| 0.863636
| 14
| 132
| 8.071429
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106061
| 132
| 4
| 53
| 33
| 0.957627
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b7cc3a6f5a523978a3b64a64100de4a1b227ca2f
| 131
|
py
|
Python
|
costar_task_plan/python/costar_task_plan/datasets/__init__.py
|
cpaxton/costar_plan
|
be5c12f9d0e9d7078e6a5c283d3be059e7f3d040
|
[
"Apache-2.0"
] | 66
|
2018-10-31T04:58:53.000Z
|
2022-03-17T02:32:25.000Z
|
costar_task_plan/python/costar_task_plan/datasets/__init__.py
|
cpaxton/costar_plan
|
be5c12f9d0e9d7078e6a5c283d3be059e7f3d040
|
[
"Apache-2.0"
] | 8
|
2018-10-23T21:19:25.000Z
|
2018-12-03T02:08:41.000Z
|
costar_task_plan/python/costar_task_plan/datasets/__init__.py
|
cpaxton/costar_plan
|
be5c12f9d0e9d7078e6a5c283d3be059e7f3d040
|
[
"Apache-2.0"
] | 25
|
2018-10-19T00:54:17.000Z
|
2021-10-10T08:28:15.000Z
|
from tools import *
from dataset import Config
from dataset import Dataset
from visr import VisrDataset
from tom import TomDataset
| 21.833333
| 28
| 0.839695
| 19
| 131
| 5.789474
| 0.473684
| 0.2
| 0.309091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152672
| 131
| 5
| 29
| 26.2
| 0.990991
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4d0cf22d34a46b1381674d2178462313896c0232
| 96
|
py
|
Python
|
core/balance.py
|
git-shuvam/helper-stripe
|
386fb1aba2d709a4a90a65f3c98dc17907410ed4
|
[
"MIT"
] | null | null | null |
core/balance.py
|
git-shuvam/helper-stripe
|
386fb1aba2d709a4a90a65f3c98dc17907410ed4
|
[
"MIT"
] | null | null | null |
core/balance.py
|
git-shuvam/helper-stripe
|
386fb1aba2d709a4a90a65f3c98dc17907410ed4
|
[
"MIT"
] | null | null | null |
import stripe
class Balance:
def retrieve(self):
return stripe.Balance.retrieve()
| 13.714286
| 40
| 0.6875
| 11
| 96
| 6
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.229167
| 96
| 6
| 41
| 16
| 0.891892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
4d1c2f0d54aadc7f84fc3522aa0f6a95ebb0b213
| 75
|
py
|
Python
|
mtx.py
|
RafalKucharskiPK/PTVVisum_Python_Snippets
|
08700ac9ff8dd8d0db01ed9b65550a15105cff17
|
[
"MIT"
] | 3
|
2020-02-14T19:43:23.000Z
|
2021-04-26T06:39:33.000Z
|
mtx.py
|
RafalKucharskiPK/PTVVisum_Python_Snippets
|
08700ac9ff8dd8d0db01ed9b65550a15105cff17
|
[
"MIT"
] | null | null | null |
mtx.py
|
RafalKucharskiPK/PTVVisum_Python_Snippets
|
08700ac9ff8dd8d0db01ed9b65550a15105cff17
|
[
"MIT"
] | 2
|
2020-05-03T13:53:05.000Z
|
2020-10-13T17:11:02.000Z
|
import numpy as np
a = [[0]*3]*3
print(a)
a[0] = [0,0,0,0]
print(a)
| 10.714286
| 19
| 0.493333
| 18
| 75
| 2.055556
| 0.444444
| 0.216216
| 0.243243
| 0.216216
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0.253333
| 75
| 6
| 20
| 12.5
| 0.517857
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0.4
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4d4bc2489504add40129c1164cb9055d29d40c5d
| 37
|
py
|
Python
|
tests/__init__.py
|
KungfuKen/cookie1
|
fbfba55d08350dd42d6ffe78bb4dc0553be94220
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
KungfuKen/cookie1
|
fbfba55d08350dd42d6ffe78bb4dc0553be94220
|
[
"MIT"
] | 1
|
2020-11-12T18:01:45.000Z
|
2020-11-12T18:01:45.000Z
|
tests/__init__.py
|
KungfuKen/cookie1
|
fbfba55d08350dd42d6ffe78bb4dc0553be94220
|
[
"MIT"
] | null | null | null |
"""Unit test package for cookie1."""
| 18.5
| 36
| 0.675676
| 5
| 37
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03125
| 0.135135
| 37
| 1
| 37
| 37
| 0.75
| 0.810811
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4d64e76965099ecdaede15c64fe62fc49665fc1f
| 328
|
py
|
Python
|
Patterns/Preetam-Das.py
|
Princekumaraj/HACKTOBERFEST2021_PATTERN
|
44dde2261be002d5d0664eb023045e34848e686b
|
[
"MIT"
] | 2
|
2021-10-03T13:02:28.000Z
|
2021-10-03T15:24:50.000Z
|
Patterns/Preetam-Das.py
|
Princekumaraj/HACKTOBERFEST2021_PATTERN
|
44dde2261be002d5d0664eb023045e34848e686b
|
[
"MIT"
] | 1
|
2021-10-01T11:52:40.000Z
|
2021-10-01T11:52:40.000Z
|
Patterns/Preetam-Das.py
|
Princekumaraj/HACKTOBERFEST2021_PATTERN
|
44dde2261be002d5d0664eb023045e34848e686b
|
[
"MIT"
] | 1
|
2021-10-09T05:48:05.000Z
|
2021-10-09T05:48:05.000Z
|
n = 15 # change the value to chenge the size
print()
for i in range(0, n):
print('*'*(i+1),end='')
print(' '*(n - i),end='')
print(' '*(n - i - 1),end='')
print('*'*(i + 1),end='')
print('*'*(i),end='')
print(' '*(n - i),end='')
print(' '*(n - i - 1),end='')
print('*'*(i + 1),end='')
print()
| 27.333333
| 44
| 0.414634
| 50
| 328
| 2.72
| 0.32
| 0.470588
| 0.183824
| 0.367647
| 0.632353
| 0.573529
| 0.573529
| 0.573529
| 0.573529
| 0.573529
| 0
| 0.032389
| 0.246951
| 328
| 12
| 45
| 27.333333
| 0.518219
| 0.106707
| 0
| 0.75
| 0
| 0
| 0.027397
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.833333
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
4d71bb09ed2effb6de1a89d8305b3fc90a734b1f
| 105
|
py
|
Python
|
scraper_api/apps/heartbeat/views.py
|
machinia/scraper-api
|
a341611575b07acef3dbfa51d31513466639805f
|
[
"MIT"
] | null | null | null |
scraper_api/apps/heartbeat/views.py
|
machinia/scraper-api
|
a341611575b07acef3dbfa51d31513466639805f
|
[
"MIT"
] | 1
|
2019-08-06T13:10:48.000Z
|
2019-08-06T13:10:48.000Z
|
scraper_api/apps/heartbeat/views.py
|
machinia/scraper-api
|
a341611575b07acef3dbfa51d31513466639805f
|
[
"MIT"
] | null | null | null |
from django.http import JsonResponse
def heartbeat(request):
    """Liveness endpoint: always answer with a ``{"status": "OK"}`` JSON body."""
    payload = {"status": "OK"}
    return JsonResponse(payload)
| 17.5
| 41
| 0.733333
| 12
| 105
| 6.416667
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 105
| 5
| 42
| 21
| 0.855556
| 0
| 0
| 0
| 0
| 0
| 0.07619
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
4d793dc3463a2635e3f5bc9f8e608c5c988b7c96
| 255
|
py
|
Python
|
meiduo_mall/MeiDuo/apps/users/views.py
|
songhaokk/SH
|
e0263ba51aa81f79e3473314cdb952ff2aabb6cd
|
[
"MIT"
] | 1
|
2019-10-24T03:30:07.000Z
|
2019-10-24T03:30:07.000Z
|
meiduo_mall/MeiDuo/apps/users/views.py
|
songhaokk/SH
|
e0263ba51aa81f79e3473314cdb952ff2aabb6cd
|
[
"MIT"
] | null | null | null |
meiduo_mall/MeiDuo/apps/users/views.py
|
songhaokk/SH
|
e0263ba51aa81f79e3473314cdb952ff2aabb6cd
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
from django.views import View
def index(request):
    """Minimal index view that always responds with the plain text "OK"."""
    body = "OK"
    return HttpResponse(body)
class register(View):
    """Class-based view for the user registration page."""

    def get(self, request):
        """Handle GET requests with a placeholder "ok" response.

        Fix: Django's ``View.dispatch`` invokes handlers as
        ``get(self, request, *args, **kwargs)``; the previous signature
        ``get(self)`` raised ``TypeError`` on every dispatched request.
        """
        return HttpResponse("ok")
| 23.181818
| 36
| 0.741176
| 34
| 255
| 5.558824
| 0.617647
| 0.15873
| 0.21164
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172549
| 255
| 11
| 37
| 23.181818
| 0.895735
| 0.090196
| 0
| 0
| 0
| 0
| 0.017316
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.375
| 0.25
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
4d976cf3fbac50823be1666b4a80d50d4506fa81
| 267
|
py
|
Python
|
configs/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_AggressiveV2_Flat_ycbvPbr_SO/FlowNet512_1.5AugCosyAAEGray_AggressiveV2_Flat_Pbr_08_09GelatinBox_bop_test.py
|
THU-DA-6D-Pose-Group/self6dpp
|
c267cfa55e440e212136a5e9940598720fa21d16
|
[
"Apache-2.0"
] | 33
|
2021-12-15T07:11:47.000Z
|
2022-03-29T08:58:32.000Z
|
configs/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_AggressiveV2_Flat_ycbvPbr_SO/FlowNet512_1.5AugCosyAAEGray_AggressiveV2_Flat_Pbr_08_09GelatinBox_bop_test.py
|
THU-DA-6D-Pose-Group/self6dpp
|
c267cfa55e440e212136a5e9940598720fa21d16
|
[
"Apache-2.0"
] | 3
|
2021-12-15T11:39:54.000Z
|
2022-03-29T07:24:23.000Z
|
configs/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_AggressiveV2_Flat_ycbvPbr_SO/FlowNet512_1.5AugCosyAAEGray_AggressiveV2_Flat_Pbr_08_09GelatinBox_bop_test.py
|
THU-DA-6D-Pose-Group/self6dpp
|
c267cfa55e440e212136a5e9940598720fa21d16
|
[
"Apache-2.0"
] | null | null | null |
# Inherit all common settings from the 01_02MasterChefCan single-object test config.
_base_ = "./FlowNet512_1.5AugCosyAAEGray_AggressiveV2_Flat_Pbr_01_02MasterChefCan_bop_test.py"
# Where checkpoints/results for this object go.
OUTPUT_DIR = "output/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_AggressiveV2_Flat_ycbvPbr_SO/08_09GelatinBox"
# Override: train only on the PBR split for YCB-V object 009 (gelatin box).
DATASETS = dict(TRAIN=("ycbv_009_gelatin_box_train_pbr",))
| 66.75
| 112
| 0.876404
| 35
| 267
| 6.057143
| 0.771429
| 0.103774
| 0.245283
| 0.358491
| 0.396226
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089147
| 0.033708
| 267
| 3
| 113
| 89
| 0.732558
| 0
| 0
| 0
| 0
| 0
| 0.786517
| 0.786517
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4dc952855daa2efbdd1d6934f9504504427a10a7
| 114
|
py
|
Python
|
Logger/Log.py
|
Akash9859/flask-server-demo
|
fb744c690acecad0dc55def693ebb4f0af323370
|
[
"Apache-2.0"
] | null | null | null |
Logger/Log.py
|
Akash9859/flask-server-demo
|
fb744c690acecad0dc55def693ebb4f0af323370
|
[
"Apache-2.0"
] | null | null | null |
Logger/Log.py
|
Akash9859/flask-server-demo
|
fb744c690acecad0dc55def693ebb4f0af323370
|
[
"Apache-2.0"
] | null | null | null |
from Logger import LogConfig
class Log:
    """Thin facade over ``LogConfig`` for retrieving the shared logger."""

    @staticmethod
    def get_logger():
        """Return the application-wide logger configured in ``LogConfig``."""
        return LogConfig.logger
| 14.25
| 31
| 0.692982
| 13
| 114
| 6
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.254386
| 114
| 7
| 32
| 16.285714
| 0.917647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.2
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
4dd536037cb584fffcd35c5557fc336102e8f7d1
| 199
|
py
|
Python
|
app/gaia/tool/admin.py
|
mohit4/Gaia
|
b951c1e23351dc51e74c947f8e125028582d9406
|
[
"MIT"
] | null | null | null |
app/gaia/tool/admin.py
|
mohit4/Gaia
|
b951c1e23351dc51e74c947f8e125028582d9406
|
[
"MIT"
] | null | null | null |
app/gaia/tool/admin.py
|
mohit4/Gaia
|
b951c1e23351dc51e74c947f8e125028582d9406
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Ability
from .models import Tool
from .models import Weapon
# Expose the tool app's models in the Django admin site.
admin.site.register(Ability)
admin.site.register(Tool)
admin.site.register(Weapon)
| 18.090909
| 32
| 0.809045
| 29
| 199
| 5.551724
| 0.37931
| 0.186335
| 0.298137
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110553
| 199
| 10
| 33
| 19.9
| 0.909605
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.571429
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4dee19ecca184e86b62c8f89918e989698351c0f
| 191
|
py
|
Python
|
gammapy/obs/__init__.py
|
joleroi/gammapy
|
c4e0c4bd74c79d30e0837559d18b7a1a269f70d9
|
[
"BSD-3-Clause"
] | null | null | null |
gammapy/obs/__init__.py
|
joleroi/gammapy
|
c4e0c4bd74c79d30e0837559d18b7a1a269f70d9
|
[
"BSD-3-Clause"
] | null | null | null |
gammapy/obs/__init__.py
|
joleroi/gammapy
|
c4e0c4bd74c79d30e0837559d18b7a1a269f70d9
|
[
"BSD-3-Clause"
] | null | null | null |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Observation utility functions and classes
"""
from .observers import *
from .observation import *
from .datastore import *
| 23.875
| 63
| 0.759162
| 25
| 191
| 5.8
| 0.76
| 0.137931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006211
| 0.157068
| 191
| 7
| 64
| 27.285714
| 0.89441
| 0.544503
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4df7a9c235c0d5cbe9287d47dbf3e7f56069aa9d
| 156
|
py
|
Python
|
client/client/config.py
|
felixputera/cz4013-flight-info
|
0b21edddd0077788229fb1a38b86e3c48d0be82e
|
[
"MIT"
] | null | null | null |
client/client/config.py
|
felixputera/cz4013-flight-info
|
0b21edddd0077788229fb1a38b86e3c48d0be82e
|
[
"MIT"
] | null | null | null |
client/client/config.py
|
felixputera/cz4013-flight-info
|
0b21edddd0077788229fb1a38b86e3c48d0be82e
|
[
"MIT"
] | null | null | null |
class Config(object):
    """Static network configuration for the flight-info client."""

    SERVER_HOST = "127.0.0.1"   # address of the flight-info server
    SERVER_PORT = 12345         # port the server listens on
    CLIENT_HOST = "127.0.0.1"   # local address the client binds to
    CLIENT_PORT = 12346         # local port the client binds to
    UDP_BUF_SIZE = 1024         # UDP receive buffer size in bytes
| 17.333333
| 29
| 0.634615
| 25
| 156
| 3.72
| 0.64
| 0.150538
| 0.172043
| 0.193548
| 0.215054
| 0
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 0.25
| 156
| 8
| 30
| 19.5
| 0.57265
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
15069877c02d1118f32252d6a3605ed8c7170602
| 3,133
|
py
|
Python
|
scripts/create_markers.py
|
FreshConsulting/ros1_roverpro_auto_dock
|
3f63c85faff495e2dde477f2e75a3576b03396df
|
[
"BSD-3-Clause"
] | 8
|
2021-03-24T20:06:18.000Z
|
2022-02-22T09:10:04.000Z
|
scripts/create_markers.py
|
RoverRobotics/ros1_roverpro_auto_dock
|
03b60f995cff28f6408995c17c8e62232434902f
|
[
"BSD-3-Clause"
] | 2
|
2021-01-08T23:48:13.000Z
|
2021-01-13T21:01:32.000Z
|
scripts/create_markers.py
|
FreshConsulting/ros1_roverpro_auto_dock
|
3f63c85faff495e2dde477f2e75a3576b03396df
|
[
"BSD-3-Clause"
] | 4
|
2020-08-24T02:02:20.000Z
|
2021-02-21T22:38:14.000Z
|
#!/usr/bin/python
import os, sys
"""
Generate a PDF file containaing one or more fiducial marker for printing
"""
def checkCmd(cmd, package):
rc = os.system("which inkscape > /dev/null")
if rc != 0:
print """This utility requires %s. It can be installed by typing:
sudo apt install %s""" % (cmd, package)
sys.exit(1)
def genSvg(file, id, dicno):
    """Write an SVG page embedding marker<id>.png with crop/fold guide
    marks, black border bars, and a "<id> D<dicno>" caption."""
    f = open(file, "w")
    # The template consumes (id, id, dicno): the PNG href and both caption fields.
    f.write("""<svg width="208.0mm" height="240.0mm"
version="1.1"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns="http://www.w3.org/2000/svg">
<line x1="5.0mm" y1="5.0mm" x2="7.0mm" y2="5.0mm" style="stroke:black"/>
<line x1="195.0mm" y1="5.0mm" x2="197.0mm" y2="5.0mm" style="stroke:black"/>
<line x1="5.0mm" y1="21.0mm" x2="7.0mm" y2="21.0mm" style="stroke:black"/>
<line x1="5.0mm" y1="21.0mm" x2="5.0mm" y2="23.0mm" style="stroke:black"/>
<line x1="197.0mm" y1="21.0mm" x2="195.0mm" y2="21.0mm" style="stroke:black"/>
<line x1="197.0mm" y1="21.0mm" x2="197.0mm" y2="23.0mm" style="stroke:black"/>
<image x="31.0mm" y="47.0mm" width="140.0mm" height="140.0mm" xlink:href="marker%d.png" />
<rect x="31.0mm" y="47.0mm" width="140.0mm" height="4.0mm" style="stroke:black; fill:black"/>
<rect x="31.0mm" y="183.0mm" width="140.0mm" height="4.0mm" style="stroke:black; fill:black"/>
<rect x="31.0mm" y="47.0mm" width="4.0mm" height="140.0mm" style="stroke:black; fill:black"/>
<rect x="167.0mm" y="47.0mm" width="4.0mm" height="140.0mm" style="stroke:black; fill:black"/>
<line x1="5.0mm" y1="213.0mm" x2="7.0mm" y2="213.0mm" style="stroke:black"/>
<line x1="5.0mm" y1="213.0mm" x2="5.0mm" y2="211.0mm" style="stroke:black"/>
<line x1="195.0mm" y1="213.0mm" x2="197.0mm" y2="213.0mm" style="stroke:black"/>
<line x1="197.0mm" y1="213.0mm" x2="197.0mm" y2="211.0mm" style="stroke:black"/>
<line x1="5.0mm" y1="229.0mm" x2="7.0mm" y2="229.0mm" style="stroke:black"/>
<line x1="195.0mm" y1="229.0mm" x2="197.0mm" y2="229.0mm" style="stroke:black"/>
<text x="90.0mm" y="220.0mm" style="font-family:ariel; font-size:24">%d D%d</text>
</svg>
""" % (id, id, dicno))
    f.close()
if __name__ == "__main__":
    # Both external tools must be present before doing any work.
    checkCmd("inkscape", "inkscape")
    checkCmd("pdfunite", "poppler-utils")
    dicno = 7  # default aruco dictionary number
    argc = len(sys.argv)
    if argc != 4 and argc != 5:
        print "Usage: %s startId endId pdfFile [dictionary]" % sys.argv[0]
        sys.exit(1)
    outfile = sys.argv[3]
    if argc == 5:
        dicno = int(sys.argv[4])
    # Inclusive range of marker ids to generate.
    markers = range(int(sys.argv[1]), int(sys.argv[2])+1)
    pdfs = map(lambda i: "marker%d.pdf" % i, markers)
    for i in markers:
        print " Marker %d\r" % i,
        sys.stdout.flush()
        # Pipeline per marker: aruco PNG -> SVG page -> PDF, then drop intermediates.
        os.system("rosrun aruco_detect create_marker --id=%d --ms=2000 --d=%d marker%d.png" % (i, dicno, i))
        # NOTE(review): caption passes literal 7 instead of dicno -- confirm intended.
        genSvg("marker%d.svg" % i, i, 7)
        os.system("inkscape --without-gui --export-pdf=marker%d.pdf marker%d.svg" % (i, i))
        os.remove("marker%d.svg" % i)
        os.remove("marker%d.png" % i)
    print "Combining into %s" % outfile
    # Merge the per-marker PDFs into the requested output file.
    os.system("pdfunite %s %s" % (" ".join(pdfs), outfile))
    for f in pdfs:
        os.remove(f)
| 40.688312
| 108
| 0.599745
| 546
| 3,133
| 3.423077
| 0.271062
| 0.072766
| 0.11985
| 0.162654
| 0.486356
| 0.427501
| 0.427501
| 0.391118
| 0.374532
| 0.224719
| 0
| 0.119707
| 0.170763
| 3,133
| 76
| 109
| 41.223684
| 0.599692
| 0.005107
| 0
| 0.033898
| 1
| 0.305085
| 0.688076
| 0.11166
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.016949
| null | null | 0.067797
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
151ccff3f74d7d4a533acbec9ffef066da04fa22
| 65
|
py
|
Python
|
lib/mock/SceneGraphEditor/__init__.py
|
tommo/gii
|
03624a57cf74a07e38bfdc7f53c50bd926b7b5a7
|
[
"MIT"
] | 7
|
2016-02-13T18:47:23.000Z
|
2020-07-03T13:47:49.000Z
|
lib/mock/SceneGraphEditor/__init__.py
|
tommo/gii
|
03624a57cf74a07e38bfdc7f53c50bd926b7b5a7
|
[
"MIT"
] | 1
|
2018-06-13T04:55:27.000Z
|
2021-11-05T05:52:51.000Z
|
lib/mock/SceneGraphEditor/__init__.py
|
tommo/gii
|
03624a57cf74a07e38bfdc7f53c50bd926b7b5a7
|
[
"MIT"
] | 4
|
2016-02-15T13:32:46.000Z
|
2019-12-12T17:22:31.000Z
|
import SceneGraphEditor
import SceneCommands
import EntityEditor
| 16.25
| 23
| 0.907692
| 6
| 65
| 9.833333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092308
| 65
| 4
| 24
| 16.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
12ccf73b5aaa571b16207f017ce5c79bad037a18
| 26,562
|
py
|
Python
|
ukb/models/mri.py
|
wi905252/ukb-cardiac-mri
|
3177dde898a65b1d7f385b78e4f134de3852bea5
|
[
"Apache-2.0"
] | 19
|
2018-05-30T22:13:17.000Z
|
2022-01-18T14:04:40.000Z
|
ukb/models/mri.py
|
wi905252/ukb-cardiac-mri
|
3177dde898a65b1d7f385b78e4f134de3852bea5
|
[
"Apache-2.0"
] | 1
|
2019-08-07T07:29:07.000Z
|
2019-08-07T08:54:10.000Z
|
ukb/models/mri.py
|
wi905252/ukb-cardiac-mri
|
3177dde898a65b1d7f385b78e4f134de3852bea5
|
[
"Apache-2.0"
] | 8
|
2019-07-03T23:19:43.000Z
|
2021-11-15T17:09:24.000Z
|
import torch
import logging
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from .frame import LeNetFrameEncoder, FNNFrameEncoder, DenseNet121, vgg16_bn, densenet121, densenet_40_12_bc
from .sequence import RNN, MetaRNN, SeqSumPoolingEncoder
logger = logging.getLogger(__name__)
################################################################################
# Image Container Models (each image is independantly classified)
################################################################################
class MRINet(nn.Module):
    """
    Simple container class for MRI net. This module consists of:
        1) A frame encoder, e.g., a ConvNet/CNN
        2) Linear output layer

    Each frame is classified independently; per-frame scores are merged
    back into per-study scores by vote() during predict_proba().
    """
    def __init__(self, frame_encoder, n_classes, output_size, layers, dropout,
                 vote_opt='mean', use_cuda=False):
        super(MRINet, self).__init__()
        self.n_classes = n_classes
        self.fenc = frame_encoder
        self.classifier = self._make_classifier(output_size, n_classes, layers, dropout)
        self.vote_opt = vote_opt          # 'mean' or 'vote' frame aggregation
        self.use_cuda = use_cuda

    def _make_classifier(self, output_size, n_classes, layers=[64,32], dropout=0.2):
        # Build an MLP head: output_size -> hidden layers -> n_classes with
        # ReLU+Dropout between layers.
        # NOTE(review): the guard compares a layer *size* with layers[-1]
        # (== n_classes), not the loop index -- confirm a hidden layer whose
        # width equals n_classes is meant to skip ReLU/Dropout.
        layers = [output_size] + layers + [n_classes]
        classifier = []
        for i, size in enumerate(layers[:-1]):
            classifier.append(nn.Linear(layers[i], layers[i+1]))
            if size != layers[-1]:
                classifier.append(nn.ReLU(True))
                classifier.append(nn.Dropout(p=dropout))
        return nn.Sequential(*classifier)

    def init_hidden(self, batch_size):
        # Frame-level model keeps no recurrent state.
        return None

    def embedding(self, x, hidden=None):
        """Get learned representation of MRI sequence"""
        if self.use_cuda and not x.is_cuda:
            x = x.cuda()
        # x: (batch, frames, channels, width, height) per the unpacking below.
        batch_size, num_frames, num_channels, width, height = x.size()
        self.num_frames = num_frames
        x = x.view(-1, num_channels, width, height)
        x = self.fenc(x)
        x = self.classifier(x)
        if self.use_cuda:
            return x.cpu()
        else:
            return x

    def forward(self, x, hidden=None):
        if self.use_cuda and not x.is_cuda:
            x = x.cuda()
        # collapse all frames into new batch = batch_size * num_frames
        batch_size, num_frames, num_channels, width, height = x.size()
        self.num_frames = num_frames
        x = x.view(-1, num_channels, width, height)
        # encode frames
        x = self.fenc(x)
        # feed-forward-classifier
        x = self.classifier(x)
        return x

    def vote(self, y_pred, threshold=None):
        """Fold per-frame predictions back to per-study scores by averaging
        over the frame axis (optionally binarizing at *threshold* first)."""
        if threshold is not None:
            y_pred = (y_pred > threshold).astype(float)
        num_frames = self.num_frames
        num_samples = int(y_pred.shape[0]/num_frames)
        ex_shape = y_pred.shape[1:]
        y_pred = np.reshape(y_pred, (num_samples, num_frames,)+ex_shape)
        y_pred = np.mean(y_pred, axis=1)
        return y_pred

    def predict_proba(self, data_loader, binary=True, pos_label=1, threshold=0.5):
        """ Forward inference """
        y_pred = []
        for i, data in enumerate(data_loader):
            x, y = data
            x = Variable(x) if not self.use_cuda else Variable(x).cuda()
            y = Variable(y) if not self.use_cuda else Variable(y).cuda()
            h0 = self.init_hidden(x.size(0))
            outputs = self(x, h0)
            y_hat = F.softmax(outputs, dim=1)
            y_hat = y_hat.data.numpy() if not self.use_cuda else y_hat.cpu().data.numpy()
            y_pred.append(y_hat)
            # empty cuda cache
            if self.use_cuda:
                torch.cuda.empty_cache()
        y_pred = np.concatenate(y_pred)
        # Aggregate frame scores per study according to vote_opt.
        if self.vote_opt=='mean':
            y_pred = self.vote(y_pred)
        elif self.vote_opt=='vote':
            y_pred = self.vote(y_pred, threshold)
        return y_pred[:, pos_label] if binary else y_pred

    def predict(self, data_loader, binary=True, pos_label=1, threshold=0.5, return_proba=False):
        """
        If binary classification, use threshold on positive class
        If multinomial, just select the max probability as the predicted class
        :param data_loader:
        :param binary:
        :param pos_label:
        :param threshold:
        :return:
        """
        proba = self.predict_proba(data_loader, binary, pos_label, threshold)
        if binary:
            pred = np.array([1 if p > threshold else 0 for p in proba])
        else:
            pred = np.argmax(proba, 1)
        if return_proba:
            return (proba, pred)
        else:
            return pred
class DenseNet121Net(MRINet):
    """Frame-level MRI classifier using a DenseNet-121 frame encoder."""

    def __init__(self, n_classes, output_size, use_cuda, **kwargs):
        # Fix: MRINet.__init__ requires `layers` and `dropout` (no defaults);
        # the previous call omitted them, so instantiation always raised
        # TypeError. Forward them from kwargs with the same defaults that
        # MRINet._make_classifier / VGG16Net use.
        layers = kwargs.get("layers", [64, 32])
        dropout = kwargs.get("dropout", 0.2)
        vote_opt = kwargs.get("vote_opt", "mean")
        super(DenseNet121Net, self).__init__(frame_encoder=None, n_classes=n_classes,
                                             output_size=output_size,
                                             layers=layers, dropout=dropout,
                                             vote_opt=vote_opt, use_cuda=use_cuda)
        self.name = "DenseNet121Net"
        self.fenc = DenseNet121()
class VGG16Net(MRINet):
    """Frame-level MRI classifier using a (optionally pretrained) VGG16-BN encoder."""

    def __init__(self, n_classes, use_cuda, **kwargs):
        input_shape = kwargs.get("input_shape", (3, 32, 32))
        layers = kwargs.get("layers", [64, 32])
        dropout = kwargs.get("dropout", 0.2)
        vote_opt = kwargs.get("vote_opt", "mean")
        pretrained = kwargs.get("pretrained", True)
        requires_grad = kwargs.get("requires_grad", False)  # freeze encoder by default
        frm_output_size = self.get_frm_output_size(input_shape)
        super(VGG16Net, self).__init__(frame_encoder=None, n_classes=n_classes,
                                       output_size=frm_output_size,
                                       layers=layers, dropout=dropout,
                                       vote_opt=vote_opt, use_cuda=use_cuda)
        self.name = "VGG16Net"
        self.fenc = vgg16_bn(pretrained=pretrained, requires_grad=requires_grad)

    def get_frm_output_size(self, input_shape):
        """Flat feature width assuming 32x spatial downsampling to 512-channel cells."""
        feature_output = int(min(input_shape[-1], input_shape[-2])/32)
        feature_output = 1 if feature_output == 0 else feature_output  # clamp tiny inputs
        frm_output_size = pow(feature_output, 2) * 512
        return frm_output_size
class LeNet(MRINet):
    """Frame-level MRI classifier using a LeNet frame encoder."""

    def __init__(self, n_classes, n_channels, output_size, use_cuda, **kwargs):
        # Fix: MRINet.__init__ requires `layers` and `dropout` (no defaults);
        # the previous call omitted them, so instantiation always raised
        # TypeError. Forward them from kwargs with the standard defaults.
        layers = kwargs.get("layers", [64, 32])
        dropout = kwargs.get("dropout", 0.2)
        vote_opt = kwargs.get("vote_opt", "mean")
        super(LeNet, self).__init__(frame_encoder=None, n_classes=n_classes,
                                    output_size=output_size,
                                    layers=layers, dropout=dropout,
                                    vote_opt=vote_opt, use_cuda=use_cuda)
        self.name = "LeNet"
        self.fenc = LeNetFrameEncoder(n_channels=n_channels, output_size=output_size)
################################################################################
# Sequence Container Models
################################################################################
class MRISequenceNet(nn.Module):
    """
    Simple container network for MRI sequence classification. This module consists of:
        1) A frame encoder, e.g., a ConvNet/CNN
        2) A sequence encoder for merging frame representations, e.g., an RNN
    """
    def __init__(self, frame_encoder, seq_encoder, use_cuda=False):
        super(MRISequenceNet, self).__init__()
        self.fenc = frame_encoder
        self.senc = seq_encoder
        self.use_cuda = use_cuda

    def init_hidden(self, batch_size):
        # Delegate recurrent-state setup to the sequence encoder.
        return self.senc.init_hidden(batch_size)

    def embedding(self, x, hidden):
        """Get learned representation of MRI sequence"""
        if self.use_cuda and not x.is_cuda:
            x = x.cuda()
        # x: (batch, frames, channels, width, height) per the unpacking below.
        batch_size, num_frames, num_channels, width, height = x.size()
        x = x.view(-1, num_channels, width, height)
        x = self.fenc(x)
        x = x.view(batch_size, num_frames, -1)
        x = self.senc.embedding(x, hidden)
        if self.use_cuda:
            return x.cpu()
        else:
            return x

    def forward(self, x, hidden=None):
        if self.use_cuda and not x.is_cuda:
            x = x.cuda()
        # collapse all frames into new batch = batch_size * num_frames
        batch_size, num_frames, num_channels, width, height = x.size()
        x = x.view(-1, num_channels, width, height)
        # encode frames
        x = self.fenc(x)
        x = x.view(batch_size, num_frames, -1)
        # encode sequence
        x = self.senc(x, hidden)
        return x

    def predict_proba(self, data_loader, binary=True, pos_label=1):
        """ Forward inference """
        y_pred = []
        for i, data in enumerate(data_loader):
            x, y = data
            x = Variable(x) if not self.use_cuda else Variable(x).cuda()
            y = Variable(y) if not self.use_cuda else Variable(y).cuda()
            h0 = self.init_hidden(x.size(0))
            outputs = self(x, h0)
            y_hat = F.softmax(outputs, dim=1)
            y_hat = y_hat.data.numpy() if not self.use_cuda else y_hat.cpu().data.numpy()
            y_pred.append(y_hat)
            # empty cuda cache
            if self.use_cuda:
                torch.cuda.empty_cache()
        y_pred = np.concatenate(y_pred)
        return y_pred[:, pos_label] if binary else y_pred

    def predict(self, data_loader, binary=True, pos_label=1, threshold=0.5, return_proba=False, topSelection=None):
        """
        If binary classification, use threshold on positive class
        If multinomial, just select the max probability as the predicted class
        :param data_loader:
        :param binary:
        :param pos_label:
        :param threshold:
        :return:
        """
        proba = self.predict_proba(data_loader, binary, pos_label)
        # topSelection: choose the threshold so exactly the top-k scores
        # become positive predictions.
        if topSelection is not None and topSelection < proba.shape[0]:
            threshold = proba[np.argsort(proba)[-topSelection-1]]
        if binary:
            pred = np.array([1 if p > threshold else 0 for p in proba])
        else:
            pred = np.argmax(proba, 1)
        if return_proba:
            return (proba, pred)
        else:
            return pred
################################################################################
# FNN Models
################################################################################
class FNNFrameSum(MRISequenceNet):
    """Feed-forward frame encoder with sum-pooling over frames."""

    def __init__(self, n_classes, use_cuda, **kwargs):
        super(FNNFrameSum, self).__init__(frame_encoder=None, seq_encoder=None, use_cuda=use_cuda)
        self.name = "FNNFrameSum"
        self.n_classes = n_classes
        frm_layers = kwargs.get("frm_layers", [64, 32])
        input_shape = kwargs.get("input_shape", (1, 32, 32))
        # Flatten each frame (C*H*W) before the fully-connected encoder.
        frm_input_size = input_shape[0]*input_shape[1]*input_shape[2]
        self.fenc = FNNFrameEncoder(input_size=frm_input_size, layers=list(frm_layers))
        self.senc = SeqSumPoolingEncoder(n_classes=n_classes, input_size=frm_layers[-1])
class FNNFrameRNN(MRISequenceNet):
    """Feed-forward frame encoder followed by an RNN sequence encoder."""

    def __init__(self, n_classes, use_cuda, **kwargs):
        super(FNNFrameRNN, self).__init__(frame_encoder=None, seq_encoder=None, use_cuda=use_cuda)
        self.name = "FNNFrameRNN"
        self.n_classes = n_classes
        frm_layers = kwargs.get("frm_layers", [64, 32])
        input_shape = kwargs.get("input_shape", (1, 32, 32))
        # Flatten each frame (C*H*W) before the fully-connected encoder.
        frm_input_size = input_shape[0]*input_shape[1]*input_shape[2]
        frm_output_size = frm_layers[-1]
        seq_output_size = kwargs.get("seq_output_size", 128)
        seq_dropout = kwargs.get("seq_dropout", 0.1)
        seq_attention = kwargs.get("seq_attention", True)
        seq_bidirectional = kwargs.get("seq_bidirectional", True)
        seq_max_seq_len = kwargs.get("seq_max_seq_len", 30)
        # NOTE(review): key is "rnn_type" here but "seq_rnn_type" in
        # VGG16FrameRNN -- confirm which spelling callers use.
        seq_rnn_type = kwargs.get("rnn_type", "LSTM")
        self.fenc = FNNFrameEncoder(input_size=frm_input_size, layers=frm_layers)
        # NOTE(review): RNN head hard-codes n_classes=2, ignoring the ctor's
        # n_classes -- confirm intended.
        self.senc = RNN(n_classes=2, input_size=frm_output_size, hidden_size=seq_output_size,
                        dropout=seq_dropout, max_seq_len=seq_max_seq_len, attention=seq_attention,
                        rnn_type=seq_rnn_type, bidirectional=seq_bidirectional, use_cuda=self.use_cuda)
################################################################################
# LeNet Models
################################################################################
class LeNetFrameSum(MRISequenceNet):
    """LeNet frame encoder with sum-pooling over frames."""

    def __init__(self, n_classes, use_cuda, **kwargs):
        super(LeNetFrameSum, self).__init__(frame_encoder=None, seq_encoder=None, use_cuda=use_cuda)
        self.name = "LeNetFrameSum"
        self.n_classes = n_classes
        frm_output_size = kwargs.get("frm_output_size", 84)
        input_shape = kwargs.get("input_shape", (1, 32, 32))
        self.fenc = LeNetFrameEncoder(input_shape=input_shape, output_size=frm_output_size)
        self.senc = SeqSumPoolingEncoder(n_classes=n_classes, input_size=frm_output_size)
class LeNetFrameRNN(MRISequenceNet):
    """LeNet frame encoder followed by an RNN sequence encoder."""

    def __init__(self, n_classes, use_cuda, **kwargs):
        super(LeNetFrameRNN, self).__init__(frame_encoder=None, seq_encoder=None, use_cuda=use_cuda)
        self.name = "LeNetFrameRNN"
        self.n_classes = n_classes
        frm_output_size = kwargs.get("frm_output_size", 84)
        input_shape = kwargs.get("input_shape", (1, 32, 32))
        seq_output_size = kwargs.get("seq_output_size", 128)
        seq_dropout = kwargs.get("seq_dropout", 0.1)
        seq_attention = kwargs.get("seq_attention", True)
        seq_bidirectional = kwargs.get("seq_bidirectional", True)
        seq_max_seq_len = kwargs.get("seq_max_seq_len", 15)
        # NOTE(review): key is "rnn_type" here but "seq_rnn_type" in
        # VGG16FrameRNN -- confirm which spelling callers use.
        seq_rnn_type = kwargs.get("rnn_type", "LSTM")
        self.fenc = LeNetFrameEncoder(input_shape=input_shape, output_size=frm_output_size)
        # NOTE(review): RNN head hard-codes n_classes=2 -- confirm intended.
        self.senc = RNN(n_classes=2, input_size=frm_output_size, hidden_size=seq_output_size,
                        dropout=seq_dropout, max_seq_len=seq_max_seq_len, attention=seq_attention,
                        rnn_type=seq_rnn_type, bidirectional=seq_bidirectional, use_cuda=self.use_cuda)
################################################################################
# DenseNet 3-channel Models
################################################################################
class DenseNet121FrameSum(MRISequenceNet):
    """DenseNet-121 frame encoder with sum-pooling over frames."""

    def __init__(self, n_classes, use_cuda, **kwargs):
        super(DenseNet121FrameSum, self).__init__(frame_encoder=None, seq_encoder=None, use_cuda=use_cuda)
        self.name = "DenseNet121FrameSum"
        self.n_classes = n_classes
        input_shape = kwargs.get("input_shape", (3, 32, 32))
        pretrained = kwargs.get("pretrained", True)
        requires_grad = kwargs.get("requires_grad", False)  # freeze encoder by default
        # Flat width assuming 32x downsampling to 1024-channel cells.
        frm_output_size = pow(int(input_shape[-1]/32), 2) * 1024
        #self.fenc = DenseNet121()
        self.fenc = densenet121(pretrained=pretrained, requires_grad=requires_grad)
        self.senc = SeqSumPoolingEncoder(n_classes=n_classes, input_size=frm_output_size)
class DenseNet121FrameRNN(MRISequenceNet):
    """DenseNet-121 frame encoder followed by an RNN sequence encoder."""

    def __init__(self, n_classes, use_cuda, **kwargs):
        super(DenseNet121FrameRNN, self).__init__(frame_encoder=None, seq_encoder=None, use_cuda=use_cuda)
        self.name = "DenseNet121FrameRNN"
        self.n_classes = n_classes
        input_shape = kwargs.get("input_shape", (3, 32, 32))
        # Flat width assuming 32x downsampling to 1024-channel cells.
        frm_output_size = pow(int(input_shape[-1]/32), 2) * 1024
        seq_output_size = kwargs.get("seq_output_size", 128)
        seq_dropout = kwargs.get("seq_dropout", 0.1)
        seq_attention = kwargs.get("seq_attention", True)
        seq_bidirectional = kwargs.get("seq_bidirectional", True)
        seq_max_seq_len = kwargs.get("seq_max_seq_len", 15)
        # NOTE(review): key is "rnn_type" here but "seq_rnn_type" in
        # VGG16FrameRNN -- confirm which spelling callers use.
        seq_rnn_type = kwargs.get("rnn_type", "LSTM")
        pretrained = kwargs.get("pretrained", True)
        requires_grad = kwargs.get("requires_grad", False)
        #self.fenc = DenseNet121()
        self.fenc = densenet121(pretrained=pretrained, requires_grad=requires_grad)
        # NOTE(review): RNN head hard-codes n_classes=2 -- confirm intended.
        self.senc = RNN(n_classes=2, input_size=frm_output_size, hidden_size=seq_output_size,
                        dropout=seq_dropout, max_seq_len=seq_max_seq_len, attention=seq_attention,
                        rnn_type=seq_rnn_type, bidirectional=seq_bidirectional, use_cuda=self.use_cuda)
################################################################################
# VGG 3-channel Models
################################################################################
class VGG16FrameSum(MRISequenceNet):
    """VGG16-BN frame encoder with sum-pooling over frames."""

    def __init__(self, n_classes, use_cuda, **kwargs):
        super(VGG16FrameSum, self).__init__(frame_encoder=None, seq_encoder=None, use_cuda=use_cuda)
        self.name = "VGG16FrameSum"
        self.n_classes = n_classes
        input_shape = kwargs.get("input_shape", (3, 32, 32))
        pretrained = kwargs.get("pretrained", True)
        requires_grad = kwargs.get("requires_grad", False)  # freeze encoder by default
        self.fenc = vgg16_bn(pretrained=pretrained, requires_grad=requires_grad)
        frm_output_size = self.get_frm_output_size(input_shape)
        self.senc = SeqSumPoolingEncoder(n_classes=n_classes, input_size=frm_output_size)

    def get_frm_output_size(self, input_shape):
        """Flat feature width assuming 32x spatial downsampling to 512-channel cells."""
        feature_output = int(min(input_shape[-1], input_shape[-2])/32)
        feature_output = 1 if feature_output == 0 else feature_output  # clamp tiny inputs
        frm_output_size = pow(feature_output, 2) * 512
        return frm_output_size
class VGG16FrameRNN(MRISequenceNet):
    """VGG16-BN frame encoder followed by an RNN sequence encoder."""

    def __init__(self, n_classes, use_cuda, **kwargs):
        super(VGG16FrameRNN, self).__init__(frame_encoder=None, seq_encoder=None, use_cuda=use_cuda)
        self.name = "VGG16FrameRNN"
        self.n_classes = n_classes
        input_shape = kwargs.get("input_shape", (3, 32, 32))
        pretrained = kwargs.get("pretrained", True)
        requires_grad = kwargs.get("requires_grad", False)  # freeze encoder by default
        self.fenc = vgg16_bn(pretrained=pretrained, requires_grad=requires_grad)
        frm_output_size = self.get_frm_output_size(input_shape)
        #print(kwargs)
        #print("seq_bidirectional" in kwargs)
        seq_output_size = kwargs.get("seq_output_size", 128)
        seq_dropout = kwargs.get("seq_dropout", 0.1)
        seq_attention = kwargs.get("seq_attention", True)
        seq_bidirectional = kwargs.get("seq_bidirectional", True)
        seq_max_seq_len = kwargs.get("seq_max_seq_len", 15)
        seq_rnn_type = kwargs.get("seq_rnn_type", "LSTM")
        self.senc = RNN(n_classes=n_classes, input_size=frm_output_size, hidden_size=seq_output_size,
                        dropout=seq_dropout, max_seq_len=seq_max_seq_len, attention=seq_attention,
                        rnn_type=seq_rnn_type, bidirectional=seq_bidirectional, use_cuda=self.use_cuda)

    def get_frm_output_size(self, input_shape):
        """Infer the encoder's flat output width by probing it with a dummy batch."""
        input_shape = list(input_shape)
        input_shape.insert(0,1)  # prepend batch dim of 1
        dummy_batch_size = tuple(input_shape)
        x = torch.autograd.Variable(torch.zeros(dummy_batch_size))
        frm_output_size = self.fenc.forward(x).size()[1]
        return frm_output_size
class Dense4012FrameRNN(MRISequenceNet):
    """DenseNet-40-12-BC frame encoder followed by an RNN sequence encoder."""

    def __init__(self, n_classes, use_cuda, **kwargs):
        super(Dense4012FrameRNN, self).__init__(frame_encoder=None, seq_encoder=None, use_cuda=use_cuda)
        self.name = "Dense4012FrameRNN"
        input_shape = kwargs.get("input_shape", (3, 32, 32))
        seq_output_size = kwargs.get("seq_output_size", 128)
        seq_dropout = kwargs.get("seq_dropout", 0.1)
        seq_attention = kwargs.get("seq_attention", True)
        seq_bidirectional = kwargs.get("seq_bidirectional", True)
        seq_max_seq_len = kwargs.get("seq_max_seq_len", 15)
        # NOTE(review): key is "rnn_type" here but "seq_rnn_type" in
        # VGG16FrameRNN -- confirm which spelling callers use.
        seq_rnn_type = kwargs.get("rnn_type", "LSTM")
        pretrained = kwargs.get("pretrained", True)
        requires_grad = kwargs.get("requires_grad", False)
        # Echo the resolved hyper-parameters for the run log.
        logger.info("============================")
        logger.info("Dense4012FrameRNN parameters")
        logger.info("============================")
        logger.info("seq_output_size: {}".format(seq_output_size))
        logger.info("seq_dropout: {}".format(seq_dropout))
        logger.info("seq_attention: {}".format(seq_attention))
        logger.info("seq_bidirectional: {}".format(seq_bidirectional))
        logger.info("seq_max_seq_len: {}".format(seq_max_seq_len))
        logger.info("seq_rnn_type: {}".format(seq_rnn_type))
        logger.info("pretrained: {}".format(pretrained))
        logger.info("requires_grad: {}\n".format(requires_grad))
        self.fenc = densenet_40_12_bc(pretrained=pretrained, requires_grad=requires_grad)
        frm_output_size = self.get_frm_output_size(input_shape)
        # NOTE(review): RNN head hard-codes n_classes=2 -- confirm intended.
        self.senc = RNN(n_classes=2, input_size=frm_output_size, hidden_size=seq_output_size,
                        dropout=seq_dropout, max_seq_len=seq_max_seq_len, attention=seq_attention,
                        rnn_type=seq_rnn_type, bidirectional=seq_bidirectional, use_cuda=self.use_cuda)

    def get_frm_output_size(self, input_shape):
        """Infer the encoder's flat output width by probing it with a dummy batch."""
        input_shape = list(input_shape)
        input_shape.insert(0,1)  # prepend batch dim of 1
        dummy_batch_size = tuple(input_shape)
        x = torch.autograd.Variable(torch.zeros(dummy_batch_size))
        frm_output_size = self.fenc.forward(x).size()[1]
        return frm_output_size
################################################################################
# Sequence Container Meta Models
################################################################################
class MRIMetaSequenceRNN(MRISequenceNet):
    """Sequence model whose inputs are (frames, metadata) pairs: the RNN
    sequence embedding is concatenated with per-study metadata before a
    final linear classifier."""

    def __init__(self, frame_encoder, n_classes, use_cuda, **kwargs):
        super(MRIMetaSequenceRNN, self).__init__(frame_encoder=None, seq_encoder=None, use_cuda=use_cuda)
        self.n_classes = n_classes
        input_shape = kwargs.get("input_shape", (3, 32, 32))
        self.fenc = frame_encoder
        frm_output_size = self.get_frm_output_size(input_shape)
        #print(kwargs)
        #print("seq_bidirectional" in kwargs)
        seq_output_size = kwargs.get("seq_output_size", 128)
        seq_dropout = kwargs.get("seq_dropout", 0.1)
        seq_attention = kwargs.get("seq_attention", True)
        seq_bidirectional = kwargs.get("seq_bidirectional", True)
        seq_max_seq_len = kwargs.get("seq_max_seq_len", 15)
        seq_rnn_type = kwargs.get("seq_rnn_type", "LSTM")
        self.senc = MetaRNN(n_classes=n_classes, input_size=frm_output_size, hidden_size=seq_output_size,
                            dropout=seq_dropout, max_seq_len=seq_max_seq_len, attention=seq_attention,
                            rnn_type=seq_rnn_type, bidirectional=seq_bidirectional, use_cuda=self.use_cuda)
        meta_input_shape = kwargs.get("meta_input_shape", 3)
        self.classifier = self.get_classifier(seq_output_size, n_classes, seq_bidirectional, meta_input_shape)

    def get_frm_output_size(self, input_shape):
        """Infer the frame encoder's flat output width via a dummy batch."""
        input_shape = list(input_shape)
        input_shape.insert(0,1)  # prepend batch dim of 1
        dummy_batch_size = tuple(input_shape)
        x = torch.autograd.Variable(torch.zeros(dummy_batch_size))
        frm_output_size = self.fenc.forward(x).size()[1]
        return frm_output_size

    def get_classifier(self, seq_output_size, n_classes, seq_bidirectional,
                       meta_input_shape):
        """Linear head over [sequence embedding ; flattened metadata]."""
        b = 2 if seq_bidirectional else 1  # bidirectional doubles embedding width
        meta_input_shape = np.prod([meta_input_shape])
        classifier = nn.Linear(int(b * seq_output_size + meta_input_shape), int(n_classes))
        return classifier

    def embedding(self, x, hidden):
        """Get learned representation of MRI sequence"""
        # x is a (frames, metadata) pair; metadata is unused for the embedding.
        x, meta = x
        return super(MRIMetaSequenceRNN, self).embedding(x, hidden)

    def forward(self, x, hidden=None):
        # x is a (frames, metadata) pair.
        x, meta = x
        if self.use_cuda and not meta.is_cuda:
            meta = meta.cuda()
        if self.use_cuda and not x.is_cuda:
            x = x.cuda()
        x = super(MRIMetaSequenceRNN, self).forward(x, hidden)
        # Concatenate flattened sequence embedding with flattened metadata.
        concats = torch.cat((x.view(x.size(0), -1).float(),
                             meta.view(meta.size(0), -1).float()), 1)
        outputs = self.classifier(concats)
        return outputs

    def predict_proba(self, data_loader, binary=True, pos_label=1):
        """ Forward inference """
        y_pred = []
        for i, data in enumerate(data_loader):
            x, y = data
            # x is a list of tensors (frames, metadata); move each to device.
            x = [Variable(x_) if not self.use_cuda else Variable(x_).cuda() for x_ in x]
            y = Variable(y) if not self.use_cuda else Variable(y).cuda()
            h0 = self.init_hidden(x[0].size(0))
            outputs = self(x, h0)
            y_hat = F.softmax(outputs, dim=1)
            y_hat = y_hat.data.numpy() if not self.use_cuda else y_hat.cpu().data.numpy()
            y_pred.append(y_hat)
            # empty cuda cache
            if self.use_cuda:
                torch.cuda.empty_cache()
        y_pred = np.concatenate(y_pred)
        return y_pred[:, pos_label] if binary else y_pred
class MetaVGG16FrameRNN(MRIMetaSequenceRNN):
    """Metadata-aware sequence RNN using a VGG16-BN frame encoder."""

    def __init__(self, n_classes, use_cuda, **kwargs):
        self.name = "MetaVGG16FrameRNN"
        encoder = vgg16_bn(pretrained=kwargs.get("pretrained", True),
                           requires_grad=kwargs.get("requires_grad", False))
        super(MetaVGG16FrameRNN, self).__init__(frame_encoder=encoder,
                                                n_classes=n_classes,
                                                use_cuda=use_cuda,
                                                **kwargs)
class MetaDense4012FrameRNN(MRIMetaSequenceRNN):
    """Metadata-aware sequence RNN using a DenseNet-40-12-BC frame encoder."""

    def __init__(self, n_classes, use_cuda, **kwargs):
        self.name = "MetaDense4012FrameRNN"
        encoder = densenet_40_12_bc(pretrained=kwargs.get("pretrained", True),
                                    requires_grad=kwargs.get("requires_grad", False))
        super(MetaDense4012FrameRNN, self).__init__(frame_encoder=encoder,
                                                    n_classes=n_classes,
                                                    use_cuda=use_cuda,
                                                    **kwargs)
| 43.904132
| 117
| 0.600482
| 3,258
| 26,562
| 4.589012
| 0.066605
| 0.03886
| 0.038258
| 0.022473
| 0.778878
| 0.762424
| 0.741489
| 0.732392
| 0.726841
| 0.716407
| 0
| 0.018158
| 0.257737
| 26,562
| 604
| 118
| 43.976821
| 0.74016
| 0.057187
| 0
| 0.653938
| 0
| 0
| 0.057522
| 0.003252
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090692
| false
| 0
| 0.019093
| 0.004773
| 0.210024
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
42096e0a57b4ed474018bc6b30cac583021bbb6d
| 32
|
py
|
Python
|
gh_issue_agent/__main__.py
|
nzt4567/gh_issue_agent
|
95fa5124d0de48e436c0a563eeb506e548e44fab
|
[
"CC0-1.0"
] | null | null | null |
gh_issue_agent/__main__.py
|
nzt4567/gh_issue_agent
|
95fa5124d0de48e436c0a563eeb506e548e44fab
|
[
"CC0-1.0"
] | null | null | null |
gh_issue_agent/__main__.py
|
nzt4567/gh_issue_agent
|
95fa5124d0de48e436c0a563eeb506e548e44fab
|
[
"CC0-1.0"
] | null | null | null |
from .agent import main

# Package entry point: ``python -m <package>`` executes this module.
# The guard keeps an accidental ``import package.__main__`` from running main().
if __name__ == "__main__":
    main()
| 8
| 23
| 0.71875
| 5
| 32
| 4.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1875
| 32
| 3
| 24
| 10.666667
| 0.884615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
42396570e34c39e56410c3ce86be9550307a21c4
| 9,466
|
py
|
Python
|
eminus/minimizer.py
|
wangenau/eminus
|
57b6876093e52a14fc044cac94d1963b94b4ce8a
|
[
"Apache-2.0"
] | null | null | null |
eminus/minimizer.py
|
wangenau/eminus
|
57b6876093e52a14fc044cac94d1963b94b4ce8a
|
[
"Apache-2.0"
] | null | null | null |
eminus/minimizer.py
|
wangenau/eminus
|
57b6876093e52a14fc044cac94d1963b94b4ce8a
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
'''Minimization algorithms.'''
import logging
import numpy as np
from .dft import get_grad, get_n_total, orth, solve_poisson
from .energies import get_E
from .logger import name
from .utils import dotprod
from .xc import get_xc
def scf_step(scf):
    '''Perform one SCF step for a DFT calculation.

    Calculating intermediate results speeds up the energy and gradient calculation.
    Mutates scf in place: Y, n, phi, exc, and vxc are updated for reuse by the
    gradient functions.

    Args:
        scf: SCF object.

    Returns:
        float: Total energy.
    '''
    # Orthogonalize the current wave functions.
    scf.Y = orth(scf.atoms, scf.W)
    # Electron density built from the orthogonal orbitals.
    scf.n = get_n_total(scf.atoms, scf.Y)
    # Hartree potential from the Poisson equation.
    scf.phi = solve_poisson(scf.atoms, scf.n)
    # Exchange-correlation energy density and potential.
    scf.exc, scf.vxc = get_xc(scf.xc, scf.n)
    return get_E(scf)
def check_energies(scf, Elist, linmin='', cg=''):
    '''Check the energies for every SCF cycle and handle the output.

    Args:
        scf: SCF object.
        Elist (list): Total energies per SCF step.

    Keyword Args:
        linmin (float): Cosine between previous search direction and current gradient.
        cg (float): Conjugate-gradient orthogonality.

    Returns:
        bool: Convergence condition.
    '''
    iteration = len(Elist)
    # Only numeric diagnostic values get rendered; the default '' stays silent.
    if not isinstance(linmin, str):
        linmin = f' \tlinmin-test: {linmin:+.7f}'
    if not isinstance(cg, str):
        cg = f' \tcg-test: {cg:+.7f}'
    message = f'Iteration: {iteration} \tEtot: {scf.energies.Etot:+.7f}'
    if scf.log.level <= logging.DEBUG:
        scf.log.debug(message + f'{linmin}{cg}')
    else:
        scf.log.info(message)
    # Convergence and regression checks need at least two energies.
    if iteration < 2:
        return False
    if abs(Elist[-2] - Elist[-1]) < scf.etol:
        return True
    if Elist[-1] > Elist[-2]:
        scf.log.warning('Total energy is not decreasing.')
    return False
@name('steepest descent minimization')
def sd(scf, Nit, cost=scf_step, grad=get_grad, condition=check_energies, betat=3e-5):
    '''Steepest descent minimization algorithm.

    Args:
        scf: SCF object.
        Nit (int): Maximum number of SCF steps.

    Keyword Args:
        cost (Callable): Function that will run every SCF step.
        grad (Callable): Function that calculates the respective gradient.
        condition (Callable): Function to check and log the convergence condition.
        betat (float): SCF step size.

    Returns:
        list: Total energies per SCF cycle.
    '''
    energies = []
    for _ in range(Nit):
        energies.append(cost(scf))
        if condition(scf, energies):
            break
        # Step downhill along the negative gradient with a fixed step size.
        g = grad(scf, scf.W, scf.Y, scf.n, scf.phi, scf.vxc)
        scf.W = scf.W - betat * g
    return energies
@name('line minimization')
def lm(scf, Nit, cost=scf_step, grad=get_grad, condition=check_energies, betat=3e-5):
    '''Line minimization algorithm.

    Args:
        scf: SCF object.
        Nit (int): Maximum number of SCF steps.

    Keyword Args:
        cost (Callable): Function that will run every SCF step.
        grad (Callable): Function that calculates the respective gradient.
        condition (Callable): Function to check and log the convergence condition.
        betat (float): SCF step size.

    Returns:
        list: Total energies per SCF cycle.
    '''
    energies = []
    linmin = ''
    # First iteration: there is no previous direction, so no linmin test yet.
    grd = grad(scf, scf.W)
    direction = -grd
    trial = grad(scf, scf.W + betat * direction)
    step = betat * dotprod(grd, direction) / dotprod(grd - trial, direction)
    scf.W = scf.W + step * direction
    energies.append(cost(scf))
    condition(scf, energies)
    for _ in range(1, Nit):
        grd = grad(scf, scf.W, scf.Y, scf.n, scf.phi, scf.vxc)
        if scf.log.level <= logging.DEBUG:
            # Cosine of the new gradient with the previous search direction.
            linmin = dotprod(grd, direction) / np.sqrt(dotprod(grd, grd) * dotprod(direction, direction))
        direction = -grd
        trial = grad(scf, scf.W + betat * direction)
        step = betat * dotprod(grd, direction) / dotprod(grd - trial, direction)
        scf.W = scf.W + step * direction
        energies.append(cost(scf))
        if condition(scf, energies, linmin):
            break
    return energies
@name('preconditioned line minimization')
def pclm(scf, Nit, cost=scf_step, grad=get_grad, condition=check_energies, betat=3e-5):
    '''Preconditioned line minimization algorithm.

    Args:
        scf: SCF object.
        Nit (int): Maximum number of SCF steps.

    Keyword Args:
        cost (Callable): Function that will run every SCF step.
        grad (Callable): Function that calculates the respective gradient.
        condition (Callable): Function to check and log the convergence condition.
        betat (float): SCF step size.

    Returns:
        list: Total energies per SCF cycle.
    '''
    atoms = scf.atoms
    energies = []
    linmin = ''
    # First iteration: there is no previous direction, so no linmin test yet.
    grd = grad(scf, scf.W)
    direction = -atoms.K(grd)  # precondition the search direction
    trial = grad(scf, scf.W + betat * direction)
    step = betat * dotprod(grd, direction) / dotprod(grd - trial, direction)
    scf.W = scf.W + step * direction
    energies.append(cost(scf))
    condition(scf, energies)
    for _ in range(1, Nit):
        grd = grad(scf, scf.W, scf.Y, scf.n, scf.phi, scf.vxc)
        if scf.log.level <= logging.DEBUG:
            # Cosine of the new gradient with the previous search direction.
            linmin = dotprod(grd, direction) / np.sqrt(dotprod(grd, grd) * dotprod(direction, direction))
        direction = -atoms.K(grd)
        trial = grad(scf, scf.W + betat * direction)
        step = betat * dotprod(grd, direction) / dotprod(grd - trial, direction)
        scf.W = scf.W + step * direction
        energies.append(cost(scf))
        if condition(scf, energies, linmin):
            break
    return energies
@name('conjugate-gradient minimization')
def cg(scf, Nit, cost=scf_step, grad=get_grad, condition=check_energies, betat=3e-5):
    '''Conjugate-gradient minimization algorithm.

    Args:
        scf: SCF object. scf.cgform selects the beta formula
            (1=Fletcher-Reeves, 2=Polak-Ribiere, 3=Hestenes-Stiefel).
        Nit (int): Maximum number of SCF steps.

    Keyword Args:
        cost (Callable): Function that will run every SCF step.
        grad (Callable): Function that calculates the respective gradient.
        condition (Callable): Function to check and log the convergence condition.
        betat (float): SCF step size.

    Returns:
        list: Total energies per SCF cycle.
    '''
    costs = []
    linmin = ''
    cg = ''
    # Do the first step without the linmin and cg test
    g = grad(scf, scf.W)
    d = -g
    gt = grad(scf, scf.W + betat * d)
    # Line-search step size along d from the trial step betat.
    beta = betat * dotprod(g, d) / dotprod(g - gt, d)
    d_old = d
    g_old = g
    scf.W = scf.W + beta * d
    c = cost(scf)
    costs.append(c)
    condition(scf, costs)
    for _ in range(1, Nit):
        g = grad(scf, scf.W, scf.Y, scf.n, scf.phi, scf.vxc)
        if scf.log.level <= logging.DEBUG:
            # Diagnostics: cosine of g with the previous direction/gradient.
            linmin = dotprod(g, d_old) / np.sqrt(dotprod(g, g) * dotprod(d_old, d_old))
            cg = dotprod(g, g_old) / np.sqrt(dotprod(g, g) *
                                             dotprod(g_old, g_old))
        if scf.cgform == 1:  # Fletcher-Reeves
            beta = dotprod(g, g) / dotprod(g_old, g_old)
        elif scf.cgform == 2:  # Polak-Ribiere
            beta = dotprod(g - g_old, g) / dotprod(g_old, g_old)
        elif scf.cgform == 3:  # Hestenes-Stiefel
            beta = dotprod(g - g_old, g) / dotprod(g - g_old, d_old)
        # NOTE(review): for any other scf.cgform value, beta silently keeps
        # the previous iteration's line-search value — confirm intended.
        d = -g + beta * d_old
        gt = grad(scf, scf.W + betat * d)
        beta = betat * dotprod(g, d) / dotprod(g - gt, d)
        d_old = d
        g_old = g
        scf.W = scf.W + beta * d
        c = cost(scf)
        costs.append(c)
        if condition(scf, costs, linmin, cg):
            break
    return costs
@name('preconditioned conjugate-gradient minimization')
def pccg(scf, Nit, cost=scf_step, grad=get_grad, condition=check_energies, betat=3e-5):
    '''Preconditioned conjugate-gradient minimization algorithm.

    Same as cg, but directions are preconditioned with atoms.K.

    Args:
        scf: SCF object. scf.cgform selects the beta formula
            (1=Fletcher-Reeves, 2=Polak-Ribiere, 3=Hestenes-Stiefel).
        Nit (int): Maximum number of SCF steps.

    Keyword Args:
        cost (Callable): Function that will run every SCF step.
        grad (Callable): Function that calculates the respective gradient.
        condition (Callable): Function to check and log the convergence condition.
        betat (float): SCF step size.

    Returns:
        list: Total energies per SCF cycle.
    '''
    atoms = scf.atoms
    costs = []
    linmin = ''
    cg = ''
    # Do the first step without the linmin and cg test
    g = grad(scf, scf.W)
    d = -atoms.K(g)
    gt = grad(scf, scf.W + betat * d)
    # Line-search step size along d from the trial step betat.
    beta = betat * dotprod(g, d) / dotprod(g - gt, d)
    d_old = d
    g_old = g
    scf.W = scf.W + beta * d
    c = cost(scf)
    costs.append(c)
    condition(scf, costs)
    for _ in range(1, Nit):
        g = grad(scf, scf.W, scf.Y, scf.n, scf.phi, scf.vxc)
        if scf.log.level <= logging.DEBUG:
            # Diagnostics in the preconditioned inner product.
            linmin = dotprod(g, d_old) / np.sqrt(dotprod(g, g) * dotprod(d_old, d_old))
            cg = dotprod(g, atoms.K(g_old)) / np.sqrt(dotprod(g, atoms.K(g)) *
                                                      dotprod(g_old, atoms.K(g_old)))
        if scf.cgform == 1:  # Fletcher-Reeves
            beta = dotprod(g, atoms.K(g)) / dotprod(g_old, atoms.K(g_old))
        elif scf.cgform == 2:  # Polak-Ribiere
            beta = dotprod(g - g_old, atoms.K(g)) / dotprod(g_old, atoms.K(g_old))
        elif scf.cgform == 3:  # Hestenes-Stiefel
            beta = dotprod(g - g_old, atoms.K(g)) / dotprod(g - g_old, d_old)
        # NOTE(review): for any other scf.cgform value, beta silently keeps
        # the previous iteration's line-search value — confirm intended.
        d = -atoms.K(g) + beta * d_old
        gt = grad(scf, scf.W + betat * d)
        beta = betat * dotprod(g, d) / dotprod(g - gt, d)
        d_old = d
        g_old = g
        scf.W = scf.W + beta * d
        c = cost(scf)
        costs.append(c)
        if condition(scf, costs, linmin, cg):
            break
    return costs
| 30.535484
| 94
| 0.584302
| 1,357
| 9,466
| 4.023581
| 0.116433
| 0.061538
| 0.031136
| 0.034249
| 0.778388
| 0.767216
| 0.760623
| 0.758974
| 0.735348
| 0.730952
| 0
| 0.004478
| 0.292204
| 9,466
| 309
| 95
| 30.634304
| 0.810448
| 0.316501
| 0
| 0.684524
| 0
| 0.005952
| 0.058728
| 0.009788
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0
| 0.041667
| 0
| 0.130952
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
42455d30b3b331a8252133d9e8a013ffef2bd620
| 89
|
py
|
Python
|
server/inbox/admin.py
|
C404TEAMW21/CMPUT404_PROJECT
|
0b5e7e5e37ff265e843056457bd00ab77f4373f7
|
[
"W3C-20150513"
] | null | null | null |
server/inbox/admin.py
|
C404TEAMW21/CMPUT404_PROJECT
|
0b5e7e5e37ff265e843056457bd00ab77f4373f7
|
[
"W3C-20150513"
] | 81
|
2021-02-14T02:35:52.000Z
|
2021-04-10T21:14:27.000Z
|
server/inbox/admin.py
|
C404TEAMW21/CMPUT404_PROJECT
|
0b5e7e5e37ff265e843056457bd00ab77f4373f7
|
[
"W3C-20150513"
] | 4
|
2021-02-14T19:44:23.000Z
|
2021-04-06T22:35:35.000Z
|
from django.contrib import admin
from .models import Inbox
# Expose the Inbox model in the Django admin with the default ModelAdmin.
admin.site.register(Inbox)
| 12.714286
| 32
| 0.797753
| 13
| 89
| 5.461538
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134831
| 89
| 6
| 33
| 14.833333
| 0.922078
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4262089a6d49318b13c492ba538c77e988badec5
| 192
|
py
|
Python
|
src/Modules/Trinity.FFI/Trinity.FFI.Python/GraphEngine/Storage/core/__init__.py
|
qdoop/GraphEngine
|
d83381c781edc4040824c1e31057789939530eff
|
[
"MIT"
] | null | null | null |
src/Modules/Trinity.FFI/Trinity.FFI.Python/GraphEngine/Storage/core/__init__.py
|
qdoop/GraphEngine
|
d83381c781edc4040824c1e31057789939530eff
|
[
"MIT"
] | null | null | null |
src/Modules/Trinity.FFI/Trinity.FFI.Python/GraphEngine/Storage/core/__init__.py
|
qdoop/GraphEngine
|
d83381c781edc4040824c1e31057789939530eff
|
[
"MIT"
] | null | null | null |
#
"""
TODO:
TSL generation and sending msg to Graph Machine to compile TSL.
"""
from .SymTable import SymTable
from .Serialize import mark_as_serializable, Serializer, TSLJSONEncoder
| 17.454545
| 71
| 0.755208
| 24
| 192
| 5.958333
| 0.791667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177083
| 192
| 10
| 72
| 19.2
| 0.905063
| 0.385417
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
426250dc8ba6d257b648203ca6ec5d9e6cdaa4a2
| 190
|
py
|
Python
|
page.py
|
berkeleybop/behave_core
|
aa8354fcf0deeb68f6add8749d80d0a2a1bef16d
|
[
"BSD-3-Clause"
] | null | null | null |
page.py
|
berkeleybop/behave_core
|
aa8354fcf0deeb68f6add8749d80d0a2a1bef16d
|
[
"BSD-3-Clause"
] | null | null | null |
page.py
|
berkeleybop/behave_core
|
aa8354fcf0deeb68f6add8749d80d0a2a1bef16d
|
[
"BSD-3-Clause"
] | null | null | null |
####
#### Helper functions to support page retrieval.
#### These are mostly going to be Selenium.
####
# from behave import *
###
### TODO: Not much here yet...
###
def hello():
    """Placeholder step helper; intentionally a no-op for now."""
    pass
| 13.571429
| 48
| 0.6
| 24
| 190
| 4.75
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 190
| 13
| 49
| 14.615385
| 0.76
| 0.684211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
c407ce23bca269d1860b535ecc5bb54824402419
| 33
|
py
|
Python
|
sensor/test.py
|
totomz/homelab
|
fa578cf7d7dbc8c4941d4944aa0fa8ff108156b7
|
[
"MIT"
] | null | null | null |
sensor/test.py
|
totomz/homelab
|
fa578cf7d7dbc8c4941d4944aa0fa8ff108156b7
|
[
"MIT"
] | null | null | null |
sensor/test.py
|
totomz/homelab
|
fa578cf7d7dbc8c4941d4944aa0fa8ff108156b7
|
[
"MIT"
] | null | null | null |
# Print each of the three sample values on its own line.
for value in (1, 2, 3):
    print(value)
| 11
| 19
| 0.454545
| 8
| 33
| 1.875
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 0.333333
| 33
| 2
| 20
| 16.5
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
c45ded9adf64a1a0c81c0a29b0c763e37fd504bd
| 122
|
py
|
Python
|
la.py
|
wichit2s/cs-ubu-1144311-assignment-02
|
c414776ba86420efa9165772e7083c8ee0396708
|
[
"MIT"
] | null | null | null |
la.py
|
wichit2s/cs-ubu-1144311-assignment-02
|
c414776ba86420efa9165772e7083c8ee0396708
|
[
"MIT"
] | null | null | null |
la.py
|
wichit2s/cs-ubu-1144311-assignment-02
|
c414776ba86420efa9165772e7083c8ee0396708
|
[
"MIT"
] | null | null | null |
from mat import *
# Load the coefficient matrix A and right-hand side b from CSV files
# using the helper from the mat module.
A = readm('A.csv')
b = readm('b.csv')
def solve(A, b):
    """Solve the linear system A x = b by Gaussian elimination with
    partial pivoting.

    The original skeleton returned the undefined name ``x`` (NameError).

    Args:
        A: square matrix as a list of row lists.
        b: right-hand side, either a flat list of numbers or a column
           matrix (list of single-element rows, as readm may produce).

    Returns:
        list: solution x, in the same orientation as b (flat list in ->
        flat list out; column matrix in -> column matrix out).

    Raises:
        ValueError: if A is singular (no unique solution).
    """
    column = bool(b) and isinstance(b[0], (list, tuple))
    rhs = [row[0] for row in b] if column else list(b)
    n = len(rhs)
    # Augmented working copy so the inputs are not mutated.
    m = [list(map(float, A[i])) + [float(rhs[i])] for i in range(n)]
    for col in range(n):
        # Partial pivoting: swap the largest remaining entry into place.
        pivot = max(range(col, n), key=lambda r: abs(m[r][col]))
        if abs(m[pivot][col]) < 1e-12:
            raise ValueError("matrix is singular")
        m[col], m[pivot] = m[pivot], m[col]
        for r in range(col + 1, n):
            factor = m[r][col] / m[col][col]
            for c in range(col, n + 1):
                m[r][c] -= factor * m[col][c]
    # Back substitution.
    x = [0.0] * n
    for r in range(n - 1, -1, -1):
        acc = m[r][n] - sum(m[r][c] * x[c] for c in range(r + 1, n))
        x[r] = acc / m[r][r]
    return [[v] for v in x] if column else x
| 12.2
| 20
| 0.516393
| 21
| 122
| 3
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.303279
| 122
| 9
| 21
| 13.555556
| 0.741176
| 0.172131
| 0
| 0
| 0
| 0
| 0.102041
| 0
| 0
| 0
| 0
| 0.111111
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
c472b85c2dedb25a6323da0e853399092a4f2907
| 124
|
py
|
Python
|
features/steps/use_steplib_behave4cli.py
|
arichtman/pyadr
|
92a1a3495c92f50aca8966a5187fc96bff7ad3d1
|
[
"MIT"
] | 5
|
2020-07-08T08:19:18.000Z
|
2022-02-26T23:29:12.000Z
|
features/steps/use_steplib_behave4cli.py
|
arichtman/pyadr
|
92a1a3495c92f50aca8966a5187fc96bff7ad3d1
|
[
"MIT"
] | 50
|
2020-04-05T22:40:41.000Z
|
2022-02-26T23:53:12.000Z
|
features/steps/use_steplib_behave4cli.py
|
arichtman/pyadr
|
92a1a3495c92f50aca8966a5187fc96bff7ad3d1
|
[
"MIT"
] | 1
|
2022-02-26T23:46:38.000Z
|
2022-02-26T23:46:38.000Z
|
# flake8: noqa
"""
Use behave4cli step library.
"""
# -- REGISTER-STEPS FROM STEP-LIBRARY:
import behave4cli.__all_steps__
| 15.5
| 38
| 0.733871
| 15
| 124
| 5.733333
| 0.733333
| 0.255814
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028037
| 0.137097
| 124
| 7
| 39
| 17.714286
| 0.775701
| 0.637097
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
673925fd73f4e5bbb69e49c87d97eb9b10ebcf6f
| 226
|
py
|
Python
|
py_script/cal_obj.py
|
zhz03/software_development
|
ba6eaada8b00f958e8bc1c89fcad8d9562c34ea9
|
[
"Apache-2.0"
] | null | null | null |
py_script/cal_obj.py
|
zhz03/software_development
|
ba6eaada8b00f958e8bc1c89fcad8d9562c34ea9
|
[
"Apache-2.0"
] | null | null | null |
py_script/cal_obj.py
|
zhz03/software_development
|
ba6eaada8b00f958e8bc1c89fcad8d9562c34ea9
|
[
"Apache-2.0"
] | null | null | null |
"""
This calculation feature/object
for calculating some math
"""
class calculation:
    """Tiny arithmetic helper exposing addition and subtraction."""

    def __init__(self):
        # Stateless; nothing to initialize.
        pass

    def addition(self, a, b):
        """Return the sum of a and b."""
        return a + b

    def subtraction(self, a, b):
        """Return a minus b."""
        return a - b
| 15.066667
| 31
| 0.610619
| 30
| 226
| 4.466667
| 0.6
| 0.059701
| 0.089552
| 0.179104
| 0.208955
| 0.208955
| 0
| 0
| 0
| 0
| 0
| 0
| 0.287611
| 226
| 14
| 32
| 16.142857
| 0.832298
| 0.252212
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0.142857
| 0
| 0.285714
| 0.857143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 5
|
6748277c13d59ab801f6beaeee487619c4b608d6
| 224
|
py
|
Python
|
naslib/predictors/__init__.py
|
automl/nas-bench-x11
|
56aee15f125339c4d2af1cbfad9f66fd4643c9d7
|
[
"Apache-2.0"
] | 14
|
2021-12-08T17:56:01.000Z
|
2022-01-15T05:06:59.000Z
|
naslib/predictors/__init__.py
|
shenyann/nas-bench-x11
|
ebf64ce3c30cc2ad0909508b5e25652011179956
|
[
"Apache-2.0"
] | 4
|
2022-01-10T09:04:38.000Z
|
2022-01-23T03:35:09.000Z
|
naslib/predictors/__init__.py
|
shenyann/nas-bench-x11
|
ebf64ce3c30cc2ad0909508b5e25652011179956
|
[
"Apache-2.0"
] | 1
|
2021-12-08T17:56:06.000Z
|
2021-12-08T17:56:06.000Z
|
from .predictor import Predictor
from .ensemble import Ensemble
from .feedforward import FeedforwardPredictor
from .lcsvr import SVR_Estimator
from .zerocost_estimators import ZeroCostEstimators
from .lce import LCEPredictor
| 37.333333
| 51
| 0.870536
| 26
| 224
| 7.423077
| 0.538462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102679
| 224
| 6
| 52
| 37.333333
| 0.960199
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
67574e4fbe38e5025078e3e24a594661a5b3fbff
| 13,996
|
py
|
Python
|
test/system/benchmark/target/manager.py
|
gye-ul/poseidonos
|
bce8fe2cd1f36ede8647446ecc4cf8a9749e6918
|
[
"BSD-3-Clause"
] | 1
|
2022-03-08T13:08:53.000Z
|
2022-03-08T13:08:53.000Z
|
test/system/benchmark/target/manager.py
|
gye-ul/poseidonos
|
bce8fe2cd1f36ede8647446ecc4cf8a9749e6918
|
[
"BSD-3-Clause"
] | null | null | null |
test/system/benchmark/target/manager.py
|
gye-ul/poseidonos
|
bce8fe2cd1f36ede8647446ecc4cf8a9749e6918
|
[
"BSD-3-Clause"
] | null | null | null |
import json
import lib
import pos
import prerequisite
import time
import os
class Target:
def __init__(self, json):
self.json = json
self.name = json["NAME"]
self.id = json["ID"]
self.pw = json["PW"]
self.nic_ssh = json["NIC"]["SSH"]
self.nic_ip1 = json["NIC"]["IP1"]
try:
self.prereq = json["PREREQUISITE"]
except Exception as e:
self.prereq = None
self.spdk_dir = json["DIR"] + "/lib/spdk"
self.spdk_tp = json["SPDK"]["TRANSPORT"]["TYPE"]
self.spdk_no_shd_buf = json["SPDK"]["TRANSPORT"]["NUM_SHARED_BUFFER"]
self.pos_dir = json["DIR"]
self.pos_bin = json["POS"]["BIN"]
self.pos_cli = json["POS"]["CLI"]
self.pos_cfg = json["POS"]["CFG"]
self.pos_log = json["POS"]["LOG"]
self.use_autogen = json["AUTO_GENERATE"]["USE"]
self.subsystem_list = []
self.array_volume_list = {}
def Prepare(self):
lib.printer.green(f" {__name__}.Prepare : {self.name}")
if (self.prereq and self.prereq["CPU"]["RUN"]):
prerequisite.cpu.Scaling(self.id, self.pw, self.nic_ssh, self.prereq["CPU"]["SCALING"])
if (self.prereq and self.prereq["SSD"]["RUN"]):
prerequisite.ssd.Format(self.id, self.pw, self.nic_ssh, self.prereq["SSD"]["FORMAT"], self.spdk_dir)
if (self.prereq and self.prereq["MEMORY"]["RUN"]):
prerequisite.memory.MaxMapCount(self.id, self.pw, self.nic_ssh, self.prereq["MEMORY"]["MAX_MAP_COUNT"])
prerequisite.memory.DropCaches(self.id, self.pw, self.nic_ssh, self.prereq["MEMORY"]["DROP_CACHES"])
if (self.prereq and self.prereq["NETWORK"]["RUN"]):
prerequisite.network.IrqBalance(self.id, self.pw, self.nic_ssh, self.prereq["NETWORK"]["IRQ_BALANCE"])
prerequisite.network.TcpTune(self.id, self.pw, self.nic_ssh, self.prereq["NETWORK"]["TCP_TUNE"])
prerequisite.network.IrqAffinity(self.id, self.pw, self.nic_ssh, self.prereq["NETWORK"]["IRQ_AFFINITYs"], self.pos_dir)
prerequisite.network.Nic(self.id, self.pw, self.nic_ssh, self.prereq["NETWORK"]["NICs"])
if (self.prereq and self.prereq["SPDK"]["RUN"]):
prerequisite.spdk.Setup(self.id, self.pw, self.nic_ssh, self.prereq["SPDK"], self.pos_dir)
if (self.prereq and self.prereq["DEBUG"]["RUN"]):
prerequisite.debug.Ulimit(self.id, self.pw, self.nic_ssh, self.prereq["DEBUG"]["ULIMIT"])
prerequisite.debug.Apport(self.id, self.pw, self.nic_ssh, self.prereq["DEBUG"]["APPORT"])
prerequisite.debug.CorePattern(self.id, self.pw, self.nic_ssh, self.prereq["DEBUG"]["DUMP_DIR"], self.prereq["DEBUG"]["CORE_PATTERN"])
result = pos.env.check_pos_running(self.id, self.pw, self.nic_ssh, self.pos_bin)
if -1 == result:
return False
elif result:
result = pos.env.kill_pos(self.id, self.pw, self.nic_ssh, self.pos_bin)
if -1 == result:
return False
time.sleep(1)
if -1 == pos.env.copy_pos_config(self.id, self.pw, self.nic_ssh, self.pos_dir, self.pos_cfg):
return False
if -1 == pos.env.execute_pos(self.id, self.pw, self.nic_ssh, self.pos_bin, self.pos_dir, self.pos_log):
return False
time.sleep(10)
# spdk setting
if -1 == pos.cli.transport_create(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, self.spdk_tp, self.spdk_no_shd_buf):
return False
# create subsystem and add listener
if "yes" == self.use_autogen:
nqn_base = 0
for subsys in self.json["AUTO_GENERATE"]["SUBSYSTEMs"]:
for i in range(subsys["NUM"]):
nqn = f"nqn.2020-10.pos\\:subsystem{i+nqn_base+1:02d}"
sn = f"POS000000000000{i+nqn_base+1:02d}"
if -1 == pos.cli.subsystem_create(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, nqn, sn):
return False
if -1 == pos.cli.subsystem_add_listener(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, nqn,
self.spdk_tp, self.json["NIC"][subsys["IP"]], subsys["PORT"]):
return False
subsystem_tmp = [subsys["INITIATOR"], nqn, sn, self.json["NIC"][subsys["IP"]], subsys["PORT"]]
self.subsystem_list.append(subsystem_tmp)
nqn_base += subsys["NUM"]
else:
for subsys in self.json["SPDK"]["SUBSYSTEMs"]:
if -1 == pos.cli.subsystem_create(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, subsys["NQN"], subsys["SN"]):
return False
if -1 == pos.cli.subsystem_add_listener(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, subsys["NQN"],
self.spdk_tp, self.json["NIC"][subsys["IP"]], subsys["PORT"]):
return False
pos.cli.telemetry_stop(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir)
# pos setting
for array in self.json["POS"]["ARRAYs"]:
buf_dev = array["BUFFER_DEVICE"]
if -1 == pos.cli.bdev_malloc_create(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, buf_dev["NAME"],
buf_dev["TYPE"], buf_dev["NUM_BLOCKS"], buf_dev["BLOCK_SIZE"], buf_dev["NUMA"]):
return False
if -1 == pos.cli.device_scan(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir):
return False
if -1 == pos.cli.array_reset(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir):
return False
for array in self.json["POS"]["ARRAYs"]:
if -1 == pos.cli.array_create(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, array["BUFFER_DEVICE"]["NAME"], array["USER_DEVICE_LIST"],
array["SPARE_DEVICE_LIST"], array["NAME"], array["RAID_TYPE"]):
return False
if -1 == pos.cli.array_mount(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, array["NAME"]):
return False
if "yes" != self.use_autogen:
for volume in array["VOLUMEs"]:
if -1 == pos.cli.volume_create(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, volume["NAME"],
volume["SIZE"], array["NAME"]):
return False
if -1 == pos.cli.volume_mount(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, volume["NAME"],
volume["SUBNQN"], array["NAME"]):
return False
# create, mount volume(auto)
if "yes" == self.use_autogen:
nqn_base = 0
for subsys in self.json["AUTO_GENERATE"]["SUBSYSTEMs"]:
for vol in subsys["VOLUMEs"]:
volume_list = []
for i in range(vol["NUM"]):
nqn = f"nqn.2020-10.pos:subsystem{i+nqn_base+1:02d}"
volume_name = f"VOL{i+nqn_base+1}"
volume_list.append(volume_name)
if -1 == pos.cli.volume_create(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, volume_name,
vol["SIZE"], vol["ARRAY"]):
return False
if -1 == pos.cli.volume_mount(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, volume_name,
nqn, vol["ARRAY"]):
return False
nqn_base += vol["NUM"]
self.array_volume_list[vol["ARRAY"]] = volume_list
pos.cli.logger_setlevel(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, "info")
# print subsystems
subsys = pos.cli.subsystem_list(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir)
print(subsys)
lib.printer.green(f" '{self.name}' prepared")
return True
def CliInLocal(self):
pos.set_cli_in_local()
def Wrapup(self):
for array in self.json["POS"]["ARRAYs"]:
if -1 == pos.cli.array_unmount(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, array["NAME"]):
return False
if -1 == pos.cli.system_stop(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir):
return False
lib.printer.green(f" '{self.name}' wrapped up")
return True
def ForcedExit(self):
pos.env.kill_pos(self.id, self.pw, self.nic_ssh, self.pos_bin)
time.sleep(1)
def DirtyBringup(self):
if -1 == pos.env.copy_pos_config(self.id, self.pw, self.nic_ssh, self.pos_dir, self.pos_cfg):
return False
if -1 == pos.env.execute_pos(self.id, self.pw, self.nic_ssh, self.pos_bin, self.pos_dir, self.pos_log):
return False
time.sleep(1)
# spdk setting
if -1 == pos.cli.transport_create(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, self.spdk_tp, self.spdk_no_shd_buf):
return False
if "yes" == self.use_autogen:
nqn_base = 0
for subsys in self.json["AUTO_GENERATE"]["SUBSYSTEMs"]:
for i in range(subsys["NUM"]):
nqn = f"nqn.2020-10.pos\\:subsystem{i+nqn_base+1:02d}"
sn = f"POS000000000000{i+nqn_base+1:02d}"
if -1 == pos.cli.subsystem_create(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, nqn, sn):
return False
if -1 == pos.cli.subsystem_add_listener(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, nqn,
self.spdk_tp, self.json["NIC"][subsys["IP"]], subsys["PORT"]):
return False
subsystem_tmp = [subsys["INITIATOR"], nqn, sn, self.json["NIC"][subsys["IP"]], subsys["PORT"]]
self.subsystem_list.append(subsystem_tmp)
nqn_base += subsys["NUM"]
else:
for subsys in self.json["SPDK"]["SUBSYSTEMs"]:
if -1 == pos.cli.subsystem_create(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, subsys["NQN"], subsys["SN"]):
return False
if -1 == pos.cli.subsystem_add_listener(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, subsys["NQN"],
self.spdk_tp, self.json["NIC"][subsys["IP"]], subsys["PORT"]):
return False
# pos setting
for array in self.json["POS"]["ARRAYs"]:
buf_dev = array["BUFFER_DEVICE"]
if -1 == pos.cli.bdev_malloc_create(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, buf_dev["NAME"],
buf_dev["TYPE"], buf_dev["NUM_BLOCKS"], buf_dev["BLOCK_SIZE"], buf_dev["NUMA"]):
return False
if -1 == pos.cli.device_scan(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir):
return False
# pos setting
for array in self.json["POS"]["ARRAYs"]:
if -1 == pos.cli.array_mount(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, array["NAME"]):
return False
if "yes" != self.use_autogen:
for volume in array["VOLUMEs"]:
if -1 == pos.cli.volume_mount(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, volume["NAME"],
volume["SUBNQN"], array["NAME"]):
return False
# create, mount volume(auto)
if "yes" == self.use_autogen:
nqn_base = 0
for subsys in self.json["AUTO_GENERATE"]["SUBSYSTEMs"]:
for vol in subsys["VOLUMEs"]:
volume_list = []
for i in range(vol["NUM"]):
nqn = f"nqn.2020-10.pos:subsystem{i+nqn_base+1:02d}"
volume_name = f"VOL{i+nqn_base+1}"
volume_list.append(volume_name)
if -1 == pos.cli.volume_mount(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, volume_name,
nqn, vol["ARRAY"]):
return False
nqn_base += vol["NUM"]
self.array_volume_list[vol["ARRAY"]] = volume_list
pos.cli.logger_setlevel(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, "info")
# print subsystems
subsys = pos.cli.subsystem_list(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir)
print(subsys)
lib.printer.green(f" '{self.name}' prepared")
return True
def DetachDevice(self, dev):
    """Detach block device `dev` on the target host, delegating to pos.env.detach_device with this object's SSH credentials (id/pw/nic_ssh)."""
    return pos.env.detach_device(self.id, self.pw, self.nic_ssh, dev)
def PcieScan(self):
    """Trigger a PCIe rescan on the target host via pos.env.pcie_scan (uses this object's SSH credentials)."""
    return pos.env.pcie_scan(self.id, self.pw, self.nic_ssh)
def CheckRebuildComplete(self, arr_name):
    """Query whether the rebuild of array `arr_name` has finished, via pos.cli.check_rebuild_complete."""
    return pos.cli.check_rebuild_complete(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, arr_name)
def DeviceList(self):
    """Return the device list reported by the POS CLI (pos.cli.device_list) on the target host."""
    return pos.cli.device_list(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir)
def AddSpare(self, arr_name, dev_name):
    """Add device `dev_name` as a spare to array `arr_name`, via pos.cli.add_spare."""
    return pos.cli.add_spare(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, arr_name, dev_name)
def SetRebuildImpact(self, impact):
    """Set the rebuild performance impact level to `impact`, via pos.cli.set_rebuild_impact."""
    return pos.cli.set_rebuild_impact(self.id, self.pw, self.nic_ssh, self.pos_cli, self.pos_dir, impact)
| 53.624521
| 160
| 0.556445
| 1,890
| 13,996
| 3.948148
| 0.090476
| 0.086304
| 0.06111
| 0.091664
| 0.78022
| 0.776601
| 0.759984
| 0.746181
| 0.740552
| 0.727687
| 0
| 0.011353
| 0.301443
| 13,996
| 260
| 161
| 53.830769
| 0.751867
| 0.013075
| 0
| 0.625571
| 0
| 0
| 0.094189
| 0.017534
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054795
| false
| 0
| 0.027397
| 0.027397
| 0.273973
| 0.027397
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6762f18c73436e1bf56f644d10e9c95999f38808
| 324
|
py
|
Python
|
2015-09-16-complex/marko/main.py
|
EIK-LUG/CodeClubPython
|
fb0660ad85a7b0a17d33d37d18f8b41ae597e022
|
[
"WTFPL"
] | 2
|
2015-09-12T10:11:38.000Z
|
2015-09-13T13:18:25.000Z
|
2015-09-16-complex/marko/main.py
|
EIK-LUG/CodeClubPython
|
fb0660ad85a7b0a17d33d37d18f8b41ae597e022
|
[
"WTFPL"
] | 2
|
2015-09-12T07:18:15.000Z
|
2015-10-07T06:01:56.000Z
|
2015-09-16-complex/marko/main.py
|
EIK-LUG/CodeClubPython
|
fb0660ad85a7b0a17d33d37d18f8b41ae597e022
|
[
"WTFPL"
] | 5
|
2015-09-11T11:19:51.000Z
|
2018-02-08T18:17:44.000Z
|
from complex import Complex
if __name__ == "__main__":
    # Show the string form of a few complex numbers, covering positive,
    # zero, and negative components.
    samples = [
        Complex(1, 2),
        Complex(0, 2),
        Complex(1, -2),
        Complex(0, -2),
        Complex(0, -999),
    ]
    for sample in samples:
        print(sample)
    # Exercise each arithmetic operator once.
    print(Complex(1, 2) + Complex(1, 2))
    print(Complex(1, 2) - Complex(1, 2))
    print(Complex(4, 5) * Complex(3, 2))
    print(Complex(2, 1) / Complex(5, 6))
| 20.25
| 37
| 0.635802
| 55
| 324
| 3.6
| 0.272727
| 0.545455
| 0.459596
| 0.282828
| 0.636364
| 0.636364
| 0.636364
| 0.636364
| 0.636364
| 0.343434
| 0
| 0.101818
| 0.151235
| 324
| 16
| 38
| 20.25
| 0.618182
| 0
| 0
| 0
| 0
| 0
| 0.024615
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.090909
| 0
| 0.090909
| 0.818182
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
6766dd2dd4e1b6341a29dec87099c64159c83dad
| 415
|
py
|
Python
|
fedn/fedn/clients/reducer/statestore/reducerstatestore.py
|
Muresan73/fedn
|
a10c9412ba95e318649399f56a2c0fc7c3dc989b
|
[
"Apache-2.0"
] | 1
|
2021-01-16T03:05:23.000Z
|
2021-01-16T03:05:23.000Z
|
fedn/fedn/clients/reducer/statestore/reducerstatestore.py
|
Muresan73/fedn
|
a10c9412ba95e318649399f56a2c0fc7c3dc989b
|
[
"Apache-2.0"
] | null | null | null |
fedn/fedn/clients/reducer/statestore/reducerstatestore.py
|
Muresan73/fedn
|
a10c9412ba95e318649399f56a2c0fc7c3dc989b
|
[
"Apache-2.0"
] | 2
|
2021-02-26T09:16:19.000Z
|
2021-03-13T12:07:06.000Z
|
from abc import ABC, abstractmethod
class ReducerStateStore(ABC):
    """Abstract interface for a reducer state store.

    Concrete implementations must provide state inspection/transition and
    latest-model bookkeeping.
    """

    def __init__(self):
        pass

    @abstractmethod
    def state(self):
        """Return the current state."""

    @abstractmethod
    def transition(self, state):
        """Move the store to the given state."""

    @abstractmethod
    def set_latest(self, model_id):
        """Record ``model_id`` as the latest model."""

    @abstractmethod
    def get_latest(self):
        """Return the latest recorded model id."""

    @abstractmethod
    def is_inited(self):
        """Return whether the store has been initialised."""
| 15.961538
| 35
| 0.616867
| 43
| 415
| 5.767442
| 0.44186
| 0.362903
| 0.423387
| 0.302419
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.313253
| 415
| 26
| 36
| 15.961538
| 0.870175
| 0
| 0
| 0.578947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.315789
| false
| 0.315789
| 0.052632
| 0
| 0.421053
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
67709762e39789865c663548d005a7fdd13ddc1a
| 24
|
py
|
Python
|
addons14/sale_product_multi_add/tests/__init__.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | 1
|
2021-06-10T14:59:13.000Z
|
2021-06-10T14:59:13.000Z
|
addons14/sale_product_multi_add/tests/__init__.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | null | null | null |
addons14/sale_product_multi_add/tests/__init__.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | 1
|
2021-04-09T09:44:44.000Z
|
2021-04-09T09:44:44.000Z
|
from . import test_sale
| 12
| 23
| 0.791667
| 4
| 24
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 24
| 1
| 24
| 24
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
678a1f9b69e6a79dd34939c4d3d34b10437f3fb4
| 47
|
py
|
Python
|
twccli/twcc/services/__init__.py
|
gitter-badger/TWCC-CLI
|
fba509d77ed9636adca6039933970749e4c74968
|
[
"Apache-2.0"
] | 12
|
2019-04-27T07:45:02.000Z
|
2020-11-13T08:16:18.000Z
|
twccli/twcc/services/__init__.py
|
gitter-badger/TWCC-CLI
|
fba509d77ed9636adca6039933970749e4c74968
|
[
"Apache-2.0"
] | 23
|
2021-03-05T07:53:37.000Z
|
2022-03-20T03:12:33.000Z
|
twccli/twcc/services/__init__.py
|
gitter-badger/TWCC-CLI
|
fba509d77ed9636adca6039933970749e4c74968
|
[
"Apache-2.0"
] | 6
|
2019-02-27T00:19:11.000Z
|
2020-11-13T08:16:19.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2018 NCHC
| 11.75
| 23
| 0.574468
| 6
| 47
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135135
| 0.212766
| 47
| 3
| 24
| 15.666667
| 0.594595
| 0.87234
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
67d07dbe497eb16b153e3c4fb2144916a97ec7d1
| 113
|
py
|
Python
|
Decorators - Lab/Number Increment.py
|
DiyanKalaydzhiev23/OOP---Python
|
7ac424d5fb08a6bd28dc36593e45d949b3ac0cd0
|
[
"MIT"
] | null | null | null |
Decorators - Lab/Number Increment.py
|
DiyanKalaydzhiev23/OOP---Python
|
7ac424d5fb08a6bd28dc36593e45d949b3ac0cd0
|
[
"MIT"
] | null | null | null |
Decorators - Lab/Number Increment.py
|
DiyanKalaydzhiev23/OOP---Python
|
7ac424d5fb08a6bd28dc36593e45d949b3ac0cd0
|
[
"MIT"
] | null | null | null |
def number_increment(numbers):
    """Return a new list with every element of *numbers* increased by one."""
    def increase():
        # map at C speed; materialize so the caller gets a list
        return list(map(lambda value: value + 1, numbers))
    return increase()
| 16.142857
| 37
| 0.637168
| 15
| 113
| 4.733333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012048
| 0.265487
| 113
| 6
| 38
| 18.833333
| 0.843373
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
e1e10a6acba047d87c84efdac7654085df0a6f88
| 143
|
py
|
Python
|
mvpa_itab/script/viviana-hcp/final-analysis.py
|
robbisg/mvpa_itab_wu
|
e3cdb198a21349672f601cd34381e0895fa6484c
|
[
"MIT"
] | 1
|
2022-01-12T08:59:22.000Z
|
2022-01-12T08:59:22.000Z
|
mvpa_itab/script/viviana-hcp/final-analysis.py
|
robbisg/mvpa_itab_wu
|
e3cdb198a21349672f601cd34381e0895fa6484c
|
[
"MIT"
] | 46
|
2016-08-04T14:49:37.000Z
|
2022-03-09T08:47:48.000Z
|
mvpa_itab/script/viviana-hcp/final-analysis.py
|
robbisg/mvpa_itab_wu
|
e3cdb198a21349672f601cd34381e0895fa6484c
|
[
"MIT"
] | null | null | null |
# 1. Predict the dexterity class
# 2. Predict the dexterity score
# 3. Fingerprint
# 3.1 Tavor
# 3.2 Finn Nature behav
# 4. Linear models
| 11.916667
| 32
| 0.692308
| 23
| 143
| 4.304348
| 0.695652
| 0.20202
| 0.383838
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072072
| 0.223776
| 143
| 11
| 33
| 13
| 0.81982
| 0.874126
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c025dd27e6dac631dbda3fd2af9ea864d13c0533
| 430
|
py
|
Python
|
first_task/influencers/models.py
|
kdvedi321/Influencers_List
|
ed9e8c65ed0005bec1b14d7e3d63e7dd206e6d2c
|
[
"MIT"
] | 2
|
2019-06-20T17:29:29.000Z
|
2019-08-27T19:52:29.000Z
|
first_task/influencers/models.py
|
kdvedi321/Influencers_List
|
ed9e8c65ed0005bec1b14d7e3d63e7dd206e6d2c
|
[
"MIT"
] | null | null | null |
first_task/influencers/models.py
|
kdvedi321/Influencers_List
|
ed9e8c65ed0005bec1b14d7e3d63e7dd206e6d2c
|
[
"MIT"
] | null | null | null |
from django.db import models
class Data(models.Model):
    """Influencer record for the influencers list app.

    NOTE(review): all numeric-looking fields are stored as CharFields —
    presumably raw scraped text; confirm before doing arithmetic on them.
    """
    # influencer handle; defaults to empty string
    username = models.CharField(max_length=250, default='')
    # follower count as text, default '0'
    followerCount = models.CharField(max_length=500, default='0')
    # average likes ratio as text, default '0'
    avgLikesRatio = models.CharField(max_length=500, default='0')
    # average comments ratio as text, default '0'
    avgCommentsRatio = models.CharField(max_length=500, default='0')
    # picture value (very large max_length — presumably a URL or inline data; confirm); nullable
    picture = models.CharField(max_length=100000, null=True)

    def __str__(self):
        # human-readable representation used by the Django admin and shell
        return self.username
| 39.090909
| 68
| 0.732558
| 54
| 430
| 5.666667
| 0.5
| 0.245098
| 0.294118
| 0.392157
| 0.343137
| 0.343137
| 0.343137
| 0
| 0
| 0
| 0
| 0.057221
| 0.146512
| 430
| 11
| 69
| 39.090909
| 0.776567
| 0
| 0
| 0
| 0
| 0
| 0.006961
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.111111
| 0.111111
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
c051d0ad64fc68b54abf46cdef2ed4a6dd8594f8
| 2,368
|
py
|
Python
|
tests/sync_lockfile_test.py
|
anmut-consulting/pipenv-setup
|
b3f26a9796079f96e97bb3cb735efbccae6f2bc0
|
[
"MIT"
] | null | null | null |
tests/sync_lockfile_test.py
|
anmut-consulting/pipenv-setup
|
b3f26a9796079f96e97bb3cb735efbccae6f2bc0
|
[
"MIT"
] | null | null | null |
tests/sync_lockfile_test.py
|
anmut-consulting/pipenv-setup
|
b3f26a9796079f96e97bb3cb735efbccae6f2bc0
|
[
"MIT"
] | null | null | null |
from typing import Any
from vistir.compat import Path
import pytest
from pipenv_setup import msg_formatter
from pipenv_setup.main import cmd
from tests.conftest import assert_kw_args_eq, data
@pytest.mark.parametrize(
    ("source_pipfile_dirname", "update_count"),
    [("nasty_0", 23), ("no_original_kws_0", 23)],
)
def test_update(
    capsys, tmp_path, shared_datadir, source_pipfile_dirname, update_count
):  # type: (Any, Path, Path, str, int) -> None
    """
    test updating setup.py (when it already exists)
    """
    pipfile_dir = shared_datadir / source_pipfile_dirname
    # consistency fix: match the sibling test's spacing and quoting for the
    # expected-file path expression
    expected_setup_text = (pipfile_dir / "setup_lockfile_synced.py").read_text()
    with data(str(pipfile_dir), tmp_path):
        cmd(argv=["", "sync"])
        generated_setup = Path("setup.py")
        assert generated_setup.exists()
        generated_setup_text = generated_setup.read_text()
        # the synced keyword arguments must match, ignoring ordering
        for kw_arg_names in ("install_requires", "dependency_links"):
            assert_kw_args_eq(
                generated_setup_text,
                expected_setup_text,
                kw_arg_names,
                ordering_matters=False,
            )
    captured = capsys.readouterr()
    assert msg_formatter.update_success(update_count) in captured.out
@pytest.mark.parametrize(
    ("source_pipfile_dirname", "update_count"),
    [("nasty_0", 23), ("no_original_kws_0", 23)],
)
def test_only_setup_missing(
    capsys, tmp_path, shared_datadir, source_pipfile_dirname, update_count
):  # type: (Any, Path, Path, str, int) -> None
    """
    test setup.py generation (when it is missing)
    """
    source_dir = shared_datadir / source_pipfile_dirname
    expected_text = (source_dir / "setup_lockfile_synced.py").read_text()
    # NOTE(review): the original comment claimed the copied setup.py is
    # deleted here, but no deletion happens — presumably mode="pipfiles"
    # copies no setup.py at all; confirm against data()'s implementation.
    with data(source_pipfile_dirname, tmp_path, mode="pipfiles"):
        cmd(argv=["", "sync"])
        setup_file = Path("setup.py")
        assert setup_file.exists()
        actual_text = setup_file.read_text()
        for kw_name in ("install_requires", "dependency_links"):
            assert_kw_args_eq(
                actual_text,
                expected_text,
                kw_name,
                ordering_matters=False,
            )
    out = capsys.readouterr()
    assert msg_formatter.generate_success(update_count) in out.out, out.out
| 35.878788
| 85
| 0.689189
| 302
| 2,368
| 5.05298
| 0.294702
| 0.091743
| 0.091743
| 0.068152
| 0.762779
| 0.762779
| 0.722149
| 0.722149
| 0.722149
| 0.722149
| 0
| 0.006397
| 0.20777
| 2,368
| 66
| 85
| 35.878788
| 0.807036
| 0.09924
| 0
| 0.653846
| 0
| 0
| 0.123751
| 0.043789
| 0
| 0
| 0
| 0
| 0.134615
| 1
| 0.038462
| false
| 0
| 0.115385
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c068f76b066fce420bef7c9e2372987a42b3d592
| 41
|
py
|
Python
|
build/lib/aidatafactory/__init__.py
|
ngocuong0105/dataFactory
|
11d913ced2f6a513794621656ada1c50480699dc
|
[
"MIT"
] | null | null | null |
build/lib/aidatafactory/__init__.py
|
ngocuong0105/dataFactory
|
11d913ced2f6a513794621656ada1c50480699dc
|
[
"MIT"
] | null | null | null |
build/lib/aidatafactory/__init__.py
|
ngocuong0105/dataFactory
|
11d913ced2f6a513794621656ada1c50480699dc
|
[
"MIT"
] | null | null | null |
from aidatafactory.factory import Factory
| 41
| 41
| 0.902439
| 5
| 41
| 7.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 41
| 1
| 41
| 41
| 0.973684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
fbeef8bb7cc1e298fb2f959b5842f8752352bdf1
| 8,821
|
py
|
Python
|
humann/tests/advanced_tests_blastx_coverage.py
|
wbazant/humann
|
13e8c7910d9aff4cabb58df19aa652a3c20e101b
|
[
"MIT"
] | 72
|
2020-02-28T11:17:46.000Z
|
2022-03-29T07:40:37.000Z
|
humann/tests/advanced_tests_blastx_coverage.py
|
wbazant/humann
|
13e8c7910d9aff4cabb58df19aa652a3c20e101b
|
[
"MIT"
] | 25
|
2020-03-20T10:30:13.000Z
|
2022-03-07T23:39:22.000Z
|
humann/tests/advanced_tests_blastx_coverage.py
|
wbazant/humann
|
13e8c7910d9aff4cabb58df19aa652a3c20e101b
|
[
"MIT"
] | 41
|
2020-03-20T15:01:59.000Z
|
2022-02-22T01:12:27.000Z
|
import unittest
import logging
import cfg
import utils
import tempfile
import re
from humann.search import blastx_coverage
from humann import store
from humann import config
class TestBasicHumannBlastx_CoverageFunctions(unittest.TestCase):
    """
    Test the functions found in humann.search.nucleotide
    """

    def setUp(self):
        # route temp files to the system temp dir and use a fixed basename
        config.unnamed_temp_dir = tempfile.gettempdir()
        config.temp_dir = tempfile.gettempdir()
        config.file_basename = "HUMAnN_test"
        # set up nullhandler for logger
        logging.getLogger('humann.search.blastx_coverage').addHandler(logging.NullHandler())

    def _expected_protein_names(self, input_file, gene_index=0, remove_prefix=None):
        """Collect the reference protein names from a blastm8-like file.

        gene_index selects which chocophlan-delimited field holds the name;
        remove_prefix, when set, is stripped from each name (id-mapping case).
        Uses a context manager so the handle is always closed — the original
        tests opened the file and never closed it.
        """
        all_proteins = set()
        with open(input_file) as file_handle:
            for line in file_handle:
                # skip comment/header lines
                if re.search("^#", line):
                    continue
                data = line.strip().split(config.blast_delimiter)
                protein_name = data[config.blast_reference_index].split(
                    config.chocophlan_delimiter)[gene_index]
                if remove_prefix is not None:
                    protein_name = protein_name.replace(remove_prefix, "")
                all_proteins.add(protein_name)
        return all_proteins

    def _allowed_proteins_unfiltered(self, input_file, alignments=None):
        """Run blastx_coverage with the coverage filter disabled.

        Temporarily sets the coverage threshold to zero and restores the
        previous value even if the call raises (the original reset only on
        the success path).
        """
        current_coverage_threshold = config.translated_subject_coverage_threshold
        config.translated_subject_coverage_threshold = 0
        try:
            if alignments is None:
                return blastx_coverage.blastx_coverage(
                    input_file,
                    config.translated_subject_coverage_threshold,
                    log_messages=True)
            return blastx_coverage.blastx_coverage(
                input_file,
                config.translated_subject_coverage_threshold,
                alignments,
                log_messages=True)
        finally:
            # reset the coverage threshold
            config.translated_subject_coverage_threshold = current_coverage_threshold

    def test_blastx_coverage_gene_names_default(self):
        """
        Test the blastx_coverage function
        Test the gene names
        Test without filter
        """
        allowed_proteins = self._allowed_proteins_unfiltered(
            cfg.rapsearch2_output_file_without_header_coverage)
        all_proteins = self._expected_protein_names(
            cfg.rapsearch2_output_file_without_header_coverage)
        # check the expected proteins are found
        self.assertEqual(sorted(all_proteins), sorted(allowed_proteins))

    def test_blastx_coverage_gene_names_custom_annotation(self):
        """
        Test the blastx_coverage function
        Test the gene names with custom annotation
        Test without filter
        """
        allowed_proteins = self._allowed_proteins_unfiltered(
            cfg.rapsearch2_output_file_without_header_coverage_custom_annotations,
            alignments=store.Alignments())
        all_proteins = self._expected_protein_names(
            cfg.rapsearch2_output_file_without_header_coverage_custom_annotations)
        self.assertEqual(sorted(all_proteins), sorted(allowed_proteins))

    def test_blastx_coverage_gene_names_chocophlan_annoation(self):
        """
        Test the blastx_coverage function
        Test the gene names with chocophlan annotations
        Test without filter
        """
        allowed_proteins = self._allowed_proteins_unfiltered(
            cfg.rapsearch2_output_file_without_header_coverage_chocophlan_annotations,
            alignments=store.Alignments())
        # chocophlan annotations carry the gene name in a configured field
        all_proteins = self._expected_protein_names(
            cfg.rapsearch2_output_file_without_header_coverage_chocophlan_annotations,
            gene_index=config.chocophlan_gene_indexes[0])
        self.assertEqual(sorted(all_proteins), sorted(allowed_proteins))

    def test_blastx_coverage_gene_names_id_mapping(self):
        """
        Test the blastx_coverage function
        Test the gene names with id mapping
        Test without filter
        """
        # create a set of alignments and process the id mapping
        alignments = store.Alignments()
        alignments.process_id_mapping(cfg.coverage_id_mapping_file)
        allowed_proteins = self._allowed_proteins_unfiltered(
            cfg.rapsearch2_output_file_without_header_coverage,
            alignments=alignments)
        # just like the id mapping, remove the UniRef50_ prefix
        all_proteins = self._expected_protein_names(
            cfg.rapsearch2_output_file_without_header_coverage,
            remove_prefix="UniRef50_")
        self.assertEqual(sorted(all_proteins), sorted(allowed_proteins))

    def test_blastx_coverage(self):
        """
        Test the coverage filter
        Test with one protein with one alignment passing threshold
        Test with one protein with two alignments passing threshold (does not pass with only one alignment)
        Test with other proteins with one more more alignments not passing threshold
        """
        # create a set of alignments
        alignments = store.Alignments()
        # set the coverage threshold to a small value so as to have some alignments pass
        current_coverage_threshold = config.translated_subject_coverage_threshold
        config.translated_subject_coverage_threshold = 50.0
        try:
            # get the set of allowed proteins
            allowed_proteins = blastx_coverage.blastx_coverage(
                cfg.rapsearch2_output_file_without_header_coverage,
                config.translated_subject_coverage_threshold, alignments, True)
            # load the blastm8-like output (context manager ensures close)
            found_proteins = set()
            with open(cfg.rapsearch2_output_file_without_header_coverage) as file_handle:
                for line in file_handle:
                    if re.search("^#", line):
                        continue
                    data = line.strip().split(config.blast_delimiter)
                    referenceid = data[config.blast_reference_index]
                    gene, length, bug = alignments.process_reference_annotation(referenceid)
                    # (removed unused queryid/identity/alignment_length locals)
                    # the proteins that pass have "_coverage50" as part of their names
                    if "_coverage50" in gene:
                        found_proteins.add(gene)
        finally:
            # reset the coverage threshold
            config.translated_subject_coverage_threshold = current_coverage_threshold
        # check the values are unchanged
        self.assertEqual(sorted(allowed_proteins), sorted(found_proteins))
| 42.613527
| 133
| 0.682576
| 990
| 8,821
| 5.80404
| 0.138384
| 0.118343
| 0.080056
| 0.107901
| 0.783154
| 0.759659
| 0.754438
| 0.754438
| 0.754438
| 0.748869
| 0
| 0.005219
| 0.261422
| 8,821
| 206
| 134
| 42.820388
| 0.876746
| 0.22061
| 0
| 0.587629
| 0
| 0
| 0.010568
| 0.004378
| 0
| 0
| 0
| 0
| 0.051546
| 1
| 0.061856
| false
| 0
| 0.092784
| 0
| 0.164948
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
2263fb29cd8b1eadddf23f105d707403508ee172
| 53
|
py
|
Python
|
models/__init__.py
|
daemonslayer/image-completion-inpainting
|
944b47175866bd7cfc8339c3a475e4bcf1c72994
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
daemonslayer/image-completion-inpainting
|
944b47175866bd7cfc8339c3a475e4bcf1c72994
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
daemonslayer/image-completion-inpainting
|
944b47175866bd7cfc8339c3a475e4bcf1c72994
|
[
"MIT"
] | null | null | null |
from .generator import *
from .discriminator import *
| 26.5
| 28
| 0.792453
| 6
| 53
| 7
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132075
| 53
| 2
| 28
| 26.5
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
97d84b13a692f5017df353d235fffe66f51a1dfb
| 530
|
gyp
|
Python
|
deps/libgdal/gyp-formats/ogr_gtm.gyp
|
jimgambale/node-gdal
|
dc5c89fb23f1004732106250c8b7d57f380f9b61
|
[
"Apache-2.0"
] | 462
|
2015-01-07T23:09:18.000Z
|
2022-03-30T03:58:09.000Z
|
deps/libgdal/gyp-formats/ogr_gtm.gyp
|
jimgambale/node-gdal
|
dc5c89fb23f1004732106250c8b7d57f380f9b61
|
[
"Apache-2.0"
] | 196
|
2015-01-07T11:10:35.000Z
|
2022-03-29T08:50:30.000Z
|
deps/libgdal/gyp-formats/ogr_gtm.gyp
|
jimgambale/node-gdal
|
dc5c89fb23f1004732106250c8b7d57f380f9b61
|
[
"Apache-2.0"
] | 113
|
2015-01-15T02:24:18.000Z
|
2021-11-22T06:05:52.000Z
|
{
"includes": [
"../common.gypi"
],
"targets": [
{
"target_name": "libgdal_ogr_gtm_frmt",
"type": "static_library",
"sources": [
"../gdal/ogr/ogrsf_frmts/gtm/gtm.cpp",
"../gdal/ogr/ogrsf_frmts/gtm/gtmtracklayer.cpp",
"../gdal/ogr/ogrsf_frmts/gtm/gtmwaypointlayer.cpp",
"../gdal/ogr/ogrsf_frmts/gtm/ogrgtmdatasource.cpp",
"../gdal/ogr/ogrsf_frmts/gtm/ogrgtmdriver.cpp",
"../gdal/ogr/ogrsf_frmts/gtm/ogrgtmlayer.cpp"
],
"include_dirs": [
"../gdal/ogr/ogrsf_frmts/gtm"
]
}
]
}
| 23.043478
| 55
| 0.620755
| 63
| 530
| 5.015873
| 0.396825
| 0.155063
| 0.265823
| 0.376582
| 0.490506
| 0.363924
| 0
| 0
| 0
| 0
| 0
| 0
| 0.158491
| 530
| 22
| 56
| 24.090909
| 0.70852
| 0
| 0
| 0.090909
| 0
| 0
| 0.730189
| 0.54717
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3f03b813fe539c89d4faf3241e9c124a5b6846c2
| 4,198
|
py
|
Python
|
tests/test_social.py
|
sarumanplaysguitar/panoptes-utils
|
d5ac4d716fd1acd4771c0de08b7e2784a8319092
|
[
"MIT"
] | 3
|
2019-03-26T03:35:46.000Z
|
2021-08-16T16:59:12.000Z
|
tests/test_social.py
|
sarumanplaysguitar/panoptes-utils
|
d5ac4d716fd1acd4771c0de08b7e2784a8319092
|
[
"MIT"
] | 254
|
2019-03-29T05:42:54.000Z
|
2022-02-18T05:03:09.000Z
|
tests/test_social.py
|
sarumanplaysguitar/panoptes-utils
|
d5ac4d716fd1acd4771c0de08b7e2784a8319092
|
[
"MIT"
] | 9
|
2019-03-25T09:55:35.000Z
|
2021-05-18T02:45:17.000Z
|
import pytest
import tweepy
import requests
import unittest.mock
from panoptes.utils.social.twitter import SocialTwitter
from panoptes.utils.social.slack import SocialSlack
@pytest.fixture(scope='module')
def twitter_config():
    """Module-scoped fixture: mock credential set for SocialTwitter."""
    return {
        'consumer_key': 'mock_consumer_key',
        'consumer_secret': 'mock_consumer_secret',
        'access_token': 'mock_access_token',
        'access_token_secret': 'access_token_secret',
    }
@pytest.fixture(scope='module')
def slack_config():
    """Module-scoped fixture: mock webhook settings for SocialSlack."""
    return {
        'webhook_url': 'mock_webhook_url',
        'output_timestamp': False,
    }
# Twitter sink tests
def test_no_consumer_key(twitter_config):
    """SocialTwitter must raise ValueError when consumer_key is absent."""
    with unittest.mock.patch.dict(twitter_config), pytest.raises(ValueError) as ve:
        del twitter_config['consumer_key']
        SocialTwitter(**twitter_config)
        assert False  # We don't reach this point
    assert str(ve.value) == 'consumer_key parameter is not defined.'
def test_no_consumer_secret(twitter_config):
    """SocialTwitter must raise ValueError when consumer_secret is absent."""
    with unittest.mock.patch.dict(twitter_config), pytest.raises(ValueError) as ve:
        del twitter_config['consumer_secret']
        SocialTwitter(**twitter_config)
        assert False  # We don't reach this point
    assert str(ve.value) == 'consumer_secret parameter is not defined.'
def test_no_access_token(twitter_config):
    """SocialTwitter must raise ValueError when access_token is absent."""
    with unittest.mock.patch.dict(twitter_config), pytest.raises(ValueError) as ve:
        del twitter_config['access_token']
        SocialTwitter(**twitter_config)
        assert False  # We don't reach this point
    assert str(ve.value) == 'access_token parameter is not defined.'
def test_no_access_token_secret(twitter_config):
    """SocialTwitter must raise ValueError when access_token_secret is absent."""
    with unittest.mock.patch.dict(twitter_config), pytest.raises(ValueError) as ve:
        del twitter_config['access_token_secret']
        SocialTwitter(**twitter_config)
        assert False  # We don't reach this point
    assert str(ve.value) == 'access_token_secret parameter is not defined.'
def test_send_message_twitter(twitter_config):
    """send_message posts the message with ' - timestamp' appended."""
    with unittest.mock.patch.object(tweepy.API, 'update_status') as mock_update_status:
        poster = SocialTwitter(**twitter_config)
        message = "mock_message"
        timestamp = "mock_timestamp"
        poster.send_message(message, timestamp)
        expected_status = '{} - {}'.format(message, timestamp)
        mock_update_status.assert_called_once_with(expected_status)
def test_send_message_twitter_no_timestamp(twitter_config):
    """With output_timestamp False, send_message posts the message alone."""
    patched_config = unittest.mock.patch.dict(twitter_config, {'output_timestamp': False})
    patched_api = unittest.mock.patch.object(tweepy.API, 'update_status')
    with patched_config, patched_api as mock_update_status:
        poster = SocialTwitter(**twitter_config)
        message = "mock_message"
        timestamp = "mock_timestamp"
        poster.send_message(message, timestamp)
        mock_update_status.assert_called_once_with(message)
# Slack sink tests
def test_no_webhook_url(slack_config):
    """SocialSlack must raise ValueError when webhook_url is absent.

    Fixes: the original rebound the fixture name ``slack_config`` to the
    constructor result (shadowing the fixture dict) and, unlike the sibling
    twitter tests, had no unreachable-code guard after the raising call.
    """
    with unittest.mock.patch.dict(slack_config), pytest.raises(ValueError) as ve:
        del slack_config['webhook_url']
        SocialSlack(**slack_config)
        assert False  # We don't reach this point
    assert 'webhook_url parameter is not defined.' == str(ve.value)
def test_send_message_slack(slack_config):
    """send_message posts only the text when output_timestamp is off."""
    with unittest.mock.patch.object(requests, 'post') as mock_post:
        poster = SocialSlack(**slack_config)
        message = "mock_message"
        timestamp = "mock_timestamp"
        poster.send_message(message, timestamp)
        mock_post.assert_called_once_with(
            slack_config['webhook_url'], json={'text': message})
def test_send_message_slack_timestamp(slack_config):
    """With output_timestamp True, the posted text includes the timestamp."""
    patched_config = unittest.mock.patch.dict(slack_config, {'output_timestamp': True})
    patched_post = unittest.mock.patch.object(requests, 'post')
    with patched_config, patched_post as mock_post:
        poster = SocialSlack(**slack_config)
        message = "mock_message"
        timestamp = "mock_timestamp"
        poster.send_message(message, timestamp)
        mock_post.assert_called_once_with(
            slack_config['webhook_url'],
            json={'text': '{} - {}'.format(message, timestamp)})
| 40.757282
| 158
| 0.733921
| 535
| 4,198
| 5.439252
| 0.127103
| 0.107216
| 0.072165
| 0.082474
| 0.795876
| 0.723711
| 0.712715
| 0.702749
| 0.702749
| 0.652921
| 0
| 0
| 0.168175
| 4,198
| 102
| 159
| 41.156863
| 0.833333
| 0.033111
| 0
| 0.410959
| 0
| 0
| 0.164857
| 0
| 0
| 0
| 0
| 0
| 0.178082
| 1
| 0.150685
| false
| 0
| 0.082192
| 0
| 0.260274
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3f06f9c8e3b30708b604afdd677fb9e09ae0f13d
| 158
|
py
|
Python
|
test/test_edit_group.py
|
SherMary/python_training
|
071f9405ffb97ea2243ac2906713c6fa8ac62ba7
|
[
"Apache-2.0"
] | null | null | null |
test/test_edit_group.py
|
SherMary/python_training
|
071f9405ffb97ea2243ac2906713c6fa8ac62ba7
|
[
"Apache-2.0"
] | null | null | null |
test/test_edit_group.py
|
SherMary/python_training
|
071f9405ffb97ea2243ac2906713c6fa8ac62ba7
|
[
"Apache-2.0"
] | null | null | null |
from model.group import Group
def test_edit_group(app):
    """Edit a group's name/header/footer through the app fixture's group helper."""
    edited = Group(name="Edited name", header="Edited header", footer="Edited footer")
    app.group.edit_group(edited)
| 26.333333
| 99
| 0.753165
| 24
| 158
| 4.833333
| 0.5
| 0.155172
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113924
| 158
| 5
| 100
| 31.6
| 0.828571
| 0
| 0
| 0
| 0
| 0
| 0.234177
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3f29946c30434b65cc676e3cab111a3725998d71
| 1,410
|
py
|
Python
|
settings/config.template.py
|
philipsales/airflow-couchbase-elasticsearch
|
f9bf06d88956d3e2322a7dfa6207becbdc1d1769
|
[
"MIT"
] | null | null | null |
settings/config.template.py
|
philipsales/airflow-couchbase-elasticsearch
|
f9bf06d88956d3e2322a7dfa6207becbdc1d1769
|
[
"MIT"
] | null | null | null |
settings/config.template.py
|
philipsales/airflow-couchbase-elasticsearch
|
f9bf06d88956d3e2322a7dfa6207becbdc1d1769
|
[
"MIT"
] | null | null | null |
import sys
# Blank per-environment Couchbase settings; only 'local' pre-fills values.
# TIMEOUT is in seconds for every environment.
_CB_BLANK = {
    'BUCKET': '',
    'USERNAME': '',
    'PASSWORD': '',
    'HOST': '',
    'PORT': '',
    'TIMEOUT': 7200,
}

# Per-environment Couchbase connection settings. Fill in the blanks when
# deploying to dev/uat/prod.
CouchbaseConfig = {
    'local': dict(_CB_BLANK, BUCKET='awhcurisdb_dev', HOST='couchbase://127.0.0.1/'),
    'dev': dict(_CB_BLANK),
    'uat': dict(_CB_BLANK),
    'prod': dict(_CB_BLANK),
}
# All three Elasticsearch environments currently share one template; each
# entry gets its own copy so environments can diverge independently.
_ES_TEMPLATE = {
    'USERNAME': '',
    'PASSWORD': '',
    'INDEX': 'philippines',
    'TYPE': 'patients',
    'SCHEME': 'HTTP',
    'HOST': 'localhost',
    'PORT': 9200,
    'TIMEOUT': 7200,
}

ElasticSearchConfig = {env: dict(_ES_TEMPLATE) for env in ('local', 'dev', 'prod')}
| 18.8
| 41
| 0.369504
| 88
| 1,410
| 5.909091
| 0.318182
| 0.215385
| 0.153846
| 0.15
| 0.732692
| 0.732692
| 0.732692
| 0.496154
| 0.496154
| 0.496154
| 0
| 0.05509
| 0.407801
| 1,410
| 74
| 42
| 19.054054
| 0.567665
| 0
| 0
| 0.776119
| 0
| 0
| 0.316726
| 0.015658
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.104478
| 0.014925
| 0
| 0.014925
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
3f3498a093c64365739f914b74edd4f11d9b42ea
| 2,323
|
py
|
Python
|
ALREC_Method/get_experimental_results.py
|
proy3/Abnormal_Trajectory_Classifier
|
a6b27c6847262e9703a0f3404c85c135415c1d4c
|
[
"MIT"
] | 6
|
2019-10-29T03:05:14.000Z
|
2022-03-18T05:14:25.000Z
|
ALREC_Method/get_experimental_results.py
|
proy3/Abnormal_Trajectory_Classifier
|
a6b27c6847262e9703a0f3404c85c135415c1d4c
|
[
"MIT"
] | 1
|
2022-03-11T03:49:34.000Z
|
2022-03-11T03:49:34.000Z
|
ALREC_Method/get_experimental_results.py
|
proy3/Abnormal_Trajectory_Classifier
|
a6b27c6847262e9703a0f3404c85c135415c1d4c
|
[
"MIT"
] | 1
|
2021-12-15T09:21:26.000Z
|
2021-12-15T09:21:26.000Z
|
"""
Generates a tex file containing the Latex table setup with all the relevant results.
"""
import input_data as data
import rene.analyse_results as rene_ar
import rouen.analyse_results as rouen_ar
import sherbrooke.analyse_results as sherb_ar
import stmarc.analyse_results as stmarc_ar
global_result_path_name = 'global_result/experimental_results_4.tex'
data.make_dir_if_new(global_result_path_name)
# Global result
with open(global_result_path_name, 'w') as global_result_file:
global_result_file.write(r'\documentclass{article}' + '\n\n')
global_result_file.write(r'\usepackage{multirow}' + '\n')
global_result_file.write(r'\begin{document}' + '\n\n')
global_result_file.write(r'\begin{table}' + '\n')
global_result_file.write(r'\centering' + '\n')
global_result_file.write(r'\caption{Results obtained by applying the trained model on the corresponding samples.}' \
r'\label{tab2}' + '\n')
global_result_file.write(r'\begin{tabular}{|c|c|c|c||c|c|c|c|c|c|c|c|c|c|}' + '\n')
global_result_file.write(r'\multicolumn{4}{c}{} & \multicolumn{10}{c}{Method (\%)} \\ \cline{5-14}' + '\n')
global_result_file.write(r'\multicolumn{4}{c}{} & \multicolumn{2}{|c|}{OC-SVM} & \multicolumn{2}{c|}{IF} & ' \
r'\multicolumn{2}{c|}{AE} & \multicolumn{2}{c|}{DAE} & \multicolumn{2}{c|}{ALREC (ours)} \\ \hline' + '\n')
global_result_file.write(r'Data & Type & Size$_N$ & Size$_A$ & \textnormal{NDA} & \textnormal{ADA} & ' \
r'\textnormal{NDA} & \textnormal{ADA} & \textnormal{NDA} & \textnormal{ADA} & ' \
r'\textnormal{NDA} & \textnormal{ADA} & \textnormal{NDA} & \textnormal{ADA} \\ \hline \hline' + '\n')
sherb_ar.get_comparison_results(global_result_file)
global_result_file.write(r'\hline' + '\n')
rouen_ar.get_comparison_results(global_result_file)
global_result_file.write(r'\hline' + '\n')
stmarc_ar.get_comparison_results(global_result_file)
global_result_file.write(r'\hline' + '\n')
rene_ar.get_comparison_results(global_result_file)
global_result_file.write(r'\hline' + '\n')
global_result_file.write(r'\end{tabular}' + '\n')
global_result_file.write(r'\end{table}' + '\n\n')
global_result_file.write(r'\end{document}')
| 46.46
| 125
| 0.672406
| 332
| 2,323
| 4.457831
| 0.243976
| 0.218919
| 0.237838
| 0.241216
| 0.585811
| 0.585811
| 0.511486
| 0.366216
| 0.340541
| 0.340541
| 0
| 0.007168
| 0.159277
| 2,323
| 49
| 126
| 47.408163
| 0.75064
| 0.042617
| 0
| 0.121212
| 1
| 0.212121
| 0.386282
| 0.126354
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.151515
| 0
| 0.151515
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
58b3765371d1d850790a7231f1fae23dbd77e217
| 62
|
py
|
Python
|
pyxb/bundles/opengis/iso19139/v20070417/gmx.py
|
eLBati/pyxb
|
14737c23a125fd12c954823ad64fc4497816fae3
|
[
"Apache-2.0"
] | 123
|
2015-01-12T06:43:22.000Z
|
2022-03-20T18:06:46.000Z
|
pyxb/bundles/opengis/iso19139/v20070417/gmx.py
|
eLBati/pyxb
|
14737c23a125fd12c954823ad64fc4497816fae3
|
[
"Apache-2.0"
] | 103
|
2015-01-08T18:35:57.000Z
|
2022-01-18T01:44:14.000Z
|
pyxb/bundles/opengis/iso19139/v20070417/gmx.py
|
eLBati/pyxb
|
14737c23a125fd12c954823ad64fc4497816fae3
|
[
"Apache-2.0"
] | 54
|
2015-02-15T17:12:00.000Z
|
2022-03-07T23:02:32.000Z
|
from pyxb.bundles.opengis.iso19139.v20070417.raw.gmx import *
| 31
| 61
| 0.822581
| 9
| 62
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.224138
| 0.064516
| 62
| 1
| 62
| 62
| 0.655172
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
58c1ce79a88303a2cb91508e5c056ef6773a65db
| 185
|
py
|
Python
|
mytest1.py
|
Yousef11111/test_tensorD_functions
|
ce672c7ba854c09d7c748dedb0a6141487caef56
|
[
"MIT"
] | null | null | null |
mytest1.py
|
Yousef11111/test_tensorD_functions
|
ce672c7ba854c09d7c748dedb0a6141487caef56
|
[
"MIT"
] | null | null | null |
mytest1.py
|
Yousef11111/test_tensorD_functions
|
ce672c7ba854c09d7c748dedb0a6141487caef56
|
[
"MIT"
] | null | null | null |
import tensorD
import numpy as np

# Small 3x2 matrix used to exercise a few private tensorD ops.
mat = np.matrix('2 3;4 5; 6 7')
ops = tensorD.base.ops

print(mat)
print(ops._skip(mat, 1))
print(ops._gen_perm(6, 2))
print(ops._gen_perm(8, 4))
| 20.555556
| 38
| 0.72973
| 40
| 185
| 3.25
| 0.525
| 0.276923
| 0.369231
| 0.438462
| 0.4
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0.065089
| 0.086486
| 185
| 8
| 39
| 23.125
| 0.704142
| 0
| 0
| 0
| 0
| 0
| 0.065217
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.285714
| 0
| 0.285714
| 0.571429
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
58ed426adb2c5caf9d35327fe6c9b032181c7112
| 99
|
py
|
Python
|
statesampling/colvars/__init__.py
|
delemottelab/state-sampling
|
f0f56430ce581f0338771c126da212ecc2f218a0
|
[
"MIT"
] | null | null | null |
statesampling/colvars/__init__.py
|
delemottelab/state-sampling
|
f0f56430ce581f0338771c126da212ecc2f218a0
|
[
"MIT"
] | null | null | null |
statesampling/colvars/__init__.py
|
delemottelab/state-sampling
|
f0f56430ce581f0338771c126da212ecc2f218a0
|
[
"MIT"
] | null | null | null |
from .cvs import CV, ContactCv, InverseContactCv, RmsdCv
from .eval_utils import *
from . import io
| 33
| 56
| 0.787879
| 14
| 99
| 5.5
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141414
| 99
| 3
| 57
| 33
| 0.905882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
452c477832ff322f813981ef5c4ca6a0eb2d7a98
| 243
|
py
|
Python
|
tests/utils/setup_constants.py
|
SAP/service-fabrik-backup-restore
|
27c29444a4fdea4430da93a05c7f78b23546d603
|
[
"Apache-2.0"
] | 2
|
2021-03-01T00:38:47.000Z
|
2021-12-15T04:25:41.000Z
|
tests/utils/setup_constants.py
|
cloudfoundry-incubator/service-fabrik-backup-restore
|
27c29444a4fdea4430da93a05c7f78b23546d603
|
[
"Apache-2.0"
] | 57
|
2017-12-04T16:46:41.000Z
|
2022-03-24T11:43:31.000Z
|
tests/utils/setup_constants.py
|
cloudfoundry-incubator/service-fabrik-backup-restore
|
27c29444a4fdea4430da93a05c7f78b23546d603
|
[
"Apache-2.0"
] | 16
|
2017-12-04T16:45:23.000Z
|
2019-09-17T11:42:19.000Z
|
import os

# These variables must be exported before the backup/restore modules are
# imported, because they read the environment at import time.
os.environ.update({
    'SF_IAAS_CLIENT_MAX_RETRY': '1',
    'SF_BACKUP_RESTORE_LOG_DIRECTORY': 'tests/logs',
    'SF_BACKUP_RESTORE_LAST_OPERATION_DIRECTORY': 'tests/logs',
})
| 40.5
| 71
| 0.814815
| 35
| 243
| 5.285714
| 0.657143
| 0.145946
| 0.178378
| 0.183784
| 0.259459
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004444
| 0.074074
| 243
| 6
| 71
| 40.5
| 0.817778
| 0.213992
| 0
| 0
| 0
| 0
| 0.621053
| 0.510526
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
453b53bdfe74812493c56d432388b3e9f7091914
| 116
|
py
|
Python
|
cookbook/core/containerization/workflow_labels_annotations.py
|
mayitbeegh/flytesnacks
|
35fe9db45f08fce3d94923b4245b1a9980a915ef
|
[
"Apache-2.0"
] | null | null | null |
cookbook/core/containerization/workflow_labels_annotations.py
|
mayitbeegh/flytesnacks
|
35fe9db45f08fce3d94923b4245b1a9980a915ef
|
[
"Apache-2.0"
] | null | null | null |
cookbook/core/containerization/workflow_labels_annotations.py
|
mayitbeegh/flytesnacks
|
35fe9db45f08fce3d94923b4245b1a9980a915ef
|
[
"Apache-2.0"
] | null | null | null |
"""
Adding Workflow Labels and Annotations
---------------------------------------
.. NOTE::
Coming soon 🛠
"""
| 14.5
| 39
| 0.413793
| 9
| 116
| 5.444444
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155172
| 116
| 8
| 40
| 14.5
| 0.489796
| 0.931034
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
18af4312b1b7fc0810c4e9b7ffdbbd5b19e97b12
| 84
|
py
|
Python
|
catalyst/dl/runner/__init__.py
|
denyhoof/catalyst
|
a340450076f7846007bc5695e5163e15b7ad9575
|
[
"Apache-2.0"
] | 2
|
2021-02-22T12:15:41.000Z
|
2021-05-02T15:22:24.000Z
|
catalyst/dl/runner/__init__.py
|
denyhoof/catalyst
|
a340450076f7846007bc5695e5163e15b7ad9575
|
[
"Apache-2.0"
] | null | null | null |
catalyst/dl/runner/__init__.py
|
denyhoof/catalyst
|
a340450076f7846007bc5695e5163e15b7ad9575
|
[
"Apache-2.0"
] | 1
|
2020-09-24T00:34:07.000Z
|
2020-09-24T00:34:07.000Z
|
# flake8: noqa
from .runner import Runner
from .supervised import SupervisedRunner
| 16.8
| 40
| 0.809524
| 10
| 84
| 6.8
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013889
| 0.142857
| 84
| 4
| 41
| 21
| 0.930556
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
18b30945f5149955dbe111fd3e688d0552aaaf17
| 61
|
py
|
Python
|
getbrightnessval.py
|
Justgamer101/picture-to-schematic
|
31f0a0c53f31e28caebdee3effd62ac16f4324b3
|
[
"MIT"
] | 1
|
2021-11-19T21:12:40.000Z
|
2021-11-19T21:12:40.000Z
|
imagetoschematic/getbrightnessval.py
|
Justgamer101/picture-to-schematic
|
31f0a0c53f31e28caebdee3effd62ac16f4324b3
|
[
"MIT"
] | null | null | null |
imagetoschematic/getbrightnessval.py
|
Justgamer101/picture-to-schematic
|
31f0a0c53f31e28caebdee3effd62ac16f4324b3
|
[
"MIT"
] | null | null | null |
def get_bright(r, g, b):
    """Return the brightness of an RGB colour as the mean of its channels."""
    return (r + g + b) / 3
| 12.2
| 30
| 0.47541
| 12
| 61
| 2.333333
| 0.75
| 0.142857
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02439
| 0.327869
| 61
| 4
| 31
| 15.25
| 0.658537
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
18c47a0d84fa5ed12fc71aeccd129868ce3980b3
| 101
|
py
|
Python
|
cleansers.py
|
winsbe01/ledger-buddy
|
e850990fdb93b3ccae443245859ee4b909b3387b
|
[
"BSD-3-Clause"
] | null | null | null |
cleansers.py
|
winsbe01/ledger-buddy
|
e850990fdb93b3ccae443245859ee4b909b3387b
|
[
"BSD-3-Clause"
] | null | null | null |
cleansers.py
|
winsbe01/ledger-buddy
|
e850990fdb93b3ccae443245859ee4b909b3387b
|
[
"BSD-3-Clause"
] | null | null | null |
def fix_case(in_str):
    """Return *in_str* with each space-separated word title-cased.

    The first character of every word is upper-cased and the rest
    lower-cased. Spacing is preserved exactly: splitting on a single
    space keeps empty tokens, so doubled/leading/trailing spaces survive
    the round trip.
    """
    # Bug fix: s[0] raised IndexError on empty tokens (e.g. fix_case("")
    # or doubled spaces); s[:1] is safe on the empty string.
    return " ".join(s[:1].upper() + s[1:].lower() for s in in_str.split(" "))
| 33.666667
| 78
| 0.574257
| 19
| 101
| 2.894737
| 0.736842
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023529
| 0.158416
| 101
| 2
| 79
| 50.5
| 0.623529
| 0
| 0
| 0
| 0
| 0
| 0.019802
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
18d158d8dc4b4c9785c974a5d9e89b0b3514dfc9
| 23
|
py
|
Python
|
.history/py/main_20210614114409.py
|
minefarmer/Coding101-OOP
|
d5655977559e3bd1acf6a4f185a6121cc3b05ce4
|
[
"Unlicense"
] | null | null | null |
.history/py/main_20210614114409.py
|
minefarmer/Coding101-OOP
|
d5655977559e3bd1acf6a4f185a6121cc3b05ce4
|
[
"Unlicense"
] | null | null | null |
.history/py/main_20210614114409.py
|
minefarmer/Coding101-OOP
|
d5655977559e3bd1acf6a4f185a6121cc3b05ce4
|
[
"Unlicense"
] | null | null | null |
class Light:
pass
| 5.75
| 12
| 0.608696
| 3
| 23
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.347826
| 23
| 3
| 13
| 7.666667
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
18f255996476291591b055ffc59967844ead74dd
| 233
|
py
|
Python
|
src/wodoo/version.py
|
sbidoul/wodoo
|
ea9801d287446bd0e4613d407f92bbad3b02e5bc
|
[
"MIT"
] | 1
|
2021-12-22T12:55:19.000Z
|
2021-12-22T12:55:19.000Z
|
src/wodoo/version.py
|
sbidoul/wodoo
|
ea9801d287446bd0e4613d407f92bbad3b02e5bc
|
[
"MIT"
] | null | null | null |
src/wodoo/version.py
|
sbidoul/wodoo
|
ea9801d287446bd0e4613d407f92bbad3b02e5bc
|
[
"MIT"
] | 1
|
2019-12-19T13:35:03.000Z
|
2019-12-19T13:35:03.000Z
|
import sys

# importlib.metadata only exists on Python >= 3.8; older interpreters fall
# back to the importlib_metadata backport. Both are bound to the same local
# name so the call below is version-agnostic.
if sys.version_info < (3, 8):  # pragma: no cover (<PY38)
    import importlib_metadata
else:  # pragma: no cover (PY38+)
    import importlib.metadata as importlib_metadata

# Version string of the installed "wodoo" distribution.
version = importlib_metadata.version("wodoo")
| 25.888889
| 57
| 0.729614
| 31
| 233
| 5.354839
| 0.516129
| 0.409639
| 0.156627
| 0.204819
| 0.481928
| 0.481928
| 0.481928
| 0
| 0
| 0
| 0
| 0.030928
| 0.167382
| 233
| 8
| 58
| 29.125
| 0.824742
| 0.2103
| 0
| 0
| 0
| 0
| 0.027624
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7a0e7876539759e1e550849d8b02ec2c8d5228e9
| 1,383
|
py
|
Python
|
tark/operators/default_operators.py
|
TreeboHotels/Tark
|
591b253c980bbd926900ef65137b947dead5d89f
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
tark/operators/default_operators.py
|
TreeboHotels/Tark
|
591b253c980bbd926900ef65137b947dead5d89f
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
tark/operators/default_operators.py
|
TreeboHotels/Tark
|
591b253c980bbd926900ef65137b947dead5d89f
|
[
"BSD-2-Clause-FreeBSD"
] | 1
|
2019-04-11T07:55:01.000Z
|
2019-04-11T07:55:01.000Z
|
from tark.operators.base_operator import BaseOperator
class Equal(BaseOperator):
    """Binary operator: true when both operands hold equal values."""

    NAME = "equal"

    def operate(self, operand_one, operand_two):
        lhs = operand_one.get_value()
        rhs = operand_two.get_value()
        return lhs == rhs
class NotEqual(BaseOperator):
    """Binary operator: true when the operands hold different values."""

    NAME = "not_equal"

    def operate(self, operand_one, operand_two):
        lhs = operand_one.get_value()
        rhs = operand_two.get_value()
        return lhs != rhs
class GreaterThan(BaseOperator):
    """Binary operator: true when the first operand's value is strictly greater.

    Bug fix: ``operand_one.get_value`` was missing its call parentheses, so
    the comparison was between a bound method object and a value (a TypeError
    on Python 3) instead of between the two operand values, unlike every
    sibling operator in this module.
    """

    NAME = "greater_than"

    def operate(self, operand_one, operand_two):
        return operand_one.get_value() > operand_two.get_value()
class LessThan(BaseOperator):
    """Binary operator: true when the first operand's value is strictly smaller."""

    NAME = "less_than"

    def operate(self, operand_one, operand_two):
        lhs = operand_one.get_value()
        rhs = operand_two.get_value()
        return lhs < rhs
class GreaterThanEqual(BaseOperator):
    """Binary operator: true when the first operand's value is greater or equal."""

    NAME = "greater_than_equal"

    def operate(self, operand_one, operand_two):
        lhs = operand_one.get_value()
        rhs = operand_two.get_value()
        return lhs >= rhs
class LessThanEqual(BaseOperator):
    """Binary operator: true when the first operand's value is smaller or equal."""

    NAME = "less_than_equal"

    def operate(self, operand_one, operand_two):
        lhs = operand_one.get_value()
        rhs = operand_two.get_value()
        return lhs <= rhs
class And(BaseOperator):
    """Logical conjunction of the two raw operands (not their .get_value())."""

    NAME = "and"

    def operate(self, operand_one, operand_two):
        # Equivalent to ``operand_one and operand_two``: yields the second
        # operand when the first is truthy, else the first operand itself.
        return operand_two if operand_one else operand_one
class Or(BaseOperator):
    """Logical disjunction of the two raw operands (not their .get_value())."""

    NAME = "or"

    def operate(self, operand_one, operand_two):
        # Equivalent to ``operand_one or operand_two``: yields the first
        # operand when it is truthy, else the second operand.
        return operand_one if operand_one else operand_two
| 20.641791
| 65
| 0.708604
| 176
| 1,383
| 5.272727
| 0.170455
| 0.172414
| 0.12069
| 0.181034
| 0.670259
| 0.670259
| 0.670259
| 0.670259
| 0.670259
| 0.670259
| 0
| 0
| 0.194505
| 1,383
| 66
| 66
| 20.954545
| 0.833034
| 0
| 0
| 0.242424
| 0
| 0
| 0.052822
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.242424
| false
| 0
| 0.030303
| 0.242424
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
e131aa9b0a9084142d87f9ac6e55c366473f26ae
| 3,827
|
py
|
Python
|
hring/src/Script/DeC/sim_BLESS_synth.py
|
anderson1008/Noculator
|
411964ce333c3bd587840554efef6e61c0b9b4d5
|
[
"MIT"
] | null | null | null |
hring/src/Script/DeC/sim_BLESS_synth.py
|
anderson1008/Noculator
|
411964ce333c3bd587840554efef6e61c0b9b4d5
|
[
"MIT"
] | null | null | null |
hring/src/Script/DeC/sim_BLESS_synth.py
|
anderson1008/Noculator
|
411964ce333c3bd587840554efef6e61c0b9b4d5
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
"""Injection-rate sweep driver for BLESS mesh simulations.

For each mesh size (4x4, 8x8, 16x16) this runs SIM_NUM simulator
invocations, increasing the synthetic injection rate by ``synth_rate_base``
per run, and writes one output file per run under the corresponding
results directory. The previous version repeated the 13-line command
construction verbatim for each mesh size; it is now built in one place.
"""
import sys
import os

workload_dir = "../bin/workload_list/"
SIM_NUM = 500
insns = "100000"

#################################### uniform_random #########################################
traffic = 'uniform_random'  # [bit_complement, transpose, uniform_random]
synth_reads_fraction = 0.8
synth_rate_base = 0.0005
bSynthBitComplement = "false"
bSynthTranspose = "false"
bSynthHotspot = "false"
randomHotspot = "false"
workload = "workloads_null"
router_algorithm = "DR_FLIT_SWITCHED_OLDEST_FIRST"
router_addrPacketSize = "1"
router_dataPacketSize = "1"
router_maxPacketSize = "1"
topology = "Mesh"
randomize_defl = "true"


def _run_sweep(out_dir, nr_x, nr_y):
    """Run the SIM_NUM-point injection-rate sweep for one mesh size.

    NOTE(review): the original comment claimed a sweep of 0.0005 - 0.1 at
    0.0005 intervals, but 500 steps of 0.0005 reach 0.25 -- confirm the
    intended range before relying on it.
    """
    for sim_index in range(1, SIM_NUM + 1, 1):
        print("New Simulation!")
        synth_rate = str(synth_rate_base * sim_index)
        out_file = "sim_" + str(sim_index) + ".out"
        # Every option value is already a string, so plain concatenation
        # reproduces the exact simulator invocation.
        command_line = (
            "mono ../bin/sim.exe -config ../bin/config.txt"
            + " -output " + out_dir + out_file
            + " -workload " + workload_dir + workload + ' 1'
            + " -router.algorithm " + router_algorithm
            + " -router.addrPacketSize " + router_addrPacketSize
            + " -router.dataPacketSize " + router_dataPacketSize
            + " -router.maxPacketSize " + router_maxPacketSize
            + " -network_nrX " + nr_x
            + " -network_nrY " + nr_y
            + " -topology " + topology
            + " -randomize_defl " + randomize_defl
            + " -synth_rate " + synth_rate
            + ' -insns ' + insns
            + ' -bSynthBitComplement ' + bSynthBitComplement
            + ' -bSynthTranspose ' + bSynthTranspose
            + ' -bSynthHotspot ' + bSynthHotspot
            + ' -randomHotspot ' + randomHotspot
        )
        os.system(command_line)


# One sweep per mesh size; results are grouped by topology dimensions.
for _mesh in ("4", "8", "16"):
    out_dir = "../results/Synthetic_16B/" + traffic + "/BLESS/" + _mesh + "x" + _mesh + "/"
    _run_sweep(out_dir, _mesh, _mesh)
| 60.746032
| 691
| 0.660831
| 413
| 3,827
| 5.854722
| 0.208232
| 0.048387
| 0.027295
| 0.027295
| 0.775434
| 0.775434
| 0.775434
| 0.760132
| 0.760132
| 0.760132
| 0
| 0.021109
| 0.17063
| 3,827
| 62
| 692
| 61.725806
| 0.740706
| 0.053044
| 0
| 0.391304
| 0
| 0
| 0.355893
| 0.093349
| 0.065217
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.043478
| 0
| 0.043478
| 0.065217
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e15ccad94fd6e68c7dee2fe5bf4d8d1f6b6594d2
| 172
|
py
|
Python
|
ETL/Riders/cloudstitch_to_riders.py
|
RagtagOpen/backend
|
b5172b61f7b189632f3fa6c47e8d63bc8148da3d
|
[
"MIT"
] | null | null | null |
ETL/Riders/cloudstitch_to_riders.py
|
RagtagOpen/backend
|
b5172b61f7b189632f3fa6c47e8d63bc8148da3d
|
[
"MIT"
] | null | null | null |
ETL/Riders/cloudstitch_to_riders.py
|
RagtagOpen/backend
|
b5172b61f7b189632f3fa6c47e8d63bc8148da3d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3.5
# Pulls rider data from the Cloudstitch API into staging tables for
# further processing by later stages of the pipeline.
print('Running ETL For Riders')
| 24.571429
| 59
| 0.761628
| 27
| 172
| 4.851852
| 0.925926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013889
| 0.162791
| 172
| 6
| 60
| 28.666667
| 0.895833
| 0.761628
| 0
| 0
| 0
| 0
| 0.611111
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
e17dbc862dbf5933b9431b72a9ff7748010ddb8e
| 184
|
py
|
Python
|
analyzer/models/__init__.py
|
AmirSalari/DoHLyzer
|
07e0a1e64310dad779934a97ebe7db2a03eff3d9
|
[
"Unlicense"
] | 23
|
2021-04-14T23:46:19.000Z
|
2022-03-31T04:46:47.000Z
|
analyzer/models/__init__.py
|
AmirSalari/DoHLyzer
|
07e0a1e64310dad779934a97ebe7db2a03eff3d9
|
[
"Unlicense"
] | 5
|
2021-04-19T20:12:20.000Z
|
2022-02-10T02:12:12.000Z
|
analyzer/models/__init__.py
|
AmirSalari/DoHLyzer
|
07e0a1e64310dad779934a97ebe7db2a03eff3d9
|
[
"Unlicense"
] | 8
|
2021-04-15T06:50:35.000Z
|
2022-01-15T00:44:45.000Z
|
import importlib
def create_model(version, segment_size):
    """Build the model implementation for *version* with *segment_size*.

    The version module is resolved dynamically as ``models.v<version>``, so
    new model versions become available just by following that naming scheme.
    """
    module_name = '.v{}'.format(version)
    impl = importlib.import_module(module_name, package='models')
    return impl.create_model(segment_size)
| 26.285714
| 78
| 0.76087
| 23
| 184
| 5.869565
| 0.608696
| 0.162963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11413
| 184
| 6
| 79
| 30.666667
| 0.828221
| 0
| 0
| 0
| 0
| 0
| 0.054348
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e1a9645491519dbe00909941d4379f7eb0250ee1
| 102
|
py
|
Python
|
Beta/The Chakitras language.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | 6
|
2020-09-03T09:32:25.000Z
|
2020-12-07T04:10:01.000Z
|
Beta/The Chakitras language.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | 1
|
2021-12-13T15:30:21.000Z
|
2021-12-13T15:30:21.000Z
|
Beta/The Chakitras language.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | null | null | null |
def chakitra_language(sentence):
    """Return True when every whitespace-separated word's greatest character
    (by code point) has an even ordinal; an empty sentence is vacuously True."""
    for word in sentence.split():
        if ord(max(word)) % 2 != 0:
            return False
    return True
| 51
| 69
| 0.705882
| 20
| 102
| 3.55
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022989
| 0.147059
| 102
| 2
| 69
| 51
| 0.793103
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
e1cd39c263fa24a72bf8508a0b33f30b6de99a61
| 253
|
py
|
Python
|
scarlet/__init__.py
|
vineetbansal/scarlet
|
02b25ec117ed26b937945dc640484de936756061
|
[
"MIT"
] | null | null | null |
scarlet/__init__.py
|
vineetbansal/scarlet
|
02b25ec117ed26b937945dc640484de936756061
|
[
"MIT"
] | null | null | null |
scarlet/__init__.py
|
vineetbansal/scarlet
|
02b25ec117ed26b937945dc640484de936756061
|
[
"MIT"
] | null | null | null |
# convenience: get vanilla NMF and deblend wrapper directly within scarlet
from . import operator
from .bbox import *
from .update import *
from .component import *
from .source import *
from .observation import *
from .blend import *
from . import psf
| 25.3
| 74
| 0.766798
| 34
| 253
| 5.705882
| 0.588235
| 0.309278
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16996
| 253
| 9
| 75
| 28.111111
| 0.92381
| 0.284585
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e1d108b5f72e280186112423dd1fcbe62bbc3a46
| 135
|
py
|
Python
|
Hotel_webapp/src/contact/admin.py
|
MDRCS/Fullstack-Django
|
20cbae6e1b70d7051662b579e7967061e529d71f
|
[
"MIT"
] | null | null | null |
Hotel_webapp/src/contact/admin.py
|
MDRCS/Fullstack-Django
|
20cbae6e1b70d7051662b579e7967061e529d71f
|
[
"MIT"
] | 19
|
2020-07-14T07:04:43.000Z
|
2022-03-12T00:41:14.000Z
|
Hotel_webapp/src/contact/admin.py
|
MDRCS/Fullstack-Django
|
20cbae6e1b70d7051662b579e7967061e529d71f
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from .models import ContactDetails

# Expose ContactDetails in the Django admin using the default ModelAdmin.
admin.site.register(ContactDetails)
| 19.285714
| 35
| 0.822222
| 17
| 135
| 6.529412
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118519
| 135
| 7
| 35
| 19.285714
| 0.932773
| 0.192593
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
befad206d9b2da8f305738813e8b9909e6a2b421
| 3,086
|
py
|
Python
|
day_5_boarding_pass/boarding_pass_test.py
|
Akhilian/Advent-of-code-2020
|
a543884b0fe39f7ba96736398690664254e63142
|
[
"MIT"
] | null | null | null |
day_5_boarding_pass/boarding_pass_test.py
|
Akhilian/Advent-of-code-2020
|
a543884b0fe39f7ba96736398690664254e63142
|
[
"MIT"
] | null | null | null |
day_5_boarding_pass/boarding_pass_test.py
|
Akhilian/Advent-of-code-2020
|
a543884b0fe39f7ba96736398690664254e63142
|
[
"MIT"
] | null | null | null |
from day_5_boarding_pass.boarding_pass import BoardingPass, \
find_missing_boarding_pass
class TestColumn:
    """Column decoding from a full boarding-pass sequence."""

    def test_should_be_column_5_with_full_sequence(self):
        assert BoardingPass('FBFBBFFRLR').get_column() == 5

    def test_should_be_column_7_with_full_sequence(self):
        assert BoardingPass('BFFFBBFRRR').get_column() == 7

    def test_should_be_column_0_with_full_sequence(self):
        assert BoardingPass('BFFFBBFLLL').get_column() == 0
class TestGetRows:
    """Row decoding: each F/B character narrows the candidate row range."""

    def test_when_starting_with_F_should_be_in_the_front(self):
        assert BoardingPass('F').get_row() <= 63

    def test_when_starting_with_B_should_be_in_the_back(self):
        assert BoardingPass('B').get_row() > 63

    def test_when_in_front_B_means_upper_half(self):
        assert 32 <= BoardingPass('FB').get_row() <= 63

    def test_when_in_lowerfront_F_means_lower_half(self):
        assert 32 <= BoardingPass('FBF').get_row() <= 47

    def test_when_in_another_B_means_lower_half(self):
        assert 40 <= BoardingPass('FBFB').get_row() <= 47
class TestBoardingPass:
    """Full decoding: row, column and seat id from a complete sequence."""

    def test_should_be_raw_44_with_full_sequence(self):
        bp = BoardingPass('FBFBBFFRLR')
        assert (bp.get_row(), bp.get_column(), bp.get_seat_id()) == (44, 5, 357)

    def test_should_be_raw_70_with_full_sequence(self):
        bp = BoardingPass('BFFFBBFRRR')
        assert (bp.get_row(), bp.get_column(), bp.get_seat_id()) == (70, 7, 567)

    def test_should_be_raw_14_with_full_sequence(self):
        bp = BoardingPass('FFFBBBFRRR')
        assert (bp.get_row(), bp.get_column(), bp.get_seat_id()) == (14, 7, 119)

    def test_should_be_raw_102_with_full_sequence(self):
        bp = BoardingPass('BBFFBBFRLL')
        assert (bp.get_row(), bp.get_column(), bp.get_seat_id()) == (102, 4, 820)
class TestIsBoardingPassMissing:
    """Gap detection in a list of occupied seat ids."""

    def test_is_2_when_surrounding_seats_id_are_there(self):
        assert find_missing_boarding_pass([1, 3]) == 2
| 26.603448
| 63
| 0.643876
| 379
| 3,086
| 4.815303
| 0.203166
| 0.236712
| 0.164384
| 0.195616
| 0.77863
| 0.634521
| 0.510685
| 0.489315
| 0.33863
| 0.33863
| 0
| 0.027293
| 0.275762
| 3,086
| 115
| 64
| 26.834783
| 0.789262
| 0.046014
| 0
| 0.271186
| 0
| 0
| 0.027768
| 0
| 0
| 0
| 0
| 0
| 0.40678
| 1
| 0.220339
| false
| 0.627119
| 0.016949
| 0
| 0.305085
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
832a8bb2cf3d5e9ac54e5e6ec9b5698a7c0f99af
| 34
|
py
|
Python
|
pythonlibs/mantis/tests/test_project/src/access/celery/__init__.py
|
adoggie/Tibet.6
|
3c53060edafd80b9c4dafa10699a68d86a410c66
|
[
"MIT"
] | 22
|
2019-10-28T07:28:12.000Z
|
2022-03-19T15:36:41.000Z
|
pythonlibs/mantis/tests/test_project/src/access/celery/__init__.py
|
adoggie/Tibet.6
|
3c53060edafd80b9c4dafa10699a68d86a410c66
|
[
"MIT"
] | 1
|
2019-11-07T04:54:14.000Z
|
2019-11-07T07:12:48.000Z
|
pythonlibs/mantis/tests/test_project/src/access/celery/__init__.py
|
adoggie/Tibet.6
|
3c53060edafd80b9c4dafa10699a68d86a410c66
|
[
"MIT"
] | 13
|
2019-10-28T07:29:07.000Z
|
2021-11-03T06:53:12.000Z
|
#coding:utf-8
#这里放 celery 服务的入口代码
| 11.333333
| 19
| 0.764706
| 6
| 34
| 4.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033333
| 0.117647
| 34
| 3
| 19
| 11.333333
| 0.833333
| 0.882353
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8347e30eeca7f10616da0be8353c8215e80fad3a
| 61
|
py
|
Python
|
pyffdl/utilities/__init__.py
|
Birion/ffdl
|
27f0309638e614a7c07d9e7e66fe495369f13e88
|
[
"MIT"
] | 4
|
2018-04-20T04:46:39.000Z
|
2022-02-27T01:24:08.000Z
|
pyffdl/utilities/__init__.py
|
Birion/ffdl
|
27f0309638e614a7c07d9e7e66fe495369f13e88
|
[
"MIT"
] | 4
|
2018-04-20T18:30:49.000Z
|
2021-05-17T05:38:06.000Z
|
pyffdl/utilities/__init__.py
|
Birion/ffdl
|
27f0309638e614a7c07d9e7e66fe495369f13e88
|
[
"MIT"
] | 1
|
2018-04-21T17:58:51.000Z
|
2018-04-21T17:58:51.000Z
|
from .misc import get_url_from_file, list2text # noqa: F401
| 30.5
| 60
| 0.786885
| 10
| 61
| 4.5
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0.147541
| 61
| 1
| 61
| 61
| 0.788462
| 0.163934
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
83602f439dfb2e5dd7bfcfa6765cfd961446a8f4
| 38
|
py
|
Python
|
badgyal/proto/__init__.py
|
kennyfrc/a0lite
|
a3b69ce6bc059be93c9b62fd7577360c07b98523
|
[
"MIT"
] | null | null | null |
badgyal/proto/__init__.py
|
kennyfrc/a0lite
|
a3b69ce6bc059be93c9b62fd7577360c07b98523
|
[
"MIT"
] | null | null | null |
badgyal/proto/__init__.py
|
kennyfrc/a0lite
|
a3b69ce6bc059be93c9b62fd7577360c07b98523
|
[
"MIT"
] | null | null | null |
#from proto import net_pb2, chunk_pb2
| 19
| 37
| 0.815789
| 7
| 38
| 4.142857
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 0.131579
| 38
| 1
| 38
| 38
| 0.818182
| 0.947368
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
55e6b5df8a2ee72e1f72dbcaedb406a8c46bf617
| 93
|
py
|
Python
|
main/utils/_type.py
|
avizum/Groot
|
cc8dc05234808444de0686c91c746665f348ee94
|
[
"MIT"
] | null | null | null |
main/utils/_type.py
|
avizum/Groot
|
cc8dc05234808444de0686c91c746665f348ee94
|
[
"MIT"
] | null | null | null |
main/utils/_type.py
|
avizum/Groot
|
cc8dc05234808444de0686c91c746665f348ee94
|
[
"MIT"
] | null | null | null |
"""Typehints that are frequently used"""
import typing
from .subclasses import customContext
| 23.25
| 40
| 0.806452
| 11
| 93
| 6.818182
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11828
| 93
| 4
| 41
| 23.25
| 0.914634
| 0.365591
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
363869922ceadb7155f9752491af34db30051a0e
| 1,718
|
py
|
Python
|
hknweb/studentservices/tests/views/test_tours.py
|
jyxzhang/hknweb
|
a01ffd8587859bf63c46213be6a0c8b87164a5c2
|
[
"MIT"
] | null | null | null |
hknweb/studentservices/tests/views/test_tours.py
|
jyxzhang/hknweb
|
a01ffd8587859bf63c46213be6a0c8b87164a5c2
|
[
"MIT"
] | null | null | null |
hknweb/studentservices/tests/views/test_tours.py
|
jyxzhang/hknweb
|
a01ffd8587859bf63c46213be6a0c8b87164a5c2
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
from django.urls import reverse
from django.utils import timezone
from hknweb.utils import DATETIME_12_HOUR_FORMAT
class ToursViewTests(TestCase):
def test_tours_get(self):
response = self.client.get(reverse("studentservices:tours"))
self.assertEqual(response.status_code, 200)
def test_tours_form_valid_redirects(self):
data = {
"name": "test_name",
"datetime": timezone.now().strftime(DATETIME_12_HOUR_FORMAT),
"email": "test_email@email.com",
"phone": "9876543210",
"comments": "test_comments",
}
response = self.client.post(reverse("studentservices:tours"), data=data)
self.assertEqual(response.status_code, 302)
def test_tours_form_invalid_returns_form(self):
data = {
"name": "test_name",
"datetime": timezone.now().strftime(DATETIME_12_HOUR_FORMAT),
"email": "test_email",
"phone": "9876543210",
"comments": "test_comments",
}
response = self.client.post(reverse("studentservices:tours"), data=data)
self.assertEqual(response.status_code, 200)
def test_tours_form_datetime_invalid_returns_form(self):
data = {
"name": "test_name",
"datetime": (timezone.now() - timezone.timedelta(days=20)).strftime(
DATETIME_12_HOUR_FORMAT
),
"email": "test_email@email.com",
"phone": "9876543210",
"comments": "test_comments",
}
response = self.client.post(reverse("studentservices:tours"), data=data)
self.assertEqual(response.status_code, 200)
| 33.038462
| 80
| 0.622235
| 181
| 1,718
| 5.674033
| 0.254144
| 0.038948
| 0.054528
| 0.077897
| 0.72444
| 0.72444
| 0.72444
| 0.72444
| 0.72444
| 0.72444
| 0
| 0.040752
| 0.257276
| 1,718
| 51
| 81
| 33.686275
| 0.764107
| 0
| 0
| 0.55
| 0
| 0
| 0.186263
| 0.048894
| 0
| 0
| 0
| 0
| 0.1
| 1
| 0.1
| false
| 0
| 0.1
| 0
| 0.225
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3654396c9978ff332e93294b28333565bfcede93
| 92,289
|
py
|
Python
|
nova/virt/libvirt/imagebackend.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/virt/libvirt/imagebackend.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/virt/libvirt/imagebackend.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | 2
|
2017-07-20T17:31:34.000Z
|
2020-07-24T02:42:19.000Z
|
begin_unit
comment|'# Copyright 2012 Grid Dynamics'
nl|'\n'
comment|'# All Rights Reserved.'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
name|'import'
name|'abc'
newline|'\n'
name|'import'
name|'base64'
newline|'\n'
name|'import'
name|'contextlib'
newline|'\n'
name|'import'
name|'functools'
newline|'\n'
name|'import'
name|'os'
newline|'\n'
name|'import'
name|'shutil'
newline|'\n'
nl|'\n'
name|'from'
name|'oslo_log'
name|'import'
name|'log'
name|'as'
name|'logging'
newline|'\n'
name|'from'
name|'oslo_serialization'
name|'import'
name|'jsonutils'
newline|'\n'
name|'from'
name|'oslo_utils'
name|'import'
name|'excutils'
newline|'\n'
name|'from'
name|'oslo_utils'
name|'import'
name|'fileutils'
newline|'\n'
name|'from'
name|'oslo_utils'
name|'import'
name|'strutils'
newline|'\n'
name|'from'
name|'oslo_utils'
name|'import'
name|'units'
newline|'\n'
name|'import'
name|'six'
newline|'\n'
nl|'\n'
name|'import'
name|'nova'
op|'.'
name|'conf'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'exception'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'i18n'
name|'import'
name|'_'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'i18n'
name|'import'
name|'_LE'
op|','
name|'_LI'
op|','
name|'_LW'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'image'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'keymgr'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'utils'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'disk'
name|'import'
name|'api'
name|'as'
name|'disk'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'image'
name|'import'
name|'model'
name|'as'
name|'imgmodel'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
name|'import'
name|'images'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'libvirt'
name|'import'
name|'config'
name|'as'
name|'vconfig'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'libvirt'
op|'.'
name|'storage'
name|'import'
name|'dmcrypt'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'libvirt'
op|'.'
name|'storage'
name|'import'
name|'lvm'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'libvirt'
op|'.'
name|'storage'
name|'import'
name|'rbd_utils'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'libvirt'
name|'import'
name|'utils'
name|'as'
name|'libvirt_utils'
newline|'\n'
nl|'\n'
DECL|variable|CONF
name|'CONF'
op|'='
name|'nova'
op|'.'
name|'conf'
op|'.'
name|'CONF'
newline|'\n'
nl|'\n'
DECL|variable|LOG
name|'LOG'
op|'='
name|'logging'
op|'.'
name|'getLogger'
op|'('
name|'__name__'
op|')'
newline|'\n'
DECL|variable|IMAGE_API
name|'IMAGE_API'
op|'='
name|'image'
op|'.'
name|'API'
op|'('
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
op|'@'
name|'six'
op|'.'
name|'add_metaclass'
op|'('
name|'abc'
op|'.'
name|'ABCMeta'
op|')'
newline|'\n'
DECL|class|Image
name|'class'
name|'Image'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|variable|SUPPORTS_CLONE
indent|' '
name|'SUPPORTS_CLONE'
op|'='
name|'False'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'source_type'
op|','
name|'driver_format'
op|','
name|'is_block_dev'
op|'='
name|'False'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Image initialization.\n\n :source_type: block or file\n :driver_format: raw or qcow2\n :is_block_dev:\n """'
newline|'\n'
name|'if'
op|'('
name|'CONF'
op|'.'
name|'ephemeral_storage_encryption'
op|'.'
name|'enabled'
name|'and'
nl|'\n'
name|'not'
name|'self'
op|'.'
name|'_supports_encryption'
op|'('
op|')'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'NovaException'
op|'('
name|'_'
op|'('
string|"'Incompatible settings: '"
nl|'\n'
string|"'ephemeral storage encryption is supported '"
nl|'\n'
string|"'only for LVM images.'"
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'source_type'
op|'='
name|'source_type'
newline|'\n'
name|'self'
op|'.'
name|'driver_format'
op|'='
name|'driver_format'
newline|'\n'
name|'self'
op|'.'
name|'driver_io'
op|'='
name|'None'
newline|'\n'
name|'self'
op|'.'
name|'discard_mode'
op|'='
name|'CONF'
op|'.'
name|'libvirt'
op|'.'
name|'hw_disk_discard'
newline|'\n'
name|'self'
op|'.'
name|'is_block_dev'
op|'='
name|'is_block_dev'
newline|'\n'
name|'self'
op|'.'
name|'preallocate'
op|'='
name|'False'
newline|'\n'
nl|'\n'
comment|'# NOTE(dripton): We store lines of json (path, disk_format) in this'
nl|'\n'
comment|'# file, for some image types, to prevent attacks based on changing the'
nl|'\n'
comment|'# disk_format.'
nl|'\n'
name|'self'
op|'.'
name|'disk_info_path'
op|'='
name|'None'
newline|'\n'
nl|'\n'
comment|'# NOTE(mikal): We need a lock directory which is shared along with'
nl|'\n'
comment|'# instance files, to cover the scenario where multiple compute nodes'
nl|'\n'
comment|'# are trying to create a base file at the same time'
nl|'\n'
name|'self'
op|'.'
name|'lock_path'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'CONF'
op|'.'
name|'instances_path'
op|','
string|"'locks'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|_supports_encryption
dedent|''
name|'def'
name|'_supports_encryption'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Used to test that the backend supports encryption.\n Override in the subclass if backend supports encryption.\n """'
newline|'\n'
name|'return'
name|'False'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'abc'
op|'.'
name|'abstractmethod'
newline|'\n'
DECL|member|create_image
name|'def'
name|'create_image'
op|'('
name|'self'
op|','
name|'prepare_template'
op|','
name|'base'
op|','
name|'size'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Create image from template.\n\n Contains specific behavior for each image type.\n\n :prepare_template: function, that creates template.\n Should accept `target` argument.\n :base: Template name\n :size: Size of created image in bytes\n\n """'
newline|'\n'
name|'pass'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'abc'
op|'.'
name|'abstractmethod'
newline|'\n'
DECL|member|resize_image
name|'def'
name|'resize_image'
op|'('
name|'self'
op|','
name|'size'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Resize image to size (in bytes).\n\n :size: Desired size of image in bytes\n\n """'
newline|'\n'
name|'pass'
newline|'\n'
nl|'\n'
DECL|member|libvirt_info
dedent|''
name|'def'
name|'libvirt_info'
op|'('
name|'self'
op|','
name|'disk_bus'
op|','
name|'disk_dev'
op|','
name|'device_type'
op|','
name|'cache_mode'
op|','
nl|'\n'
name|'extra_specs'
op|','
name|'hypervisor_version'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Get `LibvirtConfigGuestDisk` filled for this image.\n\n :disk_dev: Disk bus device name\n :disk_bus: Disk bus type\n :device_type: Device type for this image.\n :cache_mode: Caching mode for this image\n :extra_specs: Instance type extra specs dict.\n :hypervisor_version: the hypervisor version\n """'
newline|'\n'
name|'info'
op|'='
name|'vconfig'
op|'.'
name|'LibvirtConfigGuestDisk'
op|'('
op|')'
newline|'\n'
name|'info'
op|'.'
name|'source_type'
op|'='
name|'self'
op|'.'
name|'source_type'
newline|'\n'
name|'info'
op|'.'
name|'source_device'
op|'='
name|'device_type'
newline|'\n'
name|'info'
op|'.'
name|'target_bus'
op|'='
name|'disk_bus'
newline|'\n'
name|'info'
op|'.'
name|'target_dev'
op|'='
name|'disk_dev'
newline|'\n'
name|'info'
op|'.'
name|'driver_cache'
op|'='
name|'cache_mode'
newline|'\n'
name|'info'
op|'.'
name|'driver_discard'
op|'='
name|'self'
op|'.'
name|'discard_mode'
newline|'\n'
name|'info'
op|'.'
name|'driver_io'
op|'='
name|'self'
op|'.'
name|'driver_io'
newline|'\n'
name|'info'
op|'.'
name|'driver_format'
op|'='
name|'self'
op|'.'
name|'driver_format'
newline|'\n'
name|'driver_name'
op|'='
name|'libvirt_utils'
op|'.'
name|'pick_disk_driver_name'
op|'('
name|'hypervisor_version'
op|','
nl|'\n'
name|'self'
op|'.'
name|'is_block_dev'
op|')'
newline|'\n'
name|'info'
op|'.'
name|'driver_name'
op|'='
name|'driver_name'
newline|'\n'
name|'info'
op|'.'
name|'source_path'
op|'='
name|'self'
op|'.'
name|'path'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'disk_qos'
op|'('
name|'info'
op|','
name|'extra_specs'
op|')'
newline|'\n'
nl|'\n'
name|'return'
name|'info'
newline|'\n'
nl|'\n'
DECL|member|disk_qos
dedent|''
name|'def'
name|'disk_qos'
op|'('
name|'self'
op|','
name|'info'
op|','
name|'extra_specs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'tune_items'
op|'='
op|'['
string|"'disk_read_bytes_sec'"
op|','
string|"'disk_read_iops_sec'"
op|','
nl|'\n'
string|"'disk_write_bytes_sec'"
op|','
string|"'disk_write_iops_sec'"
op|','
nl|'\n'
string|"'disk_total_bytes_sec'"
op|','
string|"'disk_total_iops_sec'"
op|']'
newline|'\n'
name|'for'
name|'key'
op|','
name|'value'
name|'in'
name|'six'
op|'.'
name|'iteritems'
op|'('
name|'extra_specs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'scope'
op|'='
name|'key'
op|'.'
name|'split'
op|'('
string|"':'"
op|')'
newline|'\n'
name|'if'
name|'len'
op|'('
name|'scope'
op|')'
op|'>'
number|'1'
name|'and'
name|'scope'
op|'['
number|'0'
op|']'
op|'=='
string|"'quota'"
op|':'
newline|'\n'
indent|' '
name|'if'
name|'scope'
op|'['
number|'1'
op|']'
name|'in'
name|'tune_items'
op|':'
newline|'\n'
indent|' '
name|'setattr'
op|'('
name|'info'
op|','
name|'scope'
op|'['
number|'1'
op|']'
op|','
name|'value'
op|')'
newline|'\n'
nl|'\n'
DECL|member|libvirt_fs_info
dedent|''
dedent|''
dedent|''
dedent|''
name|'def'
name|'libvirt_fs_info'
op|'('
name|'self'
op|','
name|'target'
op|','
name|'driver_type'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Get `LibvirtConfigGuestFilesys` filled for this image.\n\n :target: target directory inside a container.\n :driver_type: filesystem driver type, can be loop\n nbd or ploop.\n """'
newline|'\n'
name|'info'
op|'='
name|'vconfig'
op|'.'
name|'LibvirtConfigGuestFilesys'
op|'('
op|')'
newline|'\n'
name|'info'
op|'.'
name|'target_dir'
op|'='
name|'target'
newline|'\n'
nl|'\n'
name|'if'
name|'self'
op|'.'
name|'is_block_dev'
op|':'
newline|'\n'
indent|' '
name|'info'
op|'.'
name|'source_type'
op|'='
string|'"block"'
newline|'\n'
name|'info'
op|'.'
name|'source_dev'
op|'='
name|'self'
op|'.'
name|'path'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'info'
op|'.'
name|'source_type'
op|'='
string|'"file"'
newline|'\n'
name|'info'
op|'.'
name|'source_file'
op|'='
name|'self'
op|'.'
name|'path'
newline|'\n'
name|'info'
op|'.'
name|'driver_format'
op|'='
name|'self'
op|'.'
name|'driver_format'
newline|'\n'
name|'if'
name|'driver_type'
op|':'
newline|'\n'
indent|' '
name|'info'
op|'.'
name|'driver_type'
op|'='
name|'driver_type'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'self'
op|'.'
name|'driver_format'
op|'=='
string|'"raw"'
op|':'
newline|'\n'
indent|' '
name|'info'
op|'.'
name|'driver_type'
op|'='
string|'"loop"'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'info'
op|'.'
name|'driver_type'
op|'='
string|'"nbd"'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
dedent|''
name|'return'
name|'info'
newline|'\n'
nl|'\n'
DECL|member|check_image_exists
dedent|''
name|'def'
name|'check_image_exists'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'self'
op|'.'
name|'path'
op|')'
newline|'\n'
nl|'\n'
DECL|member|cache
dedent|''
name|'def'
name|'cache'
op|'('
name|'self'
op|','
name|'fetch_func'
op|','
name|'filename'
op|','
name|'size'
op|'='
name|'None'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Creates image from template.\n\n Ensures that template and image not already exists.\n Ensures that base directory exists.\n Synchronizes on template fetching.\n\n :fetch_func: Function that creates the base image\n Should accept `target` argument.\n :filename: Name of the file in the image directory\n :size: Size of created image in bytes (optional)\n """'
newline|'\n'
op|'@'
name|'utils'
op|'.'
name|'synchronized'
op|'('
name|'filename'
op|','
name|'external'
op|'='
name|'True'
op|','
name|'lock_path'
op|'='
name|'self'
op|'.'
name|'lock_path'
op|')'
newline|'\n'
DECL|function|fetch_func_sync
name|'def'
name|'fetch_func_sync'
op|'('
name|'target'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
comment|'# The image may have been fetched while a subsequent'
nl|'\n'
comment|'# call was waiting to obtain the lock.'
nl|'\n'
indent|' '
name|'if'
name|'not'
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'target'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fetch_func'
op|'('
name|'target'
op|'='
name|'target'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'base_dir'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'CONF'
op|'.'
name|'instances_path'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'image_cache_subdirectory_name'
op|')'
newline|'\n'
name|'if'
name|'not'
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'base_dir'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fileutils'
op|'.'
name|'ensure_tree'
op|'('
name|'base_dir'
op|')'
newline|'\n'
dedent|''
name|'base'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'base_dir'
op|','
name|'filename'
op|')'
newline|'\n'
nl|'\n'
name|'if'
name|'not'
name|'self'
op|'.'
name|'check_image_exists'
op|'('
op|')'
name|'or'
name|'not'
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'base'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'create_image'
op|'('
name|'fetch_func_sync'
op|','
name|'base'
op|','
name|'size'
op|','
nl|'\n'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
name|'size'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'size'
op|'>'
name|'self'
op|'.'
name|'get_disk_size'
op|'('
name|'base'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'resize_image'
op|'('
name|'size'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
op|'('
name|'self'
op|'.'
name|'preallocate'
name|'and'
name|'self'
op|'.'
name|'_can_fallocate'
op|'('
op|')'
name|'and'
nl|'\n'
name|'os'
op|'.'
name|'access'
op|'('
name|'self'
op|'.'
name|'path'
op|','
name|'os'
op|'.'
name|'W_OK'
op|')'
op|')'
op|':'
newline|'\n'
indent|' '
name|'utils'
op|'.'
name|'execute'
op|'('
string|"'fallocate'"
op|','
string|"'-n'"
op|','
string|"'-l'"
op|','
name|'size'
op|','
name|'self'
op|'.'
name|'path'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_can_fallocate
dedent|''
dedent|''
dedent|''
name|'def'
name|'_can_fallocate'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Check once per class, whether fallocate(1) is available,\n and that the instances directory supports fallocate(2).\n """'
newline|'\n'
name|'can_fallocate'
op|'='
name|'getattr'
op|'('
name|'self'
op|'.'
name|'__class__'
op|','
string|"'can_fallocate'"
op|','
name|'None'
op|')'
newline|'\n'
name|'if'
name|'can_fallocate'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'test_path'
op|'='
name|'self'
op|'.'
name|'path'
op|'+'
string|"'.fallocate_test'"
newline|'\n'
name|'_out'
op|','
name|'err'
op|'='
name|'utils'
op|'.'
name|'trycmd'
op|'('
string|"'fallocate'"
op|','
string|"'-l'"
op|','
string|"'1'"
op|','
name|'test_path'
op|')'
newline|'\n'
name|'fileutils'
op|'.'
name|'delete_if_exists'
op|'('
name|'test_path'
op|')'
newline|'\n'
name|'can_fallocate'
op|'='
name|'not'
name|'err'
newline|'\n'
name|'self'
op|'.'
name|'__class__'
op|'.'
name|'can_fallocate'
op|'='
name|'can_fallocate'
newline|'\n'
name|'if'
name|'not'
name|'can_fallocate'
op|':'
newline|'\n'
indent|' '
name|'LOG'
op|'.'
name|'warning'
op|'('
name|'_LW'
op|'('
string|"'Unable to preallocate image at path: '"
nl|'\n'
string|"'%(path)s'"
op|')'
op|','
op|'{'
string|"'path'"
op|':'
name|'self'
op|'.'
name|'path'
op|'}'
op|')'
newline|'\n'
dedent|''
dedent|''
name|'return'
name|'can_fallocate'
newline|'\n'
nl|'\n'
DECL|member|verify_base_size
dedent|''
name|'def'
name|'verify_base_size'
op|'('
name|'self'
op|','
name|'base'
op|','
name|'size'
op|','
name|'base_size'
op|'='
number|'0'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Check that the base image is not larger than size.\n Since images can\'t be generally shrunk, enforce this\n constraint taking account of virtual image size.\n """'
newline|'\n'
nl|'\n'
comment|'# Note(pbrady): The size and min_disk parameters of a glance'
nl|'\n'
comment|'# image are checked against the instance size before the image'
nl|'\n'
comment|'# is even downloaded from glance, but currently min_disk is'
nl|'\n'
comment|"# adjustable and doesn't currently account for virtual disk size,"
nl|'\n'
comment|'# so we need this extra check here.'
nl|'\n'
comment|'# NOTE(cfb): Having a flavor that sets the root size to 0 and having'
nl|'\n'
comment|'# nova effectively ignore that size and use the size of the'
nl|'\n'
comment|'# image is considered a feature at this time, not a bug.'
nl|'\n'
nl|'\n'
name|'if'
name|'size'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'return'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
name|'size'
name|'and'
name|'not'
name|'base_size'
op|':'
newline|'\n'
indent|' '
name|'base_size'
op|'='
name|'self'
op|'.'
name|'get_disk_size'
op|'('
name|'base'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
name|'size'
op|'<'
name|'base_size'
op|':'
newline|'\n'
indent|' '
name|'msg'
op|'='
name|'_LE'
op|'('
string|"'%(base)s virtual size %(base_size)s '"
nl|'\n'
string|"'larger than flavor root disk size %(size)s'"
op|')'
newline|'\n'
name|'LOG'
op|'.'
name|'error'
op|'('
name|'msg'
op|'%'
op|'{'
string|"'base'"
op|':'
name|'base'
op|','
nl|'\n'
string|"'base_size'"
op|':'
name|'base_size'
op|','
nl|'\n'
string|"'size'"
op|':'
name|'size'
op|'}'
op|')'
newline|'\n'
name|'raise'
name|'exception'
op|'.'
name|'FlavorDiskSmallerThanImage'
op|'('
nl|'\n'
name|'flavor_size'
op|'='
name|'size'
op|','
name|'image_size'
op|'='
name|'base_size'
op|')'
newline|'\n'
nl|'\n'
DECL|member|get_disk_size
dedent|''
dedent|''
name|'def'
name|'get_disk_size'
op|'('
name|'self'
op|','
name|'name'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'disk'
op|'.'
name|'get_disk_size'
op|'('
name|'name'
op|')'
newline|'\n'
nl|'\n'
DECL|member|snapshot_extract
dedent|''
name|'def'
name|'snapshot_extract'
op|'('
name|'self'
op|','
name|'target'
op|','
name|'out_format'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'NotImplementedError'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|_get_driver_format
dedent|''
name|'def'
name|'_get_driver_format'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'self'
op|'.'
name|'driver_format'
newline|'\n'
nl|'\n'
DECL|member|resolve_driver_format
dedent|''
name|'def'
name|'resolve_driver_format'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Return the driver format for self.path.\n\n First checks self.disk_info_path for an entry.\n If it\'s not there, calls self._get_driver_format(), and then\n stores the result in self.disk_info_path\n\n See https://bugs.launchpad.net/nova/+bug/1221190\n """'
newline|'\n'
DECL|function|_dict_from_line
name|'def'
name|'_dict_from_line'
op|'('
name|'line'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'not'
name|'line'
op|':'
newline|'\n'
indent|' '
name|'return'
op|'{'
op|'}'
newline|'\n'
dedent|''
name|'try'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'jsonutils'
op|'.'
name|'loads'
op|'('
name|'line'
op|')'
newline|'\n'
dedent|''
name|'except'
op|'('
name|'TypeError'
op|','
name|'ValueError'
op|')'
name|'as'
name|'e'
op|':'
newline|'\n'
indent|' '
name|'msg'
op|'='
op|'('
name|'_'
op|'('
string|'"Could not load line %(line)s, got error "'
nl|'\n'
string|'"%(error)s"'
op|')'
op|'%'
nl|'\n'
op|'{'
string|"'line'"
op|':'
name|'line'
op|','
string|"'error'"
op|':'
name|'e'
op|'}'
op|')'
newline|'\n'
name|'raise'
name|'exception'
op|'.'
name|'InvalidDiskInfo'
op|'('
name|'reason'
op|'='
name|'msg'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
op|'@'
name|'utils'
op|'.'
name|'synchronized'
op|'('
name|'self'
op|'.'
name|'disk_info_path'
op|','
name|'external'
op|'='
name|'False'
op|','
nl|'\n'
name|'lock_path'
op|'='
name|'self'
op|'.'
name|'lock_path'
op|')'
newline|'\n'
DECL|function|write_to_disk_info_file
name|'def'
name|'write_to_disk_info_file'
op|'('
op|')'
op|':'
newline|'\n'
comment|'# Use os.open to create it without group or world write permission.'
nl|'\n'
indent|' '
name|'fd'
op|'='
name|'os'
op|'.'
name|'open'
op|'('
name|'self'
op|'.'
name|'disk_info_path'
op|','
name|'os'
op|'.'
name|'O_RDONLY'
op|'|'
name|'os'
op|'.'
name|'O_CREAT'
op|','
number|'0o644'
op|')'
newline|'\n'
name|'with'
name|'os'
op|'.'
name|'fdopen'
op|'('
name|'fd'
op|','
string|'"r"'
op|')'
name|'as'
name|'disk_info_file'
op|':'
newline|'\n'
indent|' '
name|'line'
op|'='
name|'disk_info_file'
op|'.'
name|'read'
op|'('
op|')'
op|'.'
name|'rstrip'
op|'('
op|')'
newline|'\n'
name|'dct'
op|'='
name|'_dict_from_line'
op|'('
name|'line'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
name|'self'
op|'.'
name|'path'
name|'in'
name|'dct'
op|':'
newline|'\n'
indent|' '
name|'msg'
op|'='
name|'_'
op|'('
string|'"Attempted overwrite of an existing value."'
op|')'
newline|'\n'
name|'raise'
name|'exception'
op|'.'
name|'InvalidDiskInfo'
op|'('
name|'reason'
op|'='
name|'msg'
op|')'
newline|'\n'
dedent|''
name|'dct'
op|'.'
name|'update'
op|'('
op|'{'
name|'self'
op|'.'
name|'path'
op|':'
name|'driver_format'
op|'}'
op|')'
newline|'\n'
nl|'\n'
name|'tmp_path'
op|'='
name|'self'
op|'.'
name|'disk_info_path'
op|'+'
string|'".tmp"'
newline|'\n'
name|'fd'
op|'='
name|'os'
op|'.'
name|'open'
op|'('
name|'tmp_path'
op|','
name|'os'
op|'.'
name|'O_WRONLY'
op|'|'
name|'os'
op|'.'
name|'O_CREAT'
op|','
number|'0o644'
op|')'
newline|'\n'
name|'with'
name|'os'
op|'.'
name|'fdopen'
op|'('
name|'fd'
op|','
string|'"w"'
op|')'
name|'as'
name|'tmp_file'
op|':'
newline|'\n'
indent|' '
name|'tmp_file'
op|'.'
name|'write'
op|'('
string|"'%s\\n'"
op|'%'
name|'jsonutils'
op|'.'
name|'dumps'
op|'('
name|'dct'
op|')'
op|')'
newline|'\n'
dedent|''
name|'os'
op|'.'
name|'rename'
op|'('
name|'tmp_path'
op|','
name|'self'
op|'.'
name|'disk_info_path'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'try'
op|':'
newline|'\n'
indent|' '
name|'if'
op|'('
name|'self'
op|'.'
name|'disk_info_path'
name|'is'
name|'not'
name|'None'
name|'and'
nl|'\n'
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'self'
op|'.'
name|'disk_info_path'
op|')'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'open'
op|'('
name|'self'
op|'.'
name|'disk_info_path'
op|')'
name|'as'
name|'disk_info_file'
op|':'
newline|'\n'
indent|' '
name|'line'
op|'='
name|'disk_info_file'
op|'.'
name|'read'
op|'('
op|')'
op|'.'
name|'rstrip'
op|'('
op|')'
newline|'\n'
name|'dct'
op|'='
name|'_dict_from_line'
op|'('
name|'line'
op|')'
newline|'\n'
name|'for'
name|'path'
op|','
name|'driver_format'
name|'in'
name|'six'
op|'.'
name|'iteritems'
op|'('
name|'dct'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'path'
op|'=='
name|'self'
op|'.'
name|'path'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'driver_format'
newline|'\n'
dedent|''
dedent|''
dedent|''
dedent|''
name|'driver_format'
op|'='
name|'self'
op|'.'
name|'_get_driver_format'
op|'('
op|')'
newline|'\n'
name|'if'
name|'self'
op|'.'
name|'disk_info_path'
name|'is'
name|'not'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'fileutils'
op|'.'
name|'ensure_tree'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'dirname'
op|'('
name|'self'
op|'.'
name|'disk_info_path'
op|')'
op|')'
newline|'\n'
name|'write_to_disk_info_file'
op|'('
op|')'
newline|'\n'
dedent|''
dedent|''
name|'except'
name|'OSError'
name|'as'
name|'e'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'DiskInfoReadWriteFail'
op|'('
name|'reason'
op|'='
name|'six'
op|'.'
name|'text_type'
op|'('
name|'e'
op|')'
op|')'
newline|'\n'
dedent|''
name|'return'
name|'driver_format'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'staticmethod'
newline|'\n'
DECL|member|is_shared_block_storage
name|'def'
name|'is_shared_block_storage'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""True if the backend puts images on a shared block storage."""'
newline|'\n'
name|'return'
name|'False'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'staticmethod'
newline|'\n'
DECL|member|is_file_in_instance_path
name|'def'
name|'is_file_in_instance_path'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""True if the backend stores images in files under instance path."""'
newline|'\n'
name|'return'
name|'False'
newline|'\n'
nl|'\n'
DECL|member|clone
dedent|''
name|'def'
name|'clone'
op|'('
name|'self'
op|','
name|'context'
op|','
name|'image_id_or_uri'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Clone an image.\n\n Note that clone operation is backend-dependent. The backend may ask\n the image API for a list of image "locations" and select one or more\n of those locations to clone an image from.\n\n :param image_id_or_uri: The ID or URI of an image to clone.\n\n :raises: exception.ImageUnacceptable if it cannot be cloned\n """'
newline|'\n'
name|'reason'
op|'='
name|'_'
op|'('
string|"'clone() is not implemented'"
op|')'
newline|'\n'
name|'raise'
name|'exception'
op|'.'
name|'ImageUnacceptable'
op|'('
name|'image_id'
op|'='
name|'image_id_or_uri'
op|','
nl|'\n'
name|'reason'
op|'='
name|'reason'
op|')'
newline|'\n'
nl|'\n'
DECL|member|direct_snapshot
dedent|''
name|'def'
name|'direct_snapshot'
op|'('
name|'self'
op|','
name|'context'
op|','
name|'snapshot_name'
op|','
name|'image_format'
op|','
name|'image_id'
op|','
nl|'\n'
name|'base_image_id'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Prepare a snapshot for direct reference from glance\n\n :raises: exception.ImageUnacceptable if it cannot be\n referenced directly in the specified image format\n :returns: URL to be given to glance\n """'
newline|'\n'
name|'raise'
name|'NotImplementedError'
op|'('
name|'_'
op|'('
string|"'direct_snapshot() is not implemented'"
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|cleanup_direct_snapshot
dedent|''
name|'def'
name|'cleanup_direct_snapshot'
op|'('
name|'self'
op|','
name|'location'
op|','
name|'also_destroy_volume'
op|'='
name|'False'
op|','
nl|'\n'
name|'ignore_errors'
op|'='
name|'False'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Performs any cleanup actions required after calling\n direct_snapshot(), for graceful exception handling and the like.\n\n This should be a no-op on any backend where it is not implemented.\n """'
newline|'\n'
name|'pass'
newline|'\n'
nl|'\n'
DECL|member|_get_lock_name
dedent|''
name|'def'
name|'_get_lock_name'
op|'('
name|'self'
op|','
name|'base'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Get an image\'s name of a base file."""'
newline|'\n'
name|'return'
name|'os'
op|'.'
name|'path'
op|'.'
name|'split'
op|'('
name|'base'
op|')'
op|'['
op|'-'
number|'1'
op|']'
newline|'\n'
nl|'\n'
DECL|member|get_model
dedent|''
name|'def'
name|'get_model'
op|'('
name|'self'
op|','
name|'connection'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Get the image information model\n\n :returns: an instance of nova.virt.image.model.Image\n """'
newline|'\n'
name|'raise'
name|'NotImplementedError'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|import_file
dedent|''
name|'def'
name|'import_file'
op|'('
name|'self'
op|','
name|'instance'
op|','
name|'local_file'
op|','
name|'remote_name'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Import an image from local storage into this backend.\n\n Import a local file into the store used by this image type. Note that\n this is a noop for stores using local disk (the local file is\n considered "in the store").\n\n If the image already exists it will be overridden by the new file\n\n :param local_file: path to the file to import\n :param remote_name: the name for the file in the store\n """'
newline|'\n'
nl|'\n'
comment|'# NOTE(mikal): this is a noop for now for all stores except RBD, but'
nl|'\n'
comment|'# we should talk about if we want this functionality for everything.'
nl|'\n'
name|'pass'
newline|'\n'
nl|'\n'
DECL|member|create_snap
dedent|''
name|'def'
name|'create_snap'
op|'('
name|'self'
op|','
name|'name'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Create a snapshot on the image. A noop on backends that don\'t\n support snapshots.\n\n :param name: name of the snapshot\n """'
newline|'\n'
name|'pass'
newline|'\n'
nl|'\n'
DECL|member|remove_snap
dedent|''
name|'def'
name|'remove_snap'
op|'('
name|'self'
op|','
name|'name'
op|','
name|'ignore_errors'
op|'='
name|'False'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Remove a snapshot on the image. A noop on backends that don\'t\n support snapshots.\n\n :param name: name of the snapshot\n :param ignore_errors: don\'t log errors if the snapshot does not exist\n """'
newline|'\n'
name|'pass'
newline|'\n'
nl|'\n'
DECL|member|rollback_to_snap
dedent|''
name|'def'
name|'rollback_to_snap'
op|'('
name|'self'
op|','
name|'name'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Rollback the image to the named snapshot. A noop on backends that\n don\'t support snapshots.\n\n :param name: name of the snapshot\n """'
newline|'\n'
name|'pass'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|Raw
dedent|''
dedent|''
name|'class'
name|'Raw'
op|'('
name|'Image'
op|')'
op|':'
newline|'\n'
DECL|member|__init__
indent|' '
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'instance'
op|'='
name|'None'
op|','
name|'disk_name'
op|'='
name|'None'
op|','
name|'path'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'disk_name'
op|'='
name|'disk_name'
newline|'\n'
name|'super'
op|'('
name|'Raw'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
string|'"file"'
op|','
string|'"raw"'
op|','
name|'is_block_dev'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'path'
op|'='
op|'('
name|'path'
name|'or'
nl|'\n'
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'libvirt_utils'
op|'.'
name|'get_instance_path'
op|'('
name|'instance'
op|')'
op|','
nl|'\n'
name|'disk_name'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'preallocate'
op|'='
op|'('
nl|'\n'
name|'strutils'
op|'.'
name|'to_slug'
op|'('
name|'CONF'
op|'.'
name|'preallocate_images'
op|')'
op|'=='
string|"'space'"
op|')'
newline|'\n'
name|'if'
name|'self'
op|'.'
name|'preallocate'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'driver_io'
op|'='
string|'"native"'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'disk_info_path'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'dirname'
op|'('
name|'self'
op|'.'
name|'path'
op|')'
op|','
nl|'\n'
string|"'disk.info'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'correct_format'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|_get_driver_format
dedent|''
name|'def'
name|'_get_driver_format'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'try'
op|':'
newline|'\n'
indent|' '
name|'data'
op|'='
name|'images'
op|'.'
name|'qemu_img_info'
op|'('
name|'self'
op|'.'
name|'path'
op|')'
newline|'\n'
name|'return'
name|'data'
op|'.'
name|'file_format'
newline|'\n'
dedent|''
name|'except'
name|'exception'
op|'.'
name|'InvalidDiskInfo'
name|'as'
name|'e'
op|':'
newline|'\n'
indent|' '
name|'LOG'
op|'.'
name|'info'
op|'('
name|'_LI'
op|'('
string|"'Failed to get image info from path %(path)s; '"
nl|'\n'
string|"'error: %(error)s'"
op|')'
op|','
nl|'\n'
op|'{'
string|"'path'"
op|':'
name|'self'
op|'.'
name|'path'
op|','
nl|'\n'
string|"'error'"
op|':'
name|'e'
op|'}'
op|')'
newline|'\n'
name|'return'
string|"'raw'"
newline|'\n'
nl|'\n'
DECL|member|_supports_encryption
dedent|''
dedent|''
name|'def'
name|'_supports_encryption'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# NOTE(dgenin): Kernel, ramdisk and disk.config are fetched using'
nl|'\n'
comment|'# the Raw backend regardless of which backend is configured for'
nl|'\n'
comment|'# ephemeral storage. Encryption for the Raw backend is not yet'
nl|'\n'
comment|'# implemented so this loophole is necessary to allow other'
nl|'\n'
comment|'# backends already supporting encryption to function. This can'
nl|'\n'
comment|'# be removed once encryption for Raw is implemented.'
nl|'\n'
indent|' '
name|'if'
name|'self'
op|'.'
name|'disk_name'
name|'not'
name|'in'
op|'['
string|"'kernel'"
op|','
string|"'ramdisk'"
op|','
string|"'disk.config'"
op|']'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'False'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'True'
newline|'\n'
nl|'\n'
DECL|member|correct_format
dedent|''
dedent|''
name|'def'
name|'correct_format'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'self'
op|'.'
name|'path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'driver_format'
op|'='
name|'self'
op|'.'
name|'resolve_driver_format'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|create_image
dedent|''
dedent|''
name|'def'
name|'create_image'
op|'('
name|'self'
op|','
name|'prepare_template'
op|','
name|'base'
op|','
name|'size'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'filename'
op|'='
name|'self'
op|'.'
name|'_get_lock_name'
op|'('
name|'base'
op|')'
newline|'\n'
nl|'\n'
op|'@'
name|'utils'
op|'.'
name|'synchronized'
op|'('
name|'filename'
op|','
name|'external'
op|'='
name|'True'
op|','
name|'lock_path'
op|'='
name|'self'
op|'.'
name|'lock_path'
op|')'
newline|'\n'
DECL|function|copy_raw_image
name|'def'
name|'copy_raw_image'
op|'('
name|'base'
op|','
name|'target'
op|','
name|'size'
op|')'
op|':'
newline|'\n'
indent|' '
name|'libvirt_utils'
op|'.'
name|'copy_image'
op|'('
name|'base'
op|','
name|'target'
op|')'
newline|'\n'
name|'if'
name|'size'
op|':'
newline|'\n'
comment|"# class Raw is misnamed, format may not be 'raw' in all cases"
nl|'\n'
indent|' '
name|'image'
op|'='
name|'imgmodel'
op|'.'
name|'LocalFileImage'
op|'('
name|'target'
op|','
nl|'\n'
name|'self'
op|'.'
name|'driver_format'
op|')'
newline|'\n'
name|'disk'
op|'.'
name|'extend'
op|'('
name|'image'
op|','
name|'size'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'generating'
op|'='
string|"'image_id'"
name|'not'
name|'in'
name|'kwargs'
newline|'\n'
name|'if'
name|'generating'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'not'
name|'self'
op|'.'
name|'check_image_exists'
op|'('
op|')'
op|':'
newline|'\n'
comment|'# Generating image in place'
nl|'\n'
indent|' '
name|'prepare_template'
op|'('
name|'target'
op|'='
name|'self'
op|'.'
name|'path'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'not'
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'base'
op|')'
op|':'
newline|'\n'
indent|' '
name|'prepare_template'
op|'('
name|'target'
op|'='
name|'base'
op|','
name|'max_size'
op|'='
name|'size'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
nl|'\n'
comment|'# NOTE(mikal): Update the mtime of the base file so the image'
nl|'\n'
comment|'# cache manager knows it is in use.'
nl|'\n'
dedent|''
name|'libvirt_utils'
op|'.'
name|'update_mtime'
op|'('
name|'base'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'verify_base_size'
op|'('
name|'base'
op|','
name|'size'
op|')'
newline|'\n'
name|'if'
name|'not'
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'self'
op|'.'
name|'path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'fileutils'
op|'.'
name|'remove_path_on_error'
op|'('
name|'self'
op|'.'
name|'path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'copy_raw_image'
op|'('
name|'base'
op|','
name|'self'
op|'.'
name|'path'
op|','
name|'size'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
dedent|''
name|'self'
op|'.'
name|'correct_format'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|resize_image
dedent|''
name|'def'
name|'resize_image'
op|'('
name|'self'
op|','
name|'size'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image'
op|'='
name|'imgmodel'
op|'.'
name|'LocalFileImage'
op|'('
name|'self'
op|'.'
name|'path'
op|','
name|'self'
op|'.'
name|'driver_format'
op|')'
newline|'\n'
name|'disk'
op|'.'
name|'extend'
op|'('
name|'image'
op|','
name|'size'
op|')'
newline|'\n'
nl|'\n'
DECL|member|snapshot_extract
dedent|''
name|'def'
name|'snapshot_extract'
op|'('
name|'self'
op|','
name|'target'
op|','
name|'out_format'
op|')'
op|':'
newline|'\n'
indent|' '
name|'images'
op|'.'
name|'convert_image'
op|'('
name|'self'
op|'.'
name|'path'
op|','
name|'target'
op|','
name|'self'
op|'.'
name|'driver_format'
op|','
name|'out_format'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'staticmethod'
newline|'\n'
DECL|member|is_file_in_instance_path
name|'def'
name|'is_file_in_instance_path'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'True'
newline|'\n'
nl|'\n'
DECL|member|get_model
dedent|''
name|'def'
name|'get_model'
op|'('
name|'self'
op|','
name|'connection'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'imgmodel'
op|'.'
name|'LocalFileImage'
op|'('
name|'self'
op|'.'
name|'path'
op|','
nl|'\n'
name|'imgmodel'
op|'.'
name|'FORMAT_RAW'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|Qcow2
dedent|''
dedent|''
name|'class'
name|'Qcow2'
op|'('
name|'Image'
op|')'
op|':'
newline|'\n'
DECL|member|__init__
indent|' '
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'instance'
op|'='
name|'None'
op|','
name|'disk_name'
op|'='
name|'None'
op|','
name|'path'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'Qcow2'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
string|'"file"'
op|','
string|'"qcow2"'
op|','
name|'is_block_dev'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'path'
op|'='
op|'('
name|'path'
name|'or'
nl|'\n'
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'libvirt_utils'
op|'.'
name|'get_instance_path'
op|'('
name|'instance'
op|')'
op|','
nl|'\n'
name|'disk_name'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'preallocate'
op|'='
op|'('
nl|'\n'
name|'strutils'
op|'.'
name|'to_slug'
op|'('
name|'CONF'
op|'.'
name|'preallocate_images'
op|')'
op|'=='
string|"'space'"
op|')'
newline|'\n'
name|'if'
name|'self'
op|'.'
name|'preallocate'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'driver_io'
op|'='
string|'"native"'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'disk_info_path'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'dirname'
op|'('
name|'self'
op|'.'
name|'path'
op|')'
op|','
nl|'\n'
string|"'disk.info'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'resolve_driver_format'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|create_image
dedent|''
name|'def'
name|'create_image'
op|'('
name|'self'
op|','
name|'prepare_template'
op|','
name|'base'
op|','
name|'size'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'filename'
op|'='
name|'self'
op|'.'
name|'_get_lock_name'
op|'('
name|'base'
op|')'
newline|'\n'
nl|'\n'
op|'@'
name|'utils'
op|'.'
name|'synchronized'
op|'('
name|'filename'
op|','
name|'external'
op|'='
name|'True'
op|','
name|'lock_path'
op|'='
name|'self'
op|'.'
name|'lock_path'
op|')'
newline|'\n'
DECL|function|copy_qcow2_image
name|'def'
name|'copy_qcow2_image'
op|'('
name|'base'
op|','
name|'target'
op|','
name|'size'
op|')'
op|':'
newline|'\n'
comment|'# TODO(pbrady): Consider copying the cow image here'
nl|'\n'
comment|'# with preallocation=metadata set for performance reasons.'
nl|'\n'
comment|"# This would be keyed on a 'preallocate_images' setting."
nl|'\n'
indent|' '
name|'libvirt_utils'
op|'.'
name|'create_cow_image'
op|'('
name|'base'
op|','
name|'target'
op|')'
newline|'\n'
name|'if'
name|'size'
op|':'
newline|'\n'
indent|' '
name|'image'
op|'='
name|'imgmodel'
op|'.'
name|'LocalFileImage'
op|'('
name|'target'
op|','
name|'imgmodel'
op|'.'
name|'FORMAT_QCOW2'
op|')'
newline|'\n'
name|'disk'
op|'.'
name|'extend'
op|'('
name|'image'
op|','
name|'size'
op|')'
newline|'\n'
nl|'\n'
comment|'# Download the unmodified base image unless we already have a copy.'
nl|'\n'
dedent|''
dedent|''
name|'if'
name|'not'
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'base'
op|')'
op|':'
newline|'\n'
indent|' '
name|'prepare_template'
op|'('
name|'target'
op|'='
name|'base'
op|','
name|'max_size'
op|'='
name|'size'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
nl|'\n'
comment|'# NOTE(ankit): Update the mtime of the base file so the image'
nl|'\n'
comment|'# cache manager knows it is in use.'
nl|'\n'
dedent|''
name|'libvirt_utils'
op|'.'
name|'update_mtime'
op|'('
name|'base'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'verify_base_size'
op|'('
name|'base'
op|','
name|'size'
op|')'
newline|'\n'
nl|'\n'
name|'legacy_backing_size'
op|'='
name|'None'
newline|'\n'
name|'legacy_base'
op|'='
name|'base'
newline|'\n'
nl|'\n'
comment|'# Determine whether an existing qcow2 disk uses a legacy backing by'
nl|'\n'
comment|'# actually looking at the image itself and parsing the output of the'
nl|'\n'
comment|'# backing file it expects to be using.'
nl|'\n'
name|'if'
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'self'
op|'.'
name|'path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'backing_path'
op|'='
name|'libvirt_utils'
op|'.'
name|'get_disk_backing_file'
op|'('
name|'self'
op|'.'
name|'path'
op|')'
newline|'\n'
name|'if'
name|'backing_path'
name|'is'
name|'not'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'backing_file'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'basename'
op|'('
name|'backing_path'
op|')'
newline|'\n'
name|'backing_parts'
op|'='
name|'backing_file'
op|'.'
name|'rpartition'
op|'('
string|"'_'"
op|')'
newline|'\n'
name|'if'
name|'backing_file'
op|'!='
name|'backing_parts'
op|'['
op|'-'
number|'1'
op|']'
name|'and'
name|'backing_parts'
op|'['
op|'-'
number|'1'
op|']'
op|'.'
name|'isdigit'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'legacy_backing_size'
op|'='
name|'int'
op|'('
name|'backing_parts'
op|'['
op|'-'
number|'1'
op|']'
op|')'
newline|'\n'
name|'legacy_base'
op|'+='
string|"'_%d'"
op|'%'
name|'legacy_backing_size'
newline|'\n'
name|'legacy_backing_size'
op|'*='
name|'units'
op|'.'
name|'Gi'
newline|'\n'
nl|'\n'
comment|'# Create the legacy backing file if necessary.'
nl|'\n'
dedent|''
dedent|''
dedent|''
name|'if'
name|'legacy_backing_size'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'not'
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'legacy_base'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'fileutils'
op|'.'
name|'remove_path_on_error'
op|'('
name|'legacy_base'
op|')'
op|':'
newline|'\n'
indent|' '
name|'libvirt_utils'
op|'.'
name|'copy_image'
op|'('
name|'base'
op|','
name|'legacy_base'
op|')'
newline|'\n'
name|'image'
op|'='
name|'imgmodel'
op|'.'
name|'LocalFileImage'
op|'('
name|'legacy_base'
op|','
nl|'\n'
name|'imgmodel'
op|'.'
name|'FORMAT_QCOW2'
op|')'
newline|'\n'
name|'disk'
op|'.'
name|'extend'
op|'('
name|'image'
op|','
name|'legacy_backing_size'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
dedent|''
name|'if'
name|'not'
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'self'
op|'.'
name|'path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'fileutils'
op|'.'
name|'remove_path_on_error'
op|'('
name|'self'
op|'.'
name|'path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'copy_qcow2_image'
op|'('
name|'base'
op|','
name|'self'
op|'.'
name|'path'
op|','
name|'size'
op|')'
newline|'\n'
nl|'\n'
DECL|member|resize_image
dedent|''
dedent|''
dedent|''
name|'def'
name|'resize_image'
op|'('
name|'self'
op|','
name|'size'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image'
op|'='
name|'imgmodel'
op|'.'
name|'LocalFileImage'
op|'('
name|'self'
op|'.'
name|'path'
op|','
name|'imgmodel'
op|'.'
name|'FORMAT_QCOW2'
op|')'
newline|'\n'
name|'disk'
op|'.'
name|'extend'
op|'('
name|'image'
op|','
name|'size'
op|')'
newline|'\n'
nl|'\n'
DECL|member|snapshot_extract
dedent|''
name|'def'
name|'snapshot_extract'
op|'('
name|'self'
op|','
name|'target'
op|','
name|'out_format'
op|')'
op|':'
newline|'\n'
indent|' '
name|'libvirt_utils'
op|'.'
name|'extract_snapshot'
op|'('
name|'self'
op|'.'
name|'path'
op|','
string|"'qcow2'"
op|','
nl|'\n'
name|'target'
op|','
nl|'\n'
name|'out_format'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'staticmethod'
newline|'\n'
DECL|member|is_file_in_instance_path
name|'def'
name|'is_file_in_instance_path'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'True'
newline|'\n'
nl|'\n'
DECL|member|get_model
dedent|''
name|'def'
name|'get_model'
op|'('
name|'self'
op|','
name|'connection'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'imgmodel'
op|'.'
name|'LocalFileImage'
op|'('
name|'self'
op|'.'
name|'path'
op|','
nl|'\n'
name|'imgmodel'
op|'.'
name|'FORMAT_QCOW2'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|Lvm
dedent|''
dedent|''
name|'class'
name|'Lvm'
op|'('
name|'Image'
op|')'
op|':'
newline|'\n'
indent|' '
op|'@'
name|'staticmethod'
newline|'\n'
DECL|member|escape
name|'def'
name|'escape'
op|'('
name|'filename'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'filename'
op|'.'
name|'replace'
op|'('
string|"'_'"
op|','
string|"'__'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|__init__
dedent|''
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'instance'
op|'='
name|'None'
op|','
name|'disk_name'
op|'='
name|'None'
op|','
name|'path'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'Lvm'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
string|'"block"'
op|','
string|'"raw"'
op|','
name|'is_block_dev'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'ephemeral_key_uuid'
op|'='
name|'instance'
op|'.'
name|'get'
op|'('
string|"'ephemeral_key_uuid'"
op|')'
newline|'\n'
nl|'\n'
name|'if'
name|'self'
op|'.'
name|'ephemeral_key_uuid'
name|'is'
name|'not'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'key_manager'
op|'='
name|'keymgr'
op|'.'
name|'API'
op|'('
name|'CONF'
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'key_manager'
op|'='
name|'None'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
name|'path'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'path'
op|'='
name|'path'
newline|'\n'
name|'if'
name|'self'
op|'.'
name|'ephemeral_key_uuid'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'info'
op|'='
name|'lvm'
op|'.'
name|'volume_info'
op|'('
name|'path'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'vg'
op|'='
name|'info'
op|'['
string|"'VG'"
op|']'
newline|'\n'
name|'self'
op|'.'
name|'lv'
op|'='
name|'info'
op|'['
string|"'LV'"
op|']'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'vg'
op|'='
name|'CONF'
op|'.'
name|'libvirt'
op|'.'
name|'images_volume_group'
newline|'\n'
dedent|''
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'not'
name|'CONF'
op|'.'
name|'libvirt'
op|'.'
name|'images_volume_group'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'RuntimeError'
op|'('
name|'_'
op|'('
string|"'You should specify'"
nl|'\n'
string|"' images_volume_group'"
nl|'\n'
string|"' flag to use LVM images.'"
op|')'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'vg'
op|'='
name|'CONF'
op|'.'
name|'libvirt'
op|'.'
name|'images_volume_group'
newline|'\n'
name|'self'
op|'.'
name|'lv'
op|'='
string|"'%s_%s'"
op|'%'
op|'('
name|'instance'
op|'.'
name|'uuid'
op|','
nl|'\n'
name|'self'
op|'.'
name|'escape'
op|'('
name|'disk_name'
op|')'
op|')'
newline|'\n'
name|'if'
name|'self'
op|'.'
name|'ephemeral_key_uuid'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'path'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
string|"'/dev'"
op|','
name|'self'
op|'.'
name|'vg'
op|','
name|'self'
op|'.'
name|'lv'
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'lv_path'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
string|"'/dev'"
op|','
name|'self'
op|'.'
name|'vg'
op|','
name|'self'
op|'.'
name|'lv'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'path'
op|'='
string|"'/dev/mapper/'"
op|'+'
name|'dmcrypt'
op|'.'
name|'volume_name'
op|'('
name|'self'
op|'.'
name|'lv'
op|')'
newline|'\n'
nl|'\n'
comment|'# TODO(pbrady): possibly deprecate libvirt.sparse_logical_volumes'
nl|'\n'
comment|'# for the more general preallocate_images'
nl|'\n'
dedent|''
dedent|''
name|'self'
op|'.'
name|'sparse'
op|'='
name|'CONF'
op|'.'
name|'libvirt'
op|'.'
name|'sparse_logical_volumes'
newline|'\n'
name|'self'
op|'.'
name|'preallocate'
op|'='
name|'not'
name|'self'
op|'.'
name|'sparse'
newline|'\n'
nl|'\n'
name|'if'
name|'not'
name|'self'
op|'.'
name|'sparse'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'driver_io'
op|'='
string|'"native"'
newline|'\n'
nl|'\n'
DECL|member|_supports_encryption
dedent|''
dedent|''
name|'def'
name|'_supports_encryption'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'True'
newline|'\n'
nl|'\n'
DECL|member|_can_fallocate
dedent|''
name|'def'
name|'_can_fallocate'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'False'
newline|'\n'
nl|'\n'
DECL|member|create_image
dedent|''
name|'def'
name|'create_image'
op|'('
name|'self'
op|','
name|'prepare_template'
op|','
name|'base'
op|','
name|'size'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
DECL|function|encrypt_lvm_image
indent|' '
name|'def'
name|'encrypt_lvm_image'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'dmcrypt'
op|'.'
name|'create_volume'
op|'('
name|'self'
op|'.'
name|'path'
op|'.'
name|'rpartition'
op|'('
string|"'/'"
op|')'
op|'['
number|'2'
op|']'
op|','
nl|'\n'
name|'self'
op|'.'
name|'lv_path'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'ephemeral_storage_encryption'
op|'.'
name|'cipher'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'ephemeral_storage_encryption'
op|'.'
name|'key_size'
op|','
nl|'\n'
name|'key'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'filename'
op|'='
name|'self'
op|'.'
name|'_get_lock_name'
op|'('
name|'base'
op|')'
newline|'\n'
nl|'\n'
op|'@'
name|'utils'
op|'.'
name|'synchronized'
op|'('
name|'filename'
op|','
name|'external'
op|'='
name|'True'
op|','
name|'lock_path'
op|'='
name|'self'
op|'.'
name|'lock_path'
op|')'
newline|'\n'
DECL|function|create_lvm_image
name|'def'
name|'create_lvm_image'
op|'('
name|'base'
op|','
name|'size'
op|')'
op|':'
newline|'\n'
indent|' '
name|'base_size'
op|'='
name|'disk'
op|'.'
name|'get_disk_size'
op|'('
name|'base'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'verify_base_size'
op|'('
name|'base'
op|','
name|'size'
op|','
name|'base_size'
op|'='
name|'base_size'
op|')'
newline|'\n'
name|'resize'
op|'='
name|'size'
op|'>'
name|'base_size'
newline|'\n'
name|'size'
op|'='
name|'size'
name|'if'
name|'resize'
name|'else'
name|'base_size'
newline|'\n'
name|'lvm'
op|'.'
name|'create_volume'
op|'('
name|'self'
op|'.'
name|'vg'
op|','
name|'self'
op|'.'
name|'lv'
op|','
nl|'\n'
name|'size'
op|','
name|'sparse'
op|'='
name|'self'
op|'.'
name|'sparse'
op|')'
newline|'\n'
name|'if'
name|'self'
op|'.'
name|'ephemeral_key_uuid'
name|'is'
name|'not'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'encrypt_lvm_image'
op|'('
op|')'
newline|'\n'
comment|"# NOTE: by calling convert_image_unsafe here we're"
nl|'\n'
comment|'# telling qemu-img convert to do format detection on the input,'
nl|'\n'
comment|"# because we don't know what the format is. For example,"
nl|'\n'
comment|'# we might have downloaded a qcow2 image, or created an'
nl|'\n'
comment|"# ephemeral filesystem locally, we just don't know here. Having"
nl|'\n'
comment|'# audited this, all current sources have been sanity checked,'
nl|'\n'
comment|"# either because they're locally generated, or because they have"
nl|'\n'
comment|'# come from images.fetch_to_raw. However, this is major code smell.'
nl|'\n'
dedent|''
name|'images'
op|'.'
name|'convert_image_unsafe'
op|'('
name|'base'
op|','
name|'self'
op|'.'
name|'path'
op|','
name|'self'
op|'.'
name|'driver_format'
op|','
nl|'\n'
name|'run_as_root'
op|'='
name|'True'
op|')'
newline|'\n'
name|'if'
name|'resize'
op|':'
newline|'\n'
indent|' '
name|'disk'
op|'.'
name|'resize2fs'
op|'('
name|'self'
op|'.'
name|'path'
op|','
name|'run_as_root'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'generated'
op|'='
string|"'ephemeral_size'"
name|'in'
name|'kwargs'
newline|'\n'
name|'if'
name|'self'
op|'.'
name|'ephemeral_key_uuid'
name|'is'
name|'not'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'if'
string|"'context'"
name|'in'
name|'kwargs'
op|':'
newline|'\n'
indent|' '
name|'try'
op|':'
newline|'\n'
comment|'# NOTE(dgenin): Key manager corresponding to the'
nl|'\n'
comment|'# specific backend catches and reraises an'
nl|'\n'
comment|'# an exception if key retrieval fails.'
nl|'\n'
indent|' '
name|'key'
op|'='
name|'self'
op|'.'
name|'key_manager'
op|'.'
name|'get'
op|'('
name|'kwargs'
op|'['
string|"'context'"
op|']'
op|','
nl|'\n'
name|'self'
op|'.'
name|'ephemeral_key_uuid'
op|')'
op|'.'
name|'get_encoded'
op|'('
op|')'
newline|'\n'
dedent|''
name|'except'
name|'Exception'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'excutils'
op|'.'
name|'save_and_reraise_exception'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'LOG'
op|'.'
name|'error'
op|'('
name|'_LE'
op|'('
string|'"Failed to retrieve ephemeral encryption"'
nl|'\n'
string|'" key"'
op|')'
op|')'
newline|'\n'
dedent|''
dedent|''
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'NovaException'
op|'('
nl|'\n'
name|'_'
op|'('
string|'"Instance disk to be encrypted but no context provided"'
op|')'
op|')'
newline|'\n'
comment|'# Generate images with specified size right on volume'
nl|'\n'
dedent|''
dedent|''
name|'if'
name|'generated'
name|'and'
name|'size'
op|':'
newline|'\n'
indent|' '
name|'lvm'
op|'.'
name|'create_volume'
op|'('
name|'self'
op|'.'
name|'vg'
op|','
name|'self'
op|'.'
name|'lv'
op|','
nl|'\n'
name|'size'
op|','
name|'sparse'
op|'='
name|'self'
op|'.'
name|'sparse'
op|')'
newline|'\n'
name|'with'
name|'self'
op|'.'
name|'remove_volume_on_error'
op|'('
name|'self'
op|'.'
name|'path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'self'
op|'.'
name|'ephemeral_key_uuid'
name|'is'
name|'not'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'encrypt_lvm_image'
op|'('
op|')'
newline|'\n'
dedent|''
name|'prepare_template'
op|'('
name|'target'
op|'='
name|'self'
op|'.'
name|'path'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'not'
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'base'
op|')'
op|':'
newline|'\n'
indent|' '
name|'prepare_template'
op|'('
name|'target'
op|'='
name|'base'
op|','
name|'max_size'
op|'='
name|'size'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'with'
name|'self'
op|'.'
name|'remove_volume_on_error'
op|'('
name|'self'
op|'.'
name|'path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'create_lvm_image'
op|'('
name|'base'
op|','
name|'size'
op|')'
newline|'\n'
nl|'\n'
comment|'# NOTE(nic): Resizing the image is already handled in create_image(),'
nl|'\n'
comment|'# and migrate/resize is not supported with LVM yet, so this is a no-op'
nl|'\n'
DECL|member|resize_image
dedent|''
dedent|''
dedent|''
name|'def'
name|'resize_image'
op|'('
name|'self'
op|','
name|'size'
op|')'
op|':'
newline|'\n'
indent|' '
name|'pass'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'contextlib'
op|'.'
name|'contextmanager'
newline|'\n'
DECL|member|remove_volume_on_error
name|'def'
name|'remove_volume_on_error'
op|'('
name|'self'
op|','
name|'path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'try'
op|':'
newline|'\n'
indent|' '
name|'yield'
newline|'\n'
dedent|''
name|'except'
name|'Exception'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'excutils'
op|'.'
name|'save_and_reraise_exception'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'self'
op|'.'
name|'ephemeral_key_uuid'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'lvm'
op|'.'
name|'remove_volumes'
op|'('
op|'['
name|'path'
op|']'
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'dmcrypt'
op|'.'
name|'delete_volume'
op|'('
name|'path'
op|'.'
name|'rpartition'
op|'('
string|"'/'"
op|')'
op|'['
number|'2'
op|']'
op|')'
newline|'\n'
name|'lvm'
op|'.'
name|'remove_volumes'
op|'('
op|'['
name|'self'
op|'.'
name|'lv_path'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|snapshot_extract
dedent|''
dedent|''
dedent|''
dedent|''
name|'def'
name|'snapshot_extract'
op|'('
name|'self'
op|','
name|'target'
op|','
name|'out_format'
op|')'
op|':'
newline|'\n'
indent|' '
name|'images'
op|'.'
name|'convert_image'
op|'('
name|'self'
op|'.'
name|'path'
op|','
name|'target'
op|','
name|'self'
op|'.'
name|'driver_format'
op|','
nl|'\n'
name|'out_format'
op|','
name|'run_as_root'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
DECL|member|get_model
dedent|''
name|'def'
name|'get_model'
op|'('
name|'self'
op|','
name|'connection'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'imgmodel'
op|'.'
name|'LocalBlockImage'
op|'('
name|'self'
op|'.'
name|'path'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|Rbd
dedent|''
dedent|''
name|'class'
name|'Rbd'
op|'('
name|'Image'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|variable|SUPPORTS_CLONE
indent|' '
name|'SUPPORTS_CLONE'
op|'='
name|'True'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'instance'
op|'='
name|'None'
op|','
name|'disk_name'
op|'='
name|'None'
op|','
name|'path'
op|'='
name|'None'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'Rbd'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
string|'"block"'
op|','
string|'"rbd"'
op|','
name|'is_block_dev'
op|'='
name|'False'
op|')'
newline|'\n'
name|'if'
name|'path'
op|':'
newline|'\n'
indent|' '
name|'try'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'rbd_name'
op|'='
name|'path'
op|'.'
name|'split'
op|'('
string|"'/'"
op|')'
op|'['
number|'1'
op|']'
newline|'\n'
dedent|''
name|'except'
name|'IndexError'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'InvalidDevicePath'
op|'('
name|'path'
op|'='
name|'path'
op|')'
newline|'\n'
dedent|''
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'rbd_name'
op|'='
string|"'%s_%s'"
op|'%'
op|'('
name|'instance'
op|'.'
name|'uuid'
op|','
name|'disk_name'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
name|'not'
name|'CONF'
op|'.'
name|'libvirt'
op|'.'
name|'images_rbd_pool'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'RuntimeError'
op|'('
name|'_'
op|'('
string|"'You should specify'"
nl|'\n'
string|"' images_rbd_pool'"
nl|'\n'
string|"' flag to use rbd images.'"
op|')'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'pool'
op|'='
name|'CONF'
op|'.'
name|'libvirt'
op|'.'
name|'images_rbd_pool'
newline|'\n'
name|'self'
op|'.'
name|'discard_mode'
op|'='
name|'CONF'
op|'.'
name|'libvirt'
op|'.'
name|'hw_disk_discard'
newline|'\n'
name|'self'
op|'.'
name|'rbd_user'
op|'='
name|'CONF'
op|'.'
name|'libvirt'
op|'.'
name|'rbd_user'
newline|'\n'
name|'self'
op|'.'
name|'ceph_conf'
op|'='
name|'CONF'
op|'.'
name|'libvirt'
op|'.'
name|'images_rbd_ceph_conf'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'driver'
op|'='
name|'rbd_utils'
op|'.'
name|'RBDDriver'
op|'('
nl|'\n'
name|'pool'
op|'='
name|'self'
op|'.'
name|'pool'
op|','
nl|'\n'
name|'ceph_conf'
op|'='
name|'self'
op|'.'
name|'ceph_conf'
op|','
nl|'\n'
name|'rbd_user'
op|'='
name|'self'
op|'.'
name|'rbd_user'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'path'
op|'='
string|"'rbd:%s/%s'"
op|'%'
op|'('
name|'self'
op|'.'
name|'pool'
op|','
name|'self'
op|'.'
name|'rbd_name'
op|')'
newline|'\n'
name|'if'
name|'self'
op|'.'
name|'rbd_user'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'path'
op|'+='
string|"':id='"
op|'+'
name|'self'
op|'.'
name|'rbd_user'
newline|'\n'
dedent|''
name|'if'
name|'self'
op|'.'
name|'ceph_conf'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'path'
op|'+='
string|"':conf='"
op|'+'
name|'self'
op|'.'
name|'ceph_conf'
newline|'\n'
nl|'\n'
DECL|member|libvirt_info
dedent|''
dedent|''
name|'def'
name|'libvirt_info'
op|'('
name|'self'
op|','
name|'disk_bus'
op|','
name|'disk_dev'
op|','
name|'device_type'
op|','
name|'cache_mode'
op|','
nl|'\n'
name|'extra_specs'
op|','
name|'hypervisor_version'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Get `LibvirtConfigGuestDisk` filled for this image.\n\n :disk_dev: Disk bus device name\n :disk_bus: Disk bus type\n :device_type: Device type for this image.\n :cache_mode: Caching mode for this image\n :extra_specs: Instance type extra specs dict.\n """'
newline|'\n'
name|'info'
op|'='
name|'vconfig'
op|'.'
name|'LibvirtConfigGuestDisk'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'hosts'
op|','
name|'ports'
op|'='
name|'self'
op|'.'
name|'driver'
op|'.'
name|'get_mon_addrs'
op|'('
op|')'
newline|'\n'
name|'info'
op|'.'
name|'source_device'
op|'='
name|'device_type'
newline|'\n'
name|'info'
op|'.'
name|'driver_format'
op|'='
string|"'raw'"
newline|'\n'
name|'info'
op|'.'
name|'driver_cache'
op|'='
name|'cache_mode'
newline|'\n'
name|'info'
op|'.'
name|'driver_discard'
op|'='
name|'self'
op|'.'
name|'discard_mode'
newline|'\n'
name|'info'
op|'.'
name|'target_bus'
op|'='
name|'disk_bus'
newline|'\n'
name|'info'
op|'.'
name|'target_dev'
op|'='
name|'disk_dev'
newline|'\n'
name|'info'
op|'.'
name|'source_type'
op|'='
string|"'network'"
newline|'\n'
name|'info'
op|'.'
name|'source_protocol'
op|'='
string|"'rbd'"
newline|'\n'
name|'info'
op|'.'
name|'source_name'
op|'='
string|"'%s/%s'"
op|'%'
op|'('
name|'self'
op|'.'
name|'pool'
op|','
name|'self'
op|'.'
name|'rbd_name'
op|')'
newline|'\n'
name|'info'
op|'.'
name|'source_hosts'
op|'='
name|'hosts'
newline|'\n'
name|'info'
op|'.'
name|'source_ports'
op|'='
name|'ports'
newline|'\n'
name|'auth_enabled'
op|'='
op|'('
name|'CONF'
op|'.'
name|'libvirt'
op|'.'
name|'rbd_user'
name|'is'
name|'not'
name|'None'
op|')'
newline|'\n'
name|'if'
name|'CONF'
op|'.'
name|'libvirt'
op|'.'
name|'rbd_secret_uuid'
op|':'
newline|'\n'
indent|' '
name|'info'
op|'.'
name|'auth_secret_uuid'
op|'='
name|'CONF'
op|'.'
name|'libvirt'
op|'.'
name|'rbd_secret_uuid'
newline|'\n'
name|'auth_enabled'
op|'='
name|'True'
comment|'# Force authentication locally'
newline|'\n'
name|'if'
name|'CONF'
op|'.'
name|'libvirt'
op|'.'
name|'rbd_user'
op|':'
newline|'\n'
indent|' '
name|'info'
op|'.'
name|'auth_username'
op|'='
name|'CONF'
op|'.'
name|'libvirt'
op|'.'
name|'rbd_user'
newline|'\n'
dedent|''
dedent|''
name|'if'
name|'auth_enabled'
op|':'
newline|'\n'
indent|' '
name|'info'
op|'.'
name|'auth_secret_type'
op|'='
string|"'ceph'"
newline|'\n'
name|'info'
op|'.'
name|'auth_secret_uuid'
op|'='
name|'CONF'
op|'.'
name|'libvirt'
op|'.'
name|'rbd_secret_uuid'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'disk_qos'
op|'('
name|'info'
op|','
name|'extra_specs'
op|')'
newline|'\n'
nl|'\n'
name|'return'
name|'info'
newline|'\n'
nl|'\n'
DECL|member|_can_fallocate
dedent|''
name|'def'
name|'_can_fallocate'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'False'
newline|'\n'
nl|'\n'
DECL|member|check_image_exists
dedent|''
name|'def'
name|'check_image_exists'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'exists'
op|'('
name|'self'
op|'.'
name|'rbd_name'
op|')'
newline|'\n'
nl|'\n'
DECL|member|get_disk_size
dedent|''
name|'def'
name|'get_disk_size'
op|'('
name|'self'
op|','
name|'name'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Returns the size of the virtual disk in bytes.\n\n The name argument is ignored since this backend already knows\n its name, and callers may pass a non-existent local file path.\n """'
newline|'\n'
name|'return'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'size'
op|'('
name|'self'
op|'.'
name|'rbd_name'
op|')'
newline|'\n'
nl|'\n'
DECL|member|create_image
dedent|''
name|'def'
name|'create_image'
op|'('
name|'self'
op|','
name|'prepare_template'
op|','
name|'base'
op|','
name|'size'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
nl|'\n'
indent|' '
name|'if'
name|'not'
name|'self'
op|'.'
name|'check_image_exists'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'prepare_template'
op|'('
name|'target'
op|'='
name|'base'
op|','
name|'max_size'
op|'='
name|'size'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
nl|'\n'
comment|'# prepare_template() may have cloned the image into a new rbd'
nl|'\n'
comment|'# image already instead of downloading it locally'
nl|'\n'
dedent|''
name|'if'
name|'not'
name|'self'
op|'.'
name|'check_image_exists'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'driver'
op|'.'
name|'import_image'
op|'('
name|'base'
op|','
name|'self'
op|'.'
name|'rbd_name'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'verify_base_size'
op|'('
name|'base'
op|','
name|'size'
op|')'
newline|'\n'
nl|'\n'
name|'if'
name|'size'
name|'and'
name|'size'
op|'>'
name|'self'
op|'.'
name|'get_disk_size'
op|'('
name|'self'
op|'.'
name|'rbd_name'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'driver'
op|'.'
name|'resize'
op|'('
name|'self'
op|'.'
name|'rbd_name'
op|','
name|'size'
op|')'
newline|'\n'
nl|'\n'
DECL|member|resize_image
dedent|''
dedent|''
name|'def'
name|'resize_image'
op|'('
name|'self'
op|','
name|'size'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'driver'
op|'.'
name|'resize'
op|'('
name|'self'
op|'.'
name|'rbd_name'
op|','
name|'size'
op|')'
newline|'\n'
nl|'\n'
DECL|member|snapshot_extract
dedent|''
name|'def'
name|'snapshot_extract'
op|'('
name|'self'
op|','
name|'target'
op|','
name|'out_format'
op|')'
op|':'
newline|'\n'
indent|' '
name|'images'
op|'.'
name|'convert_image'
op|'('
name|'self'
op|'.'
name|'path'
op|','
name|'target'
op|','
string|"'raw'"
op|','
name|'out_format'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'staticmethod'
newline|'\n'
DECL|member|is_shared_block_storage
name|'def'
name|'is_shared_block_storage'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'True'
newline|'\n'
nl|'\n'
DECL|member|clone
dedent|''
name|'def'
name|'clone'
op|'('
name|'self'
op|','
name|'context'
op|','
name|'image_id_or_uri'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image_meta'
op|'='
name|'IMAGE_API'
op|'.'
name|'get'
op|'('
name|'context'
op|','
name|'image_id_or_uri'
op|','
nl|'\n'
name|'include_locations'
op|'='
name|'True'
op|')'
newline|'\n'
name|'locations'
op|'='
name|'image_meta'
op|'['
string|"'locations'"
op|']'
newline|'\n'
nl|'\n'
name|'LOG'
op|'.'
name|'debug'
op|'('
string|"'Image locations are: %(locs)s'"
op|'%'
op|'{'
string|"'locs'"
op|':'
name|'locations'
op|'}'
op|')'
newline|'\n'
nl|'\n'
name|'if'
name|'image_meta'
op|'.'
name|'get'
op|'('
string|"'disk_format'"
op|')'
name|'not'
name|'in'
op|'['
string|"'raw'"
op|','
string|"'iso'"
op|']'
op|':'
newline|'\n'
indent|' '
name|'reason'
op|'='
name|'_'
op|'('
string|"'Image is not raw format'"
op|')'
newline|'\n'
name|'raise'
name|'exception'
op|'.'
name|'ImageUnacceptable'
op|'('
name|'image_id'
op|'='
name|'image_id_or_uri'
op|','
nl|'\n'
name|'reason'
op|'='
name|'reason'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'for'
name|'location'
name|'in'
name|'locations'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'is_cloneable'
op|'('
name|'location'
op|','
name|'image_meta'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'clone'
op|'('
name|'location'
op|','
name|'self'
op|'.'
name|'rbd_name'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'reason'
op|'='
name|'_'
op|'('
string|"'No image locations are accessible'"
op|')'
newline|'\n'
name|'raise'
name|'exception'
op|'.'
name|'ImageUnacceptable'
op|'('
name|'image_id'
op|'='
name|'image_id_or_uri'
op|','
nl|'\n'
name|'reason'
op|'='
name|'reason'
op|')'
newline|'\n'
nl|'\n'
DECL|member|get_model
dedent|''
name|'def'
name|'get_model'
op|'('
name|'self'
op|','
name|'connection'
op|')'
op|':'
newline|'\n'
indent|' '
name|'secret'
op|'='
name|'None'
newline|'\n'
name|'if'
name|'CONF'
op|'.'
name|'libvirt'
op|'.'
name|'rbd_secret_uuid'
op|':'
newline|'\n'
indent|' '
name|'secretobj'
op|'='
name|'connection'
op|'.'
name|'secretLookupByUUIDString'
op|'('
nl|'\n'
name|'CONF'
op|'.'
name|'libvirt'
op|'.'
name|'rbd_secret_uuid'
op|')'
newline|'\n'
name|'secret'
op|'='
name|'base64'
op|'.'
name|'b64encode'
op|'('
name|'secretobj'
op|'.'
name|'value'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'hosts'
op|','
name|'ports'
op|'='
name|'self'
op|'.'
name|'driver'
op|'.'
name|'get_mon_addrs'
op|'('
op|')'
newline|'\n'
name|'servers'
op|'='
op|'['
name|'str'
op|'('
string|"':'"
op|'.'
name|'join'
op|'('
name|'k'
op|')'
op|')'
name|'for'
name|'k'
name|'in'
name|'zip'
op|'('
name|'hosts'
op|','
name|'ports'
op|')'
op|']'
newline|'\n'
nl|'\n'
name|'return'
name|'imgmodel'
op|'.'
name|'RBDImage'
op|'('
name|'self'
op|'.'
name|'rbd_name'
op|','
nl|'\n'
name|'self'
op|'.'
name|'pool'
op|','
nl|'\n'
name|'self'
op|'.'
name|'rbd_user'
op|','
nl|'\n'
name|'secret'
op|','
nl|'\n'
name|'servers'
op|')'
newline|'\n'
nl|'\n'
DECL|member|import_file
dedent|''
name|'def'
name|'import_file'
op|'('
name|'self'
op|','
name|'instance'
op|','
name|'local_file'
op|','
name|'remote_name'
op|')'
op|':'
newline|'\n'
indent|' '
name|'name'
op|'='
string|"'%s_%s'"
op|'%'
op|'('
name|'instance'
op|'.'
name|'uuid'
op|','
name|'remote_name'
op|')'
newline|'\n'
name|'if'
name|'self'
op|'.'
name|'check_image_exists'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'driver'
op|'.'
name|'remove_image'
op|'('
name|'name'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'driver'
op|'.'
name|'import_image'
op|'('
name|'local_file'
op|','
name|'name'
op|')'
newline|'\n'
nl|'\n'
DECL|member|create_snap
dedent|''
name|'def'
name|'create_snap'
op|'('
name|'self'
op|','
name|'name'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'create_snap'
op|'('
name|'self'
op|'.'
name|'rbd_name'
op|','
name|'name'
op|')'
newline|'\n'
nl|'\n'
DECL|member|remove_snap
dedent|''
name|'def'
name|'remove_snap'
op|'('
name|'self'
op|','
name|'name'
op|','
name|'ignore_errors'
op|'='
name|'False'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'remove_snap'
op|'('
name|'self'
op|'.'
name|'rbd_name'
op|','
name|'name'
op|','
name|'ignore_errors'
op|')'
newline|'\n'
nl|'\n'
DECL|member|rollback_to_snap
dedent|''
name|'def'
name|'rollback_to_snap'
op|'('
name|'self'
op|','
name|'name'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'rollback_to_snap'
op|'('
name|'self'
op|'.'
name|'rbd_name'
op|','
name|'name'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_get_parent_pool
dedent|''
name|'def'
name|'_get_parent_pool'
op|'('
name|'self'
op|','
name|'context'
op|','
name|'base_image_id'
op|','
name|'fsid'
op|')'
op|':'
newline|'\n'
indent|' '
name|'parent_pool'
op|'='
name|'None'
newline|'\n'
name|'try'
op|':'
newline|'\n'
comment|'# The easy way -- the image is an RBD clone, so use the parent'
nl|'\n'
comment|"# images' storage pool"
nl|'\n'
indent|' '
name|'parent_pool'
op|','
name|'_im'
op|','
name|'_snap'
op|'='
name|'self'
op|'.'
name|'driver'
op|'.'
name|'parent_info'
op|'('
name|'self'
op|'.'
name|'rbd_name'
op|')'
newline|'\n'
dedent|''
name|'except'
name|'exception'
op|'.'
name|'ImageUnacceptable'
op|':'
newline|'\n'
comment|'# The hard way -- the image is itself a parent, so ask Glance'
nl|'\n'
comment|'# where it came from'
nl|'\n'
indent|' '
name|'LOG'
op|'.'
name|'debug'
op|'('
string|"'No parent info for %s; asking the Image API where its '"
nl|'\n'
string|"'store is'"
op|','
name|'base_image_id'
op|')'
newline|'\n'
name|'try'
op|':'
newline|'\n'
indent|' '
name|'image_meta'
op|'='
name|'IMAGE_API'
op|'.'
name|'get'
op|'('
name|'context'
op|','
name|'base_image_id'
op|','
nl|'\n'
name|'include_locations'
op|'='
name|'True'
op|')'
newline|'\n'
dedent|''
name|'except'
name|'Exception'
name|'as'
name|'e'
op|':'
newline|'\n'
indent|' '
name|'LOG'
op|'.'
name|'debug'
op|'('
string|"'Unable to get image %(image_id)s; error: %(error)s'"
op|','
nl|'\n'
op|'{'
string|"'image_id'"
op|':'
name|'base_image_id'
op|','
string|"'error'"
op|':'
name|'e'
op|'}'
op|')'
newline|'\n'
name|'image_meta'
op|'='
op|'{'
op|'}'
newline|'\n'
nl|'\n'
comment|'# Find the first location that is in the same RBD cluster'
nl|'\n'
dedent|''
name|'for'
name|'location'
name|'in'
name|'image_meta'
op|'.'
name|'get'
op|'('
string|"'locations'"
op|','
op|'['
op|']'
op|')'
op|':'
newline|'\n'
indent|' '
name|'try'
op|':'
newline|'\n'
indent|' '
name|'parent_fsid'
op|','
name|'parent_pool'
op|','
name|'_im'
op|','
name|'_snap'
op|'='
name|'self'
op|'.'
name|'driver'
op|'.'
name|'parse_url'
op|'('
name|'location'
op|'['
string|"'url'"
op|']'
op|')'
newline|'\n'
name|'if'
name|'parent_fsid'
op|'=='
name|'fsid'
op|':'
newline|'\n'
indent|' '
name|'break'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'parent_pool'
op|'='
name|'None'
newline|'\n'
dedent|''
dedent|''
name|'except'
name|'exception'
op|'.'
name|'ImageUnacceptable'
op|':'
newline|'\n'
indent|' '
name|'continue'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
dedent|''
name|'if'
name|'not'
name|'parent_pool'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'ImageUnacceptable'
op|'('
nl|'\n'
name|'_'
op|'('
string|"'Cannot determine the parent storage pool for %s; '"
nl|'\n'
string|"'cannot determine where to store images'"
op|')'
op|'%'
nl|'\n'
name|'base_image_id'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'return'
name|'parent_pool'
newline|'\n'
nl|'\n'
DECL|member|direct_snapshot
dedent|''
name|'def'
name|'direct_snapshot'
op|'('
name|'self'
op|','
name|'context'
op|','
name|'snapshot_name'
op|','
name|'image_format'
op|','
nl|'\n'
name|'image_id'
op|','
name|'base_image_id'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Creates an RBD snapshot directly.\n """'
newline|'\n'
name|'fsid'
op|'='
name|'self'
op|'.'
name|'driver'
op|'.'
name|'get_fsid'
op|'('
op|')'
newline|'\n'
comment|"# NOTE(nic): Nova has zero comprehension of how Glance's image store"
nl|'\n'
comment|'# is configured, but we can infer what storage pool Glance is using'
nl|'\n'
comment|'# by looking at the parent image. If using authx, write access should'
nl|'\n'
comment|'# be enabled on that pool for the Nova user'
nl|'\n'
name|'parent_pool'
op|'='
name|'self'
op|'.'
name|'_get_parent_pool'
op|'('
name|'context'
op|','
name|'base_image_id'
op|','
name|'fsid'
op|')'
newline|'\n'
nl|'\n'
comment|"# Snapshot the disk and clone it into Glance's storage pool. librbd"
nl|'\n'
comment|'# requires that snapshots be set to "protected" in order to clone them'
nl|'\n'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'create_snap'
op|'('
name|'self'
op|'.'
name|'rbd_name'
op|','
name|'snapshot_name'
op|','
name|'protect'
op|'='
name|'True'
op|')'
newline|'\n'
name|'location'
op|'='
op|'{'
string|"'url'"
op|':'
string|"'rbd://%(fsid)s/%(pool)s/%(image)s/%(snap)s'"
op|'%'
nl|'\n'
name|'dict'
op|'('
name|'fsid'
op|'='
name|'fsid'
op|','
nl|'\n'
name|'pool'
op|'='
name|'self'
op|'.'
name|'pool'
op|','
nl|'\n'
name|'image'
op|'='
name|'self'
op|'.'
name|'rbd_name'
op|','
nl|'\n'
name|'snap'
op|'='
name|'snapshot_name'
op|')'
op|'}'
newline|'\n'
name|'try'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'driver'
op|'.'
name|'clone'
op|'('
name|'location'
op|','
name|'image_id'
op|','
name|'dest_pool'
op|'='
name|'parent_pool'
op|')'
newline|'\n'
comment|'# Flatten the image, which detaches it from the source snapshot'
nl|'\n'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'flatten'
op|'('
name|'image_id'
op|','
name|'pool'
op|'='
name|'parent_pool'
op|')'
newline|'\n'
dedent|''
name|'finally'
op|':'
newline|'\n'
comment|'# all done with the source snapshot, clean it up'
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'cleanup_direct_snapshot'
op|'('
name|'location'
op|')'
newline|'\n'
nl|'\n'
comment|"# Glance makes a protected snapshot called 'snap' on uploaded"
nl|'\n'
comment|"# images and hands it out, so we'll do that too. The name of"
nl|'\n'
comment|"# the snapshot doesn't really matter, this just uses what the"
nl|'\n'
comment|'# glance-store rbd backend sets (which is not configurable).'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'driver'
op|'.'
name|'create_snap'
op|'('
name|'image_id'
op|','
string|"'snap'"
op|','
name|'pool'
op|'='
name|'parent_pool'
op|','
nl|'\n'
name|'protect'
op|'='
name|'True'
op|')'
newline|'\n'
name|'return'
op|'('
string|"'rbd://%(fsid)s/%(pool)s/%(image)s/snap'"
op|'%'
nl|'\n'
name|'dict'
op|'('
name|'fsid'
op|'='
name|'fsid'
op|','
name|'pool'
op|'='
name|'parent_pool'
op|','
name|'image'
op|'='
name|'image_id'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|cleanup_direct_snapshot
dedent|''
name|'def'
name|'cleanup_direct_snapshot'
op|'('
name|'self'
op|','
name|'location'
op|','
name|'also_destroy_volume'
op|'='
name|'False'
op|','
nl|'\n'
name|'ignore_errors'
op|'='
name|'False'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Unprotects and destroys the name snapshot.\n\n With also_destroy_volume=True, it will also cleanup/destroy the parent\n volume. This is useful for cleaning up when the target volume fails\n to snapshot properly.\n """'
newline|'\n'
name|'if'
name|'location'
op|':'
newline|'\n'
indent|' '
name|'_fsid'
op|','
name|'_pool'
op|','
name|'_im'
op|','
name|'_snap'
op|'='
name|'self'
op|'.'
name|'driver'
op|'.'
name|'parse_url'
op|'('
name|'location'
op|'['
string|"'url'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'remove_snap'
op|'('
name|'_im'
op|','
name|'_snap'
op|','
name|'pool'
op|'='
name|'_pool'
op|','
name|'force'
op|'='
name|'True'
op|','
nl|'\n'
name|'ignore_errors'
op|'='
name|'ignore_errors'
op|')'
newline|'\n'
name|'if'
name|'also_destroy_volume'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'driver'
op|'.'
name|'destroy_volume'
op|'('
name|'_im'
op|','
name|'pool'
op|'='
name|'_pool'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|Ploop
dedent|''
dedent|''
dedent|''
dedent|''
name|'class'
name|'Ploop'
op|'('
name|'Image'
op|')'
op|':'
newline|'\n'
DECL|member|__init__
indent|' '
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'instance'
op|'='
name|'None'
op|','
name|'disk_name'
op|'='
name|'None'
op|','
name|'path'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'Ploop'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
string|'"file"'
op|','
string|'"ploop"'
op|','
name|'is_block_dev'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'path'
op|'='
op|'('
name|'path'
name|'or'
nl|'\n'
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'libvirt_utils'
op|'.'
name|'get_instance_path'
op|'('
name|'instance'
op|')'
op|','
nl|'\n'
name|'disk_name'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'resolve_driver_format'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|create_image
dedent|''
name|'def'
name|'create_image'
op|'('
name|'self'
op|','
name|'prepare_template'
op|','
name|'base'
op|','
name|'size'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'filename'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'split'
op|'('
name|'base'
op|')'
op|'['
op|'-'
number|'1'
op|']'
newline|'\n'
nl|'\n'
op|'@'
name|'utils'
op|'.'
name|'synchronized'
op|'('
name|'filename'
op|','
name|'external'
op|'='
name|'True'
op|','
name|'lock_path'
op|'='
name|'self'
op|'.'
name|'lock_path'
op|')'
newline|'\n'
DECL|function|create_ploop_image
name|'def'
name|'create_ploop_image'
op|'('
name|'base'
op|','
name|'target'
op|','
name|'size'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image_path'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'target'
op|','
string|'"root.hds"'
op|')'
newline|'\n'
name|'libvirt_utils'
op|'.'
name|'copy_image'
op|'('
name|'base'
op|','
name|'image_path'
op|')'
newline|'\n'
name|'utils'
op|'.'
name|'execute'
op|'('
string|"'ploop'"
op|','
string|"'restore-descriptor'"
op|','
string|"'-f'"
op|','
name|'self'
op|'.'
name|'pcs_format'
op|','
nl|'\n'
name|'target'
op|','
name|'image_path'
op|')'
newline|'\n'
name|'if'
name|'size'
op|':'
newline|'\n'
indent|' '
name|'dd_path'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'self'
op|'.'
name|'path'
op|','
string|'"DiskDescriptor.xml"'
op|')'
newline|'\n'
name|'utils'
op|'.'
name|'execute'
op|'('
string|"'ploop'"
op|','
string|"'grow'"
op|','
string|"'-s'"
op|','
string|"'%dK'"
op|'%'
op|'('
name|'size'
op|'>>'
number|'10'
op|')'
op|','
nl|'\n'
name|'dd_path'
op|','
name|'run_as_root'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'if'
name|'not'
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'self'
op|'.'
name|'path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'CONF'
op|'.'
name|'force_raw_images'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'pcs_format'
op|'='
string|'"raw"'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'image_meta'
op|'='
name|'IMAGE_API'
op|'.'
name|'get'
op|'('
name|'kwargs'
op|'['
string|'"context"'
op|']'
op|','
nl|'\n'
name|'kwargs'
op|'['
string|'"image_id"'
op|']'
op|')'
newline|'\n'
name|'format'
op|'='
name|'image_meta'
op|'.'
name|'get'
op|'('
string|'"disk_format"'
op|')'
newline|'\n'
name|'if'
name|'format'
op|'=='
string|'"ploop"'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'pcs_format'
op|'='
string|'"expanded"'
newline|'\n'
dedent|''
name|'elif'
name|'format'
op|'=='
string|'"raw"'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'pcs_format'
op|'='
string|'"raw"'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'reason'
op|'='
name|'_'
op|'('
string|'"PCS doesn\'t support images in %s format."'
nl|'\n'
string|'" You should either set force_raw_images=True"'
nl|'\n'
string|'" in config or upload an image in ploop"'
nl|'\n'
string|'" or raw format."'
op|')'
op|'%'
name|'format'
newline|'\n'
name|'raise'
name|'exception'
op|'.'
name|'ImageUnacceptable'
op|'('
nl|'\n'
name|'image_id'
op|'='
name|'kwargs'
op|'['
string|'"image_id"'
op|']'
op|','
nl|'\n'
name|'reason'
op|'='
name|'reason'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
dedent|''
name|'if'
name|'not'
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'base'
op|')'
op|':'
newline|'\n'
indent|' '
name|'prepare_template'
op|'('
name|'target'
op|'='
name|'base'
op|','
name|'max_size'
op|'='
name|'size'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'verify_base_size'
op|'('
name|'base'
op|','
name|'size'
op|')'
newline|'\n'
nl|'\n'
name|'if'
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'self'
op|'.'
name|'path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
newline|'\n'
nl|'\n'
dedent|''
name|'fileutils'
op|'.'
name|'ensure_tree'
op|'('
name|'self'
op|'.'
name|'path'
op|')'
newline|'\n'
nl|'\n'
name|'remove_func'
op|'='
name|'functools'
op|'.'
name|'partial'
op|'('
name|'fileutils'
op|'.'
name|'delete_if_exists'
op|','
nl|'\n'
name|'remove'
op|'='
name|'shutil'
op|'.'
name|'rmtree'
op|')'
newline|'\n'
name|'with'
name|'fileutils'
op|'.'
name|'remove_path_on_error'
op|'('
name|'self'
op|'.'
name|'path'
op|','
name|'remove'
op|'='
name|'remove_func'
op|')'
op|':'
newline|'\n'
indent|' '
name|'create_ploop_image'
op|'('
name|'base'
op|','
name|'self'
op|'.'
name|'path'
op|','
name|'size'
op|')'
newline|'\n'
nl|'\n'
DECL|member|resize_image
dedent|''
dedent|''
name|'def'
name|'resize_image'
op|'('
name|'self'
op|','
name|'size'
op|')'
op|':'
newline|'\n'
indent|' '
name|'dd_path'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'self'
op|'.'
name|'path'
op|','
string|'"DiskDescriptor.xml"'
op|')'
newline|'\n'
name|'utils'
op|'.'
name|'execute'
op|'('
string|"'ploop'"
op|','
string|"'grow'"
op|','
string|"'-s'"
op|','
string|"'%dK'"
op|'%'
op|'('
name|'size'
op|'>>'
number|'10'
op|')'
op|','
name|'dd_path'
op|','
nl|'\n'
name|'run_as_root'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
DECL|member|snapshot_extract
dedent|''
name|'def'
name|'snapshot_extract'
op|'('
name|'self'
op|','
name|'target'
op|','
name|'out_format'
op|')'
op|':'
newline|'\n'
indent|' '
name|'img_path'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'self'
op|'.'
name|'path'
op|','
string|'"root.hds"'
op|')'
newline|'\n'
name|'libvirt_utils'
op|'.'
name|'extract_snapshot'
op|'('
name|'img_path'
op|','
nl|'\n'
string|"'parallels'"
op|','
nl|'\n'
name|'target'
op|','
nl|'\n'
name|'out_format'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|Backend
dedent|''
dedent|''
name|'class'
name|'Backend'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|__init__
indent|' '
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'use_cow'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'BACKEND'
op|'='
op|'{'
nl|'\n'
string|"'raw'"
op|':'
name|'Raw'
op|','
nl|'\n'
string|"'qcow2'"
op|':'
name|'Qcow2'
op|','
nl|'\n'
string|"'lvm'"
op|':'
name|'Lvm'
op|','
nl|'\n'
string|"'rbd'"
op|':'
name|'Rbd'
op|','
nl|'\n'
string|"'ploop'"
op|':'
name|'Ploop'
op|','
nl|'\n'
string|"'default'"
op|':'
name|'Qcow2'
name|'if'
name|'use_cow'
name|'else'
name|'Raw'
nl|'\n'
op|'}'
newline|'\n'
nl|'\n'
DECL|member|backend
dedent|''
name|'def'
name|'backend'
op|'('
name|'self'
op|','
name|'image_type'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'not'
name|'image_type'
op|':'
newline|'\n'
indent|' '
name|'image_type'
op|'='
name|'CONF'
op|'.'
name|'libvirt'
op|'.'
name|'images_type'
newline|'\n'
dedent|''
name|'image'
op|'='
name|'self'
op|'.'
name|'BACKEND'
op|'.'
name|'get'
op|'('
name|'image_type'
op|')'
newline|'\n'
name|'if'
name|'not'
name|'image'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'RuntimeError'
op|'('
name|'_'
op|'('
string|"'Unknown image_type=%s'"
op|')'
op|'%'
name|'image_type'
op|')'
newline|'\n'
dedent|''
name|'return'
name|'image'
newline|'\n'
nl|'\n'
DECL|member|image
dedent|''
name|'def'
name|'image'
op|'('
name|'self'
op|','
name|'instance'
op|','
name|'disk_name'
op|','
name|'image_type'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Constructs image for selected backend\n\n :instance: Instance name.\n :name: Image name.\n :image_type: Image type.\n Optional, is CONF.libvirt.images_type by default.\n """'
newline|'\n'
name|'backend'
op|'='
name|'self'
op|'.'
name|'backend'
op|'('
name|'image_type'
op|')'
newline|'\n'
name|'return'
name|'backend'
op|'('
name|'instance'
op|'='
name|'instance'
op|','
name|'disk_name'
op|'='
name|'disk_name'
op|')'
newline|'\n'
nl|'\n'
DECL|member|snapshot
dedent|''
name|'def'
name|'snapshot'
op|'('
name|'self'
op|','
name|'instance'
op|','
name|'disk_path'
op|','
name|'image_type'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Returns snapshot for given image\n\n :path: path to image\n :image_type: type of image\n """'
newline|'\n'
name|'backend'
op|'='
name|'self'
op|'.'
name|'backend'
op|'('
name|'image_type'
op|')'
newline|'\n'
name|'return'
name|'backend'
op|'('
name|'instance'
op|'='
name|'instance'
op|','
name|'path'
op|'='
name|'disk_path'
op|')'
newline|'\n'
dedent|''
dedent|''
endmarker|''
end_unit
| 13.267539
| 465
| 0.593332
| 13,741
| 92,289
| 3.897897
| 0.051379
| 0.170721
| 0.083456
| 0.079199
| 0.793899
| 0.753067
| 0.715278
| 0.672971
| 0.627658
| 0.581412
| 0
| 0.000909
| 0.130243
| 92,289
| 6,955
| 466
| 13.269446
| 0.666359
| 0
| 0
| 0.951689
| 0
| 0.001869
| 0.811267
| 0.067256
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.001294
| 0.005032
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
36a6753272bd3ef9dd1c7afc8a4056fbb5a3efd2
| 191
|
py
|
Python
|
src/route/routine.py
|
dkletz-hackathon/bncc-sano-api
|
0281962dcd6a8e4b9c1b97372964e98b59748a61
|
[
"MIT"
] | null | null | null |
src/route/routine.py
|
dkletz-hackathon/bncc-sano-api
|
0281962dcd6a8e4b9c1b97372964e98b59748a61
|
[
"MIT"
] | null | null | null |
src/route/routine.py
|
dkletz-hackathon/bncc-sano-api
|
0281962dcd6a8e4b9c1b97372964e98b59748a61
|
[
"MIT"
] | null | null | null |
from core.router import Router
from src.controller.routine import *
routine_router = Router()
routine_router.get("", get_routines)
routine_router.get("/<int:routine_id>", get_routine_by_id)
| 27.285714
| 58
| 0.795812
| 28
| 191
| 5.142857
| 0.428571
| 0.270833
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08377
| 191
| 6
| 59
| 31.833333
| 0.822857
| 0
| 0
| 0
| 0
| 0
| 0.089005
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
36da18ad9192adb19727df83225528cc14764808
| 306
|
py
|
Python
|
corus/sources/taiga/__init__.py
|
Ilseyar/corus
|
61a4776f5e534469bb9df1e451b6a6d5fc0e991b
|
[
"MIT"
] | 205
|
2019-05-01T07:38:01.000Z
|
2022-03-30T04:02:54.000Z
|
corus/sources/taiga/__init__.py
|
Ilseyar/corus
|
61a4776f5e534469bb9df1e451b6a6d5fc0e991b
|
[
"MIT"
] | 78
|
2019-04-29T06:53:53.000Z
|
2021-09-20T14:51:25.000Z
|
corus/sources/taiga/__init__.py
|
Ilseyar/corus
|
61a4776f5e534469bb9df1e451b6a6d5fc0e991b
|
[
"MIT"
] | 18
|
2019-06-19T09:56:10.000Z
|
2022-01-30T14:55:14.000Z
|
from .arzamas import * # noqa
from .fontanka import * # noqa
from .interfax import * # noqa
from .kp import * # noqa
from .lenta import * # noqa
from .magazines import * # noqa
from .nplus1 import * # noqa
from .subtitles import * # noqa
from .social import * # noqa
from .proza import * # noqa
| 25.5
| 32
| 0.669935
| 40
| 306
| 5.125
| 0.325
| 0.487805
| 0.614634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004255
| 0.232026
| 306
| 11
| 33
| 27.818182
| 0.868085
| 0.160131
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
36ef34ad88bd79e626527b21b1db8fa951c449d1
| 121
|
py
|
Python
|
shop/admin.py
|
log-Z/django_shop
|
5c68dcd9a583bda3244edccdadfca74fd841c7ee
|
[
"MIT"
] | 4
|
2019-08-21T06:05:03.000Z
|
2021-01-26T04:21:19.000Z
|
shop/admin.py
|
log-Z/django_shop
|
5c68dcd9a583bda3244edccdadfca74fd841c7ee
|
[
"MIT"
] | null | null | null |
shop/admin.py
|
log-Z/django_shop
|
5c68dcd9a583bda3244edccdadfca74fd841c7ee
|
[
"MIT"
] | 2
|
2021-01-26T04:21:22.000Z
|
2021-02-22T07:51:39.000Z
|
from django.contrib import admin
from .models import User, Goods
admin.site.register(User)
admin.site.register(Goods)
| 15.125
| 32
| 0.793388
| 18
| 121
| 5.333333
| 0.555556
| 0.1875
| 0.354167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115702
| 121
| 7
| 33
| 17.285714
| 0.897196
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
7fe35a813e22bd1a9e882686a176638f6b5389b6
| 119
|
py
|
Python
|
src/localedb/__init__.py
|
momacs/localedb-py
|
8215653acbff7bfcd6336d2a8138fba66fd7e12e
|
[
"BSD-3-Clause"
] | null | null | null |
src/localedb/__init__.py
|
momacs/localedb-py
|
8215653acbff7bfcd6336d2a8138fba66fd7e12e
|
[
"BSD-3-Clause"
] | null | null | null |
src/localedb/__init__.py
|
momacs/localedb-py
|
8215653acbff7bfcd6336d2a8138fba66fd7e12e
|
[
"BSD-3-Clause"
] | null | null | null |
__all__ = ['clustering', 'localedb', 'util']
import localedb.clustering
import localedb.localedb
import localedb.util
| 19.833333
| 44
| 0.781513
| 13
| 119
| 6.846154
| 0.384615
| 0.47191
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10084
| 119
| 5
| 45
| 23.8
| 0.831776
| 0
| 0
| 0
| 0
| 0
| 0.184874
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7ff61e3013e0f57667163398a383355b4c8104ff
| 46,208
|
py
|
Python
|
src/main/antlrParser/Kotlin/KotlinParserListener.py
|
alschmut/code2semantics
|
af1daf0b8320b534344c5352ae972fb600e21e43
|
[
"MIT"
] | 2
|
2020-02-26T22:50:38.000Z
|
2020-10-29T10:46:10.000Z
|
src/main/antlrParser/Kotlin/KotlinParserListener.py
|
alschmut/linguistic-parser
|
af1daf0b8320b534344c5352ae972fb600e21e43
|
[
"MIT"
] | null | null | null |
src/main/antlrParser/Kotlin/KotlinParserListener.py
|
alschmut/linguistic-parser
|
af1daf0b8320b534344c5352ae972fb600e21e43
|
[
"MIT"
] | null | null | null |
# Generated from KotlinParser.g4 by ANTLR 4.7.2
from antlr4 import *
if __name__ is not None and "." in __name__:
from .KotlinParser import KotlinParser
else:
from KotlinParser import KotlinParser
# This class defines a complete listener for a parse tree produced by KotlinParser.
class KotlinParserListener(ParseTreeListener):
# Enter a parse tree produced by KotlinParser#kotlinFile.
def enterKotlinFile(self, ctx:KotlinParser.KotlinFileContext):
pass
# Exit a parse tree produced by KotlinParser#kotlinFile.
def exitKotlinFile(self, ctx:KotlinParser.KotlinFileContext):
pass
# Enter a parse tree produced by KotlinParser#script.
def enterScript(self, ctx:KotlinParser.ScriptContext):
pass
# Exit a parse tree produced by KotlinParser#script.
def exitScript(self, ctx:KotlinParser.ScriptContext):
pass
# Enter a parse tree produced by KotlinParser#preamble.
def enterPreamble(self, ctx:KotlinParser.PreambleContext):
pass
# Exit a parse tree produced by KotlinParser#preamble.
def exitPreamble(self, ctx:KotlinParser.PreambleContext):
pass
# Enter a parse tree produced by KotlinParser#fileAnnotations.
def enterFileAnnotations(self, ctx:KotlinParser.FileAnnotationsContext):
pass
# Exit a parse tree produced by KotlinParser#fileAnnotations.
def exitFileAnnotations(self, ctx:KotlinParser.FileAnnotationsContext):
pass
# Enter a parse tree produced by KotlinParser#fileAnnotation.
def enterFileAnnotation(self, ctx:KotlinParser.FileAnnotationContext):
pass
# Exit a parse tree produced by KotlinParser#fileAnnotation.
def exitFileAnnotation(self, ctx:KotlinParser.FileAnnotationContext):
pass
# Enter a parse tree produced by KotlinParser#packageHeader.
def enterPackageHeader(self, ctx:KotlinParser.PackageHeaderContext):
pass
# Exit a parse tree produced by KotlinParser#packageHeader.
def exitPackageHeader(self, ctx:KotlinParser.PackageHeaderContext):
pass
# Enter a parse tree produced by KotlinParser#importList.
def enterImportList(self, ctx:KotlinParser.ImportListContext):
pass
# Exit a parse tree produced by KotlinParser#importList.
def exitImportList(self, ctx:KotlinParser.ImportListContext):
pass
# Enter a parse tree produced by KotlinParser#importHeader.
def enterImportHeader(self, ctx:KotlinParser.ImportHeaderContext):
pass
# Exit a parse tree produced by KotlinParser#importHeader.
def exitImportHeader(self, ctx:KotlinParser.ImportHeaderContext):
pass
# Enter a parse tree produced by KotlinParser#importAlias.
def enterImportAlias(self, ctx:KotlinParser.ImportAliasContext):
pass
# Exit a parse tree produced by KotlinParser#importAlias.
def exitImportAlias(self, ctx:KotlinParser.ImportAliasContext):
pass
# Enter a parse tree produced by KotlinParser#topLevelObject.
def enterTopLevelObject(self, ctx:KotlinParser.TopLevelObjectContext):
pass
# Exit a parse tree produced by KotlinParser#topLevelObject.
def exitTopLevelObject(self, ctx:KotlinParser.TopLevelObjectContext):
pass
# Enter a parse tree produced by KotlinParser#classDeclaration.
def enterClassDeclaration(self, ctx:KotlinParser.ClassDeclarationContext):
pass
# Exit a parse tree produced by KotlinParser#classDeclaration.
def exitClassDeclaration(self, ctx:KotlinParser.ClassDeclarationContext):
pass
# Enter a parse tree produced by KotlinParser#primaryConstructor.
def enterPrimaryConstructor(self, ctx:KotlinParser.PrimaryConstructorContext):
pass
# Exit a parse tree produced by KotlinParser#primaryConstructor.
def exitPrimaryConstructor(self, ctx:KotlinParser.PrimaryConstructorContext):
pass
# Enter a parse tree produced by KotlinParser#classParameters.
def enterClassParameters(self, ctx:KotlinParser.ClassParametersContext):
pass
# Exit a parse tree produced by KotlinParser#classParameters.
def exitClassParameters(self, ctx:KotlinParser.ClassParametersContext):
pass
# Enter a parse tree produced by KotlinParser#classParameter.
def enterClassParameter(self, ctx:KotlinParser.ClassParameterContext):
pass
# Exit a parse tree produced by KotlinParser#classParameter.
def exitClassParameter(self, ctx:KotlinParser.ClassParameterContext):
pass
# Enter a parse tree produced by KotlinParser#delegationSpecifiers.
def enterDelegationSpecifiers(self, ctx:KotlinParser.DelegationSpecifiersContext):
pass
# Exit a parse tree produced by KotlinParser#delegationSpecifiers.
def exitDelegationSpecifiers(self, ctx:KotlinParser.DelegationSpecifiersContext):
pass
# Enter a parse tree produced by KotlinParser#delegationSpecifier.
def enterDelegationSpecifier(self, ctx:KotlinParser.DelegationSpecifierContext):
pass
# Exit a parse tree produced by KotlinParser#delegationSpecifier.
def exitDelegationSpecifier(self, ctx:KotlinParser.DelegationSpecifierContext):
pass
# Enter a parse tree produced by KotlinParser#constructorInvocation.
def enterConstructorInvocation(self, ctx:KotlinParser.ConstructorInvocationContext):
pass
# Exit a parse tree produced by KotlinParser#constructorInvocation.
def exitConstructorInvocation(self, ctx:KotlinParser.ConstructorInvocationContext):
pass
# Enter a parse tree produced by KotlinParser#explicitDelegation.
def enterExplicitDelegation(self, ctx:KotlinParser.ExplicitDelegationContext):
pass
# Exit a parse tree produced by KotlinParser#explicitDelegation.
def exitExplicitDelegation(self, ctx:KotlinParser.ExplicitDelegationContext):
pass
# Enter a parse tree produced by KotlinParser#classBody.
def enterClassBody(self, ctx:KotlinParser.ClassBodyContext):
pass
# Exit a parse tree produced by KotlinParser#classBody.
def exitClassBody(self, ctx:KotlinParser.ClassBodyContext):
pass
# Enter a parse tree produced by KotlinParser#classMemberDeclaration.
def enterClassMemberDeclaration(self, ctx:KotlinParser.ClassMemberDeclarationContext):
pass
# Exit a parse tree produced by KotlinParser#classMemberDeclaration.
def exitClassMemberDeclaration(self, ctx:KotlinParser.ClassMemberDeclarationContext):
pass
# Enter a parse tree produced by KotlinParser#anonymousInitializer.
def enterAnonymousInitializer(self, ctx:KotlinParser.AnonymousInitializerContext):
pass
# Exit a parse tree produced by KotlinParser#anonymousInitializer.
def exitAnonymousInitializer(self, ctx:KotlinParser.AnonymousInitializerContext):
pass
# Enter a parse tree produced by KotlinParser#secondaryConstructor.
def enterSecondaryConstructor(self, ctx:KotlinParser.SecondaryConstructorContext):
pass
# Exit a parse tree produced by KotlinParser#secondaryConstructor.
def exitSecondaryConstructor(self, ctx:KotlinParser.SecondaryConstructorContext):
pass
# Enter a parse tree produced by KotlinParser#constructorDelegationCall.
def enterConstructorDelegationCall(self, ctx:KotlinParser.ConstructorDelegationCallContext):
pass
# Exit a parse tree produced by KotlinParser#constructorDelegationCall.
def exitConstructorDelegationCall(self, ctx:KotlinParser.ConstructorDelegationCallContext):
pass
# Enter a parse tree produced by KotlinParser#enumClassBody.
def enterEnumClassBody(self, ctx:KotlinParser.EnumClassBodyContext):
pass
# Exit a parse tree produced by KotlinParser#enumClassBody.
def exitEnumClassBody(self, ctx:KotlinParser.EnumClassBodyContext):
pass
# Enter a parse tree produced by KotlinParser#enumEntries.
def enterEnumEntries(self, ctx:KotlinParser.EnumEntriesContext):
pass
# Exit a parse tree produced by KotlinParser#enumEntries.
def exitEnumEntries(self, ctx:KotlinParser.EnumEntriesContext):
pass
# Enter a parse tree produced by KotlinParser#enumEntry.
def enterEnumEntry(self, ctx:KotlinParser.EnumEntryContext):
pass
# Exit a parse tree produced by KotlinParser#enumEntry.
def exitEnumEntry(self, ctx:KotlinParser.EnumEntryContext):
pass
# Enter a parse tree produced by KotlinParser#functionDeclaration.
def enterFunctionDeclaration(self, ctx:KotlinParser.FunctionDeclarationContext):
pass
# Exit a parse tree produced by KotlinParser#functionDeclaration.
def exitFunctionDeclaration(self, ctx:KotlinParser.FunctionDeclarationContext):
pass
# Enter a parse tree produced by KotlinParser#functionValueParameters.
def enterFunctionValueParameters(self, ctx:KotlinParser.FunctionValueParametersContext):
pass
# Exit a parse tree produced by KotlinParser#functionValueParameters.
def exitFunctionValueParameters(self, ctx:KotlinParser.FunctionValueParametersContext):
pass
# Enter a parse tree produced by KotlinParser#functionValueParameter.
def enterFunctionValueParameter(self, ctx:KotlinParser.FunctionValueParameterContext):
pass
# Exit a parse tree produced by KotlinParser#functionValueParameter.
def exitFunctionValueParameter(self, ctx:KotlinParser.FunctionValueParameterContext):
pass
# Enter a parse tree produced by KotlinParser#parameter.
def enterParameter(self, ctx:KotlinParser.ParameterContext):
pass
# Exit a parse tree produced by KotlinParser#parameter.
def exitParameter(self, ctx:KotlinParser.ParameterContext):
pass
# Enter a parse tree produced by KotlinParser#functionBody.
def enterFunctionBody(self, ctx:KotlinParser.FunctionBodyContext):
pass
# Exit a parse tree produced by KotlinParser#functionBody.
def exitFunctionBody(self, ctx:KotlinParser.FunctionBodyContext):
pass
# Enter a parse tree produced by KotlinParser#objectDeclaration.
def enterObjectDeclaration(self, ctx:KotlinParser.ObjectDeclarationContext):
pass
# Exit a parse tree produced by KotlinParser#objectDeclaration.
def exitObjectDeclaration(self, ctx:KotlinParser.ObjectDeclarationContext):
pass
# Enter a parse tree produced by KotlinParser#companionObject.
def enterCompanionObject(self, ctx:KotlinParser.CompanionObjectContext):
pass
# Exit a parse tree produced by KotlinParser#companionObject.
def exitCompanionObject(self, ctx:KotlinParser.CompanionObjectContext):
pass
# Enter a parse tree produced by KotlinParser#propertyDeclaration.
def enterPropertyDeclaration(self, ctx:KotlinParser.PropertyDeclarationContext):
pass
# Exit a parse tree produced by KotlinParser#propertyDeclaration.
def exitPropertyDeclaration(self, ctx:KotlinParser.PropertyDeclarationContext):
pass
# Enter a parse tree produced by KotlinParser#multiVariableDeclaration.
def enterMultiVariableDeclaration(self, ctx:KotlinParser.MultiVariableDeclarationContext):
pass
# Exit a parse tree produced by KotlinParser#multiVariableDeclaration.
def exitMultiVariableDeclaration(self, ctx:KotlinParser.MultiVariableDeclarationContext):
pass
# Enter a parse tree produced by KotlinParser#variableDeclaration.
def enterVariableDeclaration(self, ctx:KotlinParser.VariableDeclarationContext):
pass
# Exit a parse tree produced by KotlinParser#variableDeclaration.
def exitVariableDeclaration(self, ctx:KotlinParser.VariableDeclarationContext):
pass
# Enter a parse tree produced by KotlinParser#getter.
def enterGetter(self, ctx:KotlinParser.GetterContext):
pass
# Exit a parse tree produced by KotlinParser#getter.
def exitGetter(self, ctx:KotlinParser.GetterContext):
pass
# Enter a parse tree produced by KotlinParser#setter.
def enterSetter(self, ctx:KotlinParser.SetterContext):
pass
# Exit a parse tree produced by KotlinParser#setter.
def exitSetter(self, ctx:KotlinParser.SetterContext):
pass
# Enter a parse tree produced by KotlinParser#typeAlias.
def enterTypeAlias(self, ctx:KotlinParser.TypeAliasContext):
pass
# Exit a parse tree produced by KotlinParser#typeAlias.
def exitTypeAlias(self, ctx:KotlinParser.TypeAliasContext):
pass
# Enter a parse tree produced by KotlinParser#typeParameters.
def enterTypeParameters(self, ctx:KotlinParser.TypeParametersContext):
pass
# Exit a parse tree produced by KotlinParser#typeParameters.
def exitTypeParameters(self, ctx:KotlinParser.TypeParametersContext):
pass
# Enter a parse tree produced by KotlinParser#typeParameter.
def enterTypeParameter(self, ctx:KotlinParser.TypeParameterContext):
pass
# Exit a parse tree produced by KotlinParser#typeParameter.
def exitTypeParameter(self, ctx:KotlinParser.TypeParameterContext):
pass
# Enter a parse tree produced by KotlinParser#type.
def enterType(self, ctx:KotlinParser.TypeContext):
pass
# Exit a parse tree produced by KotlinParser#type.
def exitType(self, ctx:KotlinParser.TypeContext):
pass
# Enter a parse tree produced by KotlinParser#typeModifierList.
def enterTypeModifierList(self, ctx:KotlinParser.TypeModifierListContext):
pass
# Exit a parse tree produced by KotlinParser#typeModifierList.
def exitTypeModifierList(self, ctx:KotlinParser.TypeModifierListContext):
pass
# Enter a parse tree produced by KotlinParser#parenthesizedType.
def enterParenthesizedType(self, ctx:KotlinParser.ParenthesizedTypeContext):
pass
# Exit a parse tree produced by KotlinParser#parenthesizedType.
def exitParenthesizedType(self, ctx:KotlinParser.ParenthesizedTypeContext):
pass
# Enter a parse tree produced by KotlinParser#nullableType.
def enterNullableType(self, ctx:KotlinParser.NullableTypeContext):
pass
# Exit a parse tree produced by KotlinParser#nullableType.
def exitNullableType(self, ctx:KotlinParser.NullableTypeContext):
pass
# Enter a parse tree produced by KotlinParser#typeReference.
def enterTypeReference(self, ctx:KotlinParser.TypeReferenceContext):
pass
# Exit a parse tree produced by KotlinParser#typeReference.
def exitTypeReference(self, ctx:KotlinParser.TypeReferenceContext):
pass
# Enter a parse tree produced by KotlinParser#functionType.
def enterFunctionType(self, ctx:KotlinParser.FunctionTypeContext):
pass
# Exit a parse tree produced by KotlinParser#functionType.
def exitFunctionType(self, ctx:KotlinParser.FunctionTypeContext):
pass
# Enter a parse tree produced by KotlinParser#functionTypeReceiver.
def enterFunctionTypeReceiver(self, ctx:KotlinParser.FunctionTypeReceiverContext):
pass
# Exit a parse tree produced by KotlinParser#functionTypeReceiver.
def exitFunctionTypeReceiver(self, ctx:KotlinParser.FunctionTypeReceiverContext):
pass
# Enter a parse tree produced by KotlinParser#userType.
def enterUserType(self, ctx:KotlinParser.UserTypeContext):
pass
# Exit a parse tree produced by KotlinParser#userType.
def exitUserType(self, ctx:KotlinParser.UserTypeContext):
pass
# Enter a parse tree produced by KotlinParser#simpleUserType.
def enterSimpleUserType(self, ctx:KotlinParser.SimpleUserTypeContext):
pass
# Exit a parse tree produced by KotlinParser#simpleUserType.
def exitSimpleUserType(self, ctx:KotlinParser.SimpleUserTypeContext):
pass
# Enter a parse tree produced by KotlinParser#functionTypeParameters.
def enterFunctionTypeParameters(self, ctx:KotlinParser.FunctionTypeParametersContext):
pass
# Exit a parse tree produced by KotlinParser#functionTypeParameters.
def exitFunctionTypeParameters(self, ctx:KotlinParser.FunctionTypeParametersContext):
pass
# Enter a parse tree produced by KotlinParser#typeConstraints.
def enterTypeConstraints(self, ctx:KotlinParser.TypeConstraintsContext):
pass
# Exit a parse tree produced by KotlinParser#typeConstraints.
def exitTypeConstraints(self, ctx:KotlinParser.TypeConstraintsContext):
pass
# Enter a parse tree produced by KotlinParser#typeConstraint.
def enterTypeConstraint(self, ctx:KotlinParser.TypeConstraintContext):
pass
# Exit a parse tree produced by KotlinParser#typeConstraint.
def exitTypeConstraint(self, ctx:KotlinParser.TypeConstraintContext):
pass
# Enter a parse tree produced by KotlinParser#block.
def enterBlock(self, ctx:KotlinParser.BlockContext):
pass
# Exit a parse tree produced by KotlinParser#block.
def exitBlock(self, ctx:KotlinParser.BlockContext):
pass
# Enter a parse tree produced by KotlinParser#statements.
def enterStatements(self, ctx:KotlinParser.StatementsContext):
pass
# Exit a parse tree produced by KotlinParser#statements.
def exitStatements(self, ctx:KotlinParser.StatementsContext):
pass
# Enter a parse tree produced by KotlinParser#statement.
def enterStatement(self, ctx:KotlinParser.StatementContext):
pass
# Exit a parse tree produced by KotlinParser#statement.
def exitStatement(self, ctx:KotlinParser.StatementContext):
pass
# Enter a parse tree produced by KotlinParser#blockLevelExpression.
def enterBlockLevelExpression(self, ctx:KotlinParser.BlockLevelExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#blockLevelExpression.
def exitBlockLevelExpression(self, ctx:KotlinParser.BlockLevelExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#declaration.
def enterDeclaration(self, ctx:KotlinParser.DeclarationContext):
pass
# Exit a parse tree produced by KotlinParser#declaration.
def exitDeclaration(self, ctx:KotlinParser.DeclarationContext):
pass
# Enter a parse tree produced by KotlinParser#expression.
def enterExpression(self, ctx:KotlinParser.ExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#expression.
def exitExpression(self, ctx:KotlinParser.ExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#disjunction.
def enterDisjunction(self, ctx:KotlinParser.DisjunctionContext):
pass
# Exit a parse tree produced by KotlinParser#disjunction.
def exitDisjunction(self, ctx:KotlinParser.DisjunctionContext):
pass
# Enter a parse tree produced by KotlinParser#conjunction.
def enterConjunction(self, ctx:KotlinParser.ConjunctionContext):
pass
# Exit a parse tree produced by KotlinParser#conjunction.
def exitConjunction(self, ctx:KotlinParser.ConjunctionContext):
pass
# Enter a parse tree produced by KotlinParser#equalityComparison.
def enterEqualityComparison(self, ctx:KotlinParser.EqualityComparisonContext):
pass
# Exit a parse tree produced by KotlinParser#equalityComparison.
def exitEqualityComparison(self, ctx:KotlinParser.EqualityComparisonContext):
pass
# Enter a parse tree produced by KotlinParser#comparison.
def enterComparison(self, ctx:KotlinParser.ComparisonContext):
pass
# Exit a parse tree produced by KotlinParser#comparison.
def exitComparison(self, ctx:KotlinParser.ComparisonContext):
pass
# Enter a parse tree produced by KotlinParser#namedInfix.
def enterNamedInfix(self, ctx:KotlinParser.NamedInfixContext):
pass
# Exit a parse tree produced by KotlinParser#namedInfix.
def exitNamedInfix(self, ctx:KotlinParser.NamedInfixContext):
pass
# Enter a parse tree produced by KotlinParser#elvisExpression.
def enterElvisExpression(self, ctx:KotlinParser.ElvisExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#elvisExpression.
def exitElvisExpression(self, ctx:KotlinParser.ElvisExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#infixFunctionCall.
def enterInfixFunctionCall(self, ctx:KotlinParser.InfixFunctionCallContext):
pass
# Exit a parse tree produced by KotlinParser#infixFunctionCall.
def exitInfixFunctionCall(self, ctx:KotlinParser.InfixFunctionCallContext):
pass
# Enter a parse tree produced by KotlinParser#rangeExpression.
def enterRangeExpression(self, ctx:KotlinParser.RangeExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#rangeExpression.
def exitRangeExpression(self, ctx:KotlinParser.RangeExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#additiveExpression.
def enterAdditiveExpression(self, ctx:KotlinParser.AdditiveExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#additiveExpression.
def exitAdditiveExpression(self, ctx:KotlinParser.AdditiveExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#multiplicativeExpression.
def enterMultiplicativeExpression(self, ctx:KotlinParser.MultiplicativeExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#multiplicativeExpression.
def exitMultiplicativeExpression(self, ctx:KotlinParser.MultiplicativeExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#typeRHS.
def enterTypeRHS(self, ctx:KotlinParser.TypeRHSContext):
pass
# Exit a parse tree produced by KotlinParser#typeRHS.
def exitTypeRHS(self, ctx:KotlinParser.TypeRHSContext):
pass
# Enter a parse tree produced by KotlinParser#prefixUnaryExpression.
def enterPrefixUnaryExpression(self, ctx:KotlinParser.PrefixUnaryExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#prefixUnaryExpression.
def exitPrefixUnaryExpression(self, ctx:KotlinParser.PrefixUnaryExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#postfixUnaryExpression.
def enterPostfixUnaryExpression(self, ctx:KotlinParser.PostfixUnaryExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#postfixUnaryExpression.
def exitPostfixUnaryExpression(self, ctx:KotlinParser.PostfixUnaryExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#atomicExpression.
def enterAtomicExpression(self, ctx:KotlinParser.AtomicExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#atomicExpression.
def exitAtomicExpression(self, ctx:KotlinParser.AtomicExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#parenthesizedExpression.
def enterParenthesizedExpression(self, ctx:KotlinParser.ParenthesizedExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#parenthesizedExpression.
def exitParenthesizedExpression(self, ctx:KotlinParser.ParenthesizedExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#callSuffix.
def enterCallSuffix(self, ctx:KotlinParser.CallSuffixContext):
pass
# Exit a parse tree produced by KotlinParser#callSuffix.
def exitCallSuffix(self, ctx:KotlinParser.CallSuffixContext):
pass
# Enter a parse tree produced by KotlinParser#annotatedLambda.
def enterAnnotatedLambda(self, ctx:KotlinParser.AnnotatedLambdaContext):
pass
# Exit a parse tree produced by KotlinParser#annotatedLambda.
def exitAnnotatedLambda(self, ctx:KotlinParser.AnnotatedLambdaContext):
pass
# Enter a parse tree produced by KotlinParser#arrayAccess.
def enterArrayAccess(self, ctx:KotlinParser.ArrayAccessContext):
pass
# Exit a parse tree produced by KotlinParser#arrayAccess.
def exitArrayAccess(self, ctx:KotlinParser.ArrayAccessContext):
pass
# Enter a parse tree produced by KotlinParser#valueArguments.
def enterValueArguments(self, ctx:KotlinParser.ValueArgumentsContext):
pass
# Exit a parse tree produced by KotlinParser#valueArguments.
def exitValueArguments(self, ctx:KotlinParser.ValueArgumentsContext):
pass
# Enter a parse tree produced by KotlinParser#typeArguments.
def enterTypeArguments(self, ctx:KotlinParser.TypeArgumentsContext):
pass
# Exit a parse tree produced by KotlinParser#typeArguments.
def exitTypeArguments(self, ctx:KotlinParser.TypeArgumentsContext):
pass
# Enter a parse tree produced by KotlinParser#typeProjection.
def enterTypeProjection(self, ctx:KotlinParser.TypeProjectionContext):
pass
# Exit a parse tree produced by KotlinParser#typeProjection.
def exitTypeProjection(self, ctx:KotlinParser.TypeProjectionContext):
pass
# Enter a parse tree produced by KotlinParser#typeProjectionModifierList.
def enterTypeProjectionModifierList(self, ctx:KotlinParser.TypeProjectionModifierListContext):
pass
# Exit a parse tree produced by KotlinParser#typeProjectionModifierList.
def exitTypeProjectionModifierList(self, ctx:KotlinParser.TypeProjectionModifierListContext):
pass
# Enter a parse tree produced by KotlinParser#valueArgument.
def enterValueArgument(self, ctx:KotlinParser.ValueArgumentContext):
pass
# Exit a parse tree produced by KotlinParser#valueArgument.
def exitValueArgument(self, ctx:KotlinParser.ValueArgumentContext):
pass
# Enter a parse tree produced by KotlinParser#literalConstant.
def enterLiteralConstant(self, ctx:KotlinParser.LiteralConstantContext):
pass
# Exit a parse tree produced by KotlinParser#literalConstant.
def exitLiteralConstant(self, ctx:KotlinParser.LiteralConstantContext):
pass
# Enter a parse tree produced by KotlinParser#stringLiteral.
def enterStringLiteral(self, ctx:KotlinParser.StringLiteralContext):
pass
# Exit a parse tree produced by KotlinParser#stringLiteral.
def exitStringLiteral(self, ctx:KotlinParser.StringLiteralContext):
pass
# Enter a parse tree produced by KotlinParser#lineStringLiteral.
def enterLineStringLiteral(self, ctx:KotlinParser.LineStringLiteralContext):
pass
# Exit a parse tree produced by KotlinParser#lineStringLiteral.
def exitLineStringLiteral(self, ctx:KotlinParser.LineStringLiteralContext):
pass
# Enter a parse tree produced by KotlinParser#multiLineStringLiteral.
def enterMultiLineStringLiteral(self, ctx:KotlinParser.MultiLineStringLiteralContext):
pass
# Exit a parse tree produced by KotlinParser#multiLineStringLiteral.
def exitMultiLineStringLiteral(self, ctx:KotlinParser.MultiLineStringLiteralContext):
pass
# Enter a parse tree produced by KotlinParser#lineStringContent.
def enterLineStringContent(self, ctx:KotlinParser.LineStringContentContext):
pass
# Exit a parse tree produced by KotlinParser#lineStringContent.
def exitLineStringContent(self, ctx:KotlinParser.LineStringContentContext):
pass
# Enter a parse tree produced by KotlinParser#lineStringExpression.
def enterLineStringExpression(self, ctx:KotlinParser.LineStringExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#lineStringExpression.
def exitLineStringExpression(self, ctx:KotlinParser.LineStringExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#multiLineStringContent.
def enterMultiLineStringContent(self, ctx:KotlinParser.MultiLineStringContentContext):
pass
# Exit a parse tree produced by KotlinParser#multiLineStringContent.
def exitMultiLineStringContent(self, ctx:KotlinParser.MultiLineStringContentContext):
pass
# Enter a parse tree produced by KotlinParser#multiLineStringExpression.
def enterMultiLineStringExpression(self, ctx:KotlinParser.MultiLineStringExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#multiLineStringExpression.
def exitMultiLineStringExpression(self, ctx:KotlinParser.MultiLineStringExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#functionLiteral.
def enterFunctionLiteral(self, ctx:KotlinParser.FunctionLiteralContext):
pass
# Exit a parse tree produced by KotlinParser#functionLiteral.
def exitFunctionLiteral(self, ctx:KotlinParser.FunctionLiteralContext):
pass
# Enter a parse tree produced by KotlinParser#lambdaParameters.
def enterLambdaParameters(self, ctx:KotlinParser.LambdaParametersContext):
pass
# Exit a parse tree produced by KotlinParser#lambdaParameters.
def exitLambdaParameters(self, ctx:KotlinParser.LambdaParametersContext):
pass
# Enter a parse tree produced by KotlinParser#lambdaParameter.
def enterLambdaParameter(self, ctx:KotlinParser.LambdaParameterContext):
pass
# Exit a parse tree produced by KotlinParser#lambdaParameter.
def exitLambdaParameter(self, ctx:KotlinParser.LambdaParameterContext):
pass
# Enter a parse tree produced by KotlinParser#objectLiteral.
def enterObjectLiteral(self, ctx:KotlinParser.ObjectLiteralContext):
pass
# Exit a parse tree produced by KotlinParser#objectLiteral.
def exitObjectLiteral(self, ctx:KotlinParser.ObjectLiteralContext):
pass
# Enter a parse tree produced by KotlinParser#collectionLiteral.
def enterCollectionLiteral(self, ctx:KotlinParser.CollectionLiteralContext):
pass
# Exit a parse tree produced by KotlinParser#collectionLiteral.
def exitCollectionLiteral(self, ctx:KotlinParser.CollectionLiteralContext):
pass
# Enter a parse tree produced by KotlinParser#thisExpression.
def enterThisExpression(self, ctx:KotlinParser.ThisExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#thisExpression.
def exitThisExpression(self, ctx:KotlinParser.ThisExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#superExpression.
def enterSuperExpression(self, ctx:KotlinParser.SuperExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#superExpression.
def exitSuperExpression(self, ctx:KotlinParser.SuperExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#conditionalExpression.
def enterConditionalExpression(self, ctx:KotlinParser.ConditionalExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#conditionalExpression.
def exitConditionalExpression(self, ctx:KotlinParser.ConditionalExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#ifExpression.
def enterIfExpression(self, ctx:KotlinParser.IfExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#ifExpression.
def exitIfExpression(self, ctx:KotlinParser.IfExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#controlStructureBody.
def enterControlStructureBody(self, ctx:KotlinParser.ControlStructureBodyContext):
pass
# Exit a parse tree produced by KotlinParser#controlStructureBody.
def exitControlStructureBody(self, ctx:KotlinParser.ControlStructureBodyContext):
pass
# Enter a parse tree produced by KotlinParser#whenExpression.
def enterWhenExpression(self, ctx:KotlinParser.WhenExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#whenExpression.
def exitWhenExpression(self, ctx:KotlinParser.WhenExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#whenEntry.
def enterWhenEntry(self, ctx:KotlinParser.WhenEntryContext):
pass
# Exit a parse tree produced by KotlinParser#whenEntry.
def exitWhenEntry(self, ctx:KotlinParser.WhenEntryContext):
pass
# Enter a parse tree produced by KotlinParser#whenCondition.
def enterWhenCondition(self, ctx:KotlinParser.WhenConditionContext):
pass
# Exit a parse tree produced by KotlinParser#whenCondition.
def exitWhenCondition(self, ctx:KotlinParser.WhenConditionContext):
pass
# Enter a parse tree produced by KotlinParser#rangeTest.
def enterRangeTest(self, ctx:KotlinParser.RangeTestContext):
pass
# Exit a parse tree produced by KotlinParser#rangeTest.
def exitRangeTest(self, ctx:KotlinParser.RangeTestContext):
pass
# Enter a parse tree produced by KotlinParser#typeTest.
def enterTypeTest(self, ctx:KotlinParser.TypeTestContext):
pass
# Exit a parse tree produced by KotlinParser#typeTest.
def exitTypeTest(self, ctx:KotlinParser.TypeTestContext):
pass
# Enter a parse tree produced by KotlinParser#tryExpression.
def enterTryExpression(self, ctx:KotlinParser.TryExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#tryExpression.
def exitTryExpression(self, ctx:KotlinParser.TryExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#catchBlock.
def enterCatchBlock(self, ctx:KotlinParser.CatchBlockContext):
pass
# Exit a parse tree produced by KotlinParser#catchBlock.
def exitCatchBlock(self, ctx:KotlinParser.CatchBlockContext):
pass
# Enter a parse tree produced by KotlinParser#finallyBlock.
def enterFinallyBlock(self, ctx:KotlinParser.FinallyBlockContext):
pass
# Exit a parse tree produced by KotlinParser#finallyBlock.
def exitFinallyBlock(self, ctx:KotlinParser.FinallyBlockContext):
pass
# Enter a parse tree produced by KotlinParser#loopExpression.
def enterLoopExpression(self, ctx:KotlinParser.LoopExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#loopExpression.
def exitLoopExpression(self, ctx:KotlinParser.LoopExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#forExpression.
def enterForExpression(self, ctx:KotlinParser.ForExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#forExpression.
def exitForExpression(self, ctx:KotlinParser.ForExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#whileExpression.
def enterWhileExpression(self, ctx:KotlinParser.WhileExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#whileExpression.
def exitWhileExpression(self, ctx:KotlinParser.WhileExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#doWhileExpression.
def enterDoWhileExpression(self, ctx:KotlinParser.DoWhileExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#doWhileExpression.
def exitDoWhileExpression(self, ctx:KotlinParser.DoWhileExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#jumpExpression.
def enterJumpExpression(self, ctx:KotlinParser.JumpExpressionContext):
pass
# Exit a parse tree produced by KotlinParser#jumpExpression.
def exitJumpExpression(self, ctx:KotlinParser.JumpExpressionContext):
pass
# Enter a parse tree produced by KotlinParser#callableReference.
def enterCallableReference(self, ctx:KotlinParser.CallableReferenceContext):
pass
# Exit a parse tree produced by KotlinParser#callableReference.
def exitCallableReference(self, ctx:KotlinParser.CallableReferenceContext):
pass
# Enter a parse tree produced by KotlinParser#assignmentOperator.
def enterAssignmentOperator(self, ctx:KotlinParser.AssignmentOperatorContext):
pass
# Exit a parse tree produced by KotlinParser#assignmentOperator.
def exitAssignmentOperator(self, ctx:KotlinParser.AssignmentOperatorContext):
pass
# Enter a parse tree produced by KotlinParser#equalityOperation.
def enterEqualityOperation(self, ctx:KotlinParser.EqualityOperationContext):
pass
# Exit a parse tree produced by KotlinParser#equalityOperation.
def exitEqualityOperation(self, ctx:KotlinParser.EqualityOperationContext):
pass
# Enter a parse tree produced by KotlinParser#comparisonOperator.
def enterComparisonOperator(self, ctx:KotlinParser.ComparisonOperatorContext):
pass
# Exit a parse tree produced by KotlinParser#comparisonOperator.
def exitComparisonOperator(self, ctx:KotlinParser.ComparisonOperatorContext):
pass
# Enter a parse tree produced by KotlinParser#inOperator.
def enterInOperator(self, ctx:KotlinParser.InOperatorContext):
pass
# Exit a parse tree produced by KotlinParser#inOperator.
def exitInOperator(self, ctx:KotlinParser.InOperatorContext):
pass
# Enter a parse tree produced by KotlinParser#isOperator.
def enterIsOperator(self, ctx:KotlinParser.IsOperatorContext):
pass
# Exit a parse tree produced by KotlinParser#isOperator.
def exitIsOperator(self, ctx:KotlinParser.IsOperatorContext):
pass
# Enter a parse tree produced by KotlinParser#additiveOperator.
def enterAdditiveOperator(self, ctx:KotlinParser.AdditiveOperatorContext):
pass
# Exit a parse tree produced by KotlinParser#additiveOperator.
def exitAdditiveOperator(self, ctx:KotlinParser.AdditiveOperatorContext):
pass
# Enter a parse tree produced by KotlinParser#multiplicativeOperation.
def enterMultiplicativeOperation(self, ctx:KotlinParser.MultiplicativeOperationContext):
pass
# Exit a parse tree produced by KotlinParser#multiplicativeOperation.
def exitMultiplicativeOperation(self, ctx:KotlinParser.MultiplicativeOperationContext):
pass
# Enter a parse tree produced by KotlinParser#typeOperation.
def enterTypeOperation(self, ctx:KotlinParser.TypeOperationContext):
pass
# Exit a parse tree produced by KotlinParser#typeOperation.
def exitTypeOperation(self, ctx:KotlinParser.TypeOperationContext):
pass
# Enter a parse tree produced by KotlinParser#prefixUnaryOperation.
def enterPrefixUnaryOperation(self, ctx:KotlinParser.PrefixUnaryOperationContext):
pass
# Exit a parse tree produced by KotlinParser#prefixUnaryOperation.
def exitPrefixUnaryOperation(self, ctx:KotlinParser.PrefixUnaryOperationContext):
pass
# Enter a parse tree produced by KotlinParser#postfixUnaryOperation.
def enterPostfixUnaryOperation(self, ctx:KotlinParser.PostfixUnaryOperationContext):
pass
# Exit a parse tree produced by KotlinParser#postfixUnaryOperation.
def exitPostfixUnaryOperation(self, ctx:KotlinParser.PostfixUnaryOperationContext):
pass
# Enter a parse tree produced by KotlinParser#memberAccessOperator.
def enterMemberAccessOperator(self, ctx:KotlinParser.MemberAccessOperatorContext):
pass
# Exit a parse tree produced by KotlinParser#memberAccessOperator.
def exitMemberAccessOperator(self, ctx:KotlinParser.MemberAccessOperatorContext):
pass
# Enter a parse tree produced by KotlinParser#modifierList.
def enterModifierList(self, ctx:KotlinParser.ModifierListContext):
pass
# Exit a parse tree produced by KotlinParser#modifierList.
def exitModifierList(self, ctx:KotlinParser.ModifierListContext):
pass
# Enter a parse tree produced by KotlinParser#modifier.
def enterModifier(self, ctx:KotlinParser.ModifierContext):
pass
# Exit a parse tree produced by KotlinParser#modifier.
def exitModifier(self, ctx:KotlinParser.ModifierContext):
pass
# Enter a parse tree produced by KotlinParser#classModifier.
def enterClassModifier(self, ctx:KotlinParser.ClassModifierContext):
pass
# Exit a parse tree produced by KotlinParser#classModifier.
def exitClassModifier(self, ctx:KotlinParser.ClassModifierContext):
pass
# Enter a parse tree produced by KotlinParser#memberModifier.
def enterMemberModifier(self, ctx:KotlinParser.MemberModifierContext):
pass
# Exit a parse tree produced by KotlinParser#memberModifier.
def exitMemberModifier(self, ctx:KotlinParser.MemberModifierContext):
pass
# Enter a parse tree produced by KotlinParser#visibilityModifier.
def enterVisibilityModifier(self, ctx:KotlinParser.VisibilityModifierContext):
pass
# Exit a parse tree produced by KotlinParser#visibilityModifier.
def exitVisibilityModifier(self, ctx:KotlinParser.VisibilityModifierContext):
pass
# Enter a parse tree produced by KotlinParser#varianceAnnotation.
def enterVarianceAnnotation(self, ctx:KotlinParser.VarianceAnnotationContext):
pass
# Exit a parse tree produced by KotlinParser#varianceAnnotation.
def exitVarianceAnnotation(self, ctx:KotlinParser.VarianceAnnotationContext):
pass
# Enter a parse tree produced by KotlinParser#functionModifier.
def enterFunctionModifier(self, ctx:KotlinParser.FunctionModifierContext):
pass
# Exit a parse tree produced by KotlinParser#functionModifier.
def exitFunctionModifier(self, ctx:KotlinParser.FunctionModifierContext):
pass
# Enter a parse tree produced by KotlinParser#propertyModifier.
def enterPropertyModifier(self, ctx:KotlinParser.PropertyModifierContext):
pass
# Exit a parse tree produced by KotlinParser#propertyModifier.
def exitPropertyModifier(self, ctx:KotlinParser.PropertyModifierContext):
pass
# Enter a parse tree produced by KotlinParser#inheritanceModifier.
def enterInheritanceModifier(self, ctx:KotlinParser.InheritanceModifierContext):
pass
# Exit a parse tree produced by KotlinParser#inheritanceModifier.
def exitInheritanceModifier(self, ctx:KotlinParser.InheritanceModifierContext):
pass
# Enter a parse tree produced by KotlinParser#parameterModifier.
def enterParameterModifier(self, ctx:KotlinParser.ParameterModifierContext):
pass
# Exit a parse tree produced by KotlinParser#parameterModifier.
def exitParameterModifier(self, ctx:KotlinParser.ParameterModifierContext):
pass
# Enter a parse tree produced by KotlinParser#typeParameterModifier.
def enterTypeParameterModifier(self, ctx:KotlinParser.TypeParameterModifierContext):
pass
# Exit a parse tree produced by KotlinParser#typeParameterModifier.
def exitTypeParameterModifier(self, ctx:KotlinParser.TypeParameterModifierContext):
pass
# Enter a parse tree produced by KotlinParser#labelDefinition.
def enterLabelDefinition(self, ctx:KotlinParser.LabelDefinitionContext):
pass
# Exit a parse tree produced by KotlinParser#labelDefinition.
def exitLabelDefinition(self, ctx:KotlinParser.LabelDefinitionContext):
pass
# Enter a parse tree produced by KotlinParser#annotations.
def enterAnnotations(self, ctx:KotlinParser.AnnotationsContext):
pass
# Exit a parse tree produced by KotlinParser#annotations.
def exitAnnotations(self, ctx:KotlinParser.AnnotationsContext):
pass
# Enter a parse tree produced by KotlinParser#annotation.
def enterAnnotation(self, ctx:KotlinParser.AnnotationContext):
pass
# Exit a parse tree produced by KotlinParser#annotation.
def exitAnnotation(self, ctx:KotlinParser.AnnotationContext):
pass
# Enter a parse tree produced by KotlinParser#annotationList.
def enterAnnotationList(self, ctx:KotlinParser.AnnotationListContext):
pass
# Exit a parse tree produced by KotlinParser#annotationList.
def exitAnnotationList(self, ctx:KotlinParser.AnnotationListContext):
pass
# Enter a parse tree produced by KotlinParser#annotationUseSiteTarget.
def enterAnnotationUseSiteTarget(self, ctx:KotlinParser.AnnotationUseSiteTargetContext):
pass
# Exit a parse tree produced by KotlinParser#annotationUseSiteTarget.
def exitAnnotationUseSiteTarget(self, ctx:KotlinParser.AnnotationUseSiteTargetContext):
pass
# Enter a parse tree produced by KotlinParser#unescapedAnnotation.
def enterUnescapedAnnotation(self, ctx:KotlinParser.UnescapedAnnotationContext):
pass
# Exit a parse tree produced by KotlinParser#unescapedAnnotation.
def exitUnescapedAnnotation(self, ctx:KotlinParser.UnescapedAnnotationContext):
pass
# Enter a parse tree produced by KotlinParser#identifier.
def enterIdentifier(self, ctx:KotlinParser.IdentifierContext):
pass
# Exit a parse tree produced by KotlinParser#identifier.
def exitIdentifier(self, ctx:KotlinParser.IdentifierContext):
pass
# Enter a parse tree produced by KotlinParser#simpleIdentifier.
def enterSimpleIdentifier(self, ctx:KotlinParser.SimpleIdentifierContext):
pass
# Exit a parse tree produced by KotlinParser#simpleIdentifier.
def exitSimpleIdentifier(self, ctx:KotlinParser.SimpleIdentifierContext):
pass
# Enter a parse tree produced by KotlinParser#semi.
def enterSemi(self, ctx:KotlinParser.SemiContext):
pass
# Exit a parse tree produced by KotlinParser#semi.
def exitSemi(self, ctx:KotlinParser.SemiContext):
pass
# Enter a parse tree produced by KotlinParser#anysemi.
def enterAnysemi(self, ctx:KotlinParser.AnysemiContext):
pass
# Exit a parse tree produced by KotlinParser#anysemi.
def exitAnysemi(self, ctx:KotlinParser.AnysemiContext):
pass
| 34.873962
| 98
| 0.755172
| 4,425
| 46,208
| 7.884068
| 0.140339
| 0.050391
| 0.083985
| 0.151174
| 0.831427
| 0.489294
| 0.488377
| 0.488233
| 0
| 0
| 0
| 0.000133
| 0.187608
| 46,208
| 1,324
| 99
| 34.900302
| 0.92922
| 0.377878
| 0
| 0.494915
| 1
| 0
| 0.000036
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.494915
| false
| 0.494915
| 0.015254
| 0
| 0.511864
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
3d26e6ffc282fcde504ce92ad56a3c945fb06640
| 58
|
py
|
Python
|
tests/import/import_pkg6.py
|
learnforpractice/micropython-cpp
|
004bc8382f74899e7b876cc29bfa6a9cc976ba10
|
[
"MIT"
] | 13,648
|
2015-01-01T01:34:51.000Z
|
2022-03-31T16:19:53.000Z
|
tests/import/import_pkg6.py
|
learnforpractice/micropython-cpp
|
004bc8382f74899e7b876cc29bfa6a9cc976ba10
|
[
"MIT"
] | 7,092
|
2015-01-01T07:59:11.000Z
|
2022-03-31T23:52:18.000Z
|
tests/import/import_pkg6.py
|
learnforpractice/micropython-cpp
|
004bc8382f74899e7b876cc29bfa6a9cc976ba10
|
[
"MIT"
] | 4,942
|
2015-01-02T11:48:50.000Z
|
2022-03-31T19:57:10.000Z
|
# This tests relative imports as used in pkg6
import pkg6
| 19.333333
| 45
| 0.793103
| 10
| 58
| 4.6
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042553
| 0.189655
| 58
| 2
| 46
| 29
| 0.93617
| 0.741379
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3d272e39a259a6940e2d95620faa0f9cddde6161
| 300
|
py
|
Python
|
lang/py/cookbook/v2/source/cb2_3_3_exm_1.py
|
ch1huizong/learning
|
632267634a9fd84a5f5116de09ff1e2681a6cc85
|
[
"MIT"
] | null | null | null |
lang/py/cookbook/v2/source/cb2_3_3_exm_1.py
|
ch1huizong/learning
|
632267634a9fd84a5f5116de09ff1e2681a6cc85
|
[
"MIT"
] | null | null | null |
lang/py/cookbook/v2/source/cb2_3_3_exm_1.py
|
ch1huizong/learning
|
632267634a9fd84a5f5116de09ff1e2681a6cc85
|
[
"MIT"
] | null | null | null |
if __name__=='__main__':
starts = [datetime.date(2005, 01, 04), datetime.date(2005, 01, 03)]
end = datetime.date(2005, 01, 10)
for s in starts:
days = rrule.rrule(rrule.DAILY, dtstart=s, until=end).count()
print "%d days shows as %d weeks "% (days, weeks_between(s, end))
| 42.857143
| 73
| 0.626667
| 46
| 300
| 3.891304
| 0.586957
| 0.201117
| 0.268156
| 0.301676
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101266
| 0.21
| 300
| 6
| 74
| 50
| 0.654008
| 0
| 0
| 0
| 0
| 0
| 0.113333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.166667
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3d3f962ebbbdb1516a8abdc28d610e03c1e46db2
| 53
|
py
|
Python
|
drfpasswordless/exceptions.py
|
MadalinaDinga/django-rest-framework-passwordless
|
d8af9d67dd7fb929c521c2596371d4375fce554f
|
[
"MIT"
] | null | null | null |
drfpasswordless/exceptions.py
|
MadalinaDinga/django-rest-framework-passwordless
|
d8af9d67dd7fb929c521c2596371d4375fce554f
|
[
"MIT"
] | null | null | null |
drfpasswordless/exceptions.py
|
MadalinaDinga/django-rest-framework-passwordless
|
d8af9d67dd7fb929c521c2596371d4375fce554f
|
[
"MIT"
] | 1
|
2021-01-31T21:55:42.000Z
|
2021-01-31T21:55:42.000Z
|
class DRFPwdlessValidationError(Exception):
pass
| 17.666667
| 43
| 0.811321
| 4
| 53
| 10.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132075
| 53
| 2
| 44
| 26.5
| 0.934783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
3d40070bf645e24576661c6143aa3856d2711f4d
| 37
|
py
|
Python
|
src/server_design/algorithms/compressor/designSolutions/sol_840.py
|
robertpardillo/Funnel
|
f45e419f55e085bbb95e17c47b4c94a7c625ba9b
|
[
"MIT"
] | 1
|
2021-05-18T16:10:49.000Z
|
2021-05-18T16:10:49.000Z
|
src/server_design/algorithms/compressor/designSolutions/sol_840.py
|
robertpardillo/Funnel
|
f45e419f55e085bbb95e17c47b4c94a7c625ba9b
|
[
"MIT"
] | null | null | null |
src/server_design/algorithms/compressor/designSolutions/sol_840.py
|
robertpardillo/Funnel
|
f45e419f55e085bbb95e17c47b4c94a7c625ba9b
|
[
"MIT"
] | null | null | null |
def sol840(design_parameters):
pass
| 18.5
| 31
| 0.810811
| 5
| 37
| 5.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0.108108
| 37
| 2
| 32
| 18.5
| 0.787879
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
3d6290cfe8a305d1546c7acdef7edc29e6a5adf7
| 129
|
py
|
Python
|
lc/diffcali.py
|
RapidLzj/LightCurve
|
984627e78fee97872183dcc8d4d4ccf02b44eeb4
|
[
"MIT"
] | null | null | null |
lc/diffcali.py
|
RapidLzj/LightCurve
|
984627e78fee97872183dcc8d4d4ccf02b44eeb4
|
[
"MIT"
] | null | null | null |
lc/diffcali.py
|
RapidLzj/LightCurve
|
984627e78fee97872183dcc8d4d4ccf02b44eeb4
|
[
"MIT"
] | null | null | null |
"""
201901, Dr. Jie Zheng, Beijing & Xinglong, NAOC
Light_Curve
"""
import numpy as np
import astropy.io.fits as fits
| 12.9
| 51
| 0.666667
| 19
| 129
| 4.473684
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 0.232558
| 129
| 9
| 52
| 14.333333
| 0.79798
| 0.457364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
182ae90533d8975d25551042d7f9000ff1ff470d
| 41
|
py
|
Python
|
neurosynth/analysis/ml.py
|
wanirepo/Neurosynth
|
5b770ec31c5095c16e27ebe664fa5d515c662298
|
[
"MIT"
] | 2
|
2016-12-26T15:29:18.000Z
|
2017-04-22T20:10:37.000Z
|
neurosynth/analysis/ml.py
|
wanirepo/Neurosynth
|
5b770ec31c5095c16e27ebe664fa5d515c662298
|
[
"MIT"
] | null | null | null |
neurosynth/analysis/ml.py
|
wanirepo/Neurosynth
|
5b770ec31c5095c16e27ebe664fa5d515c662298
|
[
"MIT"
] | null | null | null |
""" Machine-learning-related methods. """
| 41
| 41
| 0.707317
| 4
| 41
| 7.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 41
| 1
| 41
| 41
| 0.763158
| 0.804878
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
186f56fc9a00bdd8768584971425c06bb7c1a532
| 122
|
py
|
Python
|
app/core/events.py
|
melhin/streamchat
|
8a3e7ffdcf4bc84045df71259556f4267a755351
|
[
"MIT"
] | null | null | null |
app/core/events.py
|
melhin/streamchat
|
8a3e7ffdcf4bc84045df71259556f4267a755351
|
[
"MIT"
] | 3
|
2020-09-16T13:30:17.000Z
|
2020-09-19T09:56:50.000Z
|
app/core/events.py
|
melhin/streamchat
|
8a3e7ffdcf4bc84045df71259556f4267a755351
|
[
"MIT"
] | null | null | null |
from typing import Callable
from fastapi import FastAPI
from loguru import logger
# DB startup initializations go here
| 15.25
| 36
| 0.819672
| 17
| 122
| 5.882353
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172131
| 122
| 7
| 37
| 17.428571
| 0.990099
| 0.278689
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1880a2baede31cad311881e55053952cb8993933
| 2,669
|
py
|
Python
|
Crypto/ctf/teaser_dragon/solve2.py
|
kamithanthanh/hacmao.github.io
|
87b06df827cc65f737831301bae1d5f3a2d014ff
|
[
"MIT"
] | 1
|
2019-09-27T13:23:00.000Z
|
2019-09-27T13:23:00.000Z
|
Crypto/ctf/teaser_dragon/solve2.py
|
kamithanthanh/hacmao.github.io
|
87b06df827cc65f737831301bae1d5f3a2d014ff
|
[
"MIT"
] | null | null | null |
Crypto/ctf/teaser_dragon/solve2.py
|
kamithanthanh/hacmao.github.io
|
87b06df827cc65f737831301bae1d5f3a2d014ff
|
[
"MIT"
] | 1
|
2019-08-25T09:17:07.000Z
|
2019-08-25T09:17:07.000Z
|
from gmpy2 import gcd, invert, iroot, is_prime
from Crypto.Util.number import long_to_bytes
p1 = 282595361018796512312481928903796535047168039821441204226899357708165480989181288601210607191471483534037953052604722708819774231230476577951670676743338887609132820418468389978419501153422449272224388422022777
q1 = 142270506848638924547091203976235495577725242858694711068289574174127601000137457280276860615471044907560710121669055364010408768146949985099404319539891688093875478389341632242096859500255283810703767020918479
k = 877
p2 = 291668652611471250039066078554824884845341136873092210122454888337748213391694969640183343019452438800975699247613989121123985462360872265327833435184781051854777074884190706087067889456284908187292126902073849
q2 = 90298557884682577669238320760096423994217812898822512514104930945042122418007925771281125855142645396913218673571816112036657123492733042972301983242487835472292994595416656844378721884370309120262139835889657
k = 1041
p3 = 267307309343866797026967908679365544381223264502857628608660439661084648014195234872217075156454448820508389018205344581075300847474799458610853350116251989700007053821013120164193801622760845268409925117073227
p4 = 188689169745401648234984799686937623590015544678958930140026860499157441295507274434268349194461155162481283679350641089523071656015001291946438485044113564467435184782104140072331748380561726605546500856968771
f = open("output.txt", "r")
data = f.read().split("\n")
for i in range(4) :
data[i] = map(int, data[i].split(" ")[1:])
e = 1667
r = gcd(data[1][0], data[0][0])
d2 = invert(e, (p1 - 1)*(q1- 1)*(r-1))
print data[1][0] % q1
d1 = invert(e, (p2 - 1)*(q2 - 1)*(r - 1))
print data[2][0] % (p3 ** 2)
q3 = data[2][0] / (p3**2)
d3 = invert(e, (p3 - 1)*p3*(q3-1))
q4 = data[3][0] / p4
assert data[3][0] == p4 * q4
d4 = invert(e, (p4-1)*(q4-1))
c = 594744523070645240942929359037746826510854567332177011620057998249212031582656570895820012394249671104987340986625186067934908726882826886403853350036347685535238091672944302281583099599474583019751882763474741100766908948169830205008225271404703602995718048181715640523980687208077859421140848814778358928590611556775259065145896624024470165717487152605409627124554333901173541260152787825789663724638794217683229247154941119470880060888700805864373121475407572771283720944279236600821215173142912640154867341243164010769049585665362567363683571268650798207317025536004271505222437026243088918839778445295683434396247524954340356
c = pow(c, d4, data[3][0])
c = pow(c, d3, data[2][0])
c = pow(c, d2, data[1][0])
c = pow(c, d1, data[0][0])
print long_to_bytes(c)
| 76.257143
| 635
| 0.85163
| 161
| 2,669
| 14.086957
| 0.385093
| 0.012346
| 0.008818
| 0.007937
| 0.016755
| 0
| 0
| 0
| 0
| 0
| 0
| 0.798054
| 0.076058
| 2,669
| 35
| 636
| 76.257143
| 0.121655
| 0
| 0
| 0
| 0
| 0
| 0.005311
| 0
| 0
| 1
| 0
| 0
| 0.032258
| 0
| null | null | 0
| 0.064516
| null | null | 0.096774
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
43fea4b5a1b384cf0c836e62fbd151c97b4558ac
| 119
|
py
|
Python
|
tests/helpers.py
|
jellevandehaterd/mypackage
|
c97e55446f37fe231ae71bf5cc955fd6b90b85c0
|
[
"Apache-2.0"
] | null | null | null |
tests/helpers.py
|
jellevandehaterd/mypackage
|
c97e55446f37fe231ae71bf5cc955fd6b90b85c0
|
[
"Apache-2.0"
] | null | null | null |
tests/helpers.py
|
jellevandehaterd/mypackage
|
c97e55446f37fe231ae71bf5cc955fd6b90b85c0
|
[
"Apache-2.0"
] | 1
|
2020-07-17T08:37:02.000Z
|
2020-07-17T08:37:02.000Z
|
class AnyArg(object):
"""AnyArg for wildcard mock assertions"""
def __eq__(self, b: any):
return True
| 19.833333
| 45
| 0.630252
| 15
| 119
| 4.733333
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.252101
| 119
| 5
| 46
| 23.8
| 0.797753
| 0.294118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
43ffd990f3a3affa9e8aa1ce5ff01c9a19f036b7
| 173
|
py
|
Python
|
pymcuprog/version.py
|
xthanhn/mcuprog
|
84eb4baf82dcefe696d5d50344d7d5ca155cbefd
|
[
"MIT"
] | 1
|
2021-08-25T08:59:33.000Z
|
2021-08-25T08:59:33.000Z
|
pymcuprog/version.py
|
xthanhn/mcuprog
|
84eb4baf82dcefe696d5d50344d7d5ca155cbefd
|
[
"MIT"
] | null | null | null |
pymcuprog/version.py
|
xthanhn/mcuprog
|
84eb4baf82dcefe696d5d50344d7d5ca155cbefd
|
[
"MIT"
] | null | null | null |
""" This file was generated when mcuprog was built """
VERSION = '3.9.1.120'
COMMIT_ID = '84ffb61b46baa4fb20896deb0179d09fe3097b5c'
BUILD_DATE = '2021-08-23 18:16:12 +0700'
| 34.6
| 54
| 0.745665
| 25
| 173
| 5.08
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.309211
| 0.121387
| 173
| 4
| 55
| 43.25
| 0.526316
| 0.265896
| 0
| 0
| 1
| 0
| 0.621849
| 0.336134
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a12a6e88872a9c8733f8bcedae4eedb663907579
| 57
|
py
|
Python
|
schemadown/generate/__init__.py
|
DrTexx/schema-down
|
5cf4c49689dfc6796dbdb2e6c7179d8d12dea870
|
[
"Apache-2.0"
] | null | null | null |
schemadown/generate/__init__.py
|
DrTexx/schema-down
|
5cf4c49689dfc6796dbdb2e6c7179d8d12dea870
|
[
"Apache-2.0"
] | null | null | null |
schemadown/generate/__init__.py
|
DrTexx/schema-down
|
5cf4c49689dfc6796dbdb2e6c7179d8d12dea870
|
[
"Apache-2.0"
] | null | null | null |
"""imports."""
from .document import SchemaDownDocument
| 14.25
| 40
| 0.754386
| 5
| 57
| 8.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 57
| 3
| 41
| 19
| 0.843137
| 0.140351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a144f3ae44a9f02f7f68270958bf3f5d5c48b0e5
| 159
|
py
|
Python
|
app.py
|
avim2809/CameraSiteBlocker
|
bfc0434e75e8f3f95c459a4adc86b7673200816e
|
[
"Apache-2.0"
] | null | null | null |
app.py
|
avim2809/CameraSiteBlocker
|
bfc0434e75e8f3f95c459a4adc86b7673200816e
|
[
"Apache-2.0"
] | null | null | null |
app.py
|
avim2809/CameraSiteBlocker
|
bfc0434e75e8f3f95c459a4adc86b7673200816e
|
[
"Apache-2.0"
] | null | null | null |
from camera_age import get_age_from_camera
from web_blocker import block_host , unblock_host
ages=get_age_from_camera()
print("the res is: "+str(ages[0]))
| 17.666667
| 49
| 0.792453
| 28
| 159
| 4.142857
| 0.607143
| 0.258621
| 0.172414
| 0.275862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007143
| 0.119497
| 159
| 8
| 50
| 19.875
| 0.821429
| 0
| 0
| 0
| 0
| 0
| 0.075949
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0.25
| 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
a14a58bf6de8d555cad7124b412932f9342cb8a9
| 201
|
py
|
Python
|
Calibration/TkAlCaRecoProducers/python/AlcaSiStripGainsHarvester_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
Calibration/TkAlCaRecoProducers/python/AlcaSiStripGainsHarvester_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
Calibration/TkAlCaRecoProducers/python/AlcaSiStripGainsHarvester_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
import FWCore.ParameterSet.Config as cms
from CalibTracker.SiStripChannelGain.SiStripGainsPCLHarvester_cfi import SiStripGainsPCLHarvester
alcaSiStripGainsHarvester = SiStripGainsPCLHarvester.clone()
| 40.2
| 97
| 0.900498
| 16
| 201
| 11.25
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059701
| 201
| 4
| 98
| 50.25
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a1be8c7fd399038f87423cc06e68b997124ecc38
| 328
|
py
|
Python
|
peek_storage/_private/service/sw_install/PluginSwInstallManager.py
|
Synerty/peek-storage
|
b25a67b97e22675cb0d47ddc01796cc34c4e6746
|
[
"MIT"
] | null | null | null |
peek_storage/_private/service/sw_install/PluginSwInstallManager.py
|
Synerty/peek-storage
|
b25a67b97e22675cb0d47ddc01796cc34c4e6746
|
[
"MIT"
] | null | null | null |
peek_storage/_private/service/sw_install/PluginSwInstallManager.py
|
Synerty/peek-storage
|
b25a67b97e22675cb0d47ddc01796cc34c4e6746
|
[
"MIT"
] | 1
|
2016-12-12T21:44:39.000Z
|
2016-12-12T21:44:39.000Z
|
from peek_platform import PeekPlatformConfig
from peek_platform.sw_install.PluginSwInstallManagerABC import PluginSwInstallManagerABC
class PluginSwInstallManager(PluginSwInstallManagerABC):
def notifyOfPluginVersionUpdate(self, pluginName, targetVersion):
PeekPlatformConfig.pluginLoader.loadPlugin(pluginName)
| 32.8
| 88
| 0.862805
| 25
| 328
| 11.2
| 0.68
| 0.057143
| 0.114286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094512
| 328
| 9
| 89
| 36.444444
| 0.942761
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a1d8afb002227dcaecb84514e2ee56a18a474018
| 121
|
py
|
Python
|
pretorched/gans/sagan.py
|
schwettmann/pretorched-x
|
ce8c3712434b3cd5d85dcbe8582ff51ddfa7d4ed
|
[
"MIT"
] | 16
|
2019-05-31T06:09:36.000Z
|
2021-03-04T12:00:34.000Z
|
pretorched/gans/sagan.py
|
schwettmann/pretorched-x
|
ce8c3712434b3cd5d85dcbe8582ff51ddfa7d4ed
|
[
"MIT"
] | 3
|
2022-02-27T06:43:34.000Z
|
2022-03-18T08:30:30.000Z
|
pretorched/gans/sagan.py
|
schwettmann/pretorched-x
|
ce8c3712434b3cd5d85dcbe8582ff51ddfa7d4ed
|
[
"MIT"
] | 3
|
2019-05-31T13:07:23.000Z
|
2020-07-22T18:58:47.000Z
|
from . import biggan
class Generator(biggan.Generator):
    """SAGAN generator: a thin alias of the BigGAN generator.

    No behavior is added or overridden; this class exists so the SAGAN
    module exposes a ``Generator`` name of its own.
    """
    pass
class Discriminator(biggan.Discriminator):
    """SAGAN discriminator: a thin alias of the BigGAN discriminator.

    No behavior is added or overridden; this class exists so the SAGAN
    module exposes a ``Discriminator`` name of its own.
    """
    pass
| 12.1
| 42
| 0.743802
| 13
| 121
| 6.923077
| 0.538462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 121
| 9
| 43
| 13.444444
| 0.909091
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.4
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
a1da1697ddaa97920d8022385fb91ca0dde9afde
| 908
|
py
|
Python
|
main/old/transfg_ctfg/utils/common_nni.py
|
xuritian317/pytorch-image-models
|
034139e4871cddd0f6c24931b6ac380ba9f28711
|
[
"Apache-2.0"
] | null | null | null |
main/old/transfg_ctfg/utils/common_nni.py
|
xuritian317/pytorch-image-models
|
034139e4871cddd0f6c24931b6ac380ba9f28711
|
[
"Apache-2.0"
] | null | null | null |
main/old/transfg_ctfg/utils/common_nni.py
|
xuritian317/pytorch-image-models
|
034139e4871cddd0f6c24931b6ac380ba9f28711
|
[
"Apache-2.0"
] | null | null | null |
import os
import time
def file_write_log(args, *texts):
    """Append a timestamped entry to the training log.

    Only the non-distributed process (``local_rank`` of ``-1``) or the
    primary process (``local_rank`` of ``0``) writes, so multi-process
    training does not duplicate log lines.

    Parameters
    ----------
    args : dict
        Must contain ``'local_rank'``, ``'output_dir'`` and
        ``'train_log_name'``.
    *texts : str
        Strings written verbatim after the timestamp line.
    """
    if args['local_rank'] not in (-1, 0):
        return
    log_path = os.path.join(args['output_dir'], args['train_log_name'])
    with open(log_path, 'a') as log_file:
        log_file.write('\n')
        stamp = time.asctime(time.localtime(time.time()))
        log_file.write(str(stamp))
        log_file.write('\n')
        for chunk in texts:
            log_file.write(chunk)
def file_write_log_ori(args, *texts):
    """Append a timestamped entry to the training log (attribute-style args).

    Variant of ``file_write_log`` that reads ``args`` via attribute access
    instead of dict lookups.

    Parameters
    ----------
    args : object or None
        Namespace with ``local_rank``, ``output_dir`` and ``train_log_name``
        attributes.  When ``None``, built-in defaults are substituted.
    *texts : str
        Strings written verbatim after the timestamp line.

    Notes
    -----
    The original code tried to assign attributes on ``None`` itself
    (``args.local_rank = 0``), which raised ``AttributeError``, and it
    misspelled the attribute as ``train_log_nam``.  Both defects are
    fixed here by building a proper default namespace.
    """
    if args is None:
        # Build a default namespace instead of mutating None (a bug in the
        # original), and use the correctly spelled 'train_log_name'.
        import types
        args = types.SimpleNamespace(
            local_rank=0,
            output_dir='/.output',
            train_log_name='text',
        )
    # Only the non-distributed (-1) or primary (0) process writes, so
    # multi-process training does not duplicate log lines.
    if args.local_rank in [-1, 0]:
        with open(os.path.join(args.output_dir, args.train_log_name), 'a') as f:
            f.write('\n')
            localtime = time.asctime(time.localtime(time.time()))
            f.write(str(localtime))
            f.write('\n')
            for text in texts:
                f.write(text)
| 29.290323
| 86
| 0.538546
| 127
| 908
| 3.716535
| 0.299213
| 0.101695
| 0.059322
| 0.063559
| 0.707627
| 0.707627
| 0.707627
| 0.707627
| 0.707627
| 0.707627
| 0
| 0.007987
| 0.310573
| 908
| 30
| 87
| 30.266667
| 0.746006
| 0.009912
| 0
| 0.5
| 0
| 0
| 0.06243
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0.083333
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.