hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
15f4d69db682093ca5736a39d9b778587d349ed4
| 75
|
py
|
Python
|
trello2toggl/__init__.py
|
dobarkod/trello2toggl
|
303c76db46932d7594f9ef19529c3992d641ec9f
|
[
"Unlicense"
] | 1
|
2015-02-01T22:40:47.000Z
|
2015-02-01T22:40:47.000Z
|
trello2toggl/__init__.py
|
dobarkod/trello2toggl
|
303c76db46932d7594f9ef19529c3992d641ec9f
|
[
"Unlicense"
] | null | null | null |
trello2toggl/__init__.py
|
dobarkod/trello2toggl
|
303c76db46932d7594f9ef19529c3992d641ec9f
|
[
"Unlicense"
] | 1
|
2021-07-08T09:48:45.000Z
|
2021-07-08T09:48:45.000Z
|
from .toggl import Toggl
from .trello import Trello
from .sync import Sync
| 18.75
| 26
| 0.8
| 12
| 75
| 5
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 75
| 3
| 27
| 25
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
15f98b3fdf6e445a4ed4d8fc40bb23b2f95d1769
| 657
|
py
|
Python
|
Projects/project05/dequeue-shrink.py
|
tonysulfaro/CSE-331
|
b4f743b1127ebe531ba8417420d043e9c149135a
|
[
"MIT"
] | 2
|
2019-02-13T17:49:18.000Z
|
2020-09-30T04:51:53.000Z
|
Projects/project05/dequeue-shrink.py
|
tonysulfaro/CSE-331
|
b4f743b1127ebe531ba8417420d043e9c149135a
|
[
"MIT"
] | null | null | null |
Projects/project05/dequeue-shrink.py
|
tonysulfaro/CSE-331
|
b4f743b1127ebe531ba8417420d043e9c149135a
|
[
"MIT"
] | null | null | null |
from CircularQueue import CircularQueue
def main():
test = CircularQueue(15)
test.enqueue(3)
test.enqueue(3)
test.enqueue(3)
test.enqueue(3)
test.enqueue(3)
test.enqueue(3)
test.enqueue(3)
test.enqueue(3)
print(test)
print('REMOVE')
test.dequeue()
test.dequeue()
test.enqueue(5)
test.enqueue(5)
test.enqueue(5)
test.enqueue(5)
test.dequeue()
test.dequeue()
test.dequeue()
test.dequeue()
test.dequeue()
# test.dequeue()
# test.dequeue()
# test.dequeue()
# test.dequeue()
# test.dequeue()
print(test)
if __name__ == '__main__':
main()
| 16.02439
| 39
| 0.592085
| 79
| 657
| 4.822785
| 0.189873
| 0.346457
| 0.433071
| 0.577428
| 0.724409
| 0.666667
| 0.666667
| 0.666667
| 0.666667
| 0.666667
| 0
| 0.028747
| 0.258752
| 657
| 40
| 40
| 16.425
| 0.753593
| 0.112633
| 0
| 0.777778
| 0
| 0
| 0.024263
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037037
| false
| 0
| 0.037037
| 0
| 0.074074
| 0.111111
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c6211a0a6b5d830c804d3c7b5d6cf0a9360420f4
| 1,568
|
py
|
Python
|
test/test_v1alpha1_application_spec.py
|
RyanSiu1995/argocd-python-client
|
2e8f097fe09f247a46ac70692241a93d1acd076a
|
[
"MIT"
] | 1
|
2021-11-20T13:37:43.000Z
|
2021-11-20T13:37:43.000Z
|
test/test_v1alpha1_application_spec.py
|
RyanSiu1995/argocd-python-client
|
2e8f097fe09f247a46ac70692241a93d1acd076a
|
[
"MIT"
] | null | null | null |
test/test_v1alpha1_application_spec.py
|
RyanSiu1995/argocd-python-client
|
2e8f097fe09f247a46ac70692241a93d1acd076a
|
[
"MIT"
] | null | null | null |
"""
Consolidate Services
Description of all APIs # noqa: E501
The version of the OpenAPI document: version not set
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import argocd_python_client
from argocd_python_client.model.v1alpha1_application_destination import V1alpha1ApplicationDestination
from argocd_python_client.model.v1alpha1_application_source import V1alpha1ApplicationSource
from argocd_python_client.model.v1alpha1_info import V1alpha1Info
from argocd_python_client.model.v1alpha1_resource_ignore_differences import V1alpha1ResourceIgnoreDifferences
from argocd_python_client.model.v1alpha1_sync_policy import V1alpha1SyncPolicy
globals()['V1alpha1ApplicationDestination'] = V1alpha1ApplicationDestination
globals()['V1alpha1ApplicationSource'] = V1alpha1ApplicationSource
globals()['V1alpha1Info'] = V1alpha1Info
globals()['V1alpha1ResourceIgnoreDifferences'] = V1alpha1ResourceIgnoreDifferences
globals()['V1alpha1SyncPolicy'] = V1alpha1SyncPolicy
from argocd_python_client.model.v1alpha1_application_spec import V1alpha1ApplicationSpec
class TestV1alpha1ApplicationSpec(unittest.TestCase):
"""V1alpha1ApplicationSpec unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testV1alpha1ApplicationSpec(self):
"""Test V1alpha1ApplicationSpec"""
# FIXME: construct object with mandatory attributes with example values
# model = V1alpha1ApplicationSpec() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 34.086957
| 109
| 0.803571
| 146
| 1,568
| 8.39726
| 0.458904
| 0.068516
| 0.102773
| 0.107667
| 0.198206
| 0.198206
| 0.112561
| 0
| 0
| 0
| 0
| 0.044021
| 0.13074
| 1,568
| 45
| 110
| 34.844444
| 0.855466
| 0.220026
| 0
| 0.130435
| 1
| 0
| 0.10615
| 0.074136
| 0
| 0
| 0
| 0.022222
| 0
| 1
| 0.130435
| false
| 0.130435
| 0.391304
| 0
| 0.565217
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
d6a73b4f51ee405dcff8b521c7e25aaace010e74
| 247
|
py
|
Python
|
batch/twitter_setting.py
|
happou31/dora-stat
|
1405fff5d77fc01c4af7811388bc1003a840545d
|
[
"MIT"
] | null | null | null |
batch/twitter_setting.py
|
happou31/dora-stat
|
1405fff5d77fc01c4af7811388bc1003a840545d
|
[
"MIT"
] | null | null | null |
batch/twitter_setting.py
|
happou31/dora-stat
|
1405fff5d77fc01c4af7811388bc1003a840545d
|
[
"MIT"
] | null | null | null |
consumer_key = "TSKf1HtYKBsnYU9qfpvbRJkxo"
consumer_secret = "Y5lH7u6MDu5DjV5hUxEkwUpPwugYvEuqfpqyvP7y5ulen9XUML"
access_token = '115029611-4TampaXbxHmUHcpK5Cll8h7rioP80SJMNneZiYCN'
access_secret = 'wx4iaVvgvY5ZknCn6bOiKFiTzAU62R5PXC8b29RC6alwU'
| 41.166667
| 70
| 0.894737
| 13
| 247
| 16.692308
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149573
| 0.052632
| 247
| 5
| 71
| 49.4
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0.691057
| 0.691057
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d6b55b5caf47a3dab29dc3b05fc2f754488f7b0e
| 37,057
|
py
|
Python
|
ros/genpy/src/genpy/msg/_TestManyFields.py
|
numberen/apollo-platform
|
8f359c8d00dd4a98f56ec2276c5663cb6c100e47
|
[
"Apache-2.0"
] | 742
|
2017-07-05T02:49:36.000Z
|
2022-03-30T12:55:43.000Z
|
ros/genpy/src/genpy/msg/_TestManyFields.py
|
numberen/apollo-platform
|
8f359c8d00dd4a98f56ec2276c5663cb6c100e47
|
[
"Apache-2.0"
] | 73
|
2017-07-06T12:50:51.000Z
|
2022-03-07T08:07:07.000Z
|
ros/genpy/src/genpy/msg/_TestManyFields.py
|
numberen/apollo-platform
|
8f359c8d00dd4a98f56ec2276c5663cb6c100e47
|
[
"Apache-2.0"
] | 425
|
2017-07-04T22:03:29.000Z
|
2022-03-29T06:59:06.000Z
|
"""autogenerated by genpy from genpy/TestManyFields.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class TestManyFields(genpy.Message):
_md5sum = "e95ce9e480ec14cc0488f63b5e806d93"
_type = "genpy/TestManyFields"
_has_header = False #flag to mark the presence of a Header object
_full_text = """int32 v1
int32 v2
int32 v3
int32 v4
int32 v5
int32 v6
int32 v7
int32 v8
int32 v9
int32 v10
int32 v11
int32 v12
int32 v13
int32 v14
int32 v15
int32 v16
int32 v17
int32 v18
int32 v19
int32 v20
int32 v21
int32 v22
int32 v23
int32 v24
int32 v25
int32 v26
int32 v27
int32 v28
int32 v29
int32 v30
int32 v31
int32 v32
int32 v33
int32 v34
int32 v35
int32 v36
int32 v37
int32 v38
int32 v39
int32 v40
int32 v41
int32 v42
int32 v43
int32 v44
int32 v45
int32 v46
int32 v47
int32 v48
int32 v49
int32 v50
int32 v51
int32 v52
int32 v53
int32 v54
int32 v55
int32 v56
int32 v57
int32 v58
int32 v59
int32 v60
int32 v61
int32 v62
int32 v63
int32 v64
int32 v65
int32 v66
int32 v67
int32 v68
int32 v69
int32 v70
int32 v71
int32 v72
int32 v73
int32 v74
int32 v75
int32 v76
int32 v77
int32 v78
int32 v79
int32 v80
int32 v81
int32 v82
int32 v83
int32 v84
int32 v85
int32 v86
int32 v87
int32 v88
int32 v89
int32 v90
int32 v91
int32 v92
int32 v93
int32 v94
int32 v95
int32 v96
int32 v97
int32 v98
int32 v99
int32 v100
int32 v101
int32 v102
int32 v103
int32 v104
int32 v105
int32 v106
int32 v107
int32 v108
int32 v109
int32 v110
int32 v111
int32 v112
int32 v113
int32 v114
int32 v115
int32 v116
int32 v117
int32 v118
int32 v119
int32 v120
int32 v121
int32 v122
int32 v123
int32 v124
int32 v125
int32 v126
int32 v127
int32 v128
int32 v129
int32 v130
int32 v131
int32 v132
int32 v133
int32 v134
int32 v135
int32 v136
int32 v137
int32 v138
int32 v139
int32 v140
int32 v141
int32 v142
int32 v143
int32 v144
int32 v145
int32 v146
int32 v147
int32 v148
int32 v149
int32 v150
int32 v151
int32 v152
int32 v153
int32 v154
int32 v155
int32 v156
int32 v157
int32 v158
int32 v159
int32 v160
int32 v161
int32 v162
int32 v163
int32 v164
int32 v165
int32 v166
int32 v167
int32 v168
int32 v169
int32 v170
int32 v171
int32 v172
int32 v173
int32 v174
int32 v175
int32 v176
int32 v177
int32 v178
int32 v179
int32 v180
int32 v181
int32 v182
int32 v183
int32 v184
int32 v185
int32 v186
int32 v187
int32 v188
int32 v189
int32 v190
int32 v191
int32 v192
int32 v193
int32 v194
int32 v195
int32 v196
int32 v197
int32 v198
int32 v199
int32 v200
int32 v201
int32 v202
int32 v203
int32 v204
int32 v205
int32 v206
int32 v207
int32 v208
int32 v209
int32 v210
int32 v211
int32 v212
int32 v213
int32 v214
int32 v215
int32 v216
int32 v217
int32 v218
int32 v219
int32 v220
int32 v221
int32 v222
int32 v223
int32 v224
int32 v225
int32 v226
int32 v227
int32 v228
int32 v229
int32 v230
int32 v231
int32 v232
int32 v233
int32 v234
int32 v235
int32 v236
int32 v237
int32 v238
int32 v239
int32 v240
int32 v241
int32 v242
int32 v243
int32 v244
int32 v245
int32 v246
int32 v247
int32 v248
int32 v249
int32 v250
int32 v251
int32 v252
int32 v253
int32 v254
int32 v255
int32 v256
"""
__slots__ = ['v1','v2','v3','v4','v5','v6','v7','v8','v9','v10','v11','v12','v13','v14','v15','v16','v17','v18','v19','v20','v21','v22','v23','v24','v25','v26','v27','v28','v29','v30','v31','v32','v33','v34','v35','v36','v37','v38','v39','v40','v41','v42','v43','v44','v45','v46','v47','v48','v49','v50','v51','v52','v53','v54','v55','v56','v57','v58','v59','v60','v61','v62','v63','v64','v65','v66','v67','v68','v69','v70','v71','v72','v73','v74','v75','v76','v77','v78','v79','v80','v81','v82','v83','v84','v85','v86','v87','v88','v89','v90','v91','v92','v93','v94','v95','v96','v97','v98','v99','v100','v101','v102','v103','v104','v105','v106','v107','v108','v109','v110','v111','v112','v113','v114','v115','v116','v117','v118','v119','v120','v121','v122','v123','v124','v125','v126','v127','v128','v129','v130','v131','v132','v133','v134','v135','v136','v137','v138','v139','v140','v141','v142','v143','v144','v145','v146','v147','v148','v149','v150','v151','v152','v153','v154','v155','v156','v157','v158','v159','v160','v161','v162','v163','v164','v165','v166','v167','v168','v169','v170','v171','v172','v173','v174','v175','v176','v177','v178','v179','v180','v181','v182','v183','v184','v185','v186','v187','v188','v189','v190','v191','v192','v193','v194','v195','v196','v197','v198','v199','v200','v201','v202','v203','v204','v205','v206','v207','v208','v209','v210','v211','v212','v213','v214','v215','v216','v217','v218','v219','v220','v221','v222','v223','v224','v225','v226','v227','v228','v229','v230','v231','v232','v233','v234','v235','v236','v237','v238','v239','v240','v241','v242','v243','v244','v245','v246','v247','v248','v249','v250','v251','v252','v253','v254','v255','v256']
_slot_types = ['int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','int32','
int32','int32','int32','int32','int32','int32','int32','int32']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29,v30,v31,v32,v33,v34,v35,v36,v37,v38,v39,v40,v41,v42,v43,v44,v45,v46,v47,v48,v49,v50,v51,v52,v53,v54,v55,v56,v57,v58,v59,v60,v61,v62,v63,v64,v65,v66,v67,v68,v69,v70,v71,v72,v73,v74,v75,v76,v77,v78,v79,v80,v81,v82,v83,v84,v85,v86,v87,v88,v89,v90,v91,v92,v93,v94,v95,v96,v97,v98,v99,v100,v101,v102,v103,v104,v105,v106,v107,v108,v109,v110,v111,v112,v113,v114,v115,v116,v117,v118,v119,v120,v121,v122,v123,v124,v125,v126,v127,v128,v129,v130,v131,v132,v133,v134,v135,v136,v137,v138,v139,v140,v141,v142,v143,v144,v145,v146,v147,v148,v149,v150,v151,v152,v153,v154,v155,v156,v157,v158,v159,v160,v161,v162,v163,v164,v165,v166,v167,v168,v169,v170,v171,v172,v173,v174,v175,v176,v177,v178,v179,v180,v181,v182,v183,v184,v185,v186,v187,v188,v189,v190,v191,v192,v193,v194,v195,v196,v197,v198,v199,v200,v201,v202,v203,v204,v205,v206,v207,v208,v209,v210,v211,v212,v213,v214,v215,v216,v217,v218,v219,v220,v221,v222,v223,v224,v225,v226,v227,v228,v229,v230,v231,v232,v233,v234,v235,v236,v237,v238,v239,v240,v241,v242,v243,v244,v245,v246,v247,v248,v249,v250,v251,v252,v253,v254,v255,v256
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(TestManyFields, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.v1 is None:
self.v1 = 0
if self.v2 is None:
self.v2 = 0
if self.v3 is None:
self.v3 = 0
if self.v4 is None:
self.v4 = 0
if self.v5 is None:
self.v5 = 0
if self.v6 is None:
self.v6 = 0
if self.v7 is None:
self.v7 = 0
if self.v8 is None:
self.v8 = 0
if self.v9 is None:
self.v9 = 0
if self.v10 is None:
self.v10 = 0
if self.v11 is None:
self.v11 = 0
if self.v12 is None:
self.v12 = 0
if self.v13 is None:
self.v13 = 0
if self.v14 is None:
self.v14 = 0
if self.v15 is None:
self.v15 = 0
if self.v16 is None:
self.v16 = 0
if self.v17 is None:
self.v17 = 0
if self.v18 is None:
self.v18 = 0
if self.v19 is None:
self.v19 = 0
if self.v20 is None:
self.v20 = 0
if self.v21 is None:
self.v21 = 0
if self.v22 is None:
self.v22 = 0
if self.v23 is None:
self.v23 = 0
if self.v24 is None:
self.v24 = 0
if self.v25 is None:
self.v25 = 0
if self.v26 is None:
self.v26 = 0
if self.v27 is None:
self.v27 = 0
if self.v28 is None:
self.v28 = 0
if self.v29 is None:
self.v29 = 0
if self.v30 is None:
self.v30 = 0
if self.v31 is None:
self.v31 = 0
if self.v32 is None:
self.v32 = 0
if self.v33 is None:
self.v33 = 0
if self.v34 is None:
self.v34 = 0
if self.v35 is None:
self.v35 = 0
if self.v36 is None:
self.v36 = 0
if self.v37 is None:
self.v37 = 0
if self.v38 is None:
self.v38 = 0
if self.v39 is None:
self.v39 = 0
if self.v40 is None:
self.v40 = 0
if self.v41 is None:
self.v41 = 0
if self.v42 is None:
self.v42 = 0
if self.v43 is None:
self.v43 = 0
if self.v44 is None:
self.v44 = 0
if self.v45 is None:
self.v45 = 0
if self.v46 is None:
self.v46 = 0
if self.v47 is None:
self.v47 = 0
if self.v48 is None:
self.v48 = 0
if self.v49 is None:
self.v49 = 0
if self.v50 is None:
self.v50 = 0
if self.v51 is None:
self.v51 = 0
if self.v52 is None:
self.v52 = 0
if self.v53 is None:
self.v53 = 0
if self.v54 is None:
self.v54 = 0
if self.v55 is None:
self.v55 = 0
if self.v56 is None:
self.v56 = 0
if self.v57 is None:
self.v57 = 0
if self.v58 is None:
self.v58 = 0
if self.v59 is None:
self.v59 = 0
if self.v60 is None:
self.v60 = 0
if self.v61 is None:
self.v61 = 0
if self.v62 is None:
self.v62 = 0
if self.v63 is None:
self.v63 = 0
if self.v64 is None:
self.v64 = 0
if self.v65 is None:
self.v65 = 0
if self.v66 is None:
self.v66 = 0
if self.v67 is None:
self.v67 = 0
if self.v68 is None:
self.v68 = 0
if self.v69 is None:
self.v69 = 0
if self.v70 is None:
self.v70 = 0
if self.v71 is None:
self.v71 = 0
if self.v72 is None:
self.v72 = 0
if self.v73 is None:
self.v73 = 0
if self.v74 is None:
self.v74 = 0
if self.v75 is None:
self.v75 = 0
if self.v76 is None:
self.v76 = 0
if self.v77 is None:
self.v77 = 0
if self.v78 is None:
self.v78 = 0
if self.v79 is None:
self.v79 = 0
if self.v80 is None:
self.v80 = 0
if self.v81 is None:
self.v81 = 0
if self.v82 is None:
self.v82 = 0
if self.v83 is None:
self.v83 = 0
if self.v84 is None:
self.v84 = 0
if self.v85 is None:
self.v85 = 0
if self.v86 is None:
self.v86 = 0
if self.v87 is None:
self.v87 = 0
if self.v88 is None:
self.v88 = 0
if self.v89 is None:
self.v89 = 0
if self.v90 is None:
self.v90 = 0
if self.v91 is None:
self.v91 = 0
if self.v92 is None:
self.v92 = 0
if self.v93 is None:
self.v93 = 0
if self.v94 is None:
self.v94 = 0
if self.v95 is None:
self.v95 = 0
if self.v96 is None:
self.v96 = 0
if self.v97 is None:
self.v97 = 0
if self.v98 is None:
self.v98 = 0
if self.v99 is None:
self.v99 = 0
if self.v100 is None:
self.v100 = 0
if self.v101 is None:
self.v101 = 0
if self.v102 is None:
self.v102 = 0
if self.v103 is None:
self.v103 = 0
if self.v104 is None:
self.v104 = 0
if self.v105 is None:
self.v105 = 0
if self.v106 is None:
self.v106 = 0
if self.v107 is None:
self.v107 = 0
if self.v108 is None:
self.v108 = 0
if self.v109 is None:
self.v109 = 0
if self.v110 is None:
self.v110 = 0
if self.v111 is None:
self.v111 = 0
if self.v112 is None:
self.v112 = 0
if self.v113 is None:
self.v113 = 0
if self.v114 is None:
self.v114 = 0
if self.v115 is None:
self.v115 = 0
if self.v116 is None:
self.v116 = 0
if self.v117 is None:
self.v117 = 0
if self.v118 is None:
self.v118 = 0
if self.v119 is None:
self.v119 = 0
if self.v120 is None:
self.v120 = 0
if self.v121 is None:
self.v121 = 0
if self.v122 is None:
self.v122 = 0
if self.v123 is None:
self.v123 = 0
if self.v124 is None:
self.v124 = 0
if self.v125 is None:
self.v125 = 0
if self.v126 is None:
self.v126 = 0
if self.v127 is None:
self.v127 = 0
if self.v128 is None:
self.v128 = 0
if self.v129 is None:
self.v129 = 0
if self.v130 is None:
self.v130 = 0
if self.v131 is None:
self.v131 = 0
if self.v132 is None:
self.v132 = 0
if self.v133 is None:
self.v133 = 0
if self.v134 is None:
self.v134 = 0
if self.v135 is None:
self.v135 = 0
if self.v136 is None:
self.v136 = 0
if self.v137 is None:
self.v137 = 0
if self.v138 is None:
self.v138 = 0
if self.v139 is None:
self.v139 = 0
if self.v140 is None:
self.v140 = 0
if self.v141 is None:
self.v141 = 0
if self.v142 is None:
self.v142 = 0
if self.v143 is None:
self.v143 = 0
if self.v144 is None:
self.v144 = 0
if self.v145 is None:
self.v145 = 0
if self.v146 is None:
self.v146 = 0
if self.v147 is None:
self.v147 = 0
if self.v148 is None:
self.v148 = 0
if self.v149 is None:
self.v149 = 0
if self.v150 is None:
self.v150 = 0
if self.v151 is None:
self.v151 = 0
if self.v152 is None:
self.v152 = 0
if self.v153 is None:
self.v153 = 0
if self.v154 is None:
self.v154 = 0
if self.v155 is None:
self.v155 = 0
if self.v156 is None:
self.v156 = 0
if self.v157 is None:
self.v157 = 0
if self.v158 is None:
self.v158 = 0
if self.v159 is None:
self.v159 = 0
if self.v160 is None:
self.v160 = 0
if self.v161 is None:
self.v161 = 0
if self.v162 is None:
self.v162 = 0
if self.v163 is None:
self.v163 = 0
if self.v164 is None:
self.v164 = 0
if self.v165 is None:
self.v165 = 0
if self.v166 is None:
self.v166 = 0
if self.v167 is None:
self.v167 = 0
if self.v168 is None:
self.v168 = 0
if self.v169 is None:
self.v169 = 0
if self.v170 is None:
self.v170 = 0
if self.v171 is None:
self.v171 = 0
if self.v172 is None:
self.v172 = 0
if self.v173 is None:
self.v173 = 0
if self.v174 is None:
self.v174 = 0
if self.v175 is None:
self.v175 = 0
if self.v176 is None:
self.v176 = 0
if self.v177 is None:
self.v177 = 0
if self.v178 is None:
self.v178 = 0
if self.v179 is None:
self.v179 = 0
if self.v180 is None:
self.v180 = 0
if self.v181 is None:
self.v181 = 0
if self.v182 is None:
self.v182 = 0
if self.v183 is None:
self.v183 = 0
if self.v184 is None:
self.v184 = 0
if self.v185 is None:
self.v185 = 0
if self.v186 is None:
self.v186 = 0
if self.v187 is None:
self.v187 = 0
if self.v188 is None:
self.v188 = 0
if self.v189 is None:
self.v189 = 0
if self.v190 is None:
self.v190 = 0
if self.v191 is None:
self.v191 = 0
if self.v192 is None:
self.v192 = 0
if self.v193 is None:
self.v193 = 0
if self.v194 is None:
self.v194 = 0
if self.v195 is None:
self.v195 = 0
if self.v196 is None:
self.v196 = 0
if self.v197 is None:
self.v197 = 0
if self.v198 is None:
self.v198 = 0
if self.v199 is None:
self.v199 = 0
if self.v200 is None:
self.v200 = 0
if self.v201 is None:
self.v201 = 0
if self.v202 is None:
self.v202 = 0
if self.v203 is None:
self.v203 = 0
if self.v204 is None:
self.v204 = 0
if self.v205 is None:
self.v205 = 0
if self.v206 is None:
self.v206 = 0
if self.v207 is None:
self.v207 = 0
if self.v208 is None:
self.v208 = 0
if self.v209 is None:
self.v209 = 0
if self.v210 is None:
self.v210 = 0
if self.v211 is None:
self.v211 = 0
if self.v212 is None:
self.v212 = 0
if self.v213 is None:
self.v213 = 0
if self.v214 is None:
self.v214 = 0
if self.v215 is None:
self.v215 = 0
if self.v216 is None:
self.v216 = 0
if self.v217 is None:
self.v217 = 0
if self.v218 is None:
self.v218 = 0
if self.v219 is None:
self.v219 = 0
if self.v220 is None:
self.v220 = 0
if self.v221 is None:
self.v221 = 0
if self.v222 is None:
self.v222 = 0
if self.v223 is None:
self.v223 = 0
if self.v224 is None:
self.v224 = 0
if self.v225 is None:
self.v225 = 0
if self.v226 is None:
self.v226 = 0
if self.v227 is None:
self.v227 = 0
if self.v228 is None:
self.v228 = 0
if self.v229 is None:
self.v229 = 0
if self.v230 is None:
self.v230 = 0
if self.v231 is None:
self.v231 = 0
if self.v232 is None:
self.v232 = 0
if self.v233 is None:
self.v233 = 0
if self.v234 is None:
self.v234 = 0
if self.v235 is None:
self.v235 = 0
if self.v236 is None:
self.v236 = 0
if self.v237 is None:
self.v237 = 0
if self.v238 is None:
self.v238 = 0
if self.v239 is None:
self.v239 = 0
if self.v240 is None:
self.v240 = 0
if self.v241 is None:
self.v241 = 0
if self.v242 is None:
self.v242 = 0
if self.v243 is None:
self.v243 = 0
if self.v244 is None:
self.v244 = 0
if self.v245 is None:
self.v245 = 0
if self.v246 is None:
self.v246 = 0
if self.v247 is None:
self.v247 = 0
if self.v248 is None:
self.v248 = 0
if self.v249 is None:
self.v249 = 0
if self.v250 is None:
self.v250 = 0
if self.v251 is None:
self.v251 = 0
if self.v252 is None:
self.v252 = 0
if self.v253 is None:
self.v253 = 0
if self.v254 is None:
self.v254 = 0
if self.v255 is None:
self.v255 = 0
if self.v256 is None:
self.v256 = 0
else:
self.v1 = 0
self.v2 = 0
self.v3 = 0
self.v4 = 0
self.v5 = 0
self.v6 = 0
self.v7 = 0
self.v8 = 0
self.v9 = 0
self.v10 = 0
self.v11 = 0
self.v12 = 0
self.v13 = 0
self.v14 = 0
self.v15 = 0
self.v16 = 0
self.v17 = 0
self.v18 = 0
self.v19 = 0
self.v20 = 0
self.v21 = 0
self.v22 = 0
self.v23 = 0
self.v24 = 0
self.v25 = 0
self.v26 = 0
self.v27 = 0
self.v28 = 0
self.v29 = 0
self.v30 = 0
self.v31 = 0
self.v32 = 0
self.v33 = 0
self.v34 = 0
self.v35 = 0
self.v36 = 0
self.v37 = 0
self.v38 = 0
self.v39 = 0
self.v40 = 0
self.v41 = 0
self.v42 = 0
self.v43 = 0
self.v44 = 0
self.v45 = 0
self.v46 = 0
self.v47 = 0
self.v48 = 0
self.v49 = 0
self.v50 = 0
self.v51 = 0
self.v52 = 0
self.v53 = 0
self.v54 = 0
self.v55 = 0
self.v56 = 0
self.v57 = 0
self.v58 = 0
self.v59 = 0
self.v60 = 0
self.v61 = 0
self.v62 = 0
self.v63 = 0
self.v64 = 0
self.v65 = 0
self.v66 = 0
self.v67 = 0
self.v68 = 0
self.v69 = 0
self.v70 = 0
self.v71 = 0
self.v72 = 0
self.v73 = 0
self.v74 = 0
self.v75 = 0
self.v76 = 0
self.v77 = 0
self.v78 = 0
self.v79 = 0
self.v80 = 0
self.v81 = 0
self.v82 = 0
self.v83 = 0
self.v84 = 0
self.v85 = 0
self.v86 = 0
self.v87 = 0
self.v88 = 0
self.v89 = 0
self.v90 = 0
self.v91 = 0
self.v92 = 0
self.v93 = 0
self.v94 = 0
self.v95 = 0
self.v96 = 0
self.v97 = 0
self.v98 = 0
self.v99 = 0
self.v100 = 0
self.v101 = 0
self.v102 = 0
self.v103 = 0
self.v104 = 0
self.v105 = 0
self.v106 = 0
self.v107 = 0
self.v108 = 0
self.v109 = 0
self.v110 = 0
self.v111 = 0
self.v112 = 0
self.v113 = 0
self.v114 = 0
self.v115 = 0
self.v116 = 0
self.v117 = 0
self.v118 = 0
self.v119 = 0
self.v120 = 0
self.v121 = 0
self.v122 = 0
self.v123 = 0
self.v124 = 0
self.v125 = 0
self.v126 = 0
self.v127 = 0
self.v128 = 0
self.v129 = 0
self.v130 = 0
self.v131 = 0
self.v132 = 0
self.v133 = 0
self.v134 = 0
self.v135 = 0
self.v136 = 0
self.v137 = 0
self.v138 = 0
self.v139 = 0
self.v140 = 0
self.v141 = 0
self.v142 = 0
self.v143 = 0
self.v144 = 0
self.v145 = 0
self.v146 = 0
self.v147 = 0
self.v148 = 0
self.v149 = 0
self.v150 = 0
self.v151 = 0
self.v152 = 0
self.v153 = 0
self.v154 = 0
self.v155 = 0
self.v156 = 0
self.v157 = 0
self.v158 = 0
self.v159 = 0
self.v160 = 0
self.v161 = 0
self.v162 = 0
self.v163 = 0
self.v164 = 0
self.v165 = 0
self.v166 = 0
self.v167 = 0
self.v168 = 0
self.v169 = 0
self.v170 = 0
self.v171 = 0
self.v172 = 0
self.v173 = 0
self.v174 = 0
self.v175 = 0
self.v176 = 0
self.v177 = 0
self.v178 = 0
self.v179 = 0
self.v180 = 0
self.v181 = 0
self.v182 = 0
self.v183 = 0
self.v184 = 0
self.v185 = 0
self.v186 = 0
self.v187 = 0
self.v188 = 0
self.v189 = 0
self.v190 = 0
self.v191 = 0
self.v192 = 0
self.v193 = 0
self.v194 = 0
self.v195 = 0
self.v196 = 0
self.v197 = 0
self.v198 = 0
self.v199 = 0
self.v200 = 0
self.v201 = 0
self.v202 = 0
self.v203 = 0
self.v204 = 0
self.v205 = 0
self.v206 = 0
self.v207 = 0
self.v208 = 0
self.v209 = 0
self.v210 = 0
self.v211 = 0
self.v212 = 0
self.v213 = 0
self.v214 = 0
self.v215 = 0
self.v216 = 0
self.v217 = 0
self.v218 = 0
self.v219 = 0
self.v220 = 0
self.v221 = 0
self.v222 = 0
self.v223 = 0
self.v224 = 0
self.v225 = 0
self.v226 = 0
self.v227 = 0
self.v228 = 0
self.v229 = 0
self.v230 = 0
self.v231 = 0
self.v232 = 0
self.v233 = 0
self.v234 = 0
self.v235 = 0
self.v236 = 0
self.v237 = 0
self.v238 = 0
self.v239 = 0
self.v240 = 0
self.v241 = 0
self.v242 = 0
self.v243 = 0
self.v244 = 0
self.v245 = 0
self.v246 = 0
self.v247 = 0
self.v248 = 0
self.v249 = 0
self.v250 = 0
self.v251 = 0
self.v252 = 0
self.v253 = 0
self.v254 = 0
self.v255 = 0
self.v256 = 0
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
        # Wire layout: fields v1..v255 packed as one block of 255
        # little-endian int32s, then v256 packed on its own — the same
        # layout the hand-expanded argument list produced.
        values = [getattr(self, 'v%d' % n) for n in range(1, 256)]
        buff.write(_struct_255i.pack(*values))
        buff.write(_struct_i.pack(self.v256))
    except struct.error as se: self._check_types(se)
    except TypeError as te: self._check_types(te)
def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    # NOTE: the parameter name `str` shadows the builtin; it is kept because
    # this is the generated-code interface callers may rely on.
    try:
        end = 0
        start = end
        end += 1020  # 255 int32 fields * 4 bytes
        # Decode v1..v255 from one 255-int32 block and assign by name.
        for index, value in enumerate(_struct_255i.unpack(str[start:end]), 1):
            setattr(self, 'v%d' % index, value)
        start = end
        end += 4
        (self.v256,) = _struct_i.unpack(str[start:end])
        return self
    except struct.error as e:
        raise genpy.DeserializationError(e)  # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
        # This message has no array-typed fields, so `numpy` goes unused and
        # the layout matches serialize(): v1..v255 in one 255-int32 pack,
        # then v256 separately.
        values = [getattr(self, 'v%d' % n) for n in range(1, 256)]
        buff.write(_struct_255i.pack(*values))
        buff.write(_struct_i.pack(self.v256))
    except struct.error as se: self._check_types(se)
    except TypeError as te: self._check_types(te)
def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    # NOTE: parameter name `str` shadows the builtin; kept for interface
    # compatibility with generated-code callers.
    try:
        end = 0
        start = end
        end += 1020  # 255 int32 fields * 4 bytes
        # No array-typed fields exist, so `numpy` is unused; decode v1..v255
        # from one 255-int32 block and assign by name.
        for index, value in enumerate(_struct_255i.unpack(str[start:end]), 1):
            setattr(self, 'v%d' % index, value)
        start = end
        end += 4
        (self.v256,) = _struct_i.unpack(str[start:end])
        return self
    except struct.error as e:
        raise genpy.DeserializationError(e)  # most likely buffer underfill
# Pre-compiled struct formats shared by the (de)serialization methods above.
_struct_I = genpy.struct_I
_struct_i = struct.Struct("<i")  # one little-endian int32 (field v256)
_struct_255i = struct.Struct("<255i")  # fields v1..v255 in a single block
| 32.735866
| 2,232
| 0.578487
| 6,546
| 37,057
| 3.109685
| 0.059578
| 0.075457
| 0.125761
| 0.248575
| 0.429259
| 0.429259
| 0.423364
| 0.423364
| 0.419532
| 0.414522
| 0
| 0.302483
| 0.268721
| 37,057
| 1,131
| 2,233
| 32.76481
| 0.448688
| 0.063281
| 0
| 0.505535
| 1
| 0
| 0.143647
| 0.000926
| 0
| 0
| 0.000289
| 0
| 0
| 1
| 0.005535
| false
| 0
| 0.002768
| 0
| 0.017528
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d6bd7accf3d4f7cc0e1580ef5dc4d91664e5c335
| 122
|
py
|
Python
|
model/networkmodel.py
|
boti996/szdoga
|
a15827e78e4f6a7acbbf4469baecbfb7499b38ed
|
[
"MIT"
] | null | null | null |
model/networkmodel.py
|
boti996/szdoga
|
a15827e78e4f6a7acbbf4469baecbfb7499b38ed
|
[
"MIT"
] | null | null | null |
model/networkmodel.py
|
boti996/szdoga
|
a15827e78e4f6a7acbbf4469baecbfb7499b38ed
|
[
"MIT"
] | null | null | null |
from abc import ABC, abstractmethod
class NetworkModel(ABC):
    """Abstract base class for network models.

    Concrete subclasses must implement :meth:`get_model`.
    """

    @abstractmethod
    def get_model(self):
        """Return the model instance; must be overridden by subclasses."""
        pass
| 13.555556
| 35
| 0.688525
| 14
| 122
| 5.928571
| 0.785714
| 0.409639
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.245902
| 122
| 8
| 36
| 15.25
| 0.902174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.2
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
d6ca17204714f31ac1d287dd6337d8580ded9dc1
| 5,195
|
py
|
Python
|
nats_messenger/tests/test_durable_messenger.py
|
tombenke/py-msgp
|
3f8e6bfa687424e33390d45032ff19a7142635ef
|
[
"MIT"
] | 1
|
2022-01-20T11:19:09.000Z
|
2022-01-20T11:19:09.000Z
|
nats_messenger/tests/test_durable_messenger.py
|
tombenke/py-msgp
|
3f8e6bfa687424e33390d45032ff19a7142635ef
|
[
"MIT"
] | null | null | null |
nats_messenger/tests/test_durable_messenger.py
|
tombenke/py-msgp
|
3f8e6bfa687424e33390d45032ff19a7142635ef
|
[
"MIT"
] | null | null | null |
"""Test the messenger module"""
import unittest
import asyncio
from loguru import logger
from nats_messenger.messenger import Messenger
from nats_messenger.tests.config_test import (
URL,
CREDENTIALS,
CLUSTER_ID,
CLIENT_ID,
TEST_PAYLOAD,
TEST_TOPIC,
)
class MessengerDurableTestCase(unittest.TestCase):
    """The Messenger test cases for durable functions"""

    def test_publish_subscribe_durable(self) -> None:
        """Test the Messenger's synchronous publish and subscribe methods with durable subject"""

        async def run():
            messenger = Messenger(URL, CREDENTIALS, CLUSTER_ID, CLIENT_ID, logger)
            await messenger.open()
            total_messages = 0
            # Resolved by the subscriber callback once both messages arrive.
            callback_called = asyncio.Future()

            async def callback(msg: bytes):
                nonlocal total_messages
                nonlocal callback_called
                logger.debug(f"Received a message: '{msg}'")
                self.assertEqual(TEST_PAYLOAD, msg)
                total_messages += 1
                if total_messages >= 2:
                    callback_called.set_result(None)

            subscriber = await messenger.subscribe_durable(
                TEST_TOPIC, callback=callback
            )
            logger.debug("Publish messages")
            await messenger.publish_durable(TEST_TOPIC, TEST_PAYLOAD)
            await messenger.publish_durable(TEST_TOPIC, TEST_PAYLOAD)
            logger.debug("Wait for callbacks")
            # Fail the test if both callbacks have not fired within 1 second.
            await asyncio.wait_for(callback_called, 1)
            logger.debug("Unsubscribe")
            await subscriber.unsubscribe()
            logger.debug("Close messenger")
            await messenger.close()

        asyncio.run(run())

    def test_publish_async_subscribe_durable(self) -> None:
        """Test the Messenger's asynchronous publish and subscribe methods with durable subject"""

        async def run():
            messenger = Messenger(URL, CREDENTIALS, CLUSTER_ID, CLIENT_ID, logger)
            await messenger.open()
            total_messages = 0
            # Resolved by the subscriber callback once both messages arrive.
            callback_called = asyncio.Future()
            total_ack_messages = 0
            # Resolved by ack_handler once both publishes are acknowledged.
            ack_called = asyncio.Future()

            async def callback(msg: bytes):
                nonlocal total_messages
                nonlocal callback_called
                logger.debug(f"Received a message: '{msg}'")
                self.assertEqual(TEST_PAYLOAD, msg)
                total_messages += 1
                if total_messages >= 2:
                    callback_called.set_result(None)

            subscriber = await messenger.subscribe_durable(
                TEST_TOPIC, callback=callback
            )

            async def ack_handler(ack):
                nonlocal total_ack_messages
                nonlocal ack_called
                logger.debug(f"the ack_handler of publish_async is called with '{ack}'")
                total_ack_messages += 1
                if total_ack_messages >= 2:
                    ack_called.set_result(None)

            logger.debug("Publish messages")
            await messenger.publish_async_durable(
                TEST_TOPIC, TEST_PAYLOAD, ack_handler=ack_handler
            )
            await messenger.publish_async_durable(
                TEST_TOPIC, TEST_PAYLOAD, ack_handler=ack_handler
            )
            logger.debug("Wait for publish acknowledgements")
            # Acks are awaited first, then the delivery callbacks; each wait
            # has a 1-second timeout.
            await asyncio.wait_for(ack_called, 1)
            logger.debug("Wait for callbacks")
            await asyncio.wait_for(callback_called, 1)
            logger.debug("Unsubscribe")
            await subscriber.unsubscribe()
            logger.debug("Close messenger")
            await messenger.close()

        asyncio.run(run())

    def test_publish_subscribe_durable_with_ack(self) -> None:
        """Test the Messenger's synchronous publish and subscribe methods with durable subject using manual acknowledge"""

        async def run():
            messenger = Messenger(URL, CREDENTIALS, CLUSTER_ID, CLIENT_ID, logger)
            await messenger.open()
            total_messages = 0
            # Resolved by the subscriber callback once both messages arrive.
            callback_called = asyncio.Future()

            async def callback(msg: bytes) -> bool:
                nonlocal total_messages
                nonlocal callback_called
                total_messages += 1
                if total_messages >= 2:
                    callback_called.set_result(None)
                logger.debug(
                    f"Received a message: '{msg}' total_messages: {total_messages}"
                )
                self.assertEqual(TEST_PAYLOAD, msg)
                # Returning True presumably acknowledges the message in
                # manual-ack mode — confirm against Messenger's contract.
                return True

            subscriber = await messenger.subscribe_durable_with_ack(
                TEST_TOPIC, callback=callback
            )
            logger.debug("Publish messages")
            await messenger.publish_durable(TEST_TOPIC, TEST_PAYLOAD)
            await messenger.publish_durable(TEST_TOPIC, TEST_PAYLOAD)
            logger.debug("Wait for callbacks")
            await asyncio.wait_for(callback_called, 1)
            await subscriber.unsubscribe()
            logger.debug("Close messenger")
            await messenger.close()

        asyncio.run(run())
| 35.101351
| 122
| 0.597498
| 527
| 5,195
| 5.683112
| 0.149905
| 0.058765
| 0.042738
| 0.040067
| 0.802003
| 0.768948
| 0.737896
| 0.718865
| 0.699833
| 0.699833
| 0
| 0.004592
| 0.329355
| 5,195
| 147
| 123
| 35.340136
| 0.855052
| 0.067372
| 0
| 0.657658
| 0
| 0
| 0.076987
| 0
| 0
| 0
| 0
| 0
| 0.027027
| 1
| 0.027027
| false
| 0
| 0.045045
| 0
| 0.09009
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ba3caacd98b30eba31b58caebcf17246a14551c8
| 128
|
py
|
Python
|
ex1.py
|
keerthana1502/python_practice
|
8c0499e014826af78f9a88730551ace3fa79686d
|
[
"bzip2-1.0.6"
] | null | null | null |
ex1.py
|
keerthana1502/python_practice
|
8c0499e014826af78f9a88730551ace3fa79686d
|
[
"bzip2-1.0.6"
] | null | null | null |
ex1.py
|
keerthana1502/python_practice
|
8c0499e014826af78f9a88730551ace3fa79686d
|
[
"bzip2-1.0.6"
] | null | null | null |
# Print "1 " for each diagonal cell of a 5x5 grid; print a newline when
# i == -j.
# NOTE(review): the original read `elif(i=-j):`, which is a SyntaxError
# (assignment typo). The minimal valid fix is `i == -j`; with i, j >= 0 that
# branch only matches i == j == 0, which the first branch already handles —
# the intended condition may have been `i != j`; confirm with the author.
for i in range(5):
    for j in range(5):
        if i == j:
            print("1", end=" ")
        elif i == -j:
            print()
| 18.285714
| 30
| 0.375
| 20
| 128
| 2.4
| 0.55
| 0.291667
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040541
| 0.421875
| 128
| 6
| 31
| 21.333333
| 0.608108
| 0
| 0
| 0
| 0
| 0
| 0.015625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ba64836a8d42f8c4e9f67ebcfdd272084c5ab82e
| 111
|
py
|
Python
|
cttools/__init__.py
|
nik849/ct-tools
|
1218f62de1b8974651506ebf8fd9197c1b038a0a
|
[
"MIT"
] | null | null | null |
cttools/__init__.py
|
nik849/ct-tools
|
1218f62de1b8974651506ebf8fd9197c1b038a0a
|
[
"MIT"
] | 4
|
2021-06-08T20:42:17.000Z
|
2022-03-12T00:07:59.000Z
|
cttools/__init__.py
|
nik849/ct-tools
|
1218f62de1b8974651506ebf8fd9197c1b038a0a
|
[
"MIT"
] | null | null | null |
# Package version string.
__version__ = '0.0.1'

# Re-export each submodule's public names at package level.
# NOTE(review): star imports obscure what is exported; consider explicit
# imports or defining __all__ in the submodules.
from .recon import *
from .config import *
from .parse import *
from .utilities import *
| 18.5
| 24
| 0.711712
| 16
| 111
| 4.6875
| 0.5625
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032609
| 0.171171
| 111
| 5
| 25
| 22.2
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0.045045
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
babe8b95850690a2a05a8ea7152e079552a4a989
| 218
|
py
|
Python
|
boaapi/status.py
|
boalang/api-python
|
f68ba63bbec5c8c5ed769eb1d0788561fc358a3b
|
[
"Apache-2.0"
] | 3
|
2020-08-19T09:55:00.000Z
|
2021-12-29T16:30:11.000Z
|
boaapi/status.py
|
boalang/api-python
|
f68ba63bbec5c8c5ed769eb1d0788561fc358a3b
|
[
"Apache-2.0"
] | 1
|
2022-02-06T19:11:49.000Z
|
2022-02-07T01:28:45.000Z
|
boaapi/status.py
|
boalang/api-python
|
f68ba63bbec5c8c5ed769eb1d0788561fc358a3b
|
[
"Apache-2.0"
] | 1
|
2019-09-02T17:53:21.000Z
|
2019-09-02T17:53:21.000Z
|
from enum import Enum
class CompilerStatus(Enum):
    """Lifecycle states of a job's compilation phase."""
    WAITING = 1
    RUNNING = 2
    FINISHED = 3
    ERROR = 4
class ExecutionStatus(Enum):
    """Lifecycle states of a job's execution phase (values mirror CompilerStatus)."""
    WAITING = 1
    RUNNING = 2
    FINISHED = 3
    ERROR = 4
| 14.533333
| 28
| 0.582569
| 26
| 218
| 4.884615
| 0.538462
| 0.173228
| 0.188976
| 0.299213
| 0.551181
| 0.551181
| 0.551181
| 0.551181
| 0.551181
| 0
| 0
| 0.056738
| 0.353211
| 218
| 14
| 29
| 15.571429
| 0.843972
| 0
| 0
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
2458cea0079732ecbd9cb5ab67b6c195d92758a3
| 109
|
py
|
Python
|
src/news/models/__init__.py
|
thimmy687/tunews
|
422a9b14cc0be28139e29a2bdda56840a1c8e1e7
|
[
"MIT"
] | 3
|
2021-05-12T21:47:09.000Z
|
2021-10-30T12:58:58.000Z
|
src/news/models/__init__.py
|
thimmy687/tunews
|
422a9b14cc0be28139e29a2bdda56840a1c8e1e7
|
[
"MIT"
] | 8
|
2020-04-06T11:57:24.000Z
|
2020-06-04T08:44:23.000Z
|
src/news/models/__init__.py
|
thimmy687/tunews
|
422a9b14cc0be28139e29a2bdda56840a1c8e1e7
|
[
"MIT"
] | 1
|
2020-04-15T17:56:35.000Z
|
2020-04-15T17:56:35.000Z
|
from .newslanguage import NewsLanguage
from .newscategory import NewsCategory
from .newsitem import NewsItem
| 27.25
| 38
| 0.862385
| 12
| 109
| 7.833333
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110092
| 109
| 3
| 39
| 36.333333
| 0.969072
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0324d9ece5af63a368124e9fa17ba3a69c38e54b
| 167
|
py
|
Python
|
frites/dataset/ds_fmri.py
|
StanSStanman/frites
|
53f4745979dc2e7b27145cd63eab6a82fe893ec7
|
[
"BSD-3-Clause"
] | null | null | null |
frites/dataset/ds_fmri.py
|
StanSStanman/frites
|
53f4745979dc2e7b27145cd63eab6a82fe893ec7
|
[
"BSD-3-Clause"
] | null | null | null |
frites/dataset/ds_fmri.py
|
StanSStanman/frites
|
53f4745979dc2e7b27145cd63eab6a82fe893ec7
|
[
"BSD-3-Clause"
] | null | null | null |
"""Dataset representation of fMRI data."""
class DatasetFMRI(object):
    """Container for fMRI dataset representations (stub, no behavior yet)."""

    def __init__(self):
        """Create an empty DatasetFMRI; no state is initialized yet."""
        pass
| 16.7
| 42
| 0.592814
| 16
| 167
| 5.9375
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.245509
| 167
| 9
| 43
| 18.555556
| 0.753968
| 0.413174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.333333
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
034d20fd175ab2693519395e5e5f96e1611dc15d
| 100
|
py
|
Python
|
chain/p2p/start.py
|
tsifrer/ark
|
c678091e226d79fabe4a2c554e1d8e704a5b5cec
|
[
"MIT"
] | 5
|
2019-02-01T01:22:27.000Z
|
2019-05-24T12:20:38.000Z
|
chain/p2p/start.py
|
tsifrer/ark
|
c678091e226d79fabe4a2c554e1d8e704a5b5cec
|
[
"MIT"
] | 15
|
2019-03-29T13:12:10.000Z
|
2019-08-25T19:19:35.000Z
|
chain/p2p/start.py
|
tsifrer/ark
|
c678091e226d79fabe4a2c554e1d8e704a5b5cec
|
[
"MIT"
] | 4
|
2019-01-31T13:52:03.000Z
|
2020-08-12T02:12:03.000Z
|
from chain.p2p.websocket_server import start_server
# Script entry point: launch the websocket server when run directly.
if __name__ == "__main__":
    start_server()
| 16.666667
| 51
| 0.76
| 13
| 100
| 5
| 0.769231
| 0.338462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011765
| 0.15
| 100
| 5
| 52
| 20
| 0.752941
| 0
| 0
| 0
| 0
| 0
| 0.08
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
30382846d676ea90b88e0072451aedfdb11e25dd
| 125
|
py
|
Python
|
python/dc/roms_pv.py
|
subond/tools
|
05b93e6c78eab65ef6587e684303b12c686a3480
|
[
"MIT"
] | null | null | null |
python/dc/roms_pv.py
|
subond/tools
|
05b93e6c78eab65ef6587e684303b12c686a3480
|
[
"MIT"
] | null | null | null |
python/dc/roms_pv.py
|
subond/tools
|
05b93e6c78eab65ef6587e684303b12c686a3480
|
[
"MIT"
] | 1
|
2021-12-04T15:41:53.000Z
|
2021-12-04T15:41:53.000Z
|
def roms_pv(fname,tindices):
    """Read ROMS velocity fields (u, v) from *fname*.

    :param fname: path/name of the ROMS output file
    :param tindices: time indices -- unused in the visible body; TODO confirm
    """
    import h5py  # imported but unused in the visible body -- TODO confirm
    # NOTE(review): dc_roms_read_data is not defined or imported here; it must
    # come from the surrounding project scope -- verify. The function body may
    # also be truncated (u and v are read but never used/returned here).
    u = dc_roms_read_data(fname,'u')
    v = dc_roms_read_data(fname,'v')
| 17.857143
| 36
| 0.648
| 21
| 125
| 3.52381
| 0.571429
| 0.162162
| 0.27027
| 0.378378
| 0.513514
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010309
| 0.224
| 125
| 6
| 37
| 20.833333
| 0.752577
| 0
| 0
| 0
| 0
| 0
| 0.016
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3058406d71c3bd564d8b8aecb2fd9d384c4a2543
| 305
|
py
|
Python
|
uontypes/units/mass.py
|
uon-language/uon-parser
|
666894cf4917d8da01512918a147882550382269
|
[
"MIT"
] | 1
|
2020-05-29T06:47:16.000Z
|
2020-05-29T06:47:16.000Z
|
uontypes/units/mass.py
|
Cell00phane/uon-parser
|
666894cf4917d8da01512918a147882550382269
|
[
"MIT"
] | 1
|
2020-07-16T08:19:08.000Z
|
2020-07-16T08:19:08.000Z
|
uontypes/units/mass.py
|
uon-language/uon-parser
|
666894cf4917d8da01512918a147882550382269
|
[
"MIT"
] | null | null | null |
from uontypes.units.quantity import Quantity
class Mass(Quantity):
    """Base class for mass quantities; concrete units subclass this."""
    pass
class Kilogram(Mass):
    """Kilogram mass unit: prints as "kg", binary tag byte 0x21."""

    def __str__(self):
        return "kg"

    def to_binary(self):
        # Single-byte tag identifying the kilogram unit in the binary format.
        return b"\x21"
class Gram(Mass):
    """Gram mass unit: prints as "g", binary tag byte 0x69."""

    def __str__(self):
        return "g"

    def to_binary(self):
        # Single-byte tag identifying the gram unit in the binary format.
        return b"\x69"
| 13.863636
| 44
| 0.606557
| 40
| 305
| 4.375
| 0.525
| 0.228571
| 0.114286
| 0.16
| 0.48
| 0.251429
| 0
| 0
| 0
| 0
| 0
| 0.018349
| 0.285246
| 305
| 21
| 45
| 14.52381
| 0.784404
| 0
| 0
| 0.307692
| 0
| 0
| 0.036066
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.307692
| false
| 0.076923
| 0.076923
| 0.307692
| 0.923077
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 5
|
3079c0ec4f06a3b22ee375fd5b4cd5a9fabbb751
| 21
|
py
|
Python
|
__init__.py
|
AnthonyJFeola/DS-Unit-3-Sprint-Challenge-1-Software-Engineering
|
5a956e01828acc12b4f0580a3715974b5403a6c4
|
[
"MIT"
] | 17
|
2015-06-11T21:05:11.000Z
|
2021-04-23T17:54:09.000Z
|
__init__.py
|
AnthonyJFeola/DS-Unit-3-Sprint-Challenge-1-Software-Engineering
|
5a956e01828acc12b4f0580a3715974b5403a6c4
|
[
"MIT"
] | 134
|
2015-01-07T14:29:54.000Z
|
2022-01-10T15:56:24.000Z
|
__init__.py
|
AnthonyJFeola/DS-Unit-3-Sprint-Challenge-1-Software-Engineering
|
5a956e01828acc12b4f0580a3715974b5403a6c4
|
[
"MIT"
] | 22
|
2015-01-30T18:30:32.000Z
|
2022-01-20T20:04:10.000Z
|
# Nothing to see here
| 21
| 21
| 0.761905
| 4
| 21
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 21
| 1
| 21
| 21
| 0.941176
| 0.904762
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0672acf71c0ab31701749cd2541d300d25d0a5bb
| 23,657
|
py
|
Python
|
optimization/first_sdEta_mjj_optimization/sdEta_mistake_analyses/sdEta_mmjj_gridsearch/analysis_deltaeta6.1_mmjj_750/Output/Histos/MadAnalysis5job_0/selection_6.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
optimization/first_sdEta_mjj_optimization/sdEta_mistake_analyses/sdEta_mmjj_gridsearch/analysis_deltaeta6.1_mmjj_750/Output/Histos/MadAnalysis5job_0/selection_6.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
optimization/first_sdEta_mjj_optimization/sdEta_mistake_analyses/sdEta_mmjj_gridsearch/analysis_deltaeta6.1_mmjj_750/Output/Histos/MadAnalysis5job_0/selection_6.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
def selection_6():
    """Plot the stacked Delta-R(j1, j2) distribution for selection 6.

    Auto-generated MadAnalysis5 plotting routine: builds one weighted
    histogram per sample, overlays them as cumulative stacks (deepest
    stack drawn first so the legend reads top-down), and writes the
    figure to the HTML, PDF and DVI output folders.

    Takes no arguments and returns nothing; the only side effects are
    the three image files written at the end.
    """
    # Library import (kept function-local, as in the generated original)
    import numpy
    import matplotlib
    import matplotlib.pyplot as plt
    import matplotlib.gridspec as gridspec
    # Histo binning: 75 uniform bins on [0, 15]
    xBinning = numpy.linspace(0.0,15.0,76,endpoint=True)
    # Creating data sequence: middle of each bin
    xData = numpy.array([0.1,0.3,0.5,0.7,0.9,1.1,1.3,1.5,1.7,1.9,2.1,2.3,2.5,2.7,2.9,3.1,3.3,3.5,3.7,3.9,4.1,4.3,4.5,4.7,4.9,5.1,5.3,5.5,5.7,5.9,6.1,6.3,6.5,6.7,6.9,7.1,7.3,7.5,7.7,7.9,8.1,8.3,8.5,8.7,8.9,9.1,9.3,9.5,9.7,9.9,10.1,10.3,10.5,10.7,10.9,11.1,11.3,11.5,11.7,11.9,12.1,12.3,12.5,12.7,12.9,13.1,13.3,13.5,13.7,13.9,14.1,14.3,14.5,14.7,14.9])
    # Creating weights for histo: y7_DELTAR_0
    y7_DELTAR_0_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,106.208708076,196.29087011,253.366460711,299.961260383,337.450787936,373.417316807,397.674735812,418.931237414,344.406621916,262.320252961,201.527185578,160.856540778,129.356688042,105.856628381,86.1231654603,72.0231376639,60.2690278371,50.0051567205,42.038043616,35.8232329949,29.9072821152,25.3792260342,20.8225099781,17.5799967845,15.0866989424,11.6067299544,9.80123551702,7.49626951197,6.10018672028,4.73275990379,3.57413530658,2.72666004007,1.92012553813,1.44930554562,0.925262799184,0.552701521636,0.364373484634,0.13101068661,0.0163763338263,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y7_DELTAR_1
    y7_DELTAR_1_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0485729053353,0.77773071928,2.72191551768,7.78648410534,20.92014004,44.5525684517,88.4046713014,145.092755237,191.986778273,221.756746615,238.531790226,242.541792568,239.745097431,234.044436218,225.801117315,220.454193835,207.091431533,195.558764296,185.408882732,178.03582835,161.176506153,145.664760086,128.952925414,117.743873527,101.204962169,85.1253771145,72.5633815443,59.9164664866,46.2965022836,35.5309864809,26.0040254077,19.6489433811,14.6536626861,9.94101968139,7.2659837264,4.83560802479,3.34137073271,1.82219475022,1.05748154746,0.425182259955,0.255060887775,0.0486514959176,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y7_DELTAR_2
    y7_DELTAR_2_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.301065617111,1.42556583629,3.91508658006,7.98143551265,16.7370252218,32.9929906598,64.0747913751,133.65317609,281.462864792,480.355672947,489.239287697,500.128213503,504.767328941,509.68825401,504.453702093,499.709217951,483.689869301,464.643339774,451.447459173,428.251881983,398.038459878,369.243259204,341.892105623,312.424363372,280.293726712,250.511283266,214.588695546,180.620511335,148.243229266,118.885277072,89.3875770517,65.1906666081,44.4765843725,30.892422164,20.0703579659,13.4845453964,8.34367658762,5.14110020671,3.20267927035,1.76731249178,0.883624015461,0.371539015992,0.110484746837,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y7_DELTAR_3
    y7_DELTAR_3_weights = numpy.array([0.0,0.0,0.0,0.0824868914374,0.462082659552,1.42998230494,2.72791864876,4.06463298939,5.97334797086,9.14134439507,13.9654901883,22.5794608144,40.2844487785,77.2638545603,172.551523135,362.406729986,361.007623176,366.669417089,367.96350659,370.317438085,363.618471291,348.272659595,329.101292804,312.526148895,288.097985311,262.074005514,236.068144756,208.630107909,181.035621678,156.060514949,128.980390091,105.305344834,81.3801268152,62.6480045684,44.3559393779,30.2406949132,18.2291644944,10.4627099583,6.08308191796,3.44855432924,1.95232499327,0.792079544842,0.324676265598,0.176061051285,0.0275332836871,0.0164857349758,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y7_DELTAR_4
    y7_DELTAR_4_weights = numpy.array([0.0,0.0,0.0345308579793,0.173671421931,0.241827866128,0.336542777947,0.513200440691,0.795369954677,1.11911620113,1.60855610399,2.56769090999,3.99161683662,6.82405788995,13.2719546961,31.5464128955,85.080025353,86.8294178983,86.1723735714,85.4711174404,82.3001730982,78.0545572426,71.9156901245,65.4876224929,58.1522709216,50.7102979551,42.8029987568,35.3414531485,28.4700208875,21.8035502213,15.7146308177,10.7035895861,6.96813706473,4.25928423753,2.35359234267,1.23651878301,0.560519897697,0.259549544502,0.101647907915,0.0335517529058,0.0128315361842,0.000987667653345,0.000986323662601,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y7_DELTAR_5
    y7_DELTAR_5_weights = numpy.array([0.0,0.0,0.0257116480221,0.0378142017046,0.057975420775,0.0922630221412,0.126792083235,0.180739507646,0.295678018787,0.430544499301,0.624129984523,1.01185949313,1.76506540581,3.44689022127,8.53282667631,27.2688372078,27.5877801013,26.3514342794,25.1909952337,23.2315105364,21.1460546789,18.8754690278,16.1034593447,13.5030201012,10.8988919452,8.3651812076,6.13429737714,4.31329493477,2.82763024139,1.68768986623,0.968978486099,0.476194191189,0.208732551994,0.0826807718799,0.0267190092025,0.00907671321185,0.00327711462565,0.000504083489874,0.000504337552719,0.000251541981316,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y7_DELTAR_6
    y7_DELTAR_6_weights = numpy.array([0.0,0.0,0.0105965838232,0.0220445819147,0.0246335330716,0.036659517193,0.0541441594077,0.0901870935238,0.126800446209,0.180636179124,0.273406045878,0.440072413223,0.789007111946,1.5556659239,4.05313578167,15.7287614821,15.6981317614,14.1111543455,12.8958322034,11.4667717911,9.68266353166,8.00078696361,6.2318146246,4.70749718025,3.36936125403,2.26604552297,1.3892747642,0.809907397883,0.410813332469,0.200961557997,0.0735794369958,0.0274838165264,0.0100306237574,0.00286040005965,0.00143093578291,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y7_DELTAR_7
    y7_DELTAR_7_weights = numpy.array([0.0,0.0,0.0015120887282,0.0025699127657,0.00399414868973,0.00557153368721,0.00893884523264,0.0116833312118,0.0174252186633,0.025588088418,0.042047766521,0.0680626739318,0.126317601203,0.267415051074,0.782551498671,3.75849153154,3.6196591648,3.06057041504,2.53501136641,2.01994645834,1.52943046313,1.09714814706,0.727797146796,0.444742431606,0.239862400031,0.11778616667,0.0529282289242,0.0188474345491,0.0065427404128,0.00159827730584,0.000626337163876,0.000237723045424,6.47734900894e-05,4.32688838282e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y7_DELTAR_8
    y7_DELTAR_8_weights = numpy.array([0.0,0.0,0.00031204174528,0.000596577177178,0.000907172863031,0.0012498475166,0.00212951332377,0.00269294636746,0.00348263915304,0.00552912508447,0.0108297065501,0.0179724189316,0.0355705077542,0.0859228672158,0.292804462415,1.92345356789,1.72205214886,1.2494041059,0.905775510474,0.601716642854,0.384017281713,0.215347918235,0.115560031875,0.0524788003918,0.0193568819884,0.00654326743897,0.00187024873815,0.00059551839676,0.000141957358155,0.000113491474901,0.0,2.84511078463e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y7_DELTAR_9
    y7_DELTAR_9_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,52.1293724655,623.023856931,1855.82327085,4566.51833926,9386.42858355,19413.5543961,38106.0119859,64902.3270658,83204.3916249,89502.006032,85263.5805681,75039.2721871,60592.6882406,48059.7903435,36387.5974335,27282.8499936,20357.2775511,14928.3738663,11033.3606097,7746.76785681,5620.15308809,3810.65849228,2512.84766258,1686.36892439,1165.06174149,654.201620206,414.453687639,208.582473977,143.334923608,88.5729254516,65.1565341562,15.6315019881,2.60827125955,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y7_DELTAR_10
    y7_DELTAR_10_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,4.20992814798,127.4648631,629.762222343,1568.32017551,3495.7659018,6676.35404169,12275.5619029,21963.0595621,44475.1038477,96967.2038093,163208.238756,128648.926673,96752.0069688,72410.9088154,54111.5591095,40549.3101727,30397.4136534,22808.6373206,16723.137697,12605.3097952,8936.87122606,6457.42886693,4503.57006589,3210.27360713,2163.33886219,1418.7643352,979.525383143,601.413799653,326.483272026,230.689243054,130.593393458,56.8867634198,28.4423236171,18.9643860493,12.6358829062,2.10485095425,2.10713066331,1.05142452132,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y7_DELTAR_11
    y7_DELTAR_11_weights = numpy.array([0.0,0.0,0.0,23.4958481084,143.978089129,410.689887727,791.21498257,1152.34488444,1536.86367229,2040.99460325,2711.3509986,3923.78993543,6256.64995607,11920.9123616,28650.8435723,59123.2937514,35806.2358328,23474.319667,16571.7818896,11942.0834685,8521.0900311,6115.64884732,4342.44761342,3036.68174984,2113.54041719,1427.63267193,993.425565193,623.720020234,407.914974226,234.481151391,148.796533155,85.6869620429,49.286913164,24.644209674,11.7491228537,5.98716981431,2.30385445151,1.61207335708,0.0,0.230404695105,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y7_DELTAR_12
    y7_DELTAR_12_weights = numpy.array([0.0,0.0,9.66472600856,43.7500050963,64.6575651774,83.7935902555,102.179998422,117.602717167,138.705981699,174.368741766,223.494175291,319.032725612,521.055502718,1048.09486042,2847.67903945,8553.2963096,5043.08864714,3097.07275794,2056.65218962,1403.34864431,962.665847282,645.177982676,427.994516118,286.549172234,179.106043651,114.225997868,69.4464573257,40.7922018119,21.3496936379,11.3807081853,5.56627777849,2.60314773211,1.05230364103,0.498288846531,0.221571832062,0.0554093276329,0.0,0.0276409781179,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y7_DELTAR_13
    y7_DELTAR_13_weights = numpy.array([0.0,0.0,7.87346431067,11.3535897502,13.5102053263,15.7677732554,17.2401750961,19.6494264235,23.3193745701,28.6322864583,38.2301997352,55.469064077,94.4802609738,195.743553993,577.640977327,2175.76351158,1291.2252573,753.755318426,487.588274406,315.985901509,207.1243601,133.151274656,83.295521862,49.3109766138,30.0054350719,15.8803585527,8.70110247882,4.6476094243,1.97639171691,1.00805014485,0.393204438829,0.211687362762,0.0302185964871,0.0201784189377,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y7_DELTAR_14
    y7_DELTAR_14_weights = numpy.array([0.0,0.0,3.66388129673,3.92687516306,4.08518088053,4.71938868855,5.07287759159,5.66696071368,6.89153888877,8.6855805805,11.9701260435,18.0021371408,31.7838683233,69.5879169443,221.631197821,1038.29675249,620.115882889,348.41266281,213.528735231,132.984997673,82.0197211869,49.1317526265,29.2762158829,15.7413386628,7.84283521114,3.8703335594,1.86729992669,0.752527346481,0.265961992723,0.118844094956,0.0367957259207,0.0226352937279,0.0169645894799,0.0,0.00282706133047,0.00283195831966,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y7_DELTAR_15
    y7_DELTAR_15_weights = numpy.array([0.0,0.0,0.417498927285,0.406566599605,0.425134824592,0.435821252297,0.441724387838,0.475219131261,0.627590737741,0.865206093423,1.22820597643,1.84640631286,3.61931795606,9.21152887102,32.8721828224,198.789155758,119.815206019,62.8627038934,35.8039462505,20.7399908805,11.4534820142,5.95634730251,2.92169228214,1.40318726438,0.575638134817,0.17360313735,0.0609456175852,0.0137336140531,0.0106506475455,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y7_DELTAR_16
    y7_DELTAR_16_weights = numpy.array([0.0,0.0,0.0543461063394,0.0577787040255,0.0518208291538,0.0613940059203,0.0646422818645,0.0751126888062,0.0994859078122,0.126014412576,0.211267261994,0.38262104881,0.842287199238,2.33311156389,9.92480534291,80.4032157585,48.1682469736,22.9519735956,11.5936449961,5.77188938155,2.74595182721,1.20504016668,0.456496709964,0.145171934789,0.0429785462478,0.00740329084729,0.00270790860536,0.000360835304987,0.000360624139899,0.000360558591298,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Per-sample weights, in the order the generator summed them.
    weight_list = [
        y7_DELTAR_0_weights, y7_DELTAR_1_weights, y7_DELTAR_2_weights,
        y7_DELTAR_3_weights, y7_DELTAR_4_weights, y7_DELTAR_5_weights,
        y7_DELTAR_6_weights, y7_DELTAR_7_weights, y7_DELTAR_8_weights,
        y7_DELTAR_9_weights, y7_DELTAR_10_weights, y7_DELTAR_11_weights,
        y7_DELTAR_12_weights, y7_DELTAR_13_weights, y7_DELTAR_14_weights,
        y7_DELTAR_15_weights, y7_DELTAR_16_weights,
    ]
    # cumulative_weights[k] = sum of the first k+1 samples, accumulated
    # left-to-right exactly as the original chained `+` expressions did.
    cumulative_weights = []
    running_total = None
    for sample_weights in weight_list:
        if running_total is None:
            running_total = sample_weights
        else:
            running_total = running_total + sample_weights
        cumulative_weights.append(running_total)
    # Creating a new Canvas
    fig = plt.figure(figsize=(12,6),dpi=80)
    frame = gridspec.GridSpec(1,1,right=0.7)
    pad = fig.add_subplot(frame[0])
    # Creating a new Stack: one (depth, label, edge colour, line width,
    # line style) entry per overlaid cumulative histogram, deepest first
    # so later (smaller) stacks are drawn on top.
    stack_specs = [
        (17, r"$bg\_dip\_1600\_inf$", "#e5e5e5", 4, "dashdot"),
        (16, r"$bg\_dip\_1200\_1600$", "#f2f2f2", 4, "dashdot"),
        (15, r"$bg\_dip\_800\_1200$", "#ccc6aa", 4, "dashdot"),
        (14, r"$bg\_dip\_600\_800$", "#ccc6aa", 4, "dashdot"),
        (13, r"$bg\_dip\_400\_600$", "#c1bfa8", 4, "dashdot"),
        (12, r"$bg\_dip\_200\_400$", "#bab5a3", 4, "dashdot"),
        (11, r"$bg\_dip\_100\_200$", "#b2a596", 4, "dashdot"),
        (10, r"$bg\_dip\_0\_100$", "#b7a39b", 4, "dashdot"),
        (9, r"$bg\_vbf\_1600\_inf$", "#ad998c", 4, "dashdot"),
        (8, r"$bg\_vbf\_1200\_1600$", "#9b8e82", 4, "dashdot"),
        (7, r"$bg\_vbf\_800\_1200$", "#876656", 4, "dashdot"),
        (6, r"$bg\_vbf\_600\_800$", "#afcec6", 4, "dashdot"),
        (5, r"$bg\_vbf\_400\_600$", "#84c1a3", 4, "dashdot"),
        (4, r"$bg\_vbf\_200\_400$", "#89a8a0", 4, "dashdot"),
        (3, r"$bg\_vbf\_100\_200$", "#829e8c", 4, "dashdot"),
        (2, r"$bg\_vbf\_0\_100$", "#adbcc6", 4, "dashdot"),
        (1, r"$signal$", "#7a8e99", 3, "dashed"),
    ]
    for depth, label, edge, width, style in stack_specs:
        # Fixed: 'density' replaces the 'normed' keyword, which was
        # removed from matplotlib in 3.4 (density=False keeps raw counts,
        # matching the original normed=False behaviour).
        pad.hist(x=xData, bins=xBinning,
                 weights=cumulative_weights[depth - 1],
                 label=label, histtype="step", rwidth=1.0,
                 color=None, edgecolor=edge, linewidth=width,
                 linestyle=style, bottom=None, cumulative=False,
                 density=False, align="mid", orientation="vertical")
    # Axis
    plt.rc('text',usetex=False)
    # NOTE(review): unlike the y-label this label carries no $...$ math
    # markup, so mathtext renders it literally — kept as generated.
    plt.xlabel(r"\Delta R [ j_{1} , j_{2} ] ",
               fontsize=16,color="black")
    plt.ylabel(r"$\mathrm{Events}$ $(\mathcal{L}_{\mathrm{int}} = 40.0\ \mathrm{fb}^{-1})$ ",
               fontsize=16,color="black")
    # Boundary of y-axis: 10% headroom above the tallest total bin.
    ymax = cumulative_weights[-1].max()*1.1
    ymin = 0  # linear scale
    # For a log scale use instead:
    # ymin = min([x for x in cumulative_weights[-1] if x])/100.
    plt.gca().set_ylim(ymin,ymax)
    # Log/Linear scale for X-axis
    plt.gca().set_xscale("linear")
    # Log/Linear scale for Y-axis
    plt.gca().set_yscale("linear")
    # Legend outside the axes (the axes were shrunk via right=0.7 above).
    plt.legend(bbox_to_anchor=(1.05,1), loc=2, borderaxespad=0.)
    # Saving the image
    plt.savefig('../../HTML/MadAnalysis5job_0/selection_6.png')
    # Fixed: the copy written to the PDF folder used a '.png' extension.
    plt.savefig('../../PDF/MadAnalysis5job_0/selection_6.pdf')
    plt.savefig('../../DVI/MadAnalysis5job_0/selection_6.eps')
# Running!
if __name__ == '__main__':
    selection_6()
| 121.943299
| 779
| 0.753815
| 4,493
| 23,657
| 3.806365
| 0.221456
| 0.154836
| 0.225412
| 0.291896
| 0.472401
| 0.472401
| 0.470588
| 0.462344
| 0.451117
| 0.449772
| 0
| 0.442919
| 0.069874
| 23,657
| 193
| 780
| 122.57513
| 0.334303
| 0.059982
| 0
| 0.185841
| 0
| 0.00885
| 0.048329
| 0.009008
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00885
| false
| 0
| 0.035398
| 0
| 0.044248
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
068ceaf5678325d3d7a2846702865a013eb065db
| 293
|
py
|
Python
|
authentication/admin.py
|
nicbou/markdown-notes
|
a5d398b032b7a837909b684bb3121c7b68f49e7b
|
[
"CC0-1.0"
] | 121
|
2015-04-11T20:59:48.000Z
|
2021-05-12T02:15:36.000Z
|
authentication/admin.py
|
nicbou/markdown-notes
|
a5d398b032b7a837909b684bb3121c7b68f49e7b
|
[
"CC0-1.0"
] | 56
|
2015-08-10T08:16:35.000Z
|
2022-03-11T23:12:33.000Z
|
authentication/admin.py
|
nicbou/markdown-notes
|
a5d398b032b7a837909b684bb3121c7b68f49e7b
|
[
"CC0-1.0"
] | 32
|
2015-08-11T02:50:44.000Z
|
2021-09-02T10:15:00.000Z
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
# Widen the admin user changelist with activity/signup columns in
# addition to the defaults.  Patching the class attribute affects every
# registration of UserAdmin in this process.
UserAdmin.list_display = ('username', 'email', 'first_name', 'last_name', 'is_active', 'date_joined')
# The built-in User model is registered by django.contrib.auth at
# startup; unregister it first, then re-register with the patched admin.
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
| 36.625
| 101
| 0.791809
| 41
| 293
| 5.536585
| 0.560976
| 0.132159
| 0.22467
| 0.185022
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081911
| 293
| 8
| 102
| 36.625
| 0.843866
| 0
| 0
| 0
| 0
| 0
| 0.176871
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
ebf9d76d9cd99647188017512ee26862092ecf9b
| 430
|
py
|
Python
|
lightly/active_learning/scorers/__init__.py
|
CodeGuy-007/lightly
|
64143fe8a477c04288009c65fa1265cef8aa48f8
|
[
"MIT"
] | 1,515
|
2020-10-05T13:04:17.000Z
|
2022-03-31T16:14:55.000Z
|
lightly/active_learning/scorers/__init__.py
|
CodeGuy-007/lightly
|
64143fe8a477c04288009c65fa1265cef8aa48f8
|
[
"MIT"
] | 628
|
2020-10-14T11:38:51.000Z
|
2022-03-31T14:40:54.000Z
|
lightly/active_learning/scorers/__init__.py
|
CodeGuy-007/lightly
|
64143fe8a477c04288009c65fa1265cef8aa48f8
|
[
"MIT"
] | 108
|
2020-10-17T08:31:06.000Z
|
2022-03-20T16:44:22.000Z
|
""" Collection of Active Learning Scorers """
# Copyright (c) 2020. Lightly AG and its affiliates.
# All Rights Reserved
from lightly.active_learning.scorers.scorer import Scorer
from lightly.active_learning.scorers.classification import ScorerClassification
from lightly.active_learning.scorers.detection import ScorerObjectDetection
from lightly.active_learning.scorers.semantic_segmentation import ScorerSemanticSegmentation
| 43
| 92
| 0.855814
| 49
| 430
| 7.408163
| 0.530612
| 0.192837
| 0.289256
| 0.275482
| 0.352617
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010204
| 0.088372
| 430
| 9
| 93
| 47.777778
| 0.915816
| 0.255814
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
2324866ef10c5dc7bc1e2347c344724b740507c1
| 54
|
py
|
Python
|
nocd/metrics/__init__.py
|
sckangz/overlapping-community-detection
|
414856fd0d8f61d5bbeec06deb60c3e7b477a342
|
[
"MIT"
] | 91
|
2019-09-27T04:46:09.000Z
|
2022-03-31T11:57:20.000Z
|
nocd/metrics/__init__.py
|
wrnge/overlapping-community-detection
|
414856fd0d8f61d5bbeec06deb60c3e7b477a342
|
[
"MIT"
] | 9
|
2019-12-03T07:28:10.000Z
|
2022-01-07T14:08:24.000Z
|
nocd/metrics/__init__.py
|
wrnge/overlapping-community-detection
|
414856fd0d8f61d5bbeec06deb60c3e7b477a342
|
[
"MIT"
] | 31
|
2019-10-06T03:13:11.000Z
|
2022-03-22T05:41:02.000Z
|
from .supervised import *
from .unsupervised import *
| 18
| 27
| 0.777778
| 6
| 54
| 7
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 54
| 2
| 28
| 27
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
233d7daa47f73f704d7d73258f9969b6032b47d2
| 16,268
|
py
|
Python
|
lib/models/external/modules/dcn_deform_conv.py
|
Zhen-Dong/CoDeNet
|
5f5cb78859a691db532c8318a38c1b124adfb104
|
[
"MIT"
] | 15
|
2021-03-03T03:16:32.000Z
|
2022-03-25T04:14:57.000Z
|
lib/models/external/modules/dcn_deform_conv.py
|
Zhen-Dong/CoDeNet
|
5f5cb78859a691db532c8318a38c1b124adfb104
|
[
"MIT"
] | 3
|
2021-07-12T07:04:12.000Z
|
2022-03-30T12:20:47.000Z
|
lib/models/external/modules/dcn_deform_conv.py
|
Zhen-Dong/CoDeNet
|
5f5cb78859a691db532c8318a38c1b124adfb104
|
[
"MIT"
] | 3
|
2021-03-04T14:50:49.000Z
|
2021-12-16T08:38:56.000Z
|
import math
import torch
import torch.nn as nn
from torch.nn.modules.utils import _pair
import sys
import os
# Put the parent directory on sys.path so the sibling "functions"
# package resolves when this module is imported from elsewhere.
dirname = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(dirname,"../"))
# Project-local deformable-convolution ops (compiled CUDA/C++ extension
# wrappers; not part of stock PyTorch).
from functions.dcn_deform_conv import deform_conv, modulated_deform_conv
class DeformConv(nn.Module):
    """Deformable convolution layer.

    Samples the input at locations displaced by a caller-supplied offset
    field instead of a fixed grid.  The offset tensor is produced by a
    separate branch (see DeformConvPack) and passed to ``forward``
    explicitly.  No bias term is supported by the underlying op.
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size=3,
                 stride=1,
                 padding=1,
                 dilation=1,
                 groups=1,
                 deformable_groups=1,
                 bias=False):
        super(DeformConv, self).__init__()
        # The deform_conv op has no bias; reject a bias request up front.
        assert not bias
        # Fixed: the messages previously read "cannot be divisible by",
        # which is the opposite of the failing condition.
        assert in_channels % groups == 0, \
            'in_channels {} is not divisible by groups {}'.format(
                in_channels, groups)
        assert out_channels % groups == 0, \
            'out_channels {} is not divisible by groups {}'.format(
                out_channels, groups)
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.kernel_size = _pair(kernel_size)
        self.stride = _pair(stride)
        self.padding = _pair(padding)
        self.dilation = _pair(dilation)
        self.groups = groups
        self.deformable_groups = deformable_groups
        # Weight layout matches nn.Conv2d: (out, in // groups, kH, kW).
        self.weight = nn.Parameter(
            torch.Tensor(out_channels, in_channels // self.groups,
                         *self.kernel_size))
        self.reset_parameters()

    def reset_parameters(self):
        """Uniform init in [-1/sqrt(fan_in), 1/sqrt(fan_in)]."""
        n = self.in_channels
        for k in self.kernel_size:
            n *= k
        stdv = 1. / math.sqrt(n)
        self.weight.data.uniform_(-stdv, stdv)

    def forward(self, x, offset):
        # offset: per-location sampling displacements; presumably shaped
        # (N, deformable_groups * 2 * kH * kW, H_out, W_out) — confirm
        # against the deform_conv extension's contract.
        return deform_conv(x, offset, self.weight, self.stride, self.padding,
                           self.dilation, self.groups, self.deformable_groups)
class DeformConvPack(DeformConv):
    """DeformConv that predicts its own offsets with an internal conv.

    The offset branch mirrors the main kernel's geometry (same kernel
    size, stride and padding) and is zero-initialized so the layer
    starts out behaving like a regular convolution.
    """

    def __init__(self, *args, **kwargs):
        super(DeformConvPack, self).__init__(*args, **kwargs)
        kh, kw = self.kernel_size
        offset_channels = self.deformable_groups * 2 * kh * kw
        self.conv_offset = nn.Conv2d(
            self.in_channels,
            offset_channels,
            kernel_size=self.kernel_size,
            stride=_pair(self.stride),
            padding=_pair(self.padding),
            bias=True)
        self.init_offset()

    def init_offset(self):
        """Zero both weight and bias of the offset predictor."""
        for tensor in (self.conv_offset.weight, self.conv_offset.bias):
            tensor.data.zero_()

    def forward(self, x):
        predicted_offset = self.conv_offset(x)
        return deform_conv(x, predicted_offset, self.weight, self.stride,
                           self.padding, self.dilation, self.groups,
                           self.deformable_groups)
class DeformConvPack1x1(DeformConv):
    """DeformConvPack variant whose offset predictor is a 1x1 conv.

    Same contract as DeformConvPack, but the offsets are predicted by a
    pointwise convolution (kernel 1, stride 1, no padding) regardless of
    the main kernel's geometry.
    """

    def __init__(self, *args, **kwargs):
        super(DeformConvPack1x1, self).__init__(*args, **kwargs)
        kh, kw = self.kernel_size
        self.conv_offset = nn.Conv2d(
            self.in_channels,
            self.deformable_groups * 2 * kh * kw,
            kernel_size=1,
            stride=1,
            padding=0,
            bias=True)
        self.init_offset()

    def init_offset(self):
        """Zero-init: start from plain-convolution behaviour."""
        self.conv_offset.weight.data.zero_()
        self.conv_offset.bias.data.zero_()

    def forward(self, x):
        return deform_conv(x, self.conv_offset(x), self.weight, self.stride,
                           self.padding, self.dilation, self.groups,
                           self.deformable_groups)
class DeformConvPackDW(DeformConv):
    """DeformConv with a depthwise-separable offset predictor.

    Offsets come from a 3x3 depthwise conv followed by a 1x1 pointwise
    conv; only the pointwise stage is zero-initialized.  The output
    channel count hard-codes a 3x3 main kernel (2 * 3 * 3 = 18 offset
    values per deformable group).
    """

    def __init__(self, *args, **kwargs):
        super(DeformConvPackDW, self).__init__(*args, **kwargs)
        channels = int(self.in_channels)
        offset_channels = int(self.deformable_groups * 18)
        # Depthwise 3x3 (one filter per input channel)...
        self.conv_dw = nn.Conv2d(channels, channels,
                                 3, 1, 1,
                                 groups=channels,
                                 bias=True)
        # ...then pointwise 1x1 mixing down to the offset channels.
        self.conv_pw = nn.Conv2d(channels, offset_channels, 1, 1, 0,
                                 bias=True)
        self.conv_pw.weight.data.zero_()
        self.conv_pw.bias.data.zero_()

    def forward(self, x):
        offset = self.conv_pw(self.conv_dw(x))
        return deform_conv(x, offset, self.weight, self.stride, self.padding,
                           self.dilation, self.groups, self.deformable_groups)
class ModulatedDeformConv(nn.Module):
    """Modulated deformable convolution (DCNv2-style) layer.

    Like DeformConv, but every sampling location additionally carries a
    scalar modulation mask; ``forward`` takes both ``offset`` and
    ``mask`` produced by an external branch (see
    ModulatedDeformConvPack).  An optional bias term is supported.
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 stride=1,
                 padding=0,
                 dilation=1,
                 groups=1,
                 deformable_groups=1,
                 bias=False):
        super(ModulatedDeformConv, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.kernel_size = _pair(kernel_size)
        # NOTE(review): unlike DeformConv these stay scalar here; they
        # are only _pair()-ed by subclasses / the underlying op.
        self.stride = stride
        self.padding = padding
        self.dilation = dilation
        self.groups = groups
        self.deformable_groups = deformable_groups
        self.with_bias = bias
        self.weight = nn.Parameter(
            torch.Tensor(out_channels, in_channels // groups,
                         *self.kernel_size))
        if not bias:
            # Registering None keeps `self.bias` defined for forward().
            self.register_parameter('bias', None)
        else:
            self.bias = nn.Parameter(torch.Tensor(out_channels))
        self.reset_parameters()

    def reset_parameters(self):
        """Uniform weight init scaled by fan-in; zero the bias if any."""
        fan_in = self.in_channels
        for k in self.kernel_size:
            fan_in *= k
        bound = 1. / math.sqrt(fan_in)
        self.weight.data.uniform_(-bound, bound)
        if self.bias is not None:
            self.bias.data.zero_()

    def forward(self, x, offset, mask):
        return modulated_deform_conv(x, offset, mask, self.weight, self.bias,
                                     self.stride, self.padding, self.dilation,
                                     self.groups, self.deformable_groups)
class ModulatedDeformConvPack(ModulatedDeformConv):
    """ModulatedDeformConv that predicts offsets and masks itself.

    A single zero-initialized conv emits 3 * kH * kW maps per deformable
    group: the first two thirds are the offsets, the last third the
    modulation mask (squashed through a sigmoid in ``forward``).
    """

    def __init__(self, *args, **kwargs):
        super(ModulatedDeformConvPack, self).__init__(*args, **kwargs)
        kh, kw = self.kernel_size
        self.conv_offset_mask = nn.Conv2d(
            self.in_channels,
            self.deformable_groups * 3 * kh * kw,
            kernel_size=self.kernel_size,
            stride=_pair(self.stride),
            padding=_pair(self.padding),
            bias=True)
        self.init_offset()

    def init_offset(self):
        """Zero-init so the layer starts as a plain convolution."""
        self.conv_offset_mask.weight.data.zero_()
        self.conv_offset_mask.bias.data.zero_()

    def forward(self, x):
        prediction = self.conv_offset_mask(x)
        # Split channel-wise into the two offset halves and the mask.
        o1, o2, mask = torch.chunk(prediction, 3, dim=1)
        offset = torch.cat((o1, o2), dim=1)
        mask = torch.sigmoid(mask)
        return modulated_deform_conv(x, offset, mask, self.weight, self.bias,
                                     self.stride, self.padding, self.dilation,
                                     self.groups, self.deformable_groups)
class DeformConvWithOffsetBound(nn.Module):
    """Deformable conv whose predicted offsets are clamped to +/-offset_bound."""

    def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, padding=1,
                 dilation=1, groups=1, deformable_groups=1, bias=False, offset_bound=8):
        super(DeformConvWithOffsetBound, self).__init__()
        # Offset head: 2 components (y, x) per kernel tap per deformable group;
        # zero init => zero offsets at the start of training.
        self.conv_offset = nn.Conv2d(
            in_channels, kernel_size * kernel_size * 2 * deformable_groups,
            kernel_size=3, stride=1, padding=1, bias=True)
        nn.init.zeros_(self.conv_offset.weight)
        nn.init.zeros_(self.conv_offset.bias)
        # Hardtanh clamps every offset component into [-offset_bound, offset_bound].
        self.conv_bound = torch.nn.Hardtanh(
            min_val=-offset_bound, max_val=offset_bound, inplace=True)
        self.conv = DeformConv(in_channels, out_channels, kernel_size=kernel_size,
                               stride=stride, padding=padding, dilation=dilation,
                               groups=groups, deformable_groups=deformable_groups,
                               bias=bias)

    def forward(self, x):
        bounded_offset = self.conv_bound(self.conv_offset(x))
        return self.conv(x, bounded_offset)
class DeformConvWithOffsetRound(nn.Module):
    """Deformable conv whose predicted offsets are rounded to integers.

    Rounding snaps every sampling location onto the integer pixel grid
    (presumably to avoid fractional sampling — confirm with callers).
    """

    def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, padding=1,
                 dilation=1, groups=1, deformable_groups=1, bias=False):
        super(DeformConvWithOffsetRound, self).__init__()
        # Offset head, zero-initialized so offsets start at zero.
        self.conv_offset = nn.Conv2d(
            in_channels, kernel_size * kernel_size * 2 * deformable_groups,
            kernel_size=3, stride=1, padding=1, bias=True)
        nn.init.zeros_(self.conv_offset.weight)
        nn.init.zeros_(self.conv_offset.bias)
        self.conv = DeformConv(in_channels, out_channels, kernel_size=kernel_size,
                               stride=stride, padding=padding, dilation=dilation,
                               groups=groups, deformable_groups=deformable_groups,
                               bias=bias)

    def forward(self, x):
        # In-place round on the freshly computed offset map.
        return self.conv(x, self.conv_offset(x).round_())
class DeformConvWithOffsetScale(nn.Module):
    """Deformable conv whose offsets radially scale the 3x3 sampling grid.

    A per-location scale ``s`` is predicted; the sampling offsets are the
    canonical 3x3 anchor pattern times ``(s - 1)``, so the bias init of 1
    yields zero offsets (plain 3x3 sampling) at the start of training.
    """

    def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, padding=1,
                 dilation=1, groups=1, deformable_groups=1, bias=False):
        super(DeformConvWithOffsetScale, self).__init__()
        self.conv_scale = nn.Conv2d(in_channels, deformable_groups,
                                    kernel_size=3, stride=1, padding=1, bias=True)
        nn.init.zeros_(self.conv_scale.weight)
        # Bias of 1 makes the initial scale exactly 1 everywhere.
        nn.init.constant_(self.conv_scale.bias.data, 1)
        self.conv = DeformConv(in_channels, out_channels, kernel_size=kernel_size,
                               stride=stride, padding=padding, dilation=dilation,
                               groups=groups, deformable_groups=deformable_groups,
                               bias=bias)
        # Nine (dy, dx) anchor pairs of the canonical 3x3 grid, shaped
        # (1, 18, 1, 1) for broadcasting against the scale map.
        self.anchor_offset = torch.FloatTensor(
            [-1, -1, -1, 0, -1, 1,
             0, -1, 0, 0, 0, 1,
             1, -1, 1, 0, 1, 1]).view(1, 18, 1, 1)

    def forward(self, x):
        # NOTE(review): anchor_offset is a plain tensor, so it is copied to
        # x's device on every call and is absent from state_dict.
        scale = self.conv_scale(x)
        offsets = self.anchor_offset.to(x.device) * (scale - 1)
        return self.conv(x, offsets)
class DeformConvWithOffsetScaleBound(nn.Module):
    """Deformable conv with a radially scaled 3x3 grid and a bounded scale.

    Like DeformConvWithOffsetScale, but the predicted scale is clamped to
    [-offset_bound, offset_bound] before the grid is stretched.
    """

    def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, padding=1,
                 dilation=1, groups=1, deformable_groups=1, bias=False, offset_bound=8):
        super(DeformConvWithOffsetScaleBound, self).__init__()
        self.conv_scale = nn.Conv2d(in_channels, deformable_groups,
                                    kernel_size=3, stride=1, padding=1, bias=True)
        nn.init.zeros_(self.conv_scale.weight)
        # Bias of 1 => initial scale 1 => zero offsets at init.
        nn.init.constant_(self.conv_scale.bias.data, 1)
        self.conv_bound = torch.nn.Hardtanh(
            min_val=-offset_bound, max_val=offset_bound, inplace=True)
        self.conv = DeformConv(in_channels, out_channels, kernel_size=kernel_size,
                               stride=stride, padding=padding, dilation=dilation,
                               groups=groups, deformable_groups=deformable_groups,
                               bias=bias)
        # Nine (dy, dx) anchor pairs of the canonical 3x3 grid, (1, 18, 1, 1).
        self.anchor_offset = torch.FloatTensor(
            [-1, -1, -1, 0, -1, 1,
             0, -1, 0, 0, 0, 1,
             1, -1, 1, 0, 1, 1]).view(1, 18, 1, 1)

    def forward(self, x):
        clamped_scale = self.conv_bound(self.conv_scale(x))
        offsets = self.anchor_offset.to(x.device) * (clamped_scale - 1)
        return self.conv(x, offsets)
class DeformConvWithOffsetScaleBoundPositive(nn.Module):
    """
    This is the Co-Designed Deformable Convolution Module used in CoDeNet.
    """
    def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, padding=1, dilation=1,
                 groups=1, deformable_groups=1, bias=False, offset_bound=8, hidden_state=64, BN_MOMENTUM=0.1):
        # NOTE(review): hidden_state and BN_MOMENTUM are accepted but never
        # used in this implementation — presumably kept for interface
        # compatibility; confirm before removing.
        super(DeformConvWithOffsetScaleBoundPositive, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        # 1x1 scale head; it shares the deformable conv's stride so the
        # offset map matches the output's spatial size.
        self.conv_scale = nn.Conv2d(in_channels, deformable_groups, kernel_size=1, stride=stride, padding=0, bias=True)
        # modules() of a bare Conv2d yields just the conv itself; weight is
        # zeroed and bias set to 1 so the initial scale is exactly 1.
        for m in self.conv_scale.modules():
            if isinstance(m, nn.Conv2d):
                m.weight.data.zero_()
                if m.bias is not None:
                    print("initialize offset bias")
                    nn.init.constant_(m.bias, 1)
        # Scale clamped to [-offset_bound + 1, offset_bound]; after the
        # (s - 1) shift in forward, offset magnitudes stay <= offset_bound.
        self.conv_bound = torch.nn.Hardtanh(
            min_val=-offset_bound+1, max_val=offset_bound, inplace=True)
        # Depthwise deformable conv (groups == in_channels), in->in channels.
        self.conv = DeformConv(in_channels, in_channels, kernel_size=kernel_size, stride=stride,
                               padding=padding, dilation=dilation, groups=in_channels, deformable_groups=deformable_groups,
                               bias=bias)
        if in_channels != out_channels:
            # Pointwise conv to change channel count; only created when needed
            # (forward guards on in_channels != out_channels accordingly).
            self.conv_channel = nn.Conv2d(in_channels, out_channels, 1, 1, 0, bias=False)
            if type(self.conv_channel) == nn.Conv2d:
                torch.nn.init.kaiming_normal_(self.conv_channel.weight, nonlinearity='relu')
                if self.conv_channel.bias is not None:
                    nn.init.constant_(self.conv_channel.bias, 0)
        # Nine (dy, dx) anchor pairs of the canonical 3x3 grid, shaped
        # (1, 18, 1, 1) for broadcasting against the scale map.
        self.anchor_offset = torch.FloatTensor([-1, -1, -1, 0, -1, 1,
                                                0, -1, 0, 0, 0, 1,
                                                1, -1, 1, 0, 1, 1]).unsqueeze(0).unsqueeze(2).unsqueeze(2)
    def forward(self, x):
        # s == 1 (the init value) gives zero offsets, i.e. plain 3x3 sampling.
        s = self.conv_bound(self.conv_scale(x))
        o = self.anchor_offset.to(x.device) * (s - 1)
        if self.in_channels != self.out_channels:
            return self.conv_channel(self.conv(x, o))
        else:
            return self.conv(x, o)
class ModulatedDeformConvWithOffsetScaleBoundPositive(nn.Module):
    """Modulated deformable conv with a radially scaled, bounded 3x3 grid.

    A scale map clamped to [0, offset_bound] stretches the canonical 3x3
    anchor pattern; a separate 3x3 head predicts the modulation mask.
    """

    def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, padding=1,
                 dilation=1, groups=1, deformable_groups=1, bias=False, offset_bound=8):
        super(ModulatedDeformConvWithOffsetScaleBoundPositive, self).__init__()
        # Mask head: 9 modulation values per deformable group.
        # NOTE(review): unlike ModulatedDeformConvPack, the mask here is
        # neither zero-initialized nor passed through sigmoid — confirm
        # that this is intentional.
        self.conv_mask = nn.Conv2d(in_channels, deformable_groups * 9,
                                   kernel_size=3, stride=1, padding=1, bias=True)
        self.conv_scale = nn.Conv2d(in_channels, deformable_groups,
                                    kernel_size=3, stride=1, padding=1, bias=True)
        nn.init.zeros_(self.conv_scale.weight)
        # Bias of 1 => initial scale 1 => zero offsets at init.
        nn.init.constant_(self.conv_scale.bias.data, 1)
        self.conv_bound = torch.nn.Hardtanh(
            min_val=0, max_val=offset_bound, inplace=True)
        self.conv = ModulatedDeformConv(in_channels, out_channels,
                                        kernel_size=kernel_size, stride=stride,
                                        padding=padding, dilation=dilation,
                                        groups=groups,
                                        deformable_groups=deformable_groups,
                                        bias=bias)
        # Nine (dy, dx) anchor pairs of the canonical 3x3 grid, (1, 18, 1, 1).
        self.anchor_offset = torch.FloatTensor(
            [-1, -1, -1, 0, -1, 1,
             0, -1, 0, 0, 0, 1,
             1, -1, 1, 0, 1, 1]).view(1, 18, 1, 1)

    def forward(self, x):
        mask = self.conv_mask(x)
        scale = self.conv_bound(self.conv_scale(x))
        offset = self.anchor_offset.to(x.device) * (scale - 1)
        return self.conv(x, offset, mask)
class ModulatedDeformConvWithOffset1x1ScaleBoundPositive(nn.Module):
    """Same as ModulatedDeformConvWithOffsetScaleBoundPositive but with
    1x1 (instead of 3x3) mask and scale prediction heads."""

    def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, padding=1,
                 dilation=1, groups=1, deformable_groups=1, bias=False, offset_bound=8):
        super(ModulatedDeformConvWithOffset1x1ScaleBoundPositive, self).__init__()
        # NOTE(review): the mask is neither zero-initialized nor passed
        # through sigmoid (cf. ModulatedDeformConvPack) — confirm intended.
        self.conv_mask = nn.Conv2d(in_channels, deformable_groups * 9,
                                   kernel_size=1, stride=1, padding=0, bias=True)
        self.conv_scale = nn.Conv2d(in_channels, deformable_groups,
                                    kernel_size=1, stride=1, padding=0, bias=True)
        nn.init.zeros_(self.conv_scale.weight)
        # Bias of 1 => initial scale 1 => zero offsets at init.
        nn.init.constant_(self.conv_scale.bias.data, 1)
        self.conv_bound = torch.nn.Hardtanh(
            min_val=0, max_val=offset_bound, inplace=True)
        self.conv = ModulatedDeformConv(in_channels, out_channels,
                                        kernel_size=kernel_size, stride=stride,
                                        padding=padding, dilation=dilation,
                                        groups=groups,
                                        deformable_groups=deformable_groups,
                                        bias=bias)
        # Nine (dy, dx) anchor pairs of the canonical 3x3 grid, (1, 18, 1, 1).
        self.anchor_offset = torch.FloatTensor(
            [-1, -1, -1, 0, -1, 1,
             0, -1, 0, 0, 0, 1,
             1, -1, 1, 0, 1, 1]).view(1, 18, 1, 1)

    def forward(self, x):
        mask = self.conv_mask(x)
        scale = self.conv_bound(self.conv_scale(x))
        offset = self.anchor_offset.to(x.device) * (scale - 1)
        return self.conv(x, offset, mask)
| 42.254545
| 157
| 0.603455
| 1,987
| 16,268
| 4.708103
| 0.071968
| 0.071833
| 0.031962
| 0.038161
| 0.791021
| 0.769107
| 0.753073
| 0.723998
| 0.713202
| 0.713202
| 0
| 0.023338
| 0.286206
| 16,268
| 384
| 158
| 42.364583
| 0.782294
| 0.012724
| 0
| 0.646667
| 0
| 0
| 0.00798
| 0
| 0
| 0
| 0
| 0
| 0.01
| 1
| 0.103333
| false
| 0
| 0.023333
| 0.013333
| 0.216667
| 0.003333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0000d30a718318da726920f84eb7f02264e21495
| 188
|
py
|
Python
|
tests/test_lesson4_frog_river_one.py
|
ardenn/codility
|
45f8d5ae7de92cfde60a3f3f5ebee2a233273bd4
|
[
"MIT"
] | null | null | null |
tests/test_lesson4_frog_river_one.py
|
ardenn/codility
|
45f8d5ae7de92cfde60a3f3f5ebee2a233273bd4
|
[
"MIT"
] | null | null | null |
tests/test_lesson4_frog_river_one.py
|
ardenn/codility
|
45f8d5ae7de92cfde60a3f3f5ebee2a233273bd4
|
[
"MIT"
] | null | null | null |
from solutions.lesson4_frog_river_one import solution
def test_for_x_5_time_found():
    """Frog can cross: positions 1..5 are all covered after second 6.

    The original test called solution() without asserting anything, so it
    could never fail; per the Codility FrogRiverOne spec the earliest
    second at which every position 1..X is reached is returned.
    """
    assert solution([1, 3, 1, 4, 2, 3, 5, 4], 5) == 6


def test_for_x_5_time_not_found():
    """Frog can never cross: position 5 never appears, so expect -1."""
    assert solution([1, 3, 1, 4, 2, 3, 6, 4], 5) == -1
| 31.333333
| 53
| 0.739362
| 43
| 188
| 2.906977
| 0.488372
| 0.112
| 0.16
| 0.176
| 0.56
| 0.56
| 0.304
| 0.304
| 0
| 0
| 0
| 0.123529
| 0.095745
| 188
| 6
| 54
| 31.333333
| 0.611765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| true
| 0
| 0.2
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
0019a677f2c6e27a8a2b78df5f91152c8e307d1f
| 13,471
|
py
|
Python
|
lib/test/lib/classifiers/hmm/test_topologies.py
|
eonu/tempora
|
b857c72737532983ebe7403d627fc632cbc06dde
|
[
"MIT"
] | 32
|
2019-12-31T15:53:47.000Z
|
2022-03-31T12:40:40.000Z
|
lib/test/lib/classifiers/hmm/test_topologies.py
|
eonu/tempora
|
b857c72737532983ebe7403d627fc632cbc06dde
|
[
"MIT"
] | 25
|
2020-06-01T18:32:47.000Z
|
2022-01-22T13:15:36.000Z
|
lib/test/lib/classifiers/hmm/test_topologies.py
|
eonu/tempora
|
b857c72737532983ebe7403d627fc632cbc06dde
|
[
"MIT"
] | 6
|
2019-12-31T15:54:26.000Z
|
2022-01-23T07:09:28.000Z
|
import pytest, warnings, numpy as np
from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology
from ....support import assert_equal, assert_all_equal, assert_distribution
# Set seed for reproducible randomness
seed = 0
np.random.seed(seed)
rng = np.random.RandomState(seed)
# ========= #
# _Topology #
# ========= #
# --------------------------- #
# _Topology.uniform_initial() #
# --------------------------- #
def test_uniform_initial_min():
    """Generate a uniform initial state distribution with the minimum number of states"""
    topology = _Topology(n_states=1, random_state=rng)
    initial = topology.uniform_initial()
    assert_distribution(initial)
    assert_equal(initial, np.array([
        1.
    ]))

def test_uniform_initial_small():
    """Generate a uniform initial state distribution with a few states"""
    topology = _Topology(n_states=2, random_state=rng)
    initial = topology.uniform_initial()
    assert_distribution(initial)
    assert_equal(initial, np.array([
        0.5, 0.5
    ]))

def test_uniform_initial_many():
    """Generate a uniform initial state distribution with many states"""
    topology = _Topology(n_states=5, random_state=rng)
    initial = topology.uniform_initial()
    assert_distribution(initial)
    assert_equal(initial, np.array([
        0.2, 0.2, 0.2, 0.2, 0.2
    ]))

# -------------------------- #
# _Topology.random_initial() #
# -------------------------- #

# NOTE(review): the hard-coded expected values below are tied to the
# module-level `rng` (seed 0) being consumed in this exact test order;
# reordering tests or adding earlier draws from `rng` will break them.
def test_random_initial_min():
    """Generate a random initial state distribution with minimal states"""
    topology = _Topology(n_states=1, random_state=rng)
    initial = topology.random_initial()
    assert_distribution(initial)
    assert_equal(initial, np.array([
        1.
    ]))

def test_random_initial_small():
    """Generate a random initial state distribution with few states"""
    topology = _Topology(n_states=2, random_state=rng)
    initial = topology.random_initial()
    assert_distribution(initial)
    assert_equal(initial, np.array([
        0.57633871, 0.42366129
    ]))

def test_random_initial_many():
    """Generate a random initial state distribution with many states"""
    topology = _Topology(n_states=5, random_state=rng)
    initial = topology.random_initial()
    assert_distribution(initial)
    assert_equal(initial, np.array([
        0.15210286, 0.10647349, 0.20059295, 0.11120171, 0.42962898
    ]))
# ================== #
# _LeftRightTopology #
# ================== #
# ---------------------------------------- #
# _LeftRightTopology.uniform_transitions() #
# ---------------------------------------- #
def test_left_right_uniform_transitions_min():
    """Generate a uniform left-right transition matrix with minimal states"""
    topology = _LeftRightTopology(n_states=1, random_state=rng)
    transitions = topology.uniform_transitions()
    assert_distribution(transitions)
    assert_equal(transitions, np.array([
        [1.]
    ]))

def test_left_right_uniform_transitions_small():
    """Generate a uniform left-right transition matrix with few states"""
    topology = _LeftRightTopology(n_states=2, random_state=rng)
    transitions = topology.uniform_transitions()
    assert_distribution(transitions)
    assert_equal(transitions, np.array([
        [0.5, 0.5],
        [0., 1.]
    ]))

def test_left_right_uniform_transitions_many():
    """Generate a uniform left-right transition matrix with many states"""
    # Row i spreads probability uniformly over states i..n-1 (upper triangle).
    topology = _LeftRightTopology(n_states=5, random_state=rng)
    transitions = topology.uniform_transitions()
    assert_distribution(transitions)
    assert_equal(transitions, np.array([
        [0.2, 0.2, 0.2, 0.2, 0.2],
        [0., 0.25, 0.25, 0.25, 0.25],
        [0., 0., 0.33333333, 0.33333333, 0.33333333],
        [0., 0., 0., 0.5, 0.5],
        [0., 0., 0., 0., 1.]
    ]))

# --------------------------------------- #
# _LeftRightTopology.random_transitions() #
# --------------------------------------- #

# NOTE(review): expected values depend on the shared module-level `rng`
# being consumed in this exact test order (seed 0).
def test_left_right_random_transitions_min():
    """Generate a random left-right transition matrix with minimal states"""
    topology = _LeftRightTopology(n_states=1, random_state=rng)
    transitions = topology.random_transitions()
    assert_distribution(transitions)
    assert_equal(transitions, np.array([
        [1.]
    ]))

def test_left_right_random_transitions_small():
    """Generate a random left-right transition matrix with few states"""
    topology = _LeftRightTopology(n_states=2, random_state=rng)
    transitions = topology.random_transitions()
    assert_distribution(transitions)
    assert_equal(transitions, np.array([
        [0.23561633, 0.76438367],
        [0., 1.]
    ]))

def test_left_right_random_transitions_many():
    """Generate a random left-right transition matrix with many states"""
    topology = _LeftRightTopology(n_states=5, random_state=rng)
    transitions = topology.random_transitions()
    assert_distribution(transitions)
    assert_equal(transitions, np.array([
        [0.23169814, 0.71716356, 0.02033845, 0.02516204, 0.00563782],
        [0., 0.19474072, 0.16405008, 0.22228532, 0.41892388],
        [0., 0., 0.42912755, 0.16545797, 0.40541448],
        [0., 0., 0., 0.109713, 0.890287],
        [0., 0., 0., 0., 1.]
    ]))

# ----------------------------------------- #
# _LeftRightTopology.validate_transitions() #
# ----------------------------------------- #

def test_left_right_validate_transitions_invalid():
    """Validate an invalid left-right transition matrix"""
    # An ergodic matrix is dense, so it cannot be upper-triangular.
    topology = _LeftRightTopology(n_states=5, random_state=rng)
    transitions = _ErgodicTopology(n_states=5, random_state=rng).random_transitions()
    with pytest.raises(ValueError) as e:
        topology.validate_transitions(transitions)
    assert str(e.value) == 'Left-right transition matrix must be upper-triangular'

def test_left_right_validate_transitions_valid():
    """Validate a valid left-right transition matrix"""
    topology = _LeftRightTopology(n_states=5, random_state=rng)
    transitions = topology.random_transitions()
    topology.validate_transitions(transitions)
# -------------------------------------- #
# _ErgodicTopology.uniform_transitions() #
# -------------------------------------- #
def test_ergodic_uniform_transitions_min():
    """Generate a uniform ergodic transition matrix with minimal states"""
    topology = _ErgodicTopology(n_states=1, random_state=rng)
    transitions = topology.uniform_transitions()
    assert_distribution(transitions)
    assert_equal(transitions, np.array([
        [1.]
    ]))

def test_ergodic_uniform_transitions_small():
    """Generate a uniform ergodic transition matrix with few states"""
    topology = _ErgodicTopology(n_states=2, random_state=rng)
    transitions = topology.uniform_transitions()
    assert_distribution(transitions)
    assert_equal(transitions, np.array([
        [0.5, 0.5],
        [0.5, 0.5]
    ]))

def test_ergodic_uniform_transitions_many():
    """Generate a uniform ergodic transition matrix with many states"""
    # Every row is uniform over all states: the matrix is fully dense.
    topology = _ErgodicTopology(n_states=5, random_state=rng)
    transitions = topology.uniform_transitions()
    assert_distribution(transitions)
    assert_equal(transitions, np.array([
        [0.2, 0.2, 0.2, 0.2, 0.2],
        [0.2, 0.2, 0.2, 0.2, 0.2],
        [0.2, 0.2, 0.2, 0.2, 0.2],
        [0.2, 0.2, 0.2, 0.2, 0.2],
        [0.2, 0.2, 0.2, 0.2, 0.2]
    ]))

# ------------------------------------- #
# _ErgodicTopology.random_transitions() #
# ------------------------------------- #

# NOTE(review): expected values depend on the shared module-level `rng`
# being consumed in this exact test order (seed 0).
def test_ergodic_random_transitions_min():
    """Generate a random ergodic transition matrix with minimal states"""
    topology = _ErgodicTopology(n_states=1, random_state=rng)
    transitions = topology.random_transitions()
    assert_distribution(transitions)
    assert_equal(transitions, np.array([
        [1.]
    ]))

def test_ergodic_random_transitions_small():
    """Generate a random ergodic transition matrix with few states"""
    topology = _ErgodicTopology(n_states=2, random_state=rng)
    transitions = topology.random_transitions()
    assert_distribution(transitions)
    assert_equal(transitions, np.array([
        [0.9474011, 0.0525989],
        [0.85567599, 0.14432401]
    ]))

def test_ergodic_random_transitions_many():
    """Generate a random ergodic transition matrix with many states"""
    topology = _ErgodicTopology(n_states=5, random_state=rng)
    transitions = topology.random_transitions()
    assert_distribution(transitions)
    assert_equal(transitions, np.array([
        [0.58715548, 0.14491542, 0.20980762, 0.00623944, 0.05188205],
        [0.0840705, 0.23055049, 0.08297536, 0.25124688, 0.35115677],
        [0.02117615, 0.37664662, 0.26705912, 0.09851123, 0.23660688],
        [0.01938041, 0.16853843, 0.52046123, 0.07535256, 0.21626737],
        [0.04996846, 0.44545843, 0.12079423, 0.07154241, 0.31223646]
    ]))

# --------------------------------------- #
# _ErgodicTopology.validate_transitions() #
# --------------------------------------- #

def test_ergodic_validate_transitions_invalid():
    """Validate an invalid ergodic transition matrix"""
    # A left-right matrix only warns (not raises): it is still a valid
    # distribution, just not fully connected.
    topology = _ErgodicTopology(n_states=5, random_state=rng)
    transitions = _LeftRightTopology(n_states=5, random_state=rng).random_transitions()
    with pytest.warns(UserWarning):
        topology.validate_transitions(transitions)

def test_ergodic_validate_transitions_valid():
    """Validate a valid ergodic transition matrix"""
    topology = _ErgodicTopology(n_states=5, random_state=rng)
    transitions = topology.random_transitions()
    topology.validate_transitions(transitions)
# =============== #
# _LinearTopology #
# =============== #
# ------------------------------------- #
# _LinearTopology.uniform_transitions() #
# ------------------------------------- #
def test_linear_uniform_transitions_min():
    """Generate a uniform linear transition matrix with minimal states"""
    topology = _LinearTopology(n_states=1, random_state=rng)
    transitions = topology.uniform_transitions()
    assert_distribution(transitions)
    assert_equal(transitions, np.array([
        [1.]
    ]))

def test_linear_uniform_transitions_small():
    """Generate a uniform linear transition matrix with few states"""
    topology = _LinearTopology(n_states=2, random_state=rng)
    transitions = topology.uniform_transitions()
    assert_distribution(transitions)
    assert_equal(transitions, np.array([
        [0.5, 0.5],
        [0., 1.]
    ]))

def test_linear_uniform_transitions_many():
    """Generate a uniform linear transition matrix with many states"""
    # Linear topology: each state may only stay or advance to its successor.
    topology = _LinearTopology(n_states=5, random_state=rng)
    transitions = topology.uniform_transitions()
    assert_distribution(transitions)
    assert_equal(transitions, np.array([
        [0.5, 0.5, 0., 0., 0.],
        [0., 0.5, 0.5, 0., 0.],
        [0., 0., 0.5, 0.5, 0.],
        [0., 0., 0., 0.5, 0.5],
        [0., 0., 0., 0., 1.]
    ]))

# ------------------------------------ #
# _LinearTopology.random_transitions() #
# ------------------------------------ #

# NOTE(review): expected values depend on the shared module-level `rng`
# being consumed in this exact test order (seed 0).
def test_linear_random_transitions_min():
    """Generate a random linear transition matrix with minimal states"""
    topology = _LinearTopology(n_states=1, random_state=rng)
    transitions = topology.random_transitions()
    assert_distribution(transitions)
    assert_equal(transitions, np.array([
        [1.]
    ]))

def test_linear_random_transitions_small():
    """Generate a random linear transition matrix with few states"""
    topology = _LinearTopology(n_states=2, random_state=rng)
    transitions = topology.random_transitions()
    assert_distribution(transitions)
    assert_equal(transitions, np.array([
        [0.65157396, 0.34842604],
        [0., 1.]
    ]))

def test_linear_random_transitions_many():
    """Generate a random linear transition matrix with many states"""
    topology = _LinearTopology(n_states=5, random_state=rng)
    transitions = topology.random_transitions()
    assert_distribution(transitions)
    assert_equal(transitions, np.array([
        [0.44455421, 0.55544579, 0., 0., 0.],
        [0., 0.57553614, 0.42446386, 0., 0.],
        [0., 0., 0.92014965, 0.07985035, 0.],
        [0., 0., 0., 0.66790982, 0.33209018],
        [0., 0., 0., 0., 1.]
    ]))

# -------------------------------------- #
# _LinearTopology.validate_transitions() #
# -------------------------------------- #

def test_linear_validate_transitions_invalid():
    """Validate an invalid linear transition matrix"""
    topology = _LinearTopology(n_states=5, random_state=rng)
    transitions = _ErgodicTopology(n_states=5, random_state=rng).random_transitions()
    with pytest.raises(ValueError) as e:
        topology.validate_transitions(transitions)
    # NOTE(review): the message says 'Left-right' — presumably
    # _LinearTopology inherits validation from _LeftRightTopology; confirm.
    assert str(e.value) == 'Left-right transition matrix must be upper-triangular'

def test_linear_validate_transitions_valid():
    """Validate a valid linear transition matrix"""
    topology = _LinearTopology(n_states=5, random_state=rng)
    transitions = topology.random_transitions()
    topology.validate_transitions(transitions)
| 38.378917
| 98
| 0.633361
| 1,476
| 13,471
| 5.551491
| 0.092141
| 0.011716
| 0.056383
| 0.015621
| 0.845009
| 0.824262
| 0.72907
| 0.665975
| 0.654625
| 0.654625
| 0
| 0.073965
| 0.191077
| 13,471
| 351
| 99
| 38.378917
| 0.677985
| 0.240294
| 0
| 0.716738
| 0
| 0
| 0.010601
| 0
| 0
| 0
| 0
| 0
| 0.218884
| 1
| 0.128755
| false
| 0
| 0.012876
| 0
| 0.141631
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
002bac4eea10efd744be40dd55bdc6061bff3121
| 258
|
py
|
Python
|
TwitterStatsLib/test/__init__.py
|
pecet/pytosg
|
3e3ba1a3bbc5435deab382e2b4d9ad80f80d7b36
|
[
"MIT"
] | null | null | null |
TwitterStatsLib/test/__init__.py
|
pecet/pytosg
|
3e3ba1a3bbc5435deab382e2b4d9ad80f80d7b36
|
[
"MIT"
] | null | null | null |
TwitterStatsLib/test/__init__.py
|
pecet/pytosg
|
3e3ba1a3bbc5435deab382e2b4d9ad80f80d7b36
|
[
"MIT"
] | null | null | null |
""" Main unit test module """
import unittest
from .test_LazyDict import TestLazyDict
from .test_TwitterStatsGenerator import TestMap
from .test_InsertableOrderedDict import TestInsertableOrderedDict
if __name__ == '__main__':
    # Allow running this test package directly; unittest's default loader
    # discovers the TestCase classes imported above.
    unittest.main()
| 25.8
| 66
| 0.77907
| 26
| 258
| 7.307692
| 0.576923
| 0.126316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155039
| 258
| 9
| 67
| 28.666667
| 0.87156
| 0.081395
| 0
| 0
| 0
| 0
| 0.036364
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0050d64ba9fb0f6c667a6583995c1b0ec04f5745
| 51
|
py
|
Python
|
debug.py
|
danieltodor/rpi-gpio-devices
|
6a9d9981d2b58d1ba3b2419bf89bdaa03bad6577
|
[
"MIT"
] | null | null | null |
debug.py
|
danieltodor/rpi-gpio-devices
|
6a9d9981d2b58d1ba3b2419bf89bdaa03bad6577
|
[
"MIT"
] | null | null | null |
debug.py
|
danieltodor/rpi-gpio-devices
|
6a9d9981d2b58d1ba3b2419bf89bdaa03bad6577
|
[
"MIT"
] | null | null | null |
""" Write debug scripts here """
from src import *
| 17
| 32
| 0.666667
| 7
| 51
| 4.857143
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.196078
| 51
| 2
| 33
| 25.5
| 0.829268
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
cc73c8137c5d039612655e690ae8e609203601d1
| 31
|
py
|
Python
|
pf_py_common/py_object_copier.py
|
problemfighter/pf-py-common
|
3c70f8da7d61daf5de217aa5d82d6a9e5b1b02f4
|
[
"Apache-2.0"
] | 3
|
2022-01-06T14:14:56.000Z
|
2022-01-15T09:08:35.000Z
|
pf_py_common/py_object_copier.py
|
problemfighter/pf-py-common
|
3c70f8da7d61daf5de217aa5d82d6a9e5b1b02f4
|
[
"Apache-2.0"
] | null | null | null |
pf_py_common/py_object_copier.py
|
problemfighter/pf-py-common
|
3c70f8da7d61daf5de217aa5d82d6a9e5b1b02f4
|
[
"Apache-2.0"
] | null | null | null |
class PyObjectCopier:
    """Placeholder for an object-copying utility; no behavior implemented yet."""
    pass
| 10.333333
| 21
| 0.741935
| 3
| 31
| 7.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.225806
| 31
| 2
| 22
| 15.5
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
aeb124393b8f8e3c0b88de2b28b1611261ee8b56
| 141
|
py
|
Python
|
auth_api/custom_auth.py
|
ripoul/gpx-storage
|
8663cca4a443fcd59c19eb197b890fc924f68b51
|
[
"Apache-2.0"
] | null | null | null |
auth_api/custom_auth.py
|
ripoul/gpx-storage
|
8663cca4a443fcd59c19eb197b890fc924f68b51
|
[
"Apache-2.0"
] | null | null | null |
auth_api/custom_auth.py
|
ripoul/gpx-storage
|
8663cca4a443fcd59c19eb197b890fc924f68b51
|
[
"Apache-2.0"
] | null | null | null |
from rest_framework.authentication import TokenAuthentication
class CustomTokenAuthentication(TokenAuthentication):
    # Use the OAuth2-style "Bearer" scheme in the Authorization header
    # instead of DRF's default "Token" keyword.
    keyword = "Bearer"
| 23.5
| 61
| 0.843972
| 11
| 141
| 10.727273
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106383
| 141
| 5
| 62
| 28.2
| 0.936508
| 0
| 0
| 0
| 0
| 0
| 0.042553
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
aecf640914bf461b95f443461ad0c8df1183b354
| 144
|
py
|
Python
|
microservices/svc/echo/run.py
|
Syu-syusan/internship
|
f4a486bec374e0b40f865956928cd39d1ce62c37
|
[
"MIT"
] | null | null | null |
microservices/svc/echo/run.py
|
Syu-syusan/internship
|
f4a486bec374e0b40f865956928cd39d1ce62c37
|
[
"MIT"
] | null | null | null |
microservices/svc/echo/run.py
|
Syu-syusan/internship
|
f4a486bec374e0b40f865956928cd39d1ce62c37
|
[
"MIT"
] | null | null | null |
import asyncio
import conf
import server
if __name__ == "__main__":
    # Start the echo service; host and port come from the conf module.
    asyncio.run(server.run(host=conf.APP_IP_ADDRESS, port=conf.APP_PORT))
| 18
| 73
| 0.763889
| 22
| 144
| 4.5
| 0.590909
| 0.141414
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 144
| 7
| 74
| 20.571429
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
aed4f1ee109c22297a0d52a0d952b30541dae915
| 157
|
py
|
Python
|
beekeeper/admin.py
|
evan10s/beekeeper
|
13fd3bf812e3c78988429a165e149bfbaffb5b17
|
[
"MIT"
] | null | null | null |
beekeeper/admin.py
|
evan10s/beekeeper
|
13fd3bf812e3c78988429a165e149bfbaffb5b17
|
[
"MIT"
] | null | null | null |
beekeeper/admin.py
|
evan10s/beekeeper
|
13fd3bf812e3c78988429a165e149bfbaffb5b17
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from django.conf import settings
# Register your models here.
from .models import Treatment
admin.site.register(Treatment)
| 22.428571
| 32
| 0.821656
| 22
| 157
| 5.863636
| 0.590909
| 0.155039
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121019
| 157
| 6
| 33
| 26.166667
| 0.934783
| 0.165605
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
aed6a9a9793d4d307257b84480bb52bd263ec688
| 104
|
py
|
Python
|
enthought/help/help_plugin/help_code.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/help/help_plugin/help_code.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/help/help_plugin/help_code.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from __future__ import absolute_import
from apptools.help.help_plugin.help_code import *
| 26
| 49
| 0.846154
| 15
| 104
| 5.4
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105769
| 104
| 3
| 50
| 34.666667
| 0.870968
| 0.115385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d1ab5807d8b99408dab65d93a14014b6b8af5dae
| 90
|
py
|
Python
|
cblib/scripts/dist/INTEGER.py
|
HFriberg/cblib-base
|
164a00eb73ef3ac61f5b54f30492209cc69b854b
|
[
"Zlib"
] | 3
|
2019-06-13T06:57:31.000Z
|
2020-06-18T09:58:11.000Z
|
cblib/scripts/dist/INTEGER.py
|
HFriberg/cblib-base
|
164a00eb73ef3ac61f5b54f30492209cc69b854b
|
[
"Zlib"
] | 1
|
2019-04-27T18:28:57.000Z
|
2019-04-30T17:16:53.000Z
|
cblib/scripts/dist/INTEGER.py
|
HFriberg/cblib-base
|
164a00eb73ef3ac61f5b54f30492209cc69b854b
|
[
"Zlib"
] | 3
|
2019-04-30T11:19:34.000Z
|
2019-05-31T13:12:17.000Z
|
import math
def primdist(x):
    """Return the maximum distance of the entries of x from their nearest integers.

    Args:
        x: iterable of floats.

    Returns:
        float: the largest |xj - round_half_up(xj)| over all entries, or
        0.0 for empty input (an empty point is trivially integral).
        The original raised ValueError on empty input.
    """
    # math.floor(xj + 0.5) rounds half UP, matching the original behavior;
    # built-in round() uses banker's rounding and would disagree at .5.
    return max((abs(xj - math.floor(xj + 0.5)) for xj in x), default=0.0)
| 15
| 58
| 0.633333
| 18
| 90
| 3.166667
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027778
| 0.2
| 90
| 5
| 59
| 18
| 0.763889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
d1ccb7f1318fff6a3fed0dd50c7ef077fedf09e6
| 242
|
py
|
Python
|
tests/test_main.py
|
Ronald-TR/nina
|
38d780c1c1dabcc7ee660ccc13f8c8db63064da9
|
[
"MIT"
] | 9
|
2019-07-11T20:35:06.000Z
|
2020-05-08T07:59:11.000Z
|
tests/test_main.py
|
Ronald-TR/nina
|
38d780c1c1dabcc7ee660ccc13f8c8db63064da9
|
[
"MIT"
] | 3
|
2019-07-18T19:13:11.000Z
|
2019-07-24T01:31:13.000Z
|
tests/test_main.py
|
Ronald-TR/nina
|
38d780c1c1dabcc7ee660ccc13f8c8db63064da9
|
[
"MIT"
] | 2
|
2019-07-12T12:57:50.000Z
|
2019-07-23T03:21:19.000Z
|
def test_questions():
    # Placeholder until main.q() can be exercised here; intended check:
    # assert 'Project name' in main.q()
    assert True
def test_import():
    # The CLI entry point must be importable; both exception types are
    # kept from the original even though ModuleNotFoundError already
    # subclasses ImportError.
    try:
        from automail.cli import main
        assert True
    except (ImportError, ModuleNotFoundError):
        raise AssertionError
| 20.166667
| 46
| 0.652893
| 27
| 242
| 5.777778
| 0.740741
| 0.089744
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 242
| 11
| 47
| 22
| 0.886364
| 0.136364
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.375
| 1
| 0.25
| true
| 0
| 0.375
| 0
| 0.625
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d1d5beb5b643bfeaf72b7734cbc319933193809c
| 14,911
|
py
|
Python
|
Scripts/PerceptronFive.py
|
targamadze28/Analysis-of-the-latent-space-of-pretrained-deep-convolutional-neural-networks
|
8147e875590a8067432b26e158937d5a00afba61
|
[
"MIT"
] | null | null | null |
Scripts/PerceptronFive.py
|
targamadze28/Analysis-of-the-latent-space-of-pretrained-deep-convolutional-neural-networks
|
8147e875590a8067432b26e158937d5a00afba61
|
[
"MIT"
] | null | null | null |
Scripts/PerceptronFive.py
|
targamadze28/Analysis-of-the-latent-space-of-pretrained-deep-convolutional-neural-networks
|
8147e875590a8067432b26e158937d5a00afba61
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
import h5py as h5
import os.path as fs
import keras
from keras.models import Sequential
from keras.utils import np_utils
from keras.layers.core import Dense, Activation, Dropout
from sklearn.model_selection import train_test_split
from sklearn.datasets import load_digits
from sklearn.linear_model import Perceptron
from sklearn.metrics import f1_score
from scipy.spatial import distance
from sklearn.preprocessing import normalize
from sklearn.metrics import accuracy_score
import logging as logg
from sklearn.decomposition import PCA
from sklearn import preprocessing
import umap
#%%
def readHDF5file(PathToSave, SavedFileName, list_group_name):
data = []
ff = h5.File(fs.join(PathToSave, SavedFileName), 'r')
for group in list_group_name:
data.append(ff[group][...])
ff.close()
return data
def saveHDF5file(PathToSave, SavedFileName, list_group_name, data):
num_group = len(list_group_name)
num_data = len(data)
if num_group != num_data:
raise RuntimeError('Group name list and data list length do not match!')
ff = h5.File(fs.join(PathToSave, SavedFileName), 'w')
for i, group in enumerate(list_group_name):
ff.create_dataset(group, data = data[i])
ff.close()
return None
#%%
def pca_result(activations, n_comp):
embedding = PCA(n_components= n_comp).fit_transform(activations)
return embedding
def umap_result(activations, n_comp):
embedding = umap.UMAP(n_components=n_comp).fit_transform(activations)
return embedding
#%%
def TrainPerceptron(latent_space):
n = len(latent_space)
shape_ls = latent_space.shape[1]
labels = np.empty((n, 1), dtype = np.int32)
labels[:15000], labels[15000:30000], labels[30000:] = 0, 1, 2
#labels[:5000], labels[5000:10000], labels[10000:] = 0, 1, 2
y_train = np_utils.to_categorical(labels)
standardized_latent_space = preprocessing.scale(latent_space)
model = Sequential()
model.add(Dense(3, input_dim= shape_ls))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='Nadam')
model.summary()
model.fit(standardized_latent_space, y_train, epochs = 250, batch_size=128, validation_split=0.3, shuffle = True, verbose=2)
optim = keras.optimizers.SGD(lr=0.02, decay=1e-2/300)
model.compile(loss='categorical_crossentropy', optimizer=optim)
model.fit(standardized_latent_space, y_train, epochs = 300, batch_size=128, validation_split=0.3, shuffle = True, verbose=2)
predict = model.predict(standardized_latent_space, batch_size=4096)
predict = np.heaviside(predict - 0.5, 1).astype(np.int32)
score = f1_score(y_train, predict, average='micro')
return score
#%%
RootPathLatentSpace = ''
logg.basicConfig(filename=fs.join(RootPathLatentSpace, "LatentSpaceLogger.log"), level=logg.INFO)
logg
umap_shape_mnist = [0, 1024, 512, 256, 128, 64, 32, 16, 8, 4, 2]
umap_shape_unet = [0, 512, 256, 128, 64, 32, 16, 8, 4, 2]
PathToDataSet = ''
PathToModel = ''
NamesDataSet = ['']
name_loss_list = ['']
name_NN_list = ['']
num_layers = [[], []]
precision = np.ones((len(name_NN_list), len(name_loss_list), 5, len(NamesDataSet),\
7, 11, 2), dtype = np.float32)
precision = precision*(-1.)
"""
for iter_NN in range(len(name_NN_list)):
for iter_loss in range(len(name_loss_list)):
for launch_num in range(5):
for data_iter, data in enumerate(NamesDataSet):
number_layer = num_layers[iter_NN]
for li, layer_iter in enumerate(number_layer):
latent_space= readHDF5file(RootPathLatentSpace,\
'LatentSpace_Model%s_Loss%s_Launch%d_Layer%d,hdf5'%(name_NN_list[iter_NN],\
name_loss_list[iter_loss],\
launch_num + 1,\
layer_iter),\
['latent_space'])[0]
if iter_NN == 0:
compress_list = umap_shape_mnist
else:
compress_list = umap_shape_unet
for dim_iter, dim in enumerate(compress_list):
if dim != 0:
ls_pca = pca_result(latent_space, dim)
f1_score_pca = TrainPerceptron(ls_pca)
logg.info('%d / %d, %d / %d, %d / %d, %d / %d, %d / %d, %d / %d pca score = %f'%(iter_NN + 1, len(name_NN_list), \
iter_loss + 1, len(name_loss_list),\
launch_num + 1, 5,\
data_iter + 1, len(NamesDataSet),\
li + 1, len(number_layer),\
dim_iter + 1, len(compress_list),\
f1_score_pca))
precision[iter_NN, iter_loss, launch_num, data_iter, li,\
dim_iter, 0] = f1_score_pca
ls_umap = umap_result(latent_space, dim)
f1_score_umap = TrainPerceptron(ls_umap)
logg.info('%d / %d, %d / %d, %d / %d, %d / %d, %d / %d, %d / %d umap score = %f'%(iter_NN + 1, len(name_NN_list), \
iter_loss + 1, len(name_loss_list),\
launch_num + 1, 5,\
data_iter + 1, len(NamesDataSet),\
li + 1, len(number_layer),\
dim_iter + 1, len(compress_list),\
f1_score_umap))
precision[iter_NN, iter_loss, launch_num, data_iter, li,\
dim_iter, 1] = f1_score_umap
else:
f1_score = TrainPerceptron(latent_space)
logg.info('%d / %d, %d / %d, %d / %d, %d / %d, %d / %d, %d / %d score = %f'%(iter_NN + 1, len(name_NN_list), \
iter_loss + 1, len(name_loss_list),\
launch_num + 1, 5,\
data_iter + 1, len(NamesDataSet),\
li + 1, len(number_layer),\
dim_iter + 1, len(compress_list),\
f1_score))
precision[iter_NN, iter_loss, launch_num, data_iter, li,\
dim_iter, 0] = f1_score
ff = h5.File(fs.join(RootPathLatentSpace, 'preceptron', 'perceptron.hdf5'), 'w')
ff.create_dataset('precision', precision)
ff.close()
"""
#%%
"""
NN1
"""
RootPathLatentSpace = ''
logg.basicConfig(filename=fs.join(RootPathLatentSpace, "LatentSpaceLogger.log"), level=logg.INFO)
logg
umap_shape_mnist = [0, 1024, 512, 256, 128, 64, 32, 16, 8, 4, 2]
PathToDataSet = ''
PathToModel = ''
NamesDataSet = ['OnlyColor.hdf5',\
'OnlyH.hdf5',\
'OnlyX.hdf5',\
'Only.hdf5']
name_loss_list = ['weighted_categorical_crossentropy',\
'dice_loss']
name_NN_list = ['ezConvAutoEncoderForMnist', 'UnetСircumcised',\
'UnetWithSeparableConvСircumcised']
num_layers = [6, 7]
iter_NN = 0
for iter_loss in range(len(name_loss_list)):
for launch_num in range(5):
for data_iter, data in enumerate(NamesDataSet):
number_layer = num_layers[iter_NN]
for li, layer_iter in enumerate(number_layer):
latent_space= readHDF5file(RootPathLatentSpace,\
'LatentSpace_Model%s_Loss%s_Launch%d_Layer%d,hdf5'%(name_NN_list[iter_NN],\
name_loss_list[iter_loss],\
launch_num + 1,\
layer_iter),\
['latent_space'])[0]
if iter_NN == 0:
compress_list = umap_shape_mnist
else:
compress_list = umap_shape_unet
precision = np.ones(len(compress_list), 2)
for dim_iter, dim in enumerate(compress_list):
if dim != 0:
ls_pca = pca_result(latent_space, dim)
f1_score_pca = TrainPerceptron(ls_pca)
logg.info('%d / %d, %d / %d, %d / %d, %d / %d, %d / %d, %d / %d pca score = %f'%(iter_NN + 1, len(name_NN_list), \
iter_loss + 1, len(name_loss_list),\
launch_num + 1, 5,\
data_iter + 1, len(NamesDataSet),\
li + 1, len(number_layer),\
dim_iter + 1, len(compress_list),\
f1_score_pca))
precision[dim_iter, 0] = f1_score_pca
ls_umap = umap_result(latent_space, dim)
f1_score_umap = TrainPerceptron(ls_umap)
logg.info('%d / %d, %d / %d, %d / %d, %d / %d, %d / %d, %d / %d umap score = %f'%(iter_NN + 1, len(name_NN_list), \
iter_loss + 1, len(name_loss_list),\
launch_num + 1, 5,\
data_iter + 1, len(NamesDataSet),\
li + 1, len(number_layer),\
dim_iter + 1, len(compress_list),\
f1_score_umap))
precision[dim_iter, 1] = f1_score_umap
else:
f1_score = TrainPerceptron(latent_space)
logg.info('%d / %d, %d / %d, %d / %d, %d / %d, %d / %d, %d / %d score = %f'%(iter_NN + 1, len(name_NN_list), \
iter_loss + 1, len(name_loss_list),\
launch_num + 1, 5,\
data_iter + 1, len(NamesDataSet),\
li + 1, len(number_layer),\
dim_iter + 1, len(compress_list),\
f1_score))
precision[dim_iter, 0] = f1_score
precision[dim_iter, 1] = f1_score
ff = h5.File(fs.join(RootPathLatentSpace, 'preceptron',\
'perceptron_Model%s_Loss%s_Launch%d_Layer%d.hdf5'%(name_NN_list[0],\
name_loss_list[iter_loss],\
launch_num + 1,\
layer_iter)), 'w')
ff.create_dataset('precision', precision)
ff.close()
#%%
"""
NN2
"""
RootPathLatentSpace = ''
logg.basicConfig(filename=fs.join(RootPathLatentSpace, "LatentSpaceLogger.log"), level=logg.INFO)
logg
umap_shape_mnist = [0, 1024, 512, 256, 128, 64, 32, 16, 8, 4, 2]
PathToDataSet = ''
PathToModel = ''
NamesDataSet = ['OnlyColor.hdf5',\
'OnlyH.hdf5',\
'OnlyX.hdf5',\
'Only.hdf5']
name_loss_list = ['weighted_categorical_crossentropy',\
'dice_loss']
name_NN_list = ['ezConvAutoEncoderForMnist', 'UnetСircumcised',\
'UnetWithSeparableConvСircumcised']
num_layers = [6, 7]
iter_NN = 0
for iter_loss in range(len(name_loss_list)):
for launch_num in range(5):
for data_iter, data in enumerate(NamesDataSet):
number_layer = num_layers[iter_NN]
for li, layer_iter in enumerate(number_layer):
latent_space= readHDF5file(RootPathLatentSpace,\
'LatentSpace_Model%s_Loss%s_Launch%d_Layer%d,hdf5'%(name_NN_list[iter_NN],\
name_loss_list[iter_loss],\
launch_num + 1,\
layer_iter),\
['latent_space'])[0]
if iter_NN == 0:
compress_list = umap_shape_mnist
else:
compress_list = umap_shape_unet
precision = np.ones(len(compress_list), 2)
for dim_iter, dim in enumerate(compress_list):
if dim != 0:
ls_pca = pca_result(latent_space, dim)
f1_score_pca = TrainPerceptron(ls_pca)
logg.info('%d / %d, %d / %d, %d / %d, %d / %d, %d / %d, %d / %d pca score = %f'%(iter_NN + 1, len(name_NN_list), \
iter_loss + 1, len(name_loss_list),\
launch_num + 1, 5,\
data_iter + 1, len(NamesDataSet),\
li + 1, len(number_layer),\
dim_iter + 1, len(compress_list),\
f1_score_pca))
precision[dim_iter, 0] = f1_score_pca
ls_umap = umap_result(latent_space, dim)
f1_score_umap = TrainPerceptron(ls_umap)
logg.info('%d / %d, %d / %d, %d / %d, %d / %d, %d / %d, %d / %d umap score = %f'%(iter_NN + 1, len(name_NN_list), \
iter_loss + 1, len(name_loss_list),\
launch_num + 1, 5,\
data_iter + 1, len(NamesDataSet),\
li + 1, len(number_layer),\
dim_iter + 1, len(compress_list),\
f1_score_umap))
precision[dim_iter, 1] = f1_score_umap
else:
f1_score = TrainPerceptron(latent_space)
logg.info('%d / %d, %d / %d, %d / %d, %d / %d, %d / %d, %d / %d score = %f'%(iter_NN + 1, len(name_NN_list), \
iter_loss + 1, len(name_loss_list),\
launch_num + 1, 5,\
data_iter + 1, len(NamesDataSet),\
li + 1, len(number_layer),\
dim_iter + 1, len(compress_list),\
f1_score))
precision[dim_iter, 0] = f1_score
precision[dim_iter, 1] = f1_score
ff = h5.File(fs.join(RootPathLatentSpace, 'preceptron',\
'perceptron_Model%s_Loss%s_Launch%d_Layer%d.hdf5'%(name_NN_list[0],\
name_loss_list[iter_loss],\
launch_num + 1,\
layer_iter)), 'w')
ff.create_dataset('precision', precision)
ff.close()
| 43.34593
| 129
| 0.504527
| 1,665
| 14,911
| 4.264865
| 0.121922
| 0.027883
| 0.038023
| 0.045627
| 0.777778
| 0.769047
| 0.744261
| 0.73384
| 0.715956
| 0.715956
| 0
| 0.037304
| 0.385152
| 14,911
| 343
| 130
| 43.472303
| 0.737238
| 0.007847
| 0
| 0.7
| 0
| 0.027273
| 0.100616
| 0.042304
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022727
| false
| 0
| 0.086364
| 0
| 0.131818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
060a600b661aa064e7f385c5329ef54d41eca4ee
| 208
|
py
|
Python
|
pursuit/agents/base_agent.py
|
goncalo-rodrigues/thesis
|
c6d531b72ddeaf2320a4346912df5519138d11d7
|
[
"MIT"
] | null | null | null |
pursuit/agents/base_agent.py
|
goncalo-rodrigues/thesis
|
c6d531b72ddeaf2320a4346912df5519138d11d7
|
[
"MIT"
] | null | null | null |
pursuit/agents/base_agent.py
|
goncalo-rodrigues/thesis
|
c6d531b72ddeaf2320a4346912df5519138d11d7
|
[
"MIT"
] | null | null | null |
class Agent(object):
def __init__(self, id):
self.id = id
def act(self, state):
raise NotImplementedError()
def transition(self, state, actions, new_state, reward):
pass
| 20.8
| 60
| 0.615385
| 25
| 208
| 4.92
| 0.64
| 0.097561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.278846
| 208
| 9
| 61
| 23.111111
| 0.82
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0.142857
| 0
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
06185ae943196fc4fed7d0073ecd998208c713c1
| 242
|
py
|
Python
|
catalyst/dl/core/__init__.py
|
andrey-avdeev/catalyst
|
fd17aaba7775c99b7e2b1ce86e60aa8f2379acc3
|
[
"Apache-2.0"
] | 1
|
2019-12-15T18:29:15.000Z
|
2019-12-15T18:29:15.000Z
|
catalyst/dl/core/__init__.py
|
andrey-avdeev/catalyst
|
fd17aaba7775c99b7e2b1ce86e60aa8f2379acc3
|
[
"Apache-2.0"
] | null | null | null |
catalyst/dl/core/__init__.py
|
andrey-avdeev/catalyst
|
fd17aaba7775c99b7e2b1ce86e60aa8f2379acc3
|
[
"Apache-2.0"
] | 1
|
2021-12-20T07:32:25.000Z
|
2021-12-20T07:32:25.000Z
|
# flake8: noqa
from .callback import (
Callback, CallbackOrder, LoggerCallback, MeterMetricsCallback,
MetricCallback, MultiMetricCallback
)
from .experiment import Experiment
from .runner import Runner
from .state import RunnerState
| 24.2
| 66
| 0.801653
| 23
| 242
| 8.434783
| 0.608696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004831
| 0.144628
| 242
| 9
| 67
| 26.888889
| 0.932367
| 0.049587
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.571429
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
881ff081df48ae08e74e97b6fa3d9edb3a08142b
| 268
|
py
|
Python
|
impacts_estimation/exceptions.py
|
openfoodfacts/off-product-environmental-impact
|
a78958fec1a21f057339184be27cf299d4fe12d1
|
[
"MIT"
] | 3
|
2021-09-07T13:46:25.000Z
|
2022-01-12T14:38:29.000Z
|
impacts_estimation/exceptions.py
|
openfoodfacts/off-product-environmental-impact
|
a78958fec1a21f057339184be27cf299d4fe12d1
|
[
"MIT"
] | 18
|
2021-09-13T16:19:26.000Z
|
2022-03-24T16:22:38.000Z
|
impacts_estimation/exceptions.py
|
openfoodfacts/off-product-environmental-impact
|
a78958fec1a21f057339184be27cf299d4fe12d1
|
[
"MIT"
] | null | null | null |
""" Exceptions used by the impact estimation program """
class RecipeCreationError(Exception):
pass
class SolverTimeoutError(Exception):
pass
class NoKnownIngredientsError(Exception):
pass
class NoCharacterizedIngredientsError(Exception):
pass
| 14.888889
| 56
| 0.764925
| 23
| 268
| 8.913043
| 0.608696
| 0.253659
| 0.263415
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16791
| 268
| 17
| 57
| 15.764706
| 0.919283
| 0.179104
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
882956852fd393395f22b544276d201ba5b82919
| 2,435
|
py
|
Python
|
reid/loss/virtual_ce.py
|
JeremyXSC/MCN-MT
|
df62689e5ab3ac3c40a9e793425036594c903cbc
|
[
"MIT"
] | 51
|
2020-10-16T01:27:29.000Z
|
2022-01-20T03:10:45.000Z
|
reid/loss/virtual_ce.py
|
TencentYoutuResearch/PersonReID-ACT
|
264b1b43f9424c297638ebf6f8f8ace09512ed29
|
[
"MIT"
] | 1
|
2021-03-09T08:46:23.000Z
|
2021-03-09T08:52:42.000Z
|
reid/loss/virtual_ce.py
|
TencentYoutuResearch/PersonReID-ACT
|
264b1b43f9424c297638ebf6f8f8ace09512ed29
|
[
"MIT"
] | 2
|
2021-01-08T08:31:15.000Z
|
2021-02-04T02:06:49.000Z
|
from __future__ import absolute_import
import torch
from torch import nn
from torch.autograd import Variable
from torch.nn import functional as F
from scipy.stats import norm
import numpy as np
class VirtualCE(nn.Module):
def __init__(self, beta=0.1):
super(VirtualCE, self).__init__()
self.beta = beta
def forward(self, inputs, targets):
# norm first
n = inputs.shape[0]
inputs = F.normalize(inputs, p=2)
allPids = targets.cpu().numpy().tolist()
# All Centers
centerHash = {
pid: F.normalize(inputs[targets == pid, :].mean(dim=0, keepdim=True), p=2).detach() for pid in set(allPids)
}
allCenters = torch.autograd.Variable(torch.cat(list(centerHash.values()))).cuda()
centerPID = torch.from_numpy(np.asarray(list(centerHash.keys())))
# sampler vs center
samplerCenter = torch.autograd.Variable(torch.cat([allCenters[centerPID == pid, :] for pid in allPids])).cuda()
# inputs--(128*1024), allCenters--(32*1024)
vce = torch.diag(torch.exp(samplerCenter.mm(inputs.t()) / self.beta)) # 1*128
centerScore = torch.exp(allCenters.mm(inputs.t()) / self.beta).sum(dim=0) # 32(center number)*128->1*128
return -torch.log(vce.div(centerScore)).mean()
class VirtualKCE(nn.Module):
def __init__(self, beta=0.1):
super(VirtualKCE, self).__init__()
self.beta = beta
def forward(self, inputs, targets):
# norm first
n = inputs.shape[0]
inputs = F.normalize(inputs, p=2)
allPids = targets.cpu().numpy().tolist()
# All Centers
centerHash = {
pid: F.normalize(inputs[targets == pid, :].mean(dim=0, keepdim=True), p=2).detach() for pid in set(allPids)
}
allCenters = torch.autograd.Variable(torch.cat(list(centerHash.values()))).cuda()
centerPID = torch.from_numpy(np.asarray(list(centerHash.keys())))
samplerCenter = torch.autograd.Variable(torch.cat([allCenters[centerPID == pid, :] for pid in allPids])).cuda()
# inputs--(128*1024), allCenters--(32*1024)
vce = torch.diag(torch.exp(samplerCenter.mm(inputs.t()) / self.beta)) # 1*128
centerScore = torch.exp(allCenters.mm(inputs.t()) / self.beta).sum(dim=0) # 32*128->1*128
kNegScore = torch.diag(inputs.mm(inputs.t()))
return -torch.log(vce.div(kNegScore + centerScore)).mean()
| 41.982759
| 119
| 0.632444
| 317
| 2,435
| 4.785489
| 0.246057
| 0.042189
| 0.029664
| 0.068556
| 0.789717
| 0.763349
| 0.763349
| 0.763349
| 0.763349
| 0.723797
| 0
| 0.034501
| 0.214374
| 2,435
| 57
| 120
| 42.719298
| 0.758495
| 0.082957
| 0
| 0.619048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.095238
| false
| 0
| 0.166667
| 0
| 0.357143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
882b2043c1fab1e715221de038c9c035a686a496
| 137
|
py
|
Python
|
tests/refs/some_pkg/__init__.py
|
amirkdv/sphinxcontrib-wiki
|
b9369eb0e3fc04ba2670cd2d95167a1e73fadf85
|
[
"MIT"
] | 1
|
2016-11-09T00:03:29.000Z
|
2016-11-09T00:03:29.000Z
|
tests/refs/some_pkg/__init__.py
|
amirkdv/sphinxcontrib-wiki
|
b9369eb0e3fc04ba2670cd2d95167a1e73fadf85
|
[
"MIT"
] | null | null | null |
tests/refs/some_pkg/__init__.py
|
amirkdv/sphinxcontrib-wiki
|
b9369eb0e3fc04ba2670cd2d95167a1e73fadf85
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
First paragraph of docs.
.. wikisection:: faq
:title: Why?
Well...
Second paragraph of docs.
"""
| 10.538462
| 25
| 0.562044
| 16
| 137
| 4.8125
| 0.8125
| 0.285714
| 0.38961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009615
| 0.240876
| 137
| 12
| 26
| 11.416667
| 0.730769
| 0.919708
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
882ff020b0aed594beed98af9ed8f3aaf6a63b20
| 165,758
|
py
|
Python
|
tests/test/mixed_vschema/vschema_basic.py
|
mace84/script-languages
|
d586cbe212bbb4efbfb39e095183729c65489360
|
[
"MIT"
] | 6
|
2019-01-09T11:55:15.000Z
|
2021-06-25T19:52:42.000Z
|
tests/test/mixed_vschema/vschema_basic.py
|
mace84/script-languages
|
d586cbe212bbb4efbfb39e095183729c65489360
|
[
"MIT"
] | 65
|
2018-12-12T08:40:38.000Z
|
2022-02-28T09:19:45.000Z
|
tests/test/mixed_vschema/vschema_basic.py
|
mace84/script-languages
|
d586cbe212bbb4efbfb39e095183729c65489360
|
[
"MIT"
] | 9
|
2018-11-23T08:59:09.000Z
|
2020-02-04T12:56:35.000Z
|
#!/usr/opt/bs-python-2.7/bin/python
# -*- coding: utf-8 -*-
import unittest
import datetime
import re
from datetime import datetime, timedelta
from textwrap import dedent
import sys
import os
sys.path.append(os.path.realpath(__file__ + '/../../../lib'))
import udf
import exatest
from vschema_common import VSchemaTest, TestUtils
# Assumptions
# - JDBC Adapter correct for EXASolution remote database (except pushdown, which is not used here).
#
#
# Virtual Schema
# Corner Cases
# - Huge adapaterNotes => Add test
# - Many tables/columns (long json) => Test how much is possible
#@unittest.skip("skipped test")
class CreateVirtualSchemaTest(VSchemaTest):
setupDone = False
def setUp(self):
# TODO Remove this workaround
if self.__class__.setupDone:
self.query(''' OPEN SCHEMA VS1 ''')
return
# Create a simple native schema with tables
self.query('DROP SCHEMA IF EXISTS NATIVE CASCADE')
self.query('CREATE SCHEMA NATIVE')
self.query('CREATE TABLE T1(a int, b varchar(100), c double)')
self.query('CREATE TABLE T2(a date, b timestamp, c boolean)')
self.query('CREATE TABLE T3(c1 char, c2 decimal(18,5))')
self.query('CREATE TABLE T4(c1 integer identity)')
self.query('CREATE TABLE T5(c1 int identity)')
self.query('CREATE TABLE T6(c1 smallint identity)')
self.query('CREATE TABLE T7(c1 decimal(5,0) identity)')
self.query('''CREATE TABLE T8(c1 boolean default TRUE, c2 char(10) default 'foo', c3 date default '2016-06-01', c4 decimal(5,0) default 0)''')
self.query('''CREATE TABLE T9(c1 double default 1E2, c2 geometry default 'POINT(2 5)', c3 interval year to month default '3-5', c4 interval day to second default '2 12:50:10.123')''')
self.query('''CREATE TABLE TA(c1 timestamp default '2016-06-01 00:00:01.000', c2 timestamp with local time zone default '2016-06-01 00:00:02.000', c3 varchar(100) default 'bar')''')
self.query('''CREATE TABLE TB(c1 boolean default NULL, c2 char(10) default NULL, c3 date default NULL, c4 decimal(5,0) default NULL)''')
self.query('''CREATE TABLE TC(c1 double default NULL, c2 geometry default NULL, c3 interval year to month default NULL, c4 interval day to second default NULL)''')
self.query('''CREATE TABLE TD(c1 timestamp default NULL, c2 timestamp with local time zone default NULL, c3 varchar(100) default NULL)''')
self.query('''CREATE TABLE TE(c1 integer comment is '', c2 integer comment is 'This is a comment.')''')
self.query('''CREATE TABLE TF(c1 integer NOT NULL, c2 varchar(100) NOT NULL, c3 double NOT NULL)''')
self.query('DROP SCHEMA IF EXISTS NATIVE2 CASCADE')
self.query('CREATE SCHEMA NATIVE2')
self.commit() # commit, otherwise adapter doesn't see tables
self.createJdbcAdapter(schemaName="ADAPTER", adapterName="JDBC_ADAPTER")
self.createVirtualSchemaJdbc("VS2", "NATIVE2", "ADAPTER.JDBC_ADAPTER", True)
self.createVirtualSchemaJdbc("VS1", "NATIVE", "ADAPTER.JDBC_ADAPTER", True)
self.commit()
self.__class__.setupDone = True
def testCurrentSchema(self):
rows = self.query(''' SELECT CURRENT_SCHEMA ''')
self.assertRowsEqual([("VS1",)], rows)
def testCAT(self):
rows = self.query(''' SELECT * FROM CAT order by table_name ''')
self.assertRowsEqual([("T1","TABLE"),("T2","TABLE"),("T3","TABLE"),
("T4","TABLE"),("T5","TABLE"),("T6","TABLE"),("T7","TABLE"),
("T8","TABLE"),("T9","TABLE"),("TA","TABLE"),("TB","TABLE"),
("TC","TABLE"),("TD","TABLE"),("TE","TABLE"),("TF","TABLE")], rows)
def testDescribe(self):
for tableName in ["T1", "T2", "T3", "T4", "T5", "T6", "T7", "T8", "T9", "TA", "TB", "TC", "TD", "TE", "TF"]:
rows = self.query(''' DESCRIBE {t} '''.format(t = tableName))
self.assertEqual(['COLUMN_NAME', 'SQL_TYPE', 'NULLABLE', 'DISTRIBUTION_KEY', 'PARTITION_KEY'], self.columnNames())
rows_native = self.query(''' DESCRIBE native.{t} '''.format(t = tableName))
self.assertEqual(self.getColumn(rows_native,0), self.getColumn(rows,0))
# GD201606: TODO: This is only a workaround for the GEOMETRY Column Type
if (tableName == "T9" or tableName == "TC"):
self.assertEqual(['DOUBLE', 'GEOMETRY(3857)', 'INTERVAL YEAR(2) TO MONTH','INTERVAL DAY(2) TO SECOND(3)'], self.getColumn(rows,1))
else:
self.assertEqual(self.getColumn(rows_native,1), self.getColumn(rows,1))
# nullable and distributionkey column should be NULL
self.assertColumnEqualConst(rows, 2, None)
self.assertColumnEqualConst(rows, 3, None)
def testSysTableSchemas(self):
rows = self.query('''
SELECT * FROM EXA_SCHEMAS WHERE SCHEMA_NAME = 'VS1'
''')
self.assertEqual(['SCHEMA_NAME', 'SCHEMA_OWNER', 'SCHEMA_OBJECT_ID', 'SCHEMA_IS_VIRTUAL', 'SCHEMA_COMMENT'], self.columnNames())
self.assertEqual(1, self.rowcount())
self.assertEqual("VS1", rows[0][0])
self.assertEqual("SYS", rows[0][1])
schemaObjectId = rows[0][2]
self.assertTrue(schemaObjectId != None and schemaObjectId > 0)
self.assertEqual(True, rows[0][3])
self.assertEqual(None, rows[0][4])
rows = self.query('''
SELECT * FROM EXA_VIRTUAL_SCHEMAS WHERE SCHEMA_NAME = 'VS1'
''')
self.assertEqual(['SCHEMA_NAME', 'SCHEMA_OWNER', 'SCHEMA_OBJECT_ID', 'ADAPTER_SCRIPT', 'LAST_REFRESH', 'LAST_REFRESH_BY', 'ADAPTER_NOTES'], self.columnNames())
self.assertEqual(1, self.rowcount())
self.assertEqual("VS1", rows[0][0])
self.assertEqual("SYS", rows[0][1])
self.assertEqual(schemaObjectId, rows[0][2])
self.assertEqual("ADAPTER.JDBC_ADAPTER", rows[0][3])
self.assertEqual("SYS", rows[0][5])
# Check last refreshed. Take server time
schemaLastRefreshed = rows[0][4]
currentTime = self.queryCurrentTimestamp()
diff = currentTime - schemaLastRefreshed
self.assertGreaterEqual(diff.total_seconds(), 0)
self.assertLess(diff.total_seconds(), 10)
def testSysTableTables(self):
rows = self.query('''
SELECT * FROM EXA_DBA_TABLES WHERE TABLE_SCHEMA='VS1' ORDER BY TABLE_NAME
''')
self.assertEqual(['TABLE_SCHEMA', 'TABLE_NAME', 'TABLE_OWNER', 'TABLE_OBJECT_ID', 'TABLE_IS_VIRTUAL', 'TABLE_HAS_DISTRIBUTION_KEY', 'TABLE_HAS_PARTITION_KEY', 'TABLE_ROW_COUNT', 'DELETE_PERCENTAGE', 'TABLE_COMMENT'], self.columnNames())
self.assertEqual(15, self.rowcount())
self.assertColumnEqualConst(rows, 0, 'VS1')
self.assertEqual(["T1", "T2", "T3", "T4", "T5", "T6", "T7", "T8", "T9", "TA", "TB", "TC", "TD", "TE", "TF"], self.getColumn(rows,1))
self.assertColumnEqualConst(rows, 2, 'SYS')
for i in range(0,self.rowcount()):
tableObjectId = rows[i][3]
self.assertTrue(tableObjectId != None and tableObjectId > 0)
self.assertColumnEqualConst(rows, 4, True)
self.assertColumnEqualConst(rows, 5, None)
self.assertColumnEqualConst(rows, 7, None)
self.assertColumnEqualConst(rows, 8, None)
rows = self.query('''
SELECT * FROM EXA_DBA_VIRTUAL_TABLES WHERE TABLE_SCHEMA = 'VS1' ORDER BY TABLE_NAME
''')
self.assertEqual(['TABLE_SCHEMA', 'TABLE_NAME', 'TABLE_OBJECT_ID', 'LAST_REFRESH', 'LAST_REFRESH_BY', 'ADAPTER_NOTES'], self.columnNames())
self.assertEqual(15, self.rowcount())
self.assertColumnEqualConst(rows, 0, 'VS1')
self.assertEqual(["T1", "T2", "T3", "T4", "T5", "T6", "T7", "T8", "T9", "TA", "TB", "TC", "TD", "TE", "TF"], self.getColumn(rows,1))
for i in range(0,self.rowcount()):
tableObjectId = rows[i][2]
self.assertTrue(tableObjectId != None and tableObjectId > 0)
# check last refreshed
tableLastRefreshed = rows[i][3]
diff = tableLastRefreshed - self.getLastSchemaRefresh('VS1')
self.assertGreaterEqual(diff.total_seconds(), 0)
self.assertLess(diff.total_seconds(), 10)
self.assertColumnEqualConst(rows, 4, 'SYS')
def testSysTableColumns(self):
rows = self.query('''
SELECT * FROM EXA_DBA_COLUMNS WHERE COLUMN_SCHEMA = 'VS1' ORDER BY COLUMN_TABLE, COLUMN_ORDINAL_POSITION
''')
expectedCols = ["COLUMN_SCHEMA", "COLUMN_TABLE", "COLUMN_OBJECT_TYPE", "COLUMN_NAME", "COLUMN_TYPE", "COLUMN_TYPE_ID",
"COLUMN_MAXSIZE", "COLUMN_NUM_PREC", "COLUMN_NUM_SCALE", "COLUMN_ORDINAL_POSITION", "COLUMN_IS_VIRTUAL",
"COLUMN_IS_NULLABLE", "COLUMN_IS_DISTRIBUTION_KEY", "COLUMN_PARTITION_KEY_ORDINAL_POSITION", "COLUMN_DEFAULT", "COLUMN_IDENTITY", "COLUMN_OWNER", "COLUMN_OBJECT_ID",
"STATUS", "COLUMN_COMMENT"]
self.assertEqual(expectedCols, self.columnNames())
self.assertEqual(39, self.rowcount())
rows_native = self.query('''
SELECT * FROM EXA_DBA_COLUMNS WHERE COLUMN_SCHEMA = 'VS1' ORDER BY COLUMN_TABLE, COLUMN_ORDINAL_POSITION
''')
self.assertEqual(expectedCols, self.columnNames())
self.assertEqual(39, self.rowcount())
for i in range (1,10):
self.assertEqual(self.getColumn(rows_native,i), self.getColumn(rows,i))
self.assertColumnEqualConst(rows, 10, True)
for i in range (11,14):
self.assertColumnEqualConst(rows, i, None)
self.assertEqual(self.getColumn(rows_native,14), self.getColumn(rows,14))
self.assertEqual(self.getColumn(rows_native,15), self.getColumn(rows,15))
self.assertColumnEqualConst(rows, 16, 'SYS')
for i in range(0,self.rowcount()):
colObjectId = rows[i][17]
self.assertTrue(colObjectId != None and colObjectId > 0)
self.assertColumnEqualConst(rows, 18, None)
self.assertEqual(self.getColumn(rows_native,19), self.getColumn(rows,19))
rows = self.query('''
SELECT * FROM EXA_DBA_VIRTUAL_COLUMNS WHERE COLUMN_SCHEMA = 'VS1' ORDER BY COLUMN_TABLE, COLUMN_NAME
''')
self.assertEqual(["COLUMN_SCHEMA", "COLUMN_TABLE", "COLUMN_NAME", "COLUMN_OBJECT_ID", "ADAPTER_NOTES"], self.columnNames())
self.assertEqual(39, self.rowcount())
self.assertColumnEqualConst(rows, 0, 'VS1')
rows_native = self.query('''
SELECT COLUMN_TABLE, COLUMN_NAME FROM EXA_DBA_COLUMNS WHERE COLUMN_SCHEMA='NATIVE' ORDER BY COLUMN_TABLE, COLUMN_NAME
''')
self.assertEqual(self.getColumn(rows_native,0), self.getColumn(rows,1))
self.assertEqual(self.getColumn(rows_native,1), self.getColumn(rows,2))
for i in range(0,self.rowcount()):
colObjectId = rows[i][3]
self.assertTrue(colObjectId != None and colObjectId > 0)
# self.assertEqual([None]*self.rowcount(), self.getColumn(rows,4))
def testSysTableSchemaProperties(self):
rows = self.query('''
SELECT * FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME='VS1' ORDER BY PROPERTY_NAME
''')
self.assertEqual(["SCHEMA_NAME", "SCHEMA_OBJECT_ID", "PROPERTY_NAME", "PROPERTY_VALUE"], self.columnNames())
self.assertEqual(7, self.rowcount())
self.assertColumnEqualConst(rows, 0, 'VS1')
for i in range(0,self.rowcount()):
colObjectId = rows[i][1]
self.assertTrue(colObjectId != None and colObjectId > 0)
self.assertEqual(["CONNECTION_STRING", "EXCEPTION_HANDLING","IS_LOCAL", "PASSWORD", "SCHEMA_NAME", "SQL_DIALECT", "USERNAME"], self.getColumn(rows,2))
self.assertEqual(["jdbc:exa:{hostport}".format(hostport = udf.opts.server), "NONE", "True", "exasol", "NATIVE", "EXASOL", "sys"], self.getColumn(rows,3))
def testSysTableObjects(self):
rows = self.query('''
SELECT OBJECT_NAME, OBJECT_TYPE from EXA_DBA_OBJECTS WHERE OBJECT_IS_VIRTUAL = true AND ROOT_NAME = 'VS1' ORDER BY OBJECT_NAME
''')
self.assertEqual(["T1", "T2", "T3", "T4", "T5", "T6", "T7", "T8", "T9", "TA", "TB", "TC", "TD", "TE", "TF"], self.getColumn(rows,0))
self.assertColumnEqualConst(rows, 1, 'TABLE')
def testCreateWithConnection(self):
# Create a Virtual Schema, now using a connection (will drop the existing virtual schema w/o connection)
self.createVirtualSchemaJdbc("VS1", "NATIVE", "ADAPTER.JDBC_ADAPTER", True, useConnection=True)
rows = self.query('''
SELECT OBJECT_NAME, OBJECT_TYPE from EXA_DBA_OBJECTS WHERE OBJECT_IS_VIRTUAL = true AND ROOT_NAME = 'VS1' ORDER BY OBJECT_NAME
''')
self.assertEqual(["T1", "T2", "T3", "T4", "T5", "T6", "T7", "T8", "T9", "TA", "TB", "TC", "TD", "TE", "TF"], self.getColumn(rows,0))
self.assertColumnEqualConst(rows, 1, 'TABLE')
def testNotNull(self):
# Not null is currently not carried over to the virtual schema.
rows = self.query('''
SELECT COLUMN_IS_NULLABLE FROM EXA_ALL_COLUMNS WHERE COLUMN_TABLE='TF' AND COLUMN_SCHEMA='NATIVE';
''')
self.assertEqual([False, False, False], self.getColumn(rows,0))
rows = self.query('''
SELECT COLUMN_IS_NULLABLE FROM EXA_ALL_COLUMNS WHERE COLUMN_TABLE='TF' AND COLUMN_SCHEMA='VS1';
''')
self.assertEqual([None, None, None], self.getColumn(rows,0))
def testEmptySchema(self):
# Test JDBC adapter on empty schema
with self.assertRaisesRegexp(Exception, '''object VS2.DUMMY not found'''):
rows = self.query("SELECT * FROM VS2.DUMMY")
    def testQuotedSchemaNames(self):
        """Quoted (case-sensitive) schema/adapter names must be referenced with matching quoting.

        Creates adapters under quoted and unquoted names and verifies that only
        references with the matching quoting resolve in CREATE VIRTUAL SCHEMA.
        """
        # Adapter lives under a quoted schema AND a quoted adapter name.
        self.createFastAdapter(schemaName='''"quoted_adapter"''', adapterName='''"fast_adapter"''')
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        # None of these partially/fully unquoted references resolve to the quoted adapter.
        with self.assertRaisesRegexp(Exception, '''Could not find adapter script'''):
            self.query('''CREATE VIRTUAL SCHEMA VS1 USING quoted_adapter.fast_adapter ''')
        with self.assertRaisesRegexp(Exception, '''Could not find adapter script'''):
            self.query('''CREATE VIRTUAL SCHEMA VS1 USING quoted_adapter."fast_adapter" ''')
        with self.assertRaisesRegexp(Exception, '''Could not find adapter script'''):
            self.query('''CREATE VIRTUAL SCHEMA VS1 USING "quoted_adapter".fast_adapter ''')
        # Mixed combinations: unquoted schema / quoted adapter and vice versa.
        self.createFastAdapter(schemaName='''quoted_adapter''', adapterName='''"fast_adapter"''')
        self.createFastAdapter(schemaName='''"quoted_adapter2"''', adapterName='''fast_adapter''')
        # Fully quoted reference matches the fully quoted adapter.
        self.query('''CREATE VIRTUAL SCHEMA VS1 USING "quoted_adapter"."fast_adapter" ''')
        rows = self.query('''
            SELECT * from VS1.DUMMY
            ''')
        self.assertRowsEqual([('FOO', 'BAR')],rows)
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        # Quoted schema + unquoted adapter matches "quoted_adapter2".FAST_ADAPTER.
        self.query('''CREATE VIRTUAL SCHEMA VS1 USING "quoted_adapter2".fast_adapter ''')
        rows = self.query('''
            SELECT * from VS1.DUMMY
            ''')
        self.assertRowsEqual([('FOO', 'BAR')],rows)
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        # Unquoted schema + quoted adapter matches QUOTED_ADAPTER."fast_adapter".
        self.query('''CREATE VIRTUAL SCHEMA VS1 USING quoted_adapter."fast_adapter" ''')
        rows = self.query('''
            SELECT * from VS1.DUMMY
            ''')
        self.assertRowsEqual([('FOO', 'BAR')],rows)
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        #self.query('''DROP SCHEMA IF EXISTS "quoted_adapter" CASCADE''')
        #self.query('''DROP SCHEMA IF EXISTS quoted_adapter CASCADE''')
class CreateForceVirtualSchemaTest(VSchemaTest):
    """CREATE FORCE VIRTUAL SCHEMA must succeed even when the adapter script fails."""
    def testCreateForceVirtualSchema(self):
        # FAILING_ADAPTER raises on every callback (see createFailingAdapter).
        self.createFailingAdapter(schemaName="ADAPTER", adapterName="FAILING_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('CREATE FORCE VIRTUAL SCHEMA VS1 USING ADAPTER.FAILING_ADAPTER')
        # The schema itself exists ...
        rows = self.query("SELECT * FROM EXA_SCHEMAS WHERE SCHEMA_NAME = 'VS1' ")
        self.assertEqual(1, self.rowcount())
        # ... but it contains no virtual objects, since the adapter never delivered metadata.
        rows = self.query('''
            SELECT * from EXA_DBA_OBJECTS WHERE OBJECT_IS_VIRTUAL = true AND ROOT_NAME = 'VS1'
            ''')
        self.assertEqual(0, self.rowcount())
class CreateVirtualSchemaWithProperties(VSchemaTest):
    """Validation of the WITH <prop>=<value> clause of CREATE VIRTUAL SCHEMA.

    Empty-string and NULL values, as well as duplicate property names, must be
    rejected at creation time.
    """
    def testWithNormalValue(self):
        """A regular property value is stored and visible in the system table."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('''CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER WITH UNUSED = 'default' ''')
        rows = self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertRowsEqual([('UNUSED', 'default')],rows)
    def testPropertyValueEmptyString(self):
        """An empty-string property value is rejected."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        with self.assertRaisesRegexp(Exception, '''Value of property UNUSED must not be null or empty.'''):
            self.query('''CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER WITH UNUSED = '' ''')
    def testPropertyValueNull(self):
        """A NULL property value is rejected."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        with self.assertRaisesRegexp(Exception, '''Value of property UNUSED must not be null or empty.'''):
            self.query('''CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER WITH UNUSED = null ''')
    def testDuplicate(self):
        """The same property name may not appear twice in the WITH clause."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        with self.assertRaisesRegexp(Exception, '''Duplicate property names \(UNUSED\) are not allowed.'''):
            self.query('''CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER WITH UNUSED = 'default1' UNUSED = 'default2' ''')
    def testDuplicateWithEmptyString(self):
        """A duplicate where the second value is empty fails on the empty value first."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        with self.assertRaisesRegexp(Exception, '''Value of property UNUSED must not be null or empty.'''):
            self.query('''CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER WITH UNUSED = 'default1' UNUSED = '' ''')
    def testDuplicateWithNull(self):
        """A duplicate where the second value is NULL fails on the NULL value first."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        with self.assertRaisesRegexp(Exception, '''Value of property UNUSED must not be null or empty.'''):
            self.query('''CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER WITH UNUSED = 'default1' UNUSED = null ''')
#@unittest.skip("skipped test")
class UnicodeAndCaseSensitivityTest(VSchemaTest):
    """Multi-byte and case-sensitive identifiers must survive the virtual-schema round trip."""
    def setUp(self):
        # Build a native table whose name and column names mix quoting, case
        # and multi-byte UTF-8 characters, then mirror it as virtual schema VS1.
        self.createJdbcAdapter(schemaName="ADAPTER", adapterName="JDBC_ADAPTER")
        self.query('DROP SCHEMA IF EXISTS NATIVE CASCADE')
        self.query('CREATE SCHEMA NATIVE')
        # 茶 is 3-byte and ¥ is 2 byte in utf-8.
        self.query(u'''
            CREATE OR REPLACE TABLE "¥tAbLe"("a茶A" double, "b¥B" varchar(3)) ''')
        self.query(u'''
            INSERT INTO "¥tAbLe" VALUES
            (1.1, 'v茶V'),
            (2.2, 'v¥V') ''')
        self.commit()
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.createVirtualSchemaJdbc("VS1", "NATIVE", "ADAPTER.JDBC_ADAPTER", True) # TODO Make this IS_LOCAL = False as soon as the udf_wrapper.tcs file supports etlJdbcConfigDir
        self.commit()
    def test(self):
        """Data, metadata and property values keep their exact unicode spelling."""
        # Create with IS_LOCAL true, because the EXASolution jdbc driver returns too big integer types which are converted
        rows = self.queryUnicode(u'''
            SELECT "a茶A", "b¥B" FROM VS1."¥tAbLe" ORDER BY 1 ''')
        # Bug: Column names are requested from pyodbc as UTF-16 or so (according to MT), but 茶 doesn't fit into two bytes.
        #self.assertEqual([u"a茶A", u"b¥B"], self.columnNames())
        self.assertRowsEqual([(1.1, u'v茶V'), (2.2, u'v¥V')],rows)
        # Join system tables to make sure that everything is consistent
        rows = self.decodeUtf8Fields(self.queryColumnMetadata('VS1'))
        self.assertEqual([u"¥tAbLe", u"¥tAbLe"], self.getColumnByName(rows, 'TABLE_NAME'))
        self.assertEqual([u"a茶A", u"b¥B"], self.getColumnByName(rows, 'COLUMN_NAME'))
        # Test special characters in properties
        with self.assertRaisesRegexp(Exception, 'Quoted property names and special characters in the property name are not allowed'):
            self.query(u'''
                ALTER VIRTUAL SCHEMA VS1 SET "Foo茶¥Bar" = 'v茶V¥v'
                ''')
        with self.assertRaisesRegexp(Exception, '''character is not allowed within property names'''):
            self.query(u'''
                ALTER VIRTUAL SCHEMA VS1 SET A.B = 'v茶V¥v'
                ''')
        # Unicode is fine in the property VALUE, just not in the property name.
        self.query(u'''
            ALTER VIRTUAL SCHEMA VS1 SET UNUSED = 'v茶V¥v'
            ''')
        propValue = self.queryScalarUnicode(u'''
            SELECT PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1' AND PROPERTY_NAME = 'UNUSED'
            ''')
        self.assertEqual(u'v茶V¥v', propValue)
class RefreshTest(VSchemaTest):
    """ALTER VIRTUAL SCHEMA ... REFRESH: whole-schema and per-table metadata refresh."""
    def setUp(self):
        # Create a simple native schema with tables
        self.query('DROP SCHEMA IF EXISTS NATIVE CASCADE')
        self.query('CREATE SCHEMA NATIVE')
        self.query('CREATE TABLE T1(a int, b varchar(100))')
        self.query('CREATE TABLE T2(c date)')
        self.query('CREATE TABLE T3(d double)')
        self.commit() # commit, otherwise adapter doesn't see tables
        self.createJdbcAdapter(schemaName="ADAPTER", adapterName="JDBC_ADAPTER")
        self.createVirtualSchemaJdbc("VS1", "NATIVE", "ADAPTER.JDBC_ADAPTER", True)
        self.commit()
    def testRefreshAll(self):
        """REFRESH without a table list picks up new, changed and dropped tables."""
        # Refresh all (new/changed/deleted tables). Metadata for all tables are rewritten.
        self.query('DROP TABLE NATIVE.T3')
        self.query('ALTER TABLE NATIVE.T2 ADD COLUMN d int')
        self.query('CREATE TABLE NATIVE.T4(a int)')
        self.commit()
        timeBefore = self.queryCurrentTimestamp()
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 REFRESH ''')
        self.commit()
        timeAfter = self.queryCurrentTimestamp()
        # Check last_refreshed
        self.assertBetween(self.getLastTableRefresh ('VS1', 'T1'), timeBefore, timeAfter)
        self.assertBetween(self.getLastTableRefresh ('VS1', 'T2'), timeBefore, timeAfter)
        # check metadata
        rows = self.queryColumnMetadata('VS1')
        self.assertRowsEqual([('T1', 'A', 'DECIMAL(18,0)'), ('T1', 'B', 'VARCHAR(100) UTF8'), ('T2', 'C', 'DATE'), ('T2', 'D', 'DECIMAL(18,0)'), ('T4', 'A', 'DECIMAL(18,0)')], rows)
    def testRefreshTables(self):
        """REFRESH TABLES only refreshes the listed tables; others keep their old metadata."""
        # Remember date from T1 for later
        table1Before = self.getLastTableRefresh('VS1', 'T1')
        # Refresh specific Tables
        self.query('ALTER TABLE NATIVE.T1 ADD COLUMN e int') # this should be ignored during the refresh
        self.query('DROP TABLE NATIVE.T2')
        self.query('ALTER TABLE NATIVE.T3 ADD COLUMN f int')
        self.query('CREATE TABLE NATIVE.T4(e double)')
        # Now we have T1, T3, T4
        self.commit()
        timeBefore = self.queryCurrentTimestamp()
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 REFRESH TABLES T2 T3 T4 ''')
        self.commit()
        timeAfter = self.queryCurrentTimestamp()
        # check last refresh
        self.assertBetween(self.getLastSchemaRefresh('VS1'), timeBefore, timeAfter)
        self.assertEqual   (self.getLastTableRefresh ('VS1', 'T1'), table1Before) # t1 should be unchanged
        self.assertBetween(self.getLastTableRefresh ('VS1', 'T3'), timeBefore, timeAfter)
        self.assertBetween(self.getLastTableRefresh ('VS1', 'T4'), timeBefore, timeAfter)
        # check metadata
        rows = self.queryColumnMetadata('VS1')
        self.assertRowsEqual([('T1', 'A', 'DECIMAL(18,0)'), ('T1', 'B', 'VARCHAR(100) UTF8'), ('T3', 'D', 'DOUBLE'), ('T3', 'F', 'DECIMAL(18,0)'), ('T4', 'E', 'DOUBLE')], rows)
class SetPropertiesTest(VSchemaTest):
    """ALTER VIRTUAL SCHEMA ... SET: creating, changing and deleting properties.

    Setting a property to the empty string or NULL deletes it. The last tests
    use a purpose-built adapter (createTestPropertyAdapter) that asserts the
    exact old/new property maps it receives in the setProperties callback.
    """
    def testCreateProperty(self):
        """SET on a schema without properties creates the property."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        self.query(u'''
            ALTER VIRTUAL SCHEMA VS1 SET UNUSED = 'default'
            ''')
        rows = self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertRowsEqual([('UNUSED', 'default')],rows)
    def testChangeProperty(self):
        """SET overwrites an existing property value."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET UNUSED = 'default'
            ''')
        rows = self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertRowsEqual([('UNUSED', 'default')],rows)
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET UNUSED = 'newValue'
            ''')
        rows = self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertRowsEqual([('UNUSED', 'newValue')],rows)
    def testChangePropertyFromCreate(self):
        """SET overwrites a property that was set in the WITH clause of CREATE."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('''CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER WITH UNUSED = 'default' ''')
        rows = self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertRowsEqual([('UNUSED', 'default')],rows)
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET UNUSED = 'newValue'
            ''')
        rows = self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertRowsEqual([('UNUSED', 'newValue')],rows)
    def testDeleteNonExistingPropertyWithEmptyString(self):
        """Setting a non-existing property to '' is a no-op (no property created)."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET UNUSED = ''
            ''')
        self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertEqual(0, self.rowcount())
    def testDeleteNonExistingPropertyWithNull(self):
        """Setting a non-existing property to NULL is a no-op (no property created)."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET UNUSED = NULL
            ''')
        self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertEqual(0, self.rowcount())
    def testDeletePropertyWithEmptyString(self):
        """Setting an existing property to '' deletes it."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET UNUSED = 'default'
            ''')
        rows = self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertRowsEqual([('UNUSED', 'default')],rows)
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET UNUSED = ''
            ''')
        self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertEqual(0, self.rowcount())
    def testDeletePropertyWithNull(self):
        """Setting an existing property to NULL deletes it."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET UNUSED = 'default'
            ''')
        rows = self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertRowsEqual([('UNUSED', 'default')],rows)
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET UNUSED = NULL
            ''')
        self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertEqual(0, self.rowcount())
    def testDeletePropertyFromCreateWithEmptyString(self):
        """Setting a CREATE-time property to '' deletes it."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('''CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER WITH UNUSED = 'default' ''')
        rows = self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertRowsEqual([('UNUSED', 'default')],rows)
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET UNUSED = ''
            ''')
        self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertEqual(0, self.rowcount())
    def testDeletePropertyFromCreateWithNull(self):
        """Setting a CREATE-time property to NULL deletes it."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('''CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER WITH UNUSED = 'default' ''')
        rows = self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertRowsEqual([('UNUSED', 'default')],rows)
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET UNUSED = NULL
            ''')
        self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertEqual(0, self.rowcount())
    def testDeletePropertyTwice(self):
        """Deleting an already-deleted property again is harmless."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET UNUSED = 'default'
            ''')
        rows = self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertRowsEqual([('UNUSED', 'default')],rows)
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET UNUSED = NULL
            ''')
        self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertEqual(0, self.rowcount())
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET UNUSED = NULL
            ''')
        self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertEqual(0, self.rowcount())
    def testDeleteOnlyOneProperty(self):
        """Deleting one property in a SET leaves sibling properties untouched."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET UNUSED = 'default' UNUSED2 = 'default2'
            ''')
        rows = self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1' ORDER BY PROPERTY_NAME
            ''')
        self.assertRowsEqual([('UNUSED', 'default'), ('UNUSED2', 'default2')],rows)
        # One statement may update one property and delete another at the same time.
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET UNUSED2 = 'Not deleted' UNUSED = NULL
            ''')
        rows = self.query('''
            SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'
            ''')
        self.assertRowsEqual([('UNUSED2', 'Not deleted')],rows)
    def testDuplicatePropertyName(self):
        """The same property name may not appear twice in one SET, in any value combination."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        with self.assertRaisesRegexp(Exception, 'Duplicate property names \\(UNUSED\\) are not allowed.'):
            self.query('''
                ALTER VIRTUAL SCHEMA VS1 SET UNUSED = 'default' UNUSED = 'default2'
                ''')
        with self.assertRaisesRegexp(Exception, 'Duplicate property names \\(UNUSED\\) are not allowed.'):
            self.query('''
                ALTER VIRTUAL SCHEMA VS1 SET UNUSED = null UNUSED = 'default2'
                ''')
        with self.assertRaisesRegexp(Exception, 'Duplicate property names \\(UNUSED\\) are not allowed.'):
            self.query('''
                ALTER VIRTUAL SCHEMA VS1 SET UNUSED = 'default' UNUSED = null
                ''')
        with self.assertRaisesRegexp(Exception, 'Duplicate property names \\(UNUSED\\) are not allowed.'):
            self.query('''
                ALTER VIRTUAL SCHEMA VS1 SET UNUSED = null UNUSED = null
                ''')
    def testOldPropertiesInSchemaMetadataInfo(self):
        """The adapter receives exactly the expected old+new property maps (asserted in the adapter)."""
        self.createTestPropertyAdapter(schemaName="ADAPTER", adapterName="TEST_PROPERTY_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.TEST_PROPERTY_ADAPTER')
        # These two statements match expectedOldProperties/expectedNewProperties
        # hard-coded in the adapter script below, so no exception is raised.
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET P1='1' P2='2'
            ''')
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET P1='1new' P2=null P3='3'
            ''')
    def testInvalidPropertiesInSchemaMetadataInfo(self):
        # Invalid properties => Add test with custom adapter for invalid properties displaying correct error message
        self.createTestPropertyAdapter(schemaName="ADAPTER", adapterName="TEST_PROPERTY_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.TEST_PROPERTY_ADAPTER')
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET P2='2'
            ''')
        # Old property state {P2: '2'} does not match the adapter's expectation.
        with self.assertRaisesRegexp(Exception, 'Expected different values for old properties'):
            self.query('''
                ALTER VIRTUAL SCHEMA VS1 SET P1='1'
                ''')
        with self.assertRaisesRegexp(Exception, 'Expected different values for old properties'):
            self.query('''
                ALTER VIRTUAL SCHEMA VS1 SET P1='42' P2='2'
                ''')
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.TEST_PROPERTY_ADAPTER')
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET P1='1' P2='2'
            ''')
        # Correct old state, but the new property set differs from the expectation.
        with self.assertRaisesRegexp(Exception, 'Expected different values for new properties'):
            self.query('''
                ALTER VIRTUAL SCHEMA VS1 SET P1=null P2=null P3='4'
                ''')
    def createTestPropertyAdapter(self, schemaName="ADAPTER", adapterName="FAST_ADAPTER"):
        """Creates an adapter whose setProperties callback asserts the exact property maps it receives."""
        self.dropOldAdapter(schemaName, adapterName)
        self.query('CREATE SCHEMA {schema}'.format(schema=schemaName))
        self.query(udf.fixindent('''
            CREATE OR REPLACE PYTHON ADAPTER SCRIPT {schema}.{adapter} AS
            import json
            import string
            def adapter_call(request):
                # database expects utf-8 encoded string of type str. unicode not yet supported
                root = json.loads(request)
                if root["type"] == "createVirtualSchema":
                    res = {{
                        "type": "createVirtualSchema",
                        "schemaMetadata": {{
                            "tables": [
                            {{
                                "name": "DUMMY",
                                "columns": [{{
                                    "name": "KEY",
                                    "dataType": {{"type": "VARCHAR", "size": 2000000}}
                                }}]
                            }}]
                        }}
                    }}
                    return json.dumps(res).encode('utf-8')
                elif root["type"] == "dropVirtualSchema":
                    return json.dumps({{"type": "dropVirtualSchema"}}).encode('utf-8')
                elif root["type"] == "setProperties":
                    expectedOldProperties = {{'P1': '1', 'P2': '2'}}
                    expectedNewProperties = {{'P1': '1new', 'P2': None,'P3': '3'}}
                    if (root["schemaMetadataInfo"].get("properties", None) != None and len(root["schemaMetadataInfo"]["properties"]) > 0):
                        assert (len(root["schemaMetadataInfo"]["properties"]) == len(expectedOldProperties)), 'Expected different values for old properties. Expected: ' + str(expectedOldProperties) + ' Actual: ' + str(root["schemaMetadataInfo"]["properties"])
                        for propertyName, propertyValue in root["schemaMetadataInfo"]["properties"].iteritems():
                            assert (propertyName in expectedOldProperties), 'Expected different values for old properties. Expected: ' + str(expectedOldProperties) + ' actual: ' + str(root["schemaMetadataInfo"]["properties"])
                            assert (propertyValue == expectedOldProperties.get(propertyName, None)), 'Expected different values for old properties. Expected: ' + str(expectedOldProperties) + ' Actual: ' + str(root["schemaMetadataInfo"]["properties"])
                    assert (len(root["properties"]) == len(expectedNewProperties)), 'Expected different values for new properties. Expected: ' + str(expectedNewProperties) + ' Actual: ' + str(root["properties"])
                    for propertyName, propertyValue in root["properties"].iteritems():
                        assert (propertyName in expectedNewProperties), 'Expected different values for new properties. Expected: ' + str(expectedNewProperties) + ' Actual: ' + str(root["properties"])
                        assert (propertyValue == expectedNewProperties.get(propertyName, None)), 'Expected different values for new properties. Expected: ' + str(expectedNewProperties) + ' Actual: ' + str(root["properties"])
                    return json.dumps({{"type": "setProperties"}}).encode('utf-8')
                else:
                    raise ValueError('Unsupported callback')
            /
            ''').format(schema = schemaName, adapter = adapterName))
class SetPropertiesRefreshTest(VSchemaTest):
    """Checks which property changes trigger a metadata refresh of a virtual schema.

    Changing SCHEMA_NAME/CONNECTION_STRING must re-read all table metadata;
    changing an unrelated property (IS_LOCAL) must leave table metadata alone.
    """
    def setUp(self):
        # Create a simple native schema with tables
        self.createNative()
        self.commit() # commit, otherwise adapter doesn't see tables
        self.createJdbcAdapter(schemaName="ADAPTER", adapterName="JDBC_ADAPTER")
        self.createVirtualSchemaJdbc("VS1", "NATIVE", "ADAPTER.JDBC_ADAPTER", True)
        self.commit()
    def testWithRefresh(self):
        """Re-pointing the schema via SET SCHEMA_NAME/CONNECTION_STRING refreshes all tables."""
        self.query('DROP TABLE NATIVE.T_DATETIME')
        self.query('ALTER TABLE NATIVE.T ADD COLUMN d int')
        self.query('CREATE TABLE NATIVE.T_NEW(a int)')
        self.query('DROP SCHEMA IF EXISTS NATIVE_RENAMED CASCADE')
        self.query('RENAME SCHEMA NATIVE TO NATIVE_RENAMED')
        self.commit()
        timeBefore = self.queryCurrentTimestamp()
        # NOTE(review): host is hard-coded here while other tests use
        # udf.opts.server — confirm this is intentional for the test setup.
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET SCHEMA_NAME='{remoteSchema}' CONNECTION_STRING='jdbc:exa:{host_port};schema={remoteSchema}'
            '''.format(host_port = 'localhost:8888',remoteSchema='NATIVE_RENAMED'))
        self.commit() # without this commit, the refresh time does not get updated
        timeAfter = self.queryCurrentTimestamp()
        # The virtual schema now reflects the renamed source schema, including
        # the dropped, altered and newly created tables.
        rows = self.queryColumnMetadata('VS1')
        self.assertRowsEqual(
            [('G', 'K', 'DECIMAL(18,0)'), ('G', 'V1', 'DECIMAL(18,0)'), ('G', 'V2', 'VARCHAR(100) UTF8'),
            ('NUMBERS1', 'A', 'DECIMAL(18,0)'), ('NUMBERS1', 'B', 'DECIMAL(18,0)'), ('NUMBERS1', 'C', 'DECIMAL(18,0)'), ('NUMBERS1', 'D', 'DECIMAL(18,0)'),
            ('NUMBERS2', 'E', 'DECIMAL(18,0)'), ('NUMBERS2', 'F', 'DECIMAL(18,0)'), ('NUMBERS2', 'G', 'DECIMAL(18,0)'), ('NUMBERS2', 'H', 'DECIMAL(18,0)'),
            ('T', 'A', 'DECIMAL(18,0)'), ('T', 'B', 'VARCHAR(100) UTF8'), ('T', 'C', 'DOUBLE'), ('T', 'D', 'DECIMAL(18,0)'),
            ('TEST', 'A', 'TIMESTAMP WITH LOCAL TIME ZONE'),
            ('T_CONNECT', 'PARENT', 'DECIMAL(18,0)'),
            ('T_CONNECT', 'VAL', 'DECIMAL(18,0)'),
            ('T_DATATYPES', 'A1', 'DECIMAL(18,0)'),
            ('T_DATATYPES', 'A10', 'GEOMETRY(3857)'),
            ('T_DATATYPES', 'A11', 'DECIMAL(10,5)'),
            ('T_DATATYPES', 'A12', 'DOUBLE'),
            ('T_DATATYPES', 'A13', 'DECIMAL(36,0)'),
            ('T_DATATYPES', 'A14', 'DECIMAL(18,0)'),
            ('T_DATATYPES', 'A15', 'DECIMAL(29,0)'),
            ('T_DATATYPES', 'A16', 'DECIMAL(18,0)'),
            ('T_DATATYPES', 'A17', 'DECIMAL(25,0)'),
            ('T_DATATYPES', 'A18', 'DECIMAL(27,9)'),
            ('T_DATATYPES', 'A19', 'DOUBLE'),
            ('T_DATATYPES', 'A2', 'DOUBLE'),
            ('T_DATATYPES', 'A20', 'DECIMAL(18,0)'),
            ('T_DATATYPES', 'A21', 'DOUBLE'),
            ('T_DATATYPES', 'A22', 'DECIMAL(1,0)'),
            ('T_DATATYPES', 'A23', 'DECIMAL(3,2)'),
            ('T_DATATYPES', 'A24', 'DECIMAL(18,0)'),
            ('T_DATATYPES', 'A25', 'DECIMAL(6,0)'),
            ('T_DATATYPES', 'A26', 'DECIMAL(6,3)'),
            ('T_DATATYPES', 'A27', 'DOUBLE'),
            ('T_DATATYPES', 'A28', 'DECIMAL(9,0)'),
            ('T_DATATYPES', 'A29', 'DECIMAL(9,0)'),
            ('T_DATATYPES', 'A3', 'DATE'),
            ('T_DATATYPES', 'A30', 'DECIMAL(3,0)'),
            ('T_DATATYPES', 'A31', 'DATE'),
            ('T_DATATYPES', 'A32', 'TIMESTAMP WITH LOCAL TIME ZONE'),
            ('T_DATATYPES', 'A4', 'TIMESTAMP'),
            ('T_DATATYPES', 'A5', 'VARCHAR(3000) UTF8'),
            ('T_DATATYPES', 'A6', 'CHAR(10) UTF8'),
            ('T_DATATYPES', 'A7', 'BOOLEAN'),
            ('T_DATATYPES', 'A8', 'INTERVAL DAY(2) TO SECOND(3)'),
            ('T_DATATYPES', 'A9', 'INTERVAL YEAR(2) TO MONTH'),
            ('T_GEOMETRY', 'A', 'GEOMETRY(3857)'),
            ('T_GEOMETRY', 'ID', 'DECIMAL(18,0)'),
            ('T_INTERVAL', 'A', 'INTERVAL YEAR(2) TO MONTH'),
            ('T_INTERVAL', 'B', 'INTERVAL DAY(2) TO SECOND(3)'),
            ('T_NEW', 'A', 'DECIMAL(18,0)'),
            ('T_NULLS', 'A', 'DECIMAL(18,0)'),
            ('T_NULLS', 'B', 'VARCHAR(100) UTF8')], rows)
        # Check refresh time
        self.assertBetween(self.getLastSchemaRefresh('VS1'), timeBefore, timeAfter)
        self.assertBetween(self.getLastTableRefresh ('VS1', 'T'), timeBefore, timeAfter)
        self.assertBetween(self.getLastTableRefresh ('VS1', 'G'), timeBefore, timeAfter)
        self.assertBetween(self.getLastTableRefresh ('VS1', 'T_NEW'), timeBefore, timeAfter)
    def testWithoutRefresh(self):
        """Setting an unrelated property (IS_LOCAL) must not refresh table metadata."""
        schemaRefreshBefore = self.getLastSchemaRefresh('VS1')
        tRefreshBefore = self.getLastTableRefresh ('VS1', 'T')
        gRefreshBefore = self.getLastTableRefresh ('VS1', 'G')
        tNewRefreshBefore = self.getLastTableRefresh ('VS1', 'T_DATETIME')
        metaBefore = self.queryColumnMetadata('VS1')
        # Change the source schema
        self.query('DROP TABLE NATIVE.T_DATETIME')
        self.query('ALTER TABLE NATIVE.T ADD COLUMN d int')
        self.query('CREATE TABLE NATIVE.T_NEW(a int)')
        # Setting this property should not refresh tables
        timeBefore = self.queryCurrentTimestamp()
        # Fix: dropped a leftover .format(host_port=..., remoteSchema=...) call
        # here — the statement contains no replacement fields, so it was a no-op.
        self.query('''
            ALTER VIRTUAL SCHEMA VS1 SET IS_LOCAL='false'
            ''')
        self.commit() # without this commit, the refresh time does not get updated
        timeAfter = self.queryCurrentTimestamp()
        # Schema-level refresh timestamp moves, table-level timestamps do not.
        self.assertBetween(self.getLastSchemaRefresh('VS1'), timeBefore, timeAfter)
        self.assertEqual   (self.getLastTableRefresh ('VS1', 'T'), tRefreshBefore)
        self.assertEqual   (self.getLastTableRefresh ('VS1', 'G'), gRefreshBefore)
        self.assertEqual   (self.getLastTableRefresh ('VS1', 'T_DATETIME'), tNewRefreshBefore)
        metaAfter = self.queryColumnMetadata('VS1')
        self.assertRowsEqual(metaBefore, metaAfter)
class DropVSchemaTest(VSchemaTest):
    """DROP [FORCE] VIRTUAL SCHEMA, including broken adapters and missing CASCADE/VIRTUAL."""
    def setUp(self):
        self.createJdbcAdapter(schemaName="ADAPTER", adapterName="JDBC_ADAPTER")
    def testDropVSchema(self):
        """Regular DROP VIRTUAL SCHEMA ... CASCADE removes the schema."""
        self.createNative()
        self.commit() # commit, otherwise adapter doesn't see tables
        self.createVirtualSchemaJdbc("VS1", "NATIVE", "ADAPTER.JDBC_ADAPTER", True)
        self.commit()
        self.query('DROP VIRTUAL SCHEMA VS1 CASCADE')
        rows = self.query("SELECT * FROM EXA_SCHEMAS WHERE SCHEMA_NAME = 'VS1' ")
        self.assertEqual(0, self.rowcount())
    def testDropEmptyVSchema(self):
        """An empty virtual schema can be dropped without CASCADE."""
        self.query('DROP SCHEMA IF EXISTS NATIVE CASCADE')
        self.query('CREATE SCHEMA NATIVE')
        self.commit()
        self.createVirtualSchemaJdbc("VS1", "NATIVE", "ADAPTER.JDBC_ADAPTER", True)
        self.commit()
        self.query('DROP VIRTUAL SCHEMA VS1')
        rows = self.query("SELECT * FROM EXA_SCHEMAS WHERE SCHEMA_NAME = 'VS1' ")
        self.assertEqual(0, self.rowcount())
    def testDropVSchemaInvalidAdapterScript(self):
        """If the adapter script does not compile, only DROP FORCE can remove the schema."""
        self.createNative()
        self.commit()
        self.createVirtualSchemaJdbc("VS1", "NATIVE", "ADAPTER.JDBC_ADAPTER", True)
        self.commit()
        # Replace the adapter with an empty (syntactically invalid) script.
        self.query(udf.fixindent('''
            CREATE OR REPLACE PYTHON ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS
            /
            '''))
        with self.assertRaisesRegexp(Exception, 'SyntaxError: invalid syntax \\(JDBC_ADAPTER, line 1\\)'):
            self.query('DROP VIRTUAL SCHEMA VS1 CASCADE')
        # Schema survives the failed drop ...
        rows = self.query("SELECT * FROM EXA_SCHEMAS WHERE SCHEMA_NAME = 'VS1' ")
        self.assertEqual(1, self.rowcount())
        # ... until it is force-dropped, which skips the adapter callback.
        self.query('DROP FORCE VIRTUAL SCHEMA VS1 CASCADE')
        rows = self.query("SELECT * FROM EXA_SCHEMAS WHERE SCHEMA_NAME = 'VS1' ")
        self.assertEqual(0, self.rowcount())
    def testDropVSchemaInvalidJson(self):
        """If the adapter returns malformed JSON, only DROP FORCE can remove the schema."""
        self.createNative()
        self.commit()
        self.createVirtualSchemaJdbc("VS1", "NATIVE", "ADAPTER.JDBC_ADAPTER", True)
        self.commit()
        self.query(udf.fixindent('''
            CREATE OR REPLACE PYTHON ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS
            import json
            def adapter_call(request):
                # missing brackets
                return """ "type": "dropVirtualSchema"} """
            /
            '''))
        with self.assertRaisesRegexp(Exception, 'Unknown exception while parsing the response: in Json::Value::find\\(key, end, found\\): requires objectValue or nullValue'):
            self.query('DROP VIRTUAL SCHEMA VS1 CASCADE')
        rows = self.query("SELECT * FROM EXA_SCHEMAS WHERE SCHEMA_NAME = 'VS1' ")
        self.assertEqual(1, self.rowcount())
        self.query('DROP FORCE VIRTUAL SCHEMA VS1 CASCADE')
        rows = self.query("SELECT * FROM EXA_SCHEMAS WHERE SCHEMA_NAME = 'VS1' ")
        self.assertEqual(0, self.rowcount())
    def testDropVSchemaMissingCascade(self):
        """Dropping a non-empty virtual schema without CASCADE is rejected."""
        self.createNative()
        self.commit() # commit, otherwise adapter doesn't see tables
        self.createVirtualSchemaJdbc("VS1", "NATIVE", "ADAPTER.JDBC_ADAPTER", True)
        self.commit()
        with self.assertRaisesRegexp(Exception, 'schema is not empty - use DROP VIRTUAL SCHEMA VS1 CASCADE to delete it'):
            self.query('DROP VIRTUAL SCHEMA VS1')
    def testDropSchemaMissingVirtual(self):
        """Plain DROP SCHEMA on a virtual schema points the user to DROP VIRTUAL SCHEMA."""
        self.createNative()
        self.commit() # commit, otherwise adapter doesn't see tables
        self.createVirtualSchemaJdbc("VS1", "NATIVE", "ADAPTER.JDBC_ADAPTER", True)
        self.commit()
        with self.assertRaisesRegexp(Exception, 'schema VS1 is a virtual schema. Please use DROP VIRTUAL SCHEMA instead'):
            self.query('DROP SCHEMA VS1 CASCADE')
    def testDropForceVirtualSchema(self):
        """DROP FORCE never invokes the adapter (the replacement adapter would raise)."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        self.query(udf.fixindent('''
            CREATE OR REPLACE PYTHON ADAPTER SCRIPT ADAPTER.FAST_ADAPTER AS
            import json
            import string
            def adapter_call(request):
                raise ValueError('This should never be called')
            /
            '''))
        self.query('DROP FORCE VIRTUAL SCHEMA VS1 CASCADE')
        rows = self.query("SELECT * FROM EXA_SCHEMAS WHERE SCHEMA_NAME = 'VS1' ")
        self.assertEqual(0, self.rowcount())
class UnsupportedActions(VSchemaTest):
setupDone = False
    def setUp(self):
        # TODO Remove this workaround
        # Class-level flag: the expensive schema setup runs only once per class;
        # subsequent tests just re-open the already-created virtual schema.
        if self.__class__.setupDone:
            self.query(''' OPEN SCHEMA VS1 ''')
            return
        # Create a simple native schema with tables
        self.createNative()
        self.commit() # commit, otherwise adapter doesn't see tables
        self.createJdbcAdapter(schemaName="ADAPTER", adapterName="JDBC_ADAPTER")
        self.createVirtualSchemaJdbc("VS1", "NATIVE", "ADAPTER.JDBC_ADAPTER", True)
        self.commit()
        self.__class__.setupDone = True
def testDDLCreateTable(self):
with self.assertRaisesRegexp(Exception, 'Creating tables in virtual schemas is not allowed'):
self.query('CREATE TABLE t_new (a int)')
with self.assertRaisesRegexp(Exception, 'Creating tables in virtual schemas is not allowed'):
self.query('CREATE TABLE t_new AS SELECT * FROM NATIVE.T')
# SELECT INTO FROM is similar to CREATE TABLE AS
with self.assertRaisesRegexp(Exception, 'Creating tables in virtual schemas is not allowed'):
self.query('SELECT * INTO TABLE t_new FROM NATIVE.T;')
with self.assertRaisesRegexp(Exception, 'Creating tables in virtual schemas is not allowed'):
self.query('CREATE TABLE t_new LIKE NATIVE.T')
def testDDLDropTable(self):
with self.assertRaisesRegexp(Exception, 'Dropping virtual tables is not allowed'):
self.query('DROP TABLE t')
def testDDLAlterTableColumn(self):
alterError = 'Altering virtual tables is not allowed'
with self.assertRaisesRegexp(Exception, alterError):
self.query('ALTER TABLE t ADD COLUMN new_col int')
with self.assertRaisesRegexp(Exception, alterError):
self.query('ALTER TABLE t DROP COLUMN a')
with self.assertRaisesRegexp(Exception, alterError):
self.query('ALTER TABLE t MODIFY COLUMN a double')
with self.assertRaisesRegexp(Exception, alterError):
self.query('ALTER TABLE t RENAME COLUMN a TO x')
with self.assertRaisesRegexp(Exception, alterError):
self.query('ALTER TABLE t ALTER COLUMN a SET DEFAULT 1')
with self.assertRaisesRegexp(Exception, alterError):
self.query('ALTER TABLE t ALTER COLUMN a DROP DEFAULT')
with self.assertRaisesRegexp(Exception, alterError):
self.query('ALTER TABLE t ALTER COLUMN a SET IDENTITY')
with self.assertRaisesRegexp(Exception, alterError):
self.query('ALTER TABLE t ALTER COLUMN a DROP IDENTITY')
def testDDLAlterTableDistribution(self):
alterError = 'Altering virtual tables is not allowed'
with self.assertRaisesRegexp(Exception, alterError):
self.query('ALTER TABLE t DISTRIBUTE BY a')
with self.assertRaisesRegexp(Exception, alterError):
self.query('ALTER TABLE t DROP DISTRIBUTION KEYS')
def testDDLAlterTableConstraints(self):
alterError = 'Altering virtual tables is not allowed'
with self.assertRaisesRegexp(Exception, alterError):
self.query('ALTER TABLE t ADD CONSTRAINT my_prim_key PRIMARY KEY (a)')
with self.assertRaisesRegexp(Exception, alterError):
self.query('ALTER TABLE t MODIFY CONSTRAINT my_constraint DISABLE')
with self.assertRaisesRegexp(Exception, alterError):
self.query('ALTER TABLE t MODIFY PRIMARY KEY DISABLE')
with self.assertRaisesRegexp(Exception, alterError):
self.query('ALTER TABLE t DROP CONSTRAINT my_constraint')
with self.assertRaisesRegexp(Exception, alterError):
self.query('ALTER TABLE t DROP PRIMARY KEY')
with self.assertRaisesRegexp(Exception, alterError):
self.query('ALTER TABLE t RENAME CONSTRAINT my_constraint TO my_constraint_new')
def testDDLAlterTableForeignKey(self):
with self.assertRaisesRegexp(Exception, 'references to virtual tables are not supported'):
self.query('ALTER TABLE native.t ADD CONSTRAINT foreign key (a) REFERENCES vs1.t (a)')
def testDDLCreateView(self):
with self.assertRaisesRegexp(Exception, 'Creating views in virtual schemas is not allowed'):
self.query('CREATE VIEW new_view AS SELECT * FROM native.t')
with self.assertRaisesRegexp(Exception, 'Creating views in virtual schemas is not allowed'):
self.query('CREATE FORCE VIEW new_view AS SELECT * FROM native.t')
def testDDLDropView(self):
with self.assertRaisesRegexp(Exception, 'view NON_EXISTING_VIEW does not exist'):
self.query('DROP VIEW non_existing_view')
def testDDLCreateFunction(self):
with self.assertRaisesRegexp(Exception, 'Creating functions in virtual schemas is not allowed'):
self.query(udf.fixindent('''
CREATE OR REPLACE FUNCTION my_fun (a DECIMAL)
RETURN VARCHAR(10)
IS
res VARCHAR(10);
BEGIN
res := 'foo';
RETURN res;
END my_fun;
/
'''))
def testDDLDropFunction(self):
with self.assertRaisesRegexp(Exception, 'function NON_EXISTING_FUNC does not exist'):
self.query('DROP FUNCTION non_existing_func')
def testDDLCreateScriptingScript(self):
with self.assertRaisesRegexp(Exception, 'Creating scripts in virtual schemas is not allowed'):
self.query(udf.fixindent('''
CREATE SCRIPT SCRIPT_B AS
output("foo");
/
'''))
def testDDLCreateUdfScript(self):
with self.assertRaisesRegexp(Exception, 'Creating scripts in virtual schemas is not allowed'):
self.query(udf.fixindent('''
CREATE or replace PYTHON SET SCRIPT dummy(a int)
EMITS (a int) AS
def run(ctx):
ctx.emit(1)
/
'''))
def testDDLDropScript(self):
with self.assertRaisesRegexp(Exception, 'script NON_EXISTING_SCRIPT does not exist'):
self.query('DROP SCRIPT non_existing_script')
def testDDLRenameObject(self):
with self.assertRaisesRegexp(Exception, 'Renaming virtual schema objects is not allowed'):
self.query('RENAME TABLE t TO t_new')
with self.assertRaisesRegexp(Exception, 'object NON_EXISTING_VIEW does not exist'):
self.query('RENAME VIEW non_existing_view TO view_new')
def testDDLComment(self):
with self.assertRaisesRegexp(Exception, 'Creating comments for virtual tables is not allowed'):
self.query("COMMENT ON TABLE t IS 'table comment' ")
with self.assertRaisesRegexp(Exception, 'Creating comments for virtual tables is not allowed'):
self.query("COMMENT ON TABLE t IS 'table comment' (a IS 'col comment')")
with self.assertRaisesRegexp(Exception, 'Creating column comments for virtual tables is not allowed'):
self.query("COMMENT ON COLUMN t.a IS 'col comment'")
with self.assertRaisesRegexp(Exception, 'script NON_EXISTING_SCRIPT not found'):
self.query("COMMENT ON SCRIPT non_existing_script IS 'comment'")
def testDMLInsert(self):
with self.assertRaisesRegexp(Exception, 'Inserting into virtual tables is not supported'):
self.query("INSERT INTO t values (1,'a',1)")
with self.assertRaisesRegexp(Exception, 'Inserting into virtual tables is not supported'):
self.query('INSERT INTO t DEFAULT VALUES')
with self.assertRaisesRegexp(Exception, 'Inserting into virtual tables is not supported'):
self.query('INSERT INTO t SELECT * FROM native.t')
def testDMLUpdate(self):
with self.assertRaisesRegexp(Exception, 'Updating virtual tables is not supported'):
self.query('UPDATE t set a = 1')
with self.assertRaisesRegexp(Exception, 'Updating virtual tables is not supported'):
self.query('UPDATE t SET a = 1 WHERE a = 2')
with self.assertRaisesRegexp(Exception, 'Updating virtual tables is not supported'):
self.query('UPDATE t AS t1 SET t1.a = t2.a FROM t AS t2 WHERE t1.a = t2.a;')
def testDMLMerge(self):
with self.assertRaisesRegexp(Exception, 'cannot merge into a virtual table'):
self.query('MERGE INTO t t1 USING native.t t2 ON (t1.a = t2.a) WHEN MATCHED THEN UPDATE SET a = t2.a')
with self.assertRaisesRegexp(Exception, 'cannot use virtual table T2 as source for merge'):
self.query('MERGE INTO native.t t1 USING t t2 ON (t1.a = t2.a) WHEN MATCHED THEN UPDATE SET b = t2.b')
def testDMLDelete(self):
with self.assertRaisesRegexp(Exception, 'Deleting from virtual tables is not supported'):
self.query('DELETE FROM t WHERE a = 1')
with self.assertRaisesRegexp(Exception, 'Truncating virtual tables is not supported'):
self.query('DELETE FROM t')
def testDMLTruncate(self):
with self.assertRaisesRegexp(Exception, 'Truncating virtual tables is not supported'):
self.query('TRUNCATE TABLE t')
def testDMLImport(self):
with self.assertRaisesRegexp(Exception, 'Inserting into virtual tables is not supported'):
self.query("IMPORT INTO t FROM jdbc at 'jdbc:exa:invalid-host:5555' user 'sys' identified by 'exasol' statement 'SELECT * FROM NATIVE.T'")
with self.assertRaisesRegexp(Exception, 'virtual tables cannot be used as error table'):
self.query("IMPORT INTO native.t FROM jdbc at 'jdbc:exa:invalid-host:5555' user 'sys' identified by 'exasol' statement 'SELECT * FROM NATIVE.T' ERRORS INTO t")
self.query('CREATE OR REPLACE TABLE native.t_copy LIKE native.t')
with self.assertRaisesRegexp(Exception, re.escape('''Execution of SQL Statement (for reading data) failed on external EXASolution. [IMPORT directly from a virtual table is not supported. Use STATEMENT option instead with SELECT * FROM "VS1"."T" ''')):
self.query("IMPORT INTO native.t_copy FROM exa at '{host_port}' user 'sys' identified by 'exasol' table VS1.T".format(host_port = 'localhost:8888'))
def testDMLExport(self):
self.query('CREATE OR REPLACE TABLE native.t_copy LIKE native.t')
with self.assertRaisesRegexp(Exception, re.escape('''EXPORT directly from a virtual table is not supported. Use EXPORT (SELECT * FROM VS1.T) instead.''')):
self.query("EXPORT VS1.T INTO JDBC at 'jdbc:exa:{host_port}' user 'sys' identified by 'exasol' TABLE native.t_copy".format(host_port = 'localhost:8888'))
def testEnforceIndex(self):
with self.assertRaisesRegexp(Exception, 'Enforcing indexes on virtual tables is not allowed'):
self.query('ENFORCE INDEX ON t(a)')
def testReorganize(self):
with self.assertRaisesRegexp(Exception, 'Reorganizing a virtual table is not allowed'):
self.query('REORGANIZE TABLE t')
with self.assertRaisesRegexp(Exception, 'Reorganizing virtual schemas is not allowed'):
self.query('REORGANIZE SCHEMA vs1')
# Following query may not throw an exception
self.query('REORGANIZE DATABASE')
def testRecompress(self):
with self.assertRaisesRegexp(Exception, 'Recompressing virtual tables is not allowed'):
self.query('RECOMPRESS TABLE t')
with self.assertRaisesRegexp(Exception, 'Recompressing virtual schemas is not allowed'):
self.query('RECOMPRESS SCHEMA vs1')
# Following query may not throw an exception
self.query('RECOMPRESS DATABASE')
def testPreload(self):
with self.assertRaisesRegexp(Exception, 'Preloading a virtual table is not supported'):
self.query('PRELOAD TABLE t')
with self.assertRaisesRegexp(Exception, 'Preloading of virtual schemas is not supported'):
self.query('PRELOAD SCHEMA vs1')
# Following query may not throw an exception
self.query('PRELOAD DATABASE')
def testAnalyze(self):
with self.assertRaisesRegexp(Exception, 'Analyzing statistics of virtual tables is not allowed'):
self.query('ANALYZE TABLE t ESTIMATE STATISTICS')
with self.assertRaisesRegexp(Exception, 'Analyzing statistics of virtual tables is not allowed'):
self.query('ANALYZE TABLE t EXACT STATISTICS')
with self.assertRaisesRegexp(Exception, 'Analyzing statistics of virtual schemas is not allowed'):
self.query('ANALYZE SCHEMA vs1 ESTIMATE STATISTICS')
# Following query may not throw an exception
self.query('ANALYZE DATABASE ESTIMATE STATISTICS')
class AdapterScriptTest(VSchemaTest):
    """Lifecycle tests for adapter scripts: creating, replacing and dropping
    them, and their interaction with virtual schemas that use them."""
    def testDropAdapterWithExistingVSchema(self):
        """Dropping an adapter script must fail while a virtual schema uses it."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        with self.assertRaisesRegexp(Exception, 'At least one virtual schema existing for this Adapter Script, please drop all Virtual Schemas of this Adapter first'):
            self.query('DROP ADAPTER SCRIPT ADAPTER.FAST_ADAPTER')
        # fix: removed unused `rows =` binding; only the rowcount of the
        # last query matters — the schema must still exist after the failed drop
        self.query("SELECT * FROM EXA_SCHEMAS WHERE SCHEMA_NAME = 'VS1' ")
        self.assertEqual(1, self.rowcount())
    def testDropSchemaWithAdapterScriptsAndVSchemasNotAllowed(self):
        """DROP SCHEMA CASCADE must fail if it contains an adapter script in use."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        # fix: raw string — \( and \) are regex escapes, invalid string escapes otherwise
        with self.assertRaisesRegexp(Exception, r'The schema contains the Adapter Script FAST_ADAPTER for which at least one Virtual Schema exists \(VS1\). Please drop all Virtual Schemas of this Adapter first.'):
            self.query('DROP SCHEMA ADAPTER CASCADE')
    def testDropSchemaWithAdapterScriptsButNoVSchemasAllowed(self):
        """DROP SCHEMA CASCADE succeeds if its adapter scripts are unused."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP SCHEMA ADAPTER CASCADE')
    def testCreateVSchemaWithNonAdapterScriptFails(self):
        """CREATE VIRTUAL SCHEMA must reject a plain UDF as adapter."""
        self.query(udf.fixindent('''
            CREATE OR REPLACE PYTHON SET SCRIPT ADAPTER.ADAPTER_UDF (a int) EMITS (a varchar(100)) AS
            /
            '''))
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        with self.assertRaisesRegexp(Exception, 'Script ADAPTER.ADAPTER_UDF exists, but is not an Adapter Script as expected'):
            self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.ADAPTER_UDF')
    def testDropAdapterScriptFailsForNonAdapterScripts(self):
        """DROP ADAPTER SCRIPT must reject non-adapter scripts."""
        self.query(udf.fixindent('''
            CREATE OR REPLACE PYTHON SET SCRIPT ADAPTER.ADAPTER_UDF (a int) EMITS (a varchar(100)) AS
            /
            '''))
        with self.assertRaisesRegexp(Exception, 'script ADAPTER_UDF is not an adapter script. Please use DROP SCRIPT instead.'):
            self.query('DROP ADAPTER SCRIPT ADAPTER.ADAPTER_UDF')
    def testDropScriptFailsForAdapterScripts(self):
        """DROP SCRIPT must reject adapter scripts."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        with self.assertRaisesRegexp(Exception, 'script FAST_ADAPTER is an adapter script. Please use DROP ADAPTER SCRIPT instead.'):
            self.query('DROP SCRIPT ADAPTER.FAST_ADAPTER')
    def testOverwriteAdapterScriptByNonAdapterScriptFails(self):
        """CREATE OR REPLACE of an adapter script by a plain UDF must fail."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        # NOTE(review): empty regex accepts any exception message — consider
        # pinning the exact error text here.
        with self.assertRaisesRegexp(Exception, ''):
            self.query(udf.fixindent('''
                CREATE OR REPLACE PYTHON SET SCRIPT ADAPTER.FAST_ADAPTER (a int) EMITS (a varchar(100)) AS
                /
                '''))
    def testOverwriteNonAdapterScriptByAdapterScriptFails(self):
        """CREATE OR REPLACE of a plain UDF by an adapter script must fail."""
        self.query('DROP SCHEMA IF EXISTS ADAPTER_TMP CASCADE')
        self.query('CREATE SCHEMA ADAPTER_TMP')
        self.query(udf.fixindent('''
            CREATE PYTHON SET SCRIPT ADAPTER_TMP.MY_UDF (a int) EMITS (a varchar(100)) AS
            /
            '''))
        with self.assertRaisesRegexp(Exception, 'object MY_UDF already exists and is not an adapter script'):
            self.query(udf.fixindent('''
                CREATE OR REPLACE PYTHON ADAPTER SCRIPT ADAPTER_TMP.MY_UDF AS
                /
                '''))
    def testOverwriteAdapterScriptByAnotherAdapter(self):
        """Replacing an adapter script must change the pushdown behavior of
        virtual schemas already created with it."""
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.query('DROP SCHEMA IF EXISTS VS1 CASCADE')
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        rows = self.query("SELECT * FROM VS1.DUMMY")
        self.assertRowsEqual([('FOO', 'BAR')], rows)
        self.query(udf.fixindent('''
            CREATE OR REPLACE PYTHON ADAPTER SCRIPT adapter.fast_adapter AS
            import json
            import string
            def adapter_call(request):
                # database expects utf-8 encoded string of type str. unicode not yet supported
                root = json.loads(request)
                if root["type"] == "createVirtualSchema":
                    res = {
                        "type": "createVirtualSchema",
                        "schemaMetadata": {
                            "tables": [
                            {
                                "name": "DUMMY",
                                "columns": [{
                                    "name": "A",
                                    "dataType": {"type": "VARCHAR", "size": 2000000}
                                },{
                                    "name": "B",
                                    "dataType": {"type": "VARCHAR", "size": 2000000}
                                }]
                            }]
                        }
                    }
                    return json.dumps(res).encode('utf-8')
                elif root["type"] == "dropVirtualSchema":
                    return json.dumps({"type": "dropVirtualSchema"}).encode('utf-8')
                elif root["type"] == "setProperties":
                    return json.dumps({"type": "setProperties"}).encode('utf-8')
                elif root["type"] == "refresh":
                    return json.dumps({"type": "refresh"}).encode('utf-8')
                if root["type"] == "getCapabilities":
                    return json.dumps({
                        "type": "getCapabilities",
                        "capabilities": []
                    }).encode('utf-8') # database expects utf-8 encoded string of type str. unicode not yet supported.
                elif root["type"] == "pushdown":
                    res = {
                        "type": "pushdown",
                        "sql": "SELECT * FROM (VALUES ('X', 'Y')) t"
                    }
                    return json.dumps(res).encode('utf-8')
                else:
                    raise ValueError('Unsupported callback')
            /
            '''))
        # the replaced adapter now pushes down constant values ('X', 'Y')
        rows = self.query("SELECT * FROM VS1.DUMMY")
        self.assertRowsEqual([('X', 'Y')], rows)
# Tests only the supported parts of IMPORT/EXPORT
class ImportExportTest(VSchemaTest):
    """IMPORT/EXPORT statements that ARE supported with virtual tables:
    reading a virtual table indirectly via a STATEMENT/subselect or a view.

    Setup is created once per class and reused; the EXA/JDBC loopback
    connections assume the database is reachable at localhost:8888.
    """
    # class-level flag: the setup below runs only once per test class
    setupDone = False
    def setUp(self):
        """Create NATIVE schema, JDBC adapter and virtual schema VS1 (once)."""
        # TODO Remove this workaround
        if self.__class__.setupDone:
            self.query(''' OPEN SCHEMA VS1 ''')
            return
        # Create a simple native schema with tables
        self.createNative()
        self.commit() # commit, otherwise adapter doesn't see tables
        self.createJdbcAdapter(schemaName="ADAPTER", adapterName="JDBC_ADAPTER")
        self.createVirtualSchemaJdbc("VS1", "NATIVE", "ADAPTER.JDBC_ADAPTER", True)
        self.commit()
        self.__class__.setupDone = True
    def testExaImportFromQueryWithVTable(self):
        """IMPORT ... FROM exa with a STATEMENT selecting from a virtual table."""
        self.query('CREATE OR REPLACE TABLE native.g_copy LIKE native.g')
        self.query("IMPORT INTO native.g_copy FROM exa at '{host_port}' user 'sys' identified by 'exasol' statement 'SELECT * FROM vs1.g'".format(host_port = 'localhost:8888'))
        self.assertRowsEqualIgnoreOrder(
            self.query("SELECT * FROM native.g"),
            self.query("SELECT * FROM native.g_copy"))
    def testExaImportFromViewWithVTable(self):
        """IMPORT ... FROM exa via a native view over a virtual table."""
        self.query('CREATE OR REPLACE TABLE native.g_copy LIKE native.g')
        self.query("CREATE OR REPLACE VIEW native.g_view AS SELECT * FROM vs1.g")
        self.commit()
        self.query("IMPORT INTO native.g_copy FROM exa at '{host_port}' user 'sys' identified by 'exasol' statement 'SELECT * FROM native.g_view'".format(host_port = 'localhost:8888'))
        self.assertRowsEqualIgnoreOrder(
            self.query("SELECT * FROM native.g"),
            self.query("SELECT * FROM native.g_copy"))
    def testJdbcImportFromVTable(self):
        """IMPORT ... FROM jdbc with TABLE pointing at a virtual table."""
        self.query('CREATE OR REPLACE TABLE native.g_copy LIKE native.g')
        self.query("IMPORT INTO native.g_copy FROM jdbc at 'jdbc:exa:{host_port}' user 'sys' identified by 'exasol' TABLE vs1.g".format(host_port = 'localhost:8888'))
        self.assertRowsEqualIgnoreOrder(
            self.query("SELECT * FROM native.g"),
            self.query("SELECT * FROM native.g_copy"))
    def testJdbcImportFromQueryWithVTable(self):
        """IMPORT ... FROM jdbc with a STATEMENT selecting from a virtual table."""
        self.query('CREATE OR REPLACE TABLE native.g_copy LIKE native.g')
        self.query("IMPORT INTO native.g_copy FROM jdbc at 'jdbc:exa:{host_port}' user 'sys' identified by 'exasol' STATEMENT 'SELECT * FROM vs1.g'".format(host_port = 'localhost:8888'))
        self.assertRowsEqualIgnoreOrder(
            self.query("SELECT * FROM native.g"),
            self.query("SELECT * FROM native.g_copy"))
    def testJdbcImportFromViewWithVTable(self):
        """IMPORT ... FROM jdbc via a native view over a virtual table."""
        self.query('CREATE OR REPLACE TABLE native.g_copy LIKE native.g')
        self.query("CREATE OR REPLACE VIEW native.g_view AS SELECT * FROM vs1.g")
        self.commit()
        self.query("IMPORT INTO native.g_copy FROM jdbc at 'jdbc:exa:{host_port}' user 'sys' identified by 'exasol' STATEMENT 'SELECT * FROM native.g_view'".format(host_port = 'localhost:8888'))
        self.assertRowsEqualIgnoreOrder(
            self.query("SELECT * FROM native.g"),
            self.query("SELECT * FROM native.g_copy"))
    def testExportFromQueryWithVTable(self):
        """EXPORT of a subselect over a virtual table."""
        self.query('CREATE OR REPLACE TABLE native.g_copy LIKE native.g')
        self.commit()
        self.query("EXPORT (SELECT * FROM vs1.g) INTO JDBC at 'jdbc:exa:{host_port}' user 'sys' identified by 'exasol' TABLE native.g_copy".format(host_port = 'localhost:8888'))
        self.assertRowsEqualIgnoreOrder(
            self.query("SELECT * FROM native.g"),
            self.query("SELECT * FROM native.g_copy"))
    def testExportFromViewWithVTable(self):
        """EXPORT of a native view over a virtual table."""
        self.query('CREATE OR REPLACE TABLE native.g_copy LIKE native.g')
        self.query("CREATE OR REPLACE VIEW native.g_view AS SELECT * FROM vs1.g")
        self.commit()
        self.query("EXPORT native.g_view INTO JDBC at 'jdbc:exa:{host_port}' USER 'sys' IDENTIFIED BY 'exasol' TABLE native.g_copy".format(host_port = 'localhost:8888'))
        self.assertRowsEqualIgnoreOrder(
            self.query("SELECT * FROM native.g"),
            self.query("SELECT * FROM native.g_copy"))
class MergeAndInsertTest(VSchemaTest):
    """MERGE/INSERT/CTAS statements that use virtual tables only as the
    *source* — these are supported (using them as target is rejected and
    covered by UnsupportedActions)."""
    def setUp(self):
        """Recreate NATIVE schema, JDBC adapter and virtual schema VS1 per test."""
        self.createNative()
        self.commit() # commit, otherwise adapter doesn't see tables
        self.createJdbcAdapter(schemaName="ADAPTER", adapterName="JDBC_ADAPTER")
        self.createVirtualSchemaJdbc("VS1", "NATIVE", "ADAPTER.JDBC_ADAPTER", True)
        self.commit()
    def testMergeFromQueryWithVTable(self):
        """MERGE into a native table using a subselect over a virtual table."""
        # t_copy starts with shifted 'a' values; the MERGE resets them
        self.query('CREATE OR REPLACE TABLE native.t_copy AS SELECT a+1 a, b, c FROM native.t')
        self.query('MERGE INTO native.t_copy t1 USING (SELECT * FROM t) t2 ON (t1.b = t2.b) WHEN MATCHED THEN UPDATE SET a = t2.a')
        self.assertRowsEqualIgnoreOrder(
            self.query('SELECT * FROM native.t'),
            self.query('SELECT * FROM native.t_copy'))
    def testMergeFromViewOnVTable(self):
        """MERGE into a native table using a view over a virtual table."""
        self.query('CREATE OR REPLACE TABLE native.t_copy AS SELECT a+1 a, b, c FROM native.t')
        self.query('CREATE VIEW native.t_view AS SELECT * FROM t')
        self.commit()
        self.query('MERGE INTO native.t_copy t1 USING native.t_view t2 ON (t1.b = t2.b) WHEN MATCHED THEN UPDATE SET a = t2.a')
        self.assertRowsEqualIgnoreOrder(
            self.query('SELECT * FROM native.t'),
            self.query('SELECT * FROM native.t_copy'))
    def testInsertIntoFromQueryWithVTable(self):
        """INSERT INTO a native table selecting from a virtual table."""
        self.query('CREATE OR REPLACE TABLE native.t_copy LIKE native.t')
        self.query('INSERT INTO native.t_copy SELECT * FROM vs1.t')
        self.assertRowsEqualIgnoreOrder(
            self.query('SELECT * FROM native.t'),
            self.query('SELECT * FROM native.t_copy'))
    def testCreateTableAsWithVTable(self):
        """CREATE TABLE AS selecting from a virtual table."""
        self.query('CREATE OR REPLACE TABLE native.t_copy AS SELECT * FROM vs1.t')
        self.assertRowsEqualIgnoreOrder(
            self.query('SELECT * FROM native.t'),
            self.query('SELECT * FROM native.t_copy'))
class AccessControl(VSchemaTest):
def setUp(self):
# make sure that there are no Virtual Schemas
virtualSchemas = self.query('SELECT SCHEMA_NAME FROM EXA_VIRTUAL_SCHEMAS')
for schema in virtualSchemas:
self.query('DROP VIRTUAL SCHEMA IF EXISTS {schema} CASCADE'.format(schema=schema[0]))
# make sure there are no things assigned to PUBLIC role
self.assertEquals(0, self.queryScalar("SELECT COUNT(*) FROM EXA_DBA_SYS_PRIVS WHERE GRANTEE='PUBLIC'"))
self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
self.commit()
def getConnection(self, username, password):
client = exatest.ODBCClient('exatest')
self.log.debug('connecting to DSN "exa" for user {username}'.format(username=username))
client.connect(uid = username, pwd = password)
return client
def createUser(self, username, password):
self.query('DROP USER IF EXISTS {username} CASCADE'.format(username = username))
self.query('CREATE USER {username} IDENTIFIED BY "{password}"'.format(username = username, password = password))
self.query('GRANT CREATE SESSION TO {username}'.format(username=username))
# Test Sys Privileges
    def testSysPrivsExists(self):
        """The DBA role must hold all four virtual-schema system privileges with ADMIN OPTION."""
        sysPrivs = self.queryScalar("""
            SELECT COUNT(*) FROM EXA_DBA_SYS_PRIVS WHERE GRANTEE='DBA' AND ADMIN_OPTION=TRUE AND
            PRIVILEGE IN ('CREATE VIRTUAL SCHEMA', 'ALTER ANY VIRTUAL SCHEMA', 'ALTER ANY VIRTUAL SCHEMA REFRESH', 'DROP ANY VIRTUAL SCHEMA')
            """)
        self.assertEqual(4, sysPrivs)
# Create Adapter Script
    def testCreateAdapterNoPrivs(self):
        """A user without script privileges cannot create an adapter script."""
        self.createUser("user2", "user2")
        self.commit()
        conn = self.getConnection('user2', 'user2')
        with self.assertRaisesRegexp(Exception, 'insufficient privileges for creating an adapter script'):
            conn.query(udf.fixindent('''
                CREATE OR REPLACE PYTHON ADAPTER SCRIPT ADAPTER.FAST_ADAPTER_2 AS
                def adapter_call(request):
                    pass
                /
                '''))
    def testCreateAdapterOwnerNoPrivs(self):
        """Schema ownership alone does not allow creating an adapter script."""
        self.createUser("user2", "user2")
        self.query('GRANT CREATE SCHEMA TO user2')
        self.commit()
        conn = self.getConnection('user2', 'user2')
        # Even if you are owner you need explicit privs for creating scripts
        conn.query('CREATE SCHEMA ADAPTER_2')
        with self.assertRaisesRegexp(Exception, 'insufficient privileges for creating an adapter script'):
            conn.query(udf.fixindent('''
                CREATE OR REPLACE PYTHON ADAPTER SCRIPT ADAPTER_2.FAST_ADAPTER_2 AS
                def adapter_call(request):
                    pass
                /
                '''))
    def testCreateAdapterWithSysPrivCreateAnyScript(self):
        """CREATE ANY SCRIPT allows creating adapter scripts in any schema."""
        self.createUser("user2", "user2")
        self.query('GRANT CREATE ANY SCRIPT TO user2')
        self.commit()
        conn = self.getConnection('user2', 'user2')
        conn.query(udf.fixindent('''
            CREATE OR REPLACE PYTHON ADAPTER SCRIPT ADAPTER.FAST_ADAPTER_2 AS
            def adapter_call(request):
                pass
            /
            '''))
        self.assertRowsEqual([(1,)], conn.query("SELECT COUNT(*) FROM EXA_ALL_SCRIPTS WHERE SCRIPT_SCHEMA='ADAPTER' AND SCRIPT_NAME='FAST_ADAPTER_2'"))
    def testCreateAdapterWithSysPrivCreateScript(self):
        """CREATE SCRIPT allows adapter scripts only in schemas the user owns."""
        self.createUser("user2", "user2")
        self.query('GRANT CREATE SCRIPT TO user2')
        self.query('GRANT CREATE SCHEMA TO user2')
        self.commit()
        conn = self.getConnection('user2', 'user2')
        # Creating Adapter Scripts in non-owned schemas is not allowed
        with self.assertRaisesRegexp(Exception, 'insufficient privileges for creating an adapter script'):
            conn.query(udf.fixindent('''
                CREATE OR REPLACE PYTHON ADAPTER SCRIPT ADAPTER.FAST_ADAPTER_2 AS
                def adapter_call(request):
                    pass
                /
                '''))
        self.assertRowsEqual([(0,)], conn.query("SELECT COUNT(*) FROM EXA_ALL_SCRIPTS WHERE SCRIPT_SCHEMA='ADAPTER' AND SCRIPT_NAME='FAST_ADAPTER_2'"))
        # Creating Adapter Scripts in your own schemas is allowed
        conn.query('CREATE SCHEMA ADAPTER_2')
        conn.query(udf.fixindent('''
            CREATE OR REPLACE PYTHON ADAPTER SCRIPT ADAPTER_2.FAST_ADAPTER_2 AS
            def adapter_call(request):
                pass
            /
            '''))
        self.assertRowsEqual([(1,)], conn.query("SELECT COUNT(*) FROM EXA_ALL_SCRIPTS WHERE SCRIPT_SCHEMA='ADAPTER_2' AND SCRIPT_NAME='FAST_ADAPTER_2'"))
# Create Or Replace Adapter Script
    def testCreateOrReplaceAdapter(self):
        """Replacing an existing adapter script additionally requires DROP ANY SCRIPT.

        Verified via the script object id: unchanged after the failed replace,
        changed after the successful one.
        """
        self.createUser("user2", "user2")
        self.query('GRANT CREATE ANY SCRIPT TO user2')
        self.query(udf.fixindent('''
            CREATE OR REPLACE PYTHON ADAPTER SCRIPT ADAPTER.FAST_ADAPTER_REPLACE AS
            def adapter_call(request):
                pass
            /
            '''))
        self.commit()
        oldAdapterObjectId = self.queryScalar("SELECT SCRIPT_OBJECT_ID FROM EXA_ALL_SCRIPTS WHERE SCRIPT_SCHEMA='ADAPTER' AND SCRIPT_NAME='FAST_ADAPTER_REPLACE'")
        # We need sys priv DROP ANY SCRIPT for the old Adapter Script if we want to replace it
        conn = self.getConnection('user2', 'user2')
        with self.assertRaisesRegexp(Exception, 'insufficient privileges for replacing script'):
            conn.query(udf.fixindent('''
                CREATE OR REPLACE PYTHON ADAPTER SCRIPT ADAPTER.FAST_ADAPTER_REPLACE AS
                def adapter_call(request):
                    pass
                /
                '''))
        self.assertRowsEqual([(oldAdapterObjectId,)], conn.query("SELECT SCRIPT_OBJECT_ID FROM EXA_ALL_SCRIPTS WHERE SCRIPT_SCHEMA='ADAPTER' AND SCRIPT_NAME='FAST_ADAPTER_REPLACE'")) # todo replace by conn.queryScalar if available
        # now again with DROP privileges
        self.query('GRANT DROP ANY SCRIPT TO user2')
        self.commit()
        conn.commit() # Commit, to get new privileges
        conn.query(udf.fixindent('''
            CREATE OR REPLACE PYTHON ADAPTER SCRIPT ADAPTER.FAST_ADAPTER_REPLACE AS
            def adapter_call(request):
                pass
            /
            '''))
        newObjectId = conn.query("SELECT SCRIPT_OBJECT_ID FROM EXA_ALL_SCRIPTS WHERE SCRIPT_SCHEMA='ADAPTER' AND SCRIPT_NAME='FAST_ADAPTER_REPLACE'") # todo replace by conn.queryScalar if available
        self.assertEqual(1, len(newObjectId))
        self.assertEqual(1, len(newObjectId[0]))
        self.assertNotEqual(oldAdapterObjectId, newObjectId[0][0])
# Drop Adapter Script
    def testDropAdapterNoPrivs(self):
        """A user without privileges cannot drop an adapter script."""
        self.createUser("user2", "user2")
        self.commit()
        conn = self.getConnection('user2', 'user2')
        with self.assertRaisesRegexp(Exception, 'insufficient privileges for dropping script'):
            conn.query('DROP ADAPTER SCRIPT ADAPTER.FAST_ADAPTER')
    def testDropAdapterWithSysPriv(self):
        """DROP ANY SCRIPT allows dropping an adapter script in a foreign schema."""
        self.createUser("user2", "user2")
        self.query('GRANT DROP ANY SCRIPT TO user2')
        self.commit()
        conn = self.getConnection('user2', 'user2')
        conn.query('DROP ADAPTER SCRIPT ADAPTER.FAST_ADAPTER')
    def testDropAdapterAsSchemaOwner(self):
        """The owner of a schema can drop adapter scripts in it without DROP ANY SCRIPT."""
        self.createUser("user2", "user2")
        self.query('GRANT CREATE SCHEMA TO user2')
        self.query('GRANT CREATE ANY SCRIPT TO user2')
        self.commit()
        conn = self.getConnection('user2', 'user2')
        conn.query('CREATE SCHEMA ADAPTER_2')
        conn.query(udf.fixindent('''
            CREATE OR REPLACE PYTHON ADAPTER SCRIPT ADAPTER_2.FAST_ADAPTER_DROP AS
            def adapter_call(request):
                pass
            /
            '''))
        conn.query('DROP ADAPTER SCRIPT ADAPTER_2.FAST_ADAPTER_DROP')
# Create Virtual Schema
    def testCreateVSchemaNoPrivs(self):
        """A user without CREATE VIRTUAL SCHEMA cannot create a virtual schema."""
        self.createUser("user2", "user2")
        self.commit()
        conn = self.getConnection('user2', 'user2')
        with self.assertRaisesRegexp(Exception, 'insufficient privileges for creating virtual schema'):
            conn.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    def testCreateVSchemaNoScriptPrivs(self):
        """CREATE VIRTUAL SCHEMA also requires EXECUTE on the adapter script."""
        self.createUser("user2", "user2")
        self.query('GRANT DROP ANY VIRTUAL SCHEMA TO user2')
        self.query('GRANT CREATE VIRTUAL SCHEMA TO user2')
        self.commit()
        conn = self.getConnection('user2', 'user2')
        with self.assertRaisesRegexp(Exception, 'insufficient privileges for calling adapter script'):
            conn.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    def testCreateVSchema(self):
        """CREATE VIRTUAL SCHEMA succeeds with both sys priv and EXECUTE on the adapter."""
        self.createUser("user2", "user2")
        self.query('GRANT DROP ANY VIRTUAL SCHEMA TO user2')
        self.query('GRANT CREATE VIRTUAL SCHEMA TO user2')
        self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
        self.commit()
        conn = self.getConnection('user2', 'user2')
        conn.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
# Drop Virtual Schema
    def testDropVSchemaNoDropPrivs(self):
        """A user who is neither owner nor privileged cannot drop a virtual schema."""
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        self.createUser("user2", "user2")
        self.commit()
        conn = self.getConnection('user2', 'user2')
        with self.assertRaisesRegexp(Exception, 'insufficient privileges for dropping virtual schema'):
            conn.query('DROP VIRTUAL SCHEMA VS1 CASCADE')
    def testDropVSchemaAsOwner(self):
        """The owner can drop their virtual schema without DROP ANY VIRTUAL SCHEMA."""
        # user has no privileges to drop, but is owner
        self.createUser("user2", "user2")
        self.query('GRANT CREATE VIRTUAL SCHEMA TO user2')
        self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
        self.commit()
        conn = self.getConnection('user2', 'user2')
        conn.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        conn.query('DROP VIRTUAL SCHEMA VS1 CASCADE')
    def testDropVSchemaWithPrivs(self):
        """DROP ANY VIRTUAL SCHEMA allows dropping a schema owned by someone else."""
        # user is not owner, but has privileges to drop
        self.createUser("user2", "user2")
        self.query('GRANT CREATE VIRTUAL SCHEMA TO user2')
        self.query('GRANT DROP ANY VIRTUAL SCHEMA TO user2')
        #self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        self.commit()
        conn = self.getConnection('user2', 'user2')
        conn.query('DROP VIRTUAL SCHEMA VS1 CASCADE')
# Alter Virtual Schema Refresh
    def testAlterVSchemaRefreshNoPrivs(self):
        """REFRESH must fail without any ALTER/REFRESH privilege."""
        self.createUser("user2", "user2")
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        self.commit()
        conn = self.getConnection('user2', 'user2')
        with self.assertRaisesRegexp(Exception, 'insufficient privileges for altering virtual schema'):
            conn.query('ALTER VIRTUAL SCHEMA VS1 REFRESH')
    def testAlterVSchemaRefreshNoScriptPrivs(self):
        """REFRESH with ALTER ANY VIRTUAL SCHEMA works even without EXECUTE on the adapter."""
        self.createUser("user2", "user2")
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        self.query('GRANT ALTER ANY VIRTUAL SCHEMA TO user2')
        self.commit()
        conn = self.getConnection('user2', 'user2')
        #with self.assertRaisesRegexp(Exception, 'insufficient privileges for calling adapter script'):
        conn.query('ALTER VIRTUAL SCHEMA VS1 REFRESH')
    def testAlterVSchemaRefreshWithAlterSysPriv(self):
        """REFRESH is allowed with the ALTER ANY VIRTUAL SCHEMA system privilege."""
        self.createUser("user2", "user2")
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        #self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
        self.query('GRANT ALTER ANY VIRTUAL SCHEMA TO user2')
        self.commit()
        conn = self.getConnection('user2', 'user2')
        conn.query('ALTER VIRTUAL SCHEMA VS1 REFRESH')
    def testAlterVSchemaRefreshWithRefreshSysPriv(self):
        """REFRESH is allowed with the ALTER ANY VIRTUAL SCHEMA REFRESH system privilege."""
        self.createUser("user2", "user2")
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        #self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
        self.query('GRANT ALTER ANY VIRTUAL SCHEMA REFRESH TO user2')
        self.commit()
        conn = self.getConnection('user2', 'user2')
        conn.query('ALTER VIRTUAL SCHEMA VS1 REFRESH')
    def testAlterVSchemaRefreshWithAlterObjPriv(self):
        """REFRESH is allowed with the ALTER object privilege on the schema."""
        self.createUser("user2", "user2")
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        #self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
        self.query('GRANT ALTER ON VS1 TO user2')
        self.commit()
        conn = self.getConnection('user2', 'user2')
        conn.query('ALTER VIRTUAL SCHEMA VS1 REFRESH')
    def testAlterVSchemaRefreshWithRefreshObjPriv(self):
        """REFRESH is allowed with the REFRESH object privilege on the schema."""
        self.createUser("user2", "user2")
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        #self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
        self.query('GRANT REFRESH ON VS1 TO user2')
        self.commit()
        conn = self.getConnection('user2', 'user2')
        conn.query('ALTER VIRTUAL SCHEMA VS1 REFRESH')
    def testAlterVSchemaRefreshAsOwner(self):
        """The schema owner may REFRESH without any explicit ALTER privilege."""
        self.createUser("user2", "user2")
        self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
        self.query('GRANT CREATE VIRTUAL SCHEMA TO user2')
        self.commit()
        conn = self.getConnection('user2', 'user2')
        conn.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        conn.query('ALTER VIRTUAL SCHEMA VS1 REFRESH')
# Alter Virtual Schema Set
    def testAlterVSchemaSetNoPrivs(self):
        """SET must fail without ALTER privileges, leaving properties untouched."""
        self.createUser("user2", "user2")
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        self.commit()
        conn = self.getConnection('user2', 'user2')
        with self.assertRaisesRegexp(Exception, 'insufficient privileges for altering virtual schema'):
            conn.query("ALTER VIRTUAL SCHEMA VS1 SET FOO='BAR'")
        self.assertEqual(0, self.queryScalar("SELECT COUNT(*) FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'"))
    def testAlterVSchemaSetNoScriptPrivs(self):
        """SET with ALTER ANY VIRTUAL SCHEMA works even without EXECUTE on the adapter."""
        self.createUser("user2", "user2")
        self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
        self.query('GRANT ALTER ANY VIRTUAL SCHEMA TO user2')
        self.commit()
        conn = self.getConnection('user2', 'user2')
        conn.query("ALTER VIRTUAL SCHEMA VS1 SET FOO='BAR'")
        self.assertRowsEqual(
            [("FOO","BAR")],
            conn.query("SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_ALL_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'"))
        # with self.assertRaisesRegexp(Exception, 'insufficient privileges for calling adapter script'):
        #     conn.query("ALTER VIRTUAL SCHEMA VS1 SET FOO='BAR'")
        # self.assertEqual(0, self.queryScalar("SELECT COUNT(*) FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'"))
def testAlterVSchemaSetWithAlterSysPriv(self):
    """System privilege ALTER ANY VIRTUAL SCHEMA allows setting properties."""
    self.createUser("user2", "user2")
    self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    #self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
    self.query('GRANT ALTER ANY VIRTUAL SCHEMA TO user2')
    self.commit()
    conn = self.getConnection('user2', 'user2')
    conn.query("ALTER VIRTUAL SCHEMA VS1 SET FOO='BAR'")
    self.assertRowsEqual(
        [("FOO","BAR")],
        conn.query("SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_ALL_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'"))
def testAlterVSchemaSetWithAlterObjPriv(self):
    """Object privilege ALTER on the schema allows setting properties."""
    self.createUser("user2", "user2")
    self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    #self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
    self.query('GRANT ALTER ON VS1 TO user2')
    self.commit()
    conn = self.getConnection('user2', 'user2')
    conn.query("ALTER VIRTUAL SCHEMA VS1 SET FOO='BAR'")
    self.assertRowsEqual(
        [("FOO","BAR")],
        conn.query("SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_ALL_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'"))
def testAlterVSchemaSetAsOwner(self):
    """The schema owner may set properties without any ALTER grant."""
    self.createUser("user2", "user2")
    self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
    self.query('GRANT CREATE VIRTUAL SCHEMA TO user2')
    self.commit()
    conn = self.getConnection('user2', 'user2')
    # user2 creates the schema itself, becoming its owner.
    conn.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    conn.query("ALTER VIRTUAL SCHEMA VS1 SET FOO='BAR'")
    self.assertRowsEqual(
        [("FOO","BAR")],
        conn.query("SELECT PROPERTY_NAME, PROPERTY_VALUE FROM EXA_ALL_VIRTUAL_SCHEMA_PROPERTIES WHERE SCHEMA_NAME = 'VS1'"))
# Alter Virtual Schema Change Owner
def testAlterVSchemaChangeOwnerNoPrivs(self):
    """CHANGE OWNER must fail without privileges; ownership stays with SYS."""
    self.createUser("user2", "user2")
    self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    self.commit()
    conn = self.getConnection('user2', 'user2')
    with self.assertRaisesRegexp(Exception, 'insufficient privileges for altering virtual schema'):
        conn.query("ALTER VIRTUAL SCHEMA VS1 CHANGE OWNER sys")
    self.assertEqual("SYS", self.queryScalar("SELECT SCHEMA_OWNER FROM EXA_VIRTUAL_SCHEMAS WHERE SCHEMA_NAME = 'VS1'"))
    ## added, so that the schema can be dropped -- SPOT-4245
    self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
def testAlterVSchemaChangeOwnerWithSysPriv(self):
    """ALTER ANY VIRTUAL SCHEMA allows transferring ownership."""
    self.createUser("user2", "user2")
    self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    self.query('GRANT ALTER ANY VIRTUAL SCHEMA TO user2')
    self.commit()
    conn = self.getConnection('user2', 'user2')
    conn.query("ALTER VIRTUAL SCHEMA VS1 CHANGE OWNER USER2")
    self.assertRowsEqual([("USER2",)], conn.query("SELECT SCHEMA_OWNER FROM EXA_VIRTUAL_SCHEMAS WHERE SCHEMA_NAME = 'VS1'"))
    ## added, so that the schema can be dropped -- SPOT-4245
    self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
def testAlterVSchemaChangeOwnerObjPrivNotSufficient(self):
    """The ALTER object privilege must NOT allow changing the owner."""
    # Object privilege ALTER must not be sufficient to change the owner
    self.createUser("user2", "user2")
    self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    self.query('GRANT ALTER ON VS1 TO user2')
    self.commit()
    conn = self.getConnection('user2', 'user2')
    with self.assertRaisesRegexp(Exception, 'insufficient privileges for altering virtual schema'):
        conn.query("ALTER VIRTUAL SCHEMA VS1 CHANGE OWNER sys")
    self.assertEqual("SYS", self.queryScalar("SELECT SCHEMA_OWNER FROM EXA_VIRTUAL_SCHEMAS WHERE SCHEMA_NAME = 'VS1'"))
# Open Virtual Schema
def testOpenVSchema(self):
    """OPEN SCHEMA on a virtual schema must work without any granted privileges.

    Bug fix: OPEN SCHEMA is session-scoped and was executed on ``conn``
    (user2's session), but the CURRENT_SCHEMA check previously ran via
    ``self.queryScalar`` on the admin session, so the assertion never
    inspected the session that was changed. Verify CURRENT_SCHEMA on the
    same connection that opened the schema (same pattern as the
    SCHEMA_OWNER check in testAlterVSchemaChangeOwnerWithSysPriv).
    """
    self.createUser("user2", "user2")
    self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    self.commit()
    conn = self.getConnection('user2', 'user2')
    conn.query("OPEN SCHEMA VS1")
    self.assertRowsEqual([("VS1",)], conn.query("SELECT CURRENT_SCHEMA"))
# Select Virtual Table
def testSelectVTableNoPrivs(self):
    """Selecting from a virtual table without privileges must fail."""
    self.createUser("user2", "user2")
    self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    self.commit()
    conn = self.getConnection('user2', 'user2')
    with self.assertRaisesRegexp(Exception, 'insufficient privileges: SELECT on table DUMMY'):
        conn.query("SELECT * FROM VS1.DUMMY")
def testSelectVTableNoScriptPrivsWorks(self):
    """SELECT on the schema suffices; EXECUTE on the adapter script is not needed.

    The commented-out block documents the previously expected behavior.
    """
    self.createUser("user2", "user2")
    self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    self.query('GRANT SELECT ON VS1 TO USER2')
    self.commit()
    conn = self.getConnection('user2', 'user2')
    self.assertRowsEqual([('FOO', 'BAR')], conn.query("SELECT * FROM VS1.DUMMY"))
    #with self.assertRaisesRegexp(Exception, 'insufficient privileges for calling adapter script'):
    #    conn.query("SELECT * FROM VS1.DUMMY")
def testSelectVTableWithSysPriv(self):
    """System privilege SELECT ANY TABLE allows reading a virtual table."""
    self.createUser("user2", "user2")
    self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    #self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
    self.query('GRANT SELECT ANY TABLE TO USER2')
    self.commit()
    conn = self.getConnection('user2', 'user2')
    self.assertRowsEqual([('FOO', 'BAR')], conn.query("SELECT * FROM VS1.DUMMY"))
def testSelectVTableWithObjPriv(self):
    """Object privilege SELECT on the schema allows reading a virtual table."""
    self.createUser("user2", "user2")
    self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    #self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
    self.query('GRANT SELECT ON VS1 TO USER2')
    self.commit()
    conn = self.getConnection('user2', 'user2')
    self.assertRowsEqual([('FOO', 'BAR')], conn.query("SELECT * FROM VS1.DUMMY"))
def testSelectVTableAsOwner(self):
    """The creating (admin) session itself can select from the virtual table.

    NOTE(review): unlike the sibling tests, the SELECT runs on the admin
    connection, which created (and therefore owns) VS1 — no user2 session
    is opened here.
    """
    self.createUser("user2", "user2")
    self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
    self.assertRowsEqual([('FOO', 'BAR')], self.query("SELECT * FROM VS1.DUMMY"))
# Describe Virtual Table
def testDescribeVTableNoPrivs(self):
    """DESCRIBE on a virtual table without privileges must fail."""
    self.createUser("user2", "user2")
    self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    self.commit()
    conn = self.getConnection('user2', 'user2')
    with self.assertRaisesRegexp(Exception, 'insufficient privileges for describing object'):
        conn.query("DESCRIBE VS1.DUMMY")
def testDescribeVTableWithSysPriv(self):
    """SELECT ANY TABLE allows DESCRIBE on a virtual table."""
    self.createUser("user2", "user2")
    self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    self.query('GRANT SELECT ANY TABLE TO USER2')
    self.commit()
    conn = self.getConnection('user2', 'user2')
    # Expected layout matches the FAST_ADAPTER's two VARCHAR columns.
    self.assertRowsEqual(
        [('KEY', 'VARCHAR(2000000) UTF8', None, None, None),('VALUE', 'VARCHAR(2000000) UTF8', None, None, None)],
        conn.query("DESCRIBE VS1.DUMMY"))
def testDescribeVTableWithObjPriv(self):
    """SELECT on the schema allows DESCRIBE on a virtual table."""
    self.createUser("user2", "user2")
    self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    self.query('GRANT SELECT ON VS1 TO USER2')
    self.commit()
    conn = self.getConnection('user2', 'user2')
    self.assertRowsEqual(
        [('KEY', 'VARCHAR(2000000) UTF8', None, None, None),('VALUE', 'VARCHAR(2000000) UTF8', None, None, None)],
        conn.query("DESCRIBE VS1.DUMMY"))
def testDescribeVTableTableObjPrivsNotSupported(self):
    """Granting object privileges on a single virtual TABLE must be rejected."""
    self.createUser("user2", "user2")
    self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    with self.assertRaisesRegexp(Exception, 'object privileges for virtual tables are not supported'):
        self.query('GRANT SELECT ON VS1.DUMMY TO USER2')
def testDescribeVTableAsOwner(self):
    """The creating (owner) session can DESCRIBE the virtual table without grants."""
    self.createUser("user2", "user2")
    self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    self.assertRowsEqual(
        [('KEY', 'VARCHAR(2000000) UTF8', None, None, None),('VALUE', 'VARCHAR(2000000) UTF8', None, None, None)],
        self.query("DESCRIBE VS1.DUMMY"))
# Create Views with Virtual Tables
def testCreateViewWithVTableNoSelectPrivs(self):
    """Creating a view over a virtual table fails without SELECT on the table."""
    self.createUser("user2", "user2")
    self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
    self.query('GRANT CREATE VIEW TO user2')
    self.query('GRANT CREATE SCHEMA TO user2')
    self.commit()
    conn = self.getConnection('user2', 'user2')
    conn.query('CREATE SCHEMA VIEWS')
    with self.assertRaisesRegexp(Exception, 'insufficient privileges: SELECT on table DUMMY'):
        conn.query('CREATE VIEW VIEWS.VTABLE_VIEW AS SELECT * FROM VS1.DUMMY')
def testCreateViewWithVTableWithSelectPriv(self):
    """With SELECT on the schema, a view over the virtual table can be created and queried."""
    self.createUser("user2", "user2")
    self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    #self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
    self.query('GRANT CREATE VIEW TO user2')
    self.query('GRANT CREATE SCHEMA TO user2')
    self.query('GRANT SELECT ON VS1 TO user2')
    self.commit()
    conn = self.getConnection('user2', 'user2')
    conn.query('CREATE SCHEMA VIEWS')
    conn.query('CREATE VIEW VIEWS.VTABLE_VIEW AS SELECT * FROM VS1.DUMMY')
    # View result must match a direct select on the virtual table.
    self.assertRowsEqual(
        self.query('SELECT * FROM VS1.DUMMY'),
        conn.query('SELECT * FROM VIEWS.VTABLE_VIEW'))
# Query Views with Virtual Tables
def testQueryViewWithVTable(self):
    """View access is checked against the view OWNER's privileges on the virtual table.

    First the owner (user2) has SELECT on VS1, so querying the view works;
    after revoking that privilege, querying the view must fail.
    """
    self.createUser("user2", "user2")
    self.query('CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')
    #self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
    self.query('GRANT CREATE VIEW TO user2')
    self.query('GRANT CREATE SCHEMA TO user2')
    self.query('GRANT SELECT ON VS1 TO user2')
    self.commit()
    conn = self.getConnection('user2', 'user2')
    conn.query('CREATE SCHEMA VIEWS')
    conn.query('CREATE VIEW VIEWS.VTABLE_VIEW AS SELECT * FROM VS1.DUMMY')
    conn.commit()
    # The owner of the view (user2) has privileges, so works
    self.assertRowsEqual(
        self.query('SELECT * FROM VS1.DUMMY'),
        self.query('SELECT * FROM VIEWS.VTABLE_VIEW'))
    self.query('REVOKE SELECT ON VS1 FROM user2')
    self.commit()
    # The owner of the view has no more privileges to SELECT from vtable
    with self.assertRaisesRegexp(Exception, 'insufficient privileges: SELECT on table DUMMY'):
        self.query('SELECT * FROM VIEWS.VTABLE_VIEW')
def testRowlevelSecurityUseCase(self):
    """End-to-end row-level-security scenario (SPOT-4091).

    An adapter pushes down a filter on CURRENT_USER, so a view over the
    virtual table only exposes the rows belonging to the querying user
    (SYS sees everything). user4091 gets SELECT only on the view and must
    see only its own rows. All objects are dropped at the end.
    """
    self.query('''CREATE SCHEMA adapter_4091_schema''')
    self.query('''CREATE SCHEMA data_4091_schema''')
    self.query('''CREATE TABLE data_4091_schema.t(a1 varchar(100), a2 varchar(100), userName varchar(100))''')
    self.query('''INSERT INTO data_4091_schema.t values('a', 'b', 'SYS')''')
    self.query('''INSERT INTO data_4091_schema.t values('c', 'd', 'SYS')''')
    self.query('''INSERT INTO data_4091_schema.t values('e', 'f', 'U2')''')
    self.query('''INSERT INTO data_4091_schema.t values('g', 'h', 'U2')''')
    self.query('''INSERT INTO data_4091_schema.t values('i', 'j', 'USER4091')''')
    self.query('''INSERT INTO data_4091_schema.t values('k', 'l', 'USER4091')''')
    # Expected result sets: everything (as seen by SYS) vs. user4091's rows.
    all_data_rows = [('a','b','SYS'),('c','d','SYS'),
                     ('e','f','U2'),('g','h','U2'),
                     ('i','j','USER4091'),('k','l','USER4091')]
    user4091_data_rows = [('i','j','USER4091'),('k','l','USER4091')]
    # Adapter whose pushdown SQL filters on current_user (the actual RLS logic).
    self.query('''
        CREATE OR REPLACE PYTHON ADAPTER SCRIPT adapter_4091_schema.rls_adapter AS
        import json
        import string
        def adapter_call(request):
            # database expects utf-8 encoded string of type str. unicode not yet supported
            root = json.loads(request)
            if root["type"] == "createVirtualSchema":
                res = {
                    "type": "createVirtualSchema",
                    "schemaMetadata": {
                        "tables": [
                        {
                            "name": "T",
                            "columns": [{
                                "name": "a1",
                                "dataType": {"type": "VARCHAR", "size": 2000000}
                            },{
                                "name": "a2",
                                "dataType": {"type": "VARCHAR", "size": 2000000}
                            },{
                                "name": "userName",
                                "dataType": {"type": "VARCHAR", "size": 100}
                            }]
                        }]
                    }
                }
                return json.dumps(res).encode('utf-8')
            elif root["type"] == "dropVirtualSchema":
                return json.dumps({"type": "dropVirtualSchema"}).encode('utf-8')
            elif root["type"] == "setProperties":
                return json.dumps({"type": "setProperties"}).encode('utf-8')
            elif root["type"] == "refresh":
                return json.dumps({"type": "refresh"}).encode('utf-8')
            if root["type"] == "getCapabilities":
                return json.dumps({
                    "type": "getCapabilities",
                    "capabilities": []
                }).encode('utf-8') # database expects utf-8 encoded string of type str. unicode not yet supported.
            elif root["type"] == "pushdown":
                res = {
                    "type": "pushdown",
                    "sql": "SELECT * FROM data_4091_schema.t WHERE userName = current_user or current_user = 'SYS'"
                }
                return json.dumps(res).encode('utf-8')
            else:
                raise ValueError('Unsupported callback')
        /
        ''')
    #self.query('''DROP VIRTUAL SCHEMA RSL_SCHEMA CASCADE''')
    self.query('''CREATE VIRTUAL SCHEMA RSL_SCHEMA USING adapter_4091_schema.rls_adapter''')
    # SYS sees all rows, both directly and through the view.
    self.assertRowsEqualIgnoreOrder(all_data_rows,self.query('''SELECT * FROM RSL_SCHEMA.T'''))
    self.query('''CREATE SCHEMA USER_SCHEMA''')
    self.query('''CREATE OR REPLACE VIEW USER_SCHEMA.RSL_VIEW AS SELECT * FROM RSL_SCHEMA.T''')
    self.assertRowsEqualIgnoreOrder(all_data_rows,self.query('''SELECT * FROM USER_SCHEMA.RSL_VIEW'''))
    self.createUser("user4091","user4091")
    self.query('''GRANT CREATE SESSION TO user4091''')
    self.query('''GRANT SELECT ON USER_SCHEMA.RSL_VIEW TO user4091''')
    #GRANT EXECUTE ON adapter_4091_schema.fast_adapter to U1;
    self.commit()
    conn = self.getConnection('user4091','user4091')
    # user4091 only sees its own rows through the view.
    self.assertRowsEqualIgnoreOrder(user4091_data_rows, conn.query('''SELECT * FROM USER_SCHEMA.RSL_VIEW'''))
    conn.close()
    # Cleanup.
    self.query('''DROP USER USER4091''')
    self.query('''DROP VIRTUAL SCHEMA RSL_SCHEMA CASCADE''')
    self.query('''DROP SCHEMA USER_SCHEMA CASCADE''')
    self.query('''DROP ADAPTER SCRIPT adapter_4091_schema.rls_adapter''')
    self.query('''DROP SCHEMA data_4091_schema cascade''')
    self.query('''DROP SCHEMA adapter_4091_schema cascade''')
# EXA_*_VIRTUAL_SCHEMA_PROPERTIES
# Test Variants:
#-- owner | obj priv ALTER schema | sys ALTER ANY VS
#-- no | no | no
#-- yes | no | no
#-- no | direct | no
#-- no | via role | no
#-- no | via public role | no
#-- no | no | direct
#-- no | no | via role
#-- no | no | via public role
# - Other privs not sufficient (ALTER/DROP ANY VIRTUAL SCHEMA REFRESH, SELECT ANY TABLE, SELECT on schema, REFRESH on schema)
def testSysTableVSchemaPropertiesNoDBAAccess(self):
    """A plain user may not read the DBA variant of the properties sys table."""
    self.createUser("user2", "user2")
    self.commit()
    conn = self.getConnection('user2', 'user2')
    with self.assertRaisesRegexp(Exception, 'insufficient privileges: SELECT on table EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES'):
        conn.query("SELECT * FROM EXA_DBA_VIRTUAL_SCHEMA_PROPERTIES")
def testSysTableVSchemaPropertiesNoPrivs(self):
    """Without privileges, the ALL/USER properties sys tables are empty."""
    self.createUser("user2", "user2")
    self.query("CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER WITH P='V1'")
    self.commit()
    conn = self.getConnection('user2', 'user2')
    self.assertRowsEqual(
        [],
        conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_ALL_VIRTUAL_SCHEMA_PROPERTIES"))
    self.assertRowsEqual(
        [],
        conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_USER_VIRTUAL_SCHEMA_PROPERTIES"))
# User has privs, but not the ones required to view virtual schema properties
def testSysTableVSchemaPropertiesWrongPrivs(self):
    """Unrelated privileges (kept commented out) must not expose properties.

    The commented grants document which privileges were considered and are
    deliberately NOT sufficient to see virtual schema properties.
    """
    self.createUser("user2", "user2")
    self.query("CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER WITH P='V1'")
    self.query("CREATE VIRTUAL SCHEMA VS2 USING ADAPTER.FAST_ADAPTER WITH P='V2'")
    #self.query("GRANT SELECT ANY TABLE to user2")
    #self.query("GRANT ALTER ANY SCHEMA to user2")
    #self.query("GRANT ALTER ANY VIRTUAL SCHEMA REFRESH to user2")
    #self.query("GRANT SELECT on VS1 to user2")
    #self.query("GRANT REFRESH on VS1 to user2")
    self.commit()
    conn = self.getConnection('user2', 'user2')
    self.assertRowsEqual(
        [],
        conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_ALL_VIRTUAL_SCHEMA_PROPERTIES"))
    self.assertRowsEqual(
        [],
        conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_USER_VIRTUAL_SCHEMA_PROPERTIES"))
# def testSysTableVSchemaPropertiesOwner(self):
# self.createUser("user2", "user2")
# self.query("CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER WITH P='V1'")
# self.query("CREATE VIRTUAL SCHEMA VS2 USING ADAPTER.FAST_ADAPTER WITH P='V2'")
# self.query("ALTER VIRTUAL SCHEMA VS1 CHANGE OWNER user2")
# self.commit()
# conn = self.getConnection('user2', 'user2')
# self.assertRowsEqual(
# [('VS1', 'P', 'V1')],
# conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_ALL_VIRTUAL_SCHEMA_PROPERTIES"))
# self.assertRowsEqual(
# [('VS1', 'P', 'V1')],
# conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_USER_VIRTUAL_SCHEMA_PROPERTIES"))
# ## added, so that the schema can be dropped -- SPOT-4245
# self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
def testSysTableVSchemaPropertiesAlterObjPriv(self):
    """Direct ALTER object privilege exposes VS1 in EXA_ALL_* but not EXA_USER_*."""
    self.createUser("user2", "user2")
    self.query("CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER WITH P='V1'")
    self.query("CREATE VIRTUAL SCHEMA VS2 USING ADAPTER.FAST_ADAPTER WITH P='V2'")
    self.query("GRANT ALTER ON VS1 TO user2")
    self.commit()
    conn = self.getConnection('user2', 'user2')
    self.assertRowsEqual(
        [('VS1', 'P', 'V1')],
        conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_ALL_VIRTUAL_SCHEMA_PROPERTIES"))
    # EXA_USER_* only lists schemas the user OWNS, so it stays empty.
    self.assertRowsEqual(
        [],
        conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_USER_VIRTUAL_SCHEMA_PROPERTIES"))
def testSysTableVSchemaPropertiesAlterObjPrivViaRole(self):
    """ALTER object privilege inherited through a role chain exposes VS1 in EXA_ALL_*."""
    self.createUser("user2", "user2")
    self.query("CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER WITH P='V1'")
    self.query("CREATE VIRTUAL SCHEMA VS2 USING ADAPTER.FAST_ADAPTER WITH P='V2'")
    # Chain: user2 -> role_03 -> role_02 -> role_01 (which holds the grant).
    self.query("DROP ROLE IF EXISTS role_01")
    self.query("DROP ROLE IF EXISTS role_02")
    self.query("DROP ROLE IF EXISTS role_03")
    self.query("CREATE ROLE role_01")
    self.query("CREATE ROLE role_02")
    self.query("CREATE ROLE role_03")
    self.query("GRANT role_01 TO role_02")
    self.query("GRANT role_02 TO role_03")
    self.query("GRANT role_03 TO user2")
    self.query("GRANT ALTER ON VS1 TO role_01")
    self.commit()
    conn = self.getConnection('user2', 'user2')
    self.assertRowsEqual(
        [('VS1', 'P', 'V1')],
        conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_ALL_VIRTUAL_SCHEMA_PROPERTIES"))
    self.assertRowsEqual(
        [],
        conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_USER_VIRTUAL_SCHEMA_PROPERTIES"))
def testSysTableVSchemaPropertiesAlterObjPrivViaPublicRole(self):
    """ALTER object privilege granted to PUBLIC exposes VS1 in EXA_ALL_*."""
    self.createUser("user2", "user2")
    self.query("CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER WITH P='V1'")
    self.query("CREATE VIRTUAL SCHEMA VS2 USING ADAPTER.FAST_ADAPTER WITH P='V2'")
    self.query("GRANT ALTER ON VS1 TO public")
    self.commit()
    conn = self.getConnection('user2', 'user2')
    self.assertRowsEqual(
        [('VS1', 'P', 'V1')],
        conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_ALL_VIRTUAL_SCHEMA_PROPERTIES"))
    self.assertRowsEqual(
        [],
        conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_USER_VIRTUAL_SCHEMA_PROPERTIES"))
def testSysTableVSchemaPropertiesAlterSysPriv(self):
    """ALTER ANY VIRTUAL SCHEMA exposes the properties of ALL virtual schemas."""
    self.createUser("user2", "user2")
    self.query("CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER WITH P='V1'")
    self.query("CREATE VIRTUAL SCHEMA VS2 USING ADAPTER.FAST_ADAPTER WITH P='V2'")
    self.query("GRANT ALTER ANY VIRTUAL SCHEMA TO user2")
    self.commit()
    conn = self.getConnection('user2', 'user2')
    self.assertRowsEqual(
        [('VS1', 'P', 'V1'), ('VS2', 'P', 'V2')],
        conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_ALL_VIRTUAL_SCHEMA_PROPERTIES ORDER BY SCHEMA_NAME"))
    self.assertRowsEqual(
        [],
        conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_USER_VIRTUAL_SCHEMA_PROPERTIES"))
def testSysTableVSchemaPropertiesAlterSysPrivViaRole(self):
    """ALTER ANY VIRTUAL SCHEMA inherited via a role chain exposes all schemas."""
    self.createUser("user2", "user2")
    self.query("CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER WITH P='V1'")
    self.query("CREATE VIRTUAL SCHEMA VS2 USING ADAPTER.FAST_ADAPTER WITH P='V2'")
    # Chain: user2 -> role_03 -> role_02 -> role_01 (which holds the grant).
    self.query("DROP ROLE IF EXISTS role_01")
    self.query("DROP ROLE IF EXISTS role_02")
    self.query("DROP ROLE IF EXISTS role_03")
    self.query("CREATE ROLE role_01")
    self.query("CREATE ROLE role_02")
    self.query("CREATE ROLE role_03")
    self.query("GRANT role_01 TO role_02")
    self.query("GRANT role_02 TO role_03")
    self.query("GRANT role_03 TO user2")
    self.query("GRANT ALTER ANY VIRTUAL SCHEMA TO role_01")
    self.commit()
    conn = self.getConnection('user2', 'user2')
    self.assertRowsEqual(
        [('VS1', 'P', 'V1'), ('VS2', 'P', 'V2')],
        conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_ALL_VIRTUAL_SCHEMA_PROPERTIES ORDER BY SCHEMA_NAME"))
    self.assertRowsEqual(
        [],
        conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_USER_VIRTUAL_SCHEMA_PROPERTIES"))
def testSysTableVSchemaPropertiesAlterSysPrivViaPublicRole(self):
    """ALTER ANY VIRTUAL SCHEMA granted to PUBLIC exposes all schemas.

    The grant to PUBLIC is revoked at the end so it cannot leak into
    other tests.
    """
    self.createUser("user2", "user2")
    self.query("CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER WITH P='V1'")
    self.query("CREATE VIRTUAL SCHEMA VS2 USING ADAPTER.FAST_ADAPTER WITH P='V2'")
    self.query("GRANT ALTER ANY VIRTUAL SCHEMA TO public")
    self.commit()
    conn = self.getConnection('user2', 'user2')
    self.assertRowsEqual(
        [('VS1', 'P', 'V1'), ('VS2', 'P', 'V2')],
        conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_ALL_VIRTUAL_SCHEMA_PROPERTIES ORDER BY SCHEMA_NAME"))
    self.assertRowsEqual(
        [],
        conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_USER_VIRTUAL_SCHEMA_PROPERTIES"))
    self.query("REVOKE ALTER ANY VIRTUAL SCHEMA FROM public")
    self.commit()
def testSysTableVSchemaPropertiesMultiPrivs(self):
    """With sys priv, obj priv AND ownership combined, EXA_ALL_* shows both
    schemas and EXA_USER_* shows the owned one (VS1) exactly once."""
    self.createUser("user2", "user2")
    self.query("CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER WITH P='V1'")
    self.query("CREATE VIRTUAL SCHEMA VS2 USING ADAPTER.FAST_ADAPTER WITH P='V2'")
    # Give the user all kinds of privs
    self.query("GRANT ALTER ANY VIRTUAL SCHEMA TO user2")
    self.query("GRANT ALTER on VS1 TO user2")
    self.query("ALTER VIRTUAL SCHEMA VS1 CHANGE OWNER user2")
    self.query("DROP ROLE IF EXISTS role_01")
    self.query("DROP ROLE IF EXISTS role_02")
    self.query("DROP ROLE IF EXISTS role_03")
    self.query("CREATE ROLE role_01")
    self.query("CREATE ROLE role_02")
    self.query("CREATE ROLE role_03")
    self.query("GRANT role_01 TO role_02")
    self.query("GRANT role_02 TO role_03")
    self.query("GRANT role_03 TO user2")
    self.query("GRANT ALTER ANY VIRTUAL SCHEMA TO role_01")
    self.query("GRANT ALTER on VS1 TO role_01")
    self.commit()
    conn = self.getConnection('user2', 'user2')
    self.assertRowsEqual(
        [('VS1', 'P', 'V1'), ('VS2', 'P', 'V2')],
        conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_ALL_VIRTUAL_SCHEMA_PROPERTIES ORDER BY SCHEMA_NAME"))
    self.assertRowsEqual(
        [('VS1', 'P', 'V1')],
        conn.query("SELECT SCHEMA_NAME, PROPERTY_NAME, PROPERTY_VALUE FROM EXA_USER_VIRTUAL_SCHEMA_PROPERTIES"))
    ## added, so that the schema can be dropped -- SPOT-4245
    self.query('GRANT EXECUTE ON ADAPTER.FAST_ADAPTER TO user2')
class MiscTest(VSchemaTest):
    """Miscellaneous virtual-schema stress tests: very wide tables, very
    large cell values and oversized adapter notes."""

    def testBig(self):
        """Create table(s) with 2*sizeColumns columns, expose them through a
        JDBC virtual schema and compare a full cross-select against the
        native result.

        Improvement: the SQL text is now assembled with ``", ".join(...)``
        instead of repeated ``+=`` in nested loops; the generated statements
        are byte-identical to before.
        """
        self.query('DROP SCHEMA IF EXISTS NBIG CASCADE')
        self.query('CREATE SCHEMA NBIG')
        size = 1            # number of tables
        sizeColumns = 1000  # column pairs (a<j> int, b<j> varchar) per table
        for i in range(0, size):
            columns = ", ".join("a{col} int, b{col} varchar(100)".format(col=j)
                                for j in range(0, sizeColumns))
            self.query("CREATE TABLE T{idx}(".format(idx=i) + columns + ")")
        self.commit() # commit, otherwise adapter doesn't see tables
        for i in range(0, size):
            values = ", ".join("{col}, '{col}'".format(col=j)
                               for j in range(0, sizeColumns))
            self.query("INSERT INTO T{idx} VALUES (".format(idx=i) + values + ")")
        self.createJdbcAdapter()
        self.createVirtualSchemaJdbc("VS1", "NBIG", "ADAPTER.JDBC_ADAPTER", True)
        # Build one big select list over every (table, column) pair; the
        # flattened join reproduces the former nested-loop concatenation,
        # including the trailing space before FROM.
        select = "SELECT " + ", ".join(
            "T{idx}.a{col} a{idx}_{col}, T{idx}.b{col} b{idx}_{col} ".format(idx=i, col=j)
            for i in range(0, size) for j in range(0, sizeColumns))
        select += "FROM "
        selectNative = select
        # Same projection, once against the virtual and once against the
        # native schema.
        select += ", ".join("VS1.T{idx}".format(idx=i) for i in range(0, size))
        selectNative += ", ".join("NBIG.T{idx}".format(idx=i) for i in range(0, size))
        rowsNative = self.query(selectNative)
        rows = self.query(select)
        self.assertRowsEqual(rows, rowsNative)
        self.query('DROP SCHEMA IF EXISTS NBIG CASCADE')

    def testLargeData(self):
        """Insert rows of 2,000,000-character values and compare virtual vs.
        native results.

        Fixes: removed the unused ``char`` variable, replaced the
        Python-2-only backtick repr with ``str(i)`` (identical for 0-9), and
        replaced the 2M-iteration character-append loop with a single string
        multiplication (same value, linear instead of incremental build).
        """
        self.query('DROP SCHEMA IF EXISTS NBIG2 CASCADE')
        self.query('CREATE SCHEMA NBIG2')
        self.query('CREATE TABLE Tlarge(a varchar(2000000)) ')
        self.commit()
        size = 10
        for i in range(0, size):
            # One row per digit i: 2,000,000 repetitions of str(i).
            value = str(i) * 2000000
            self.query("INSERT INTO Tlarge VALUES ('" + value + "')")
        self.createJdbcAdapter()
        self.createVirtualSchemaJdbc("VS1", "NBIG2", "ADAPTER.JDBC_ADAPTER", True)
        rowsNative = self.query("SELECT * FROM NBIG2.Tlarge")
        rows = self.query("SELECT * FROM VS1.Tlarge")
        self.assertRowsEqual(rows, rowsNative)
        self.query('DROP SCHEMA IF EXISTS NBIG2 CASCADE')

    def testLargeAdapterNotes(self):
        """A single huge (~60 MB) adapterNotes string must not break the schema."""
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS_NOTES CASCADE')
        self.createNotesAdapter(schemaName="ADAPTER", adapterName="NOTES_ADAPTER")
        self.query('CREATE VIRTUAL SCHEMA VS_NOTES USING ADAPTER.NOTES_ADAPTER')
        self.query('SELECT * FROM VS_NOTES.DUMMY')
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS_NOTES CASCADE')

    def testManyAdapterNoteEntries(self):
        """adapterNotes containing ~1M JSON entries must not break the schema."""
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS_NOTES2 CASCADE')
        self.createSecondNotesAdapter(schemaName="ADAPTER", adapterName="SECOND_NOTES_ADAPTER")
        self.query('CREATE VIRTUAL SCHEMA VS_NOTES2 USING ADAPTER.SECOND_NOTES_ADAPTER')
        self.query('SELECT * FROM VS_NOTES2.DUMMY')
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS_NOTES2 CASCADE')

    def createNotesAdapter(self, schemaName="ADAPTER", adapterName="FAST_ADAPTER"):
        """Create an adapter whose createVirtualSchema response carries one
        ~60,000,000-character adapterNotes string.

        The adapter script source is a runtime string executed by the
        database and is kept byte-for-byte unchanged.
        """
        self.dropOldAdapter(schemaName, adapterName)
        self.query('CREATE SCHEMA {schema}'.format(schema=schemaName))
        self.query(udf.fixindent('''
            CREATE OR REPLACE PYTHON ADAPTER SCRIPT {schema}.{adapter} AS
            import json
            import string
            def adapter_call(request):
                # database expects utf-8 encoded string of type str. unicode not yet supported
                root = json.loads(request)
                if root["type"] == "createVirtualSchema":
                    c = "0"
                    value = ""
                    value += '\"'
                    for j in range(0,60000000):
                        value+= c
                    value += '\"'
                    res = {{
                        "type": "createVirtualSchema",
                        "schemaMetadata": {{
                            "adapterNotes": "",
                            "tables": [
                            {{
                                "name": "DUMMY",
                                "columns": [{{
                                    "name": "KEY",
                                    "dataType": {{"type": "VARCHAR", "size": 2000000}}
                                }},{{
                                    "name": "VALUE",
                                    "dataType": {{"type": "VARCHAR", "size": 2000000}}
                                }}]
                            }}]
                        }}
                    }}
                    res["schemaMetadata"]["adapterNotes"] = value
                    return json.dumps(res).encode('utf-8')
                elif root["type"] == "dropVirtualSchema":
                    return json.dumps({{"type": "dropVirtualSchema"}}).encode('utf-8')
                elif root["type"] == "setProperties":
                    return json.dumps({{"type": "setProperties"}}).encode('utf-8')
                elif root["type"] == "refresh":
                    return json.dumps({{"type": "refresh"}}).encode('utf-8')
                if root["type"] == "getCapabilities":
                    return json.dumps({{
                        "type": "getCapabilities",
                        "capabilities": []
                    }}).encode('utf-8') # database expects utf-8 encoded string of type str. unicode not yet supported.
                elif root["type"] == "pushdown":
                    res = {{
                        "type": "pushdown",
                        "sql": "SELECT * FROM (VALUES ('FOO', 'BAR')) t"
                    }}
                    return json.dumps(res).encode('utf-8')
                else:
                    raise ValueError('Unsupported callback')
            /
            ''').format(schema = schemaName, adapter = adapterName))

    def createSecondNotesAdapter(self, schemaName="ADAPTER", adapterName="FAST_ADAPTER"):
        """Create an adapter whose adapterNotes is a JSON object with ~1M keys.

        The adapter script source is a runtime string executed by the
        database (Python 2 on the server side, hence the backticks inside
        the literal) and is kept byte-for-byte unchanged.
        """
        self.dropOldAdapter(schemaName, adapterName)
        self.query('CREATE SCHEMA {schema}'.format(schema=schemaName))
        self.query(udf.fixindent('''
            CREATE OR REPLACE PYTHON ADAPTER SCRIPT {schema}.{adapter} AS
            import json
            import string
            def adapter_call(request):
                # database expects utf-8 encoded string of type str. unicode not yet supported
                root = json.loads(request)
                if root["type"] == "createVirtualSchema":
                    res = {{
                        "type": "createVirtualSchema",
                        "schemaMetadata": {{
                            "adapterNotes": "",
                            "tables": [
                            {{
                                "name": "DUMMY",
                                "columns": [{{
                                    "name": "KEY",
                                    "dataType": {{"type": "VARCHAR", "size": 2000000}}
                                }},{{
                                    "name": "VALUE",
                                    "dataType": {{"type": "VARCHAR", "size": 2000000}}
                                }}]
                            }}]
                        }}
                    }}
                    tmp = {{}}
                    for j in range(0,1000000):
                        key = "k"+`j`
                        tmp[key] = `j`
                    res["schemaMetadata"]["adapterNotes"] = '\"' + json.dumps(tmp) + '\"'
                    return json.dumps(res).encode('utf-8')
                elif root["type"] == "dropVirtualSchema":
                    return json.dumps({{"type": "dropVirtualSchema"}}).encode('utf-8')
                elif root["type"] == "setProperties":
                    return json.dumps({{"type": "setProperties"}}).encode('utf-8')
                elif root["type"] == "refresh":
                    return json.dumps({{"type": "refresh"}}).encode('utf-8')
                if root["type"] == "getCapabilities":
                    return json.dumps({{
                        "type": "getCapabilities",
                        "capabilities": []
                    }}).encode('utf-8') # database expects utf-8 encoded string of type str. unicode not yet supported.
                elif root["type"] == "pushdown":
                    res = {{
                        "type": "pushdown",
                        "sql": "SELECT * FROM (VALUES ('FOO', 'BAR')) t"
                    }}
                    return json.dumps(res).encode('utf-8')
                else:
                    raise ValueError('Unsupported callback')
            /
            ''').format(schema = schemaName, adapter = adapterName))
class ExplainVirtualBasic(VSchemaTest):
    """EXPLAIN VIRTUAL: allowed statement forms and the four result columns
    (PUSHDOWN_ID, PUSHDOWN_SQL, PUSHDOWN_JSON, PUSHDOWN_INVOLVED_TABLES)."""

    def setUp(self):
        # Recreate VS1 from scratch before every test.
        self.query('DROP FORCE VIRTUAL SCHEMA IF EXISTS VS1 CASCADE')
        self.createFastAdapter(schemaName="ADAPTER", adapterName="FAST_ADAPTER")
        self.createVirtualSchemaJdbc("VS1", "NATIVE", "ADAPTER.FAST_ADAPTER", True)
        self.commit()

    def testExplainWithoutVirtual(self):
        """Plain EXPLAIN (without VIRTUAL) is not valid syntax."""
        with self.assertRaisesRegexp(Exception, 'syntax error, unexpected SELECT_'):
            self.query('EXPLAIN SELECT * FROM DUMMY')

    def testExplainWithoutVirtualTable(self):
        """EXPLAIN VIRTUAL on a query without virtual tables is rejected."""
        with self.assertRaisesRegexp(Exception, 'Explain virtual not possible on queries without virtual tables'):
            self.query('EXPLAIN VIRTUAL (SELECT 2 FROM DUAL);')

    def testExplainVirtualWithSimpleRequest(self):
        """EXPLAIN VIRTUAL returns id, pushdown SQL and involved tables."""
        rows = self.query('EXPLAIN VIRTUAL SELECT * FROM DUMMY;')
        self.assertEqual([1,], self.getColumn(rows,0))
        self.assertEqual(['''SELECT * FROM (VALUES ('FOO', 'BAR')) t''',], self.getColumn(rows,1))
        self.assertEqual(['DUMMY',], self.getColumn(rows,3))

    def testExplainVirtualWithSubqueryId(self):
        """PUSHDOWN_ID can be selected from an EXPLAIN VIRTUAL subquery."""
        rows = self.query('SELECT PUSHDOWN_ID FROM (EXPLAIN VIRTUAL SELECT * FROM DUMMY);')
        self.assertRowsEqual([(1,)], rows)

    def testExplainVirtualWithSubquerySQL(self):
        """PUSHDOWN_SQL can be selected from an EXPLAIN VIRTUAL subquery."""
        rows = self.query('SELECT PUSHDOWN_SQL FROM (EXPLAIN VIRTUAL SELECT * FROM DUMMY);')
        self.assertRowsEqual([('''SELECT * FROM (VALUES ('FOO', 'BAR')) t''',)], rows)

    def testExplainVirtualWithSubqueryJson(self):
        """PUSHDOWN_JSON contains the adapter request metadata."""
        rows = self.query('SELECT PUSHDOWN_JSON FROM (EXPLAIN VIRTUAL SELECT * FROM DUMMY);')
        self.assertTrue("schemaMetadataInfo" in rows[0][0])

    def testExplainVirtualWithSubqueryInvolvedTables(self):
        """PUSHDOWN_INVOLVED_TABLES lists the virtual tables of the query."""
        rows = self.query('SELECT PUSHDOWN_INVOLVED_TABLES FROM (EXPLAIN VIRTUAL SELECT * FROM DUMMY);')
        self.assertRowsEqual([('''DUMMY''',)], rows)

    def testExplainVirtualWithSubqueryALL(self):
        """SELECT * over EXPLAIN VIRTUAL yields all four columns with expected content."""
        rows = self.query('SELECT * FROM (EXPLAIN VIRTUAL SELECT * FROM DUMMY);')
        self.assertEqual(len(rows), 1)
        self.assertEqual(len(rows[0]), 4)
        self.assertEqual(rows[0][0], 1)
        self.assertEqual(rows[0][1], '''SELECT * FROM (VALUES ('FOO', 'BAR')) t''')
        self.assertEqual(rows[0][3], '''DUMMY''')
        # The JSON column must carry the full adapter conversation.
        self.assertTrue("schemaMetadataInfo" in rows[0][2])
        self.assertTrue("capabilities" in rows[0][2])
        self.assertTrue("pushdownRequest" in rows[0][2])
        self.assertTrue("pushdown" in rows[0][2])

    def testExplainVirtualWithCreate(self):
        """EXPLAIN VIRTUAL only accepts queries, not CREATE."""
        with self.assertRaisesRegexp(Exception, 'syntax error, unexpected CREATE_'):
            self.query('EXPLAIN VIRTUAL CREATE VIRTUAL SCHEMA VS1 USING ADAPTER.FAST_ADAPTER')

    def testExplainVirtualWithAlter(self):
        """EXPLAIN VIRTUAL only accepts queries, not ALTER ... SET."""
        with self.assertRaisesRegexp(Exception, 'syntax error, unexpected ALTER_'):
            self.query('''EXPLAIN VIRTUAL ALTER VIRTUAL SCHEMA VS1 SET UNUSED='FOO' ''')

    def testExplainVirtualWithAlterRefresh(self):
        """EXPLAIN VIRTUAL only accepts queries, not ALTER ... REFRESH."""
        with self.assertRaisesRegexp(Exception, 'syntax error, unexpected ALTER_'):
            self.query('EXPLAIN VIRTUAL ALTER VIRTUAL SCHEMA VS1 REFRESH')

    def testExplainVirtualWithDrop(self):
        """EXPLAIN VIRTUAL only accepts queries, not DROP."""
        with self.assertRaisesRegexp(Exception, 'syntax error, unexpected DROP_'):
            self.query('EXPLAIN VIRTUAL DROP VIRTUAL SCHEMA VS1')

    def testExplainVirtualWithInvalidPushdownSql(self):
        """EXPLAIN VIRTUAL must show the pushdown SQL verbatim even when it
        could never execute (unreachable IMPORT target)."""
        self.query(udf.fixindent('''
            CREATE OR REPLACE PYTHON ADAPTER SCRIPT adapter.invalid_fast_adapter AS
            import json
            import string
            def adapter_call(request):
                # database expects utf-8 encoded string of type str. unicode not yet supported
                root = json.loads(request)
                if root["type"] == "createVirtualSchema":
                    res = {
                        "type": "createVirtualSchema",
                        "schemaMetadata": {
                            "tables": [
                            {
                                "name": "DUMMY",
                                "columns": [{
                                    "name": "A",
                                    "dataType": {"type": "VARCHAR", "size": 2000000}
                                },{
                                    "name": "B",
                                    "dataType": {"type": "VARCHAR", "size": 2000000}
                                }]
                            }]
                        }
                    }
                    return json.dumps(res).encode('utf-8')
                elif root["type"] == "dropVirtualSchema":
                    return json.dumps({"type": "dropVirtualSchema"}).encode('utf-8')
                if root["type"] == "getCapabilities":
                    return json.dumps({
                        "type": "getCapabilities",
                        "capabilities": []
                    }).encode('utf-8') # database expects utf-8 encoded string of type str. unicode not yet supported.
                elif root["type"] == "pushdown":
                    res = {
                        "type": "pushdown",
                        "sql": "IMPORT FROM JDBC AT 'jdbc:exa:non-existing-host:1234' USER 'alice' IDENTIFIED BY 'bob' STATEMENT 'SELECT * FROM non-existing-table'"
                    }
                    return json.dumps(res).encode('utf-8')
                else:
                    raise ValueError('Unsupported callback')
            /
            '''))
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS_INVALID_EXPLAIN CASCADE')
        self.query('CREATE VIRTUAL SCHEMA VS_INVALID_EXPLAIN USING ADAPTER.invalid_fast_adapter')
        rows = self.query('SELECT PUSHDOWN_SQL FROM (EXPLAIN VIRTUAL SELECT * FROM VS_INVALID_EXPLAIN.dummy)')
        self.assertRowsEqual([("""IMPORT FROM JDBC AT 'jdbc:exa:non-existing-host:1234' USER 'alice' IDENTIFIED BY 'bob' STATEMENT 'SELECT * FROM non-existing-table'""",)], rows)
        self.commit()
class ExplainVirtualPushdown(VSchemaTest):
    """Checks the PUSHDOWN_SQL column of EXPLAIN VIRTUAL for many query
    shapes (projections, joins, aggregation, subselects, several virtual
    schemas/adapters) against the JDBC adapter.

    compareWithExplainVirtual() (inherited) runs EXPLAIN VIRTUAL on the
    first argument and compares the produced pushdown statement(s) with the
    expected SQL — a single string, or a list when several pushdowns occur.
    """
    # Guards the one-time schema/adapter creation; see TODO in setUp.
    setupDone = False
    def setUp(self):
        # TODO This is another ugly workaround for the problem that the framework doesn't offer us a query in classmethod setUpClass. Rewrite!
        if self.__class__.setupDone:
            self.query(''' CLOSE SCHEMA ''')
            return
        self.createJdbcAdapter()
        self.createNative()
        self.commit() # We have to commit, otherwise the adapter won't see these tables
        self.createVirtualSchemaJdbc("VS1", "NATIVE", "ADAPTER.JDBC_ADAPTER", True)
        self.commit()
        self.query(''' CLOSE SCHEMA ''')
        self.__class__.setupDone = True
    def testPushdownResponses(self):
        """Basic pushdowns: projection, cross self-join, select-list expression."""
        # Single Group
        self.compareWithExplainVirtual('''
            SELECT a, c FROM {v}.T;
            ''', '''SELECT A, C FROM NATIVE.T''')
        self.compareWithExplainVirtual('''
            SELECT t1.a FROM {v}.t t1, {v}.t t2
            ''', ['''SELECT A FROM NATIVE.T''','''SELECT true FROM NATIVE.T'''])
        self.compareWithExplainVirtual('''
            SELECT (a+1) a1 FROM {v}.t
            ''', '''SELECT (A + 1) FROM NATIVE.T''')
    def testNestedPushdowns(self):
        """Virtual tables nested in subselects; the EXPLAIN VIRTUAL result
        itself behaves like a normal table (concatenation, joins)."""
        self.compareWithExplainVirtual('''
            SELECT a FROM (SELECT a FROM VS1.t ORDER BY false);
            ''', '''SELECT A FROM NATIVE.T ORDER BY false''')
        rows = self.query('''
            SELECT PUSHDOWN_SQL||'X' FROM (EXPLAIN VIRTUAL SELECT a FROM (SELECT a FROM VS1.t ORDER BY false));
            ''')
        self.assertRowsEqual([('''SELECT A FROM NATIVE.T ORDER BY falseX''',)], rows)
        self.compareWithExplainVirtual('''
            SELECT a FROM (SELECT a FROM VS1.t ORDER BY false), (SELECT b FROM VS1.t ORDER BY false);
            ''', ['SELECT A FROM NATIVE.T ORDER BY false', 'SELECT NULL FROM NATIVE.T ORDER BY false']) # review!
        rows = self.query('''
            SELECT PUSHDOWN_SQL||'X' FROM (EXPLAIN VIRTUAL SELECT a FROM (SELECT a FROM VS1.t ORDER BY false), (SELECT b FROM VS1.t ORDER BY false));
            ''')
        self.assertRowsEqualIgnoreOrder([('''SELECT A FROM NATIVE.T ORDER BY falseX''',),('''SELECT NULL FROM NATIVE.T ORDER BY falseX''',)], rows)
        self.compareWithExplainVirtual('''
            SELECT * FROM VS1.t WHERE a IN (SELECT DISTINCT a FROM VS1.t ORDER BY a DESC LIMIT 2);
            ''', ['''SELECT * FROM NATIVE.T''','''SELECT A FROM NATIVE.T GROUP BY A ORDER BY A DESC LIMIT 2'''])
        rows = self.query('''
            SELECT PUSHDOWN_SQL||'X' FROM (EXPLAIN VIRTUAL SELECT * FROM VS1.t WHERE a IN (SELECT DISTINCT a FROM VS1.t ORDER BY a DESC LIMIT 2));
            ''')
        self.assertRowsEqualIgnoreOrder([('''SELECT * FROM NATIVE.TX''',),('''SELECT A FROM NATIVE.T GROUP BY A ORDER BY A DESC LIMIT 2X''',)], rows)
        # Two EXPLAIN VIRTUAL derived tables cross-joined: column 1 comes
        # from the first explain, column 5 from the second.
        rows = self.query('''
            SELECT * FROM (EXPLAIN VIRTUAL SELECT a FROM VS1.t ORDER BY false), (EXPLAIN VIRTUAL SELECT b FROM VS1.t ORDER BY false);
            ''')
        self.assertEquals(['''SELECT A FROM NATIVE.T ORDER BY false''', '''SELECT A FROM NATIVE.T ORDER BY false'''], self.getColumn(rows, 1))
        self.assertEquals(['''SELECT A FROM NATIVE.T ORDER BY false''', '''SELECT B FROM NATIVE.T ORDER BY false'''], self.getColumn(rows, 5))
    def testJoins(self):
        """Pushdowns for equi, outer and cross joins, and a join with a native table."""
        # Equi Join
        self.compareWithExplainVirtual('''
            select t1.a FROM {v}.t t1 join {v}.t t2 on t1.b=t2.b
            ''', ['''SELECT A, B FROM NATIVE.T''','''SELECT B FROM NATIVE.T'''])
        # Outer Join
        self.compareWithExplainVirtual('''
            select * FROM {v}.t t1 left join {v}.t t2 on t1.a=t2.a where coalesce(t2.a, 1) = 1
            ''', ['''SELECT * FROM NATIVE.T''', '''SELECT * FROM NATIVE.T'''])
        # Cross Join
        self.compareWithExplainVirtual('''
            select t1.a FROM {v}.t t1, {v}.t t2
            ''', ['''SELECT A FROM NATIVE.T''', '''SELECT true FROM NATIVE.T'''])
        # Join with native table
        self.compareWithExplainVirtual('''
            select * from {v}.t vt join {n}.t nt on vt.a = nt.a where nt.a = 1
            ''', '''SELECT * FROM NATIVE.T''')
    def testSelectListExpressions(self):
        """Select-list expression combined with ORDER BY on another column."""
        self.compareWithExplainVirtual('''
            select a+1 from {v}.t order by c desc
            ''', '''SELECT (A + 1) FROM NATIVE.T ORDER BY C DESC''')
    def testPredicates(self):
        """Predicates both in the select list and in the WHERE clause."""
        self.compareWithExplainVirtual('''
            SELECT a=1, b FROM {v}.t WHERE a=(a*2/2)
            ''', '''SELECT A = 1, B FROM NATIVE.T WHERE A = ((A * 2) / 2)''')
    def testOrderByLimit(self):
        """ORDER BY on a select-list expression is pushed down."""
        self.compareWithExplainVirtual('''
            select a+1 as a1, c from {v}.t order by a+1
            ''', '''SELECT (A + 1), C FROM NATIVE.T ORDER BY (A + 1)''')
    def testAggregation(self):
        """Aggregations: single group, GROUP BY expression, aggregate over a join."""
        # Single Group
        self.compareWithExplainVirtual('''
            select count(*) from {v}.t
            ''', '''SELECT COUNT(*) FROM NATIVE.T''')
        # Group By Expression
        self.compareWithExplainVirtual('''
            select a*2, count(*), max(b) from {v}.t group by a*2
            ''', '''SELECT (A * 2), COUNT(*), MAX(B) FROM NATIVE.T GROUP BY (A * 2)''')
        # Aggregation On Join
        self.compareWithExplainVirtual('''
            select sum(t1.a) from {v}.t t1, {v}.t t2 group by t1.a
            ''', ['SELECT A FROM NATIVE.T', 'SELECT true FROM NATIVE.T'])
    def testScalarFunctions(self):
        """Scalar function (ABS) inside a filter is pushed down."""
        self.compareWithExplainVirtual('''
            select * from {v}.t where abs(a) = 1
            ''', 'SELECT * FROM NATIVE.T WHERE ABS(A) = 1')
    def testMultiPushdown(self):
        """Several pushdowns in one statement: one schema with n tables, one
        adapter with n schemas, and two distinct adapters."""
        self.createVirtualSchemaJdbc("VS2", "NATIVE", "ADAPTER.JDBC_ADAPTER", True)
        # Create an additional virtual schema using another adapter
        self.createJdbcAdapter(schemaName="ADAPTER2", adapterName="JDBC_ADAPTER")
        self.createVirtualSchemaJdbc("VS3", "NATIVE", "ADAPTER2.JDBC_ADAPTER", True)
        # 1 virtual schema, n virtual tables
        self.compareWithExplainVirtual('''
            select * from {v}.t t1, {v}.t t2, {v}.t t3 where t1.a = t2.a and t2.a = t3.a;
            ''', ['SELECT * FROM NATIVE.T', 'SELECT * FROM NATIVE.T', 'SELECT * FROM NATIVE.T'])
        # 1 adapter, n virtual schemas
        self.compareWithExplainVirtual('''
            select * from {v}.t t1, {v2}.t t2, {v}.t t3 where t1.a = t2.a and t2.a = t3.a;
            ''', ['SELECT * FROM NATIVE.T', 'SELECT * FROM NATIVE.T', 'SELECT * FROM NATIVE.T'])
        # different adapters, different schemas
        self.compareWithExplainVirtual('''
            select * from {v}.t t1, {v3}.t t2 where t1.a = t2.a;
            ''', ['SELECT * FROM NATIVE.T', 'SELECT * FROM NATIVE.T'])
        self.compareWithExplainVirtual('''
            select * from {v}.t t1, (select a, b from {v3}.t) t2 where t1.a = t2.a;
            ''', ['SELECT * FROM NATIVE.T','SELECT A, B FROM NATIVE.T'])
        self.compareWithExplainVirtual('''
            select * from {v}.t where a in (select distinct a from {v3}.t order by a desc limit 2);
            ''', ['SELECT * FROM NATIVE.T','SELECT A FROM NATIVE.T GROUP BY A ORDER BY A DESC LIMIT 2'])
    def testWithAnalytical(self):
        """Analytic function: only the required columns (K, V1) are pushed down."""
        self.compareWithExplainVirtual('''
            SELECT k, v1, sum(v1) over (PARTITION BY k ORDER BY v1) AS SUM FROM {v}.g order by k desc, sum;
            ''', 'SELECT K, V1 FROM NATIVE.G')
    def testMixed(self):
        """Aggregation inside a filtered subselect — plain, with an extra
        filter, and on top of a join."""
        # Special Case: c*c is removed from select list, so only lookups in selectlist. Should still pushdown agg.
        self.compareWithExplainVirtual('''
            SELECT count(a) FROM (
            SELECT a,c*c as x, sum(c) mysum FROM {v}.t GROUP BY a,c*c) subsel
            WHERE subsel.x<15 AND mysum>2;
            ''', '''SELECT A FROM NATIVE.T WHERE (C * C) < 15 GROUP BY (C * C), A HAVING 2 < SUM(C)''') # review!
        # ... same with b only in filter
        self.compareWithExplainVirtual('''
            SELECT count(a) FROM (
            SELECT a,c*c as x, sum(c) mysum FROM {v}.t WHERE b!='f' GROUP BY a,c*c) subsel
            WHERE subsel.x<15 AND mysum>2;
            ''', '''SELECT A FROM NATIVE.T WHERE (B != 'f' AND (C * C) < 15) GROUP BY (C * C), A HAVING 2 < SUM(C)''') # review!
        # ... same with join
        self.compareWithExplainVirtual('''
            SELECT count(a) FROM (
            SELECT t1.a,t1.c*t1.c as x, sum(t1.c) mysum FROM {v}.t t1 JOIN {v}.t t2 ON t1.b=t2.b GROUP BY t1.a,t1.c*t1.c) subsel
            WHERE subsel.x<15 AND mysum>2;
            ''', ['SELECT * FROM NATIVE.T WHERE (C * C) < 15', 'SELECT B FROM NATIVE.T'])
class AdapterNotes(VSchemaTest):
    """Tests for the 'adapterNotes' fields of schema, table and column metadata.

    createIllegalNotesAdapter() builds an adapter whose createVirtualSchema
    response carries the markers PLACEHOLDER / PLACEHOLDER2 / PLACEHOLDER3
    (as quoted JSON strings) in the schema, table and column adapterNotes,
    and then textually replaces each marker with a caller-supplied payload.
    A payload that is a plain JSON string must be stored verbatim (quotes
    stripped) and become visible in the system tables; any other payload
    must make CREATE VIRTUAL SCHEMA fail.
    """
    # Payloads shared by the Schema/Table/Column variants below. The escaped
    # quotes survive the SQL-script text and the adapter's string handling
    # before the database's JSON parser sees them.
    _NOTES_STRING = "\\\"string\\\""
    _NOTES_JSON_ARRAY = "[{\\\"dataType\\\": {\\\"type\\\": \\\"VARCHAR\\\", \\\"size\\\": 100}, \\\"name\\\": \\\"KEY\\\"}, {\\\"dataType\\\": {\\\"type\\\": \\\"VARCHAR\\\", \\\"size\\\": 100}, \\\"name\\\": \\\"VALUE\\\"}]"
    _NOTES_JSON_ARRAY_BAD_KEY = "[{\\\"data\\\": {\\\"type\\\": \\\"VARCHAR\\\", \\\"size\\\": 100}, \\\"name\\\": \\\"KEY\\\"}, {\\\"dataType\\\": {\\\"type\\\": \\\"VARCHAR\\\", \\\"size\\\": 100}, \\\"name\\\": \\\"VALUE\\\"}]"
    _NOTES_BOOLEAN = "true"
    _NOTES_BAD_ESCAPE = '''\\\"\\\\\\'t\\\'\\\"'''
    _NOTES_UNQUOTED = "no quotes"
    _NOTES_UNTERMINATED = "\\\"x"

    def _assertNotesRejected(self, errorRegexp, **replacements):
        """Recreate the adapter with the given placeholder replacement(s)
        and assert that CREATE VIRTUAL SCHEMA fails with errorRegexp.

        Keyword args are forwarded to createIllegalNotesAdapter():
        replacement= targets the schema notes, replacement2= the table
        notes, replacement3= the column notes.
        """
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS_ILLEGAL_NOTES CASCADE')
        self.createIllegalNotesAdapter(schemaName="ADAPTER", adapterName="ILLEGAL_NOTES_ADAPTER", **replacements)
        with self.assertRaisesRegexp(Exception, errorRegexp):
            self.query('CREATE VIRTUAL SCHEMA VS_ILLEGAL_NOTES USING ADAPTER.ILLEGAL_NOTES_ADAPTER')
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS_ILLEGAL_NOTES CASCADE')

    def _assertNotesStored(self, notesQuery, expectedRows, **replacements):
        """Recreate the adapter with the given replacement(s), create the
        virtual schema and assert the notes show up via notesQuery."""
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS_ILLEGAL_NOTES CASCADE')
        self.createIllegalNotesAdapter(schemaName="ADAPTER", adapterName="ILLEGAL_NOTES_ADAPTER", **replacements)
        self.query('CREATE VIRTUAL SCHEMA VS_ILLEGAL_NOTES USING ADAPTER.ILLEGAL_NOTES_ADAPTER')
        self.query('SELECT * FROM VS_ILLEGAL_NOTES.DUMMY')
        rows = self.query(notesQuery)
        self.assertRowsEqual(expectedRows, rows)
        self.query('DROP VIRTUAL SCHEMA IF EXISTS VS_ILLEGAL_NOTES CASCADE')

    # --- schema-level adapterNotes -------------------------------------
    def testStringWithQuotesSchema(self):
        self._assertNotesStored(
            "SELECT ADAPTER_NOTES FROM EXA_VIRTUAL_SCHEMAS WHERE SCHEMA_NAME='VS_ILLEGAL_NOTES'",
            [('string',)], replacement=self._NOTES_STRING)
    def testJsonObjectSchema(self):
        self._assertNotesRejected("No valid json string", replacement=self._NOTES_JSON_ARRAY)
    def testJsonNonExistingObjectSchema(self):
        self._assertNotesRejected("No valid json string", replacement=self._NOTES_JSON_ARRAY_BAD_KEY)
    def testJsonBooleanSchema(self):
        self._assertNotesRejected("No valid json string", replacement=self._NOTES_BOOLEAN)
    def testIllegalQuoteSchema(self):
        self._assertNotesRejected("Bad escape sequence in string.", replacement=self._NOTES_BAD_ESCAPE)
    def testAdapterNoteWithoutQuotesSchema(self):
        self._assertNotesRejected("Syntax error: value, object or array expected.", replacement=self._NOTES_UNQUOTED)
    def testIncompleteAdapterNoteSchema(self):
        self._assertNotesRejected("Missing ',' or '}' in object declaration", replacement=self._NOTES_UNTERMINATED)

    # --- table-level adapterNotes --------------------------------------
    def testStringWithQuotesTable(self):
        self._assertNotesStored(
            "SELECT ADAPTER_NOTES FROM EXA_DBA_VIRTUAL_TABLES WHERE TABLE_SCHEMA='VS_ILLEGAL_NOTES' AND TABLE_NAME='DUMMY'",
            [('string',)], replacement2=self._NOTES_STRING)
    def testJsonObjectTable(self):
        self._assertNotesRejected("No valid json string", replacement2=self._NOTES_JSON_ARRAY)
    def testJsonNonExistingObjectTable(self):
        self._assertNotesRejected("No valid json string", replacement2=self._NOTES_JSON_ARRAY_BAD_KEY)
    def testJsonBooleanTable(self):
        self._assertNotesRejected("No valid json string", replacement2=self._NOTES_BOOLEAN)
    def testIllegalQuoteTable(self):
        self._assertNotesRejected("Bad escape sequence in string.", replacement2=self._NOTES_BAD_ESCAPE)
    def testAdapterNoteWithoutQuotesTable(self):
        self._assertNotesRejected("Syntax error: value, object or array expected.", replacement2=self._NOTES_UNQUOTED)
    def testIncompleteAdapterNoteTable(self):
        self._assertNotesRejected("Missing ',' or '}' in object declaration", replacement2=self._NOTES_UNTERMINATED)

    # --- column-level adapterNotes -------------------------------------
    def testStringWithQuotesColumn(self):
        # Only column KEY carries notes; column VALUE has none (NULL).
        self._assertNotesStored(
            "SELECT ADAPTER_NOTES FROM EXA_DBA_VIRTUAL_COLUMNS WHERE COLUMN_SCHEMA='VS_ILLEGAL_NOTES' AND COLUMN_TABLE='DUMMY' ORDER BY COLUMN_NAME",
            [('string',),(None,)], replacement3=self._NOTES_STRING)
    def testJsonObjectColumn(self):
        self._assertNotesRejected("No valid json string", replacement3=self._NOTES_JSON_ARRAY)
    def testJsonNonExistingObjectColumn(self):
        self._assertNotesRejected("No valid json string", replacement3=self._NOTES_JSON_ARRAY_BAD_KEY)
    def testJsonBooleanColumn(self):
        self._assertNotesRejected("No valid json string", replacement3=self._NOTES_BOOLEAN)
    def testIllegalQuoteColumn(self):
        self._assertNotesRejected("Bad escape sequence in string.", replacement3=self._NOTES_BAD_ESCAPE)
    def testAdapterNoteWithoutQuotesColumn(self):
        self._assertNotesRejected("Syntax error: value, object or array expected.", replacement3=self._NOTES_UNQUOTED)
    def testIncompleteAdapterNoteColumn(self):
        self._assertNotesRejected("Missing ',' or '}' in object declaration", replacement3=self._NOTES_UNTERMINATED)

    def createIllegalNotesAdapter(self, schemaName="ADAPTER", adapterName="FAST_ADAPTER", replacement="\\\"PLACEHOLDER\\\"", replacement2="\\\"PLACEHOLDER2\\\"", replacement3="\\\"PLACEHOLDER3\\\""):
        """(Re)create an adapter that substitutes the given replacement
        strings into the schema/table/column adapterNotes of its
        createVirtualSchema response.

        The defaults substitute each marker by itself (quoted), i.e. they
        produce legal string notes.
        NOTE: the original code applied the PLACEHOLDER replace twice; the
        duplicate (dead) call has been removed — the first replace already
        consumes every occurrence.
        """
        self.dropOldAdapter(schemaName, adapterName)
        self.query('CREATE SCHEMA {schema}'.format(schema=schemaName))
        self.query(udf.fixindent('''
            CREATE OR REPLACE PYTHON ADAPTER SCRIPT {schema}.{adapter} AS
            import json
            import string
            def adapter_call(request):
                # database expects utf-8 encoded string of type str. unicode not yet supported
                root = json.loads(request)
                if root["type"] == "createVirtualSchema":
                    res = {{
                        "type": "createVirtualSchema",
                        "schemaMetadata": {{
                            "adapterNotes": "PLACEHOLDER",
                            "tables": [
                            {{
                                "adapterNotes": "PLACEHOLDER2",
                                "name": "DUMMY",
                                "columns": [{{
                                    "adapterNotes": "PLACEHOLDER3",
                                    "name": "KEY",
                                    "dataType": {{"type": "VARCHAR", "size": 100}}
                                }},{{
                                    "name": "VALUE",
                                    "dataType": {{"type": "VARCHAR", "size": 100}}
                                }}]
                            }}]
                        }}
                    }}
                    return json.dumps(res).replace("\\"PLACEHOLDER\\"","{replace}").replace("\\"PLACEHOLDER2\\"","{replace2}").replace("\\"PLACEHOLDER3\\"","{replace3}").encode('utf-8')
                elif root["type"] == "dropVirtualSchema":
                    return json.dumps({{"type": "dropVirtualSchema"}}).encode('utf-8')
                elif root["type"] == "setProperties":
                    return json.dumps({{"type": "setProperties"}}).encode('utf-8')
                elif root["type"] == "refresh":
                    return json.dumps({{"type": "refresh"}}).encode('utf-8')
                if root["type"] == "getCapabilities":
                    return json.dumps({{
                        "type": "getCapabilities",
                        "capabilities": []
                        }}).encode('utf-8') # database expects utf-8 encoded string of type str. unicode not yet supported.
                elif root["type"] == "pushdown":
                    res = {{
                        "type": "pushdown",
                        "sql": "SELECT * FROM (VALUES ('FOO', 'BAR')) t"
                    }}
                    return json.dumps(res).encode('utf-8')
                else:
                    raise ValueError('Unsupported callback')
            /
            ''').format(schema = schemaName, adapter = adapterName, replace = replacement, replace2 = replacement2, replace3 = replacement3))
class ViewPrivileges(VSchemaTest):
    """SPOT-4245: views on virtual tables must work for a user who only has
    SELECT on the view, even when the adapter's pushdown uses
    exa.get_connection() or exa.import_script()."""
    def testGetConnection(self):
        """Adapter reads connection AC_FOOCONN during pushdown; a restricted
        user querying a view over the virtual table sees the same rows."""
        self.createUser("foo", "foo")
        self.query('''CREATE SCHEMA IF NOT EXISTS SPOT4245''')
        self.query('''
            create or replace connection AC_FOOCONN to 'a' user 'b' identified by 'c'
            ''')
        self.createConnectionAdapter(schemaName="SPOT4245", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS SPOT4245VS CASCADE')
        self.query('CREATE VIRTUAL SCHEMA SPOT4245VS USING SPOT4245.FAST_ADAPTER')
        rows = self.query('''
            SELECT * from SPOT4245VS.DUMMY
            ''')
        self.assertRowsEqual([('password','a','b','c')],rows)
        self.query("OPEN SCHEMA SPOT4245")
        self.query("create or replace view SPOT4245.SPOT4245VIEW as SELECT * from SPOT4245VS.DUMMY")
        self.query("grant select on SPOT4245.SPOT4245VIEW to foo")
        self.commit()
        # The restricted user sees the same data through the view.
        foo_conn = self.getConnection('foo', 'foo')
        rows = foo_conn.query('''select * from SPOT4245.SPOT4245VIEW''')
        self.assertRowsEqual([('password','a','b','c')],rows)
        self.query('DROP VIRTUAL SCHEMA IF EXISTS SPOT4245VS CASCADE')
        self.query('''DROP SCHEMA IF EXISTS SPOT4245 CASCADE''')
    def testGetInput(self):
        """Same as testGetConnection, but the adapter calls a helper script
        via exa.import_script() during pushdown."""
        self.createUser("foo", "foo")
        self.query('''CREATE SCHEMA IF NOT EXISTS SPOT4245''')
        self.query(udf.fixindent('''
            CREATE OR REPLACE python SCALAR SCRIPT
            spot42542script()
            RETURNS VARCHAR(200) AS
            def f():
                return "42"
            /
            '''))
        self.createImportAdapter(schemaName="SPOT4245", adapterName="FAST_ADAPTER")
        self.query('DROP VIRTUAL SCHEMA IF EXISTS SPOT4245VS CASCADE')
        self.query('CREATE VIRTUAL SCHEMA SPOT4245VS USING SPOT4245.FAST_ADAPTER')
        rows = self.query('''
            SELECT * from SPOT4245VS.DUMMY
            ''')
        self.assertRowsEqual([('42',)],rows)
        self.query("OPEN SCHEMA SPOT4245")
        self.query("create or replace view SPOT4245.SPOT4245VIEW as SELECT * from SPOT4245VS.DUMMY")
        self.query("grant select on SPOT4245.SPOT4245VIEW to foo")
        self.commit()
        # The restricted user sees the same data through the view.
        foo_conn = self.getConnection('foo', 'foo')
        rows = foo_conn.query('''select * from SPOT4245.SPOT4245VIEW''')
        self.assertRowsEqual([('42', )],rows)
        self.query('DROP VIRTUAL SCHEMA IF EXISTS SPOT4245VS CASCADE')
        self.query('''DROP SCHEMA IF EXISTS SPOT4245 CASCADE''')
    def createConnectionAdapter(self, schemaName="ADAPTER", adapterName="FAST_ADAPTER"):
        """Create an adapter whose DUMMY-table pushdown returns the type,
        address, user and password of connection AC_FOOCONN (read via
        exa.get_connection)."""
        self.query(udf.fixindent('''
            CREATE OR REPLACE PYTHON ADAPTER SCRIPT {schema}.{adapter} AS
            import json
            import string
            def adapter_call(request):
                # database expects utf-8 encoded string of type str. unicode not yet supported
                root = json.loads(request)
                if root["type"] == "createVirtualSchema":
                    res = {{
                        "type": "createVirtualSchema",
                        "schemaMetadata": {{
                            "tables": [
                            {{
                                "name": "DUMMY",
                                "columns": [{{
                                    "name": "TYPE",
                                    "dataType": {{"type": "VARCHAR", "size": 200}}
                                }},{{
                                    "name": "HOST",
                                    "dataType": {{"type": "VARCHAR", "size": 200}}
                                }},{{
                                    "name": "CONN",
                                    "dataType": {{"type": "VARCHAR", "size": 200}}
                                }},{{
                                    "name": "PWD",
                                    "dataType": {{"type": "VARCHAR", "size": 200}}
                                }}]
                            }}]
                        }}
                    }}
                    return json.dumps(res).encode('utf-8')
                elif root["type"] == "dropVirtualSchema":
                    return json.dumps({{"type": "dropVirtualSchema"}}).encode('utf-8')
                elif root["type"] == "setProperties":
                    return json.dumps({{"type": "setProperties"}}).encode('utf-8')
                elif root["type"] == "refresh":
                    return json.dumps({{"type": "refresh"}}).encode('utf-8')
                if root["type"] == "getCapabilities":
                    return json.dumps({{
                        "type": "getCapabilities",
                        "capabilities": []
                        }}).encode('utf-8') # database expects utf-8 encoded string of type str. unicode not yet supported.
                elif root["type"] == "pushdown":
                    res = {{
                        "type": "pushdown",
                        "sql": "SELECT * FROM (VALUES ('PLACEHOLDER1', 'PLACEHOLDER2', 'PLACEHOLDER3', 'PLACEHOLDER4')) t"
                    }}
                    c = exa.get_connection('AC_FOOCONN')
                    return json.dumps(res).replace("PLACEHOLDER1",c.type).replace("PLACEHOLDER2",c.address).replace("PLACEHOLDER3",c.user).replace("PLACEHOLDER4",c.password).encode('utf-8')
                else:
                    raise ValueError('Unsupported callback')
            /
            ''').format(schema = schemaName, adapter = adapterName))
    def createImportAdapter(self, schemaName="ADAPTER", adapterName="FAST_ADAPTER"):
        """Create an adapter whose DUMMY-table pushdown returns the result of
        calling spot42542script.f() via exa.import_script."""
        self.query(udf.fixindent('''
            CREATE OR REPLACE PYTHON ADAPTER SCRIPT {schema}.{adapter} AS
            import json
            import string
            def adapter_call(request):
                # database expects utf-8 encoded string of type str. unicode not yet supported
                root = json.loads(request)
                if root["type"] == "createVirtualSchema":
                    res = {{
                        "type": "createVirtualSchema",
                        "schemaMetadata": {{
                            "tables": [
                            {{
                                "name": "DUMMY",
                                "columns": [{{
                                    "name": "VALUE",
                                    "dataType": {{"type": "VARCHAR", "size": 200}}
                                }}]
                            }}]
                        }}
                    }}
                    return json.dumps(res).encode('utf-8')
                elif root["type"] == "dropVirtualSchema":
                    return json.dumps({{"type": "dropVirtualSchema"}}).encode('utf-8')
                elif root["type"] == "setProperties":
                    return json.dumps({{"type": "setProperties"}}).encode('utf-8')
                elif root["type"] == "refresh":
                    return json.dumps({{"type": "refresh"}}).encode('utf-8')
                if root["type"] == "getCapabilities":
                    return json.dumps({{
                        "type": "getCapabilities",
                        "capabilities": []
                        }}).encode('utf-8') # database expects utf-8 encoded string of type str. unicode not yet supported.
                elif root["type"] == "pushdown":
                    res = {{
                        "type": "pushdown",
                        "sql": "SELECT * FROM (VALUES ('PLACEHOLDER1')) t"
                    }}
                    c = exa.import_script('spot42542script')
                    return json.dumps(res).replace("PLACEHOLDER1",c.f()).encode('utf-8')
                else:
                    raise ValueError('Unsupported callback')
            /
            ''').format(schema = schemaName, adapter = adapterName))
class ReportedBugs(VSchemaTest):
    """Regression tests for bugs reported against virtual schemas.

    compareWithNativeExtended() (inherited) runs the query against both the
    virtual and the native schema, compares the results, and additionally
    checks the pushdown SQL against explainResponse.
    """
    # Guards the one-time schema/adapter creation; see TODO in setUp.
    setupDone = False
    def setUp(self):
        # TODO This is another ugly workaround for the problem that the framework doesn't offer us a query in classmethod setUpClass. Rewrite!
        if self.__class__.setupDone:
            self.query(''' CLOSE SCHEMA ''')
            return
        self.createJdbcAdapter()
        self.createNative()
        self.commit() # We have to commit, otherwise the adapter won't see these tables
        self.createVirtualSchemaJdbc("VS1", "NATIVE", "ADAPTER.JDBC_ADAPTER", False)
        self.commit()
        self.query(''' CLOSE SCHEMA ''')
        self.__class__.setupDone = True
    def testEqualColumns(self):
        """A column compared with itself (t.A = t.A): expected pushdown is
        just SELECT 1 FROM NATIVE.T."""
        # Single Group
        self.compareWithNativeExtended('''
            select 1 from {v}.t WHERE t.A = t.A
            ''', ignoreOrder=True, explainResponse='''SELECT 1 FROM NATIVE.T''')
        self.assertExpectations()
    def testJoinWithSubselect(self):
        """Virtual table LEFT JOINed with a DISTINCT subselect plus a second
        join: expected pushdown is only SELECT A FROM NATIVE.T_NULLS."""
        # Single Group
        self.compareWithNativeExtended('''
            SELECT 1 FROM {v}.t_nulls VS LEFT JOIN (SELECT DISTINCT DUMMY FROM SYS.DUAL) D ON VS.A=D.DUMMY
            LEFT JOIN SYS.DUAL D1 ON 1=1
            ''', ignoreOrder=True, explainResponse='''SELECT A FROM NATIVE.T_NULLS''')
        self.assertExpectations()
# Run the test suite through the shared UDF test-framework entry point when
# this module is executed directly.
if __name__ == '__main__':
    udf.main()
| 53.264139
| 314
| 0.614993
| 18,199
| 165,758
| 5.519644
| 0.056157
| 0.061104
| 0.027774
| 0.047037
| 0.790915
| 0.757108
| 0.724097
| 0.700046
| 0.673944
| 0.657249
| 0
| 0.019587
| 0.261393
| 165,758
| 3,111
| 315
| 53.28126
| 0.80076
| 0.050194
| 0
| 0.636157
| 0
| 0.029879
| 0.526178
| 0.054167
| 0
| 0
| 0
| 0.000321
| 0.136536
| 0
| null | null | 0.00643
| 0.019667
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
883041056e660a1c407249cae65cf461d1e5446b
| 58
|
py
|
Python
|
CursoEmVideoPython/hello_world.py
|
miguelabreuss/scripts_python
|
cf33934731a9d1b731672d4309aaea0a24ae151a
|
[
"MIT"
] | null | null | null |
CursoEmVideoPython/hello_world.py
|
miguelabreuss/scripts_python
|
cf33934731a9d1b731672d4309aaea0a24ae151a
|
[
"MIT"
] | 1
|
2020-07-04T16:27:25.000Z
|
2020-07-04T16:27:25.000Z
|
CursoEmVideoPython/hello_world.py
|
miguelabreuss/scripts_python
|
cf33934731a9d1b731672d4309aaea0a24ae151a
|
[
"MIT"
] | null | null | null |
import emoji
print(emoji.emojize('Python é :thumbs_up:'))
| 19.333333
| 44
| 0.758621
| 9
| 58
| 4.777778
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086207
| 58
| 3
| 44
| 19.333333
| 0.811321
| 0
| 0
| 0
| 0
| 0
| 0.338983
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
884684bde3a96ea0347e750b9f51251b0fc2a2e0
| 137
|
py
|
Python
|
praw_memories/prawdebug.py
|
elnuno/praw_memories
|
dcab9cf795d8d9c34684fb1087c7907c56630cc2
|
[
"Apache-2.0"
] | 1
|
2017-04-08T03:16:48.000Z
|
2017-04-08T03:16:48.000Z
|
praw_memories/prawdebug.py
|
elnuno/praw_memories
|
dcab9cf795d8d9c34684fb1087c7907c56630cc2
|
[
"Apache-2.0"
] | null | null | null |
praw_memories/prawdebug.py
|
elnuno/praw_memories
|
dcab9cf795d8d9c34684fb1087c7907c56630cc2
|
[
"Apache-2.0"
] | null | null | null |
"""Debugging facilities for PRAW-based projects."""
import datetime
import json
import logging
import time
import praw
import prawcore
| 13.7
| 51
| 0.80292
| 18
| 137
| 6.111111
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138686
| 137
| 9
| 52
| 15.222222
| 0.932203
| 0.328467
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
884b504a09c02924992e2ed0397cc3a86d991b80
| 62
|
py
|
Python
|
hydroserver/hydroserver_core/core_database_models/__init__.py
|
kjlippold/his_hydroserver
|
aaf3939965d12dd5bc74f69d22b653ce548bec0a
|
[
"MIT"
] | 1
|
2021-01-27T19:19:05.000Z
|
2021-01-27T19:19:05.000Z
|
hydroserver/hydroserver_core/core_database_models/__init__.py
|
CUAHSI-APPS/his_hydroserver
|
aaf3939965d12dd5bc74f69d22b653ce548bec0a
|
[
"MIT"
] | 1
|
2019-09-27T16:20:49.000Z
|
2019-09-27T16:20:49.000Z
|
hydroserver/hydroserver_core/core_database_models/__init__.py
|
kjlippold/his_hydroserver
|
aaf3939965d12dd5bc74f69d22b653ce548bec0a
|
[
"MIT"
] | 1
|
2020-06-08T21:43:38.000Z
|
2020-06-08T21:43:38.000Z
|
from hydroserver_core.core_database_models import odm2_sqlite
| 31
| 61
| 0.919355
| 9
| 62
| 5.888889
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017241
| 0.064516
| 62
| 1
| 62
| 62
| 0.896552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
885932a210830fd014b4c4e7c17035fd0d093062
| 304
|
py
|
Python
|
Python/34 Calendar Module.py
|
hnwarid/HackerRankExercises
|
5f6f710387b61cbf225e51517e702009659f4416
|
[
"MIT"
] | null | null | null |
Python/34 Calendar Module.py
|
hnwarid/HackerRankExercises
|
5f6f710387b61cbf225e51517e702009659f4416
|
[
"MIT"
] | null | null | null |
Python/34 Calendar Module.py
|
hnwarid/HackerRankExercises
|
5f6f710387b61cbf225e51517e702009659f4416
|
[
"MIT"
] | null | null | null |
# Enter your code here. Read input from STDIN. Print output to STDOUT
import calendar
mm, dd, yyyy = map(int, input().split())
# weekday = calendar.weekday(yyyy, mm, dd)
# day_name = calendar.day_name[weekday]
# print(day_name.upper())
print(calendar.day_name[calendar.weekday(yyyy, mm, dd)].upper())
| 30.4
| 69
| 0.723684
| 47
| 304
| 4.595745
| 0.510638
| 0.12963
| 0.175926
| 0.194444
| 0.212963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128289
| 304
| 9
| 70
| 33.777778
| 0.815094
| 0.559211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
8870e8927fc8fc205499d31541f2a794beb97349
| 331
|
py
|
Python
|
ems/datasets/case/case_set.py
|
EMSTrack/Algorithms
|
139160619a935001582a60d3f43c0e33082bce99
|
[
"BSD-3-Clause"
] | 1
|
2020-07-15T00:16:48.000Z
|
2020-07-15T00:16:48.000Z
|
ems/datasets/case/case_set.py
|
EMSTrack/Algorithms
|
139160619a935001582a60d3f43c0e33082bce99
|
[
"BSD-3-Clause"
] | 40
|
2018-12-06T23:13:52.000Z
|
2019-07-11T01:24:13.000Z
|
ems/datasets/case/case_set.py
|
EMSTrack/Algorithms
|
139160619a935001582a60d3f43c0e33082bce99
|
[
"BSD-3-Clause"
] | null | null | null |
# Interface for a "set" of cases
class CaseSet:
def __init__(self, time):
self.time = time
def __len__(self):
raise NotImplementedError()
def iterator(self):
raise NotImplementedError()
def get_time(self):
return self.time
def set_time(self, time):
self.time = time
| 18.388889
| 35
| 0.613293
| 40
| 331
| 4.825
| 0.45
| 0.207254
| 0.186529
| 0.165803
| 0.207254
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.296073
| 331
| 17
| 36
| 19.470588
| 0.828326
| 0.090634
| 0
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.454545
| false
| 0
| 0
| 0.090909
| 0.636364
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
88893e5984bb1933a5a2eff5ce092fb5e2284e15
| 50
|
py
|
Python
|
pyreq/collector/__init__.py
|
ksks2211/pyreq
|
6949039109b39f3ca0e9a39980759495173195ac
|
[
"MIT"
] | null | null | null |
pyreq/collector/__init__.py
|
ksks2211/pyreq
|
6949039109b39f3ca0e9a39980759495173195ac
|
[
"MIT"
] | null | null | null |
pyreq/collector/__init__.py
|
ksks2211/pyreq
|
6949039109b39f3ca0e9a39980759495173195ac
|
[
"MIT"
] | null | null | null |
from .selector import collect_attr, collect_links
| 25
| 49
| 0.86
| 7
| 50
| 5.857143
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 50
| 1
| 50
| 50
| 0.911111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
888ac9bf14693083f3623c7db7f95cc85e46695d
| 108
|
py
|
Python
|
Python/python/inp.py
|
manishaverma1012/programs
|
dd77546219eab2f2ee81dd0d599b78ebd8f95957
|
[
"MIT"
] | null | null | null |
Python/python/inp.py
|
manishaverma1012/programs
|
dd77546219eab2f2ee81dd0d599b78ebd8f95957
|
[
"MIT"
] | null | null | null |
Python/python/inp.py
|
manishaverma1012/programs
|
dd77546219eab2f2ee81dd0d599b78ebd8f95957
|
[
"MIT"
] | null | null | null |
a = int (input(' enter the first number '))
b = int (input('enter the second number '))
z = a + b
print(a+b)
| 27
| 43
| 0.62037
| 20
| 108
| 3.35
| 0.55
| 0.238806
| 0.38806
| 0.477612
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.203704
| 108
| 4
| 44
| 27
| 0.77907
| 0
| 0
| 0
| 0
| 0
| 0.440367
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
88967a0bb5776cbd0a969ab46518674c991a8090
| 149
|
py
|
Python
|
savu/plugins/azimuthal_integrators/pyfai_azimuthal_integrator_tools.py
|
elainehoml/Savu
|
e4772704606f71d6803d832084e10faa585e7358
|
[
"Apache-2.0"
] | 39
|
2015-03-30T14:03:42.000Z
|
2022-03-16T16:50:33.000Z
|
savu/plugins/azimuthal_integrators/pyfai_azimuthal_integrator_tools.py
|
elainehoml/Savu
|
e4772704606f71d6803d832084e10faa585e7358
|
[
"Apache-2.0"
] | 670
|
2015-02-11T11:08:09.000Z
|
2022-03-21T09:27:57.000Z
|
savu/plugins/azimuthal_integrators/pyfai_azimuthal_integrator_tools.py
|
elainehoml/Savu
|
e4772704606f71d6803d832084e10faa585e7358
|
[
"Apache-2.0"
] | 54
|
2015-02-13T14:09:52.000Z
|
2022-01-24T13:57:09.000Z
|
from savu.plugins.plugin_tools import PluginTools
class PyfaiAzimuthalIntegratorTools(PluginTools):
"""1D azimuthal integrator by pyFAI
"""
| 24.833333
| 49
| 0.785235
| 15
| 149
| 7.733333
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007813
| 0.14094
| 149
| 5
| 50
| 29.8
| 0.898438
| 0.214765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ee0e11a3510630ea4335d1cad7681b4795b1ab78
| 167
|
py
|
Python
|
api_postgres/urls.py
|
DCMidwood/dap-backend
|
92ddc605618679ccc3ec97a21f5ea296f74fa51f
|
[
"MIT"
] | null | null | null |
api_postgres/urls.py
|
DCMidwood/dap-backend
|
92ddc605618679ccc3ec97a21f5ea296f74fa51f
|
[
"MIT"
] | null | null | null |
api_postgres/urls.py
|
DCMidwood/dap-backend
|
92ddc605618679ccc3ec97a21f5ea296f74fa51f
|
[
"MIT"
] | null | null | null |
from django.db import router
from django.urls import path, include
from .router import router
from . import views
urlpatterns = [
path('', include(router.urls))
]
| 20.875
| 37
| 0.742515
| 23
| 167
| 5.391304
| 0.434783
| 0.16129
| 0.258065
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161677
| 167
| 8
| 38
| 20.875
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.571429
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ee18b0a3a39e7737a9543d1d38ad02d01fdabee0
| 2,415
|
py
|
Python
|
terrascript/resource/terraform_provider_graylog/graylog.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
terrascript/resource/terraform_provider_graylog/graylog.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
terrascript/resource/terraform_provider_graylog/graylog.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# terrascript/resource/terraform-provider-graylog/graylog.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:17:31 UTC)
import terrascript
class graylog_alarm_callback(terrascript.Resource):
pass
class graylog_alert_condition(terrascript.Resource):
pass
class graylog_dashboard(terrascript.Resource):
pass
class graylog_dashboard_widget(terrascript.Resource):
pass
class graylog_dashboard_widget_positions(terrascript.Resource):
pass
class graylog_event_definition(terrascript.Resource):
pass
class graylog_event_notification(terrascript.Resource):
pass
class graylog_extractor(terrascript.Resource):
pass
class graylog_grok_pattern(terrascript.Resource):
pass
class graylog_index_set(terrascript.Resource):
pass
class graylog_input(terrascript.Resource):
pass
class graylog_input_static_fields(terrascript.Resource):
pass
class graylog_ldap_setting(terrascript.Resource):
pass
class graylog_output(terrascript.Resource):
pass
class graylog_pipeline(terrascript.Resource):
pass
class graylog_pipeline_connection(terrascript.Resource):
pass
class graylog_pipeline_rule(terrascript.Resource):
pass
class graylog_role(terrascript.Resource):
pass
class graylog_sidecar_collector(terrascript.Resource):
pass
class graylog_sidecar_configuration(terrascript.Resource):
pass
class graylog_sidecars(terrascript.Resource):
pass
class graylog_stream(terrascript.Resource):
pass
class graylog_stream_output(terrascript.Resource):
pass
class graylog_stream_rule(terrascript.Resource):
pass
class graylog_user(terrascript.Resource):
pass
__all__ = [
"graylog_alarm_callback",
"graylog_alert_condition",
"graylog_dashboard",
"graylog_dashboard_widget",
"graylog_dashboard_widget_positions",
"graylog_event_definition",
"graylog_event_notification",
"graylog_extractor",
"graylog_grok_pattern",
"graylog_index_set",
"graylog_input",
"graylog_input_static_fields",
"graylog_ldap_setting",
"graylog_output",
"graylog_pipeline",
"graylog_pipeline_connection",
"graylog_pipeline_rule",
"graylog_role",
"graylog_sidecar_collector",
"graylog_sidecar_configuration",
"graylog_sidecars",
"graylog_stream",
"graylog_stream_output",
"graylog_stream_rule",
"graylog_user",
]
| 18.157895
| 73
| 0.767288
| 259
| 2,415
| 6.80695
| 0.204633
| 0.280204
| 0.326149
| 0.381168
| 0.553035
| 0.414067
| 0.056722
| 0
| 0
| 0
| 0
| 0.005862
| 0.152381
| 2,415
| 132
| 74
| 18.295455
| 0.855398
| 0.05383
| 0
| 0.320513
| 1
| 0
| 0.223488
| 0.132778
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.320513
| 0.012821
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
ee29f08458f5e7a5ac9704d063e8d9622f1e15d7
| 167
|
py
|
Python
|
test/speedTests/setup.py
|
henrystoldt/MAPLEAF
|
af970d3e8200832f5e70d537b15ad38dd74fa551
|
[
"MIT"
] | 15
|
2020-09-11T19:25:07.000Z
|
2022-03-12T16:34:53.000Z
|
test/speedTests/setup.py
|
henrystoldt/MAPLEAF
|
af970d3e8200832f5e70d537b15ad38dd74fa551
|
[
"MIT"
] | null | null | null |
test/speedTests/setup.py
|
henrystoldt/MAPLEAF
|
af970d3e8200832f5e70d537b15ad38dd74fa551
|
[
"MIT"
] | 3
|
2021-12-24T19:39:53.000Z
|
2022-03-29T01:06:28.000Z
|
import numpy
from setuptools import setup
from Cython.Build import cythonize
setup(ext_modules=cythonize("addScalarCython.pyx"), include_dirs=[numpy.get_include()])
| 23.857143
| 87
| 0.820359
| 22
| 167
| 6.090909
| 0.681818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083832
| 167
| 6
| 88
| 27.833333
| 0.875817
| 0
| 0
| 0
| 0
| 0
| 0.113772
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ee2b54054802ca4d528abc0c273ba16fb239f0db
| 7,239
|
py
|
Python
|
mw4/test/test_units/environment/test_skymeter.py
|
Raddock/MountWizzard4
|
15efed77c1634461184e90a7cf6419eec0dec909
|
[
"Apache-2.0"
] | null | null | null |
mw4/test/test_units/environment/test_skymeter.py
|
Raddock/MountWizzard4
|
15efed77c1634461184e90a7cf6419eec0dec909
|
[
"Apache-2.0"
] | null | null | null |
mw4/test/test_units/environment/test_skymeter.py
|
Raddock/MountWizzard4
|
15efed77c1634461184e90a7cf6419eec0dec909
|
[
"Apache-2.0"
] | null | null | null |
############################################################
# -*- coding: utf-8 -*-
#
# # # # # # #
# ## ## # ## # #
# # # # # # # # # # #
# # ## # ## ## ######
# # # # # # #
#
# Python-based Tool for interaction with the 10micron mounts
# GUI with PyQT5 for python
# Python v3.7.5
#
# Michael Würtenberger
# (c) 2019
#
# Licence APL2.0
#
###########################################################
# standard libraries
from unittest import mock
import pytest
import datetime
# external packages
import indibase
# local import
from mw4.test.test_units.setupQt import setupQt
host_ip = 'astro-mount.fritz.box'
@pytest.fixture(autouse=True, scope='module')
def module_setup_teardown():
global app, spy, mwGlob, test
app, spy, mwGlob, test = setupQt()
def test_name():
name = 'MBox'
app.skymeter.name = name
assert name == app.skymeter.name
def test_newDevice_1():
with mock.patch.object(app.skymeter.client,
'isServerConnected',
return_value=True):
with mock.patch.object(app.skymeter.client,
'getDevice',
return_value=1):
suc = app.skymeter.newDevice('test')
assert suc
assert app.skymeter.device is None
def test_newDevice_2():
app.skymeter.name = 'Test'
with mock.patch.object(app.skymeter.client,
'isServerConnected',
return_value=True):
with mock.patch.object(app.skymeter.client,
'getDevice',
return_value=1):
suc = app.skymeter.newDevice('Test')
assert suc
assert app.skymeter.device == 1
def test_removeDevice_1():
app.skymeter.name = 'Test'
with mock.patch.object(app.skymeter.client,
'isServerConnected',
return_value=True):
suc = app.skymeter.removeDevice('Test')
assert suc
assert app.skymeter.device is None
assert app.skymeter.data == {}
def test_startCommunication_1():
app.skymeter.name = ''
with mock.patch.object(app.skymeter.client,
'connectServer',
return_value=False):
suc = app.skymeter.startCommunication()
assert not suc
def test_setUpdateRate_1():
app.skymeter.name = 'test'
suc = app.skymeter.setUpdateConfig('false')
assert not suc
def test_setUpdateRate_2():
app.skymeter.name = 'test'
app.skymeter.device = None
suc = app.skymeter.setUpdateConfig('test')
assert not suc
def test_setUpdateRate_3():
class Test:
@staticmethod
def getNumber(test):
return {}
app.skymeter.name = 'test'
app.skymeter.device = Test()
suc = app.skymeter.setUpdateConfig('test')
assert not suc
def test_setUpdateRate_4():
class Test:
@staticmethod
def getNumber(test):
return {'PERIOD': 1}
app.skymeter.name = 'test'
app.skymeter.device = Test()
suc = app.skymeter.setUpdateConfig('test')
assert suc
def test_setUpdateRate_5():
class Test:
@staticmethod
def getNumber(test):
return {'PERIOD': 10}
app.skymeter.name = 'test'
app.skymeter.device = Test()
with mock.patch.object(app.skymeter.client,
'sendNewNumber',
return_value=False):
suc = app.skymeter.setUpdateConfig('test')
assert not suc
def test_setUpdateRate_6():
class Test:
@staticmethod
def getNumber(test):
return {'PERIOD': 10}
app.skymeter.name = 'test'
app.skymeter.device = Test()
with mock.patch.object(app.skymeter.client,
'sendNewNumber',
return_value=True):
suc = app.skymeter.setUpdateConfig('test')
assert suc
def test_updateNumber_1():
app.skymeter.device = None
app.skymeter.name = 'test'
suc = app.skymeter.updateNumber('false', 'WEATHER_HUMIDITY')
assert not suc
def test_updateNumber_2():
app.skymeter.device = 1
app.skymeter.name = 'test'
suc = app.skymeter.updateNumber('false', 'WEATHER_HUMIDITY')
assert not suc
def test_updateNumber_3():
app.skymeter.device = indibase.indiBase.Device()
app.skymeter.name = 'test'
values = {'WEATHER_DEWPOINT': 5,
'WEATHER_TEMPERATURE': 10,
'WEATHER_HUMIDITY': 50,
}
with mock.patch.object(app.skymeter.device,
'getNumber',
return_value=values):
suc = app.skymeter.updateNumber('test', 'WEATHER_PARAMETERS')
assert suc
assert app.skymeter.data['WEATHER_DEWPOINT'] == 5
assert app.skymeter.data['WEATHER_TEMPERATURE'] == 10
assert app.skymeter.data['WEATHER_HUMIDITY'] == 50
def test_updateNumber_4():
app.skymeter.device = indibase.indiBase.Device()
app.skymeter.name = 'test'
values = {'WEATHER_DEWPOINT': 5,
}
with mock.patch.object(app.skymeter.device,
'getNumber',
return_value=values):
suc = app.skymeter.updateNumber('test', 'WEATHER_PARAMETERS')
assert suc
def test_updateNumber_5():
app.skymeter.device = indibase.indiBase.Device()
app.skymeter.name = 'test'
values = {'WEATHER_HUMIDITY': 50,
}
with mock.patch.object(app.skymeter.device,
'getNumber',
return_value=values):
suc = app.skymeter.updateNumber('test', 'WEATHER_PARAMETERS')
assert suc
def test_updateNumber_6():
app.skymeter.device = indibase.indiBase.Device()
app.skymeter.name = 'test'
values = {'WEATHER_TEMPERATURE': 10,
}
with mock.patch.object(app.skymeter.device,
'getNumber',
return_value=values):
suc = app.skymeter.updateNumber('test', 'WEATHER_PARAMETERS')
assert suc
def test_updateNumber_7():
app.skymeter.device = indibase.indiBase.Device()
app.skymeter.name = 'test'
values = {'WEATHER_TEMPERATURE': 20,
'WEATHER_HUMIDITY': 50,
}
with mock.patch.object(app.skymeter.device,
'getNumber',
return_value=values):
suc = app.skymeter.updateNumber('test', 'WEATHER_PARAMETERS')
assert suc
def test_updateNumber_8():
app.skymeter.device = indibase.indiBase.Device()
app.skymeter.name = 'test'
t = datetime.datetime.utcnow()
values = {'WEATHER_TEMPERATURE': 10,
'WEATHER_HUMIDITY': 50,
}
app.skymeter.data = {'WEATHER_TEMPERATURE': 10,
'WEATHER_HUMIDITY': 50,
}
with mock.patch.object(app.skymeter.device,
'getNumber',
return_value=values):
suc = app.skymeter.updateNumber('test', 'WEATHER_PARAMETERS')
assert suc
| 29.307692
| 69
| 0.561404
| 722
| 7,239
| 5.520776
| 0.144044
| 0.198695
| 0.093828
| 0.076267
| 0.80858
| 0.785248
| 0.734069
| 0.705218
| 0.684145
| 0.657301
| 0
| 0.013649
| 0.311783
| 7,239
| 246
| 70
| 29.426829
| 0.786431
| 0.0431
| 0
| 0.677596
| 0
| 0
| 0.110353
| 0.003115
| 0
| 0
| 0
| 0
| 0.142077
| 1
| 0.131148
| false
| 0
| 0.027322
| 0.021858
| 0.202186
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ee46d35ab1668d42f5f7b29ff1150e7d34015b12
| 136
|
py
|
Python
|
pyembroidery/StringHelper.py
|
teosavv/pyembroidery
|
00985f423e64ea1a454e5484012c19a64f26eb2c
|
[
"MIT"
] | 45
|
2018-07-08T09:49:30.000Z
|
2022-03-23T07:01:15.000Z
|
pyembroidery/StringHelper.py
|
teosavv/pyembroidery
|
00985f423e64ea1a454e5484012c19a64f26eb2c
|
[
"MIT"
] | 59
|
2018-07-05T22:05:58.000Z
|
2022-02-20T01:01:20.000Z
|
pyembroidery/StringHelper.py
|
teosavv/pyembroidery
|
00985f423e64ea1a454e5484012c19a64f26eb2c
|
[
"MIT"
] | 23
|
2018-08-10T17:58:04.000Z
|
2022-03-29T03:41:46.000Z
|
def is_string(thing):
try:
return isinstance(thing, basestring)
except NameError:
return isinstance(thing, str)
| 22.666667
| 44
| 0.661765
| 15
| 136
| 5.933333
| 0.733333
| 0.359551
| 0.47191
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.257353
| 136
| 5
| 45
| 27.2
| 0.881188
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
ee5aa8111bf2aa378c7a86c2ca612023d1ed7e35
| 34
|
py
|
Python
|
modules/2.79/bpy/types/XnorController.py
|
cmbasnett/fake-bpy-module
|
acb8b0f102751a9563e5b5e5c7cd69a4e8aa2a55
|
[
"MIT"
] | null | null | null |
modules/2.79/bpy/types/XnorController.py
|
cmbasnett/fake-bpy-module
|
acb8b0f102751a9563e5b5e5c7cd69a4e8aa2a55
|
[
"MIT"
] | null | null | null |
modules/2.79/bpy/types/XnorController.py
|
cmbasnett/fake-bpy-module
|
acb8b0f102751a9563e5b5e5c7cd69a4e8aa2a55
|
[
"MIT"
] | null | null | null |
class XnorController:
pass
| 5.666667
| 21
| 0.676471
| 3
| 34
| 7.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.294118
| 34
| 5
| 22
| 6.8
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
ee7dd54917825e792568a4b85296e41851c52bfa
| 22
|
py
|
Python
|
twitter/__init__.py
|
alexcchan/twitter
|
0940dd67434e682b6be3e4c5b6ab5b0bb8b4b43a
|
[
"MIT"
] | null | null | null |
twitter/__init__.py
|
alexcchan/twitter
|
0940dd67434e682b6be3e4c5b6ab5b0bb8b4b43a
|
[
"MIT"
] | null | null | null |
twitter/__init__.py
|
alexcchan/twitter
|
0940dd67434e682b6be3e4c5b6ab5b0bb8b4b43a
|
[
"MIT"
] | null | null | null |
from twitter import *
| 11
| 21
| 0.772727
| 3
| 22
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 22
| 1
| 22
| 22
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c9abb685f203127b85cf8787c0edb02d103f9e6e
| 775
|
py
|
Python
|
Assignments/Sprint1/BinaryToAscii.py
|
mark-morelos/CS_Notes
|
339c47ae5d7e678b7ac98d6d78857d016c611e38
|
[
"MIT"
] | 1
|
2021-02-28T07:43:59.000Z
|
2021-02-28T07:43:59.000Z
|
Assignments/Sprint1/BinaryToAscii.py
|
mark-morelos/CS_Notes
|
339c47ae5d7e678b7ac98d6d78857d016c611e38
|
[
"MIT"
] | null | null | null |
Assignments/Sprint1/BinaryToAscii.py
|
mark-morelos/CS_Notes
|
339c47ae5d7e678b7ac98d6d78857d016c611e38
|
[
"MIT"
] | 1
|
2021-03-03T03:52:21.000Z
|
2021-03-03T03:52:21.000Z
|
"""
Given a binary string (ASCII encoded), write a function that returns the equivalent decoded text.
Every eight bits in the binary string represents one character on the ASCII table.
Examples:
csBinaryToASCII("011011000110000101101101011000100110010001100001") -> "lambda"
01101100 -> 108 -> "l"
01100001 -> 97 -> "a"
01101101 -> 109 -> "m"
01100010 -> 98 -> "b"
01100100 -> 100 -> "d"
01100001 -> 97 -> "a"
csBinaryToASCII("") -> ""
Notes:
The input string will always be a valid binary string.
Characters can be in the range from "00000000" to "11111111" (inclusive).
In the case of an empty input string, your function should return an empty string.
"""
def csBinaryToASCII(binary):
return "".join([chr(int(binary[i:i+8], 2)) for i in range(0, len(binary), 8)])
| 32.291667
| 97
| 0.710968
| 109
| 775
| 5.055046
| 0.66055
| 0.065336
| 0.039927
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.201229
| 0.16
| 775
| 24
| 98
| 32.291667
| 0.645161
| 0.843871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
c9c7687b7e1d2fe3b4a86c1356054ddc4ec28e9a
| 25,926
|
py
|
Python
|
Plot_final.py
|
david0811/atms597_proj3
|
3f89c4e4d745b19879dee9bf71fa01ce23e5a909
|
[
"MIT"
] | null | null | null |
Plot_final.py
|
david0811/atms597_proj3
|
3f89c4e4d745b19879dee9bf71fa01ce23e5a909
|
[
"MIT"
] | null | null | null |
Plot_final.py
|
david0811/atms597_proj3
|
3f89c4e4d745b19879dee9bf71fa01ce23e5a909
|
[
"MIT"
] | 2
|
2020-02-18T19:33:07.000Z
|
2020-02-28T05:53:05.000Z
|
import xarray as xr
import pandas as pd
import cartopy
import cartopy.crs as ccrs
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib.cm import get_cmap
import numpy as np
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
import shapely.geometry as sgeom
import cartopy.feature as cfeature
from copy import copy
# Define functions for plotting
def find_side(ls, side):
"""
Given a shapely LineString which is assumed to be rectangular, return the
line corresponding to a given side of the rectangle.
"""
minx, miny, maxx, maxy = ls.bounds
points = {'left': [(minx, miny), (minx, maxy)],
'right': [(maxx, miny), (maxx, maxy)],
'bottom': [(minx, miny), (maxx, miny)],
'top': [(minx, maxy), (maxx, maxy)],}
return sgeom.LineString(points[side])
def lambert_xticks(ax, ticks):
"""
Draw ticks on the bottom x-axis of a Lambert Conformal projection.
"""
te = lambda xy: xy[0]
lc = lambda t, n, b: np.vstack((np.zeros(n) + t, np.linspace(b[2], b[3], n))).T
xticks, xticklabels = _lambert_ticks(ax, ticks, 'bottom', lc, te)
ax.xaxis.tick_bottom()
ax.set_xticks(xticks)
ax.set_xticklabels([ax.xaxis.get_major_formatter()(xtick) for xtick in xticklabels])
def lambert_yticks(ax, ticks):
"""
Draw ticks on the left y-axis of a Lambert Conformal projection.
"""
te = lambda xy: xy[1]
lc = lambda t, n, b: np.vstack((np.linspace(b[0], b[1], n), np.zeros(n) + t)).T
yticks, yticklabels = _lambert_ticks(ax, ticks, 'left', lc, te)
ax.yaxis.tick_left()
ax.set_yticks(yticks)
ax.set_yticklabels([ax.yaxis.get_major_formatter()(ytick) for ytick in yticklabels])
def _lambert_ticks(ax, ticks, tick_location, line_constructor, tick_extractor):
"""
Get the tick locations and labels for an axis of a Lambert Conformal projection.
"""
outline_patch = sgeom.LineString(ax.outline_patch.get_path().vertices.tolist())
axis = find_side(outline_patch, tick_location)
n_steps = 30
extent = ax.get_extent(ccrs.PlateCarree())
_ticks = []
for t in ticks:
xy = line_constructor(t, n_steps, extent)
proj_xyz = ax.projection.transform_points(ccrs.Geodetic(), xy[:, 0], xy[:, 1])
xyt = proj_xyz[..., :2]
ls = sgeom.LineString(xyt.tolist())
locs = axis.intersection(ls)
if not locs:
tick = [None]
else:
tick = tick_extractor(locs.xy)
_ticks.append(tick[0])
# Remove ticks that aren't visible:
ticklabels = copy(ticks)
while True:
try:
index = _ticks.index(None)
except ValueError:
break
_ticks.pop(index)
ticklabels.pop(index)
return _ticks, ticklabels
def plot_250hPa_winds(lon, lat, u, v, wspd, mode):
"""
Plot filled contours overlayed with vectors
Input
-------
lon = lon values extracted from xarray dataset (1-D)
lat = lat values extracted from xarray dataset (1-D)
u = U-wind at 250 hPa, shape = lon X lat
v = V-wind at 250 hPa, shape = lon X lat
wspd = Wind speed at 250 hPa, shape = lon X lat
mode = 'A' for anomaly data, 'LM' for long term means, and 'EM' for extreme precipitation days
Output
--------
matplotlib figure with filled contours of wind speed overlayed with wind vectors
"""
# change data and lon to cyclic coordinates
u, lon_new = add_cyclic_point(u.values, coord = lon.values)
v, lon_new = add_cyclic_point(v.values, coord = lon.values)
wspd, lon = add_cyclic_point(wspd.values, coord = lon.values)
# Create a figure
fig = plt.figure(figsize = (10, 5))
# Set the GeoAxes to the PlateCarree projection
ax = plt.axes(projection = ccrs.PlateCarree())
# Add coastlines
ax.coastlines('50m', linewidth = 0.8)
# Assign data for filled contour
data = wspd
if mode == 'EM' or mode == 'LM':
# Plot filled contours
plt.contourf(lon, lat, data, 20, transform = ccrs.PlateCarree(),
cmap = get_cmap("viridis"))
# Add a color bar
cbar = plt.colorbar(ax = ax, shrink = .75)
cbar.ax.set_ylabel('m/s', fontsize = 18)
# Plot the vectors and reference vector
rd = 5 #regrid_delta
quiver = plt.quiver(lon[::rd], lat[::rd], u[::rd, ::rd], v[::rd, ::rd],
transform = ccrs.PlateCarree(), headwidth = 5., headlength = 5.)
ax.quiverkey(quiver, X = 0.9, Y = 1.03, U = 20., label = '20 m/s',
coordinates='axes', labelpos='E')
elif mode == 'A':
# Plot filled contours
maxval, minval = np.abs(np.amax(data)), np.abs(np.amin(data))
normmax = np.amax([maxval, minval])
norm = mpl.colors.Normalize(vmin = -normmax, vmax = normmax)
plt.contourf(lon, lat, data, 20, transform = ccrs.PlateCarree(),
norm = norm, cmap = get_cmap("RdBu_r"))
# Add a color bar
cbar = plt.colorbar(ax = ax, shrink = .75)
cbar.ax.set_ylabel('m/s', fontsize = 18)
# Plot the vectors and reference vector
rd = 5 #regrid_delta
quiver = plt.quiver(lon[::rd], lat[::rd], u[::rd, ::rd], v[::rd, ::rd],
transform = ccrs.PlateCarree(), headwidth = 5., headlength = 5.)
ax.quiverkey(quiver, X = 0.9, Y = 1.03, U = 3., label = '3 m/s',
coordinates = 'axes', labelpos = 'E')
# *must* call draw in order to get the axis boundary used to add ticks:
fig.canvas.draw()
# Add the tick marks
xticks = np.arange(0., 360., 30.)
yticks = np.arange(-90., 100., 15.)
# Label the end-points of the gridlines using the custom tick makers:
ax.xaxis.set_major_formatter(LONGITUDE_FORMATTER)
ax.yaxis.set_major_formatter(LATITUDE_FORMATTER)
lambert_xticks(ax, xticks)
lambert_yticks(ax, yticks)
# Set title and figure name
if mode == 'LM':
plt.title('250 hPa Winds'+'\n'+'long term mean', fontsize=18)
pname = 'p250_longterm.png'
elif mode == 'EM':
plt.title('250 hPa Winds'+'\n'+'extreme precipitation days', fontsize=18)
pname = 'p250_extreme.png'
elif mode == 'A':
plt.title('250 hPa Winds'+'\n'+'anomaly fields', fontsize=18)
pname = 'p250_anom.png'
ax.set_global(); ax.gridlines();
plt.tight_layout()
#plot_dir = '/mnt/a/u/sciteam/chug/Laplata_tracers/plots/dipole_assessment/'
#pname = plot_dir + name + '.png'
plt.savefig(pname, bbox_inches = 'tight')
plt.show()
def plot_500hPa_winds_geopot(lon, lat, u, v, z, mode):
"""
Plot filled contours overlayed with vectors
Input
-------
lon = lon values extracted from xarray dataset (1-D)
lat = lat values extracted from xarray dataset (1-D)
u = U-wind at 500 hPa, shape = lon X lat
v = V-wind at 500 hPa, shape = lon X lat
z = Geopotential height at 500 hPa, shape = lon X lat
mode = 'A' for anomaly data, 'LM' for long term means, and 'EM' for extreme precipitation days
Output
--------
matplotlib figure with filled contours of geopotential height overlayed with wind vectors
"""
# change data and lon to cyclic coordinates
u, lon_new = add_cyclic_point(u.values, coord = lon.values)
v, lon_new = add_cyclic_point(v.values, coord = lon.values)
z, lon = add_cyclic_point(z.values, coord = lon.values)
# Create a figure
fig = plt.figure(figsize = (10, 5))
# Set the GeoAxes to the PlateCarree projection
ax = plt.axes(projection = ccrs.PlateCarree())
# Add coastlines
ax.coastlines('50m', linewidth=0.8)
# Assign data for filled contour
data = z
if mode == 'EM' or mode == 'LM':
# Plot filled contours
plt.contourf(lon, lat, data, 20, transform = ccrs.PlateCarree(),
cmap = get_cmap("viridis"))
# Add a color bar
cbar = plt.colorbar(ax = ax, shrink = .75)
cbar.ax.set_ylabel('m', fontsize = 18)
# Plot the vectors and reference vector
rd = 5 #regrid_delta
quiver = plt.quiver(lon[::rd], lat[::rd], u[::rd, ::rd], v[::rd, ::rd],
transform = ccrs.PlateCarree(), headwidth = 5., headlength = 5.)
ax.quiverkey(quiver, X = 0.9, Y = 1.03, U = 10., label = '10 m/s',
coordinates = 'axes', labelpos = 'E')
elif mode == 'A':
# Plot filled contours
maxval, minval = np.abs(np.amax(data)), np.abs(np.amin(data))
normmax = np.amax([maxval, minval])
norm = mpl.colors.Normalize(vmin = -normmax, vmax = normmax)
plt.contourf(lon, lat, data, 20, transform = ccrs.PlateCarree(),
norm = norm, cmap = get_cmap("RdBu_r"))
# Add a color bar
cbar = plt.colorbar(ax = ax, shrink = .75)
cbar.ax.set_ylabel('m', fontsize = 18)
# Plot the vectors and reference vector
rd = 5 #regrid_delta
quiver = plt.quiver(lon[::rd], lat[::rd], u[::rd, ::rd], v[::rd, ::rd],
transform = ccrs.PlateCarree(), headwidth = 5., headlength = 5.)
ax.quiverkey(quiver, X = 0.9, Y = 1.03, U = 3., label = '3 m/s',
coordinates = 'axes', labelpos = 'E')
# *must* call draw in order to get the axis boundary used to add ticks:
fig.canvas.draw()
# Add the tick marks
xticks = np.arange(0., 360., 30.)
yticks = np.arange(-90., 100., 15.)
# Label the end-points of the gridlines using the custom tick makers:
ax.xaxis.set_major_formatter(LONGITUDE_FORMATTER)
ax.yaxis.set_major_formatter(LATITUDE_FORMATTER)
lambert_xticks(ax, xticks)
lambert_yticks(ax, yticks)
#Set title and figure name
if mode == 'LM':
plt.title('500 hPa Winds, GPH'+'\n'+'long term mean', fontsize=18)
pname = 'p500_longterm.png'
elif mode == 'EM':
plt.title('500 hPa Winds, GPH'+'\n'+'extreme precipitation days', fontsize=18)
pname = 'p500_extreme.png'
elif mode == 'A':
plt.title('500 hPa Winds, GPH'+'\n'+'anomaly fields', fontsize=18)
pname = 'p500_anom.png'
ax.set_global(); ax.gridlines();
plt.tight_layout()
#plot_dir = '/mnt/a/u/sciteam/chug/Laplata_tracers/plots/dipole_assessment/'
#pname = plot_dir + name + '.png'
plt.savefig(pname, bbox_inches = 'tight')
plt.show()
def plot_850hPa(lon, lat, u, v, t, q, mode):
"""
Plot filled contours overlayed with contours and vectors
Input
-------
lon = lon values extracted from xarray dataset (1-D)
lat = lat values extracted from xarray dataset (1-D)
u = U-wind at 850 hPa, shape = lon X lat
v = V-wind at 850 hPa, shape = lon X lat
t = Temperature at 850 hPa, shape = lon X lat
q = Specific humidity at 850 hPa, shape = lon X lat
mode = 'A' for anomaly data, 'LM' for long term means, and 'EM' for extreme precipitation days
Output
--------
matplotlib figure with filled contours of temperature overlayed with contours of spec humidity and wind vectors
"""
# change data and lon to cyclic coordinates
u, lon_new = add_cyclic_point(u.values, coord = lon.values)
v, lon_new = add_cyclic_point(v.values, coord = lon.values)
q, lon_new = add_cyclic_point(q.values, coord = lon.values)
t, lon = add_cyclic_point(t.values, coord = lon.values)
# Create a figure
fig = plt.figure(figsize = (10, 5))
# Set the GeoAxes to the PlateCarree projection
ax = plt.axes(projection = ccrs.PlateCarree())
# Add coastlines
ax.coastlines('50m', linewidth = 0.8)
# Assign data for filled contour
data = t
if mode == 'EM' or mode == 'LM':
# Plot filled contours
plt.contourf(lon, lat, data, 20, transform = ccrs.PlateCarree(),
cmap = get_cmap("viridis"))
# Add a color bar
cbar = plt.colorbar(ax = ax, shrink = .75)
cbar.ax.set_ylabel('$^{o}C$', fontsize = 18)
# Plot contours
plt.contour(lon, lat, q, transform = ccrs.PlateCarree(), colors = 'w')
# Plot the vectors and reference vector
rd = 5 #regrid_delta
quiver = plt.quiver(lon[::rd], lat[::rd], u[::rd, ::rd], v[::rd, ::rd],
transform = ccrs.PlateCarree(), headwidth = 5., headlength = 5.)
ax.quiverkey(quiver, X = 0.9, Y = 1.03, U = 8., label = '8 m/s',
coordinates = 'axes', labelpos = 'E')
elif mode == 'A':
# Plot filled contours
maxval, minval = np.abs(np.amax(data)), np.abs(np.amin(data))
normmax = np.amax([maxval, minval])
norm = mpl.colors.Normalize(vmin = -normmax, vmax = normmax)
plt.contourf(lon, lat, data, 20, transform = ccrs.PlateCarree(),
norm = norm, cmap = get_cmap("RdBu_r"))
# Add a color bar
cbar = plt.colorbar(ax = ax, shrink = .75)
cbar.ax.set_ylabel('$^{o}C$', fontsize = 18)
# Plot contours
plt.contour(lon, lat, q, transform = ccrs.PlateCarree())
# Plot the vectors and reference vector
rd = 5 #regrid_delta
quiver = plt.quiver(lon[::rd], lat[::rd], u[::rd, ::rd], v[::rd, ::rd],
transform = ccrs.PlateCarree(), headwidth = 5., headlength = 5.)
ax.quiverkey(quiver, X = 0.9, Y = 1.03, U = 3., label = '3 m/s',
coordinates = 'axes', labelpos = 'E')
# *must* call draw in order to get the axis boundary used to add ticks:
fig.canvas.draw()
# Add the tick marks
xticks = np.arange(0., 360., 30.)
yticks = np.arange(-90., 100., 15.)
# Label the end-points of the gridlines using the custom tick makers:
ax.xaxis.set_major_formatter(LONGITUDE_FORMATTER)
ax.yaxis.set_major_formatter(LATITUDE_FORMATTER)
lambert_xticks(ax, xticks)
lambert_yticks(ax, yticks)
# Set title and figure name
if mode == 'LM':
plt.title('850 hPa Winds, Temp, Humidity'+'\n'+'long term mean', fontsize = 18)
pname = 'p850_longterm.png'
elif mode == 'EM':
plt.title('850 hPa Winds, Temp, Humidity'+'\n'+'extreme precipitation days', fontsize = 18)
pname = 'p850_extreme.png'
elif mode == 'A':
plt.title('850 hPa Winds, Temp, Humidity'+'\n'+'anomaly fields', fontsize = 18)
pname = 'p850_anom.png'
ax.set_global(); ax.gridlines();
plt.tight_layout()
#plot_dir = '/mnt/a/u/sciteam/chug/Laplata_tracers/plots/dipole_assessment/'
#pname = plot_dir + name + '.png'
plt.savefig(pname, bbox_inches = 'tight')
plt.show()
def plot_sfc_winds_skt(lonu, latu, u, v, lont, latt, t, mode):
    """
    Plot filled contours of skin temperature overlaid with surface wind vectors.

    Input
    -------
    lonu = lon values extracted from wind dataset (1-D)
    latu = lat values extracted from wind dataset (1-D)
    u = U-wind at surface, shape = lonu X latu
    v = V-wind at surface, shape = lonu X latu
    lont = lon values extracted from skin temperature dataset (1-D)
    latt = lat values extracted from skin temperature dataset (1-D)
    t = Skin temperature, shape = lont X latt
    mode = 'A' for anomaly data, 'LM' for long term means, and 'EM' for extreme precipitation days

    Output
    --------
    matplotlib figure with filled contours of skin temperature overlayed with wind vectors;
    the figure is saved to a PNG (name depends on mode) and shown.

    Raises
    ------
    ValueError if mode is not one of 'A', 'LM', 'EM'.
    """
    # change data and lon to cyclic coordinates. u and v share one grid, so a
    # single cyclic longitude array serves both (the original code computed it
    # twice and left an unused copy).
    lonu_vals = lonu.values
    u, lonu = add_cyclic_point(u.values, coord = lonu_vals)
    v, _ = add_cyclic_point(v.values, coord = lonu_vals)
    t, lont = add_cyclic_point(t.values, coord = lont.values)
    # Create a figure with a PlateCarree GeoAxes and coastlines
    fig = plt.figure(figsize=(10, 5))
    ax = plt.axes(projection = ccrs.PlateCarree())
    ax.coastlines('50m', linewidth = 0.8)
    # Assign data for filled contour
    data = t
    # Pick colormap, normalization and reference-vector speed per mode
    if mode == 'EM' or mode == 'LM':
        norm = None            # default linear scaling
        cmap_name = "viridis"
        key_speed, key_label = 5., '5 m/s'
    elif mode == 'A':
        # Symmetric normalization about zero so the diverging map is centered
        maxval, minval = np.abs(np.amax(data)), np.abs(np.amin(data))
        normmax = np.amax([maxval, minval])
        norm = mpl.colors.Normalize(vmin = -normmax, vmax = normmax)
        cmap_name = "RdBu_r"
        key_speed, key_label = 3., '3 m/s'
    else:
        # Previously an unknown mode crashed later with NameError on pname;
        # fail fast with a clear message instead.
        raise ValueError("mode must be 'A', 'LM' or 'EM', got %r" % (mode,))
    # Plot filled contours and add a color bar
    plt.contourf(lont, latt, data, 20, transform = ccrs.PlateCarree(),
                 norm = norm, cmap = get_cmap(cmap_name))
    cbar = plt.colorbar(ax = ax, shrink = .75)
    cbar.ax.set_ylabel('$^{o}C$', fontsize = 18)
    # Plot the (subsampled) vectors and reference vector
    rd = 5 #regrid_delta
    quiver = plt.quiver(lonu[::rd], latu[::rd], u[::rd, ::rd], v[::rd, ::rd],
                        transform = ccrs.PlateCarree(), headwidth = 5., headlength = 5.)
    ax.quiverkey(quiver, X = 0.9, Y = 1.03, U = key_speed, label = key_label,
                 coordinates = 'axes', labelpos = 'E')
    # *must* call draw in order to get the axis boundary used to add ticks:
    fig.canvas.draw()
    # Add the tick marks
    xticks = np.arange(0., 360., 30.)
    yticks = np.arange(-80., 80., 20.)
    # Label the end-points of the gridlines using the custom tick makers:
    ax.xaxis.set_major_formatter(LONGITUDE_FORMATTER)
    ax.yaxis.set_major_formatter(LATITUDE_FORMATTER)
    lambert_xticks(ax, xticks)
    lambert_yticks(ax, yticks)
    # Set title and figure name per mode
    if mode == 'LM':
        plt.title('Surface Winds, Skin temp'+'\n'+'long term mean', fontsize = 18)
        pname = 'sfc_longterm.png'
    elif mode == 'EM':
        plt.title('Surface Winds, Skin temp'+'\n'+'extreme precipitation days', fontsize = 18)
        pname = 'sfc_extreme.png'
    else:  # mode == 'A' (validated above)
        plt.title('Surface Winds, Skin temp'+'\n'+'anomaly fields', fontsize = 18)
        pname = 'sfc_anom.png'
    ax.set_global(); ax.gridlines();
    plt.tight_layout()
    plt.savefig(pname, bbox_inches = 'tight')
    plt.show()
def plot_TCWV(lon, lat, q, mode):
    """
    Plot filled contours of total column water vapor.

    Input
    -------
    lon = lon values extracted from xarray dataset (1-D)
    lat = lat values extracted from xarray dataset (1-D)
    q = Total column water vapor, shape = lon X lat
    mode = 'A' for anomaly data, 'LM' for long term means, and 'EM' for extreme precipitation days

    Output
    --------
    matplotlib figure with filled contours of total column water vapor,
    saved to a PNG (name depends on mode) and shown.
    """
    # Wrap the field in longitude so contours close around the globe
    data, lon = add_cyclic_point(q.values, coord = lon.values)
    # Figure with a PlateCarree GeoAxes and coastlines
    fig = plt.figure(figsize=(10, 5))
    ax = plt.axes(projection = ccrs.PlateCarree())
    ax.coastlines('50m', linewidth = 0.8)
    if mode in ('EM', 'LM'):
        # Saturate values above 80 mm so the color range stays readable
        data[data > 80.] = 80.
        plt.contourf(lon, lat, data, 20, transform = ccrs.PlateCarree(),
                     cmap = get_cmap("viridis"))
        cbar = plt.colorbar(ax = ax, shrink = .75)
        cbar.ax.set_ylabel('$mm$', fontsize = 18)
    elif mode == 'A':
        # Center the diverging colormap on zero
        hi = np.abs(np.amax(data))
        lo = np.abs(np.amin(data))
        bound = np.amax([hi, lo])
        sym_norm = mpl.colors.Normalize(vmin = -bound, vmax = bound)
        plt.contourf(lon, lat, data, 20, transform = ccrs.PlateCarree(),
                     norm = sym_norm, cmap = get_cmap("RdBu_r"))
        cbar = plt.colorbar(ax = ax, shrink = .75)
        cbar.ax.set_ylabel('$mm$', fontsize = 18)
    # *must* call draw in order to get the axis boundary used to add ticks:
    fig.canvas.draw()
    # Label the end-points of the gridlines using the custom tick makers:
    ax.xaxis.set_major_formatter(LONGITUDE_FORMATTER)
    ax.yaxis.set_major_formatter(LATITUDE_FORMATTER)
    lambert_xticks(ax, np.arange(0., 360., 30.))
    lambert_yticks(ax, np.arange(-80., 80., 20.))
    # Title suffix and output file name per mode
    if mode == 'LM':
        suffix, pname = 'long term mean', 'tcwv_longterm.png'
    elif mode == 'EM':
        suffix, pname = 'extreme precipitation days', 'tcwv_extreme.png'
    elif mode == 'A':
        suffix, pname = 'anomaly field', 'tcwv_anom.png'
    plt.title('Total column water vapor'+'\n'+suffix, fontsize = 18)
    ax.set_global(); ax.gridlines();
    plt.tight_layout()
    plt.savefig(pname, bbox_inches = 'tight')
    plt.show()
###############################
# Open datasets and plot data #
###############################
# Base directory holding all netCDF inputs; every open_dataset below uses it.
path = 'atms597_proj3/data/'
# First let's plot the anomalies
# 250 hPa anomalies
xrdata = xr.open_dataset(path+'pressure_anomaly.nc')
lat = xrdata['lat']
lon = xrdata['lon']
u = xrdata['u_wind_250']
v = xrdata['v_wind_250']
# Precomputed 250 hPa wind speed lives in a second anomaly file.
# NOTE: the 500/850 hPa anomaly fields below are read from this second
# dataset as well (xrdata is rebound here) — presumably intentional; confirm.
xrdata = xr.open_dataset(path+'pressure_anomaly_new.nc')
wspd = xrdata['wind_spd_250']
plot_250hPa_winds(lon, lat, u, v, wspd, 'A')
# 500 hPa anomalies
u = xrdata['u_wind_500']
v = xrdata['v_wind_500']
z = xrdata['height_500']
plot_500hPa_winds_geopot(lon, lat, u, v, z, 'A')
# 850 hPa anomalies
u = xrdata['u_wind_850']
v = xrdata['v_wind_850']
t = xrdata['temp_850']
q = xrdata['q_850']
plot_850hPa(lon, lat, u, v, t, q, 'A')
# Next we move to surface anomalies
xrdata = xr.open_dataset(path+'surface_anomaly.nc')
latu = xrdata['lat']
lonu = xrdata['lon']
u = xrdata['sfc_u_wind_surface']
v = xrdata['sfc_v_wind_surface']
xrdata = xr.open_dataset(path+'surface_gauss_anomaly.nc')
t = xrdata['skin_temp_surface']-273 #convert to Celsius (273.15 would be exact)
latt = xrdata['lat']
lont = xrdata['lon']
plot_sfc_winds_skt(lonu, latu, u, v, lont, latt, t, 'A')
# TCWV anomalies
xrdata = xr.open_dataset(path+'total_column_anomaly.nc')
lat = xrdata['lat']
lon = xrdata['lon']
q = xrdata['total_column_q']
plot_TCWV(lon, lat, q, 'A')
# Next we plot the long term means
# 250 hPa long term means
xrdata = xr.open_dataset(path+'pressure_long_term_mean.nc')
lat = xrdata['lat']
lon = xrdata['lon']
u = xrdata['u_wind_250']
v = xrdata['v_wind_250']
wspd = np.sqrt(np.multiply(u, u) + np.multiply(v, v))
plot_250hPa_winds(lon, lat, u, v, wspd, 'LM')
# 500 hPa long term means
u = xrdata['u_wind_500']
v = xrdata['v_wind_500']
z = xrdata['height_500']
plot_500hPa_winds_geopot(lon, lat, u, v, z, 'LM')
# 850 hPa long term means
u = xrdata['u_wind_850']
v = xrdata['v_wind_850']
t = xrdata['temp_850']
q = xrdata['q_850']
plot_850hPa(lon, lat, u, v, t, q, 'LM')
# surface long term means
xrdata = xr.open_dataset(path+'surface_long_term_mean.nc')
latu = xrdata['lat']
lonu = xrdata['lon']
u = xrdata['sfc_u_wind_surface']
v = xrdata['sfc_v_wind_surface']
xrdata = xr.open_dataset(path+'surface_gauss_long_term_mean.nc')
# Convert to Celsius like the anomaly and extreme cases; the original omitted
# the conversion here, so the LM plot was drawn in Kelvin under a $^{o}C$ label.
t = xrdata['skin_temp_surface']-273
latt = xrdata['lat']
lont = xrdata['lon']
plot_sfc_winds_skt(lonu, latu, u, v, lont, latt, t, 'LM')
# TCWV long term means
xrdata = xr.open_dataset(path+'total_column_long_term_mean.nc')
lat = xrdata['lat']
lon = xrdata['lon']
q = xrdata['total_column_q']
plot_TCWV(lon, lat, q, 'LM')
# Finally we plot the mean of extreme precipitation days
# 250 hPa extreme means
xrdata = xr.open_dataset(path+'pressure_extreme_precip_mean.nc')
lat = xrdata['lat']
lon = xrdata['lon']
u = xrdata['u_wind_250']
v = xrdata['v_wind_250']
wspd = np.sqrt(np.multiply(u, u) + np.multiply(v, v))
plot_250hPa_winds(lon, lat, u, v, wspd, 'EM')
# 500 hPa extreme means
u = xrdata['u_wind_500']
v = xrdata['v_wind_500']
z = xrdata['height_500']
plot_500hPa_winds_geopot(lon, lat, u, v, z, 'EM')
# 850 hPa extreme means
u = xrdata['u_wind_850']
v = xrdata['v_wind_850']
t = xrdata['temp_850']
q = xrdata['q_850']
plot_850hPa(lon, lat, u, v, t, q, 'EM')
# surface extreme means
xrdata = xr.open_dataset(path+'surface_extreme_precip_mean.nc')
latu = xrdata['lat']
lonu = xrdata['lon']
u = xrdata['sfc_u_wind_surface']
v = xrdata['sfc_v_wind_surface']
xrdata = xr.open_dataset(path+'surface_gauss_extreme_precip_mean.nc')
t = xrdata['skin_temp_surface']-273 #convert to Celsius
latt = xrdata['lat']
lont = xrdata['lon']
plot_sfc_winds_skt(lonu, latu, u, v, lont, latt, t, 'EM')
# TCWV extreme means
xrdata = xr.open_dataset(path+'total_column_extreme_precip_mean.nc')
lat = xrdata['lat']
lon = xrdata['lon']
q = xrdata['total_column_q']
plot_TCWV(lon, lat, q, 'EM')
| 34.8
| 115
| 0.606457
| 3,651
| 25,926
| 4.204875
| 0.093399
| 0.025404
| 0.031266
| 0.016089
| 0.83976
| 0.838132
| 0.811034
| 0.726746
| 0.688119
| 0.680888
| 0
| 0.028323
| 0.257811
| 25,926
| 745
| 116
| 34.8
| 0.769515
| 0.252642
| 0
| 0.643392
| 0
| 0
| 0.112355
| 0.017749
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022444
| false
| 0
| 0.029925
| 0
| 0.057357
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c9cf330ba1bd8124dfd2589c2c620374854164c9
| 129
|
py
|
Python
|
vsm/extensions/corpusbuilders/__init__.py
|
inpho/vsm
|
d5fc930ccc95f275e10e151c8f05db2c05aba01f
|
[
"MIT"
] | 31
|
2015-01-30T20:09:48.000Z
|
2021-08-13T12:57:49.000Z
|
vsm/extensions/corpusbuilders/__init__.py
|
inpho/vsm
|
d5fc930ccc95f275e10e151c8f05db2c05aba01f
|
[
"MIT"
] | 80
|
2015-01-30T19:08:40.000Z
|
2019-11-02T20:15:56.000Z
|
vsm/extensions/corpusbuilders/__init__.py
|
inpho/vsm
|
d5fc930ccc95f275e10e151c8f05db2c05aba01f
|
[
"MIT"
] | 10
|
2015-09-18T01:52:23.000Z
|
2019-07-14T08:22:06.000Z
|
"""
[Documentation about the corpusbuilders extension]
"""
from __future__ import absolute_import
from .corpusbuilders import *
| 18.428571
| 50
| 0.79845
| 13
| 129
| 7.538462
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.124031
| 129
| 6
| 51
| 21.5
| 0.867257
| 0.387597
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a00b1b76675e9434fa7443fb20131e2c3b8ba4bd
| 133
|
py
|
Python
|
testproject/test.py
|
nilakshdas/jobby
|
d156e009b86efe621c4014cca97b01863ddc7774
|
[
"MIT"
] | null | null | null |
testproject/test.py
|
nilakshdas/jobby
|
d156e009b86efe621c4014cca97b01863ddc7774
|
[
"MIT"
] | null | null | null |
testproject/test.py
|
nilakshdas/jobby
|
d156e009b86efe621c4014cca97b01863ddc7774
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
import time
from jobby import JobbyJob
with JobbyJob(dict()) as job:
print(time.time())
| 14.777778
| 37
| 0.759398
| 19
| 133
| 5.052632
| 0.631579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.165414
| 133
| 8
| 38
| 16.625
| 0.864865
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0.4
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4e631b29436789cc1f071e150cf015c0f1c34347
| 29
|
py
|
Python
|
src/rocket/stage2/src/trajectory/__init__.py
|
proballstar/atlas
|
6e4eb36b7e43e750dbb281c2051439198c82f296
|
[
"MIT"
] | null | null | null |
src/rocket/stage2/src/trajectory/__init__.py
|
proballstar/atlas
|
6e4eb36b7e43e750dbb281c2051439198c82f296
|
[
"MIT"
] | null | null | null |
src/rocket/stage2/src/trajectory/__init__.py
|
proballstar/atlas
|
6e4eb36b7e43e750dbb281c2051439198c82f296
|
[
"MIT"
] | null | null | null |
from . import trajectory_calc
| 29
| 29
| 0.862069
| 4
| 29
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 29
| 1
| 29
| 29
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4ea41f1cfae3e0e482def88f3051cdd115473a14
| 1,162
|
py
|
Python
|
myblog/forms.py
|
kaka4NERV/my-blog
|
5021ef500b0f2d280f2bb824ba22b2336fd09367
|
[
"MIT"
] | null | null | null |
myblog/forms.py
|
kaka4NERV/my-blog
|
5021ef500b0f2d280f2bb824ba22b2336fd09367
|
[
"MIT"
] | null | null | null |
myblog/forms.py
|
kaka4NERV/my-blog
|
5021ef500b0f2d280f2bb824ba22b2336fd09367
|
[
"MIT"
] | null | null | null |
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField, TextAreaField
from wtforms.validators import DataRequired, Length
class LoginForm(FlaskForm):
username = StringField('Username', validators=[DataRequired(), Length(5, 20)])
password = PasswordField('Password', validators=[DataRequired(), Length(5, 20)])
remember = BooleanField('Remember me')
submit = SubmitField('Log in')
class RegisterForm(FlaskForm):
username = StringField('Username', validators=[DataRequired(), Length(5, 20)])
password = PasswordField('Password', validators=[DataRequired(), Length(5, 20)])
submit = SubmitField('Register')
class CreateForm(FlaskForm):
title = StringField('Title', validators=[DataRequired(), Length(5, 70)])
body = TextAreaField('Body', validators=[DataRequired()])
submit = SubmitField('Create')
class UpdateForm(FlaskForm):
title = StringField('Title', validators=[DataRequired(), Length(5, 70)])
body = TextAreaField('Body', validators=[DataRequired()])
submit = SubmitField('Update')
class DeleteForm(FlaskForm):
submit = SubmitField('Delete')
| 36.3125
| 88
| 0.725473
| 112
| 1,162
| 7.517857
| 0.321429
| 0.209026
| 0.199525
| 0.206651
| 0.589074
| 0.589074
| 0.589074
| 0.589074
| 0.589074
| 0.589074
| 0
| 0.017893
| 0.134251
| 1,162
| 31
| 89
| 37.483871
| 0.819085
| 0
| 0
| 0.363636
| 0
| 0
| 0.080034
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.136364
| 0.136364
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
090bf6cb918945a32dc2489c3aad8b96c8158111
| 84
|
py
|
Python
|
codeChef/practice/easy/muffins3.py
|
ferhatelmas/algo
|
a7149c7a605708bc01a5cd30bf5455644cefd04d
|
[
"WTFPL"
] | 25
|
2015-01-21T16:39:18.000Z
|
2021-05-24T07:01:24.000Z
|
codeChef/practice/easy/muffins3.py
|
ferhatelmas/algo
|
a7149c7a605708bc01a5cd30bf5455644cefd04d
|
[
"WTFPL"
] | 2
|
2020-09-30T19:39:36.000Z
|
2020-10-01T17:15:16.000Z
|
codeChef/practice/easy/muffins3.py
|
ferhatelmas/algo
|
a7149c7a605708bc01a5cd30bf5455644cefd04d
|
[
"WTFPL"
] | 15
|
2015-01-21T16:39:27.000Z
|
2020-10-01T17:00:22.000Z
|
import sys
print "\n".join(str(int(i) / 2 + 1) for i in sys.stdin.readlines()[1:])
| 21
| 71
| 0.619048
| 17
| 84
| 3.058824
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042254
| 0.154762
| 84
| 3
| 72
| 28
| 0.690141
| 0
| 0
| 0
| 0
| 0
| 0.02381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.5
| null | null | 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
093bb0f3b3b32fbfcbd3f7ae851dbe3109a2278d
| 9,146
|
py
|
Python
|
1525. Number of Good Ways to Split a String.py
|
Nriver/leetcode
|
e0b30fae8a31513172fcbd13d67eded8922ba785
|
[
"MIT"
] | null | null | null |
1525. Number of Good Ways to Split a String.py
|
Nriver/leetcode
|
e0b30fae8a31513172fcbd13d67eded8922ba785
|
[
"MIT"
] | null | null | null |
1525. Number of Good Ways to Split a String.py
|
Nriver/leetcode
|
e0b30fae8a31513172fcbd13d67eded8922ba785
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# @Author: zengjq
# @Date: 2020-10-21 16:22:29
# @Last Modified by: zengjq
# @Last Modified time: 2020-10-21 16:49:17
import collections
class Solution:
# 超时
def numSplits1(self, s: str) -> int:
res = 0
for x in range(1,len(s)):
if len(collections.Counter(s[:x]))==len(collections.Counter(s[x:])):
res += 1
return res
# 别人的, 上面的思路还差了一口气
# 不过这个解法并不好
# 58 mem 8
def numSplits(self, s: str) -> int:
res = 0
s1 = collections.Counter('')
s2 = collections.Counter(s)
for x in s:
s1[x] += 1
s2[x] -= 1
if s2[x] == 0:
s2.pop(x)
if len(s1) == len(s2):
res += 1
return res
s = Solution()
print(s.numSplits('acbadbaada'))
print(s.numSplits('aacaba'))
print(s.numSplits('aaaaa'))
# print(s.numSplits("feggbadfedggfgfbaccbcgbcfffecgdebcccedbeeeebagdbfbccdbddaccdbafbacgfeeecgdcgbceddfbgefecdcaefccbgfcegfaaegcefagadbfbebgadbbadaaeaafdaacbfacefcabdgfaeecbfdgaadaegababgfecgabcbgdbefgdddebbbbbeeceefaecdegacgfgeegbcdcfbaaaedafdgfbdggddfdgadgcebfefcfgdeabcebebffbageacbgfdagbefcbceeeeegdcddbbacccafdcfbfffdbbebgdfceebeaadccfcfaabaecfadbebebgggeddebbcbffgcddafgdfaeccceafcbegdgcgfcedaaggdfedbfeffcgeafdgfccaeeccdeabdbecbaefcacfegbfaaefdbdfebgbefgeecgfebbdacddagabefbabdfbfccafbgfcbedafedgafgbefbadfggfdeacddfebbffeabcbfgacacfccbceagcabfcagcgdfadgbfdfeeffbefbfecbgaefddgfgeabagcefdbcbccceddggcdfaaggfabgccafbeeabdbgaadfbdeeccddcdebdecfbggcbaafgeeffbcfeabccbbegfecgfdcfcdedbegegbafgcecbbgcegeddebcgdegegffefgggccggccaegaffabgfbcgafgeddaccedbddggfgeefcbbfccegaefadaacfgdgffafbcaaefgadgfeaabfgcgddabgfbabccefgdbbfefdbccbgecedafceadaagfffgccafbbadaafagbeaebdccgbcffaccfagcefdbdfgbdagbdfdgacceebfabeccaadcfbdgdgddddccdaeeabdadebgadfffabcdfcceccdgbedgaeabcgfaddfbegbfgfgbdeceeebgdcdaaabcbbbdfgabgbgbdaffaedcbgcgfbfdbfadfbbeecdfebaadffbcecbdfgegggaeagbdfceeefgfdfcgebeegcfcfacedffdedggafgbgcfdaefeaggadffgcaadbgbdaccdbcfacacfdbaefefabfbbgffddefcbbfadafagebbgcadaefgegfbgafbcfbcdcebebbaddgaebbbabcbeegecbcggaabeecgcedcgfbbfegfceeadagdggfeeecccceggabebcfaaabeccgdgaagagccedgaaaacaccddfccbddddeebeeaccebcedgdefgccabfgbecafefcaafafbfffbbffbcagbfecdceedecebgcbgbfdddbabebgdbfcgcgbaabfbebafacfbfcacdcfaffdabgfabaddfecccgbfcaddgbbeaafggcbdfabagfbaeaafebgbgbeaefgbbeffgggegeeeedbgaebegcbdbeedaabdgdegacfdfeeccaeebbebecddddccaegbdcbegdbadccccefeeeceeafcebebbbabdceddbdgcaaaefgbggcaggcbddecfdgbdfedgcgfgfcbeebcbdcdeaffcbffbafdagecdcbgcefcbagafdcdegcdeaabcafabbbbbeccgbfeagbdgfbcfcbfbbaaegdcecbgdeeaaaddbagbeagccbaaddffadeeafcedcgaccdbfdfbbfffabgeacegbefefbdbfcbabcecbdbdbdecbcbcbeccabcbfbafcgbfbefacfbedfecfgdacgdgceaebgadfeeaccgfcceabbfdaaeagbfadecfafagaaecccbacedgdffcbgebeabaabgeccecaacgaefgcgffbedaabbbaabeadbbfaagcgdfgcageffdegegfcabddddgaafbgaafgacgdecggbcdfcfccfccfeggdgadcebdbdbffcfcgefadcdaface
ddbegabafaabbgffeddbdccefgadfgfddeeafbgfbddggbccdddfbcffdadedbfaeeaffgfgcdebaaafdegfefaaedbbageggaecaaadfgbeaeceabaebdefgdcgfcbcgbaceggccefgaggeccgbafgcaagabdaacfeeaacgbeabgfaecfggbddcaaeaffegdfdfbbfdbcgdfbbaaabedacbbbgfcdcagbgfedaedeffbbcadgagagbbeceafeffcgcefgfecffeggdecggeafbaedbebabbgadgaceacbbeeeegadefaebbgacgafbgfgagccccaacdgcdagagcbcddadbceadgaaebffdbdfeeeebdaedaedbggefaffgcbccaffbgdgaaacdgcbbdagfbccdadcfeecegdfbffbcfccggbbgddcgfcbgdgfcgcdgfcdgdabfcbeagdebgabgbgeggedgbacffcefefffcdceccaecffbgfdbedcddbafbefdfafebedbbfbfbfefacbeceggcfcafffcafddfcdeedcacgeaacddfabcdcadfbagbfadegbedbfbfaaecgcfgbddcgfggegcfcefdgefdeddaeaddagacefbfbdeebeggabebafbggafdaddbcfbggabcdaabcfbbdbfeagacaabddcfcffcegaabbaffbfbbcbbggcecdfggdcdbcfdefcgccaedcdgceaeccfcbfaddggdbgbgffdfcdgdbeecffbddeggecgfdfgcfebcdabedgbaegbfccceegcbddbeabeeaedddeaegcbeecgbaedaegfeddfccegacdffbfefcabcdcgdcbfeebddfdbgdgabccfbbecdfddbeceecedfabbebccbddggdcedbfedeeebgafeagddefebgbcbccfcgfdaaafbbggdeaabedgdebedacffeafeabgbadafaeeaddfgcdefcaccacffecefgeddcbebcbfcdcfffecbffagcgdaeadbbfadcbgfdefefaagcdgacfbaffcdgaffcacbfeebebafgagdgfeecafbcfbedbdabaggcbegccbadaeefegbabgddccbagceeafcgcaebdddgbgcadeaccgdeeddgdbeagddgbdgbgfcggfbebbcddcagecfgegdacecdeedffebafgagfgffebbebffabgegcagggacdddcfgdabdfefeggeefaeeccaegddbbefedfdegfcgfbbefgcffbdgcfdaddaefafbceedefgffcaacbeadfggacaecacdabadcbcccfagagdbafgadadffadbcfffacabadeeeecaedgfaecfbaebdcddfbeegdfabfbddacefcegafgaefgaebgceddabdcagbacfbeffdebafacdffdgcdffedeaagfcdegdffadacdbeffefffbcabgfaagaeedecdgffgdcccdfbdcfbgccbfaefdcafagcafgfbgcgbfdcgcbfaabbfgbffbfeggcbcefdbebcggefafefaedcbeedgeccafbeagffaebecadeebfbfbebdcfdecgfgaaefebfbabdceecfeabecgegaadcdbebfbcdcegagedcccbbdbgddgeagaccafbgbgaaefcgecbfdgaeddcfgcecaegabcgedbfccefeaeecdacaccfdcfaaddebadbaeceegdgeafbggfgefcdbecebbbcgfaeccgdcegfbfcacfaeggadbfcfbedcfgggefffafecdcdefecdefegcfdgbcbefbagcfbebgdgfbgcbfaeagffdfbdcefecegabecfffagdaddcaaeeefgcbegcgcfdbdegfbdegfbdfdedbbddcafgbcddbdbdadagadfebfeaaceafdbggbfabebgbfdagcccggeeebedacddgega
dcebaefeggbaagbacafgbfgabfgdeccbdfdbabfggdeadbcgeeacdddgedddfegbebegbadeffaecbfedfafddccfaeeagcffadfgaaeecbbaaddgfcggbbebdgebacegcbgebccafgacfeadfccgeegdgabecgagffedeadfbbebbcfcgfddebgebbbgbggagdabgacgeggaffeeegabgaggadfdfddcaabcgdadbdfbfbaedfebgeagccfgaaegcffcgfgedfcadabafaebbaeabbfbbeggfbebbgacbfdddcfagcadagdcegdaaacgcgccgbdeaggdcdbecffggfdeebcbffbdabdbaeebggbdbadgfabaafcedbdebgdbcecgacbcdddcgfeedabfbcgdgccfabfgdbcdgcadcdcaeebeeebecegcddbggefbcagebdcdbbbfcbgdcgggecdceabaebecbdceefabbcgfcfecdabfecgbfbfegfdadggcgagdggababdaedabecbebafbccadfdgbgcfbabbffeabgacgcbgbbcfdcfagfefegeeaeaffafcfccebfeadcaeeggacgbddefcfcfecgegbbfdfabgbfegbebcfgfefeaddgdbaffdababfgbcbfbfbegdfacgbfcbfaaaaabaegeeaafbbebfafcbgffccfebcgffgaeecdfebdfdafbfdadcagbbbagcdfedfecfbdgbbeedcfdfaacbdgcagbbaccfabbdbbedfgdgbdgddagbedgafddedgfcdfacdbccbbcbacbabafgbbgbcegegdagabfdfgbbgcaefbdbdgaadaafebegaggafadfefbdcadcddaecdaeeffbddfgeaabcegcecbaceeafdcgcdceccccddecgeddcebggcbbfebgafbbaacgaefegfbegaagafgcbebacdabebfcbgfebeggddfdbdecgaabfegdgdbabdafebgaaaedfddbdgcbecagegdgfaabfgcabcccgebdgbffaceccgcgbgabgdbcgdefcdfaabdegbebfgbfebgedecgagefdeeeacfababbccbbcegcgcafeddeggegbbbgacebaegdeedcddgcdfeeedbbegecedaabagfagdbeeegdcegcdafgdgdgbcaebffgacdagdefeccgceegdccbebgfbcfcgbdacfgebbgedacgfbfdbcfgaadgcdgebeaefadgdfdcgbeecfdefccbdefeceaacceegeccafgfgecdbbdcbededgfadgbcbdbbfefbdcgcfafaecfedffcegdgfdbccagfacgfbfdcfabgbbfbfcbcgbbaefdfdbccgadegffeddegcddeecbcageebbgeagaedeadeecdaebbcadfbcaabeebgfbddaebgcebgecdfgcccbacgefadfcfadfbbcgffdbfbdadcddacbfabaecdbaeebgceabaadbbbcedebbbcgdgbdfbfbbcegbacgbbacacgagegbgdbdecbgcgdcdgegebfaaefefbadaeeabbegdcaecbcebbdfdefbdeadfeaaffcbggeabccaeedgffdebggedebbfddcecfafbffecedgbfgbgdgbggeebageffbbdddfcacddddedbdaadaagcecggbbfagfeefcecadbaeacgdbgbfbfaecdacccacbfffggceggcdbecfeebbfbgdbcbagdagfbbbcdbabgcbfadfdecddccacaeagcadcgabafeaafdeacdfgabdceeccedffcgcbdbcfcaafeceffbdacdgbafbgdfcaggfcaacbcdbbaabdgdbfgfdcbfcdfceebcggfbabfbeeafbdebfcbbagedgafbgaecggcccdbadeadbgbcbaddffafbeedcfgagecebbgffedbeda
fgbeegddabcgdddbcaffbafbadbgbecgaedgabcedbdadbfgbdedeggbecgdbeafcdgebddffbgdebbgddfegbcbddeadbecbbabcffcgcfdadfdddebcedfacdfbdfgcdcageddfbgffgbdcedebdbdcbefgccebgdbfdbaddffbfbbaefbfdbfbaegdgbcadafedadcggfcdebfbgbcedgaadabbcgefbecdcedbdcfbbfgegdefaecdcafgcdbbgdaecfegbgcdcagcaccbccegccgfddcbfcbdaecccaabddfceebgdadafbccdffebfdbfdggfgcbcbacbdadacccacdggdggcfgbddcfgabdebaebdedcagcbgdcgegedeagbdeeegafddgdabfgeeaffecaegfedcgcecaecgfdecbdafccadggbgdabffbcffcgcgdgedgfefdcdagfgacfbfcggcdgefcbfbbbddgeefacfcbdaddeceaebdfbaagccaaecaecdcgbabaaeaebeagabbdacagbffgedbbadfaffaabbeecgcffffaefdefcbegagdgfdfgedcffcbgcbadggffadefeacbbfccfcgebebgdggcbebdcfceafcgdbgaadggbbaffegdefccbaggceeabccdgfcaddgffagefcfeffeegagfgbaecefbfaaggcedaffeddbeegdaadbdfccbbbbedcgdgcebaegaefdacbdfefddfedbgddgbfccagffggefdagcfffdbdfaeadbcbfecfadaccfegfaecbbafdeafeeeccbdfcdabfgbbbabcgdegdbbgaegcafdabebfbegeacfdefbegecgbfeagdgccdaeddgbefafcafcbeadggcgfeaeafcgeadbfagadedfcdcgcagaffbcdggaagacfffgcfbfacgfddgfaaadbgaececbbfcbfaccbbgegbgadgfbcefbebdcfbaadaddgcccfgdcaddfgfaadabdagddafbceaaddfdeadgeeadeeecffcadbaaabcbcaaggeadacfcffabcdbddcgbddegccbdaedddeafebaegegedeaagaeceaaeggecgeadfbgfddgcagffeebbbeegdedadagbbefdefagceeabegdceeabfaagadfbadgfgccecgebbcabffabbdeabdaaddecfabebfcbcbgadgbbfegfdbcfdeabbddcaabfbgbfeaeaebaccbgebfdcdagcbbaecdafcaeccgcbegcdeebbbcbcfddcbgfbdbdbeadbdgcbadbcgcecddefdafaadbdcgggedaedebbegaggbgbbbgcbbddfdagfgcefafcbbbcddcdddeedgddbfgaecdaccafdfdceggaccgcccbgddbgefgagcbcbfbdcaeggcfafbfceaaffegceeeacacaacbfcegegfgbebcdfffcabdecaadbdfbafbdfcdaafddbfgfbaadbfccebcdfagdegffaagccfcacffbdcedddbcaffbgffggcgcfdcedgddbbffcgdbacfcbdbgffcdgbagfgagbfagfbcdggadeceecgabaaecfefgeafgfgbecefgfdeceeeegggabcabcggedfefbcegefdegaededbcedbfbbacfbgddgccbfefgdedegabcaffbfcbgafcbfebddabcdaedafbcabdagfadefebegdaecaeeedbedcbfeebeffgaabgafadffddgebedaaffccbcgeddbdbbgaabcffcgfdbgfdfddfafafdebgedceagfcdfeddfffecafcgecagfecfdeagdeafegfaafafcffffacffbecbdaababfgfeeceeedabcgfcedecbddbgagabeccgafebdcceebeabaefbgceeedfdcbgfbedegfebbce
egffcfagfdadafffgbabdbcfbbbdedfecdedbacaecbgdefgeabfcedeaaceadfdaccgdegfaecgebeacebeefbgadcgdadacgffgggadgdgcbdgbadaacaaegebcedegebaedbgggbbadccdgeeacgadgabbacfdgdebcgabfddadafgafeffabbegeddeadafebdbagfcceagdecgbgefefefecafcggcccbgefgbcbgdbaggbab"))
| 234.512821
| 8,249
| 0.950251
| 125
| 9,146
| 69.528
| 0.432
| 0.008284
| 0.006904
| 0.002301
| 0.008745
| 0.003452
| 0
| 0
| 0
| 0
| 0
| 0.005545
| 0.033785
| 9,146
| 39
| 8,249
| 234.512821
| 0.977934
| 0.92084
| 0
| 0.25
| 0
| 0
| 0.029412
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0.041667
| 0
| 0.25
| 0.125
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0959b6e48c7d735c236a0084789c3d308942cacc
| 73
|
py
|
Python
|
utils/__init__.py
|
atlashugs/dqn_pytorch
|
131295448e9a993fd5e0a3a509bd76204f644396
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
atlashugs/dqn_pytorch
|
131295448e9a993fd5e0a3a509bd76204f644396
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
atlashugs/dqn_pytorch
|
131295448e9a993fd5e0a3a509bd76204f644396
|
[
"MIT"
] | null | null | null |
from .utils import set_requires_grad, create_exp_logfile, performance_avg
| 73
| 73
| 0.890411
| 11
| 73
| 5.454545
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068493
| 73
| 1
| 73
| 73
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
11ad07b33bbd8e296a06cb7a7b887b52b1448e45
| 180
|
py
|
Python
|
app/PyDrive/__init__.py
|
eduardo98m/Tree-Finder
|
50cf7a7391c487ef0f9e850799f800efc09f7294
|
[
"MIT"
] | 1
|
2021-09-21T01:25:26.000Z
|
2021-09-21T01:25:26.000Z
|
app/PyDrive/__init__.py
|
eduardo98m/Tree-Finder
|
50cf7a7391c487ef0f9e850799f800efc09f7294
|
[
"MIT"
] | 1
|
2022-01-27T14:11:12.000Z
|
2022-01-27T14:11:12.000Z
|
app/PyDrive/__init__.py
|
eduardo98m/Tree-Finder
|
50cf7a7391c487ef0f9e850799f800efc09f7294
|
[
"MIT"
] | null | null | null |
#from pydrive_functions import write_trees_csvs
"""
write_trees_csvs()
df = get_trees_dataframes()
df2 = get_image_ids(df, ids_file.images_ids)
df2.to_csv('result.csv')
"""
| 22.5
| 48
| 0.75
| 28
| 180
| 4.392857
| 0.642857
| 0.162602
| 0.227642
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012658
| 0.122222
| 180
| 8
| 49
| 22.5
| 0.765823
| 0.905556
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
11b3a5e2d44830b589bfce4d9f1e4ca790071f36
| 213
|
py
|
Python
|
answers/hackerrank/Raw Input.py
|
FeiZhan/Algo-Collection
|
708c4a38112e0b381864809788b9e44ac5ae4d05
|
[
"MIT"
] | 3
|
2015-09-04T21:32:31.000Z
|
2020-12-06T00:37:32.000Z
|
answers/hackerrank/Raw Input.py
|
FeiZhan/Algo-Collection
|
708c4a38112e0b381864809788b9e44ac5ae4d05
|
[
"MIT"
] | null | null | null |
answers/hackerrank/Raw Input.py
|
FeiZhan/Algo-Collection
|
708c4a38112e0b381864809788b9e44ac5ae4d05
|
[
"MIT"
] | null | null | null |
#@result Submitted a few seconds ago • Score: 10.00 Status: Accepted Test Case #0: 0.01s Test Case #1: 0s Test Case #2: 0s
# Enter your code here. Read input from STDIN. Print output to STDOUT
print raw_input()
| 35.5
| 122
| 0.7277
| 40
| 213
| 3.875
| 0.8
| 0.154839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069364
| 0.187793
| 213
| 5
| 123
| 42.6
| 0.820809
| 0.873239
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
11db112c6507b55a494b0c2bfbd98c7425d3426c
| 187
|
py
|
Python
|
dutil/jupyter/_jupyter.py
|
mysterious-ben/dutil
|
bacfba8982b8562ea6d943ac1e9c74f0fba75a7e
|
[
"Apache-2.0"
] | null | null | null |
dutil/jupyter/_jupyter.py
|
mysterious-ben/dutil
|
bacfba8982b8562ea6d943ac1e9c74f0fba75a7e
|
[
"Apache-2.0"
] | 6
|
2020-11-25T12:32:57.000Z
|
2020-12-28T04:38:15.000Z
|
dutil/jupyter/_jupyter.py
|
mysterious-ben/dutil
|
bacfba8982b8562ea6d943ac1e9c74f0fba75a7e
|
[
"Apache-2.0"
] | null | null | null |
from IPython.display import display
from dutil.transform import ht
def dht(arr, n: int = 2) -> None:
"""Display first and last (top and bottom) entries"""
display(ht(arr, n))
| 18.7
| 57
| 0.673797
| 29
| 187
| 4.344828
| 0.689655
| 0.063492
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006711
| 0.203209
| 187
| 9
| 58
| 20.777778
| 0.838926
| 0.251337
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
11ddab5bf1926ee324722f85b90689af3a28097d
| 120
|
py
|
Python
|
app/home/views.py
|
zhouzhuowei/movie_project
|
282cb3501ef5da0cee0700f195e09e63685d8ec0
|
[
"Apache-2.0"
] | null | null | null |
app/home/views.py
|
zhouzhuowei/movie_project
|
282cb3501ef5da0cee0700f195e09e63685d8ec0
|
[
"Apache-2.0"
] | null | null | null |
app/home/views.py
|
zhouzhuowei/movie_project
|
282cb3501ef5da0cee0700f195e09e63685d8ec0
|
[
"Apache-2.0"
] | null | null | null |
#coding:utf8
from . import home
@home.route("/")
def index():
return "<h1 style='color:green'>this is home</h1>"
| 13.333333
| 54
| 0.633333
| 18
| 120
| 4.222222
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03
| 0.166667
| 120
| 8
| 55
| 15
| 0.73
| 0.091667
| 0
| 0
| 0
| 0
| 0.396226
| 0.226415
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
11eadfb2be5a1b8deb1b22d358de50179bcef4fa
| 116
|
py
|
Python
|
mozumder/template/components/__init__.py
|
mozumder/django-mozumder
|
887ce303249eac2d77de062fd57023dbc4b782dd
|
[
"MIT"
] | 1
|
2020-06-13T06:12:16.000Z
|
2020-06-13T06:12:16.000Z
|
mozumder/template/components/__init__.py
|
mozumder/django-mozumder
|
887ce303249eac2d77de062fd57023dbc4b782dd
|
[
"MIT"
] | 4
|
2020-06-18T03:53:29.000Z
|
2021-06-09T17:56:12.000Z
|
mozumder/template/components/__init__.py
|
mozumder/django-mozumder
|
887ce303249eac2d77de062fd57023dbc4b782dd
|
[
"MIT"
] | null | null | null |
from .component import code, Component
from .components import Components
from .raw import raw
from .div import div
| 23.2
| 38
| 0.810345
| 17
| 116
| 5.529412
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146552
| 116
| 4
| 39
| 29
| 0.949495
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ee9cd8af90360348f0c21f560a9403b1ca579459
| 179
|
py
|
Python
|
apps/puzzles/admin.py
|
madjaqk/puzzle_master_v2
|
e50b0f02abbf32aebe9583152bd9a5b43f5da7d7
|
[
"MIT"
] | null | null | null |
apps/puzzles/admin.py
|
madjaqk/puzzle_master_v2
|
e50b0f02abbf32aebe9583152bd9a5b43f5da7d7
|
[
"MIT"
] | 8
|
2020-02-07T04:11:07.000Z
|
2022-02-10T07:04:57.000Z
|
apps/puzzles/admin.py
|
madjaqk/puzzle_master_v2
|
e50b0f02abbf32aebe9583152bd9a5b43f5da7d7
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import PuzzleSet, Puzzle, PuzzleAnswer
admin.site.register(Puzzle)
admin.site.register(PuzzleSet)
admin.site.register(PuzzleAnswer)
| 25.571429
| 51
| 0.832402
| 23
| 179
| 6.478261
| 0.478261
| 0.181208
| 0.342282
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078212
| 179
| 7
| 52
| 25.571429
| 0.90303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
ee9d02a183cd846bc680679ffaff406bc4497237
| 241
|
py
|
Python
|
makmal/admin.py
|
ParmenidesSartre/Makmal-Record-System
|
f121e5b62899b5a084bbd62f8b7132dca5c72691
|
[
"MIT"
] | null | null | null |
makmal/admin.py
|
ParmenidesSartre/Makmal-Record-System
|
f121e5b62899b5a084bbd62f8b7132dca5c72691
|
[
"MIT"
] | null | null | null |
makmal/admin.py
|
ParmenidesSartre/Makmal-Record-System
|
f121e5b62899b5a084bbd62f8b7132dca5c72691
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Report
@admin.register(Report)
class ReportAdmin(admin.ModelAdmin):
list_display = ['project_name','contractor_name', 'done_on']
search_fields = ['project_name','contractor_name']
| 34.428571
| 64
| 0.771784
| 30
| 241
| 5.966667
| 0.666667
| 0.122905
| 0.234637
| 0.27933
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107884
| 241
| 7
| 65
| 34.428571
| 0.832558
| 0
| 0
| 0
| 0
| 0
| 0.252066
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
eec24f4f728e76d4c2f32efa18d811ca23845931
| 111
|
py
|
Python
|
bot.py
|
ruberVulpes/a-discord-bot
|
0996a39b01917bf4f1a9b2a33ca46033ca2c9eb5
|
[
"MIT"
] | null | null | null |
bot.py
|
ruberVulpes/a-discord-bot
|
0996a39b01917bf4f1a9b2a33ca46033ca2c9eb5
|
[
"MIT"
] | 2
|
2021-02-22T05:08:30.000Z
|
2021-04-06T20:29:39.000Z
|
bot.py
|
ruberVulpes/a-discord-bot
|
0996a39b01917bf4f1a9b2a33ca46033ca2c9eb5
|
[
"MIT"
] | null | null | null |
from bot import client
from env import discord_token
if __name__ == '__main__':
client.run(discord_token)
| 18.5
| 29
| 0.765766
| 16
| 111
| 4.6875
| 0.6875
| 0.32
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162162
| 111
| 5
| 30
| 22.2
| 0.806452
| 0
| 0
| 0
| 0
| 0
| 0.072072
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
eee40af86edb02a1603fc38dde4c32dda368fc54
| 299
|
py
|
Python
|
accounts/admin.py
|
devmedtz/sogea
|
54cf257856cae451ad87e2396b8e44a34c0c6daf
|
[
"MIT"
] | 3
|
2021-04-08T12:50:40.000Z
|
2021-05-09T07:36:52.000Z
|
accounts/admin.py
|
devmedtz/sogea
|
54cf257856cae451ad87e2396b8e44a34c0c6daf
|
[
"MIT"
] | 5
|
2021-06-05T00:07:57.000Z
|
2022-03-12T01:00:20.000Z
|
accounts/admin.py
|
devmedtz/sogea
|
54cf257856cae451ad87e2396b8e44a34c0c6daf
|
[
"MIT"
] | 2
|
2021-04-07T15:17:46.000Z
|
2021-05-09T06:29:57.000Z
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.models import Group
from . import forms
from . models import CustomUser, Profile
# Register the new UserAdmin...
admin.site.register(CustomUser)
admin.site.register(Profile)
| 27.181818
| 64
| 0.816054
| 41
| 299
| 5.95122
| 0.439024
| 0.122951
| 0.209016
| 0.172131
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110368
| 299
| 10
| 65
| 29.9
| 0.917293
| 0.09699
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.714286
| 0
| 0.714286
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e11651292bb41cb3e4a196b410d59da0b224bbed
| 180
|
py
|
Python
|
adminlte/views/__init__.py
|
riffy/gta-adminlte-django
|
cc3efd382aa7ad3acb2828e7c7038a8146426e9d
|
[
"MIT"
] | 2
|
2021-05-28T06:45:04.000Z
|
2022-03-26T18:45:50.000Z
|
adminlte/views/__init__.py
|
riffy/gta-adminlte-django
|
cc3efd382aa7ad3acb2828e7c7038a8146426e9d
|
[
"MIT"
] | 2
|
2021-05-27T08:14:37.000Z
|
2021-05-27T15:42:04.000Z
|
adminlte/views/__init__.py
|
riffy/gta-adminlte-django
|
cc3efd382aa7ad3acb2828e7c7038a8146426e9d
|
[
"MIT"
] | null | null | null |
from .index import index
from .dashboard import dashboard
from .player_list import player_list
from .player_info import player_info
from .server_statistics import server_statistics
| 36
| 48
| 0.866667
| 26
| 180
| 5.769231
| 0.346154
| 0.133333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105556
| 180
| 5
| 48
| 36
| 0.931677
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e11e386e4f90322a1d63e6c01faf87cb100eab96
| 69
|
py
|
Python
|
src/__init.py
|
asepscareer/cnnindonesia-api
|
81811203548d1aad9435e1a4751d35576bebc143
|
[
"Apache-2.0"
] | 1
|
2022-03-20T08:00:38.000Z
|
2022-03-20T08:00:38.000Z
|
src/__init.py
|
asepscareer/cnnindonesia-api
|
81811203548d1aad9435e1a4751d35576bebc143
|
[
"Apache-2.0"
] | null | null | null |
src/__init.py
|
asepscareer/cnnindonesia-api
|
81811203548d1aad9435e1a4751d35576bebc143
|
[
"Apache-2.0"
] | null | null | null |
from .base import GetData
from .utils import parse, headers, base_url
| 34.5
| 43
| 0.811594
| 11
| 69
| 5
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 69
| 2
| 43
| 34.5
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0100d037b53fb4705c1f95b329350b7af5f63d92
| 8
|
py
|
Python
|
.idea/VirtualEnvironment/Lib/site-packages/tests/outcomes/imports/test_import_absolute_error/main2.py
|
Vladpetr/NewsPortal
|
cd4127fbc09d9c8f5e65c8ae699856c6d380a320
|
[
"Apache-2.0"
] | null | null | null |
.idea/VirtualEnvironment/Lib/site-packages/tests/outcomes/imports/test_import_absolute_error/main2.py
|
Vladpetr/NewsPortal
|
cd4127fbc09d9c8f5e65c8ae699856c6d380a320
|
[
"Apache-2.0"
] | 5
|
2021-04-08T22:02:15.000Z
|
2022-02-10T14:53:45.000Z
|
.idea/VirtualEnvironment/Lib/site-packages/tests/outcomes/imports/test_import_absolute_error/main2.py
|
Vladpetr/NewsPortal
|
cd4127fbc09d9c8f5e65c8ae699856c6d380a320
|
[
"Apache-2.0"
] | null | null | null |
x = 103
| 4
| 7
| 0.5
| 2
| 8
| 2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.6
| 0.375
| 8
| 1
| 8
| 8
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0121a309aee84342b0c11d6c28a1b0d3428f8c34
| 38
|
py
|
Python
|
conjureup/controllers/juju/clouds/common.py
|
iMichka/conjure-up
|
8e4599e6f58b52163384150d8d71e7802462d126
|
[
"MIT"
] | 1
|
2019-06-26T23:39:13.000Z
|
2019-06-26T23:39:13.000Z
|
conjureup/controllers/juju/clouds/common.py
|
iMichka/conjure-up
|
8e4599e6f58b52163384150d8d71e7802462d126
|
[
"MIT"
] | null | null | null |
conjureup/controllers/juju/clouds/common.py
|
iMichka/conjure-up
|
8e4599e6f58b52163384150d8d71e7802462d126
|
[
"MIT"
] | 1
|
2020-10-05T14:42:31.000Z
|
2020-10-05T14:42:31.000Z
|
class BaseCloudController:
pass
| 7.6
| 26
| 0.736842
| 3
| 38
| 9.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.236842
| 38
| 4
| 27
| 9.5
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
012814f7e74de219ea4ae2201ad5b9fddc5f39ac
| 450
|
py
|
Python
|
geonet/tests.py
|
bbengfort/kahu
|
57a2ba417d545a57a987b3620e46e56f023134d6
|
[
"MIT"
] | 1
|
2018-08-27T10:07:06.000Z
|
2018-08-27T10:07:06.000Z
|
geonet/tests.py
|
bbengfort/kahu
|
57a2ba417d545a57a987b3620e46e56f023134d6
|
[
"MIT"
] | 22
|
2018-06-09T14:16:36.000Z
|
2018-06-15T10:56:58.000Z
|
geonet/tests.py
|
bbengfort/kahu
|
57a2ba417d545a57a987b3620e46e56f023134d6
|
[
"MIT"
] | null | null | null |
# geonet.tests
# Test Cases for the geonet app
#
# Author: Benjamin Bengfort <benjamin@bengfort.com>
# Created: Mon Jun 11 08:10:28 2018 -0400
#
# ID: tests.py [] benjamin@bengfort.com $
"""
Test Cases for the geonet app
"""
##########################################################################
## Imports
##########################################################################
from django.test import TestCase
# Create your tests here.
| 22.5
| 74
| 0.468889
| 45
| 450
| 4.688889
| 0.666667
| 0.227488
| 0.113744
| 0.14218
| 0.227488
| 0.227488
| 0
| 0
| 0
| 0
| 0
| 0.040609
| 0.124444
| 450
| 19
| 75
| 23.684211
| 0.494924
| 0.524444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
01504092bc68b766c3ea753454d86ccb90b45033
| 74
|
py
|
Python
|
Solutions/Python/No zeros for heros(8 kyu).py
|
collenirwin/Codewars-Solutions
|
14bad3878d3fc37c7e73cbaaaa24cd28f759ce3b
|
[
"MIT"
] | null | null | null |
Solutions/Python/No zeros for heros(8 kyu).py
|
collenirwin/Codewars-Solutions
|
14bad3878d3fc37c7e73cbaaaa24cd28f759ce3b
|
[
"MIT"
] | null | null | null |
Solutions/Python/No zeros for heros(8 kyu).py
|
collenirwin/Codewars-Solutions
|
14bad3878d3fc37c7e73cbaaaa24cd28f759ce3b
|
[
"MIT"
] | null | null | null |
def no_boring_zeros(n):
return int(str(n).strip("0")) if n != 0 else n
| 37
| 50
| 0.635135
| 16
| 74
| 2.8125
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032787
| 0.175676
| 74
| 2
| 50
| 37
| 0.704918
| 0
| 0
| 0
| 0
| 0
| 0.013333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
015b386dca0de94100bb74f9f968da406871608d
| 7,305
|
py
|
Python
|
z2/part2/interactive/jm/random_fuzzy_arrows_1/609933792.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 1
|
2020-04-16T12:13:47.000Z
|
2020-04-16T12:13:47.000Z
|
z2/part2/interactive/jm/random_fuzzy_arrows_1/609933792.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 18
|
2020-03-06T17:50:15.000Z
|
2020-05-19T14:58:30.000Z
|
z2/part2/interactive/jm/random_fuzzy_arrows_1/609933792.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 18
|
2020-03-06T17:45:13.000Z
|
2020-06-09T19:18:31.000Z
|
from part1 import (
gamma_board,
gamma_busy_fields,
gamma_delete,
gamma_free_fields,
gamma_golden_move,
gamma_golden_possible,
gamma_move,
gamma_new,
)
"""
scenario: test_random_actions
uuid: 609933792
"""
"""
random actions, total chaos
"""
board = gamma_new(8, 5, 5, 9)
assert board is not None
assert gamma_move(board, 2, 0, 4) == 1
assert gamma_move(board, 2, 4, 4) == 1
assert gamma_move(board, 3, 1, 3) == 1
assert gamma_move(board, 3, 1, 2) == 1
board437283538 = gamma_board(board)
assert board437283538 is not None
assert board437283538 == ("2...2...\n"
".3......\n"
".3......\n"
"........\n"
"........\n")
del board437283538
board437283538 = None
assert gamma_move(board, 4, 2, 6) == 0
assert gamma_move(board, 4, 3, 4) == 1
assert gamma_busy_fields(board, 4) == 1
assert gamma_move(board, 5, 1, 3) == 0
assert gamma_golden_possible(board, 5) == 1
assert gamma_move(board, 1, 0, 4) == 0
assert gamma_move(board, 1, 4, 0) == 1
assert gamma_move(board, 2, 3, 0) == 1
assert gamma_move(board, 2, 7, 2) == 1
assert gamma_free_fields(board, 2) == 32
assert gamma_move(board, 3, 1, 4) == 1
board485125495 = gamma_board(board)
assert board485125495 is not None
assert board485125495 == ("23.42...\n"
".3......\n"
".3.....2\n"
"........\n"
"...21...\n")
del board485125495
board485125495 = None
assert gamma_golden_possible(board, 4) == 1
assert gamma_move(board, 5, 1, 7) == 0
assert gamma_move(board, 5, 3, 3) == 1
assert gamma_free_fields(board, 5) == 30
assert gamma_move(board, 1, 4, 5) == 0
assert gamma_move(board, 1, 6, 2) == 1
assert gamma_golden_possible(board, 1) == 1
assert gamma_move(board, 2, 1, 5) == 0
assert gamma_move(board, 2, 3, 4) == 0
assert gamma_move(board, 4, 1, 3) == 0
assert gamma_move(board, 5, 0, 0) == 1
assert gamma_move(board, 5, 2, 4) == 1
assert gamma_move(board, 1, 3, 3) == 0
board786131318 = gamma_board(board)
assert board786131318 is not None
assert board786131318 == ("23542...\n"
".3.5....\n"
".3....12\n"
"........\n"
"5..21...\n")
del board786131318
board786131318 = None
assert gamma_golden_possible(board, 2) == 1
assert gamma_move(board, 3, 3, 0) == 0
assert gamma_move(board, 3, 3, 1) == 1
assert gamma_move(board, 4, 1, 4) == 0
assert gamma_move(board, 4, 1, 1) == 1
assert gamma_move(board, 5, 1, 5) == 0
assert gamma_move(board, 5, 3, 1) == 0
assert gamma_busy_fields(board, 5) == 3
assert gamma_golden_move(board, 5, 1, 1) == 1
assert gamma_move(board, 2, 6, 1) == 1
assert gamma_move(board, 2, 5, 2) == 1
assert gamma_golden_move(board, 2, 4, 3) == 0
assert gamma_move(board, 3, 6, 0) == 1
assert gamma_move(board, 4, 6, 1) == 0
assert gamma_move(board, 4, 2, 0) == 1
assert gamma_move(board, 5, 4, 6) == 0
assert gamma_golden_possible(board, 5) == 0
assert gamma_move(board, 1, 1, 3) == 0
assert gamma_move(board, 2, 3, 0) == 0
assert gamma_move(board, 3, 2, 4) == 0
assert gamma_move(board, 3, 0, 2) == 1
assert gamma_move(board, 4, 3, 6) == 0
assert gamma_golden_move(board, 4, 4, 0) == 1
assert gamma_move(board, 5, 3, 0) == 0
assert gamma_free_fields(board, 1) == 20
assert gamma_move(board, 2, 1, 0) == 1
assert gamma_move(board, 2, 6, 3) == 1
assert gamma_move(board, 3, 1, 2) == 0
assert gamma_move(board, 4, 2, 2) == 1
assert gamma_move(board, 5, 2, 4) == 0
assert gamma_move(board, 5, 2, 4) == 0
assert gamma_move(board, 1, 3, 4) == 0
assert gamma_move(board, 2, 1, 5) == 0
assert gamma_move(board, 2, 1, 0) == 0
assert gamma_busy_fields(board, 2) == 8
assert gamma_move(board, 3, 3, 0) == 0
assert gamma_move(board, 4, 1, 1) == 0
assert gamma_golden_possible(board, 4) == 0
assert gamma_move(board, 5, 1, 4) == 0
assert gamma_golden_possible(board, 5) == 0
board893840549 = gamma_board(board)
assert board893840549 is not None
assert board893840549 == ("23542...\n"
".3.5..2.\n"
"334..212\n"
".5.3..2.\n"
"52424.3.\n")
del board893840549
board893840549 = None
assert gamma_move(board, 1, 6, 1) == 0
assert gamma_move(board, 1, 6, 3) == 0
assert gamma_free_fields(board, 1) == 17
assert gamma_move(board, 2, 1, 7) == 0
assert gamma_move(board, 2, 5, 0) == 1
assert gamma_move(board, 3, 1, 4) == 0
assert gamma_move(board, 4, 3, 7) == 0
assert gamma_move(board, 4, 6, 3) == 0
assert gamma_golden_move(board, 4, 1, 3) == 0
assert gamma_move(board, 5, 3, 5) == 0
assert gamma_move(board, 5, 2, 4) == 0
assert gamma_move(board, 1, 0, 1) == 1
assert gamma_move(board, 2, 0, 1) == 0
assert gamma_move(board, 3, 2, 0) == 0
assert gamma_move(board, 3, 6, 2) == 0
assert gamma_move(board, 4, 0, 7) == 0
assert gamma_move(board, 5, 4, 6) == 0
assert gamma_move(board, 1, 2, 3) == 1
assert gamma_move(board, 1, 0, 0) == 0
assert gamma_golden_possible(board, 1) == 1
assert gamma_move(board, 2, 1, 7) == 0
assert gamma_move(board, 3, 3, 5) == 0
assert gamma_move(board, 3, 7, 0) == 1
assert gamma_golden_move(board, 3, 1, 1) == 1
assert gamma_move(board, 4, 2, 3) == 0
assert gamma_move(board, 4, 2, 0) == 0
assert gamma_golden_move(board, 4, 0, 0) == 0
board959278144 = gamma_board(board)
assert board959278144 is not None
assert board959278144 == ("23542...\n"
".315..2.\n"
"334..212\n"
"13.3..2.\n"
"52424233\n")
del board959278144
board959278144 = None
assert gamma_move(board, 5, 4, 7) == 0
assert gamma_move(board, 1, 3, 0) == 0
assert gamma_move(board, 2, 4, 4) == 0
assert gamma_move(board, 2, 6, 2) == 0
assert gamma_move(board, 3, 1, 1) == 0
assert gamma_move(board, 3, 0, 0) == 0
assert gamma_golden_possible(board, 3) == 0
assert gamma_move(board, 4, 2, 3) == 0
assert gamma_move(board, 5, 1, 4) == 0
assert gamma_move(board, 5, 3, 1) == 0
assert gamma_golden_possible(board, 5) == 0
assert gamma_move(board, 1, 1, 5) == 0
assert gamma_move(board, 1, 1, 2) == 0
assert gamma_move(board, 2, 3, 5) == 0
assert gamma_move(board, 2, 2, 0) == 0
assert gamma_move(board, 3, 1, 4) == 0
assert gamma_move(board, 3, 4, 4) == 0
assert gamma_busy_fields(board, 3) == 8
assert gamma_move(board, 4, 3, 1) == 0
assert gamma_golden_possible(board, 4) == 0
board186630370 = gamma_board(board)
assert board186630370 is not None
assert board186630370 == ("23542...\n"
".315..2.\n"
"334..212\n"
"13.3..2.\n"
"52424233\n")
del board186630370
board186630370 = None
assert gamma_move(board, 5, 2, 4) == 0
assert gamma_free_fields(board, 5) == 13
assert gamma_move(board, 1, 2, 1) == 1
assert gamma_move(board, 2, 3, 5) == 0
assert gamma_move(board, 3, 4, 7) == 0
assert gamma_move(board, 4, 1, 4) == 0
assert gamma_move(board, 4, 1, 3) == 0
assert gamma_free_fields(board, 4) == 12
assert gamma_move(board, 5, 2, 4) == 0
assert gamma_move(board, 5, 2, 0) == 0
assert gamma_move(board, 2, 4, 6) == 0
assert gamma_move(board, 2, 0, 2) == 0
assert gamma_golden_possible(board, 2) == 1
assert gamma_move(board, 3, 2, 3) == 0
assert gamma_move(board, 4, 3, 4) == 0
assert gamma_move(board, 4, 7, 0) == 0
assert gamma_move(board, 5, 3, 3) == 0
assert gamma_move(board, 1, 1, 5) == 0
assert gamma_free_fields(board, 1) == 12
assert gamma_move(board, 2, 7, 0) == 0
assert gamma_busy_fields(board, 2) == 9
assert gamma_move(board, 3, 1, 4) == 0
assert gamma_move(board, 3, 7, 1) == 1
assert gamma_move(board, 4, 1, 5) == 0
assert gamma_move(board, 5, 1, 2) == 0
gamma_delete(board)
| 31.487069
| 46
| 0.654483
| 1,316
| 7,305
| 3.487082
| 0.044073
| 0.335585
| 0.359555
| 0.479407
| 0.788843
| 0.770538
| 0.667684
| 0.468512
| 0.363478
| 0.325343
| 0
| 0.150473
| 0.17577
| 7,305
| 231
| 47
| 31.623377
| 0.611692
| 0
| 0
| 0.26087
| 0
| 0
| 0.041569
| 0
| 0
| 0
| 0
| 0
| 0.73913
| 1
| 0
| false
| 0
| 0.004831
| 0
| 0.004831
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
017bb5fd19c38e12ebbd9b1174d2ef3839c48ec4
| 116
|
py
|
Python
|
source_code/central_monitor/alerts/admin.py
|
Doctor-Venom/Cyber-Oracle
|
0cc3475416ea552704f4b1086d850fa90117ccc6
|
[
"MIT"
] | null | null | null |
source_code/central_monitor/alerts/admin.py
|
Doctor-Venom/Cyber-Oracle
|
0cc3475416ea552704f4b1086d850fa90117ccc6
|
[
"MIT"
] | null | null | null |
source_code/central_monitor/alerts/admin.py
|
Doctor-Venom/Cyber-Oracle
|
0cc3475416ea552704f4b1086d850fa90117ccc6
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Alert
# Register your models here.
admin.site.register(Alert)
| 19.333333
| 32
| 0.801724
| 17
| 116
| 5.470588
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12931
| 116
| 5
| 33
| 23.2
| 0.920792
| 0.224138
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6d7e22756a6d95260f1fae465aebfb31d39e3235
| 55
|
py
|
Python
|
RadiAIDD/Backend/__init__.py
|
jo-mueller/RadiAide
|
0ad9eb11673f5df4998670dfc75eaad38b5ae54a
|
[
"MIT"
] | null | null | null |
RadiAIDD/Backend/__init__.py
|
jo-mueller/RadiAide
|
0ad9eb11673f5df4998670dfc75eaad38b5ae54a
|
[
"MIT"
] | 2
|
2020-05-18T10:36:49.000Z
|
2020-05-19T07:09:45.000Z
|
RadiAIDD/Backend/__init__.py
|
jo-mueller/RadiAide
|
0ad9eb11673f5df4998670dfc75eaad38b5ae54a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from .UI import ressources_rc
| 13.75
| 29
| 0.636364
| 8
| 55
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 0.181818
| 55
| 3
| 30
| 18.333333
| 0.733333
| 0.381818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6d815f60998760396e245b33a49f965635ddcbee
| 129
|
py
|
Python
|
src/main1.py
|
yamap55/python_import_sample
|
c468ac0d076cb61f54ef6133d906f1cb112e2aae
|
[
"MIT"
] | null | null | null |
src/main1.py
|
yamap55/python_import_sample
|
c468ac0d076cb61f54ef6133d906f1cb112e2aae
|
[
"MIT"
] | 1
|
2021-03-26T01:09:24.000Z
|
2021-03-26T01:09:24.000Z
|
src/main1.py
|
yamap55/python_import_sample
|
c468ac0d076cb61f54ef6133d906f1cb112e2aae
|
[
"MIT"
] | null | null | null |
import b
def get_str():
return f"123_{b.get_hoge()}_456" # 123_hoge_456
if __name__ == "__main__":
print(get_str())
| 12.9
| 52
| 0.643411
| 21
| 129
| 3.238095
| 0.666667
| 0.176471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116505
| 0.20155
| 129
| 9
| 53
| 14.333333
| 0.543689
| 0.093023
| 0
| 0
| 0
| 0
| 0.26087
| 0.191304
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.2
| 0.2
| 0.6
| 0.2
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
6d9226f9a9787d242a2a4b15e7fe827ebe4a40ef
| 48
|
py
|
Python
|
Modules/ExposureNotification/exposure_notification_exceptions.py
|
pvieito/Radar-STATS
|
9ff991a4db776259bc749a823ee6f0b0c0d38108
|
[
"Apache-2.0"
] | 9
|
2020-10-14T16:58:32.000Z
|
2021-10-05T12:01:56.000Z
|
Modules/ExposureNotification/exposure_notification_exceptions.py
|
pvieito/Radar-STATS
|
9ff991a4db776259bc749a823ee6f0b0c0d38108
|
[
"Apache-2.0"
] | 3
|
2020-10-08T04:48:35.000Z
|
2020-10-10T20:46:58.000Z
|
Modules/ExposureNotification/exposure_notification_exceptions.py
|
pvieito/Radar-STATS
|
9ff991a4db776259bc749a823ee6f0b0c0d38108
|
[
"Apache-2.0"
] | 3
|
2020-09-27T07:39:26.000Z
|
2020-10-02T07:48:56.000Z
|
class NoKeysFoundException(Exception):
pass
| 16
| 38
| 0.791667
| 4
| 48
| 9.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145833
| 48
| 2
| 39
| 24
| 0.926829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
6da9d8223a6e1eed92bad6bd9af4166ef81b6a62
| 2,311
|
py
|
Python
|
tests/calculate_with_alfas_test.py
|
parvex/residual-continual-learning-benchmark
|
8eeb2e57ecf0711e075eb02e8ed06fc8e7b9f20d
|
[
"MIT"
] | null | null | null |
tests/calculate_with_alfas_test.py
|
parvex/residual-continual-learning-benchmark
|
8eeb2e57ecf0711e075eb02e8ed06fc8e7b9f20d
|
[
"MIT"
] | null | null | null |
tests/calculate_with_alfas_test.py
|
parvex/residual-continual-learning-benchmark
|
8eeb2e57ecf0711e075eb02e8ed06fc8e7b9f20d
|
[
"MIT"
] | null | null | null |
import unittest
import torch
class TensorMultiplying(unittest.TestCase):
def test_tensor_multiplying_last_layer(self):
out = torch.tensor(
[[[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0]], [[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0]],
[[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0]]])
out = out.unsqueeze(0).repeat(128,1,1,1)
alfa = torch.tensor([2.0, 3.0, 4.0])
expected = torch.tensor([[[2., 4., 6.],
[8., 10., 12.],
[14., 16., 18.]],
[[3., 6., 9.],
[12., 15., 18.],
[21., 24., 27.]],
[[4., 8., 12.],
[16., 20., 24.],
[28., 32., 36.]]])
expected = expected.unsqueeze(0).repeat(128, 1, 1, 1)
result = out * alfa[None, :, None, None]
self.assertTrue(torch.equal(expected, result))
def test_tensor_multiplying_prelast_layer(self):
out = torch.tensor(
[[[[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]], [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]],
[[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]]], [[[2.0, 2.0, 2.0], [2.0, 2.0, 2.0], [2.0, 2.0, 2.0]], [[2.0, 2.0, 2.0], [2.0, 2.0, 2.0], [2.0, 2.0, 2.0]], [[2.0, 2.0, 2.0], [2.0, 2.0, 2.0], [2.0, 2.0, 2.0]]], [[[3.0, 3.0, 3.0], [3.0, 3.0, 3.0], [3.0, 3.0, 3.0]], [[3.0, 3.0, 3.0], [3.0, 3.0, 3.0], [3.0, 3.0, 3.0]], [[3.0, 3.0, 3.0], [3.0, 3.0, 3.0], [3.0, 3.0, 3.0]]]])
alfa = torch.tensor([2.0, 3.0, 4.0])
expected = torch.tensor([[[2., 4., 6.],
[8., 10., 12.],
[14., 16., 18.]],
[[3., 6., 9.],
[12., 15., 18.],
[21., 24., 27.]],
[[4., 8., 12.],
[16., 20., 24.],
[28., 32., 36.]]])
result = out * alfa[None, :, None, None]
self.assertTrue(torch.equal(expected, result))
if __name__ == '__main__':
unittest.main()
| 39.169492
| 389
| 0.339247
| 369
| 2,311
| 2.081301
| 0.135501
| 0.083333
| 0.125
| 0.135417
| 0.783854
| 0.783854
| 0.783854
| 0.666667
| 0.666667
| 0.666667
| 0
| 0.24269
| 0.408048
| 2,311
| 58
| 390
| 39.844828
| 0.318713
| 0
| 0
| 0.666667
| 0
| 0
| 0.003462
| 0
| 0
| 0
| 0
| 0
| 0.051282
| 1
| 0.051282
| false
| 0
| 0.051282
| 0
| 0.128205
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6db510c92d24aae7e24aa0c949fd6c1c08e43285
| 6,483
|
py
|
Python
|
perfrunner/workloads/ycsb.py
|
bochun/perfrunner
|
e215c73240381cf82fddc40856f560369c9b75a8
|
[
"Apache-2.0"
] | 18
|
2015-10-28T23:12:07.000Z
|
2022-01-04T14:23:37.000Z
|
perfrunner/workloads/ycsb.py
|
bochun/perfrunner
|
e215c73240381cf82fddc40856f560369c9b75a8
|
[
"Apache-2.0"
] | 11
|
2019-03-19T12:02:31.000Z
|
2022-02-11T03:39:44.000Z
|
perfrunner/workloads/ycsb.py
|
bochun/perfrunner
|
e215c73240381cf82fddc40856f560369c9b75a8
|
[
"Apache-2.0"
] | 39
|
2015-06-07T09:17:16.000Z
|
2022-03-06T20:32:01.000Z
|
from perfrunner.helpers.local import restart_memcached, run_ycsb
from perfrunner.settings import PhaseSettings, TargetSettings
def ycsb_data_load(workload_settings: PhaseSettings,
target: TargetSettings,
timer: int,
instance: int):
soe_params = None
if workload_settings.recorded_load_cache_size:
restart_memcached()
soe_params = {
'insertstart': (instance + 1) * workload_settings.inserts_per_workerinstance,
'recorded_load_cache_size': workload_settings.recorded_load_cache_size,
}
phase_params = None
if workload_settings.phase:
phase_params = {
'insertstart': instance * workload_settings.inserts_per_workerinstance +
workload_settings.insertstart,
'inserts_per_workerinstance': workload_settings.inserts_per_workerinstance,
}
host = target.node
if target.cloud:
host = target.cloud['cluster_svc']
run_ycsb(host=host,
bucket=target.bucket,
password=target.password,
action='load',
ycsb_client=workload_settings.ycsb_client,
workload=workload_settings.workload_path,
items=workload_settings.items,
workers=workload_settings.workers,
target=int(workload_settings.target),
soe_params=soe_params,
instance=instance,
epoll=workload_settings.epoll,
boost=workload_settings.boost,
persist_to=workload_settings.persist_to,
replicate_to=workload_settings.replicate_to,
fieldlength=workload_settings.field_length,
fieldcount=workload_settings.field_count,
durability=workload_settings.durability,
kv_endpoints=workload_settings.kv_endpoints,
enable_mutation_token=workload_settings.enable_mutation_token,
transactionsenabled=workload_settings.transactionsenabled,
documentsintransaction=workload_settings.documentsintransaction,
transactionreadproportion=workload_settings.transactionreadproportion,
transactionupdateproportion=workload_settings.transactionupdateproportion,
transactioninsertproportion=workload_settings.transactioninsertproportion,
requestdistribution=workload_settings.requestdistribution,
num_atrs=workload_settings.num_atrs,
ycsb_jvm_args=workload_settings.ycsb_jvm_args,
collections_map=workload_settings.collections,
timeseries=workload_settings.timeseries,
phase_params=phase_params,
cloud=target.cloud)
def ycsb_workload(workload_settings: PhaseSettings,
                  target: TargetSettings,
                  timer: int,
                  instance: int):
    """Run the YCSB 'run' (access) phase for one worker instance.

    Builds the optional SOE and insert-test parameter dicts, picks this
    instance's workload file when the workload is split, resolves the
    target host, and delegates to ``run_ycsb`` with ``action='run'``.

    Args:
        workload_settings: phase configuration (workload path, ops,
            retry/SSL/transaction knobs, ...).
        target: cluster/bucket connection settings.
        timer: unused here; kept so all workload tasks share a signature.
        instance: zero-based index of this worker instance.
    """
    # Unlike the load phase, no memcached restart is done here.
    soe_params = None
    if workload_settings.recorded_load_cache_size:
        soe_params = {
            'insertstart': (instance + 1) * workload_settings.inserts_per_workerinstance,
            'recorded_load_cache_size': workload_settings.recorded_load_cache_size,
        }
    # Split workloads: the first half of the instances runs the first
    # comma-separated workload file, the rest run the second.
    # NOTE(review): this mutates the shared workload_settings object, and
    # the `elif` condition is always true when reached (could be `else`).
    if workload_settings.ycsb_split_workload:
        split_instance = workload_settings.workload_instances // 2
        if instance < split_instance:
            workload_settings.workload_path = workload_settings.workload_path.split(",")[0]
        elif instance >= split_instance:
            workload_settings.workload_path = workload_settings.workload_path.split(",")[1]
    # Insert-test mode appends fresh keys after the preloaded item range,
    # giving each instance a disjoint [insertstart, recordcount) window.
    insert_test_params = None
    if workload_settings.insert_test_flag:
        insert_test_params = {
            'insertstart': int(instance * workload_settings.inserts_per_workerinstance +
                               workload_settings.items),
            'recordcount': int((instance + 1) * workload_settings.inserts_per_workerinstance +
                               workload_settings.items),
        }
    # Cloud deployments are addressed through the cluster service endpoint.
    host = target.node
    if target.cloud:
        host = target.cloud['cluster_svc']
    run_ycsb(host=host,
             bucket=target.bucket,
             password=target.password,
             action='run',
             ycsb_client=workload_settings.ycsb_client,
             workload=workload_settings.workload_path,
             items=workload_settings.items,
             workers=workload_settings.workers,
             target=int(workload_settings.target),
             soe_params=soe_params,
             ops=int(workload_settings.ops),
             instance=instance,
             epoll=workload_settings.epoll,
             boost=workload_settings.boost,
             persist_to=workload_settings.persist_to,
             replicate_to=workload_settings.replicate_to,
             execution_time=workload_settings.time,
             ssl_keystore_file=workload_settings.ssl_keystore_file,
             ssl_keystore_password=workload_settings.ssl_keystore_password,
             ssl_mode=workload_settings.ssl_mode,
             certificate_file=workload_settings.certificate_file,
             timeseries=workload_settings.timeseries,
             cbcollect=workload_settings.cbcollect,
             fieldlength=workload_settings.field_length,
             fieldcount=workload_settings.field_count,
             durability=workload_settings.durability,
             kv_endpoints=workload_settings.kv_endpoints,
             enable_mutation_token=workload_settings.enable_mutation_token,
             retry_strategy=workload_settings.retry_strategy,
             retry_lower=workload_settings.retry_lower,
             retry_upper=workload_settings.retry_upper,
             retry_factor=workload_settings.retry_factor,
             transactionsenabled=workload_settings.transactionsenabled,
             documentsintransaction=workload_settings.documentsintransaction,
             transactionreadproportion=workload_settings.transactionreadproportion,
             transactionupdateproportion=workload_settings.transactionupdateproportion,
             transactioninsertproportion=workload_settings.transactioninsertproportion,
             requestdistribution=workload_settings.requestdistribution,
             num_atrs=workload_settings.num_atrs,
             ycsb_jvm_args=workload_settings.ycsb_jvm_args,
             collections_map=workload_settings.collections,
             out_of_order=workload_settings.ycsb_out_of_order,
             insert_test_params=insert_test_params,
             cloud=target.cloud)
| 46.640288
| 92
| 0.681783
| 593
| 6,483
| 7.070826
| 0.168634
| 0.316718
| 0.040067
| 0.03005
| 0.774624
| 0.737181
| 0.737181
| 0.735035
| 0.688767
| 0.688767
| 0
| 0.001245
| 0.256826
| 6,483
| 138
| 93
| 46.978261
| 0.869033
| 0
| 0
| 0.666667
| 0
| 0
| 0.02468
| 0.011414
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015873
| false
| 0.02381
| 0.015873
| 0
| 0.031746
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6dd70717ee1b8fc1056067f9ccd21594623d866a
| 661
|
py
|
Python
|
Smart calc/calc.py
|
df2204/training-folder
|
4661f125e842610404cf2c3e6830d78d0498fa26
|
[
"MIT"
] | null | null | null |
Smart calc/calc.py
|
df2204/training-folder
|
4661f125e842610404cf2c3e6830d78d0498fa26
|
[
"MIT"
] | null | null | null |
Smart calc/calc.py
|
df2204/training-folder
|
4661f125e842610404cf2c3e6830d78d0498fa26
|
[
"MIT"
] | null | null | null |
# Simple interactive calculator: read an operator and two ints, print the
# result.  Fixes over the original: the identical division-by-zero guard was
# duplicated for "/", "//" and "%" (now shared), and spacing follows PEP 8.
# Prompts, messages and results are unchanged.
print("hello")
op = input("input operation ")
x = int(input("x = "))
y = int(input("y = "))

# Message shown whenever a division-style operator gets a zero divisor.
ZERO_DIVISOR_MSG = "it's a simple calc, don't be too smart"

if op == "+":
    print(x + y)
elif op == "-":
    print(x - y)
elif op == "*":
    print(x * y)
elif op in ("/", "//", "%"):
    # All three division-style operators share one zero-divisor guard.
    if y == 0:
        print(ZERO_DIVISOR_MSG)
    elif op == "/":
        print(x / y)
    elif op == "//":
        print(x // y)
    else:
        print(x % y)
elif op == "**":
    print(x ** y)
else:
    print("Unknown operational symbol. It's a simple calc, don't forget")
| 22.033333
| 75
| 0.472012
| 107
| 661
| 2.915888
| 0.242991
| 0.051282
| 0.157051
| 0.211538
| 0.708333
| 0.708333
| 0.708333
| 0.650641
| 0.650641
| 0.650641
| 0
| 0.006726
| 0.325265
| 661
| 29
| 76
| 22.793103
| 0.692825
| 0
| 0
| 0.344828
| 0
| 0
| 0.320726
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.413793
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
6de2925c9dc3649e1c465e32e01fdb0869693e32
| 313
|
py
|
Python
|
irctest/scram/exceptions.py
|
FiskFan1999/ergochat_irctest
|
da005d7d2492bf31c4bdeb46108240766c69d0ad
|
[
"MIT"
] | 16
|
2015-12-20T16:24:54.000Z
|
2021-06-03T18:00:03.000Z
|
irctest/scram/exceptions.py
|
FiskFan1999/ergochat_irctest
|
da005d7d2492bf31c4bdeb46108240766c69d0ad
|
[
"MIT"
] | 66
|
2015-12-20T00:23:25.000Z
|
2021-08-14T09:57:04.000Z
|
irctest/scram/exceptions.py
|
FiskFan1999/ergochat_irctest
|
da005d7d2492bf31c4bdeb46108240766c69d0ad
|
[
"MIT"
] | 3
|
2021-12-04T21:18:41.000Z
|
2022-03-22T01:42:36.000Z
|
class ScramException(Exception):
    """Base class for all SCRAM authentication errors in this package."""
    pass


class BadChallengeException(ScramException):
    """Error for a malformed/invalid server challenge."""
    pass


class ExtraChallengeException(ScramException):
    """Error for an unexpected extra challenge from the server."""
    pass


class ServerScramError(ScramException):
    """Error reported by the server during the SCRAM exchange."""
    pass


class BadSuccessException(ScramException):
    """Error for an invalid final 'success' message from the server."""
    pass


class NotAuthorizedException(ScramException):
    """Error for a failed/unauthorized authentication attempt."""
    pass
| 17.388889
| 46
| 0.792332
| 24
| 313
| 10.333333
| 0.375
| 0.181452
| 0.370968
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15016
| 313
| 17
| 47
| 18.411765
| 0.932331
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
0998d4118040ecbdae6ade511e0c92b1a9c293c1
| 27
|
py
|
Python
|
src/sage/graphs/generators/__init__.py
|
bopopescu/sage
|
2d495be78e0bdc7a0a635454290b27bb4f5f70f0
|
[
"BSL-1.0"
] | 5
|
2015-01-04T07:15:06.000Z
|
2022-03-04T15:15:18.000Z
|
src/sage/graphs/generators/__init__.py
|
Ivo-Maffei/sage
|
467fbc70a08b552b3de33d9065204ee9cbfb02c7
|
[
"BSL-1.0"
] | 2
|
2018-10-30T13:40:20.000Z
|
2020-07-23T12:13:30.000Z
|
src/sage/graphs/generators/__init__.py
|
dimpase/sage
|
468f23815ade42a2192b0a9cd378de8fdc594dcd
|
[
"BSL-1.0"
] | 10
|
2016-09-28T13:12:40.000Z
|
2022-02-12T09:28:34.000Z
|
# This file is not empty !
| 13.5
| 26
| 0.666667
| 5
| 27
| 3.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.259259
| 27
| 1
| 27
| 27
| 0.9
| 0.888889
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
09e19fe537b6aa641c8e8f54b2ea6eff63c1fdbd
| 50
|
py
|
Python
|
mvdnet/evaluation/__init__.py
|
qiank10/MVDNet
|
d83663df068183d1e606100adb0fc78c35f1141c
|
[
"Apache-2.0"
] | 51
|
2021-03-05T08:20:27.000Z
|
2022-03-30T03:16:19.000Z
|
mvdnet/evaluation/__init__.py
|
qiank10/MVDNet
|
d83663df068183d1e606100adb0fc78c35f1141c
|
[
"Apache-2.0"
] | 10
|
2021-04-04T09:07:44.000Z
|
2021-12-14T09:16:11.000Z
|
mvdnet/evaluation/__init__.py
|
qiank10/MVDNet
|
d83663df068183d1e606100adb0fc78c35f1141c
|
[
"Apache-2.0"
] | 4
|
2021-03-08T01:49:33.000Z
|
2021-12-03T12:46:01.000Z
|
from .robotcar_evaluation import RobotCarEvaluator
| 50
| 50
| 0.92
| 5
| 50
| 9
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06
| 50
| 1
| 50
| 50
| 0.957447
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
09e8355bc82186cdf9a94afdd31ef30230007eec
| 158
|
py
|
Python
|
apidaora/myapp.py
|
sarincr/Python-Web-Frameworks-and-Template-Engines
|
a830cafb0075539527688ea3c12e3e2d97d3519f
|
[
"MIT"
] | null | null | null |
apidaora/myapp.py
|
sarincr/Python-Web-Frameworks-and-Template-Engines
|
a830cafb0075539527688ea3c12e3e2d97d3519f
|
[
"MIT"
] | null | null | null |
apidaora/myapp.py
|
sarincr/Python-Web-Frameworks-and-Template-Engines
|
a830cafb0075539527688ea3c12e3e2d97d3519f
|
[
"MIT"
] | null | null | null |
from apidaora import appdaora, route


@route.get('/')
def hello_controller(name: str) -> str:
    """Greet the caller using the 'name' query parameter.

    Fix: the original returned f'Hello World!' — an f-string with no
    placeholder (the `name` parameter was unused); the upstream apidaora
    quickstart interpolates the name.
    """
    return f'Hello {name}!'


# ASGI application wrapping the controller.
app = appdaora(hello_controller)
| 15.8
| 39
| 0.708861
| 21
| 158
| 5.238095
| 0.714286
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164557
| 158
| 9
| 40
| 17.555556
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.088608
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
09f4d42d6f26286c5c1ef69f2fb975726f81fa79
| 150
|
py
|
Python
|
src/you_get/cli_wrapper/player/__main__.py
|
adger-me/you-get
|
b32741ad8e57b3a02a1006d27e87eb5b879f1c90
|
[
"MIT"
] | 46,956
|
2015-01-01T08:48:41.000Z
|
2022-03-31T13:38:03.000Z
|
src/you_get/cli_wrapper/player/__main__.py
|
adger-me/you-get
|
b32741ad8e57b3a02a1006d27e87eb5b879f1c90
|
[
"MIT"
] | 2,477
|
2015-01-02T18:12:33.000Z
|
2022-03-31T10:44:24.000Z
|
src/you_get/cli_wrapper/player/__main__.py
|
adger-me/you-get
|
b32741ad8e57b3a02a1006d27e87eb5b879f1c90
|
[
"MIT"
] | 10,750
|
2015-01-03T01:33:16.000Z
|
2022-03-31T06:36:28.000Z
|
#!/usr/bin/env python
# NOTE: the entire module body below is one triple-quoted string literal
# (work in progress) — nothing in it executes; it is kept verbatim.
''' WIP
def main():
    script_main('you-get', any_download, any_download_playlist)

if __name__ == "__main__":
    main()
'''
| 15
| 63
| 0.646667
| 20
| 150
| 4.25
| 0.75
| 0.258824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 150
| 9
| 64
| 16.666667
| 0.68
| 0.926667
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
09fb85c745573d94771db6cb539775578a370186
| 47
|
py
|
Python
|
apps/forms-flow-ai/forms-flow-api/tests/conf/__init__.py
|
saravanpa-aot/SBC_DivApps
|
87355ef93f8020b2c42f5392ff5d7b0129fa2222
|
[
"Apache-2.0"
] | 132
|
2020-04-16T20:26:18.000Z
|
2022-03-30T12:39:50.000Z
|
apps/forms-flow-ai/forms-flow-api/tests/conf/__init__.py
|
saravanpa-aot/SBC_DivApps
|
87355ef93f8020b2c42f5392ff5d7b0129fa2222
|
[
"Apache-2.0"
] | 102
|
2020-06-05T21:30:27.000Z
|
2022-03-30T04:53:25.000Z
|
apps/forms-flow-ai/forms-flow-api/tests/conf/__init__.py
|
saravanpa-aot/SBC_DivApps
|
87355ef93f8020b2c42f5392ff5d7b0129fa2222
|
[
"Apache-2.0"
] | 91
|
2020-04-15T18:54:51.000Z
|
2022-03-29T04:58:49.000Z
|
"""Test-Suite for the configuration system."""
| 23.5
| 46
| 0.723404
| 6
| 47
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106383
| 47
| 1
| 47
| 47
| 0.809524
| 0.851064
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
61ccf64925a4d55c203a566ed351ab1aec40bca1
| 90
|
py
|
Python
|
delivery/delivery/ext/site/__init__.py
|
alisonamerico/curso-flask
|
693b0009278de7662d5cf269ff0c21ccfc01eab2
|
[
"Unlicense"
] | null | null | null |
delivery/delivery/ext/site/__init__.py
|
alisonamerico/curso-flask
|
693b0009278de7662d5cf269ff0c21ccfc01eab2
|
[
"Unlicense"
] | null | null | null |
delivery/delivery/ext/site/__init__.py
|
alisonamerico/curso-flask
|
693b0009278de7662d5cf269ff0c21ccfc01eab2
|
[
"Unlicense"
] | null | null | null |
from delivery.ext.site.main import bp


def init_app(app):
    """Extension entry point: register the site blueprint on *app*.

    *app* is presumably a Flask application (it exposes
    register_blueprint) — confirm against the app factory.
    """
    app.register_blueprint(bp)
| 15
| 37
| 0.755556
| 15
| 90
| 4.4
| 0.8
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144444
| 90
| 5
| 38
| 18
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
110f91c2ea844b71d6f314e8a6c23e696a73457c
| 137
|
py
|
Python
|
tests/test_wikipedia.py
|
cauliyang/test-poetry
|
d61c1c7648aaff0fe7667770fd35c7c8119caeac
|
[
"MIT"
] | null | null | null |
tests/test_wikipedia.py
|
cauliyang/test-poetry
|
d61c1c7648aaff0fe7667770fd35c7c8119caeac
|
[
"MIT"
] | null | null | null |
tests/test_wikipedia.py
|
cauliyang/test-poetry
|
d61c1c7648aaff0fe7667770fd35c7c8119caeac
|
[
"MIT"
] | null | null | null |
from test_poetry import wikipedia


def test_random_page_use_given_language(mock_requests_get):
    """random_page should accept an explicit language code without raising.

    mock_requests_get is a fixture (presumably patching requests.get in a
    conftest — confirm) so no real HTTP request is made.
    """
    wikipedia.random_page(language="de")
| 22.833333
| 59
| 0.839416
| 20
| 137
| 5.3
| 0.75
| 0.188679
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094891
| 137
| 5
| 60
| 27.4
| 0.854839
| 0
| 0
| 0
| 0
| 0
| 0.014599
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
114b5df4c79b729027d3f505c8f68ce44a832f41
| 2,873
|
py
|
Python
|
textadapter/tests/data/benchmarks.py
|
ContinuumIO/TextAdapter
|
53138c2277cdfcf32e127251313d4f77f81050aa
|
[
"BSD-3-Clause"
] | 22
|
2016-11-09T12:20:04.000Z
|
2021-02-07T03:07:58.000Z
|
dbadapter/tests/data/benchmarks.py
|
ContinuumIO/DBAdapter
|
19a49b4963d50df27cc930cf27693f31fd8e0426
|
[
"BSD-3-Clause"
] | 5
|
2017-03-11T00:37:15.000Z
|
2018-05-24T15:35:40.000Z
|
dbadapter/tests/data/benchmarks.py
|
ContinuumIO/DBAdapter
|
19a49b4963d50df27cc930cf27693f31fd8e0426
|
[
"BSD-3-Clause"
] | 10
|
2017-10-10T23:15:00.000Z
|
2021-12-04T14:44:19.000Z
|
import timeit
import os
def timeFunction(function, setup):
print 'timing', function
t = timeit.Timer(stmt=function, setup=setup)
times = []
for i in range(0,3):
os.system('sudo sh -c "sync; echo 3 > /proc/sys/vm/drop_caches"')
times.append(str(t.timeit(number=1)))
return min(times)
# Benchmark blazeopt.loadtxt on integer and float CSV inputs.
ints1 = timeFunction('blazeopt.loadtxt("ints1", dtype="u4,u4,u4,u4,u4", delimiter=",")', 'import blazeopt')
ints2 = timeFunction('blazeopt.loadtxt("ints2", dtype="u4,u4,u4,u4,u4", delimiter=",")', 'import blazeopt')
ints3 = timeFunction('blazeopt.loadtxt("ints3", dtype="u4,u4,u4,u4,u4", delimiter=",")', 'import blazeopt')
print ints1, ints2, ints3

floats1 = timeFunction('blazeopt.loadtxt("floats1", dtype="f8,f8,f8,f8,f8", delimiter=",")', 'import blazeopt')
floats2 = timeFunction('blazeopt.loadtxt("floats2", dtype="f8,f8,f8,f8,f8", delimiter=",")', 'import blazeopt')
floats3 = timeFunction('blazeopt.loadtxt("floats3", dtype="f8,f8,f8,f8,f8", delimiter=",")', 'import blazeopt')
print floats1, floats2, floats3

# Same inputs through the more general genfromtxt code path.
ints1 = timeFunction('blazeopt.genfromtxt("ints1", dtype="u4,u4,u4,u4,u4", delimiter=",")', 'import blazeopt')
ints2 = timeFunction('blazeopt.genfromtxt("ints2", dtype="u4,u4,u4,u4,u4", delimiter=",")', 'import blazeopt')
ints3 = timeFunction('blazeopt.genfromtxt("ints3", dtype="u4,u4,u4,u4,u4", delimiter=",")', 'import blazeopt')
print ints1, ints2, ints3

floats1 = timeFunction('blazeopt.genfromtxt("floats1", dtype="f8,f8,f8,f8,f8", delimiter=",")', 'import blazeopt')
floats2 = timeFunction('blazeopt.genfromtxt("floats2", dtype="f8,f8,f8,f8,f8", delimiter=",")', 'import blazeopt')
floats3 = timeFunction('blazeopt.genfromtxt("floats3", dtype="f8,f8,f8,f8,f8", delimiter=",")', 'import blazeopt')
print floats1, floats2, floats3

# genfromtxt with missing-value substitution enabled.
missingValues1 = timeFunction('blazeopt.genfromtxt("missingvalues1", dtype="u4,u4,u4,u4,u4", delimiter=",", missing_values={0:["NA","NaN"], 1:["xx","inf"]}, filling_values="999")', 'import blazeopt')
missingValues2 = timeFunction('blazeopt.genfromtxt("missingvalues2", dtype="u4,u4,u4,u4,u4", delimiter=",", missing_values={0:["NA","NaN"], 1:["xx","inf"]}, filling_values="999")', 'import blazeopt')
missingValues3 = timeFunction('blazeopt.genfromtxt("missingvalues3", dtype="u4,u4,u4,u4,u4", delimiter=",", missing_values={0:["NA","NaN"], 1:["xx","inf"]}, filling_values="999")', 'import blazeopt')
print missingValues1, missingValues2, missingValues3

# Fixed-width columns: delimiter is a list of field widths.
fixedwidth1 = timeFunction('blazeopt.genfromtxt("fixedwidth1", dtype="u4,u4,u4,u4,u4", delimiter=[2,3,4,5,6])', 'import blazeopt')
fixedwidth2 = timeFunction('blazeopt.genfromtxt("fixedwidth2", dtype="u4,u4,u4,u4,u4", delimiter=[2,3,4,5,6])', 'import blazeopt')
fixedwidth3 = timeFunction('blazeopt.genfromtxt("fixedwidth3", dtype="u4,u4,u4,u4,u4", delimiter=[2,3,4,5,6])', 'import blazeopt')
print fixedwidth1, fixedwidth2, fixedwidth3
| 63.844444
| 199
| 0.705186
| 372
| 2,873
| 5.427419
| 0.180108
| 0.095097
| 0.106984
| 0.095097
| 0.618128
| 0.618128
| 0.618128
| 0.618128
| 0.618128
| 0.618128
| 0
| 0.067527
| 0.082492
| 2,873
| 44
| 200
| 65.295455
| 0.698407
| 0
| 0
| 0.117647
| 0
| 0.176471
| 0.630223
| 0.41922
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.588235
| null | null | 0.205882
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
fecd563f40dd85eeb0c2b6894a2a09e0c5ef2ae2
| 475
|
py
|
Python
|
3week/test_car_fuelling.py
|
gypsyx/algos-toolbox
|
d2eb2a975332b5a2ee451129c8569d97f99a2b11
|
[
"MIT"
] | null | null | null |
3week/test_car_fuelling.py
|
gypsyx/algos-toolbox
|
d2eb2a975332b5a2ee451129c8569d97f99a2b11
|
[
"MIT"
] | null | null | null |
3week/test_car_fuelling.py
|
gypsyx/algos-toolbox
|
d2eb2a975332b5a2ee451129c8569d97f99a2b11
|
[
"MIT"
] | null | null | null |
from car_fueling_3 import *
def test_compute_min_refills():
    """Exercise compute_min_refills on reachable and unreachable routes."""
    # (stops, total distance, tank range, expected refills; -1 = unreachable)
    cases = [
        ([200, 375, 550, 750], 950, 400, 2),
        ([1, 2, 5, 9], 10, 3, -1),
        ([100, 150], 200, 250, 0),
        ([100, 200, 300, 400], 500, 200, 2),
        ([100, 200, 300, 400], 700, 200, -1),
    ]
    for stops, distance, tank, expected in cases:
        assert compute_min_refills(distance, tank, stops) == expected
| 27.941176
| 53
| 0.637895
| 73
| 475
| 3.945205
| 0.410959
| 0.208333
| 0.354167
| 0.399306
| 0.277778
| 0.277778
| 0.277778
| 0.277778
| 0.277778
| 0
| 0
| 0.215259
| 0.227368
| 475
| 17
| 53
| 27.941176
| 0.569482
| 0
| 0
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.416667
| 1
| 0.083333
| false
| 0
| 0.083333
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fefaf9b9ae4998c70d018dd8a35614c168274d05
| 2,317
|
py
|
Python
|
code/2_process_hearings/2_1_clean_hearings.py
|
ianpcook/info_transmission
|
f0533f7643f5d29fc31932d9f59fd46cbc1a333b
|
[
"CC0-1.0"
] | null | null | null |
code/2_process_hearings/2_1_clean_hearings.py
|
ianpcook/info_transmission
|
f0533f7643f5d29fc31932d9f59fd46cbc1a333b
|
[
"CC0-1.0"
] | null | null | null |
code/2_process_hearings/2_1_clean_hearings.py
|
ianpcook/info_transmission
|
f0533f7643f5d29fc31932d9f59fd46cbc1a333b
|
[
"CC0-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# NOTE: this script is Python 2 (print statements below).
"""
Created on Wed Dec 10 18:14:42 2014

@author: ian
"""

"""
This file opens the downloaded hearing files, and cleans out
the HTML and other problematic content, then saves them as
separate files.
"""
import os
import fnmatch
from bs4 import BeautifulSoup
import codecs
import re

# Directory holding the raw, scraped hearing text files.
mydir = './data/scraped_hearings/'

"""
File traversing adapted from:
http://stackoverflow.com/questions/2186525/use-a-glob-to-find-files-recursively-in-python
"""
def find_files(directory, pattern):
    """Yield full paths of files under *directory* (recursively) whose
    basename matches the glob *pattern*."""
    for dirpath, _subdirs, filenames in os.walk(directory):
        for name in filenames:
            if fnmatch.fnmatch(name, pattern):
                yield os.path.join(dirpath, name)
# Code below thanks to Joe Esposito
filelist = []
failed_files = []
# Pass 1: strip HTML from every scraped .txt file and write the plain text
# to the clean_hearings_flat directory.
for filename in list(find_files(mydir, '*.txt')):
    # Get text
    with codecs.open(filename, 'r', 'utf-8') as f:
        try:
            content = f.read()
        except UnicodeDecodeError:
            # Skip files that are not valid UTF-8 and remember them.
            print 'Skipping non-UTF-8 file:', filename
            failed_files.append(filename)
            continue
    # Convert to text
    soup = BeautifulSoup(content.encode('UTF-8')).text
    # Reopen and write new text to the file
    with codecs.open('./data/clean_hearings_flat/'+os.path.basename(filename), 'w', 'utf-8') as f:
        f.write(soup)
    # Print to show progress
    # NOTE(review): this prints BEFORE appending, so it shows the count of
    # previously processed files, not including the current one.
    print len(filelist)
    # Add filename to array to be used as first data column
    filelist.append(os.path.basename(filename))

print 'Done!'
if failed_files:
    print
    print 'Skipped files:'
    for failed_file in failed_files:
        print failed_file

# Clean out extraneous formatting notes and graphics indicators
# NOTE(review): fileinput is imported but never used below.
import fileinput

chf = './data/clean_hearings_flat/'
# Patterns for layout cruft left over from the source documents.
fileCruft = [
    # to be removed from files
    re.compile('\[GRAPHIC\]'),
    re.compile('\[TIFF OMITTED\]'),
    re.compile('\[\[\w+\s+\d+\]\]'),
    re.compile('\[GRAPHIC\(S\) NOT AVAILABLE IN TIFF FORMAT\]')
]
# Pass 2: rewrite each cleaned file with the cruft patterns blanked out.
for filename in list(find_files(chf, '*.txt')):
    with codecs.open(filename, 'r', 'utf-8') as f:
        content = f.read()
    for cruft in fileCruft:
        content = re.sub(cruft, " ", content)
    with codecs.open(filename, 'w', 'utf-8') as f:
        f.write(content)
    print "Finished cleaning file ", filename
| 2,317
| 2,317
| 0.645231
| 313
| 2,317
| 4.731629
| 0.447284
| 0.018906
| 0.037812
| 0.018906
| 0.105334
| 0.105334
| 0.070223
| 0.070223
| 0.040513
| 0
| 0
| 0.015084
| 0.227449
| 2,317
| 1
| 2,317
| 2,317
| 0.812291
| 0.995252
| 0
| 0.081633
| 0
| 0
| 0.160777
| 0.045936
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.122449
| null | null | 0.142857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fefea9ffd9759541208b9b69f38aced2237f92e2
| 203
|
py
|
Python
|
PythonFiles/app.py
|
IamVaibhavsar/Python_Files
|
283d73929a3e11955c71499407c4f8bff56e4273
|
[
"MIT"
] | null | null | null |
PythonFiles/app.py
|
IamVaibhavsar/Python_Files
|
283d73929a3e11955c71499407c4f8bff56e4273
|
[
"MIT"
] | null | null | null |
PythonFiles/app.py
|
IamVaibhavsar/Python_Files
|
283d73929a3e11955c71499407c4f8bff56e4273
|
[
"MIT"
] | 1
|
2019-07-26T15:25:21.000Z
|
2019-07-26T15:25:21.000Z
|
# Demo: importing a sibling module makes its public names available here.
import usefulFunctions

# All functions and module-level variables defined in usefulFunctions can be
# accessed through the module object.
print(usefulFunctions.roll_dice())  # call a function from the imported module
print(usefulFunctions.friends)      # read a module-level variable
| 29
| 68
| 0.793103
| 29
| 203
| 5.517241
| 0.724138
| 0.1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152709
| 203
| 7
| 69
| 29
| 0.930233
| 0.507389
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
3a0e13183c8b1e4268fa8a67f8c73872ed8f2538
| 245
|
py
|
Python
|
custom_exceptions.py
|
maxweldsouza/ceme
|
a5bc65d8502f196a2e14cf0a0dce9c3c6efa4b3d
|
[
"MIT"
] | null | null | null |
custom_exceptions.py
|
maxweldsouza/ceme
|
a5bc65d8502f196a2e14cf0a0dce9c3c6efa4b3d
|
[
"MIT"
] | null | null | null |
custom_exceptions.py
|
maxweldsouza/ceme
|
a5bc65d8502f196a2e14cf0a0dce9c3c6efa4b3d
|
[
"MIT"
] | null | null | null |
class AlreadyExists(Exception):
    """Error for creating an entity that already exists."""
    pass


class NoRights(Exception):
    """Error for an action the current user is not permitted to perform."""
    pass


class EntryNotFound(Exception):
    """Error for a lookup that matched no entry."""
    pass


class LoginFailed(Exception):
    """Error for a failed login attempt."""
    pass


# input validation
""" Input validation """


class InvalidInput(Exception):
    """Error for input that fails validation."""
    pass
| 14.411765
| 31
| 0.722449
| 24
| 245
| 7.375
| 0.416667
| 0.367232
| 0.305085
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.187755
| 245
| 16
| 32
| 15.3125
| 0.889447
| 0.065306
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
3a1f7866fc87005ed243ac0c304a4393a6cec261
| 338
|
py
|
Python
|
src/plugins/getNow.py
|
Moyulingjiu/QQbot
|
cfdcee77de58215b4d05e59a1d8ea2462e091764
|
[
"Apache-2.0"
] | null | null | null |
src/plugins/getNow.py
|
Moyulingjiu/QQbot
|
cfdcee77de58215b4d05e59a1d8ea2462e091764
|
[
"Apache-2.0"
] | null | null | null |
src/plugins/getNow.py
|
Moyulingjiu/QQbot
|
cfdcee77de58215b4d05e59a1d8ea2462e091764
|
[
"Apache-2.0"
] | null | null | null |
import datetime
def toString():
    """Return the current local time as a 'YYYY-MM-DD HH:MM:SS' string."""
    now = datetime.datetime.now()
    return now.strftime('%Y-%m-%d %H:%M:%S')
def getHour():
    """Return the current local hour as an int in [0, 23]."""
    return datetime.datetime.now().hour
def getMinute():
    """Return the current local minute as an int in [0, 59]."""
    return datetime.datetime.now().minute
| 18.777778
| 73
| 0.686391
| 47
| 338
| 4.765957
| 0.404255
| 0.214286
| 0.214286
| 0.321429
| 0.486607
| 0.366071
| 0.366071
| 0.366071
| 0
| 0
| 0
| 0
| 0.183432
| 338
| 17
| 74
| 19.882353
| 0.811594
| 0
| 0
| 0.272727
| 0
| 0
| 0.050296
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0
| 0.090909
| 0
| 0.636364
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.