hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
7a57c7ed60d1b6a1ebecaacc8e16b81a8a38baba
34
py
Python
app/__init__.py
alessap/apistar_alpine
4848cdcf1c0becee339c2ee8cbbebdad0536e9c0
[ "MIT" ]
null
null
null
app/__init__.py
alessap/apistar_alpine
4848cdcf1c0becee339c2ee8cbbebdad0536e9c0
[ "MIT" ]
1
2021-03-28T07:11:05.000Z
2021-03-28T07:11:05.000Z
app/__init__.py
alessap/apistar_alpine
4848cdcf1c0becee339c2ee8cbbebdad0536e9c0
[ "MIT" ]
null
null
null
from .app import app # noqa:F401
17
33
0.705882
6
34
4
0.833333
0
0
0
0
0
0
0
0
0
0
0.111111
0.205882
34
1
34
34
0.777778
0.264706
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
7a78998f26a56ffb975c5667776b91ef98ed8192
153
py
Python
knitty/api.py
phlummox-patches/knitty
d7a5256ba9dfa4cc146681cb202343937214c0f6
[ "MIT" ]
41
2018-06-29T17:52:14.000Z
2021-12-12T06:59:59.000Z
knitty/api.py
phlummox-patches/knitty
d7a5256ba9dfa4cc146681cb202343937214c0f6
[ "MIT" ]
26
2018-01-17T09:40:34.000Z
2022-03-22T09:44:45.000Z
knitty/api.py
phlummox-patches/knitty
d7a5256ba9dfa4cc146681cb202343937214c0f6
[ "MIT" ]
2
2021-09-20T16:20:50.000Z
2022-03-22T09:26:41.000Z
from .ast_filter import knitty_pandoc_filter, safe_spawn # noqa from .preprocess_filter import knitty_preprosess # noqa from .tools import KnittyError
38.25
64
0.836601
21
153
5.809524
0.619048
0.196721
0.295082
0
0
0
0
0
0
0
0
0
0.124183
153
3
65
51
0.910448
0.058824
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
7a9bda47acb009f76b698e2decfbc04d101063dc
108
py
Python
main/donate/__init__.py
Curiouspaul1/Ecolead
1c5008b9ad9a6611b76bf61539ae3af9da06afc5
[ "MIT" ]
null
null
null
main/donate/__init__.py
Curiouspaul1/Ecolead
1c5008b9ad9a6611b76bf61539ae3af9da06afc5
[ "MIT" ]
null
null
null
main/donate/__init__.py
Curiouspaul1/Ecolead
1c5008b9ad9a6611b76bf61539ae3af9da06afc5
[ "MIT" ]
null
null
null
from flask import Blueprint donor = Blueprint("donor", __name__) from . import views from . import errors
15.428571
36
0.759259
14
108
5.571429
0.571429
0.358974
0
0
0
0
0
0
0
0
0
0
0.166667
108
6
37
18
0.866667
0
0
0
0
0
0.046296
0
0
0
0
0
0
1
0
false
0
0.75
0
0.75
0.5
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
1
0
6
7aa0a971808f685a2d562e6b25ba83c9ae07ed02
91
py
Python
kingkong/__init__.py
adrn/StroemeFinden
cf907a50296c9a09d32c6a6c3e9d49a2885e8bee
[ "MIT" ]
1
2015-08-04T15:38:56.000Z
2015-08-04T15:38:56.000Z
kingkong/__init__.py
adrn/StroemeFinden
cf907a50296c9a09d32c6a6c3e9d49a2885e8bee
[ "MIT" ]
6
2015-08-12T14:05:09.000Z
2015-08-13T13:22:58.000Z
kingkong/__init__.py
adrn/KingKong
cf907a50296c9a09d32c6a6c3e9d49a2885e8bee
[ "MIT" ]
null
null
null
from .core import * from .mockdata import * from .coordinates import * from .util import *
18.2
26
0.736264
12
91
5.583333
0.5
0.447761
0
0
0
0
0
0
0
0
0
0
0.175824
91
4
27
22.75
0.893333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
7aba77fe761f33e5d86506f0f80ae7a5f475fa31
713
py
Python
06_objects_in_motion/data/vertex_data.py
Mekire/gltut-pygame
352df52f5e1e64a9f50cc91cd1992751e14a140b
[ "MIT" ]
21
2016-03-24T10:07:06.000Z
2021-03-26T04:40:46.000Z
06_objects_in_motion/data/vertex_data.py
Mekire/gltut-pygame
352df52f5e1e64a9f50cc91cd1992751e14a140b
[ "MIT" ]
null
null
null
06_objects_in_motion/data/vertex_data.py
Mekire/gltut-pygame
352df52f5e1e64a9f50cc91cd1992751e14a140b
[ "MIT" ]
4
2018-11-04T14:35:39.000Z
2019-11-21T10:41:21.000Z
NUMBER_OF_VERTICES = 8 _COLOR = {"GREEN" : [0.0,1.0,0.0,1.0], "BLUE" : [0.0,0.0,1.0,1.0], "RED" : [1.0,0.0,0.0,1.0], "GREY" : [0.8,0.8,0.8,1.0], "BROWN" : [0.5,0.5,0.0,1.0]} _POSITIONS = [ 1.0, 1.0, 1.0, -1.0,-1.0, 1.0, -1.0, 1.0,-1.0, 1.0,-1.0,-1.0, -1.0,-1.0,-1.0, 1.0, 1.0,-1.0, 1.0,-1.0, 1.0, -1.0, 1.0, 1.0] _COLORS = (_COLOR["GREEN"]+_COLOR["BLUE"]+_COLOR["RED"]+_COLOR["BROWN"])*2 VERTICES = _POSITIONS+_COLORS INDICES = [0,1,2, 1,0,3, 2,3,0, 3,2,1, 5,4,6, 4,5,7, 7,6,4, 6,7,5]
23
74
0.349229
137
713
1.737226
0.153285
0.277311
0.365546
0.403361
0.281513
0.252101
0.201681
0.201681
0.201681
0.201681
0
0.265116
0.396914
713
31
75
23
0.288372
0
0
0
0
0
0.053221
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
1
null
1
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
8f884340f051ede74d4263c6ca8d2166dc00daaa
47
py
Python
app/oauth/__init__.py
fariszr/app
932134c2123714cf1d1b7090998fbdf27344cce0
[ "MIT" ]
5
2021-01-13T16:50:46.000Z
2021-11-29T04:01:46.000Z
app/oauth/__init__.py
fariszr/app
932134c2123714cf1d1b7090998fbdf27344cce0
[ "MIT" ]
1
2021-02-08T21:04:06.000Z
2021-02-08T21:04:06.000Z
app/oauth/__init__.py
fariszr/app
932134c2123714cf1d1b7090998fbdf27344cce0
[ "MIT" ]
4
2021-02-08T23:04:33.000Z
2022-01-05T12:02:34.000Z
from .views import authorize, token, user_info
23.5
46
0.808511
7
47
5.285714
1
0
0
0
0
0
0
0
0
0
0
0
0.12766
47
1
47
47
0.902439
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
8fc58f6812fa6483390e2642d770416f695c008e
34
py
Python
cride/circles/permissions/__init__.py
Audio10/Comparte-crite
e3500373ccb59a46aaad57b4549bcdcfd73cb19c
[ "MIT" ]
null
null
null
cride/circles/permissions/__init__.py
Audio10/Comparte-crite
e3500373ccb59a46aaad57b4549bcdcfd73cb19c
[ "MIT" ]
5
2021-04-08T21:35:51.000Z
2022-02-10T12:32:17.000Z
cride/circles/permissions/__init__.py
Audio10/Comparte-crite
e3500373ccb59a46aaad57b4549bcdcfd73cb19c
[ "MIT" ]
null
null
null
from .circles import IsCircleAdmin
34
34
0.882353
4
34
7.5
1
0
0
0
0
0
0
0
0
0
0
0
0.088235
34
1
34
34
0.967742
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
8904f09ae8d05ee82cf2b3de370afabcde1b6990
22
py
Python
Bindings/Python/__init__.py
chrisdembia/opensim-debian
50c255ce850aab252f26ac73b67bd2b78dc65cfe
[ "Apache-2.0" ]
null
null
null
Bindings/Python/__init__.py
chrisdembia/opensim-debian
50c255ce850aab252f26ac73b67bd2b78dc65cfe
[ "Apache-2.0" ]
null
null
null
Bindings/Python/__init__.py
chrisdembia/opensim-debian
50c255ce850aab252f26ac73b67bd2b78dc65cfe
[ "Apache-2.0" ]
null
null
null
from opensim import *
11
21
0.772727
3
22
5.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.181818
22
1
22
22
0.944444
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
64f496d04a1aa6d4ca5fd0a960b638bd513fe0dd
30
py
Python
wizard/__init__.py
afadhitya/IdeasApp
1d85e9fcc46b271e9c79576038ff489cd54e3842
[ "MIT" ]
null
null
null
wizard/__init__.py
afadhitya/IdeasApp
1d85e9fcc46b271e9c79576038ff489cd54e3842
[ "MIT" ]
null
null
null
wizard/__init__.py
afadhitya/IdeasApp
1d85e9fcc46b271e9c79576038ff489cd54e3842
[ "MIT" ]
null
null
null
from . import wizard_give_vote
30
30
0.866667
5
30
4.8
1
0
0
0
0
0
0
0
0
0
0
0
0.1
30
1
30
30
0.888889
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
8f3897a2f436130cde7bbe0cfd215db6a66a34a5
180
py
Python
dbaas/system/views.py
didindinn/database-as-a-service
747de31ff8546f7874ddd654af860e130afd17a0
[ "BSD-3-Clause" ]
303
2015-01-08T10:35:54.000Z
2022-02-28T08:54:06.000Z
dbaas/system/views.py
nouraellm/database-as-a-service
5e655c9347bea991b7218a01549f5e44f161d7be
[ "BSD-3-Clause" ]
124
2015-01-14T12:56:15.000Z
2022-03-22T20:45:11.000Z
dbaas/system/views.py
nouraellm/database-as-a-service
5e655c9347bea991b7218a01549f5e44f161d7be
[ "BSD-3-Clause" ]
110
2015-01-02T11:59:48.000Z
2022-02-28T08:54:06.000Z
from django.http import HttpResponse from models import CeleryHealthCheck def CeleryHealthCheckView(request): return HttpResponse(CeleryHealthCheck.get_healthcheck_string())
25.714286
67
0.85
18
180
8.388889
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.1
180
6
68
30
0.932099
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.5
0.25
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
6
56c55aa949132afdcc0c9fb2e04302eba6b77aae
20
py
Python
evi/plotting/__init__.py
jranek/EVI
7a4ec37dc847d02268241b464b296f00826c327d
[ "MIT" ]
40
2018-04-23T02:03:40.000Z
2022-02-09T14:41:03.000Z
trunk/python/gtsam_utils/__init__.py
shaolinbit/PPP-BayesTree
6f469775277a1a33447bf4c19603c796c2c63c75
[ "MIT" ]
33
2020-08-05T23:00:56.000Z
2022-03-21T22:37:03.000Z
trunk/python/gtsam_utils/__init__.py
shaolinbit/PPP-BayesTree
6f469775277a1a33447bf4c19603c796c2c63c75
[ "MIT" ]
16
2018-05-18T05:59:00.000Z
2022-03-07T13:51:18.000Z
from .plot import *
10
19
0.7
3
20
4.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.2
20
1
20
20
0.875
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
56de1acd0aa3d2632be3c5313760b68aa537535b
698
py
Python
src/back/tests/test_users.py
khamidou/kite
c049faf8522c8346c22c70f2a35a35db6b4a155d
[ "BSD-3-Clause" ]
136
2015-01-06T01:14:35.000Z
2022-01-20T17:04:52.000Z
src/back/tests/test_users.py
khamidou/kite
c049faf8522c8346c22c70f2a35a35db6b4a155d
[ "BSD-3-Clause" ]
3
2016-01-14T21:37:10.000Z
2019-04-17T02:44:08.000Z
src/back/tests/test_users.py
khamidou/kite
c049faf8522c8346c22c70f2a35a35db6b4a155d
[ "BSD-3-Clause" ]
38
2015-02-28T14:12:26.000Z
2021-01-17T21:01:02.000Z
import unittest import kite.users as users class TestUsers(unittest.TestCase): def test_path_cleanups(self): s = users.get_threads_index_folder("/home/kite/Maildirs/testuser/new/1234563.mail") self.assertEqual(s, "/home/kite/Maildirs/testuser") s = users.get_threads_index_folder("/home/kite/Maildirs/example.com/testuser/new/1234563.mail") self.assertEqual(s, "/home/kite/Maildirs/example.com/testuser") s = users.get_username_from_folder("/home/kite/Maildirs/testuser/new/1234563.mail") self.assertEqual(s, "testuser") s = users.get_username_from_folder("/home/kite/Maildirs/testuser/new/") self.assertEqual(s, "testuser")
38.777778
103
0.716332
92
698
5.282609
0.315217
0.098765
0.197531
0.18107
0.751029
0.751029
0.709877
0.709877
0.709877
0.578189
0
0.035294
0.147564
698
17
104
41.058824
0.781513
0
0
0.166667
0
0
0.378766
0.355811
0
0
0
0
0.333333
1
0.083333
false
0
0.166667
0
0.333333
0
0
0
0
null
0
1
1
0
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
713151a7d69011ada1b55f3af80fa3067c9488b9
4,489
py
Python
applygpy/tests/test_plotting.py
kant/applygpy
11dffe8d26d31b62b85ba306d4fee25ab8d32d23
[ "BSD-3-Clause" ]
null
null
null
applygpy/tests/test_plotting.py
kant/applygpy
11dffe8d26d31b62b85ba306d4fee25ab8d32d23
[ "BSD-3-Clause" ]
null
null
null
applygpy/tests/test_plotting.py
kant/applygpy
11dffe8d26d31b62b85ba306d4fee25ab8d32d23
[ "BSD-3-Clause" ]
1
2020-03-01T15:54:49.000Z
2020-03-01T15:54:49.000Z
''' Created on 30 Sep 2015 @author: Max Zwiessele ''' import matplotlib from GPy.testing.plotting_tests import flatten_axis as fl, compare_axis_dicts as cm matplotlib.use('agg') import matplotlib.pyplot as plt # @UnresolvedImport import GPy, numpy as np from applygpy.prediction import PredictionModelSparse, PredictionModel from io import StringIO import unittest class Test(unittest.TestCase): def setUp(self): self.X, self.Y = np.random.normal(0, 1, (10, 1)), np.random.normal(0, 1, (10, 1)) pass def tearDown(self): plt.close('all') def testPlotting(self): m = GPy.models.GPRegression(self.X, self.Y) p = PredictionModel(m) fig, ax1 = plt.subplots() m.plot(plot_training_data=False, ax=ax1) ax1.set_ylim(0, 1) ax1.set_xlim(-2, 2) #i1 = StringIO() #fig.savefig(i1, format='svg') #i1.seek(0) fig, ax2 = plt.subplots() p.plot(plot_training_data=False, ax=ax2) ax2.set_ylim(0, 1) ax2.set_xlim(-2, 2) #i2 = StringIO() #fig.savefig(i2, format='svg') #i2.seek(0) #self.assertEqual(i1.read(), i2.read()) cm(fl(ax1), fl(ax2)) def testPlottingSparse(self): m = GPy.models.SparseGPRegression(self.X, self.Y) p = PredictionModelSparse(m) fig, ax1 = plt.subplots() m.plot(plot_training_data=False, ax=ax1) ax1.set_ylim(0, 1) ax1.set_xlim(-2, 2) #i1 = StringIO() #fig.savefig(i1, format='svg') #i1.seek(0) fig, ax2 = plt.subplots() p.plot(plot_training_data=False, ax=ax2) ax2.set_ylim(0, 1) ax2.set_xlim(-2, 2) #i2 = StringIO() #fig.savefig(i2, format='svg') #i2.seek(0) #self.assertEqual(i1.read(), i2.read()) cm(fl(ax1), fl(ax2)) def testPlottingClass(self): m = GPy.models.GPClassification(self.X, self.Y<0) p = PredictionModel(m) fig, ax1 = plt.subplots() m.plot(plot_training_data=False, ax=ax1) ax1.set_ylim(0, 1) ax1.set_xlim(-2, 2) #i1 = StringIO() #fig.savefig(i1, format='svg') #i1.seek(0) fig, ax2 = plt.subplots() p.plot(plot_training_data=False, ax=ax2) ax2.set_ylim(0, 1) ax2.set_xlim(-2, 2) #i2 = StringIO() #fig.savefig(i2, format='svg') #i2.seek(0) #self.assertEqual(i1.read(), i2.read()) 
cm(fl(ax1), fl(ax2)) def testPlottingSparseClass(self): m = GPy.models.SparseGPClassification(self.X, self.Y<0) p = PredictionModelSparse(m) fig, ax1 = plt.subplots() m.plot(plot_training_data=False, ax=ax1) ax1.set_ylim(0, 1) ax1.set_xlim(-2, 2) #i1 = StringIO() #fig.savefig(i1, format='svg') #i1.seek(0) fig, ax2 = plt.subplots() p.plot(plot_training_data=False, ax=ax2) ax2.set_ylim(0, 1) ax2.set_xlim(-2, 2) #i2 = StringIO() #fig.savefig(i2, format='svg') #i2.seek(0) #self.assertEqual(i1.read(), i2.read()) cm(fl(ax1), fl(ax2)) def testPlottingDataNotShow(self): m = GPy.models.SparseGPRegression(self.X, self.Y) p = PredictionModelSparse(m) p.plot_data() fig, ax1 = plt.subplots() p.plot(plot_training_data=False, ax=ax1) ax1.set_ylim(0, 1) ax1.set_xlim(-2, 2) #i1 = StringIO() #fig.savefig(i1, format='svg') #i1.seek(0) fig, ax2 = plt.subplots() p.plot(plot_training_data=True, ax=ax2) ax2.set_ylim(0, 1) ax2.set_xlim(-2, 2) #i2 = StringIO() #fig.savefig(i2, format='svg') #i2.seek(0) cm(fl(ax1), fl(ax2)) m = GPy.models.GPRegression(self.X, self.Y) p = PredictionModel(m) p.plot_data() fig, ax1 = plt.subplots() p.plot(plot_training_data=False, ax=ax1) ax1.set_ylim(0, 1) ax1.set_xlim(-2, 2) #i1 = StringIO() #fig.savefig(i1, format='svg') #i1.seek(0) fig, ax2 = plt.subplots() p.plot(plot_training_data=True, ax=ax2) ax2.set_ylim(0, 1) ax2.set_xlim(-2, 2) #i2 = StringIO() #fig.savefig(i2, format='svg') #i2.seek(0) cm(fl(ax1), fl(ax2)) if __name__ == "__main__": #import sys;sys.argv = ['', 'Test.testPlotting'] unittest.main()
27.539877
89
0.559145
621
4,489
3.942029
0.148148
0.011438
0.078431
0.098039
0.75817
0.75817
0.749183
0.73366
0.73366
0.73366
0
0.056928
0.287815
4,489
162
90
27.709877
0.708789
0.203163
0
0.741573
0
0
0.003968
0
0
0
0
0
0
1
0.078652
false
0.011236
0.078652
0
0.168539
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
854e3045cbef08da50b69b6f63ad90f75ebc0fb5
10,987
py
Python
p8_test/test_local/test_eta5_execution/test_companion.py
crazynayan/tpf1
c81a15d88d4d1f3ed2cf043c90782a4b8509ef14
[ "MIT" ]
1
2020-01-27T10:10:40.000Z
2020-01-27T10:10:40.000Z
p8_test/test_local/test_eta5_execution/test_companion.py
crazynayan/tpf1
c81a15d88d4d1f3ed2cf043c90782a4b8509ef14
[ "MIT" ]
4
2019-08-23T05:24:23.000Z
2021-09-16T10:05:55.000Z
p8_test/test_local/test_eta5_execution/test_companion.py
crazynayan/tpf1
c81a15d88d4d1f3ed2cf043c90782a4b8509ef14
[ "MIT" ]
null
null
null
from base64 import b64encode from p1_utils.data_type import DataType from p8_test.test_local.test_eta5_execution import NameGeneral, hfax_2812_gld, fqtv_gld, itin_2811_2812, tr1gaa class Companion(NameGeneral): def setUp(self) -> None: super().setUp() self.test_data.add_pnr_element(["1ZAVERI"], "name") self.test_data.add_tpfdf(tr1gaa, "40", "TR1GAA") self.test_data.set_field("WA0ET6", bytes([self.wa0hfx])) self.fqtv_exp_key = [ { "PR00_60_FQT_CXR": b64encode(DataType("C", input="AA").to_bytes()).decode(), "PR00_60_FQT_FTN": b64encode(DataType("C", input="NKE9088").to_bytes()).decode(), "PR00_60_FQT_TYP": b64encode(DataType("X", input="60").to_bytes()).decode(), # EXP and # KEY }, ] def test_fqtv_itin_match_award_not_exp_key_ETK2(self) -> None: self.test_data.add_pnr_element(hfax_2812_gld, "hfax") self.test_data.add_pnr_field_data(fqtv_gld, "fqtv", "DGHWCL") self.test_data.add_pnr_field_data(itin_2811_2812, "itin", "DGHWCL") test_data = self.tpf_server.run("ETA5", self.test_data) self.output = test_data.output self.assertEqual(self.ETK2_END, self.output.last_line, self.output.last_node) self.assertEqual("E6D7F8F9", test_data.get_field("EBX000")) self.assertEqual("60", test_data.get_field("EBRS01")) self.assertEqual(116, self.output.regs["R6"]) def test_fqtv_no_match_award_not_exp_key_ETK2(self) -> None: self.test_data.add_pnr_element(hfax_2812_gld, "hfax") self.test_data.add_pnr_field_data(self.fqtv_exp_key, "fqtv", "DGHWCL") self.test_data.add_pnr_field_data(itin_2811_2812, "itin", "DGHWCL") test_data = self.tpf_server.run("ETA5", self.test_data) self.output = test_data.output self.assertEqual(self.ETK2_END, self.output.last_line) self.assertNotEqual("E6D7F8F9", test_data.get_field("EBX000")) self.assertEqual("60", test_data.get_field("EBRS01")) self.assertEqual(116, self.output.regs["R6"]) def test_itin_no_match_award_not_exp_key_ETK2(self) -> None: self.test_data.add_pnr_element(hfax_2812_gld, "hfax") self.test_data.add_pnr_field_data(fqtv_gld, "fqtv", "DGHWCL") 
test_data = self.tpf_server.run("ETA5", self.test_data) self.output = test_data.output self.assertEqual(self.ETK2_END, self.output.last_line) self.assertNotEqual("E6D7F8F9", test_data.get_field("EBX000")) self.assertEqual("60", test_data.get_field("EBRS01")) self.assertEqual(116, self.output.regs["R6"]) def test_date_error_ETK2(self) -> None: hfax_2812_gld_date_error = ["SSRFQTUAA2811Y32OCTDFW ORD 0510GLD*DGHWCL RR "] self.test_data.add_pnr_element(hfax_2812_gld_date_error, "hfax") self.test_data.add_pnr_field_data(fqtv_gld, "fqtv", "DGHWCL") test_data = self.tpf_server.run("ETA5", self.test_data) self.output = test_data.output self.assertEqual(self.ETK2_END, self.output.last_line) self.assertNotEqual("E6D7F8F9", test_data.get_field("EBX000")) self.assertEqual("60", test_data.get_field("EBRS01")) self.assertEqual(116, self.output.regs["R6"]) def test_fqtv_itin_match_no_award_exp_ETAW(self) -> None: hfax_2811_exp = ["SSRFQTUAA2811Y20OCTDFW ORD 0510EXP*DGHWCL RR "] self.test_data.add_pnr_element(hfax_2811_exp, "hfax") self.test_data.add_pnr_field_data(self.fqtv_exp_key, "fqtv", "DGHWCL") self.test_data.add_pnr_field_data(itin_2811_2812, "itin", "DGHWCL") test_data = self.tpf_server.run("ETA5", self.test_data) self.output = test_data.output self.assertEqual(self.IGR1_END, self.output.last_line) self.assertEqual("E6D7F8F9", test_data.get_field("EBX000")) self.assertEqual("01", test_data.get_field("WA0PTY")) def test_fqtv_itin_match_award_exp_ETAW(self) -> None: hfax_2812_exp = ["SSRFQTUAA2812Y20OCTDFW ORD 0510EXP*DGHWCL RR "] self.test_data.add_pnr_element(hfax_2812_exp, "hfax") self.test_data.add_pnr_field_data(self.fqtv_exp_key, "fqtv", "DGHWCL") self.test_data.add_pnr_field_data(itin_2811_2812, "itin", "DGHWCL") test_data = self.tpf_server.run("ETA5", self.test_data) self.output = test_data.output self.assertEqual(self.IGR1_END, self.output.last_line) self.assertEqual("E6D7F8F9", test_data.get_field("EBX000")) self.assertEqual("01", test_data.get_field("WA0PTY")) 
def test_fqtv_itin_match_award_key_ETAW(self) -> None: hfax_2812_key = ["SSRFQTUAA2812Y20OCTDFW ORD 0510KEY*DGHWCL RR "] self.test_data.add_pnr_element(hfax_2812_key, "hfax") self.test_data.add_pnr_field_data(self.fqtv_exp_key, "fqtv", "DGHWCL") self.test_data.add_pnr_field_data(itin_2811_2812, "itin", "DGHWCL") test_data = self.tpf_server.run("ETA5", self.test_data) self.output = test_data.output self.assertEqual(self.IGR1_END, self.output.last_line) self.assertEqual("E6D7F8F9", test_data.get_field("EBX000")) self.assertEqual("01", test_data.get_field("WA0PTY")) def test_no_tr1gaa_ETAW(self) -> None: self.test_data.add_pnr_element(hfax_2812_gld, "hfax") self.test_data.add_pnr_field_data(fqtv_gld, "fqtv", "DGHWCL") self.test_data.add_pnr_field_data(itin_2811_2812, "itin", "DGHWCL") self.test_data.tpfdf = list() test_data = self.tpf_server.run("ETA5", self.test_data) self.output = test_data.output self.assertEqual(self.IGR1_END, self.output.last_line) self.assertNotEqual("E6D7F8F9", test_data.get_field("EBX000")) self.assertEqual("01", test_data.get_field("WA0PTY")) def test_tr1gaa_error_ETAW(self) -> None: self.test_data.add_pnr_element(hfax_2812_gld, "hfax") self.test_data.errors.append("ETA92100.1") test_data = self.tpf_server.run("ETA5", self.test_data) self.output = test_data.output self.assertEqual(self.IGR1_END, self.output.last_line) self.assertNotEqual("E6D7F8F9", test_data.get_field("EBX000")) self.assertEqual("01", test_data.get_field("WA0PTY")) def test_dbifb_error_ETAW(self) -> None: self.test_data.add_pnr_element(hfax_2812_gld, "hfax") self.test_data.errors.append("ETA92300.1") test_data = self.tpf_server.run("ETA5", self.test_data) self.output = test_data.output self.assertEqual(self.IGR1_END, self.output.last_line) self.assertNotEqual("E6D7F8F9", test_data.get_field("EBX000")) self.assertEqual("01", test_data.get_field("WA0PTY")) def test_pnrcc_error_ETAW(self) -> None: self.test_data.add_pnr_element(hfax_2812_gld, "hfax") 
self.test_data.errors.append("ETA92300.10") test_data = self.tpf_server.run("ETA5", self.test_data) self.output = test_data.output self.assertEqual(self.IGR1_END, self.output.last_line) self.assertNotEqual("E6D7F8F9", test_data.get_field("EBX000")) self.assertEqual("01", test_data.get_field("WA0PTY")) def test_prp1_error_ETAW(self) -> None: self.test_data.add_pnr_element(hfax_2812_gld, "hfax") self.test_data.errors.append("PRP1ERR") test_data = self.tpf_server.run("ETA5", self.test_data) self.output = test_data.output self.assertEqual(self.IGR1_END, self.output.last_line) self.assertNotEqual("E6D7F8F9", test_data.get_field("EBX000")) self.assertEqual("01", test_data.get_field("WA0PTY")) def test_eta9pdwk_allocate_error_ETAW(self) -> None: self.test_data.add_pnr_element(hfax_2812_gld, "hfax") self.test_data.errors.append("ETA92300.27") test_data = self.tpf_server.run("ETA5", self.test_data) self.output = test_data.output self.assertEqual(self.IGR1_END, self.output.last_line) self.assertNotEqual("E6D7F8F9", test_data.get_field("EBX000")) self.assertEqual("01", test_data.get_field("WA0PTY")) def test_fqtv_error_ETAW(self) -> None: self.test_data.add_pnr_element(hfax_2812_gld, "hfax") self.test_data.errors.append("ETA92400.1") test_data = self.tpf_server.run("ETA5", self.test_data) self.output = test_data.output self.assertEqual(self.IGR1_END, self.output.last_line) self.assertNotEqual("E6D7F8F9", test_data.get_field("EBX000")) self.assertEqual("01", test_data.get_field("WA0PTY")) def test_itin_error_ETAW(self) -> None: self.test_data.add_pnr_element(hfax_2812_gld, "hfax") self.test_data.add_pnr_field_data(fqtv_gld, "fqtv", "DGHWCL") self.test_data.errors.append("ETA92500.1") test_data = self.tpf_server.run("ETA5", self.test_data) self.output = test_data.output self.assertEqual(self.IGR1_END, self.output.last_line) self.assertNotEqual("E6D7F8F9", test_data.get_field("EBX000")) self.assertEqual("01", test_data.get_field("WA0PTY")) def 
test_chkaward_allocate_error_ETAW(self) -> None: self.test_data.add_pnr_element(hfax_2812_gld, "hfax") self.test_data.add_pnr_field_data(fqtv_gld, "fqtv", "DGHWCL") self.test_data.add_pnr_field_data(itin_2811_2812, "itin", "DGHWCL") self.test_data.errors.append("ETA92500.11") test_data = self.tpf_server.run("ETA5", self.test_data) self.output = test_data.output self.assertEqual(self.IGR1_END, self.output.last_line, self.output.last_node) self.assertNotEqual("E6D7F8F9", test_data.get_field("EBX000")) self.assertEqual("01", test_data.get_field("WA0PTY")) def test_chkaward_loadadd_error_ETAW(self) -> None: self.test_data.add_pnr_element(hfax_2812_gld, "hfax") self.test_data.add_pnr_field_data(fqtv_gld, "fqtv", "DGHWCL") self.test_data.add_pnr_field_data(itin_2811_2812, "itin", "DGHWCL") self.test_data.errors.append("ETA92500.24") test_data = self.tpf_server.run("ETA5", self.test_data) self.output = test_data.output self.assertEqual(self.IGR1_END, self.output.last_line) self.assertEqual("E6D7F8F9", test_data.get_field("EBX000")) self.assertEqual("01", test_data.get_field("WA0PTY")) def test_fqtv_itin_match_award_error_ETAW(self) -> None: self.test_data.add_pnr_element(hfax_2812_gld, "hfax") self.test_data.add_pnr_field_data(fqtv_gld, "fqtv", "DGHWCL") self.test_data.add_pnr_field_data(itin_2811_2812, "itin", "DGHWCL") self.test_data.errors.append("WP89ERR") test_data = self.tpf_server.run("ETA5", self.test_data) self.output = test_data.output self.assertEqual(self.IGR1_END, self.output.last_line) self.assertEqual("E6D7F8F9", test_data.get_field("EBX000")) self.assertEqual("01", test_data.get_field("WA0PTY"))
53.595122
111
0.693092
1,557
10,987
4.550417
0.07386
0.161468
0.120254
0.086803
0.905434
0.896683
0.878758
0.875935
0.875935
0.875512
0
0.06083
0.175571
10,987
204
112
53.857843
0.721351
0.001092
0
0.697802
0
0
0.103901
0.00802
0
0
0
0
0.318681
1
0.104396
false
0
0.016484
0
0.126374
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
85516929ae1ee9a6943fa0bece553a5694cc56f7
114
py
Python
ambra_sdk/service/entrypoints/order.py
dyens/sdk-python
24bf05268af2832c70120b84fd53bf44862cffec
[ "Apache-2.0" ]
null
null
null
ambra_sdk/service/entrypoints/order.py
dyens/sdk-python
24bf05268af2832c70120b84fd53bf44862cffec
[ "Apache-2.0" ]
null
null
null
ambra_sdk/service/entrypoints/order.py
dyens/sdk-python
24bf05268af2832c70120b84fd53bf44862cffec
[ "Apache-2.0" ]
null
null
null
from ambra_sdk.service.entrypoints.generated.order import Order as GOrder class Order(GOrder): """Order."""
19
73
0.745614
15
114
5.6
0.733333
0
0
0
0
0
0
0
0
0
0
0
0.131579
114
5
74
22.8
0.848485
0.052632
0
0
1
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
85af949785e88e7e2d5d133930dcde89f5847202
141
py
Python
crosscheck/__main__.py
TinyTheBrontosaurus/nhl-ai
0209388919ef8795c8f3eff14675b3b4df3adb31
[ "MIT" ]
4
2022-03-25T18:02:43.000Z
2022-03-25T18:31:08.000Z
crosscheck/__main__.py
TinyTheBrontosaurus/nhl-ai
0209388919ef8795c8f3eff14675b3b4df3adb31
[ "MIT" ]
5
2020-02-09T21:39:33.000Z
2022-03-11T23:48:47.000Z
crosscheck/__main__.py
TinyTheBrontosaurus/nhl-ai
0209388919ef8795c8f3eff14675b3b4df3adb31
[ "MIT" ]
null
null
null
import sys import crosscheck.main_train # Sort out relative imports if __name__ == "__main__": crosscheck.main_train.main(sys.argv[1:])
20.142857
44
0.758865
20
141
4.85
0.65
0.28866
0.391753
0
0
0
0
0
0
0
0
0.008197
0.134752
141
6
45
23.5
0.786885
0.177305
0
0
0
0
0.070175
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
a48f743cccce769cc8388e6b6b97148cbcedd03e
208
py
Python
crm/admin.py
baniasbaabe/happy-qr
bf44ac19306ea6405cc7c9a100e6f83afca125b4
[ "MIT" ]
1
2021-01-23T21:42:10.000Z
2021-01-23T21:42:10.000Z
crm/admin.py
baniasbaabe/happy-qr
bf44ac19306ea6405cc7c9a100e6f83afca125b4
[ "MIT" ]
null
null
null
crm/admin.py
baniasbaabe/happy-qr
bf44ac19306ea6405cc7c9a100e6f83afca125b4
[ "MIT" ]
null
null
null
from django.contrib import admin # Register your models here. from crm.models import * admin.site.register(Mitarbeiter) admin.site.register(Kunde) admin.site.register(Auftrag) admin.site.register(Rechnung)
20.8
32
0.807692
29
208
5.793103
0.517241
0.214286
0.404762
0
0
0
0
0
0
0
0
0
0.091346
208
9
33
23.111111
0.888889
0.125
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
f100d0c4fadeb7cb6dba7109d6c79de516bd18b1
157
py
Python
utils/__init__.py
Johnson-yue/lffont
f31f5a1cd6a075449a0f18aaafd945d373121e15
[ "MIT" ]
98
2020-09-24T01:05:19.000Z
2022-03-04T16:13:42.000Z
utils/__init__.py
Johnson-yue/lffont
f31f5a1cd6a075449a0f18aaafd945d373121e15
[ "MIT" ]
26
2020-09-24T07:36:37.000Z
2022-02-08T12:36:49.000Z
utils/__init__.py
Johnson-yue/lffont
f31f5a1cd6a075449a0f18aaafd945d373121e15
[ "MIT" ]
20
2020-09-24T02:29:42.000Z
2022-01-23T15:35:28.000Z
""" LF-Font Copyright (c) 2020-present NAVER Corp. MIT license """ from .utils import * from .visualize import * from .writer import * from .logger import *
15.7
38
0.713376
22
157
5.090909
0.727273
0.267857
0
0
0
0
0
0
0
0
0
0.030534
0.165605
157
9
39
17.444444
0.824427
0.369427
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
f1144d3759ece7570075f852d34e78c77630899b
1,067
py
Python
pyenv/lib/python3.6/_bootlocale.py
ronald-rgr/ai-chatbot-smartguide
c9c830feb6b66c2e362f8fb5d147ef0c4f4a08cf
[ "Apache-2.0" ]
null
null
null
pyenv/lib/python3.6/_bootlocale.py
ronald-rgr/ai-chatbot-smartguide
c9c830feb6b66c2e362f8fb5d147ef0c4f4a08cf
[ "Apache-2.0" ]
3
2020-03-23T18:01:51.000Z
2021-03-19T23:15:15.000Z
pyenv/lib/python3.6/_bootlocale.py
ronald-rgr/ai-chatbot-smartguide
c9c830feb6b66c2e362f8fb5d147ef0c4f4a08cf
[ "Apache-2.0" ]
null
null
null
XSym 0078 8f578bce46b7c425a599b77ac92178fd /Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/_bootlocale.py
213.4
945
0.099344
15
1,067
7
0.933333
0
0
0
0
0
0
0
0
0
0
0.237288
0.88941
1,067
5
945
213.4
0.652542
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
1
0
1
0
1
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
6
f11ce820f3087740f7751e1d65a57a6850f074ea
71
py
Python
Python/CodingBat/make_pi.py
dvt32/cpp-journey
afd7db7a1ad106c41601fb09e963902187ae36e6
[ "MIT" ]
1
2018-05-24T11:30:05.000Z
2018-05-24T11:30:05.000Z
Python/CodingBat/make_pi.py
dvt32/cpp-journey
afd7db7a1ad106c41601fb09e963902187ae36e6
[ "MIT" ]
null
null
null
Python/CodingBat/make_pi.py
dvt32/cpp-journey
afd7db7a1ad106c41601fb09e963902187ae36e6
[ "MIT" ]
2
2017-08-11T06:53:30.000Z
2017-08-29T12:07:52.000Z
# http://codingbat.com/prob/p113659 def make_pi(): return [3, 1, 4]
14.2
35
0.647887
12
71
3.75
1
0
0
0
0
0
0
0
0
0
0
0.15
0.15493
71
4
36
17.75
0.6
0.464789
0
0
0
0
0
0
0
0
0
0
0
1
0.5
true
0
0
0.5
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
1
1
0
0
6
f11ce9c0f8865ea4ed913621172075ad20968b0c
264
py
Python
fds/galaxy_fds_client_exception.py
XiaoMi/galaxy-fds-sdk-python
fd6f0203d879effc9db853b2691f0fbb5a46639b
[ "Apache-2.0" ]
42
2015-07-23T07:02:06.000Z
2022-03-30T09:08:30.000Z
fds/galaxy_fds_client_exception.py
XiaoMi/galaxy-fds-sdk-python
fd6f0203d879effc9db853b2691f0fbb5a46639b
[ "Apache-2.0" ]
3
2017-02-08T09:36:37.000Z
2020-08-03T02:04:27.000Z
fds/galaxy_fds_client_exception.py
XiaoMi/galaxy-fds-sdk-python
fd6f0203d879effc9db853b2691f0fbb5a46639b
[ "Apache-2.0" ]
24
2016-04-01T12:30:23.000Z
2022-03-29T02:04:35.000Z
class Error(Exception): pass class GalaxyFDSClientException(Error): def __init__(self, message): self.message = message def __str__(self): if self.message: return super.__str__(self) + ": " + str(self.message) return super.__str__(self)
20.307692
59
0.689394
31
264
5.354839
0.419355
0.26506
0.204819
0.26506
0.349398
0.349398
0
0
0
0
0
0
0.193182
264
12
60
22
0.779343
0
0
0
0
0
0.007576
0
0
0
0
0
0
1
0.222222
false
0.111111
0
0
0.666667
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
6
f1523cfec8010b3f2f3106cfdcde1e212ded6321
169
py
Python
pydm/__init__.py
klauer/pydm
e26aad58a7a0eb6f7321c61aa1dace646ff652bd
[ "BSD-3-Clause-LBNL" ]
null
null
null
pydm/__init__.py
klauer/pydm
e26aad58a7a0eb6f7321c61aa1dace646ff652bd
[ "BSD-3-Clause-LBNL" ]
null
null
null
pydm/__init__.py
klauer/pydm
e26aad58a7a0eb6f7321c61aa1dace646ff652bd
[ "BSD-3-Clause-LBNL" ]
null
null
null
from .application import PyDMApplication from .display_module import Display from ._version import get_versions __version__ = get_versions()['version'] del get_versions
28.166667
40
0.840237
21
169
6.333333
0.47619
0.24812
0.270677
0
0
0
0
0
0
0
0
0
0.100592
169
5
41
33.8
0.875
0
0
0
0
0
0.04142
0
0
0
0
0
0
1
0
false
0
0.6
0
0.6
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
6
f16d7c3c4abfe2a804e295bc8cdc0e175c2e4b80
154
py
Python
nonebot_plugin_bam/database/tables/__init__.py
7sDream/nonebot_plugin_bam
9d19856661a75484440efff8d77094390230f4c9
[ "MIT" ]
4
2021-02-08T16:18:12.000Z
2021-12-28T07:13:51.000Z
nonebot_plugin_bam/database/tables/__init__.py
7sDream/nonebot_plugin_bam
9d19856661a75484440efff8d77094390230f4c9
[ "MIT" ]
null
null
null
nonebot_plugin_bam/database/tables/__init__.py
7sDream/nonebot_plugin_bam
9d19856661a75484440efff8d77094390230f4c9
[ "MIT" ]
null
null
null
from .group import Group from .bilibili_user import BilibiliUser from .bilibili_user_status import BilibiliUserStatus from .follow_link import FollowLink
30.8
52
0.87013
20
154
6.5
0.55
0.184615
0.246154
0
0
0
0
0
0
0
0
0
0.103896
154
4
53
38.5
0.942029
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
f18b6ff773ae02cd6832c359726b64fe98a2e03e
27
py
Python
src/euler_python_package/euler_python/medium/p399.py
wilsonify/euler
5214b776175e6d76a7c6d8915d0e062d189d9b79
[ "MIT" ]
null
null
null
src/euler_python_package/euler_python/medium/p399.py
wilsonify/euler
5214b776175e6d76a7c6d8915d0e062d189d9b79
[ "MIT" ]
null
null
null
src/euler_python_package/euler_python/medium/p399.py
wilsonify/euler
5214b776175e6d76a7c6d8915d0e062d189d9b79
[ "MIT" ]
null
null
null
def problem399(): pass
9
17
0.62963
3
27
5.666667
1
0
0
0
0
0
0
0
0
0
0
0.15
0.259259
27
2
18
13.5
0.7
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
1
0
0
0
0
0
6
2d1c94a1d48ae7ad0ec8af271a9a8d7c4142ae0c
10,904
py
Python
commands.py
xingyousong/ufom
05684abbaa25a668d9da59c73953f64351717328
[ "MIT" ]
4
2021-07-08T01:33:26.000Z
2021-12-14T06:49:18.000Z
commands.py
xingyousong/ufom
05684abbaa25a668d9da59c73953f64351717328
[ "MIT" ]
null
null
null
commands.py
xingyousong/ufom
05684abbaa25a668d9da59c73953f64351717328
[ "MIT" ]
1
2021-07-07T20:00:19.000Z
2021-07-07T20:00:19.000Z
all_h_params = [] # Omniglot FOML for seed in [0, 1, 2]: for n_classes in [40]: for inner_iters in [10]: for prob in [0.0]: h_params = { "config.dataset": "omniglot", "config.seed": seed, "config.shots": 1, "config.classes": n_classes, "config.inner_batch": n_classes, "config.inner_iters": inner_iters, "config.meta_step": 0.1, "config.meta_batch": 5, "config.meta_iters": 200000, "config.eval_batch": n_classes, "config.eval_iters": inner_iters, "config.learning_rate": 0.005, "config.train_shots": None, "config.meta_step_final": 0, "config.checkpoint": "ckpt_o1{0}_FOML_prob={1}_ii={2}_seed={3}".format(n_classes, prob, inner_iters, seed), "config.mode": 'FOML', "config.exact_prob": prob, "config.clip_grads": (inner_iters == 10), "config.clip_grad_value": 0.1, "config.on_resampling": False } all_h_params.append(h_params) ''' # CIFAR100 FOML for seed in [0]: for n_classes in [20, 30]: #[5, 10, 15]: for inner_iters in [10]: for prob in [0.0, -1.0, 1.0]: h_params = { "config.dataset": "cifar100", "config.seed": seed, "config.shots": 1, "config.classes": n_classes, "config.inner_batch": n_classes, "config.inner_iters": inner_iters, "config.meta_step": 0.1, "config.meta_batch": 5, "config.meta_iters": 40000, "config.eval_batch": n_classes, "config.eval_iters": inner_iters, "config.learning_rate": 0.005, "config.train_shots": None, "config.meta_step_final": 0, "config.checkpoint": "ckpt_c1{0}_FOML_prob={1}_ii={2}_seed={3}".format(n_classes, prob, inner_iters, seed), "config.mode": 'FOML', "config.exact_prob": prob, "config.clip_grads": True, "config.clip_grad_value": 0.1, "config.on_resampling": False } all_h_params.append(h_params) # CIFAR100 Reptile for n_classes in [20, 30]:#[5, 10, 15]: for learning_rate in [0.0005]: for seed in [0]: h_params = { "config.dataset": "cifar100", "config.seed": 0, "config.shots": 1, "config.classes": n_classes, "config.inner_batch": n_classes, "config.inner_iters": 10, "config.meta_step": 0.0005/learning_rate, "config.meta_batch": 5, 
"config.meta_iters": 40000, "config.eval_batch": 10, "config.eval_iters": 50, "config.learning_rate": learning_rate, "config.meta_step_final": 0, "config.train_shots": 10, "config.checkpoint": "ckpt_c1{0}_Reptile_lr={1}_seed={2}".format(n_classes, learning_rate, seed), "config.mode": 'Reptile', "config.adam": True } all_h_params.append(h_params) ''' ''' # FOML Mini-ImageNet. for seed in [0]: for n_shots, n_classes, eval_batch in [(5, 5, 15), (1, 5, 5), (1, 15, 10)]: for prob in [0.0, -1.0, 1.0]: h_params = { "config.dataset": "miniimagenet", "config.seed": seed, "config.shots": n_shots, "config.classes": n_classes, "config.inner_batch": 10, "config.inner_iters": 8, "config.meta_step": 1, "config.meta_batch": 5, "config.meta_iters": 100000, "config.eval_batch": eval_batch, "config.eval_iters": 8, "config.learning_rate": 0.001, "config.meta_step_final": 0, "config.checkpoint": "ckpt_m{0}{1}_FOML_pr={2}_seed={3}".format(n_shots, n_classes, prob, seed), "config.mode": "FOML", "config.exact_prob": prob, "config.clip_grads": True, "config.clip_grad_value": 0.1, "config.train_shots": None, "config.on_resampling": False } all_h_params.append(h_params) # Mini-ImageNet Reptile for n_shots, n_classes, eval_batch in [(5, 5, 15), (1, 5, 5), (1, 15, 10)]: for seed in [0]: h_params = { "config.dataset": "miniimagenet", "config.seed": seed, "config.shots": n_shots, "config.classes": n_classes, "config.inner_batch": 10, "config.inner_iters": 8, "config.meta_step": 1, "config.meta_batch": 5, "config.meta_iters": 100000, "config.eval_batch": eval_batch, "config.eval_iters": 50, "config.learning_rate": 0.001, "config.meta_step_final": 0, "config.train_shots": 15, "config.checkpoint": "ckpt_m{0}{1}_Reptile_seed={2}".format(n_shots, n_classes, seed), "config.mode": 'Reptile', "config.adam": True } all_h_params.append(h_params) ''' ''' # Omniglot FOML for seed in [0]: for n_classes in [50, 60]:#[5, 10, 15, 20, 30, 40]: for inner_iters in [10]: for prob in [0.0, -1.0, 1.0]: h_params = { 
"config.dataset": "omniglot", "config.seed": seed, "config.shots": 1, "config.classes": n_classes, "config.inner_batch": n_classes, "config.inner_iters": inner_iters, "config.meta_step": 0.1, "config.meta_batch": 5, "config.meta_iters": 200000, "config.eval_batch": n_classes, "config.eval_iters": inner_iters, "config.learning_rate": 0.005, "config.train_shots": None, "config.meta_step_final": 0, "config.checkpoint": "ckpt_o1{0}_FOML_prob={1}_ii={2}_seed={3}".format(n_classes, prob, inner_iters, seed), "config.mode": 'FOML', "config.exact_prob": prob, "config.clip_grads": (inner_iters == 10), "config.clip_grad_value": 0.1, "config.on_resampling": False } all_h_params.append(h_params) # Omniglot Reptile for n_classes in [50, 60]:#[5, 10, 15, 20, 30, 40]: for learning_rate in [0.0005]: for seed in [0]: h_params = { "config.dataset": "omniglot", "config.seed": seed, "config.shots": 1, "config.classes": n_classes, "config.inner_batch": n_classes, "config.inner_iters": 10, "config.meta_step": 0.0005/learning_rate, "config.meta_batch": 5, "config.meta_iters": 200000, "config.eval_batch": 10, "config.eval_iters": 50, "config.learning_rate": learning_rate, "config.meta_step_final": 0, "config.train_shots": 10, "config.checkpoint": "ckpt_o1{0}_Reptile_lr={1}_seed={2}".format(n_classes, learning_rate, seed), "config.mode": 'Reptile', "config.adam": True } all_h_params.append(h_params) ''' ''' # FOML Mini-ImageNet. 
for seed in [0, 1, 2]: for learning_rate in [0.001]: for n_classes in [10]: for inner_iters in [8]: for prob in [1.0]: #[0.0, 0.2, 0.4, 0.6, 0.8]: h_params = { "config.dataset": "miniimagenet", "config.seed": seed, "config.shots": 1, "config.classes": n_classes, "config.inner_batch": 10, "config.inner_iters": inner_iters, "config.meta_step": 0.001/learning_rate, "config.meta_batch": 5, "config.meta_iters": int(100000*45/(9 + 36*prob)), "config.eval_batch": 10, "config.eval_iters": inner_iters, "config.learning_rate": learning_rate, "config.meta_step_final": 0, "config.checkpoint": "ckpt_m1{0}_FOML_pr={1}_lr={2}_ii={3}_seed={4}".format( n_classes, prob, learning_rate, inner_iters, seed), "config.mode": "FOML", "config.exact_prob": prob, "config.clip_grads": (inner_iters == 10), "config.clip_grad_value": 0.1, "config.train_shots": None, "config.on_resampling": False } all_h_params.append(h_params) # Mini-ImageNet Reptile for n_classes in [10]: for learning_rate in [0.001]: for seed in [0, 1, 2]: h_params = { "config.dataset": "miniimagenet", "config.seed": seed, "config.shots": 1, "config.classes": n_classes, "config.inner_batch": 10, "config.inner_iters": 8, "config.meta_step": 0.001/learning_rate, "config.meta_batch": 5, "config.meta_iters": int(100000*45/8), "config.eval_batch": 5, "config.eval_iters": 50, "config.learning_rate": learning_rate, "config.meta_step_final": 0, "config.train_shots": 15, "config.checkpoint": "ckpt_m1{0}_Reptile_lr={1}_seed={2}".format(n_classes, learning_rate, seed), "config.mode": 'Reptile', "config.adam": True } all_h_params.append(h_params) '''
42.929134
131
0.460473
1,155
10,904
4.10303
0.066667
0.075965
0.053176
0.05613
0.969825
0.954843
0.946191
0.928888
0.903144
0.895126
0
0.060895
0.41416
10,904
253
132
43.098814
0.680964
0.001192
0
0
0
0
0.278618
0.060475
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
747417389c71cff93abb3c57f5fc87081db25816
8,503
py
Python
tests/test_places_api.py
baffolobill/HerePy
c6655e9dfde7a5888cc231d7f9f9e8a888f54dcd
[ "MIT" ]
null
null
null
tests/test_places_api.py
baffolobill/HerePy
c6655e9dfde7a5888cc231d7f9f9e8a888f54dcd
[ "MIT" ]
null
null
null
tests/test_places_api.py
baffolobill/HerePy
c6655e9dfde7a5888cc231d7f9f9e8a888f54dcd
[ "MIT" ]
null
null
null
#!/usr/bin/env python import os import sys import io import unittest import responses import codecs import herepy class PlacesApiTest(unittest.TestCase): def setUp(self): api = herepy.PlacesApi('app_id', 'app_code') self._api = api def test_initiation(self): self.assertIsInstance(self._api, herepy.PlacesApi) self.assertEqual(self._api._app_id, 'app_id') self.assertEqual(self._api._app_code, 'app_code') self.assertEqual(self._api._base_url, 'https://places.cit.api.here.com/places/v1/') @responses.activate def test_onebox_search_whensucceed(self): with open('testdata/models/places_api.json', 'r') as f: expectedResponse = f.read() responses.add(responses.GET, 'https://places.cit.api.here.com/places/v1/discover/search', expectedResponse, status=200) response = self._api.onebox_search([37.7905, -122.4107], 'restaurant') self.assertTrue(response) self.assertIsInstance(response, herepy.PlacesResponse) @responses.activate def test_onebox_search_whenerroroccured(self): with open('testdata/models/places_api_error.json', 'r') as f: expectedResponse = f.read() responses.add(responses.GET, 'https://places.cit.api.here.com/places/v1/discover/search', expectedResponse, status=200) with self.assertRaises(herepy.HEREError): self._api.onebox_search([37.7905, -122.4107], '') @responses.activate def test_places_at_whensucceed(self): with open('testdata/models/places_api.json', 'r') as f: expectedResponse = f.read() responses.add(responses.GET, 'https://places.cit.api.here.com/places/v1/discover/explore', expectedResponse, status=200) response = self._api.places_at([37.7905, -122.4107]) self.assertTrue(response) self.assertIsInstance(response, herepy.PlacesResponse) @responses.activate def test_places_at_whenerroroccured(self): with open('testdata/models/places_api_error.json', 'r') as f: expectedResponse = f.read() responses.add(responses.GET, 'https://places.cit.api.here.com/places/v1/discover/explore', expectedResponse, status=200) with self.assertRaises(herepy.HEREError): 
self._api.places_at([-9999.0, -9999.0]) @responses.activate def test_category_places_at_whensucceed(self): with open('testdata/models/places_api.json', 'r') as f: expectedResponse = f.read() responses.add(responses.GET, 'https://places.cit.api.here.com/places/v1/discover/explore', expectedResponse, status=200) response = self._api.category_places_at([37.7905, -122.4107], [herepy.PlacesCategory.eat_drink]) self.assertTrue(response) self.assertIsInstance(response, herepy.PlacesResponse) @responses.activate def test_category_places_at_whenerroroccured(self): with open('testdata/models/places_api_error.json', 'r') as f: expectedResponse = f.read() responses.add(responses.GET, 'https://places.cit.api.here.com/places/v1/discover/explore', expectedResponse, status=200) with self.assertRaises(herepy.HEREError): self._api.category_places_at([-9999.0, -9999.0], [herepy.PlacesCategory.eat_drink]) def test_category_places_at_withoutnocategories(self): with self.assertRaises(Exception) as context: self._api.category_places_at([37.7905, -122.4107]) self.assertTrue('category_places_at function requires category types!' 
in str(context.exception)) @responses.activate def test_nearby_places_whensucceed(self): with open('testdata/models/places_api.json', 'r') as f: expectedResponse = f.read() responses.add(responses.GET, 'https://places.cit.api.here.com/places/v1/discover/here', expectedResponse, status=200) response = self._api.nearby_places([37.7905, -122.4107]) self.assertTrue(response) self.assertIsInstance(response, herepy.PlacesResponse) @responses.activate def test_nearby_places_whenerroroccured(self): with open('testdata/models/places_api_error.json', 'r') as f: expectedResponse = f.read() responses.add(responses.GET, 'https://places.cit.api.here.com/places/v1/discover/here', expectedResponse, status=200) with self.assertRaises(herepy.HEREError): self._api.nearby_places([-9999.0, -9999.0]) @responses.activate def test_search_suggestions_whensucceed(self): with io.open('testdata/models/places_api_suggestions.json', 'r', encoding='utf-8') as f: expectedResponse = f.read() responses.add(responses.GET, 'https://places.cit.api.here.com/places/v1/suggest', expectedResponse, status=200) response = self._api.search_suggestions([52.5159, 13.3777], 'berlin') self.assertTrue(response) self.assertIsInstance(response, herepy.PlacesSuggestionsResponse) @responses.activate def test_search_suggestions_whenerroroccured(self): with open('testdata/models/places_api_error.json', 'r') as f: expectedResponse = f.read() responses.add(responses.GET, 'https://places.cit.api.here.com/places/v1/suggest', expectedResponse, status=200) with self.assertRaises(herepy.HEREError): self._api.search_suggestions([-9999.0, -9999.0], '') @responses.activate def test_place_categories_whensucceed(self): with open('testdata/models/places_api_categories.json', 'r') as f: expectedResponse = f.read() responses.add(responses.GET, 'https://places.cit.api.here.com/places/v1/categories/places', expectedResponse, status=200) response = self._api.place_categories([52.5159, 13.3777]) self.assertTrue(response) 
self.assertIsInstance(response, herepy.PlaceCategoriesResponse) @responses.activate def test_place_categories_whenerroroccured(self): with open('testdata/models/places_api_error.json', 'r') as f: expectedResponse = f.read() responses.add(responses.GET, 'https://places.cit.api.here.com/places/v1/categories/places', expectedResponse, status=200) with self.assertRaises(herepy.HEREError): self._api.place_categories([-9999.0, -9999.0]) @responses.activate def test_places_at_boundingbox_whensucceed(self): with open('testdata/models/places_api.json', 'r') as f: expectedResponse = f.read() responses.add(responses.GET, 'https://places.cit.api.here.com/places/v1/discover/explore', expectedResponse, status=200) response = self._api.places_at_boundingbox([-122.408, 37.793], [-122.4070, 37.7942]) self.assertTrue(response) self.assertIsInstance(response, herepy.PlacesResponse) @responses.activate def test_places_at_boundingbox_whenerroroccured(self): with open('testdata/models/places_api_error.json', 'r') as f: expectedResponse = f.read() responses.add(responses.GET, 'https://places.cit.api.here.com/places/v1/discover/explore', expectedResponse, status=200) with self.assertRaises(herepy.HEREError): self._api.places_at_boundingbox([-9999.0, -9999.0], [-9999.0, -9999.0]) @responses.activate def test_places_with_language_whensucceed(self): with open('testdata/models/places_api.json', 'r') as f: expectedResponse = f.read() responses.add(responses.GET, 'https://places.cit.api.here.com/places/v1/discover/explore', expectedResponse, status=200) response = self._api.places_with_language([48.8580, 2.2945], 'en-US') self.assertTrue(response) self.assertIsInstance(response, herepy.PlacesResponse) @responses.activate def test_places_with_language_whenerroroccured(self): with open('testdata/models/places_api_error.json', 'r') as f: expectedResponse = f.read() responses.add(responses.GET, 'https://places.cit.api.here.com/places/v1/discover/explore', expectedResponse, status=200) with 
self.assertRaises(herepy.HEREError): self._api.places_with_language([-9999.0, -9999.0], '')
47.502793
105
0.671881
1,002
8,503
5.556886
0.108782
0.028915
0.042744
0.051904
0.864224
0.836566
0.780352
0.748743
0.703664
0.677622
0
0.04171
0.202046
8,503
178
106
47.769663
0.778924
0.002352
0
0.641026
0
0
0.192761
0.066847
0
0
0
0
0.192308
1
0.121795
false
0
0.044872
0
0.173077
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
747dc047001fb6bb567db3f11651ae2a9ab54840
93
py
Python
src/util.py
m1kit/yalm-resources
d8b7d3ac62da210a2a7dfbeb9dffd7b0ac5514e7
[ "Apache-2.0" ]
null
null
null
src/util.py
m1kit/yalm-resources
d8b7d3ac62da210a2a7dfbeb9dffd7b0ac5514e7
[ "Apache-2.0" ]
null
null
null
src/util.py
m1kit/yalm-resources
d8b7d3ac62da210a2a7dfbeb9dffd7b0ac5514e7
[ "Apache-2.0" ]
null
null
null
import re def escape_license_id(license: str) -> str: return re.sub(r'\W', '_', license)
15.5
43
0.666667
15
93
3.933333
0.733333
0
0
0
0
0
0
0
0
0
0
0
0.16129
93
5
44
18.6
0.75641
0
0
0
0
0
0.032258
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
6
7762c6330ef370120a5247b9062a569717aeaa4e
191
py
Python
cfgparser/matches/physdev.py
jejer/iptables-inspector
4d05ece83d8eacff7aeb43f8798d7cf6c6bc1d5c
[ "Apache-2.0" ]
null
null
null
cfgparser/matches/physdev.py
jejer/iptables-inspector
4d05ece83d8eacff7aeb43f8798d7cf6c6bc1d5c
[ "Apache-2.0" ]
null
null
null
cfgparser/matches/physdev.py
jejer/iptables-inspector
4d05ece83d8eacff7aeb43f8798d7cf6c6bc1d5c
[ "Apache-2.0" ]
null
null
null
# http://ipset.netfilter.org/iptables-extensions.man.html#lbBQ class physdev(object): def __init__(self, raw): pass def match(self, packet, runner): return False
23.875
62
0.65445
24
191
5.041667
0.916667
0
0
0
0
0
0
0
0
0
0
0
0.225131
191
8
63
23.875
0.817568
0.308901
0
0
0
0
0
0
0
0
0
0
0
1
0.4
false
0.2
0
0.2
0.8
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
6
77c6182627d4b26a014b7bb4ca78533257879f3a
31
py
Python
src/operations/send_metrics.py
curious95/projektCORM
df03028d9ea6ae30ac29eaaceeea5541536b7754
[ "Apache-2.0" ]
null
null
null
src/operations/send_metrics.py
curious95/projektCORM
df03028d9ea6ae30ac29eaaceeea5541536b7754
[ "Apache-2.0" ]
null
null
null
src/operations/send_metrics.py
curious95/projektCORM
df03028d9ea6ae30ac29eaaceeea5541536b7754
[ "Apache-2.0" ]
null
null
null
def send_metrics(): pass
6.2
19
0.612903
4
31
4.5
1
0
0
0
0
0
0
0
0
0
0
0
0.290323
31
4
20
7.75
0.818182
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
1
0
0
0
0
0
6
7af3c96fb1fd5277f6398e3dcf5f31a7afc89586
40
py
Python
repository-miner/processing/__init__.py
INSO-TUWien/portfoliometrix
f01df7c556d3b40bc0668fd49c3961daaa64096f
[ "MIT" ]
null
null
null
repository-miner/processing/__init__.py
INSO-TUWien/portfoliometrix
f01df7c556d3b40bc0668fd49c3961daaa64096f
[ "MIT" ]
null
null
null
repository-miner/processing/__init__.py
INSO-TUWien/portfoliometrix
f01df7c556d3b40bc0668fd49c3961daaa64096f
[ "MIT" ]
null
null
null
from .post_process import PostProcessor
20
39
0.875
5
40
6.8
1
0
0
0
0
0
0
0
0
0
0
0
0.1
40
1
40
40
0.944444
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
bb05e91caf9e5256d0256b5a57a7aaf893ead682
8,346
py
Python
tests/formats/mysql/file_reader/parsers/test_constraint_foreign.py
cmancone/mygrations
30d1d568ca7d6c38dbc5211834dd2d04c0bcf078
[ "MIT" ]
10
2018-04-09T08:39:42.000Z
2022-03-14T15:36:05.000Z
tests/formats/mysql/file_reader/parsers/test_constraint_foreign.py
cmancone/mygrations
30d1d568ca7d6c38dbc5211834dd2d04c0bcf078
[ "MIT" ]
14
2018-05-02T11:14:08.000Z
2022-01-15T18:48:54.000Z
tests/formats/mysql/file_reader/parsers/test_constraint_foreign.py
cmancone/mygrations
30d1d568ca7d6c38dbc5211834dd2d04c0bcf078
[ "MIT" ]
5
2018-07-18T02:20:48.000Z
2022-02-19T09:32:07.000Z
import unittest from mygrations.formats.mysql.file_reader.parsers.constraint_foreign import constraint_foreign class test_constraint_foreign(unittest.TestCase): def test_simple(self): # parse a typical foreign key constraint parser = constraint_foreign() returned = parser.parse( 'CONSTRAINT `accounts_status_id_ref_account_statuses_id` FOREIGN KEY (`status_id`) REFERENCES `account_statuses` (`id`) ON DELETE CASCADE ON UPDATE SET NULL' ) # we should have matched self.assertTrue(parser.matched) # and we should have matched everything self.assertEquals('', returned) # we should have lots of data now self.assertEquals('accounts_status_id_ref_account_statuses_id', parser.name) self.assertEquals('status_id', parser.column) self.assertEquals('account_statuses', parser.foreign_table) self.assertEquals('id', parser.foreign_column) self.assertEquals('CASCADE', parser.on_delete) self.assertEquals('SET NULL', parser.on_update) self.assertFalse(parser.has_comma) self.assertEquals( 'CONSTRAINT `accounts_status_id_ref_account_statuses_id` FOREIGN KEY (`status_id`) REFERENCES `account_statuses` (`id`) ON DELETE CASCADE ON UPDATE SET NULL', str(parser) ) def test_all_deletes(self): # parse a typical foreign key constraint parser = constraint_foreign() parser.parse('CONSTRAINT blah FOREIGN KEY (check) REFERENCES tbl (id) ON DELETE CASCADE ON UPDATE SET NULL') self.assertEquals('CASCADE', parser.on_delete) self.assertEquals( 'CONSTRAINT `blah` FOREIGN KEY (`check`) REFERENCES `tbl` (`id`) ON DELETE CASCADE ON UPDATE SET NULL', str(parser) ) # parse a typical foreign key constraint parser = constraint_foreign() parser.parse('CONSTRAINT blah FOREIGN KEY (check) REFERENCES tbl (id) ON DELETE NO ACTION ON UPDATE SET NULL') self.assertEquals('NO ACTION', parser.on_delete) self.assertEquals( 'CONSTRAINT `blah` FOREIGN KEY (`check`) REFERENCES `tbl` (`id`) ON DELETE NO ACTION ON UPDATE SET NULL', str(parser) ) # parse a typical foreign key constraint parser = constraint_foreign() 
parser.parse('CONSTRAINT blah FOREIGN KEY (check) REFERENCES tbl (id) ON DELETE RESTRICT ON UPDATE SET NULL') self.assertEquals('RESTRICT', parser.on_delete) self.assertEquals( 'CONSTRAINT `blah` FOREIGN KEY (`check`) REFERENCES `tbl` (`id`) ON DELETE RESTRICT ON UPDATE SET NULL', str(parser) ) # parse a typical foreign key constraint parser = constraint_foreign() parser.parse('CONSTRAINT blah FOREIGN KEY (check) REFERENCES tbl (id) ON DELETE set default ON UPDATE SET NULL') self.assertEquals('SET DEFAULT', parser.on_delete) self.assertEquals( 'CONSTRAINT `blah` FOREIGN KEY (`check`) REFERENCES `tbl` (`id`) ON DELETE SET DEFAULT ON UPDATE SET NULL', str(parser) ) # parse a typical foreign key constraint parser = constraint_foreign() parser.parse('CONSTRAINT blah FOREIGN KEY (check) REFERENCES tbl (id) ON DELETE set null ON UPDATE SET NULL') self.assertEquals('SET NULL', parser.on_delete) self.assertEquals( 'CONSTRAINT `blah` FOREIGN KEY (`check`) REFERENCES `tbl` (`id`) ON DELETE SET NULL ON UPDATE SET NULL', str(parser) ) # parse a typical foreign key constraint parser = constraint_foreign() parser.parse('CONSTRAINT blah FOREIGN KEY (check) REFERENCES tbl (id) ON UPDATE SET NULL') self.assertEquals('RESTRICT', parser.on_delete) self.assertEquals( 'CONSTRAINT `blah` FOREIGN KEY (`check`) REFERENCES `tbl` (`id`) ON DELETE RESTRICT ON UPDATE SET NULL', str(parser) ) def test_all_updates(self): # parse a typical foreign key constraint parser = constraint_foreign() parser.parse('CONSTRAINT blah FOREIGN KEY (check) REFERENCES tbl (id) ON DELETE CASCADE ON UPDATE SET NULL') self.assertEquals('SET NULL', parser.on_update) self.assertEquals( 'CONSTRAINT `blah` FOREIGN KEY (`check`) REFERENCES `tbl` (`id`) ON DELETE CASCADE ON UPDATE SET NULL', str(parser) ) # parse a typical foreign key constraint parser = constraint_foreign() parser.parse( 'CONSTRAINT blah FOREIGN KEY (check) REFERENCES tbl (id) ON DELETE NO ACTION ON UPDATE SET DEFAULT' ) self.assertEquals('SET DEFAULT', 
parser.on_update) self.assertEquals( 'CONSTRAINT `blah` FOREIGN KEY (`check`) REFERENCES `tbl` (`id`) ON DELETE NO ACTION ON UPDATE SET DEFAULT', str(parser) ) # parse a typical foreign key constraint parser = constraint_foreign() parser.parse('CONSTRAINT blah FOREIGN KEY (check) REFERENCES tbl (id) ON DELETE RESTRICT ON UPDATE CASCADE') self.assertEquals('CASCADE', parser.on_update) self.assertEquals( 'CONSTRAINT `blah` FOREIGN KEY (`check`) REFERENCES `tbl` (`id`) ON DELETE RESTRICT ON UPDATE CASCADE', str(parser) ) # parse a typical foreign key constraint parser = constraint_foreign() parser.parse( 'CONSTRAINT blah FOREIGN KEY (check) REFERENCES tbl (id) ON DELETE set default ON UPDATE no action' ) self.assertEquals('NO ACTION', parser.on_update) self.assertEquals( 'CONSTRAINT `blah` FOREIGN KEY (`check`) REFERENCES `tbl` (`id`) ON DELETE SET DEFAULT ON UPDATE NO ACTION', str(parser) ) # parse a typical foreign key constraint parser = constraint_foreign() parser.parse('CONSTRAINT blah FOREIGN KEY (check) REFERENCES tbl (id) ON DELETE set null ON UPDATE RESTRICT') self.assertEquals('RESTRICT', parser.on_update) self.assertEquals( 'CONSTRAINT `blah` FOREIGN KEY (`check`) REFERENCES `tbl` (`id`) ON DELETE SET NULL ON UPDATE RESTRICT', str(parser) ) # parse a typical foreign key constraint parser = constraint_foreign() parser.parse('CONSTRAINT blah FOREIGN KEY (check) REFERENCES tbl (id) ON DELETE SET NULL') self.assertEquals('RESTRICT', parser.on_update) self.assertEquals( 'CONSTRAINT `blah` FOREIGN KEY (`check`) REFERENCES `tbl` (`id`) ON DELETE SET NULL ON UPDATE RESTRICT', str(parser) ) def test_action_optional(self): # all actions are optional, and default to RESTRICT parser = constraint_foreign() remaining = parser.parse('CONSTRAINT blah FOREIGN KEY (check) REFERENCES tbl (id)') self.assertTrue(parser.matched) self.assertEquals('', remaining) self.assertEquals('RESTRICT', parser.on_update) self.assertEquals('RESTRICT', parser.on_delete) self.assertEquals( 
'CONSTRAINT `blah` FOREIGN KEY (`check`) REFERENCES `tbl` (`id`) ON DELETE RESTRICT ON UPDATE RESTRICT', str(parser) ) def test_leftovers(self): # anything that doesn't match should be returned as leftovers parser = constraint_foreign() remaining = parser.parse('CONSTRAINT blah FOREIGN KEY (check) REFERENCES tbl (id), sup') self.assertTrue(parser.matched) self.assertEquals('sup', remaining) self.assertTrue(parser.has_comma) self.assertEquals( 'CONSTRAINT `blah` FOREIGN KEY (`check`) REFERENCES `tbl` (`id`) ON DELETE RESTRICT ON UPDATE RESTRICT', str(parser) ) # even if the user forgets a comma parser = constraint_foreign() remaining = parser.parse('CONSTRAINT blah FOREIGN KEY (check) REFERENCES tbl (id) sup') self.assertEquals( 'CONSTRAINT `blah` FOREIGN KEY (`check`) REFERENCES `tbl` (`id`) ON DELETE RESTRICT ON UPDATE RESTRICT', str(parser) ) self.assertTrue(parser.matched) self.assertEquals('sup', remaining) self.assertFalse(parser.has_comma)
44.393617
170
0.646178
965
8,346
5.511917
0.087047
0.084602
0.118443
0.135364
0.882121
0.871028
0.839444
0.826471
0.803346
0.778154
0
0
0.262521
8,346
187
171
44.631016
0.864175
0.088905
0
0.560284
0
0.120567
0.435522
0.017141
0
0
0
0
0.333333
1
0.035461
false
0
0.014184
0
0.056738
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
bb077683b24c7a97a0273ac49f786fd9b053719c
77
py
Python
mazzle/command_line.py
samsquire/devops-pipeline
8df5edfd1932224b5b29eae57c7d64a6fd356f41
[ "MIT" ]
13
2018-04-03T09:45:53.000Z
2020-03-19T05:39:17.000Z
mazzle/command_line.py
samsquire/mazzle
8df5edfd1932224b5b29eae57c7d64a6fd356f41
[ "MIT" ]
19
2019-10-15T06:53:07.000Z
2020-08-11T09:12:33.000Z
mazzle/command_line.py
samsquire/mazzle
8df5edfd1932224b5b29eae57c7d64a6fd356f41
[ "MIT" ]
2
2019-12-02T16:00:44.000Z
2020-01-10T12:10:38.000Z
#!/usr/bin env python3 import devops_pipeline def main(): print("hello")
11
22
0.701299
11
77
4.818182
1
0
0
0
0
0
0
0
0
0
0
0.015385
0.155844
77
6
23
12.833333
0.8
0.272727
0
0
0
0
0.090909
0
0
0
0
0
0
1
0.333333
true
0
0.333333
0
0.666667
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
6
bb0eaa1cfd27d991d4d5864d85fa5e342dec6868
113
py
Python
src/lib/bver/__init__.py
backboneHQ/bver
c3c929442fadb28a3f39d0ddec19fb2dfc7a4732
[ "MIT" ]
1
2021-09-09T01:22:37.000Z
2021-09-09T01:22:37.000Z
src/lib/bver/__init__.py
backboneHQ/bver
c3c929442fadb28a3f39d0ddec19fb2dfc7a4732
[ "MIT" ]
null
null
null
src/lib/bver/__init__.py
backboneHQ/bver
c3c929442fadb28a3f39d0ddec19fb2dfc7a4732
[ "MIT" ]
1
2021-09-03T18:45:15.000Z
2021-09-03T18:45:15.000Z
from . import Versioned from . import Loader from .Query import Query, SoftwareNotFoundError, AddonNotFoundError
28.25
67
0.831858
12
113
7.833333
0.583333
0.212766
0
0
0
0
0
0
0
0
0
0
0.123894
113
3
68
37.666667
0.949495
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
bb4648dec1578e3e5aa0c74012719930b1d3e752
19
py
Python
naked/funcs/sklearn/feature_extraction/__init__.py
MaxHalford/naked
f1990a22903db61e6ac74ce1eccf5d43537ebfc4
[ "MIT" ]
26
2021-02-05T09:46:44.000Z
2021-11-14T19:40:47.000Z
naked/funcs/sklearn/feature_extraction/__init__.py
MaxHalford/naked
f1990a22903db61e6ac74ce1eccf5d43537ebfc4
[ "MIT" ]
null
null
null
naked/funcs/sklearn/feature_extraction/__init__.py
MaxHalford/naked
f1990a22903db61e6ac74ce1eccf5d43537ebfc4
[ "MIT" ]
1
2021-08-19T06:21:28.000Z
2021-08-19T06:21:28.000Z
from . import text
9.5
18
0.736842
3
19
4.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.210526
19
1
19
19
0.933333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
24bf37e9d0f194a57291b427fcbb67345e42963b
105
py
Python
braintree/apple_pay_options.py
futureironman/braintree_python
26bb8a857bc29322a8bca2e8e0fe6d99cfe6a1ac
[ "MIT" ]
182
2015-01-09T05:26:46.000Z
2022-03-16T14:10:06.000Z
braintree/apple_pay_options.py
futureironman/braintree_python
26bb8a857bc29322a8bca2e8e0fe6d99cfe6a1ac
[ "MIT" ]
95
2015-02-24T23:29:56.000Z
2022-03-13T03:27:58.000Z
braintree/apple_pay_options.py
futureironman/braintree_python
26bb8a857bc29322a8bca2e8e0fe6d99cfe6a1ac
[ "MIT" ]
93
2015-02-19T17:59:06.000Z
2022-03-19T17:01:25.000Z
from braintree.attribute_getter import AttributeGetter class ApplePayOptions(AttributeGetter): pass
21
54
0.847619
10
105
8.8
0.9
0
0
0
0
0
0
0
0
0
0
0
0.114286
105
4
55
26.25
0.946237
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
24cbd59a88f2b201c85a0e180bc9bfa5e1103158
92
py
Python
lightwood/model_building/__init__.py
ritwik12/lightwood
7975688355fba8b0f8349dd55a1b6cb625c3efd0
[ "MIT" ]
null
null
null
lightwood/model_building/__init__.py
ritwik12/lightwood
7975688355fba8b0f8349dd55a1b6cb625c3efd0
[ "MIT" ]
null
null
null
lightwood/model_building/__init__.py
ritwik12/lightwood
7975688355fba8b0f8349dd55a1b6cb625c3efd0
[ "MIT" ]
null
null
null
from lightwood.model_building.basic_ax_optimizer.basic_ax_optimizer import BasicAxOptimizer
46
91
0.923913
12
92
6.666667
0.75
0.175
0.4
0
0
0
0
0
0
0
0
0
0.043478
92
1
92
92
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
702eef261f863600fbad754f3872a37fcd3f62e1
104
py
Python
hcipy/interpolation/__init__.py
dskleingeld/hcipy
85cacfb7a8058506afb288e3acdf3b6059ba2b50
[ "MIT" ]
1
2020-07-20T23:25:17.000Z
2020-07-20T23:25:17.000Z
hcipy/interpolation/__init__.py
dskleingeld/hcipy
85cacfb7a8058506afb288e3acdf3b6059ba2b50
[ "MIT" ]
null
null
null
hcipy/interpolation/__init__.py
dskleingeld/hcipy
85cacfb7a8058506afb288e3acdf3b6059ba2b50
[ "MIT" ]
null
null
null
from .linear import make_linear_interpolator_separated __all__ = ['make_linear_interpolator_separated']
34.666667
54
0.875
12
104
6.75
0.583333
0.246914
0.54321
0.765432
0
0
0
0
0
0
0
0
0.067308
104
3
55
34.666667
0.835052
0
0
0
0
0
0.32381
0.32381
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
6
7033828895ee92e6f059073311b44843db03dd8f
69
py
Python
correcao_prova/views.py
WesGtoX/hackribeirao2018
65d7f21d131db9f7c90b9f71c4a8a716bd1dfb18
[ "MIT" ]
null
null
null
correcao_prova/views.py
WesGtoX/hackribeirao2018
65d7f21d131db9f7c90b9f71c4a8a716bd1dfb18
[ "MIT" ]
null
null
null
correcao_prova/views.py
WesGtoX/hackribeirao2018
65d7f21d131db9f7c90b9f71c4a8a716bd1dfb18
[ "MIT" ]
1
2020-04-09T18:45:53.000Z
2020-04-09T18:45:53.000Z
from django.shortcuts import render def corrigir(request): pass
13.8
35
0.768116
9
69
5.888889
1
0
0
0
0
0
0
0
0
0
0
0
0.173913
69
5
36
13.8
0.929825
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
1
0
1
0
0
6
706404ebccebf171284617ead3f870c76b3cd553
9,784
py
Python
ResNet/net.py
easilylazy/pattern-recognition
2b95689bb3f34e4821a0211b19b76164aa6e615f
[ "MIT" ]
2
2021-03-07T14:40:23.000Z
2021-03-08T08:51:01.000Z
ResNet/net.py
easilylazy/pattern-recognition
2b95689bb3f34e4821a0211b19b76164aa6e615f
[ "MIT" ]
null
null
null
ResNet/net.py
easilylazy/pattern-recognition
2b95689bb3f34e4821a0211b19b76164aa6e615f
[ "MIT" ]
null
null
null
import torch import torch.nn as nn import torch.nn.functional as F unit_num=2 class DimUnit(nn.Module): ''' dimension change ''' def __init__(self,in_channels, out_channels,stride=2): super(DimUnit,self).__init__() self.convList=nn.ModuleList() self.bnList=nn.ModuleList() self.conv1 = nn.Conv2d(in_channels, out_channels, 3, stride, 1) self.conv2 = nn.Conv2d(out_channels, out_channels, 3, 1, 1) def forward(self,x): x = F.relu((self.conv1(x))) x = F.relu((self.conv2(x))) return x class ResUnit(nn.Module): def __init__(self,channels,layers=2): super(ResUnit,self).__init__() self.layers=layers self.convList=nn.ModuleList() for i in range(layers): conv = nn.Conv2d(channels, channels, 3, 1, 1) self.convList.append(conv) def forward(self,xi): x = F.relu(self.convList[0](xi)) x = F.relu(self.convList[1](x)) xo=xi+x return xo class ResNet(nn.Module): def __init__(self): super(ResNet, self).__init__() # 1 input image channel, 6 output channels, 7x7 square convolution # kernel self.conv1_0 = nn.Conv2d(3, 16, 3, 1, 1) self.res1=ResUnit(16) self.resunits=nn.ModuleList() channelsList=[16,32,64] for channels in channelsList: for i in range(unit_num): resunit=ResUnit(channels) self.resunits.append(resunit) self.DimUnit2=DimUnit(16,32) self.DimUnit3=DimUnit(32,64) self.pool=nn.AdaptiveAvgPool2d((1,1)) self.fc = nn.Linear(64,10) def forward(self, x): # dimension 16 x = F.relu((self.conv1_0(x))) x = self.res1(x) for i in range(unit_num): x = self.resunits[i](x) # # dimension 32 x = self.DimUnit2(x) for i in range(unit_num): x = self.resunits[unit_num+i](x) # # dimension 64 x = self.DimUnit3(x) for i in range(unit_num): x = self.resunits[unit_num*2+i](x) x = self.pool(x) x = torch.flatten(x, 1) # flatten all dimensions except the batch dimension x = self.fc(x) return x class ResUnit_BN(nn.Module): def __init__(self,channels,layers=2): super(ResUnit_BN,self).__init__() self.layers=layers self.convList=nn.ModuleList() self.bnList=nn.ModuleList() for i in range(layers): conv = 
nn.Conv2d(channels, channels, 3, 1, 1) self.convList.append(conv) bn=nn.BatchNorm2d(channels) self.bnList.append(bn) def forward(self,xi): x = F.relu(self.bnList[0](self.convList[0](xi))) x = F.relu(self.bnList[1](self.convList[1](x))) xo=xi+x xo=F.relu(xo) return xo class DimUnit_BN(nn.Module): ''' dimension change ''' def __init__(self,in_channels, out_channels,stride=2,diff=8,option='B'): super(DimUnit_BN,self).__init__() self.option=option self.convList=nn.ModuleList() self.bnList=nn.ModuleList() self.conv1 = nn.Conv2d(in_channels, out_channels, 3, stride, 1) self.conv1_2 = nn.Conv2d(in_channels, out_channels, 3, stride, 1) self.conv2 = nn.Conv2d(out_channels, out_channels, 3, 1, 1) self.bn1=nn.BatchNorm2d(out_channels) self.bn1_2=nn.BatchNorm2d(out_channels) self.bn2=nn.BatchNorm2d(out_channels) self.pad=nn.ConstantPad3d((0,0,0,0,0,out_channels-in_channels),0) def forward(self,xi): x = F.relu(self.bn1(self.conv1(xi))) x = F.relu(self.bn2(self.conv2(x))) if self.option=='A': xo=self.pad(xi[:,:,::2,::2])+x elif self.option=='B': x1_2=self.bn1_2(self.conv1_2(xi)) xo=x1_2+x else: xo=x xo=F.relu(xo) return xo class ResNet_BN(nn.Module): def __init__(self,unit_num=2): super(ResNet_BN, self).__init__() self.unit_num=unit_num # 1 input image channel, 6 output channels, 7x7 square convolution # kernel self.conv1_0 = nn.Conv2d(3, 16, 3, 1, 1) self.res1=ResUnit_BN(16) self.resunits=nn.ModuleList() channelsList=[16,32,64] for channels in channelsList: for i in range(self.unit_num): resunit=ResUnit_BN(channels) self.resunits.append(resunit) self.DimUnit_BN2=DimUnit_BN(16,32) self.DimUnit_BN3=DimUnit_BN(32,64) self.pool=nn.AdaptiveAvgPool2d((1,1)) self.bn1=nn.BatchNorm2d(16) self.fc = nn.Linear(64,10) def forward(self, x): # dimension 16 x = F.relu(self.bn1(self.conv1_0(x))) x = self.res1(x) for i in range(self.unit_num): x = self.resunits[i](x) # # dimension 32 x = self.DimUnit_BN2(x) for i in range(self.unit_num): x = self.resunits[self.unit_num+i](x) # # dimension 64 x = 
self.DimUnit_BN3(x) for i in range(self.unit_num): x = self.resunits[self.unit_num*2+i](x) x = self.pool(x) x = torch.flatten(x, 1) # flatten all dimensions except the batch dimension x = self.fc(x) return x class Net(nn.Module): def __init__(self): super(Net, self).__init__() # 1 input image channel, 6 output channels, 7x7 square convolution # kernel self.conv1_0 = nn.Conv2d(3, 16, 3, 1, 1) self.conv1 = nn.Conv2d(16, 16, 3, 1, 1) self.conv2_0 = nn.Conv2d(16, 32, 3, 2, 1) self.conv2 = nn.Conv2d(32, 32, 3, 1, 1) self.conv3_0 = nn.Conv2d(32, 64, 3, 2, 1) self.conv3 = nn.Conv2d(64, 64, 3, 1, 1) self.pool=nn.AdaptiveAvgPool2d((1,1)) self.fc = nn.Linear(64,10) def forward(self, x): # Max pooling over a (2, 2) window x1_0 = F.relu(self.conv1_0(x)) x = F.relu(self.conv1(x1_0)) x = F.relu(self.conv1(x)) x1_1=x1_0+x x = F.relu(self.conv1(x1_1)) x = F.relu(self.conv1(x)) x1_2=x1_1+x x = F.relu(self.conv1(x1_2)) x = F.relu(self.conv1(x)) x2_0=x1_2+x x = F.relu(self.conv2_0(x2_0)) x2_1 = F.relu(self.conv2(x)) # dimension 32 x = F.relu(self.conv2(x2_1)) x = F.relu(self.conv2(x)) x2_2=x2_1+x x = F.relu(self.conv2(x2_2)) x = F.relu(self.conv2(x)) x3_0=x2_2+x x = F.relu(self.conv3_0(x3_0)) x3_1 = F.relu(self.conv3(x)) # dimension 64 x = F.relu(self.conv3(x3_1)) x = F.relu(self.conv3(x)) x3_2=x3_1+x x = F.relu(self.conv3(x3_2)) x = F.relu(self.conv3(x)) x = self.pool(x) x = torch.flatten(x, 1) # flatten all dimensions except the batch dimension x = self.fc(x) return x class ResUnit_BN_plain(nn.Module): def __init__(self,channels,layers=2): super(ResUnit_BN_plain,self).__init__() self.layers=layers self.convList=nn.ModuleList() self.bnList=nn.ModuleList() for i in range(layers): conv = nn.Conv2d(channels, channels, 3, 1, 1) self.convList.append(conv) bn=nn.BatchNorm2d(channels) self.bnList.append(bn) def forward(self,xi): x = F.relu(self.bnList[0](self.convList[0](xi))) x = F.relu(self.bnList[1](self.convList[1](x))) x = F.relu(x) return x class DimUnit_BN_plain(nn.Module): ''' 
dimension change ''' def __init__(self,in_channels, out_channels,stride=2,diff=8): super(DimUnit_BN_plain,self).__init__() self.convList=nn.ModuleList() self.bnList=nn.ModuleList() self.conv1 = nn.Conv2d(in_channels, out_channels, 3, stride, 1) self.conv2 = nn.Conv2d(out_channels, out_channels, 3, 1, 1) self.bn1=nn.BatchNorm2d(out_channels) self.bn2=nn.BatchNorm2d(out_channels) self.pad=nn.ConstantPad3d((0,0,0,0,0,out_channels-in_channels),0) def forward(self,xi): x = F.relu(self.bn1(self.conv1(xi))) x = F.relu(self.bn2(self.conv2(x))) x = F.relu(x) return x class ResNet_BN_plain(nn.Module): def __init__(self,unit_num=2): super(ResNet_BN_plain, self).__init__() self.unit_num=unit_num # 1 input image channel, 6 output channels, 7x7 square convolution # kernel self.conv1_0 = nn.Conv2d(3, 16, 3, 1, 1) self.res1=ResUnit_BN_plain(16) self.resunits=nn.ModuleList() channelsList=[16,32,64] for channels in channelsList: for i in range(self.unit_num): resunit=ResUnit_BN_plain(channels) self.resunits.append(resunit) self.DimUnit_BN2=DimUnit_BN_plain(16,32) self.DimUnit_BN3=DimUnit_BN_plain(32,64) self.pool=nn.AdaptiveAvgPool2d((1,1)) self.bn1=nn.BatchNorm2d(16) self.fc = nn.Linear(64,10) def forward(self, x): # dimension 16 x = F.relu(self.bn1(self.conv1_0(x))) x = self.res1(x) for i in range(self.unit_num): x = self.resunits[i](x) # # dimension 32 x = self.DimUnit_BN2(x) for i in range(self.unit_num): x = self.resunits[self.unit_num+i](x) # # dimension 64 x = self.DimUnit_BN3(x) for i in range(self.unit_num): x = self.resunits[self.unit_num*2+i](x) x = self.pool(x) x = torch.flatten(x, 1) # flatten all dimensions except the batch dimension x = self.fc(x) return x
32.184211
84
0.572159
1,451
9,784
3.710544
0.06754
0.03529
0.056835
0.057578
0.892645
0.874257
0.816122
0.787704
0.752972
0.745542
0
0.061027
0.293234
9,784
304
85
32.184211
0.717571
0.073692
0
0.65678
0
0
0.000334
0
0
0
0
0
0
1
0.084746
false
0
0.012712
0
0.182203
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
706bbc850cbd81198adf4290ecdbd95d546ac6c4
6,686
py
Python
test/test_convert_u01.py
kryshac/pyembroidery
54987b6919b99f4c59f6a1b88b99ec5cb635ba71
[ "MIT" ]
83
2018-08-20T22:52:18.000Z
2022-03-30T07:44:37.000Z
test/test_convert_u01.py
kryshac/pyembroidery
54987b6919b99f4c59f6a1b88b99ec5cb635ba71
[ "MIT" ]
81
2018-08-12T15:43:23.000Z
2022-01-05T14:59:51.000Z
test/test_convert_u01.py
kryshac/pyembroidery
54987b6919b99f4c59f6a1b88b99ec5cb635ba71
[ "MIT" ]
21
2019-02-25T13:31:37.000Z
2022-03-08T08:44:05.000Z
from __future__ import print_function import unittest from pyembroidery import * from pattern_for_tests import * class TestConverts(unittest.TestCase): def position_equals(self, stitches, j, k): self.assertEqual(stitches[j][:1], stitches[k][:1]) def test_convert_u01_to_u01(self): file1 = "convert_u01.u01" file2 = "converted_u01.u01" write_u01(get_big_pattern(), file1) f_pattern = read_u01(file1) write_u01(f_pattern, file2) t_pattern = read_u01(file2) self.assertIsNotNone(t_pattern) self.assertEqual(t_pattern.count_stitch_commands(NEEDLE_SET), 16) self.assertEqual(t_pattern.count_stitch_commands(STITCH), 16 * 5) self.position_equals(t_pattern.stitches, 0, -1) print("u01->u01: ", t_pattern.stitches) self.addCleanup(os.remove, file1) self.addCleanup(os.remove, file2) def test_convert_u01_to_csv(self): file1 = "convert_csv.u01" file2 = "converted_u01.csv" write_u01(get_big_pattern(), file1) f_pattern = read_u01(file1) write_csv(f_pattern, file2) t_pattern = read_csv(file2) self.assertIsNotNone(t_pattern) self.assertEqual(t_pattern.count_stitch_commands(NEEDLE_SET), 16) self.assertEqual(t_pattern.count_stitch_commands(STITCH), 16 * 5) self.position_equals(t_pattern.stitches, 0, -1) print("u01->csv: ", t_pattern.stitches) self.addCleanup(os.remove, file1) self.addCleanup(os.remove, file2) def test_convert_u01_to_exp(self): file1 = "convert_exp.u01" file2 = "converted_u01.exp" write_u01(get_big_pattern(), file1) f_pattern = read_u01(file1) write_exp(f_pattern, file2) t_pattern = read_exp(file2) self.assertIsNotNone(t_pattern) self.assertEqual(t_pattern.count_stitch_commands(COLOR_CHANGE), 15) self.assertEqual(t_pattern.count_stitch_commands(STITCH), 16 * 5) self.position_equals(t_pattern.stitches, 0, -1) print("u01->exp: ", t_pattern.stitches) self.addCleanup(os.remove, file1) self.addCleanup(os.remove, file2) def test_convert_u01_to_pes(self): file1 = "convert_pes.u01" file2 = "converted_u01.pes" write_u01(get_big_pattern(), file1) f_pattern = read_u01(file1) 
write_pes(f_pattern, file2) t_pattern = read_pes(file2) self.assertIsNotNone(t_pattern) self.assertEqual(t_pattern.count_stitch_commands(COLOR_CHANGE), 15) self.assertEqual(t_pattern.count_stitch_commands(STITCH), 16 * 5) self.position_equals(t_pattern.stitches, 0, -1) print("u01->pes: ", t_pattern.stitches) self.addCleanup(os.remove, file1) self.addCleanup(os.remove, file2) def test_convert_u01_to_jef(self): file1 = "convert_jef.u01" file2 = "converted_u01.jef" write_u01(get_big_pattern(), file1) f_pattern = read_u01(file1) write_jef(f_pattern, file2) t_pattern = read_jef(file2) self.assertIsNotNone(t_pattern) self.assertEqual(t_pattern.count_stitch_commands(COLOR_CHANGE), 15) self.assertEqual(t_pattern.count_stitch_commands(STITCH), 16 * 5) self.position_equals(t_pattern.stitches, 0, -1) print("u01->jef: ", t_pattern.stitches) self.addCleanup(os.remove, file1) self.addCleanup(os.remove, file2) def test_convert_u01_to_pec(self): file1 = "convert_pec.u01" file2 = "converted_u01.pec" write_u01(get_big_pattern(), file1) f_pattern = read_u01(file1) write_pec(f_pattern, file2) t_pattern = read_pec(file2) self.assertIsNotNone(t_pattern) self.assertEqual(t_pattern.count_stitch_commands(COLOR_CHANGE), 15) self.assertEqual(t_pattern.count_stitch_commands(STITCH), 16 * 5) self.position_equals(t_pattern.stitches, 0, -1) print("u01->pec: ", t_pattern.stitches) self.addCleanup(os.remove, file1) self.addCleanup(os.remove, file2) def test_convert_u01_to_vp3(self): file1 = "convert_vp3.u01" file2 = "converted_u01.vp3" write_u01(get_big_pattern(), file1) f_pattern = read_u01(file1) write_vp3(f_pattern, file2) t_pattern = read_vp3(file2) self.assertIsNotNone(t_pattern) self.assertEqual(t_pattern.count_stitch_commands(COLOR_CHANGE), 15) self.assertEqual(t_pattern.count_stitch_commands(STITCH), 16 * 5) self.position_equals(t_pattern.stitches, 0, -1) print("u01->vp3: ", t_pattern.stitches) self.addCleanup(os.remove, file1) self.addCleanup(os.remove, file2) def 
test_convert_u01_to_dst(self): file1 = "convert_dst.u01" file2 = "converted_u01.dst" write_u01(get_big_pattern(), file1) f_pattern = read_u01(file1) write_dst(f_pattern, file2) t_pattern = read_dst(file2) self.assertIsNotNone(t_pattern) self.assertEqual(t_pattern.count_stitch_commands(COLOR_CHANGE), 15) self.assertEqual(t_pattern.count_stitch_commands(STITCH), 16 * 5) self.position_equals(t_pattern.stitches, 0, -1) print("u01->dst: ", t_pattern.stitches) self.addCleanup(os.remove, file1) self.addCleanup(os.remove, file2) def test_convert_u01_to_gcode(self): file1 = "convert_gcode.u01" file2 = "converted_u01.gcode" write_u01(get_big_pattern(), file1) f_pattern = read_u01(file1) write_gcode(f_pattern, file2) t_pattern = read_gcode(file2) self.assertIsNotNone(t_pattern) self.assertEqual(t_pattern.count_stitch_commands(COLOR_CHANGE), 15) self.assertEqual(t_pattern.count_stitch_commands(STITCH), 16 * 5) self.position_equals(t_pattern.stitches, 0, -1) print("u01->gcode: ", t_pattern.stitches) self.addCleanup(os.remove, file1) self.addCleanup(os.remove, file2) def test_convert_u01_to_xxx(self): file1 = "convert_xxx.u01" file2 = "converted_u01.xxx" write_u01(get_big_pattern(), file1) f_pattern = read_u01(file1) write_xxx(f_pattern, file2) t_pattern = read_xxx(file2) self.assertIsNotNone(t_pattern) self.assertEqual(t_pattern.count_stitch_commands(COLOR_CHANGE), 15) self.assertEqual(t_pattern.count_stitch_commands(STITCH), 16 * 5) self.position_equals(t_pattern.stitches, 0, -1) print("u01->xxx: ", t_pattern.stitches) self.addCleanup(os.remove, file1) self.addCleanup(os.remove, file2)
39.329412
75
0.669309
874
6,686
4.813501
0.067506
0.114096
0.076064
0.109342
0.823865
0.819349
0.759924
0.759924
0.759924
0.759924
0
0.055866
0.223602
6,686
170
76
39.329412
0.754575
0
0
0.544218
0
0
0.063706
0
0
0
0
0
0.210884
1
0.07483
false
0
0.027211
0
0.108844
0.07483
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
708180a814ac548e38ad5d4046f901ecedd540ab
19,411
py
Python
data/video_preprocessing_script.py
Cli98/industry-action-recg
d47f528d4c3003c52100f0675e43975b23bcc684
[ "Apache-2.0" ]
null
null
null
data/video_preprocessing_script.py
Cli98/industry-action-recg
d47f528d4c3003c52100f0675e43975b23bcc684
[ "Apache-2.0" ]
null
null
null
data/video_preprocessing_script.py
Cli98/industry-action-recg
d47f528d4c3003c52100f0675e43975b23bcc684
[ "Apache-2.0" ]
null
null
null
import cv2 from datetime import datetime, timedelta from dateutil import parser from dateutil.parser._parser import ParserError import os from glob import glob from sklearn.model_selection import train_test_split import numpy as np import ffmpeg def check_rotation(path_video_file): # this returns meta-data of the video file in form of a dictionary meta_dict = ffmpeg.probe(path_video_file) # from the dictionary, meta_dict['streams'][0]['tags']['rotate'] is the key # we are looking for rotateCode = None if "tags" in meta_dict['streams'][0] and "rotate" in meta_dict['streams'][0]['tags']: if int(meta_dict['streams'][0]['tags']['rotate']) == 90: rotateCode = cv2.ROTATE_90_CLOCKWISE elif int(meta_dict['streams'][0]['tags']['rotate']) == 180: rotateCode = cv2.ROTATE_180 elif int(meta_dict['streams'][0]['tags']['rotate']) == 270: rotateCode = cv2.ROTATE_90_COUNTERCLOCKWISE return rotateCode def correct_rotation(frame, rotateCode): return cv2.rotate(frame, rotateCode) def extract_frame_from_movie(source_file, target_folder, clip_sec=2): """ Given a video, directly generated images and count of image, save 1. images and 2. annotation for inference purpose one video per time split by 2s folder structure :/data/ucf101/rawframes/throw/name/jpgs :param source_file: :param target_folder: :return: """ # target_folder = "original/throw_up_down_0" # source_file = ["source_data/video/throw/throw_up_down_0.mp4"] fourcc = cv2.VideoWriter_fourcc(*"MJPG") generated_video = [] if not isinstance(source_file, list): source_file = [source_file] if not os.path.exists(os.path.join(target_folder, "inference_video", "inference")): os.makedirs(os.path.join(target_folder, "inference_video", "inference"), exist_ok=False) # print("sn", source_file) for video_name in source_file: v_name, _ = video_name.split(".") v_affix = "avi" v_name = v_name.split("/")[-1] cameraCapture = cv2.VideoCapture(video_name) assert cameraCapture.isOpened(), "failed to load video!" 
rotateCode = check_rotation(video_name) size = (int(cameraCapture.get(cv2.CAP_PROP_FRAME_WIDTH)), int(cameraCapture.get(cv2.CAP_PROP_FRAME_HEIGHT))) if rotateCode in [0, 2]: size = size[1], size[0] video_count = 0 fps = cameraCapture.get(cv2.CAP_PROP_FPS) print("FPS for video {} is: {}".format(video_name, fps)) success, frame = cameraCapture.read() if rotateCode is not None: print("Need rotation!") frame = correct_rotation(frame, rotateCode) idx = 1 outVideo = cv2.VideoWriter( os.path.join(target_folder, "inference_video", "inference", v_name + "_" + str(video_count) + "." + v_affix), fourcc, fps, size) start_time = parser.parse("{}:{}:{}".format(0, 0, 0)) end_time = parser.parse("{}:{}:{}".format(0, 0, clip_sec)) while success: milliseconds = cameraCapture.get(cv2.CAP_PROP_POS_MSEC) if idx == 1 and milliseconds < 0: # Then enforce millisecond as 0, only show on mts file milliseconds = 0 minutes = 0 hours = 0 seconds = milliseconds // 1000 if seconds >= 60: minutes = seconds // 60 seconds = seconds % 60 if minutes >= 60: hours = minutes // 60 minutes = minutes % 60 try: current_time = parser.parse("{}:{}:{}".format(hours, minutes, seconds)) print(current_time) except ParserError: print("Failed to parse date! quit program! check {}:{}:{}".format(hours, minutes, seconds)) assert False, "Quit program!" if start_time <= current_time <= end_time: idx += 1 outVideo.write(frame) elif end_time < current_time: # Release old and create new video outVideo.release() generated_video.append( [os.path.join("inference", v_name + "_" + str(video_count)), idx - 1, start_time, end_time]) start_time = end_time end_time = end_time + timedelta(seconds=clip_sec) video_count += 1 outVideo = cv2.VideoWriter( os.path.join(target_folder, "inference_video", "inference", v_name + "_" + str(video_count) + "." 
+ v_affix), fourcc, fps, size) idx = 1 continue success, frame = cameraCapture.read() if rotateCode is not None: frame = correct_rotation(frame, rotateCode) cameraCapture.release() with open("ucf101/inference_list.txt", 'w') as file_writer: for ele in generated_video: if ele[1]>0: file_writer.writelines(" ".join(map(str, ele[:2])) + " -1\n") with open("ucf101/inference_result.txt", 'w') as file_writer: for ele in generated_video: if ele[1]>0: file_writer.writelines(" ".join(map(str, ele)) + " -1\n") return generated_video def prepare_movie_with_timestamp(source_folder, target_folder, action_name, clip_len=64): """ Cut to multiple small movie, each contains exactly clips, drop if <16 save to multiple videos :param source_folder: :param target_folder: :return: """ source_file = glob(os.path.join(source_folder, "video", action_name, "*.*")) fourcc = cv2.VideoWriter_fourcc(*"MJPG") assert len(source_file) > 0, "Movie files are not available." if not os.path.exists(os.path.join(target_folder, "videos", action_name)): os.makedirs(os.path.join(target_folder, "videos", action_name), exist_ok=False) generated_video = [] for video_name in source_file: print("procesing file {}".format(video_name)) v_name, _ = video_name.split(".") v_affix = "avi" v_name = v_name.split("/")[-1] annotation_name = video_name.replace("video", "gt_timestamp").replace(video_name.split(".")[-1], "txt") cameraCapture = cv2.VideoCapture(video_name) rotateCode = check_rotation(video_name) assert cameraCapture.isOpened(), "failed to load video!" 
size = (int(cameraCapture.get(cv2.CAP_PROP_FRAME_WIDTH)), int(cameraCapture.get(cv2.CAP_PROP_FRAME_HEIGHT))) # print(size) if rotateCode in [0, 2]: size = size[1], size[0] # print(size,rotateCode) video_count = 0 fps = cameraCapture.get(cv2.CAP_PROP_FPS) print("FPS for video {} is: {}".format(video_name, fps)) time_string_array = [] with open(annotation_name, 'r') as file_loader: for time_string in file_loader: time_string = time_string.strip("\n") if len(time_string) > 0: start_time, end_time = parser.parse(time_string.split("-")[0]), parser.parse( time_string.split("-")[-1]) # assert start_time < end_time, "start date larger than end date, that's wrong!" time_string_array.append([start_time, end_time]) success, frame = cameraCapture.read() if rotateCode is not None: print("Need rotation!") frame = correct_rotation(frame, rotateCode) idx = 1 if len(time_string_array) > 0: start_time, end_time = time_string_array.pop(0) else: print("This file {} has no target to extract!".format(video_name)) continue # print("The size of the video is: ", size,frame.shape[:-1], video_name) outVideo = cv2.VideoWriter( os.path.join(target_folder, "videos", action_name, v_name + "_" + str(video_count) + "." + v_affix), fourcc, fps, size) while success: milliseconds = cameraCapture.get(cv2.CAP_PROP_POS_MSEC) if idx == 1 and milliseconds < 0: # Then enforce millisecond as 0, only show on mts file milliseconds = 0 minutes = 0 hours = 0 seconds = milliseconds // 1000 if seconds >= 60: minutes = seconds // 60 seconds = seconds % 60 if minutes >= 60: hours = minutes // 60 minutes = minutes % 60 try: current_time = parser.parse("{}:{}:{}".format(hours, minutes, seconds)) except ParserError: print("Failed to parse date! quit program! ") print(hours, minutes, seconds) if start_time <= current_time <= end_time: if idx % (clip_len + 1) == 0: # Release old and create new video idx = 0 outVideo.release() generated_video.append( os.path.join(action_name, v_name + "_" + str(video_count) + "." 
+ v_affix)) video_count += 1 outVideo = cv2.VideoWriter( os.path.join(target_folder, "videos", action_name, v_name + "_" + str(video_count) + "." + v_affix), fourcc, fps, size) idx += 1 outVideo.write(frame) elif end_time < current_time: prev_end_time = end_time if len(time_string_array) > 0: start_time, end_time = time_string_array.pop(0) # assert prev_end_time < end_time, "Issues with labeling file! Is it correct?" else: # All target of interest have been collected if idx >= 32: outVideo.release() generated_video.append( os.path.join(action_name, v_name + "_" + str(video_count) + "." + v_affix)) cameraCapture.release() break success, frame = cameraCapture.read() if rotateCode is not None: frame = correct_rotation(frame, rotateCode) if idx >= 32: outVideo.release() generated_video.append( os.path.join(action_name, v_name + "_" + str(video_count) + "." + v_affix)) cameraCapture.release() return generated_video def prepare_movie_with_timestamp_bkrd(source_folder, target_folder, clip_len=64): """ Cut to multiple small movie, each contains exactly clips, drop if <16 save to multiple videos :param source_folder: :param target_folder: :return: """ action_name = "background" source_file = glob(os.path.join(source_folder, "video", "udstairs", "*.*")) fourcc = cv2.VideoWriter_fourcc(*"MJPG") assert len(source_file) > 0, "Movie files are not available." if not os.path.exists(os.path.join(target_folder, "videos", action_name)): os.makedirs(os.path.join(target_folder, "videos", action_name), exist_ok=False) generated_video = [] for video_name in source_file: print("procesing file {}".format(video_name)) v_name, _ = video_name.split(".") v_affix = "avi" v_name = v_name.split("/")[-1] annotation_name = video_name.replace("video", "gt_timestamp").replace(video_name.split(".")[-1], "txt") cameraCapture = cv2.VideoCapture(video_name) rotateCode = check_rotation(video_name) assert cameraCapture.isOpened(), "failed to load video!" 
size = (int(cameraCapture.get(cv2.CAP_PROP_FRAME_WIDTH)), int(cameraCapture.get(cv2.CAP_PROP_FRAME_HEIGHT))) # print(size) if rotateCode in [0, 2]: size = size[1], size[0] # print(size,rotateCode) video_count = 0 fps = cameraCapture.get(cv2.CAP_PROP_FPS) print("FPS for video {} is: {}".format(video_name, fps)) time_string_array = [] with open(annotation_name, 'r') as file_loader: for time_string in file_loader: time_string = time_string.strip("\n") if len(time_string) > 0: start_time, end_time = parser.parse(time_string.split("-")[0]), parser.parse( time_string.split("-")[-1]) # assert start_time < end_time, "start date larger than end date, that's wrong!" time_string_array.append([start_time, end_time]) success, frame = cameraCapture.read() if rotateCode is not None: print("Need rotation!") frame = correct_rotation(frame, rotateCode) idx = 1 if len(time_string_array) > 0: start_time, end_time = time_string_array.pop(0) else: print("This file {} has no target to extract!".format(video_name)) continue # print("The size of the video is: ", size,frame.shape[:-1], video_name) print("path: ",os.path.join(target_folder, "videos", action_name, v_name + "_" + str(video_count) + "." + v_affix)) outVideo = cv2.VideoWriter( os.path.join(target_folder, "videos", action_name, v_name + "_" + str(video_count) + "." + v_affix), fourcc, fps, size) while success: milliseconds = cameraCapture.get(cv2.CAP_PROP_POS_MSEC) if idx == 1 and milliseconds < 0: # Then enforce millisecond as 0, only show on mts file milliseconds = 0 minutes = 0 hours = 0 seconds = milliseconds // 1000 if seconds >= 60: minutes = seconds // 60 seconds = seconds % 60 if minutes >= 60: hours = minutes // 60 minutes = minutes % 60 try: current_time = parser.parse("{}:{}:{}".format(hours, minutes, seconds)) except ParserError: print("Failed to parse date! quit program! 
") print(hours, minutes, seconds) if not (start_time <= current_time <= end_time): if end_time < current_time: prev_end_time = end_time if len(time_string_array) > 0: start_time, end_time = time_string_array.pop(0) # assert prev_end_time < end_time, "Issues with labeling file! Is it correct?" else: # All target of interest have been collected if idx >= 32: outVideo.release() generated_video.append( os.path.join(action_name, v_name + "_" + str(video_count) + "." + v_affix)) cameraCapture.release() break if idx % (clip_len + 1) == 0: # Release old and create new video idx = 0 outVideo.release() generated_video.append( os.path.join(action_name, v_name + "_" + str(video_count) + "." + v_affix)) video_count += 1 outVideo = cv2.VideoWriter( os.path.join(target_folder, "videos", action_name, v_name + "_" + str(video_count) + "." + v_affix), fourcc, fps, size) idx += 1 outVideo.write(frame) success, frame = cameraCapture.read() if rotateCode is not None: frame = correct_rotation(frame, rotateCode) if idx >= 32: outVideo.release() generated_video.append( os.path.join(action_name, v_name + "_" + str(video_count) + "." 
+ v_affix)) cameraCapture.release() return generated_video def generate_classidx(target_folder, class_idx): if not os.path.exists(os.path.join(target_folder, "annotations")): os.makedirs(os.path.join(target_folder, "annotations"), exist_ok=False) with open(os.path.join(target_folder, "annotations", "classInd.txt"), 'w') as file_writer: for action_name_key in class_idx: file_writer.writelines("{} {}\n".format(class_idx[action_name_key], action_name_key)) def generate_annotation(target_folder, class_idx, video_list, seed=42, test_ratio=0.2): """ Generate annotation after you obtain video gt build one list for train and test, prefix 01 :param class_idx: type dict, indicate the class to train :param video_list: the list of video to generate as gt :return: """ if not os.path.exists(os.path.join(target_folder, "annotations")): os.makedirs(os.path.join(target_folder, "annotations"), exist_ok=False) X_trainval, X_test = train_test_split(video_list, test_size=test_ratio, random_state=seed) for i in range(1, 4): with open(os.path.join(target_folder, "annotations", "trainlist{:02d}.txt".format(i)), 'w') as file_writer: for path in X_trainval: action_name = path.split("/")[0] file_writer.writelines(path + " " + str(class_idx[action_name]) + "\n") with open(os.path.join(target_folder, "annotations", "testlist{:02d}.txt".format(i)), 'w') as file_writer: for path in X_test: action_name = path.split("/")[0] file_writer.writelines(path + " " + str(class_idx[action_name]) + "\n") if __name__ == "__main__": """ Use this script to organize data in ucf101 format: This script can achieve the following objectives: 1. Generate video 2. Generate annotation i. Generate class index ii. 
Generate train/test annotation file Copy source data folder into mmaction/data before running script Make sub-folder by action name for each raw video """ root = "" source_folder, target_folder = os.path.join(root, "source_data"), os.path.join(root, "ucf101") clip_len = 64 action_name = ["udstairs"] seed = 42 test_ratio = 0.2 class_idx = {x: i + 2 for i, x in enumerate(action_name)} class_idx["background"] = 1 mode = "train" # train or test if mode == "test": bkrd_list = prepare_movie_with_timestamp_bkrd(source_folder, target_folder, clip_len=clip_len) generate_classidx(target_folder, class_idx) for action in action_name: video_list = prepare_movie_with_timestamp(source_folder, target_folder, action, clip_len=clip_len) generate_annotation(target_folder, class_idx, bkrd_list + video_list, seed=seed, test_ratio=test_ratio) else: # Root : data source_file = "source_data/video/udstairs/1027_ok_1p_part4.mp4" target_folder = os.path.join("ucf101") extract_frame_from_movie(source_file, target_folder, clip_sec=0.8)
43.039911
123
0.573747
2,289
19,411
4.651813
0.12145
0.020849
0.030053
0.030053
0.80231
0.77423
0.741266
0.729621
0.699098
0.671394
0
0.019395
0.317346
19,411
450
124
43.135556
0.784167
0.100201
0
0.761194
0
0
0.071935
0.005847
0
0
0
0
0.01791
1
0.020896
false
0
0.026866
0.002985
0.062687
0.050746
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
563e531a2903eefc2eaa1311b132f087e9e8389f
40
py
Python
mailhandler/__init__.py
MailHandler/py-mailhandler
9d7af60638ac1242b5e74154b744fc0a212f4780
[ "Apache-2.0" ]
5
2016-10-21T09:15:09.000Z
2018-09-12T03:53:39.000Z
mailhandler/__init__.py
MailHandler/py-mailhandler
9d7af60638ac1242b5e74154b744fc0a212f4780
[ "Apache-2.0" ]
null
null
null
mailhandler/__init__.py
MailHandler/py-mailhandler
9d7af60638ac1242b5e74154b744fc0a212f4780
[ "Apache-2.0" ]
null
null
null
# coding: utf-8 from .core import Client
20
24
0.75
7
40
4.285714
1
0
0
0
0
0
0
0
0
0
0
0.029412
0.15
40
2
24
20
0.852941
0.325
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
566a35707e41f121076873eaf15560a9ed0c1848
55
py
Python
torchcv/visualizations/__init__.py
CVHj/torchcv
6291f3e1e4bbf6467fd6b1e79001d34a59481bb6
[ "MIT" ]
433
2017-11-30T15:46:58.000Z
2022-01-16T08:06:11.000Z
torchcv/visualizations/__init__.py
CVHj/torchcv
6291f3e1e4bbf6467fd6b1e79001d34a59481bb6
[ "MIT" ]
51
2018-01-29T15:14:33.000Z
2021-08-23T12:02:18.000Z
fpn-hoi/torchcv/visualizations/__init__.py
TheFairBear/Box-Attention-SSD-HOI
6101e209a709899c5645342784c8f451028ff46e
[ "MIT" ]
92
2018-01-20T07:45:36.000Z
2021-05-28T10:43:53.000Z
from torchcv.visualizations.vis_image import vis_image
27.5
54
0.890909
8
55
5.875
0.75
0.340426
0
0
0
0
0
0
0
0
0
0
0.072727
55
1
55
55
0.921569
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
56a5e5f0686e362dcfeaeae29c1aabb11d7ba2a4
186
py
Python
src/results/__init__.py
iN1k1/deep-pyramidal-representations-peron-re-identification
18eacd3b7bde2c4767ba290b655cb0f5c72ed8fe
[ "MIT" ]
13
2019-08-09T08:33:27.000Z
2020-12-21T08:51:33.000Z
src/results/__init__.py
iN1k1/deep-pyramidal-representations-peron-re-identification
18eacd3b7bde2c4767ba290b655cb0f5c72ed8fe
[ "MIT" ]
5
2021-03-19T02:17:23.000Z
2022-03-11T23:53:44.000Z
src/results/__init__.py
iN1k1/deep-pyramidal-representations-peron-re-identification
18eacd3b7bde2c4767ba290b655cb0f5c72ed8fe
[ "MIT" ]
4
2019-11-06T08:02:21.000Z
2021-01-13T20:34:23.000Z
from .performance import nAUC, get_matching_images from .reid import ReIDPerformance, ProbePerformance __all__ = ['nAUC', 'get_matching_images', 'ReIDPerformance', 'ProbePerformance']
31
80
0.806452
19
186
7.473684
0.578947
0.098592
0.211268
0.295775
0
0
0
0
0
0
0
0
0.096774
186
5
81
37.2
0.845238
0
0
0
0
0
0.291892
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
6
3b74886246482206eb5e69965e1331e2d4bd8f2f
125
py
Python
drill_core/__init__.py
JohnOmernik/jupyter_drill
f5bbeca43e9afcb8222093de9cc0f3dcec256276
[ "Apache-2.0" ]
12
2018-07-23T17:47:06.000Z
2021-05-28T13:54:26.000Z
drill_core/__init__.py
JohnOmernik/jupyter_drill
f5bbeca43e9afcb8222093de9cc0f3dcec256276
[ "Apache-2.0" ]
7
2018-11-19T19:05:47.000Z
2020-08-28T17:42:34.000Z
drill_core/__init__.py
JohnOmernik/jupyter_drill
f5bbeca43e9afcb8222093de9cc0f3dcec256276
[ "Apache-2.0" ]
5
2019-01-11T10:19:51.000Z
2021-04-12T19:50:18.000Z
from integration_core import Integration from drill_core.drill_base import Drill from drill_core._version import __version__
31.25
43
0.888
18
125
5.666667
0.388889
0.176471
0.254902
0
0
0
0
0
0
0
0
0
0.096
125
3
44
41.666667
0.902655
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
3b75e70df3fb127d77e002bfd963ead8a9085323
44
py
Python
src/viz/__init__.py
qshzh/lightning-hydra-template
acf3825676c23ad4cdd3303feebf79d35a6ff748
[ "Unlicense", "MIT" ]
1
2021-08-24T02:54:29.000Z
2021-08-24T02:54:29.000Z
src/viz/__init__.py
qshzh/lightning-hydra-template
acf3825676c23ad4cdd3303feebf79d35a6ff748
[ "Unlicense", "MIT" ]
null
null
null
src/viz/__init__.py
qshzh/lightning-hydra-template
acf3825676c23ad4cdd3303feebf79d35a6ff748
[ "Unlicense", "MIT" ]
null
null
null
from .wandb_fig import wandb_img, wandb_plt
22
43
0.840909
8
44
4.25
0.75
0
0
0
0
0
0
0
0
0
0
0
0.113636
44
1
44
44
0.871795
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
3b91456aedbca5f4012d14236786be7d0b45fa46
96
py
Python
venv/lib/python3.8/site-packages/pkg_resources/__init__.py
Retraces/UkraineBot
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
[ "MIT" ]
2
2022-03-13T01:58:52.000Z
2022-03-31T06:07:54.000Z
venv/lib/python3.8/site-packages/pkg_resources/__init__.py
DesmoSearch/Desmobot
b70b45df3485351f471080deb5c785c4bc5c4beb
[ "MIT" ]
19
2021-11-20T04:09:18.000Z
2022-03-23T15:05:55.000Z
venv/lib/python3.8/site-packages/pkg_resources/__init__.py
DesmoSearch/Desmobot
b70b45df3485351f471080deb5c785c4bc5c4beb
[ "MIT" ]
null
null
null
/home/runner/.cache/pip/pool/b8/09/cf/abc16c4d71c01071562bb514f4085d363138a394a47e4f02c9f6f3b4ef
96
96
0.895833
9
96
9.555556
1
0
0
0
0
0
0
0
0
0
0
0.427083
0
96
1
96
96
0.46875
0
0
0
0
0
0
0
0
1
0
0
0
0
null
null
0
0
null
null
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
1
0
0
0
1
0
0
0
0
0
0
0
0
6
8ea8fb800e9c79180ac00bce6431041fe3baf097
1,707
py
Python
eval/migrations/0002_auto_20200512_0327.py
dennereed/paleocore
d6da6c39cde96050ee4b9e7213ec1200530cbeee
[ "MIT" ]
1
2021-02-05T19:50:13.000Z
2021-02-05T19:50:13.000Z
eval/migrations/0002_auto_20200512_0327.py
dennereed/paleocore
d6da6c39cde96050ee4b9e7213ec1200530cbeee
[ "MIT" ]
59
2020-06-17T22:21:51.000Z
2022-02-10T05:00:01.000Z
eval/migrations/0002_auto_20200512_0327.py
dennereed/paleocore
d6da6c39cde96050ee4b9e7213ec1200530cbeee
[ "MIT" ]
2
2020-07-01T14:11:09.000Z
2020-08-10T17:27:26.000Z
# -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-05-12 03:27
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):
    """Normalize the five Evaluation decision fields to a shared choice set.

    Every field gets the same three-tier choices (Good / Concern /
    Serious Concern); only ``evaluation_decision`` keeps the longer
    255-character limit, the rest are capped at 50.
    """

    dependencies = [
        ('eval', '0001_initial'),
    ]

    # Build the five AlterField operations from a (field, max_length) table
    # instead of repeating the identical CharField definition five times.
    operations = [
        migrations.AlterField(
            model_name='evaluation',
            name=field_name,
            field=models.CharField(
                blank=True,
                choices=[('Good', 'Good'), ('Concern', 'Concern'),
                         ('Serious Concern', 'Serious Concern')],
                max_length=length,
                null=True,
            ),
        )
        for field_name, length in (
            ('academic_decision', 50),
            ('evaluation_decision', 255),
            ('professional_decision', 50),
            ('research_decision', 50),
            ('teaching_decision', 50),
        )
    ]
41.634146
166
0.602812
170
1,707
5.929412
0.305882
0.138889
0.208333
0.143849
0.771825
0.771825
0.729167
0.729167
0.678571
0.678571
0
0.02521
0.233158
1,707
40
167
42.675
0.744843
0.040422
0
0.575758
1
0
0.255046
0.012844
0
0
0
0
0
1
0
false
0
0.060606
0
0.151515
0
0
0
0
null
0
1
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
d90871961620e014433675a75923c996f16c7a36
35
py
Python
smol/__init__.py
romamartyanov/smol
f24cedfce0f2aeafb2a589c9da52e4a913b431c1
[ "MIT" ]
1
2021-04-01T12:03:54.000Z
2021-04-01T12:03:54.000Z
smol/__init__.py
romamartyanov/smol
f24cedfce0f2aeafb2a589c9da52e4a913b431c1
[ "MIT" ]
null
null
null
smol/__init__.py
romamartyanov/smol
f24cedfce0f2aeafb2a589c9da52e4a913b431c1
[ "MIT" ]
1
2021-04-01T12:10:40.000Z
2021-04-01T12:10:40.000Z
from .yolov4_tiny import YoloV4Tiny
35
35
0.885714
5
35
6
1
0
0
0
0
0
0
0
0
0
0
0.0625
0.085714
35
1
35
35
0.875
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
d95ade140abfe250261eab407e50f27b968ecd23
163
py
Python
students/admin.py
Davy-71993/MySchool
fa02c8ec19d71873fc0d714cf652d8ad05f2f0e7
[ "MIT" ]
null
null
null
students/admin.py
Davy-71993/MySchool
fa02c8ec19d71873fc0d714cf652d8ad05f2f0e7
[ "MIT" ]
null
null
null
students/admin.py
Davy-71993/MySchool
fa02c8ec19d71873fc0d714cf652d8ad05f2f0e7
[ "MIT" ]
null
null
null
from django.contrib import admin
from .models import Student, Subject, Paper

# Register every student-app model with the default admin site so each
# gets the stock ModelAdmin change/list pages.
for _model in (Student, Subject, Paper):
    admin.site.register(_model)
23.285714
43
0.815951
23
163
5.782609
0.478261
0.203008
0.383459
0
0
0
0
0
0
0
0
0
0.08589
163
6
44
27.166667
0.892617
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.4
0
0.4
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
d967bc1154fa07936e1bee9dd5f8320045c37ede
257
py
Python
main.py
isa-vit/CCA-B
8637e41d4017894acfa17b3bebec4560583a1ffe
[ "MIT" ]
null
null
null
main.py
isa-vit/CCA-B
8637e41d4017894acfa17b3bebec4560583a1ffe
[ "MIT" ]
null
null
null
main.py
isa-vit/CCA-B
8637e41d4017894acfa17b3bebec4560583a1ffe
[ "MIT" ]
null
null
null
import db_operations ''' To read collection ''' # db_operations.readCompleteData("Events") ''' To write an event into collection ''' # db_operations.addNewTechnitude("Events") ''' To write a whole collection ''' # db_operations.addCompleteData("Events")
15.117647
42
0.7393
29
257
6.413793
0.551724
0.258065
0.354839
0
0
0
0
0
0
0
0
0
0.120623
257
16
43
16.0625
0.823009
0.470817
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
d984fa75e5d1bde3214bc09abcff676fc184c627
370
py
Python
chapter1/tests/test_chapter1.py
AmatsukiUrato/study_python
e06afbc92a6f112ff9d3159bea4abcca6b17d280
[ "MIT" ]
null
null
null
chapter1/tests/test_chapter1.py
AmatsukiUrato/study_python
e06afbc92a6f112ff9d3159bea4abcca6b17d280
[ "MIT" ]
null
null
null
chapter1/tests/test_chapter1.py
AmatsukiUrato/study_python
e06afbc92a6f112ff9d3159bea4abcca6b17d280
[ "MIT" ]
1
2019-04-11T04:08:03.000Z
2019-04-11T04:08:03.000Z
import unittest

from chapter1 import question1


class TestChapter1(unittest.TestCase):
    """Checks that question1.calc1 and question1.calc2 agree on sample inputs."""

    # Shared (arguments, expected result) table — both implementations are
    # expected to produce identical answers for these cases.
    CASES = (
        ((3, 2, 2, 1), 1),
        ((5, 5, 2, 3), 6),
    )

    def test_calc1(self):
        for args, expected in self.CASES:
            self.assertEqual(question1.calc1(*args), expected)

    def test_calc2(self):
        for args, expected in self.CASES:
            self.assertEqual(question1.calc2(*args), expected)
26.428571
56
0.659459
54
370
4.481481
0.351852
0.247934
0.396694
0.231405
0.280992
0.239669
0.239669
0.239669
0
0
0
0.112245
0.205405
370
13
57
28.461538
0.710884
0
0
0
0
0
0
0
0
0
0
0
0.444444
1
0.222222
false
0
0.222222
0
0.555556
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
0
0
0
0
1
0
0
6
d992bfa3f70e0668506503a9520f645c01712ba2
29
py
Python
siggy/__init__.py
Storj/siggy
cf635cca1c5f1b084b5b5e8f789e534c1cfea4b6
[ "MIT" ]
4
2015-01-03T00:08:03.000Z
2015-10-10T03:27:15.000Z
siggy/__init__.py
StorjOld/siggy
cf635cca1c5f1b084b5b5e8f789e534c1cfea4b6
[ "MIT" ]
null
null
null
siggy/__init__.py
StorjOld/siggy
cf635cca1c5f1b084b5b5e8f789e534c1cfea4b6
[ "MIT" ]
5
2015-03-09T07:03:39.000Z
2015-09-05T22:52:45.000Z
from .siggy import * # NOQA
14.5
28
0.655172
4
29
4.75
1
0
0
0
0
0
0
0
0
0
0
0
0.241379
29
1
29
29
0.863636
0.137931
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
d99431d88b970cdc95b750ccb4c412e52efe3056
1,595
py
Python
tests/test_scale_offset.py
nmandery/rasterio
ba5e90c487bd1930f52e57dba999e889b4df9ade
[ "BSD-3-Clause" ]
1,479
2015-01-10T12:35:07.000Z
2021-10-18T16:17:15.000Z
tests/test_scale_offset.py
nmandery/rasterio
ba5e90c487bd1930f52e57dba999e889b4df9ade
[ "BSD-3-Clause" ]
1,819
2015-01-06T21:56:25.000Z
2021-10-20T02:28:27.000Z
tests/test_scale_offset.py
nmandery/rasterio
ba5e90c487bd1930f52e57dba999e889b4df9ade
[ "BSD-3-Clause" ]
509
2015-01-06T20:59:12.000Z
2021-10-18T14:14:57.000Z
import pytest
import rasterio
from rasterio.profiles import default_gtiff_profile


def _open_tmp_dataset(tmpdir, band_count):
    """Open a writable 256x256 GTiff named test.tif under *tmpdir*."""
    path = str(tmpdir.join('test.tif'))
    return rasterio.open(
        path, 'w', count=band_count, height=256, width=256,
        **default_gtiff_profile)


def test_set_scales(tmpdir):
    """Scales can be set when dataset is open"""
    with _open_tmp_dataset(tmpdir, 3) as dst:
        # Bands default to a scale of 1.0 each.
        assert dst.scales == (1.0, 1.0, 1.0)
        dst.scales = [0.1, 0.1, 0.1]
        assert dst.scales == (0.1, 0.1, 0.1)


@pytest.mark.parametrize('value', [[0.1], [2.0] * 3, []])
def test_set_scales_error(tmpdir, value):
    """Number of values must match band count"""
    with _open_tmp_dataset(tmpdir, 2) as dst:
        with pytest.raises(ValueError):
            dst.scales = value


def test_set_offsets(tmpdir):
    """Scales can be set when dataset is open"""
    with _open_tmp_dataset(tmpdir, 3) as dst:
        # Bands default to an offset of 0.0 each.
        assert dst.offsets == (0.0, 0.0, 0.0)
        dst.offsets = [0.1, 0.1, 0.1]
        assert dst.offsets == (0.1, 0.1, 0.1)


@pytest.mark.parametrize('value', [[0.1], [2.0] * 3, []])
def test_set_offsets_error(tmpdir, value):
    """Number of values must match band count"""
    with _open_tmp_dataset(tmpdir, 2) as dst:
        with pytest.raises(ValueError):
            dst.offsets = value
32.55102
57
0.602508
219
1,595
4.296804
0.223744
0.070138
0.100956
0.085016
0.831031
0.77152
0.77152
0.77152
0.77152
0.77152
0
0.046745
0.248903
1,595
48
58
33.229167
0.738731
0.097179
0
0.571429
0
0
0.032417
0
0
0
0
0
0.114286
1
0.114286
false
0
0.085714
0
0.2
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
7947ebe6d82af84e96e565ed75358755411f84bb
237
py
Python
pattern_classes/Engineer.py
Sunuba/PythonStrategyPattern
b0490cf63ecc87562d5fc3ef1f2a152fb97b0a78
[ "MIT" ]
null
null
null
pattern_classes/Engineer.py
Sunuba/PythonStrategyPattern
b0490cf63ecc87562d5fc3ef1f2a152fb97b0a78
[ "MIT" ]
null
null
null
pattern_classes/Engineer.py
Sunuba/PythonStrategyPattern
b0490cf63ecc87562d5fc3ef1f2a152fb97b0a78
[ "MIT" ]
null
null
null
class Engineer:
    """Computes a value from (width, height) by delegating to a strategy.

    ``method`` is a zero-argument callable (typically a class) whose
    instances expose ``calculate(width, height)``; ``calculate`` here
    instantiates it fresh on every call and forwards the dimensions.
    """

    def __init__(self, height, width, method):
        # Keep the dimensions and the strategy factory for later use.
        self.method = method
        self.height = height
        self.width = width

    def calculate(self):
        """Build a strategy instance and apply it to (width, height)."""
        strategy = self.method()
        return strategy.calculate(self.width, self.height)
29.625
63
0.637131
28
237
5.25
0.357143
0.204082
0
0
0
0
0
0
0
0
0
0
0.257384
237
8
63
29.625
0.835227
0
0
0
0
0
0
0
0
0
0
0
0
1
0.285714
false
0
0
0.142857
0.571429
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
6
7948cdd8b582b2fcd8fce30038647cb2a78a07a2
46
py
Python
miranda/eccc/__init__.py
Ouranosinc/miranda
5c54767a4e6e6c3c1f638ca0fe22673ea98e2746
[ "Apache-2.0" ]
4
2019-11-07T17:45:26.000Z
2021-09-22T18:22:01.000Z
miranda/eccc/__init__.py
Ouranosinc/miranda
5c54767a4e6e6c3c1f638ca0fe22673ea98e2746
[ "Apache-2.0" ]
12
2019-09-19T17:05:39.000Z
2022-03-31T20:26:16.000Z
miranda/eccc/__init__.py
Ouranosinc/miranda
5c54767a4e6e6c3c1f638ca0fe22673ea98e2746
[ "Apache-2.0" ]
1
2020-02-01T01:01:22.000Z
2020-02-01T01:01:22.000Z
from ._raw import * from ._summaries import *
15.333333
25
0.73913
6
46
5.333333
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.173913
46
2
26
23
0.842105
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
7950c5d229922e5a3130d40ad166807b1b1e793e
25,099
py
Python
shed/tests/test_simple.py
st3107/shed-streaming
c632fc465d7e11fe0155fbc3e8add1965615dd51
[ "BSD-3-Clause" ]
4
2017-09-20T16:26:34.000Z
2020-03-24T15:51:28.000Z
shed/tests/test_simple.py
st3107/shed-streaming
c632fc465d7e11fe0155fbc3e8add1965615dd51
[ "BSD-3-Clause" ]
172
2017-07-25T21:36:12.000Z
2022-02-25T16:05:36.000Z
shed/tests/test_simple.py
st3107/shed-streaming
c632fc465d7e11fe0155fbc3e8add1965615dd51
[ "BSD-3-Clause" ]
6
2017-08-08T12:39:18.000Z
2021-03-29T22:28:47.000Z
import operator as op import time import uuid import networkx as nx import numpy as np import pytest from bluesky.plan_stubs import checkpoint, abs_set, trigger_and_read from bluesky.plans import scan, count from shed import ( SimpleFromEventStream as FromEventStream, SimpleToEventStream as ToEventStream, walk_to_translation, simple_to_event_stream_new_api, ) from shed.simple import _hash_or_uid, build_upstream_node_set from shed.tests.utils import y from shed.utils import unstar from rapidz import Stream, move_to_first def test_from_event_model(RE, hw): source = Stream() t = FromEventStream("event", ("data", "motor"), source, principle=True) L = t.sink_to_list() RE.subscribe(unstar(source.emit)) RE.subscribe(print) RE(scan([hw.motor], hw.motor, 0, 9, 10)) assert len(L) == 10 for i, ll in enumerate(L): assert i == ll def test_from_event_model_single(RE, hw): source = Stream() t = FromEventStream("event", "data", source, principle=True) L = t.sink_to_list() RE.subscribe(unstar(source.emit)) RE.subscribe(print) RE(scan([hw.motor], hw.motor, 0, 9, 10)) assert len(L) == 10 for i, ll in enumerate(L): assert i == ll["motor"] def test_from_event_model_multi(RE, hw): source = Stream() t = FromEventStream( "event", ("data", ("motor", "motor_setpoint")), source, principle=True ) L = t.sink_to_list() RE.subscribe(unstar(source.emit)) RE.subscribe(print) RE(scan([hw.motor], hw.motor, 0, 9, 10)) assert len(L) == 10 for i, ll in enumerate(L): assert i == ll[0] assert i == ll[1] def test_from_event_model_all(RE, hw): source = Stream() t = FromEventStream("event", (), source, principle=True) L = t.sink_to_list() RE.subscribe(unstar(source.emit)) RE.subscribe(print) RE(scan([hw.motor], hw.motor, 0, 9, 10)) assert len(L) == 10 for i, ll in enumerate(L): assert i == ll["data"]["motor"] def test_from_event_model_stream_syntax(RE, hw): source = Stream() t = source.simple_from_event_stream( "event", ("data", "motor"), principle=True ) L = t.sink_to_list() 
RE.subscribe(unstar(source.emit)) RE.subscribe(print) RE(scan([hw.motor], hw.motor, 0, 9, 10)) assert len(L) == 10 for i, ll in enumerate(L): assert i == ll def test_from_event_model_stream_name(): def data(): suid = str(uuid.uuid4()) duid = str(uuid.uuid4()) yield "start", {"hi": "world", "uid": suid} yield "descriptor", { "name": "hi", "data_keys": {"ct"}, "uid": duid, "run_start": suid, } for i in range(10): yield "event", { "uid": str(uuid.uuid4()), "data": {"ct": i}, "descriptor": duid, } duid = str(uuid.uuid4()) yield "descriptor", { "name": "not hi", "data_keys": {"ct"}, "uid": duid, "run_start": suid, } for i in range(100, 110): yield "event", { "uid": str(uuid.uuid4()), "data": {"ct": i}, "descriptor": duid, } yield "stop", {"uid": str(uuid.uuid4()), "run_start": suid} g = data() source = Stream() t = FromEventStream( "event", ("data", "ct"), source, event_stream_name="hi" ) L = t.sink_to_list() for gg in g: source.emit(gg) assert len(L) == 10 for i, ll in enumerate(L): assert i == ll def test_from_event_model_stream_name2(): def data(): suid = str(uuid.uuid4()) duid = str(uuid.uuid4()) yield "start", {"hi": "world", "uid": suid} yield "descriptor", { "name": "hi", "data_keys": {"ct"}, "uid": duid, "run_start": suid, } for i in range(10): yield "event", { "uid": str(uuid.uuid4()), "data": {"ct": i}, "descriptor": duid, } duid = str(uuid.uuid4()) yield "descriptor", { "name": "not hi", "data_keys": {"ct"}, "uid": duid, "run_start": suid, } for i in range(100, 110): yield "event", { "uid": str(uuid.uuid4()), "data": {"ct": i}, "descriptor": duid, } yield "stop", {"uid": str(uuid.uuid4()), "run_start": suid} g = data() source = Stream() t = FromEventStream( "event", ("data", "ct"), source, event_stream_name="not hi" ) L = t.sink_to_list() for gg in g: source.emit(gg) assert len(L) == 10 for i, ll in enumerate(L): assert i + 100 == ll def test_walk_up(): raw = Stream() a_translation = FromEventStream("start", ("time",), raw, principle=True) b_translation = 
FromEventStream("event", ("data", "pe1_image"), raw) d = b_translation.zip_latest(a_translation) dd = d.map(op.truediv) e = ToEventStream(dd, ("data",)) g = nx.DiGraph() walk_to_translation(e, g) att = [] for node, attrs in g.nodes.items(): att.append(attrs["stream"]) s = {a_translation, b_translation, d, dd, e} assert s == set(att) assert {_hash_or_uid(k) for k in s} == set(g.nodes) def test_walk_up_partial(): raw = Stream() a_translation = FromEventStream("start", ("time",), raw, principle=True) b_translation = FromEventStream("event", ("data", "pe1_image"), raw) d = b_translation.zip_latest(a_translation) ddd = ToEventStream(d, ("data",)) dd = d.map(op.truediv) e = ToEventStream(dd, ("data",)) g = nx.DiGraph() walk_to_translation(e, g) att = [] for node, attrs in g.nodes.items(): att.append(attrs["stream"]) s = {ddd, dd, e, d} assert s == set(att) assert {_hash_or_uid(k) for k in s} == set(g.nodes) def test_to_event_model(RE, hw): source = Stream() t = FromEventStream("event", ("data", "motor"), source, principle=True) assert t.principle n = ToEventStream(t, ("ct",), data_key_md={"ct": {"units": "arb"}}) tt = t.sink_to_list() p = n.pluck(0).sink_to_list() d = n.pluck(1).sink_to_list() RE.subscribe(unstar(source.emit)) RE.subscribe(print) RE(scan([hw.motor], hw.motor, 0, 9, 10)) assert tt assert set(p) == {"start", "stop", "event", "descriptor"} assert d[1]["hints"] == {"analyzer": {"fields": ["ct"]}} assert d[1]["data_keys"]["ct"]["units"] == "arb" assert d[-1]["run_start"] def test_to_event_model_stream_syntax(RE, hw): source = Stream() t = FromEventStream("event", ("data", "motor"), source, principle=True) assert t.principle n = t.simple_to_event_stream(("ct",), data_key_md={"ct": {"units": "arb"}}) tt = t.sink_to_list() p = n.pluck(0).sink_to_list() d = n.pluck(1).sink_to_list() RE.subscribe(unstar(source.emit)) RE.subscribe(print) RE(scan([hw.motor], hw.motor, 0, 9, 10)) assert tt assert set(p) == {"start", "stop", "event", "descriptor"} assert d[1]["hints"] 
== {"analyzer": {"fields": ["ct"]}} assert d[1]["data_keys"]["ct"]["units"] == "arb" assert d[-1]["run_start"] def test_align(): a = Stream() b = Stream() z = a.AlignEventStreams(b) sl = z.sink_to_list() # TODO: use real run engine here for n, d, dd in zip( ["start", "descriptor", "event", "stop"], [ {"a": "hi", "b": {"hi": "world"}, "uid": "hi", "time": 123}, {"bla": "foo", "uid": "abc"}, {"data": "now", "descriptor": "abc"}, {"stop": "doc"}, ], [ {"a": "hi2", "b": {"hi2": "world"}}, {"bla": "foo", "uid": "123"}, {"data": "now", "descriptor": "123"}, {"stop": "doc"}, ], ): a.emit((n, d)) b.emit((n, dd)) assert len(sl) == 4 assert sl[0][1].get("b") == {"hi": "world", "hi2": "world"} def test_align_stream_syntax(): a = Stream() b = Stream() z = a.align_event_streams(b) sl = z.sink_to_list() # TODO: use real run engine here for n, d, dd in zip( ["start", "descriptor", "event", "stop"], [ {"a": "hi", "b": {"hi": "world"}, "uid": "hi", "time": 123}, {"bla": "foo", "uid": "abc"}, {"data": "now", "descriptor": "abc"}, {"stop": "doc"}, ], [ {"a": "hi2", "b": {"hi2": "world"}}, {"bla": "foo", "uid": "123"}, {"data": "now", "descriptor": "123"}, {"stop": "doc"}, ], ): a.emit((n, d)) b.emit((n, dd)) assert len(sl) == 4 assert sl[0][1].get("b") == {"hi": "world", "hi2": "world"} assert "original_start_time" in sl[0][1] def test_align_interrupted(RE, hw): a = Stream() b = FromEventStream("event", ("data", "img"), a, principle=True).map( op.add, 1 ) b.sink(print) c = ToEventStream(b, ("out",)) z = move_to_first(a.AlignEventStreams(c)) sl = z.sink_to_list() L = [] RE.subscribe(lambda *x: L.append(x)) RE(count([hw.img])) for nd in L: name, doc = nd # cause an exception if name == "event": doc["data"]["img"] = "hi" try: a.emit((name, doc)) except TypeError: pass assert {"start", "stop"} == set(list(zip(*sl))[0]) # check that buffers are not cleared, yet sl.clear() # If there are elements in the buffer they need to be cleared when all # start docs come in. 
for nd in L: name, doc = nd # cause an exception if name == "event": doc["data"]["img"] = 1 a.emit((name, doc)) if name == "start": # now buffers should be clear assert not any( [b for n, tb in z.true_buffers.items() for u, b in tb.items()] ) assert {"start", "descriptor", "event", "stop"} == set(list(zip(*sl))[0]) # now buffers should be clear (as all docs were emitted) assert not any( [b for n, tb in z.true_buffers.items() for u, b in tb.items()] ) def test_align_res_dat(RE, hw): a = Stream() b = FromEventStream("event", ("data", "motor"), a, principle=True).map( op.add, 1 ) c = ToEventStream(b, ("out",)) z = a.AlignEventStreams(c) sl = z.sink_to_list() RE.subscribe(lambda *x: a.emit(x)) osu = RE(scan([hw.img], hw.motor, 0, 10, 10)) for n, d in sl: if n == "start": assert d["original_start_uid"] == osu[0] if n == "event": assert d["data"]["out"] == d["data"]["motor"] + 1 def test_align_buffering(RE, hw): zz = {"data": False} a = Stream() b = FromEventStream( "event", ("data", "motor"), a.filter(lambda x: zz["data"]), principle=True, ).map(op.add, 1) c = ToEventStream(b, ("out",)) z = move_to_first(a.AlignEventStreams(c)) sl = z.sink_to_list() RE.subscribe(lambda *x: a.emit(x)) RE(scan([hw.img], hw.motor, 0, 10, 10, md={"hello": "world"})) zz["data"] = True sl.clear() RE(scan([hw.img], hw.motor, 0, 10, 10)) assert "hello" not in sl[0][1] def test_align_buffering2(RE, hw): a = Stream() d = Stream() b = FromEventStream( "event", ("data", "motor"), principle=True, upstream=a ).map(op.add, 1) c = ToEventStream(b, ("out",)) z = c.AlignEventStreams(d) names = z.pluck(0).sink_to_list() L = [] RE.subscribe(lambda *x: L.append(x)) RE(scan([hw.img], hw.motor, 0, 10, 10, md={"hello": "world"})) for nd in L: d.emit(nd) print("hi") for nd in L: a.emit(nd) assert all(k in names for k in ["start", "descriptor", "event", "stop"]) def test_align_multi_stream(RE, hw): a = Stream() b = FromEventStream( "event", ("data", "motor"), a, principle=True, event_stream_name="primary", 
).map(op.add, 1) c = ToEventStream(b, ("out",)) c.sink(print) z = a.AlignEventStreams(c, event_stream_name="primary") sl = z.sink_to_list() RE.subscribe(lambda *x: a.emit(x)) def one_1d_step(detectors, motor, step): """ Inner loop of a 1D step scan This is the default function for ``per_step`` param in 1D plans. """ yield from checkpoint() yield from abs_set(motor, step, wait=True) yield from trigger_and_read(list(detectors) + [motor], name="dark") return (yield from trigger_and_read(list(detectors) + [motor])) osu = RE(scan([hw.img], hw.motor, 0, 10, 10, per_step=one_1d_step)) assert len(sl) == 10 + 3 for n, d in sl: if n == "start": assert d["original_start_uid"] == osu[0] if n == "event": print(d) assert d["data"]["out"] == d["data"]["motor"] + 1 def test_to_event_model_dict(RE, hw): source = Stream() t = FromEventStream("event", ("data",), source, principle=True) n = ToEventStream(t) p = n.pluck(0).sink_to_list() d = n.pluck(1).sink_to_list() n.sink(print) RE.subscribe(unstar(source.emit)) RE.subscribe(print) RE(scan([hw.motor], hw.motor, 0, 9, 10)) print(d[1]["hints"]) # AAA assert set(p) == {"start", "stop", "event", "descriptor"} assert d[1]["hints"] == { "analyzer": {"fields": ["motor", "motor_setpoint"]} } assert d[2]["data"] == {"motor_setpoint": 0, "motor": 0} assert d[-1]["run_start"] def test_replay_export_test(): def y(): suid = str(uuid.uuid4()) yield ("start", {"uid": suid, "time": time.time()}) duid = str(uuid.uuid4()) yield ( "descriptor", { "uid": duid, "run_start": suid, "name": "primary", "data_keys": {"det_image": {"dtype": "int", "units": "arb"}}, "time": time.time(), }, ) for i in range(5): yield ( "event", { "uid": str(uuid.uuid4()), "data": {"det_image": i}, "timestamps": {"det_image": time.time()}, "seq_num": i + 1, "time": time.time(), "descriptor": duid, }, ) yield ( "stop", {"uid": str(uuid.uuid4()), "time": time.time(), "run_start": suid}, ) print("build graph") g1 = FromEventStream( "event", ("data", "det_image"), principle=True, 
stream_name="g1" ) g11 = FromEventStream("event", ("data", "det_image"), stream_name="g11") g11_1 = g1.zip(g11) g2 = g11_1.starmap(op.mul).map(np.log) g = g2.SimpleToEventStream(("img2",)) from pprint import pprint g.sink(pprint) L = g.sink_to_list() print("run experiment") for yy in y(): print(yy[0]) g11.update(yy) g1.update(yy) assert L[-1][1]["run_start"] def test_no_stop(hw, RE): source = Stream().filter(lambda x: x[0] != "stop") t = FromEventStream("event", ("data",), source, principle=True) n = ToEventStream(t) p = n.pluck(0).sink_to_list() d = n.pluck(1).sink_to_list() RE.subscribe(unstar(source.emit)) RE.subscribe(print) RE(scan([hw.motor], hw.motor, 0, 9, 10)) RE(scan([hw.motor], hw.motor, 0, 9, 10)) assert set(p) == {"start", "stop", "event", "descriptor"} assert d[1]["hints"] == { "analyzer": {"fields": ["motor", "motor_setpoint"]} } assert d[2]["data"] == {"motor_setpoint": 0, "motor": 0} def test_parent_nodes(): # build the graph g1 = FromEventStream( "event", ("data", "det_image"), principle=True, stream_name="g1", asynchronous=True, ) g11 = FromEventStream( "event", ("data", "det_image"), stream_name="g11", asynchronous=True ) g2 = g1.zip(g11).starmap(op.mul, stream_name="mul") g = g2.SimpleToEventStream(("img2",)) l1 = g.sink_to_list() # g.sink(print) assert len(g.translation_nodes) == 2 print("start experiment") # run the experiment l0 = [] for yy in y(5): l0.append(yy) g11.update(yy) g1.update(yy) print(g11.start_uid) assert len(l1[0][1]["parent_node_map"]) == 2 @pytest.mark.xfail(raises=RuntimeError) def test_no_parent_nodes(): # build the graph g1 = FromEventStream( "event", ("data", "det_image"), stream_name="g1", asynchronous=True ) g11 = FromEventStream( "event", ("data", "det_image"), stream_name="g11", asynchronous=True ) g2 = g1.zip(g11).starmap(op.mul, stream_name="mul") g2.SimpleToEventStream(("img2",)) def test_multi_path_principle(hw, RE): source = Stream() fes1 = FromEventStream("start", ("number",), source, principle=True) fes2 = 
FromEventStream("event", ("data", "motor"), source, principle=True) out1 = fes1.map(op.add, 1) out2 = fes2.combine_latest(out1, emit_on=0).starmap(op.mul) a = ToEventStream(out1, ("out1",)) b = ToEventStream(out2, ("out2",)) la = a.sink_to_list() lb = b.sink_to_list() RE.subscribe(lambda *x: source.emit(x)) for i in range(1, 3): RE(count([hw.motor], md={"number": 5})) for lst in [la, lb]: o1 = [z[0] for z in lst] o2 = ["start", "descriptor", "event", "stop"] * i assert o1 == o2 def test_same_hdr_many_times(hw, RE): source = Stream() fes1 = FromEventStream("start", ("number",), source, principle=True) fes2 = FromEventStream("event", ("data", "motor"), source, principle=True) out1 = fes1.map(op.add, 1) out2 = fes2.combine_latest(out1, emit_on=0).starmap(op.mul) a = ToEventStream(out1, ("out1",)) b = ToEventStream(out2, ("out2",)) la = a.sink_to_list() lb = b.sink_to_list() L = [] RE.subscribe(lambda *x: L.append(x)) RE(count([hw.motor], md={"number": 5})) for i in range(1, 3): for ll in L: source.emit(ll) for lst in [la, lb]: o1 = [z[0] for z in lst] o2 = ["start", "descriptor", "event", "stop"] * i assert o1 == o2 def test_last_cache(RE, hw): source = Stream() t = FromEventStream("event", ("data", "motor"), source, principle=True) assert t.principle n = ToEventStream( t, ("ct",), data_key_md={"ct": {"units": "arb"}} ).LastCache() tt = t.sink_to_list() names = n.pluck(0).sink_to_list() docs = n.pluck(1).sink_to_list() RE.subscribe(unstar(source.emit)) RE.subscribe(print) RE(scan([hw.motor], hw.motor, 0, 9, 10)) assert len(docs) == 10 + 3 + 2 assert names[-3] == "descriptor" assert names[-2] == "event" assert tt assert set(names) == {"start", "stop", "event", "descriptor"} assert docs[1]["hints"] == {"analyzer": {"fields": ["ct"]}} assert docs[1]["data_keys"]["ct"]["units"] == "arb" assert docs[-1]["run_start"] def test_build_upstream_node_set(): source = Stream() t = FromEventStream("event", ("data", "motor"), source, principle=True) assert t.principle n = 
ToEventStream( t, ("ct",), data_key_md={"ct": {"units": "arb"}} ).LastCache() s = build_upstream_node_set(n) assert len(s) == 3 def test_to_event_model_new_api(RE, hw): source = Stream() t = FromEventStream("event", ("data", "motor"), source, principle=True) assert t.principle n = simple_to_event_stream_new_api( {t: {"data_keys": {"ct": {"units": "arb", "precision": 2}}}} ) tt = t.sink_to_list() p = n.pluck(0).sink_to_list() d = n.pluck(1).sink_to_list() RE.subscribe(unstar(source.emit)) RE.subscribe(print) RE(scan([hw.motor], hw.motor, 0, 9, 10)) assert tt assert set(p) == {"start", "stop", "event", "descriptor"} assert d[1]["hints"] == {"analyzer": {"fields": ["ct"]}} assert d[1]["data_keys"]["ct"]["units"] == "arb" assert d[-1]["run_start"] def test_to_event_model_new_api_no_data_keys(RE, hw): source = Stream() t = FromEventStream("event", ("data",), source, principle=True) assert t.principle n = simple_to_event_stream_new_api({t: {}}) tt = t.sink_to_list() p = n.pluck(0).sink_to_list() d = n.pluck(1).sink_to_list() RE.subscribe(unstar(source.emit)) RE.subscribe(print) RE(scan([hw.motor], hw.motor, 0, 9, 10)) assert tt assert set(p) == {"start", "stop", "event", "descriptor"} assert d[1]["hints"] == { "analyzer": {"fields": ["motor", "motor_setpoint"]} } assert d[1]["data_keys"]["motor"] assert d[-1]["run_start"] def test_to_event_model_new_api_clobber(RE, hw): source = Stream() t = FromEventStream("event", ("data", "motor"), source, principle=True) assert t.principle n = simple_to_event_stream_new_api( {t: {"data_keys": {"ct": {"units": "arb", "dtype": "array"}}}} ) tt = t.sink_to_list() p = n.pluck(0).sink_to_list() d = n.pluck(1).sink_to_list() RE.subscribe(unstar(source.emit)) RE.subscribe(print) RE(scan([hw.motor], hw.motor, 0, 9, 10)) assert tt assert set(p) == {"start", "stop", "event", "descriptor"} assert d[1]["data_keys"]["ct"]["dtype"] == "array" assert d[-1]["run_start"] def test_to_event_model_new_api_multi(RE, hw): source = Stream() stop = 
FromEventStream("stop", (), source) t = FromEventStream( "event", ("data", "motor"), source, principle=True, stream_name="hi" ) assert t.principle tt = t.zip(stop) n = simple_to_event_stream_new_api( { t: {"data_keys": {"ct": {"units": "arb", "precision": 2}}}, tt: { "name": "final", "data_keys": {"ct": {"units": "arb", "precision": 2}}, }, }, hello="world", ) tt = t.sink_to_list() p = n.pluck(0).sink_to_list() d = n.pluck(1).sink_to_list() RE.subscribe(unstar(source.emit)) RE.subscribe(print) RE(scan([hw.motor], hw.motor, 0, 9, 10)) assert tt assert set(p) == {"start", "stop", "event", "descriptor"} assert d[0]["hello"] == "world" assert d[1]["hints"] == {"analyzer": {"fields": ["ct"]}} assert d[1]["data_keys"]["ct"]["units"] == "arb" assert d[-3]["name"] == "final" assert d[-1]["run_start"] @pytest.mark.xfail(raises=RuntimeError) def test_to_event_model_new_api_no_principle(RE, hw): source = Stream() stop = FromEventStream("stop", (), source) t = FromEventStream("event", ("data", "motor"), source, stream_name="hi") tt = t.zip(stop) simple_to_event_stream_new_api( { t: {"data_keys": {"ct": {"units": "arb", "precision": 2}}}, tt: { "name": "final", "data_keys": {"ct": {"units": "arb", "precision": 2}}, }, }, hello="world", ) def test_to_event_model_new_api_multi_parent(RE, hw): source = Stream() t = FromEventStream("event", ("data", "motor"), source, principle=True) t2 = FromEventStream("event", ("data", "motor"), source, principle=True) assert t.principle n = simple_to_event_stream_new_api( { t.zip(t2).pluck(0): { "data_keys": {"ct": {"units": "arb", "precision": 2}} } } ) tt = t.sink_to_list() p = n.pluck(0).sink_to_list() d = n.pluck(1).sink_to_list() RE.subscribe(unstar(source.emit)) RE.subscribe(print) RE(scan([hw.motor], hw.motor, 0, 9, 10)) assert tt assert set(p) == {"start", "stop", "event", "descriptor"} assert d[1]["hints"] == {"analyzer": {"fields": ["ct"]}} assert d[1]["data_keys"]["ct"]["units"] == "arb" assert d[-1]["run_start"] def 
test_to_event_model_new_api_e_stop(RE, hw): source = Stream() t = FromEventStream("event", ("data", "motor"), source, principle=True) assert t.principle n = simple_to_event_stream_new_api( {t: {"data_keys": {"ct": {"units": "arb", "precision": 2}}}} ) tt = t.sink_to_list() p = n.pluck(0).sink_to_list() d = n.pluck(1).sink_to_list() def f(*x): if x[0] == "stop": return source.emit(x) RE.subscribe(f) RE(scan([hw.motor], hw.motor, 0, 9, 10)) rs = d[0]["uid"] assert tt assert set(p) == {"start", "event", "descriptor"} assert d[1]["hints"] == {"analyzer": {"fields": ["ct"]}} assert d[1]["data_keys"]["ct"]["units"] == "arb" ll = len(d) RE(scan([hw.motor], hw.motor, 0, 9, 10)) assert d[ll]["run_start"] == rs assert set(p) == {"start", "stop", "event", "descriptor"}
27.611661
79
0.538229
3,349
25,099
3.902658
0.0851
0.023412
0.039021
0.042158
0.805815
0.78508
0.758837
0.734124
0.708263
0.691431
0
0.02146
0.274075
25,099
908
80
27.64207
0.695884
0.018965
0
0.667606
0
0
0.123688
0
0
0
0
0.001101
0.152113
1
0.05493
false
0.001408
0.019718
0
0.077465
0.039437
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
7953ce9836a274d9c32140c193b43e238251adbf
31,921
py
Python
Incident-Response/Tools/dfirtrack/dfirtrack_artifacts/tests/artifact/test_artifact_exporter_spreadsheet_xls_views.py
sn0b4ll/Incident-Playbook
cf519f58fcd4255674662b3620ea97c1091c1efb
[ "MIT" ]
1
2021-07-24T17:22:50.000Z
2021-07-24T17:22:50.000Z
Incident-Response/Tools/dfirtrack/dfirtrack_artifacts/tests/artifact/test_artifact_exporter_spreadsheet_xls_views.py
sn0b4ll/Incident-Playbook
cf519f58fcd4255674662b3620ea97c1091c1efb
[ "MIT" ]
2
2022-02-28T03:40:31.000Z
2022-02-28T03:40:52.000Z
Incident-Response/Tools/dfirtrack/dfirtrack_artifacts/tests/artifact/test_artifact_exporter_spreadsheet_xls_views.py
sn0b4ll/Incident-Playbook
cf519f58fcd4255674662b3620ea97c1091c1efb
[ "MIT" ]
2
2022-02-25T08:34:51.000Z
2022-03-16T17:29:44.000Z
from datetime import datetime from django.contrib.auth.models import User from django.test import TestCase from django.utils import timezone from dfirtrack_artifacts.exporter.spreadsheet.xls import artifact_cron from dfirtrack_artifacts.models import Artifact, Artifactstatus, Artifacttype from dfirtrack_config.models import ArtifactExporterSpreadsheetXlsConfigModel, MainConfigModel from dfirtrack_main.models import System, Systemstatus from mock import patch import urllib.parse import xlrd class ArtifactExporterSpreadsheetXlsViewTestCase(TestCase): """ artifact exporter spreadsheet XLS view tests """ @classmethod def setUpTestData(cls): # create user test_user = User.objects.create_user(username='testuser_artifact_exporter_spreadsheet_xls', password='LTzoNHIdxiJydsaJKf1G') # create object artifactstatus_3 = Artifactstatus.objects.create(artifactstatus_name = 'artifactstatus_3') # create object artifactstatus_1 = Artifactstatus.objects.create( artifactstatus_name='artifactstatus_1', artifactstatus_note='lorem ipsum', ) # create objects artifacttype_1 = Artifacttype.objects.create(artifacttype_name='artifacttype_1') artifacttype_2 = Artifacttype.objects.create( artifacttype_name='artifacttype_2', artifacttype_note='lorem ipsum', ) # create object systemstatus_1 = Systemstatus.objects.create(systemstatus_name='systemstatus_1') # create object system_1 = System.objects.create( system_name='artifact_exporter_spreadsheet_xls_system_1', systemstatus = systemstatus_1, system_modify_time = timezone.now(), system_created_by_user_id = test_user, system_modified_by_user_id = test_user, ) """ create artifacts """ # mock timezone.now() t_1 = datetime(2012, 11, 10, 12, 34, tzinfo=timezone.utc) with patch.object(timezone, 'now', return_value=t_1): # create object with maximum attributes Artifact.objects.create( artifact_name = 'artifact_exporter_spreadsheet_xls_artifact_1_all_attributes', artifactstatus = artifactstatus_3, artifacttype = artifacttype_1, system = system_1, 
artifact_source_path = 'C:\Temp\malicious.exe', artifact_note_internal = 'artifact note for internal usage', artifact_note_external = 'artifact note for external usage', artifact_note_analysisresult = 'artifact note for analysis result', artifact_md5 = 'd41d8cd98f00b204e9800998ecf8427e', artifact_sha1 = 'da39a3ee5e6b4b0d3255bfef95601890afd80709', artifact_sha256 = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', artifact_created_by_user_id = test_user, artifact_modified_by_user_id = test_user, ) # mock timezone.now() t_2 = datetime(2009, 8, 7, 23, 45, tzinfo=timezone.utc) with patch.object(timezone, 'now', return_value=t_2): # create object with minimum attributes Artifact.objects.create( artifact_name = 'artifact_exporter_spreadsheet_xls_artifact_2_no_attributes', artifactstatus = artifactstatus_3, artifacttype = artifacttype_1, system = system_1, artifact_created_by_user_id = test_user, artifact_modified_by_user_id = test_user, ) # create object that will not be exported Artifact.objects.create( artifact_name = 'artifact_exporter_spreadsheet_xls_artifact_3_not_exported', artifactstatus = artifactstatus_1, artifacttype = artifacttype_2, system = system_1, artifact_created_by_user_id = test_user, artifact_modified_by_user_id = test_user, ) def test_artifact_exporter_spreadsheet_xls_not_logged_in(self): """ test exporter view """ # create url destination = '/login/?next=' + urllib.parse.quote('/artifacts/artifact/exporter/spreadsheet/xls/artifact/', safe='') # get response response = self.client.get('/artifacts/artifact/exporter/spreadsheet/xls/artifact/', follow=True) # compare self.assertRedirects(response, destination, status_code=302, target_status_code=200) def test_artifact_exporter_spreadsheet_xls_logged_in(self): """ test exporter view """ # login testuser self.client.login(username='testuser_artifact_exporter_spreadsheet_xls', password='LTzoNHIdxiJydsaJKf1G') # get response response = 
self.client.get('/artifacts/artifact/exporter/spreadsheet/xls/artifact/') # compare self.assertEqual(response.status_code, 200) def test_artifact_exporter_spreadsheet_xls_redirect(self): """ test exporter view """ # login testuser self.client.login(username='testuser_artifact_exporter_spreadsheet_xls', password='LTzoNHIdxiJydsaJKf1G') # create url destination = urllib.parse.quote('/artifacts/artifact/exporter/spreadsheet/xls/artifact/', safe='/') # get response response = self.client.get('/artifacts/artifact/exporter/spreadsheet/xls/artifact', follow=True) # compare self.assertRedirects(response, destination, status_code=301, target_status_code=200) def test_artifact_exporter_spreadsheet_xls_minimal_spreadsheet(self): """ test exporter view """ """ modify config section """ # get and modify config to show only mandatory columns artifact_exporter_spreadsheet_xls_config_model = ArtifactExporterSpreadsheetXlsConfigModel.objects.get(artifact_exporter_spreadsheet_xls_config_name='ArtifactExporterSpreadsheetXlsConfig') artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_id = False artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_system_id = False artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_system_name = False artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifactstatus = False artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifactpriority = False artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifacttype = False artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_source_path = False artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_storage_path = False artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_note_internal = False artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_note_external = False 
artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_note_analysisresult = False artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_md5 = False artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_sha1 = False artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_sha256 = False artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_create_time = False artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_modify_time = False artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_worksheet_artifactstatus = False artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_worksheet_artifacttype = False artifact_exporter_spreadsheet_xls_config_model.save() # get object artifactstatus_3 = Artifactstatus.objects.get(artifactstatus_name = 'artifactstatus_3') # add artifactstatus to choice for export artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_choice_artifactstatus.add(artifactstatus_3) """ call view section """ # login testuser self.client.login(username='testuser_artifact_exporter_spreadsheet_xls', password='LTzoNHIdxiJydsaJKf1G') # mock timezone.now() t1_now = timezone.now() with patch.object(timezone, 'now', return_value=t1_now): # get response response = self.client.get('/artifacts/artifact/exporter/spreadsheet/xls/artifact/') """ get file section """ # get artifactlist from response content workbook = response.content # open artifactlist directly from byte stream artifactlist = xlrd.open_workbook(file_contents=workbook) """ prepare objects section """ # get objects artifact_1 = Artifact.objects.get(artifact_name = 'artifact_exporter_spreadsheet_xls_artifact_1_all_attributes') artifact_2 = Artifact.objects.get(artifact_name = 'artifact_exporter_spreadsheet_xls_artifact_2_no_attributes') # get sheets sheet_artifacts = artifactlist.sheet_by_name('artifacts') """ compare values section """ # compare non-available sheets 
self.assertRaises(xlrd.biffh.XLRDError, artifactlist.sheet_by_name, sheet_name='artifactstatus') self.assertRaises(xlrd.biffh.XLRDError, artifactlist.sheet_by_name, sheet_name='artifacttype') # compare number of rows and columns self.assertEqual(sheet_artifacts.nrows, 6) self.assertEqual(sheet_artifacts.ncols, 2) # compare headlines self.assertEqual(sheet_artifacts.row_values(0), ['Artifact', '']) # compare content - artifact 1 self.assertEqual(sheet_artifacts.cell(1,0).value, artifact_1.artifact_name) # compare content - artifact 2 self.assertEqual(sheet_artifacts.cell(2,0).value, artifact_2.artifact_name) # compare content - metadata self.assertEqual(sheet_artifacts.cell(4,0).value, 'Created:') self.assertEqual(sheet_artifacts.cell(4,1).value, t1_now.strftime('%Y-%m-%d %H:%M')) self.assertEqual(sheet_artifacts.cell(5,0).value, 'Created by:') self.assertEqual(sheet_artifacts.cell(5,1).value, 'testuser_artifact_exporter_spreadsheet_xls') def test_artifact_exporter_spreadsheet_xls_complete_spreadsheet(self): """ test exporter view """ """ modify config section """ # get and modify config to show all columns and sheets artifact_exporter_spreadsheet_xls_config_model = ArtifactExporterSpreadsheetXlsConfigModel.objects.get(artifact_exporter_spreadsheet_xls_config_name='ArtifactExporterSpreadsheetXlsConfig') artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_id = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_system_id = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_system_name = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifactstatus = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifactpriority = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifacttype = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_source_path = True 
artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_storage_path = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_note_internal = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_note_external = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_note_analysisresult = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_md5 = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_sha1 = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_sha256 = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_create_time = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_modify_time = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_worksheet_artifactstatus = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_worksheet_artifacttype = True artifact_exporter_spreadsheet_xls_config_model.save() # get object artifactstatus_3 = Artifactstatus.objects.get(artifactstatus_name = 'artifactstatus_3') # add artifactstatus to choice for export artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_choice_artifactstatus.add(artifactstatus_3) """ call view section """ # login testuser self.client.login(username='testuser_artifact_exporter_spreadsheet_xls', password='LTzoNHIdxiJydsaJKf1G') # mock timezone.now() t2_now = timezone.now() with patch.object(timezone, 'now', return_value=t2_now): # get response response = self.client.get('/artifacts/artifact/exporter/spreadsheet/xls/artifact/') """ get file section """ # get artifactlist from response content workbook = response.content # open artifactlist directly from byte stream artifactlist = xlrd.open_workbook(file_contents=workbook) """ prepare objects section """ # get objects artifact_1 = Artifact.objects.get(artifact_name = 
'artifact_exporter_spreadsheet_xls_artifact_1_all_attributes') artifact_2 = Artifact.objects.get(artifact_name = 'artifact_exporter_spreadsheet_xls_artifact_2_no_attributes') # create lists for easier comparison with whole columns - artifactstatus artifactstatus_id_list = ['ID'] artifactstatus_name_list = ['Artifactstatus'] artifactstatus_note_list = ['Note'] all_artifactstatus = Artifactstatus.objects.all().order_by('artifactstatus_name') for artifactstatus_object in all_artifactstatus: # the conversion to float was carried out, because otherwise the return values from the spreadsheet would have had to be converted to int, which would have been more time-consuming artifactstatus_id_list.append(float(artifactstatus_object.artifactstatus_id)) artifactstatus_name_list.append(artifactstatus_object.artifactstatus_name) if artifactstatus_object.artifactstatus_note: artifactstatus_note_list.append(artifactstatus_object.artifactstatus_note) else: artifactstatus_note_list.append('---') # create lists for easier comparison with whole columns - artifacttype artifacttype_id_list = ['ID'] artifacttype_name_list = ['Artifacttype'] artifacttype_note_list = ['Note'] all_artifacttype = Artifacttype.objects.all().order_by('artifacttype_name') for artifacttype_object in all_artifacttype: # the conversion to float was carried out, because otherwise the return values from the spreadsheet would have had to be converted to int, which would have been more time-consuming artifacttype_id_list.append(float(artifacttype_object.artifacttype_id)) artifacttype_name_list.append(artifacttype_object.artifacttype_name) if artifacttype_object.artifacttype_note: artifacttype_note_list.append(artifacttype_object.artifacttype_note) else: artifacttype_note_list.append('---') # get sheets sheet_artifacts = artifactlist.sheet_by_name('artifacts') sheet_artifactstatus = artifactlist.sheet_by_name('artifactstatus') sheet_artifacttype = artifactlist.sheet_by_name('artifacttype') """ compare values section 
""" # compare number of rows and columns self.assertEqual(sheet_artifacts.nrows, 6) self.assertEqual(sheet_artifacts.ncols, 17) self.assertEqual(sheet_artifactstatus.nrows, 14) self.assertEqual(sheet_artifactstatus.ncols, 3) self.assertEqual(sheet_artifacttype.nrows, 7) self.assertEqual(sheet_artifacttype.ncols, 3) # compare headlines self.assertEqual(sheet_artifacts.row_values(0), ['Artifact ID', 'Artifact', 'System ID', 'System', 'Artifactstatus', 'Artifactpriority', 'Artifacttype', 'Source path', 'Storage path', 'Internal note','External note', 'Analysis result', 'MD5', 'SHA1', 'SHA256', 'Created', 'Modified']) self.assertEqual(sheet_artifactstatus.row_values(0), ['ID', 'Artifactstatus', 'Note']) self.assertEqual(sheet_artifacttype.row_values(0), ['ID', 'Artifacttype', 'Note']) # compare content - artifact 1 self.assertEqual(int(sheet_artifacts.cell(1,0).value), artifact_1.artifact_id) self.assertEqual(sheet_artifacts.cell(1,1).value, artifact_1.artifact_name) self.assertEqual(int(sheet_artifacts.cell(1,2).value), artifact_1.system.system_id) self.assertEqual(sheet_artifacts.cell(1,3).value, artifact_1.system.system_name) self.assertEqual(sheet_artifacts.cell(1,4).value, artifact_1.artifactstatus.artifactstatus_name) self.assertEqual(sheet_artifacts.cell(1,5).value, artifact_1.artifactpriority.artifactpriority_name) self.assertEqual(sheet_artifacts.cell(1,6).value, artifact_1.artifacttype.artifacttype_name) self.assertEqual(sheet_artifacts.cell(1,7).value, artifact_1.artifact_source_path) self.assertEqual(sheet_artifacts.cell(1,8).value, artifact_1.artifact_storage_path) self.assertEqual(sheet_artifacts.cell(1,9).value, 'artifact note for internal usage') # artifact_note_internal self.assertEqual(sheet_artifacts.cell(1,10).value, 'artifact note for external usage') # artifact_note_external self.assertEqual(sheet_artifacts.cell(1,11).value, 'artifact note for analysis result') # artifact_note_analysisresult self.assertEqual(sheet_artifacts.cell(1,12).value, 
artifact_1.artifact_md5) self.assertEqual(sheet_artifacts.cell(1,13).value, artifact_1.artifact_sha1) self.assertEqual(sheet_artifacts.cell(1,14).value, artifact_1.artifact_sha256) self.assertEqual(sheet_artifacts.cell(1,15).value, '2012-11-10 12:34') self.assertEqual(sheet_artifacts.cell(1,16).value, '2012-11-10 12:34') # compare content - artifact 2 self.assertEqual(int(sheet_artifacts.cell(2,0).value), artifact_2.artifact_id) self.assertEqual(sheet_artifacts.cell(2,1).value, artifact_2.artifact_name) self.assertEqual(int(sheet_artifacts.cell(2,2).value), artifact_2.system.system_id) self.assertEqual(sheet_artifacts.cell(2,3).value, artifact_2.system.system_name) self.assertEqual(sheet_artifacts.cell(2,4).value, artifact_2.artifactstatus.artifactstatus_name) self.assertEqual(sheet_artifacts.cell(2,5).value, artifact_2.artifactpriority.artifactpriority_name) self.assertEqual(sheet_artifacts.cell(2,6).value, artifact_2.artifacttype.artifacttype_name) self.assertEqual(sheet_artifacts.cell(2,7).value, '') self.assertEqual(sheet_artifacts.cell(2,8).value, artifact_2.artifact_storage_path) self.assertEqual(sheet_artifacts.cell(2,9).value, '') self.assertEqual(sheet_artifacts.cell(2,10).value, '') self.assertEqual(sheet_artifacts.cell(2,11).value, '') self.assertEqual(sheet_artifacts.cell(2,12).value, '') self.assertEqual(sheet_artifacts.cell(2,13).value, '') self.assertEqual(sheet_artifacts.cell(2,14).value, '') self.assertEqual(sheet_artifacts.cell(2,15).value, '2009-08-07 23:45') self.assertEqual(sheet_artifacts.cell(2,16).value, '2009-08-07 23:45') # compare content - artifactstatus worksheet (whole columns) self.assertEqual(sheet_artifactstatus.col_values(0), artifactstatus_id_list) self.assertEqual(sheet_artifactstatus.col_values(1), artifactstatus_name_list) self.assertEqual(sheet_artifactstatus.col_values(2), artifactstatus_note_list) # compare content - artifacttype worksheet (whole columns) self.assertEqual(sheet_artifacttype.col_values(0), 
artifacttype_id_list) self.assertEqual(sheet_artifacttype.col_values(1), artifacttype_name_list) self.assertEqual(sheet_artifacttype.col_values(2), artifacttype_note_list) # compare content - metadata self.assertEqual(sheet_artifacts.cell(4,0).value, 'Created:') self.assertEqual(sheet_artifacts.cell(4,1).value, t2_now.strftime('%Y-%m-%d %H:%M')) self.assertEqual(sheet_artifacts.cell(5,0).value, 'Created by:') self.assertEqual(sheet_artifacts.cell(5,1).value, 'testuser_artifact_exporter_spreadsheet_xls') def test_artifact_exporter_spreadsheet_xls_cron_complete_spreadsheet(self): """ test exporter view """ """ modify config section """ # get and modify main config main_config_model = MainConfigModel.objects.get(main_config_name = 'MainConfig') main_config_model.cron_export_path = '/tmp' main_config_model.cron_username = 'cron' main_config_model.save() # get and modify config to show all columns and sheets artifact_exporter_spreadsheet_xls_config_model = ArtifactExporterSpreadsheetXlsConfigModel.objects.get(artifact_exporter_spreadsheet_xls_config_name='ArtifactExporterSpreadsheetXlsConfig') artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_id = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_system_id = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_system_name = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifactstatus = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifactpriority = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifacttype = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_source_path = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_storage_path = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_note_internal = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_note_external = True 
artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_note_analysisresult = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_md5 = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_sha1 = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_sha256 = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_create_time = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_artifact_modify_time = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_worksheet_artifactstatus = True artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_worksheet_artifacttype = True artifact_exporter_spreadsheet_xls_config_model.save() # get object artifactstatus_3 = Artifactstatus.objects.get(artifactstatus_name = 'artifactstatus_3') # add artifactstatus to choice for export artifact_exporter_spreadsheet_xls_config_model.artifactlist_xls_choice_artifactstatus.add(artifactstatus_3) """ call view section """ # login testuser self.client.login(username='testuser_artifact_exporter_spreadsheet_xls', password='LTzoNHIdxiJydsaJKf1G') # mock timezone.now() t3_now = timezone.now() with patch.object(timezone, 'now', return_value=t3_now): # create spreadsheet without GET by directly calling the function artifact_cron() """ get file section """ # refresh config main_config_model.refresh_from_db() # get time for output file filetime = t3_now.strftime('%Y%m%d_%H%M') # prepare output file path output_file_path = main_config_model.cron_export_path + '/' + filetime + '_artifacts.xls' # open file from temp folder xls_disk = xlrd.open_workbook(output_file_path) """ prepare objects section """ # get objects artifact_1 = Artifact.objects.get(artifact_name = 'artifact_exporter_spreadsheet_xls_artifact_1_all_attributes') artifact_2 = Artifact.objects.get(artifact_name = 'artifact_exporter_spreadsheet_xls_artifact_2_no_attributes') # create lists for 
easier comparison with whole columns - artifactstatus artifactstatus_id_list = ['ID'] artifactstatus_name_list = ['Artifactstatus'] artifactstatus_note_list = ['Note'] all_artifactstatus = Artifactstatus.objects.all().order_by('artifactstatus_name') for artifactstatus_object in all_artifactstatus: # the conversion to float was carried out, because otherwise the return values from the spreadsheet would have had to be converted to int, which would have been more time-consuming artifactstatus_id_list.append(float(artifactstatus_object.artifactstatus_id)) artifactstatus_name_list.append(artifactstatus_object.artifactstatus_name) if artifactstatus_object.artifactstatus_note: artifactstatus_note_list.append(artifactstatus_object.artifactstatus_note) else: artifactstatus_note_list.append('---') # create lists for easier comparison with whole columns - artifacttype artifacttype_id_list = ['ID'] artifacttype_name_list = ['Artifacttype'] artifacttype_note_list = ['Note'] all_artifacttype = Artifacttype.objects.all().order_by('artifacttype_name') for artifacttype_object in all_artifacttype: # the conversion to float was carried out, because otherwise the return values from the spreadsheet would have had to be converted to int, which would have been more time-consuming artifacttype_id_list.append(float(artifacttype_object.artifacttype_id)) artifacttype_name_list.append(artifacttype_object.artifacttype_name) if artifacttype_object.artifacttype_note: artifacttype_note_list.append(artifacttype_object.artifacttype_note) else: artifacttype_note_list.append('---') # get sheets sheet_artifacts = xls_disk.sheet_by_name('artifacts') sheet_artifactstatus = xls_disk.sheet_by_name('artifactstatus') sheet_artifacttype = xls_disk.sheet_by_name('artifacttype') """ compare values section """ # compare number of rows and columns self.assertEqual(sheet_artifacts.nrows, 6) self.assertEqual(sheet_artifacts.ncols, 17) self.assertEqual(sheet_artifactstatus.nrows, 14) 
self.assertEqual(sheet_artifactstatus.ncols, 3) self.assertEqual(sheet_artifacttype.nrows, 7) self.assertEqual(sheet_artifacttype.ncols, 3) # compare headlines self.assertEqual(sheet_artifacts.row_values(0), ['Artifact ID', 'Artifact', 'System ID', 'System', 'Artifactstatus', 'Artifactpriority', 'Artifacttype', 'Source path', 'Storage path', 'Internal note','External note', 'Analysis result', 'MD5', 'SHA1', 'SHA256', 'Created', 'Modified']) self.assertEqual(sheet_artifactstatus.row_values(0), ['ID', 'Artifactstatus', 'Note']) self.assertEqual(sheet_artifacttype.row_values(0), ['ID', 'Artifacttype', 'Note']) # compare content - artifact 1 self.assertEqual(int(sheet_artifacts.cell(1,0).value), artifact_1.artifact_id) self.assertEqual(sheet_artifacts.cell(1,1).value, artifact_1.artifact_name) self.assertEqual(int(sheet_artifacts.cell(1,2).value), artifact_1.system.system_id) self.assertEqual(sheet_artifacts.cell(1,3).value, artifact_1.system.system_name) self.assertEqual(sheet_artifacts.cell(1,4).value, artifact_1.artifactstatus.artifactstatus_name) self.assertEqual(sheet_artifacts.cell(1,5).value, artifact_1.artifactpriority.artifactpriority_name) self.assertEqual(sheet_artifacts.cell(1,6).value, artifact_1.artifacttype.artifacttype_name) self.assertEqual(sheet_artifacts.cell(1,7).value, artifact_1.artifact_source_path) self.assertEqual(sheet_artifacts.cell(1,8).value, artifact_1.artifact_storage_path) self.assertEqual(sheet_artifacts.cell(1,9).value, 'artifact note for internal usage') # artifact_note_internal self.assertEqual(sheet_artifacts.cell(1,10).value, 'artifact note for external usage') # artifact_note_external self.assertEqual(sheet_artifacts.cell(1,11).value, 'artifact note for analysis result') # artifact_note_analysisresult self.assertEqual(sheet_artifacts.cell(1,12).value, artifact_1.artifact_md5) self.assertEqual(sheet_artifacts.cell(1,13).value, artifact_1.artifact_sha1) self.assertEqual(sheet_artifacts.cell(1,14).value, artifact_1.artifact_sha256) 
self.assertEqual(sheet_artifacts.cell(1,15).value, '2012-11-10 12:34') self.assertEqual(sheet_artifacts.cell(1,16).value, '2012-11-10 12:34') # compare content - artifact 2 self.assertEqual(int(sheet_artifacts.cell(2,0).value), artifact_2.artifact_id) self.assertEqual(sheet_artifacts.cell(2,1).value, artifact_2.artifact_name) self.assertEqual(int(sheet_artifacts.cell(2,2).value), artifact_2.system.system_id) self.assertEqual(sheet_artifacts.cell(2,3).value, artifact_2.system.system_name) self.assertEqual(sheet_artifacts.cell(2,4).value, artifact_2.artifactstatus.artifactstatus_name) self.assertEqual(sheet_artifacts.cell(2,5).value, artifact_2.artifactpriority.artifactpriority_name) self.assertEqual(sheet_artifacts.cell(2,6).value, artifact_2.artifacttype.artifacttype_name) self.assertEqual(sheet_artifacts.cell(2,7).value, '') self.assertEqual(sheet_artifacts.cell(2,8).value, artifact_2.artifact_storage_path) self.assertEqual(sheet_artifacts.cell(2,9).value, '') self.assertEqual(sheet_artifacts.cell(2,10).value, '') self.assertEqual(sheet_artifacts.cell(2,11).value, '') self.assertEqual(sheet_artifacts.cell(2,12).value, '') self.assertEqual(sheet_artifacts.cell(2,13).value, '') self.assertEqual(sheet_artifacts.cell(2,14).value, '') self.assertEqual(sheet_artifacts.cell(2,15).value, '2009-08-07 23:45') self.assertEqual(sheet_artifacts.cell(2,16).value, '2009-08-07 23:45') # compare content - artifactstatus worksheet (whole columns) self.assertEqual(sheet_artifactstatus.col_values(0), artifactstatus_id_list) self.assertEqual(sheet_artifactstatus.col_values(1), artifactstatus_name_list) self.assertEqual(sheet_artifactstatus.col_values(2), artifactstatus_note_list) # compare content - artifacttype worksheet (whole columns) self.assertEqual(sheet_artifacttype.col_values(0), artifacttype_id_list) self.assertEqual(sheet_artifacttype.col_values(1), artifacttype_name_list) self.assertEqual(sheet_artifacttype.col_values(2), artifacttype_note_list) # compare content - metadata 
self.assertEqual(sheet_artifacts.cell(4,0).value, 'Created:') self.assertEqual(sheet_artifacts.cell(4,1).value, t3_now.strftime('%Y-%m-%d %H:%M')) self.assertEqual(sheet_artifacts.cell(5,0).value, 'Created by:') self.assertEqual(sheet_artifacts.cell(5,1).value, 'cron')
61.268714
293
0.740986
3,656
31,921
6.134847
0.064278
0.077578
0.095412
0.13108
0.911454
0.907174
0.878951
0.873779
0.872665
0.852289
0
0.022982
0.176655
31,921
520
294
61.386538
0.830416
0.10031
0
0.675287
0
0
0.109741
0.054888
0
0
0
0
0.344828
1
0.020115
false
0.017241
0.031609
0
0.054598
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
79854d3299a57511d17d5ffae0a25253ce2e269d
188
py
Python
part34/blog/forms.py
yllew36/WellyGI
7d53fac4c81bb994f61b22761e5ac7e48994ade4
[ "Apache-2.0" ]
1
2019-11-15T08:02:45.000Z
2019-11-15T08:02:45.000Z
part34/blog/forms.py
yllew36/WellyGI
7d53fac4c81bb994f61b22761e5ac7e48994ade4
[ "Apache-2.0" ]
null
null
null
part34/blog/forms.py
yllew36/WellyGI
7d53fac4c81bb994f61b22761e5ac7e48994ade4
[ "Apache-2.0" ]
null
null
null
from django import forms class PostForm(forms.Form): judul = forms.CharField(max_length=20) body = forms.CharField( widget=forms.Textarea) category = forms.CharField(max_length=20)
26.857143
42
0.771277
26
188
5.5
0.615385
0.293706
0.237762
0.321678
0.34965
0
0
0
0
0
0
0.024242
0.12234
188
7
42
26.857143
0.842424
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.166667
0
0.833333
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
6
79b774d16e635541a0aaeb720b1bb505e26faa7a
40
py
Python
docs/19_util/test.py
rasql/turtle-tutorial
21add97eb5a8e26bf632633256f7bb34053c083a
[ "MIT" ]
null
null
null
docs/19_util/test.py
rasql/turtle-tutorial
21add97eb5a8e26bf632633256f7bb34053c083a
[ "MIT" ]
null
null
null
docs/19_util/test.py
rasql/turtle-tutorial
21add97eb5a8e26bf632633256f7bb34053c083a
[ "MIT" ]
null
null
null
x = 123.123 print(f'number = {x:0.1f}')
13.333333
27
0.575
9
40
2.555556
0.777778
0
0
0
0
0
0
0
0
0
0
0.235294
0.15
40
3
27
13.333333
0.441176
0
0
0
0
0
0.414634
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
1
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
6
8dca71f649f313e8dddc21f39d59919798a51c5e
5,134
py
Python
test/web/ui/test_ui.py
StepanBarantsev/course_management
627d003d8ed63a5cc64485a191c4b98e91796a45
[ "Apache-2.0" ]
null
null
null
test/web/ui/test_ui.py
StepanBarantsev/course_management
627d003d8ed63a5cc64485a191c4b98e91796a45
[ "Apache-2.0" ]
null
null
null
test/web/ui/test_ui.py
StepanBarantsev/course_management
627d003d8ed63a5cc64485a191c4b98e91796a45
[ "Apache-2.0" ]
null
null
null
from time import sleep from test.web.db_helper import create_course, create_student from web.app.models import Student def test_edit_profile_data(session, app, user_logined_minimal): app.courses_page.check_course_table_not_present() app.navigation_component.check_edit_profile_link_present() def test_go_by_pages(session, app, user_logined): for i in range(11): create_course(session, user_id=user_logined.id, course_name=f'Название {i}', course_lms_id=i) app.refresh() num_of_rows_on_first_page = app.courses_page.get_num_of_courses_on_page() assert num_of_rows_on_first_page == 10 app.courses_page.go_to_next_page() num_of_rows_on_second_page = app.courses_page.get_num_of_courses_on_page() assert num_of_rows_on_second_page == 1 def test_change_student_status(session, app, user_logined): course = create_course(session, user_id=user_logined.id, course_name=f'Название курса', course_lms_id=1) create_student(session, course_id=course.id, student_name=f'Имя студента', student_lms_id=1) app.refresh() app.courses_page.go_to_students_of_course_by_course_number(0) status = app.students_page.get_student_status(0) assert status == Student.student_statuses['active'] app.students_page.change_status_to_dropped(0) sleep(1) status = app.students_page.get_student_status(0) assert status == Student.student_statuses['dropped'] app.students_page.change_status_to_freezed(0) sleep(1) status = app.students_page.get_student_status(0) assert status == Student.student_statuses['freezed'] app.students_page.change_status_to_finished(0) sleep(1) status = app.students_page.get_student_status(0) assert status == Student.student_statuses['finished'] def test_add_days_to_student(session, app, user_logined): course = create_course(session, user_id=user_logined.id, course_name=f'Название курса', course_lms_id=1) create_student(session, course_id=course.id, student_name=f'Имя студента', student_lms_id=1) app.refresh() app.courses_page.go_to_students_of_course_by_course_number(0) days = 
app.students_page.get_student_number_of_days(0) assert days == 0 app.students_page.add_days_to_student(0) sleep(1) days = app.students_page.get_student_number_of_days(0) assert days == 30 def test_find_student(session, app, user_logined): course = create_course(session, user_id=user_logined.id, course_name=f'Название курса', course_lms_id=1) create_student(session, course_id=course.id, student_name=f'Студент Студентович', student_lms_id=1) create_student(session, course_id=course.id, student_name=f'Школьник Школьникович', student_lms_id=2) app.refresh() app.courses_page.go_to_students_of_course_by_course_number(0) number_of_students = app.students_page.get_number_of_students_on_page() assert number_of_students == 2 app.students_page.find_student('Школьник Школьникович') sleep(1) number_of_students = app.students_page.get_number_of_students_on_page() first_student_name = app.students_page.get_student_name(0) assert number_of_students == 1 assert first_student_name == 'Школьник Школьникович' def test_filter_students_by_status(session, app, user_logined): course = create_course(session, user_id=user_logined.id, course_name=f'Название курса', course_lms_id=1) create_student(session, course_id=course.id, student_name=f'Студент Студентович', student_lms_id=1) create_student(session, course_id=course.id, student_name=f'Школьник Школьникович', student_lms_id=2) app.refresh() app.courses_page.go_to_students_of_course_by_course_number(0) app.students_page.change_status_to_freezed(0) number_of_students = app.students_page.get_number_of_students_on_page() assert number_of_students == 2 app.students_page.filter_by_status_freezed() number_of_students = app.students_page.get_number_of_students_on_page() first_student_name = app.students_page.get_student_name(0) status = app.students_page.get_student_status(0) assert number_of_students == 1 assert first_student_name == 'Студент Студентович' assert status == Student.student_statuses['freezed'] def 
test_filter_students_by_course(session, app, user_logined): course_1 = create_course(session, user_id=user_logined.id, course_name=f'Студенческий курс', course_lms_id=1) course_2 = create_course(session, user_id=user_logined.id, course_name=f'Школьнический курс', course_lms_id=1) create_student(session, course_id=course_1.id, student_name=f'Студент Студентович', student_lms_id=1) create_student(session, course_id=course_2.id, student_name=f'Школьник Школьникович', student_lms_id=2) app.refresh() app.courses_page.go_to_students_of_course_by_course_number(0) first_student_name = app.students_page.get_student_name(0) assert first_student_name == 'Студент Студентович' app.students_page.filter_by_course(course_2) first_student_name = app.students_page.get_student_name(0) assert first_student_name == 'Школьник Школьникович'
37.75
114
0.784768
789
5,134
4.683143
0.103929
0.068471
0.093369
0.073072
0.851421
0.812991
0.749662
0.749662
0.730176
0.730176
0
0.012692
0.125243
5,134
135
115
38.02963
0.810065
0
0
0.609195
0
0
0.074601
0
0
0
0
0
0.195402
1
0.08046
false
0
0.034483
0
0.114943
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
8dd389426c0b3f77c8e937de7af389f6af9fe9ac
23,658
py
Python
optimization/second_sdEta_mjj_optimization/lumi_and_kin_plots/four_cuts_lum40_fixed/Output/Histos/MadAnalysis5job_0/selection_10.py
sheride/axion_pheno
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
[ "MIT" ]
null
null
null
optimization/second_sdEta_mjj_optimization/lumi_and_kin_plots/four_cuts_lum40_fixed/Output/Histos/MadAnalysis5job_0/selection_10.py
sheride/axion_pheno
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
[ "MIT" ]
null
null
null
optimization/second_sdEta_mjj_optimization/lumi_and_kin_plots/four_cuts_lum40_fixed/Output/Histos/MadAnalysis5job_0/selection_10.py
sheride/axion_pheno
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
[ "MIT" ]
null
null
null
def selection_10(): # Library import import numpy import matplotlib import matplotlib.pyplot as plt import matplotlib.gridspec as gridspec # Library version matplotlib_version = matplotlib.__version__ numpy_version = numpy.__version__ # Histo binning xBinning = numpy.linspace(0.0,2000.0,81,endpoint=True) # Creating data sequence: middle of each bin xData = numpy.array([12.5,37.5,62.5,87.5,112.5,137.5,162.5,187.5,212.5,237.5,262.5,287.5,312.5,337.5,362.5,387.5,412.5,437.5,462.5,487.5,512.5,537.5,562.5,587.5,612.5,637.5,662.5,687.5,712.5,737.5,762.5,787.5,812.5,837.5,862.5,887.5,912.5,937.5,962.5,987.5,1012.5,1037.5,1062.5,1087.5,1112.5,1137.5,1162.5,1187.5,1212.5,1237.5,1262.5,1287.5,1312.5,1337.5,1362.5,1387.5,1412.5,1437.5,1462.5,1487.5,1512.5,1537.5,1562.5,1587.5,1612.5,1637.5,1662.5,1687.5,1712.5,1737.5,1762.5,1787.5,1812.5,1837.5,1862.5,1887.5,1912.5,1937.5,1962.5,1987.5]) # Creating weights for histo: y11_PT_0 y11_PT_0_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,25.7517901873,27.8029244499,29.7803667749,31.4507533601,31.4139053913,31.0331577138,31.1764495924,30.7547579496,29.6780148616,29.6902988512,27.9011843667,26.857189251,25.9155500486,24.6709511028,23.4509121362,22.4683329685,20.3926307267,19.8808711602,18.7017761589,17.4367052305,16.163442309,15.3896629644,14.4684917447,13.5841724937,12.7817291734,12.0734537733,11.3365183976,10.1697033859,9.6743198055,9.08068030833,8.43790885278,8.04078118916,6.98450608387,6.68154634048,6.37449060057,5.95689095429,5.49425934616,5.08894768947,4.60584409868,4.45436422699,3.79112198878,3.52091221766,3.6027937483,3.20157368815,3.11150376444,2.92317592396,2.4441683297,2.30087525107,2.18214695164,1.98972471463,1.94878394931,1.91193718052,1.64582180593,1.60078704407,1.31010689029,1.25688373537,1.19956658392,1.07264989143,1.02352113304,0.900698437075,0.9293572128,0.753311361918,0.728746982725,0.708276600064,0.695994210467,0.642771055549,0.56907751797,0.458537211602]) # Creating weights for histo: 
y11_PT_1 y11_PT_1_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0242945760233,0.0121753353338,0.0,0.0,0.0,0.0,0.0121313846429,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y11_PT_2 y11_PT_2_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.341428826021,0.34122243079,0.0903653279136,0.0802686550033,0.0903492956113,0.0602618293179,0.0301763877224,0.0401299144865,0.010034093215,0.0301425504742,0.0,0.0201115108343,0.0200893011373,0.0,0.0,0.0100696700506,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y11_PT_3 y11_PT_3_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.38044092341,1.08899205374,0.836121009257,0.660124484831,0.495072949174,0.456550375028,0.34670763614,0.203467507781,0.153889727152,0.0935118698771,0.0660347114052,0.0494884851857,0.0439890559155,0.049519035765,0.0274872580384,0.0165081111218,0.0220364537527,0.010991956222,0.00550370716379,0.0,0.0,0.00547920982432,0.00550414592211,0.00549442823786,0.0,0.00548474305423,0.00549598014229,0.0,0.00548806217966,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00551421705056,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y11_PT_4 y11_PT_4_weights = 
numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.548706381244,0.489429390329,0.505215637913,0.456941000998,0.428327425179,0.380948480648,0.339477554506,0.269420188667,0.24079390657,0.174653040455,0.15492528142,0.106574647335,0.0947513555508,0.0582105854792,0.0424269032005,0.0286317093204,0.0305976991897,0.0207222517484,0.0108564121772,0.0108645249553,0.00592194719602,0.0108544280739,0.00394836041553,0.00197208166777,0.00296049140078,0.0019739856053,0.00296333808802,0.0,0.000987659381206,0.0,0.000984219467109,0.00295751484495,0.00197603504375,0.000983908824669,0.0,0.0,0.0,0.000985428768068,0.0,0.0,0.0,0.00197503216971,0.0,0.000983908824669,0.0,0.0,0.0,0.0,0.0,0.0,0.000986794392319,0.0,0.0,0.0,0.000986290550322,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y11_PT_5 y11_PT_5_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.113677409924,0.10814789873,0.101840849414,0.103606424711,0.106124211025,0.101832927191,0.0978062375842,0.0983018966314,0.0945228765029,0.0889866434234,0.092005570323,0.0814352450296,0.0731011870763,0.0652860747028,0.0579707825438,0.0589823863291,0.0446190769898,0.0385732529548,0.0292436437195,0.0216812022277,0.0128554623429,0.013106020632,0.00831931381714,0.00580000307488,0.00378229104612,0.00302591325946,0.00529442924626,0.000756058497093,0.00176474546405,0.0015125095043,0.00100942717461,0.00126115379148,0.000504342683255,0.000252130810257,0.000503856946991,0.000252358434112,0.000251770949305,0.0,0.000251614665463,0.0,0.000251614665463,0.000252082356664,0.0,0.0,0.000252086197742,0.000252017858571,0.000252186585904,0.0,0.0,0.0,0.0,0.0,0.0,0.000251958161824,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y11_PT_6 y11_PT_6_weights = 
numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0254973009635,0.0226304206519,0.0197474649926,0.0209041502425,0.0220445801968,0.0237529657822,0.021179420578,0.0268989853096,0.024911140601,0.019446632055,0.0266277638024,0.0203386039142,0.0229137686497,0.0220418409896,0.0203385239374,0.0263385375058,0.0209026606736,0.0234857331205,0.019188816668,0.0220435305006,0.0206220918715,0.0186141329841,0.0197595714889,0.0120269193541,0.0157348362658,0.0160283613183,0.0120353069267,0.00858937819435,0.0102972799197,0.00859092074792,0.00772929410665,0.004862738701,0.0040044710876,0.00543648165714,0.00228820378327,0.00085790951703,0.000854948373998,0.000858374382496,0.000284540452681,0.000286183777095,0.0,0.000862325339071,0.0,0.0,0.0,0.000857851133926,0.0,0.0,0.000286183777095,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y11_PT_7 y11_PT_7_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0016631212549,0.00168467784916,0.00133921201026,0.00153329344308,0.0010364620574,0.001339070356,0.0012082245613,0.00122929626212,0.000863925065078,0.00116633932248,0.00110194823816,0.00118795291373,0.00105846540743,0.000842506772315,0.00107982125497,0.00101511501086,0.000863801012821,0.000799543200862,0.00103708818602,0.000842255734132,0.000863821967594,0.00127451750083,0.000842979092902,0.00107845668014,0.00103657227951,0.000972192510767,0.000993464539214,0.00107961044995,0.000972299799206,0.0009938907593,0.00127405523854,0.0014673659548,0.00131813108935,0.00144738054942,0.00131747436676,0.00170117135113,0.00146898785424,0.00142542036623,0.00168464390243,0.00114189935142,0.00125280332667,0.0012098787311,0.000864098989696,0.000927304872041,0.000799187388814,0.000518219503574,0.000453610908712,0.000345750949543,0.000300814947201,0.000237746360529,0.000259019604353,8.64930056e-05,8.64363438933e-05,0.000129582054234,2.15827542054e-05,0.0,0.0,4.32204350622e-05,2.15933950392e-05,0.0,0.0,0.0,2.1675185709
9e-05,2.16220821237e-05,0.0,0.0,0.0,0.0]) # Creating weights for histo: y11_PT_8 y11_PT_8_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000113736721231,8.51343815685e-05,5.67987805063e-05,0.0,8.52347723319e-05,8.48750375258e-05,2.8257334576e-05,0.0,5.68463105807e-05,0.000113621481959,0.000113625446511,0.000113120359659,8.50232107883e-05,0.0,8.52544169081e-05,0.000141943452091,0.000113632172885,5.6826680853e-05,2.83498704786e-05,2.83498704786e-05,5.68174005354e-05,5.66824721422e-05,0.0,2.83498704786e-05,0.0,5.67183461378e-05,2.84080914789e-05,0.0,5.68570015066e-05,0.0,5.67183461378e-05,5.66456775391e-05,0.000113236430447,2.84292654515e-05,5.6826680853e-05,8.52148010885e-05,2.83684905078e-05,0.000113196814627,8.51450724944e-05,0.000113704856332,8.51951565123e-05,2.83684905078e-05,0.000113724500908,0.000196602562578,5.6826680853e-05,2.8397400553e-05,0.000140565941145,0.000113406223137,5.68373717789e-05,0.000113724500908,0.00028375395064,0.000170223593975,0.000142033552838,8.52135983593e-05,5.677913593e-05,0.00011328894962,0.000141636236458,0.000170499627741,0.000170532888399,0.000141795679738,2.84489100277e-05,0.000140466174019,0.000141984983368,0.0,5.67977559592e-05,5.56796929953e-05,0.000113475120215,8.46724741782e-05]) # Creating weights for histo: y11_PT_9 y11_PT_9_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y11_PT_10 y11_PT_10_weights = 
numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.05211370067,2.10758668394,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y11_PT_11 y11_PT_11_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.22551702834,1.84358044477,4.37706148725,2.99361318568,2.76392619639,1.38204341062,0.460138675788,0.460448397371,0.689676184067,0.0,0.230128553894,0.230360191822,0.230673179699,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.230673179699,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y11_PT_12 y11_PT_12_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.13513858326,0.830690226803,0.996880818839,1.07982109063,0.914237516547,0.747610702481,0.886215444211,0.886191979004,0.858309543879,0.692428230955,0.720020236069,0.803181696275,0.498131707321,0.304675699681,0.193693160093,0.0829900103302,0.0830284394138,0.221627487946,0.110640447655,0.0555005212398,0.0276381507887,0.0276586847675,0.0,0.0,0.0276696172456,0.0277263953511,0.0,0.0277307075636,0.0276929401219,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y11_PT_13 y11_PT_13_weights = 
numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0806631152429,0.191564088543,0.120871080506,0.110910865544,0.100815011534,0.121051568438,0.131063004589,0.100821687281,0.151115371556,0.171450972275,0.191698756572,0.191582173749,0.221760132146,0.22164500585,0.332767067763,0.181451059656,0.201590089961,0.211758891682,0.141171603207,0.100829880244,0.0705497829126,0.0403589706674,0.0504321999629,0.0403172897288,0.0302705145991,0.010042957816,0.0605348268221,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100805665487,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.010097516878,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y11_PT_14 y11_PT_14_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0565646758237,0.0367998618393,0.036801708613,0.0282951922258,0.031109171233,0.0283022676773,0.0169679791924,0.019786940641,0.0141470593943,0.0254537693929,0.0339462002836,0.0311338410508,0.0339254510123,0.0311194477588,0.0396317237712,0.0311136342693,0.0226458731964,0.0452324495693,0.0537533477062,0.0679085598366,0.0537680064719,0.0565767952757,0.079195039969,0.104752925073,0.087716207427,0.0679047123915,0.0509162032322,0.0339433839538,0.0424435135822,0.0452375666713,0.0395992128603,0.0169686294106,0.00848347788602,0.0282822070987,0.0169854119661,0.0113239811795,0.0141381410166,0.0169797100525,0.00283012134774,0.0,0.00283097817376,0.00565362047301,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y11_PT_15 y11_PT_15_weights = 
numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00610087510698,0.00306963151122,0.0,0.00306733863696,0.0,0.0,0.0,0.00151727408562,0.00152449663954,0.00151265406217,0.0,0.00150849615926,0.0,0.00305525258944,0.0,0.00304862571007,0.00153629548684,0.0,0.00152162936482,0.0,0.00151265406217,0.0,0.0,0.0,0.00306015390365,0.00152644440077,0.00150849615926,0.00154541025297,0.0,0.00152260679112,0.0015209497758,0.0,0.0,0.00304258032252,0.00152162936482,0.00304044818583,0.00457795629586,0.00609317152222,0.00151265406217,0.0045406025376,0.00914378045033,0.00304377876298,0.00454491172294,0.0015209497758,0.00456340008998,0.00150849615926,0.0,0.00153333602439,0.00153333602439,0.00305826405522,0.00150849615926,0.00153821488261,0.00153153127233,0.0,0.00151115660254,0.00153821488261,0.0,0.0,0.0,0.0,0.0,0.00303715070171,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y11_PT_16 y11_PT_16_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000180553030001,0.000180503367022,0.0,0.0,0.000180003618866,0.000180626138525,0.0,0.0,0.000180626138525,0.0,0.0,0.0,0.000180626138525,0.0,0.0,0.0,0.000180734319121,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000180970237517,0.0,0.000179998691067,0.000180402039147,0.0,0.000361424095555,0.000180154571221,0.000180547139741,0.000180533819283,0.000361923458727,0.00054109046815,0.00126469084164,0.000180657129763,0.000360405658014,0.000721969964861,0.0,0.000361110679814,0.0,0.000361314067034,0.0,0.00018020300225,0.000180970237517,0.00072115148818,0.0,0.0,0.0]) # Creating a new Canvas fig = plt.figure(figsize=(12,6),dpi=80) frame = gridspec.GridSpec(1,1,right=0.7) pad = fig.add_subplot(frame[0]) # Creating a new Stack pad.hist(x=xData, bins=xBinning, 
weights=y11_PT_0_weights+y11_PT_1_weights+y11_PT_2_weights+y11_PT_3_weights+y11_PT_4_weights+y11_PT_5_weights+y11_PT_6_weights+y11_PT_7_weights+y11_PT_8_weights+y11_PT_9_weights+y11_PT_10_weights+y11_PT_11_weights+y11_PT_12_weights+y11_PT_13_weights+y11_PT_14_weights+y11_PT_15_weights+y11_PT_16_weights,\ label="$bg\_dip\_1600\_inf$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#e5e5e5", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y11_PT_0_weights+y11_PT_1_weights+y11_PT_2_weights+y11_PT_3_weights+y11_PT_4_weights+y11_PT_5_weights+y11_PT_6_weights+y11_PT_7_weights+y11_PT_8_weights+y11_PT_9_weights+y11_PT_10_weights+y11_PT_11_weights+y11_PT_12_weights+y11_PT_13_weights+y11_PT_14_weights+y11_PT_15_weights,\ label="$bg\_dip\_1200\_1600$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#f2f2f2", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y11_PT_0_weights+y11_PT_1_weights+y11_PT_2_weights+y11_PT_3_weights+y11_PT_4_weights+y11_PT_5_weights+y11_PT_6_weights+y11_PT_7_weights+y11_PT_8_weights+y11_PT_9_weights+y11_PT_10_weights+y11_PT_11_weights+y11_PT_12_weights+y11_PT_13_weights+y11_PT_14_weights,\ label="$bg\_dip\_800\_1200$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#ccc6aa", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y11_PT_0_weights+y11_PT_1_weights+y11_PT_2_weights+y11_PT_3_weights+y11_PT_4_weights+y11_PT_5_weights+y11_PT_6_weights+y11_PT_7_weights+y11_PT_8_weights+y11_PT_9_weights+y11_PT_10_weights+y11_PT_11_weights+y11_PT_12_weights+y11_PT_13_weights,\ label="$bg\_dip\_600\_800$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#ccc6aa", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, 
normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y11_PT_0_weights+y11_PT_1_weights+y11_PT_2_weights+y11_PT_3_weights+y11_PT_4_weights+y11_PT_5_weights+y11_PT_6_weights+y11_PT_7_weights+y11_PT_8_weights+y11_PT_9_weights+y11_PT_10_weights+y11_PT_11_weights+y11_PT_12_weights,\ label="$bg\_dip\_400\_600$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#c1bfa8", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y11_PT_0_weights+y11_PT_1_weights+y11_PT_2_weights+y11_PT_3_weights+y11_PT_4_weights+y11_PT_5_weights+y11_PT_6_weights+y11_PT_7_weights+y11_PT_8_weights+y11_PT_9_weights+y11_PT_10_weights+y11_PT_11_weights,\ label="$bg\_dip\_200\_400$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#bab5a3", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y11_PT_0_weights+y11_PT_1_weights+y11_PT_2_weights+y11_PT_3_weights+y11_PT_4_weights+y11_PT_5_weights+y11_PT_6_weights+y11_PT_7_weights+y11_PT_8_weights+y11_PT_9_weights+y11_PT_10_weights,\ label="$bg\_dip\_100\_200$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#b2a596", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y11_PT_0_weights+y11_PT_1_weights+y11_PT_2_weights+y11_PT_3_weights+y11_PT_4_weights+y11_PT_5_weights+y11_PT_6_weights+y11_PT_7_weights+y11_PT_8_weights+y11_PT_9_weights,\ label="$bg\_dip\_0\_100$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#b7a39b", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, 
weights=y11_PT_0_weights+y11_PT_1_weights+y11_PT_2_weights+y11_PT_3_weights+y11_PT_4_weights+y11_PT_5_weights+y11_PT_6_weights+y11_PT_7_weights+y11_PT_8_weights,\ label="$bg\_vbf\_1600\_inf$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#ad998c", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y11_PT_0_weights+y11_PT_1_weights+y11_PT_2_weights+y11_PT_3_weights+y11_PT_4_weights+y11_PT_5_weights+y11_PT_6_weights+y11_PT_7_weights,\ label="$bg\_vbf\_1200\_1600$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#9b8e82", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y11_PT_0_weights+y11_PT_1_weights+y11_PT_2_weights+y11_PT_3_weights+y11_PT_4_weights+y11_PT_5_weights+y11_PT_6_weights,\ label="$bg\_vbf\_800\_1200$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#876656", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y11_PT_0_weights+y11_PT_1_weights+y11_PT_2_weights+y11_PT_3_weights+y11_PT_4_weights+y11_PT_5_weights,\ label="$bg\_vbf\_600\_800$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#afcec6", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y11_PT_0_weights+y11_PT_1_weights+y11_PT_2_weights+y11_PT_3_weights+y11_PT_4_weights,\ label="$bg\_vbf\_400\_600$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#84c1a3", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y11_PT_0_weights+y11_PT_1_weights+y11_PT_2_weights+y11_PT_3_weights,\ label="$bg\_vbf\_200\_400$", histtype="step", rwidth=1.0,\ color=None, 
edgecolor="#89a8a0", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y11_PT_0_weights+y11_PT_1_weights+y11_PT_2_weights,\ label="$bg\_vbf\_100\_200$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#829e8c", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y11_PT_0_weights+y11_PT_1_weights,\ label="$bg\_vbf\_0\_100$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#adbcc6", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y11_PT_0_weights,\ label="$signal$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#7a8e99", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") # Axis plt.rc('text',usetex=False) plt.xlabel(r"p_{T} [ a_{1} ] ",\ fontsize=16,color="black") plt.ylabel(r"$\mathrm{Events}$ $(\mathcal{L}_{\mathrm{int}} = 40.0\ \mathrm{fb}^{-1})$ ",\ fontsize=16,color="black") # Boundary of y-axis ymax=(y11_PT_0_weights+y11_PT_1_weights+y11_PT_2_weights+y11_PT_3_weights+y11_PT_4_weights+y11_PT_5_weights+y11_PT_6_weights+y11_PT_7_weights+y11_PT_8_weights+y11_PT_9_weights+y11_PT_10_weights+y11_PT_11_weights+y11_PT_12_weights+y11_PT_13_weights+y11_PT_14_weights+y11_PT_15_weights+y11_PT_16_weights).max()*1.1 ymin=0 # linear scale #ymin=min([x for x in (y11_PT_0_weights+y11_PT_1_weights+y11_PT_2_weights+y11_PT_3_weights+y11_PT_4_weights+y11_PT_5_weights+y11_PT_6_weights+y11_PT_7_weights+y11_PT_8_weights+y11_PT_9_weights+y11_PT_10_weights+y11_PT_11_weights+y11_PT_12_weights+y11_PT_13_weights+y11_PT_14_weights+y11_PT_15_weights+y11_PT_16_weights) if x])/100. 
# log scale plt.gca().set_ylim(ymin,ymax) # Log/Linear scale for X-axis plt.gca().set_xscale("linear") #plt.gca().set_xscale("log",nonposx="clip") # Log/Linear scale for Y-axis plt.gca().set_yscale("linear") #plt.gca().set_yscale("log",nonposy="clip") # Legend plt.legend(bbox_to_anchor=(1.05,1), loc=2, borderaxespad=0.) # Saving the image plt.savefig('../../HTML/MadAnalysis5job_0/selection_10.png') plt.savefig('../../PDF/MadAnalysis5job_0/selection_10.png') plt.savefig('../../DVI/MadAnalysis5job_0/selection_10.eps') # Running! if __name__ == '__main__': selection_10()
121.948454
1,188
0.74478
4,710
23,658
3.585775
0.162845
0.19338
0.273551
0.34484
0.479839
0.464563
0.457339
0.448162
0.445912
0.443069
0
0.473282
0.069744
23,658
193
1,189
122.580311
0.29412
0.055668
0
0.185841
0
0.00885
0.046268
0.009101
0
0
0
0
0
1
0.00885
false
0
0.035398
0
0.044248
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
1
0
0
0
1
1
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
8deea790842711d090ed033e95d8069e6163895f
24,439
py
Python
tests/prepare_test.py
DamianPala/pyrepogen
1626d13875083b4d005b4ae5d68ac35a9879a464
[ "MIT" ]
null
null
null
tests/prepare_test.py
DamianPala/pyrepogen
1626d13875083b4d005b4ae5d68ac35a9879a464
[ "MIT" ]
null
null
null
tests/prepare_test.py
DamianPala/pyrepogen
1626d13875083b4d005b4ae5d68ac35a9879a464
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- import pytest import inspect import shutil import stat import tempfile from pathlib import Path from pprint import pprint from pyrepogen import prepare from pyrepogen import settings from pyrepogen import utils from pyrepogen import PARDIR from pyrepogen import pygittools TESTS_SETUPS_PATH = Path(inspect.getframeinfo(inspect.currentframe()).filename).parent / 'tests_setups/prepare_test' SKIP_ALL_MARKED = False _DEFAULT_CONFIG = { 'project_type': settings.ProjectType.MODULE.value, 'project_name': 'sample_project', 'author': 'Damian', 'author_email': 'mail@mail.com', 'short_description': 'This is a sample project', 'changelog_type': settings.ChangelogType.GENERATED.value, 'authors_type': settings.AuthorsType.GENERATED.value, 'pipreqs_ignore': [settings.DirName.REPOASSIST, settings.DirName.TESTS] } class Args: force = True cloud = False sample_layout = True def _error_remove_readonly(_action, name, _exc): Path(name).chmod(stat.S_IWRITE) Path(name).unlink() @pytest.fixture() def cwd(request): workspace_path = Path(tempfile.mkdtemp()) failed_before = request.session.testsfailed yield workspace_path if request.session.testsfailed != failed_before: print(f'Tests workspace path: {workspace_path}') else: shutil.rmtree(workspace_path, ignore_errors=False, onerror=_error_remove_readonly) @pytest.mark.skipif(SKIP_ALL_MARKED, reason="Skipped on request") def test_generate_setup_cfg(): cwd = TESTS_SETUPS_PATH / 'test_generate_setup_cfg' Path(cwd).mkdir(parents=True, exist_ok=True) config = settings.Config(**_DEFAULT_CONFIG) config.keywords = ['sample_project'] args = Args args.force = True args.cloud = True path = prepare.write_file_from_template(Path(PARDIR) / settings.DirName.TEMPLATES / settings.FileName.SETUP_CFG, Path(cwd) / settings.FileName.SETUP_CFG, config.__dict__, cwd, args) config_from_setup = utils.get_repo_config_from_setup_cfg(path[0]) if Path(cwd).exists(): shutil.rmtree(Path(cwd), ignore_errors=False, 
onerror=_error_remove_readonly) assert config.__dict__ == config_from_setup.__dict__ assert config_from_setup.keywords[0] == config.project_name @pytest.mark.skipif(SKIP_ALL_MARKED, reason="Skipped on request") def test_generate_module_repo_dirs(): cwd = TESTS_SETUPS_PATH / 'test_generate_module_repo_dirs' Path(cwd).mkdir(parents=True, exist_ok=True) config = settings.Config(**_DEFAULT_CONFIG) prepare._generate_repo_dirs(config, cwd) generated_dirset = set() for dirname in Path(cwd).iterdir(): generated_dirset.add(dirname.name) if dirname.name == settings.DirName.REPOASSIST: for dirnamelvl2 in (Path(cwd) / settings.DirName.REPOASSIST).iterdir(): generated_dirset.add(str(Path(dirname.name) / dirnamelvl2.name)) pprint(generated_dirset) if Path(cwd).exists(): shutil.rmtree(Path(cwd), ignore_errors=False, onerror=_error_remove_readonly) assert generated_dirset == set(settings.REPO_DIRS_TO_GEN) @pytest.mark.skipif(SKIP_ALL_MARKED, reason="Skipped on request") def test_generate_package_repo_SHOULD_generate_repo_tree_properly(): cwd = TESTS_SETUPS_PATH / 'test_generate_package_repo_SHOULD_generate_repo_tree_properly' if Path(cwd).exists(): shutil.rmtree(Path(cwd)) Path(cwd).mkdir(parents=True, exist_ok=True) expected_paths = { 'docs', 'README.md', '.gitignore', 'TODO.md', 'conftest.py', 'requirements.txt', 'requirements-dev.txt', 'Makefile', 'LICENSE', 'tox.ini', 'setup.cfg', 'setup.py', _DEFAULT_CONFIG['project_name'], '{}/{}'.format(_DEFAULT_CONFIG['project_name'], settings.FileName.CLI), '{}/{}'.format(_DEFAULT_CONFIG['project_name'], settings.FileName.MAIN), '{}/{}'.format(_DEFAULT_CONFIG['project_name'], settings.FileName.PACKAGE_SAMPLE_MODULE), '{}/{}'.format(_DEFAULT_CONFIG['project_name'], settings.FileName.PYINIT), 'tests', 'tests/{}'.format(settings.FileName.PYINIT), 'tests/{}'.format(settings.FileName.PACKAGE_SAMPLE_TEST), 'repoassist', 'repoassist/templates', 'repoassist/__init__.py', 'repoassist/__main__.py', 'repoassist/cli.py', 'repoassist/colreqs.py', 
'repoassist/settings.py', 'repoassist/logger.py', 'repoassist/release.py', 'repoassist/pygittools.py', 'repoassist/utils.py', 'repoassist/meldformat.py', 'repoassist/wizard.py', 'repoassist/sicloudman.py', 'repoassist/exceptions.py', 'repoassist/prepare.py', 'repoassist/reltools.py', 'repoassist/clean.py', 'repoassist/templates/CHANGELOG_generated.md.j2', 'repoassist/templates/CHANGELOG_prepared.md.j2', 'repoassist/templates/AUTHORS_prepared.md.j2', 'repoassist/templates/AUTHORS_generated.md.j2', 'repoassist/templates/requirements-dev.txt.j2', 'repoassist/README.md', 'cloud_credentials.txt', } config = settings.Config(**_DEFAULT_CONFIG) config.project_type = settings.ProjectType.PACKAGE.value config.is_sample_layout = True args = Args args.force = False args.cloud = True paths = prepare.generate_repo(config, cwd, options=args) paths = {path.relative_to(cwd).as_posix() for path in paths} pprint(paths) if Path(cwd).exists(): shutil.rmtree(Path(cwd), ignore_errors=False, onerror=_error_remove_readonly) assert paths == expected_paths @pytest.mark.skipif(SKIP_ALL_MARKED, reason="Skipped on request") def test_generate_package_repo_SHOULD_add_genereated_files_to_repo_tree_when_choosen(): cwd = TESTS_SETUPS_PATH / 'test_generate_package_repo_SHOULD_add_genereated_files_to_repo_tree_when_choosen' if Path(cwd).exists(): shutil.rmtree(Path(cwd), ignore_errors=False, onerror=_error_remove_readonly) Path(cwd).mkdir(parents=True, exist_ok=True) config = settings.Config(**_DEFAULT_CONFIG) config.project_type = settings.ProjectType.PACKAGE.value config.is_sample_layout = True config.is_git = True args = Args args.force = False args.cloud = True paths = prepare.generate_repo(config, cwd, options=args) paths = {path.relative_to(cwd).as_posix() for path in paths \ if path.is_file() and settings.FileName.CLOUD_CREDENTIALS not in path.__str__()} pprint(paths) pygittools.commit("Initial Commit", cwd) repo_paths = utils.get_git_repo_tree(cwd) repo_paths = 
{path.relative_to(cwd).as_posix() for path in repo_paths} pprint(repo_paths) assert paths == repo_paths if Path(cwd).exists(): shutil.rmtree(Path(cwd), ignore_errors=False, onerror=_error_remove_readonly) @pytest.mark.skipif(SKIP_ALL_MARKED, reason="Skipped on request") def test_generate_package_repo_SHOULD_generate_repo_tree_properly_WHEN_no_sample(): cwd = TESTS_SETUPS_PATH / 'test_generate_package_repo_SHOULD_generate_repo_tree_properly_WHEN_no_sample' if Path(cwd).exists(): shutil.rmtree(Path(cwd)) Path(cwd).mkdir(parents=True, exist_ok=True) expected_paths = { 'docs', 'README.md', '.gitignore', 'TODO.md', 'conftest.py', 'requirements.txt', 'requirements-dev.txt', 'Makefile', 'LICENSE', 'tox.ini', 'setup.cfg', 'setup.py', _DEFAULT_CONFIG['project_name'], 'tests', 'repoassist', 'repoassist/templates', 'repoassist/__init__.py', 'repoassist/__main__.py', 'repoassist/cli.py', 'repoassist/colreqs.py', 'repoassist/settings.py', 'repoassist/logger.py', 'repoassist/release.py', 'repoassist/pygittools.py', 'repoassist/utils.py', 'repoassist/meldformat.py', 'repoassist/wizard.py', 'repoassist/sicloudman.py', 'repoassist/exceptions.py', 'repoassist/prepare.py', 'repoassist/reltools.py', 'repoassist/clean.py', 'repoassist/templates/CHANGELOG_generated.md.j2', 'repoassist/templates/CHANGELOG_prepared.md.j2', 'repoassist/templates/AUTHORS_prepared.md.j2', 'repoassist/templates/AUTHORS_generated.md.j2', 'repoassist/templates/requirements-dev.txt.j2', 'repoassist/README.md', 'cloud_credentials.txt', } config = settings.Config(**_DEFAULT_CONFIG) config.project_type = settings.ProjectType.PACKAGE.value config.is_sample_layout = False args = Args args.force = False args.cloud = True paths = prepare.generate_repo(config, cwd, options=args) paths = {path.relative_to(cwd).as_posix() for path in paths} pprint(paths) if Path(cwd).exists(): shutil.rmtree(Path(cwd), ignore_errors=False, onerror=_error_remove_readonly) assert paths == expected_paths @pytest.mark.skipif(SKIP_ALL_MARKED, 
reason="Skipped on request") def test_generate_module_repo_SHOULD_generate_repo_tree_properly(): cwd = TESTS_SETUPS_PATH / 'test_generate_module_repo_SHOULD_generate_repo_tree_properly' if Path(cwd).exists(): shutil.rmtree(Path(cwd)) Path(cwd).mkdir(parents=True, exist_ok=True) expected_paths = { 'docs', 'tests', 'README.md', '.gitignore', 'TODO.md', 'conftest.py', '{}.py'.format(_DEFAULT_CONFIG['project_name']), 'tests/{}_test.py'.format(_DEFAULT_CONFIG['project_name']), 'tests/__init__.py', 'requirements.txt', 'requirements-dev.txt', 'Makefile', 'LICENSE', 'tox.ini', 'setup.cfg', 'setup.py', 'repoassist', 'repoassist/templates', 'repoassist/__init__.py', 'repoassist/__main__.py', 'repoassist/cli.py', 'repoassist/colreqs.py', 'repoassist/settings.py', 'repoassist/logger.py', 'repoassist/release.py', 'repoassist/pygittools.py', 'repoassist/utils.py', 'repoassist/meldformat.py', 'repoassist/wizard.py', 'repoassist/sicloudman.py', 'repoassist/exceptions.py', 'repoassist/prepare.py', 'repoassist/reltools.py', 'repoassist/clean.py', 'repoassist/templates/CHANGELOG_generated.md.j2', 'repoassist/templates/CHANGELOG_prepared.md.j2', 'repoassist/templates/AUTHORS_prepared.md.j2', 'repoassist/templates/AUTHORS_generated.md.j2', 'repoassist/templates/requirements-dev.txt.j2', 'repoassist/README.md', 'cloud_credentials.txt', } config = settings.Config(**_DEFAULT_CONFIG) config.project_type = settings.ProjectType.MODULE.value config.is_sample_layout = True args = Args args.force = False args.cloud = True paths = prepare.generate_repo(config, cwd, options=args) paths = {path.relative_to(cwd).as_posix() for path in paths} pprint(paths) if Path(cwd).exists(): shutil.rmtree(Path(cwd), ignore_errors=False, onerror=_error_remove_readonly) assert paths == expected_paths @pytest.mark.skipif(SKIP_ALL_MARKED, reason="Skipped on request") def test_generate_module_repo_SHOULD_generate_repo_tree_properly_WHEN_no_sample(): cwd = TESTS_SETUPS_PATH / 
'test_generate_module_repo_SHOULD_generate_repo_tree_properly_WHEN_no_sample' if Path(cwd).exists(): shutil.rmtree(Path(cwd)) Path(cwd).mkdir(parents=True, exist_ok=True) expected_paths = { 'docs', 'tests', 'README.md', '.gitignore', 'TODO.md', 'conftest.py', 'tests/__init__.py', 'requirements.txt', 'requirements-dev.txt', 'Makefile', 'LICENSE', 'tox.ini', 'setup.cfg', 'setup.py', 'repoassist', 'repoassist/templates', 'repoassist/__init__.py', 'repoassist/__main__.py', 'repoassist/cli.py', 'repoassist/colreqs.py', 'repoassist/settings.py', 'repoassist/logger.py', 'repoassist/release.py', 'repoassist/pygittools.py', 'repoassist/utils.py', 'repoassist/meldformat.py', 'repoassist/wizard.py', 'repoassist/sicloudman.py', 'repoassist/exceptions.py', 'repoassist/prepare.py', 'repoassist/reltools.py', 'repoassist/clean.py', 'repoassist/templates/CHANGELOG_generated.md.j2', 'repoassist/templates/CHANGELOG_prepared.md.j2', 'repoassist/templates/AUTHORS_prepared.md.j2', 'repoassist/templates/AUTHORS_generated.md.j2', 'repoassist/templates/requirements-dev.txt.j2', 'repoassist/README.md', 'cloud_credentials.txt', } config = settings.Config(**_DEFAULT_CONFIG) config.project_type = settings.ProjectType.MODULE.value config.is_sample_layout = False args = Args args.force = False args.cloud = True paths = prepare.generate_repo(config, cwd, options=args) paths = {path.relative_to(cwd).as_posix() for path in paths} pprint(paths) if Path(cwd).exists(): shutil.rmtree(Path(cwd), ignore_errors=False, onerror=_error_remove_readonly) assert paths == expected_paths @pytest.mark.skipif(SKIP_ALL_MARKED, reason="Skipped on request") def test_generate_module_repo_SHOULD_force_properly(): cwd = TESTS_SETUPS_PATH / 'test_generate_module_repo_SHOULD_force_properly' if Path(cwd).exists(): shutil.rmtree(Path(cwd), ignore_errors=False, onerror=_error_remove_readonly) Path(cwd).mkdir(parents=True, exist_ok=True) config = settings.Config(**_DEFAULT_CONFIG) config.project_type = 
settings.ProjectType.MODULE.value config.is_sample_layout = True args = Args args.force = True args.cloud = True args.sample_layout = True for dirname in settings.REPO_DIRS_TO_GEN: Path(Path(cwd) / dirname).mkdir(exist_ok=True) test_content = '<#test_content#>' files_paths_to_overwrite = [] for file in settings.MODULE_REPO_FILES_TO_GEN: if settings.PROJECT_NAME_PATH_PLACEHOLDER in str(file.dst): dst = Path(str(file.dst).replace(settings.PROJECT_NAME_PATH_PLACEHOLDER, config.project_name)) else: dst = file.dst with open(Path(cwd) / dst, 'w') as testfile: testfile.write(test_content) files_paths_to_overwrite.append(Path(cwd) / dst) for file in settings.REPOASSIST_FILES: with open(Path(cwd) / file.dst, 'w') as testfile: testfile.write(test_content) files_paths_to_overwrite.append(Path(cwd) / file.dst) prepare.generate_repo(config, cwd, options=args) pprint(files_paths_to_overwrite) for path in files_paths_to_overwrite: with open(path, 'r') as file: content = file.readlines() if content == test_content: assert False, "{} file not overwritten!".format(path) if Path(cwd).exists(): shutil.rmtree(Path(cwd), ignore_errors=False, onerror=_error_remove_readonly) @pytest.mark.skipif(SKIP_ALL_MARKED, reason="Skipped on request") def test_generate_module_repo_SHOULD_generate_makefile_without_cloud_properly(): cwd = TESTS_SETUPS_PATH / 'test_generate_module_repo_SHOULD_generate_makefile_without_cloud_properly' if Path(cwd).exists(): shutil.rmtree(Path(cwd)) Path(cwd).mkdir(parents=True, exist_ok=True) config = settings.Config(**_DEFAULT_CONFIG) config.project_type = settings.ProjectType.MODULE.value options = Args options.force = True options.cloud = False paths = prepare.generate_repo(config, cwd, options=options) paths = {path.relative_to(cwd).as_posix() for path in paths} with open(Path(cwd) / settings.FileName.MAKEFILE) as file: makefile_content = file.read() if Path(cwd).exists(): shutil.rmtree(Path(cwd), ignore_errors=False, onerror=_error_remove_readonly) assert "make 
upload" not in makefile_content @pytest.mark.skipif(SKIP_ALL_MARKED, reason="Skipped on request") def test_generate_module_repo_SHOULD_not_generate_cloud_credentials_without_cloud(): cwd = TESTS_SETUPS_PATH / 'test_generate_module_repo_SHOULD_not_generate_cloud_credentials_without_cloud' if Path(cwd).exists(): shutil.rmtree(Path(cwd)) Path(cwd).mkdir(parents=True, exist_ok=True) config = settings.Config(**_DEFAULT_CONFIG) config.project_type = settings.ProjectType.MODULE.value options = Args options.force = True options.cloud = False paths = prepare.generate_repo(config, cwd, options=options) paths = {path.relative_to(cwd).as_posix() for path in paths} if Path(cwd).exists(): shutil.rmtree(Path(cwd), ignore_errors=False, onerror=_error_remove_readonly) assert settings.FileName.CLOUD_CREDENTIALS not in paths @pytest.mark.skipif(SKIP_ALL_MARKED, reason="Skipped on request") def test_generate_module_repo_SHOULD_generate_makefile_with_cloud_properly(): cwd = TESTS_SETUPS_PATH / 'test_generate_module_repo_SHOULD_generate_makefile_with_cloud_properly' if Path(cwd).exists(): shutil.rmtree(Path(cwd)) Path(cwd).mkdir(parents=True, exist_ok=True) config = settings.Config(**_DEFAULT_CONFIG) config.project_type = settings.ProjectType.MODULE.value options = Args options.force = True options.cloud = True paths = prepare.generate_repo(config, cwd, options=options) paths = {path.relative_to(cwd).as_posix() for path in paths} with open(Path(cwd) / settings.FileName.MAKEFILE) as file: makefile_content = file.read() if Path(cwd).exists(): shutil.rmtree(Path(cwd), ignore_errors=False, onerror=_error_remove_readonly) assert "make upload" in makefile_content @pytest.mark.skipif(SKIP_ALL_MARKED, reason="Skipped on request") def test_generate_empty_file_SHOULD_generate_file_when_no_exists(): cwd = TESTS_SETUPS_PATH / 'test_generate_empty_file_SHOULD_generate_file_when_no_exists' if Path(cwd).exists(): shutil.rmtree(Path(cwd), ignore_errors=False, onerror=_error_remove_readonly) 
Path(cwd).mkdir(parents=True, exist_ok=True) path = Path(cwd) / 'file.txt' options = Args options.force = True prepare._generate_empty_file(path, cwd, options) assert Path(path).exists() if Path(cwd).exists(): shutil.rmtree(Path(cwd), ignore_errors=False, onerror=_error_remove_readonly) @pytest.mark.skipif(SKIP_ALL_MARKED, reason="Skipped on request") def test_generate_empty_file_SHOULD_overwrite_file_when_force(): cwd = TESTS_SETUPS_PATH / 'test_generate_empty_file_SHOULD_overwrite_file_when_force' if Path(cwd).exists(): shutil.rmtree(Path(cwd), ignore_errors=False, onerror=_error_remove_readonly) Path(cwd).mkdir(parents=True, exist_ok=True) path = Path(cwd) / 'file.py' options = Args options.force = True with open(path, 'w') as file: file.write("line") prepare._generate_empty_file(path, cwd, options) with open(path, 'r') as file: content = file.read() assert content == '' if Path(cwd).exists(): shutil.rmtree(Path(cwd), ignore_errors=False, onerror=_error_remove_readonly) def update_repoassist_setup(cwd): config = settings.Config(**_DEFAULT_CONFIG) options = settings.Options() options.force = True options.cloud = config.is_cloud options.sample_layout = config.is_sample_layout options.project_type = config.project_type repoassit_path = cwd / 'repoassist' repoassist_templates_path = cwd / 'repoassist/templates' repoassist_templates_path.mkdir(exist_ok=True, parents=True) return config, options, repoassit_path, repoassist_templates_path @pytest.mark.skipif(SKIP_ALL_MARKED, reason="Skipped on request") def test_update_repoassist_SHOULD_add_new_files_if_repoassist_empty(cwd): config, options, repoassit_path, repoassist_templates_path = update_repoassist_setup(cwd) repoassist_paths_expected = prepare._generate_repoasist(config, cwd, options).paths shutil.rmtree(repoassit_path) assert repoassit_path.exists() == False repoassist_templates_path.mkdir(exist_ok=True, parents=True) new_files, removed_files = prepare.update_repoassist(config, cwd, add_to_tree=False, 
options=options) repoassist_files = {item for item in repoassit_path.rglob('*') if item.is_file()} assert set(repoassist_files) == set(repoassist_paths_expected) assert set(new_files) == set(repoassist_paths_expected) assert set(removed_files).__len__() == 0 @pytest.mark.skipif(SKIP_ALL_MARKED, reason="Skipped on request") def test_update_repoassist_SHOULD_overwrite_repoassist_files(cwd): config, options, repoassit_path, repoassist_templates_path = update_repoassist_setup(cwd) repoassist_paths_expected = prepare._generate_repoasist(config, cwd, options).paths shutil.rmtree(repoassit_path) assert repoassit_path.exists() == False repoassist_templates_path.mkdir(exist_ok=True, parents=True) for path in repoassist_paths_expected: path.touch() assert (repoassit_path / settings.FileName.MAIN).read_text() == '' new_files, removed_files = prepare.update_repoassist(config, cwd, add_to_tree=False, options=options) repoassist_files = {item for item in repoassit_path.rglob('*') if item.is_file()} assert (repoassit_path / settings.FileName.MAIN).read_text != '' assert set(repoassist_files) == set(repoassist_paths_expected) assert new_files.__len__() == 0 assert set(removed_files).__len__() == 0 @pytest.mark.skipif(SKIP_ALL_MARKED, reason="Skipped on request") def test_update_repoassist_SHOULD_add_new_files_to_repo_tree_and_remove_old_from_tree_and_from_drive(cwd): config, options, repoassit_path, _ = update_repoassist_setup(cwd) pygittools.init(cwd) repoassist_paths_expected = prepare._generate_repoasist(config, cwd, options).paths paths_to_git_add = list(repoassist_paths_expected) paths_to_git_add = [path for path in paths_to_git_add if (settings.FileName.MAIN not in path.__str__() and settings.FileName.CLI not in path.__str__())] (repoassit_path / settings.FileName.MAIN).unlink() (repoassit_path / settings.FileName.CLI).unlink() (repoassit_path / 'dummy_file.txt').touch() (repoassit_path / 'dummy_file2.txt').touch() paths_to_git_add.append(repoassit_path / 'dummy_file.txt') 
for path in paths_to_git_add: pygittools.add(path, cwd) pygittools.commit('First Commit', cwd) repo_tree = utils.get_git_repo_tree(cwd) pprint(repo_tree) assert set(repo_tree) != set(repoassist_paths_expected) new_files, removed_files = prepare.update_repoassist(config, cwd, add_to_tree=True, options=options) pprint(new_files) pygittools.commit('Second Commit', cwd) paths_to_git_add.remove(repoassit_path / 'dummy_file.txt') assert repoassit_path / settings.FileName.MAIN in new_files assert repoassit_path / settings.FileName.CLI in new_files assert repoassit_path / 'dummy_file.txt' in removed_files assert repoassit_path / 'dummy_file2.txt' in removed_files assert new_files.__len__() == 2 assert removed_files.__len__() == 2 repoassist_files = {item for item in repoassit_path.rglob('*') if item.is_file()} assert set(repoassist_files) == set(repoassist_paths_expected) repo_tree = utils.get_git_repo_tree(cwd) pprint(repo_tree) assert set(repo_tree) == set(repoassist_paths_expected)
35.316474
122
0.683089
2,949
24,439
5.363174
0.073923
0.034522
0.013657
0.022762
0.815883
0.779148
0.763025
0.728123
0.713012
0.696131
0
0.001647
0.204918
24,439
691
123
35.367583
0.81231
0.001719
0
0.675422
0
0
0.195327
0.113712
0
0
0
0
0.061914
1
0.035647
false
0
0.022514
0
0.067542
0.02439
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
8df8cf209ce48c6ec6c789fdf771d6854d7b94fe
30
py
Python
tests/sfko/sfko/util/queue/__init__.py
Public-Cloud-Projects/Zenko
d62a7bad90b69108bbf9ea1cbd2f9b983f1235b7
[ "Apache-2.0" ]
453
2017-07-11T13:14:05.000Z
2022-03-31T20:50:01.000Z
tests/sfko/sfko/util/queue/__init__.py
Public-Cloud-Projects/Zenko
d62a7bad90b69108bbf9ea1cbd2f9b983f1235b7
[ "Apache-2.0" ]
987
2017-07-11T16:56:45.000Z
2022-03-31T19:36:10.000Z
tests/sfko/sfko/util/queue/__init__.py
Public-Cloud-Projects/Zenko
d62a7bad90b69108bbf9ea1cbd2f9b983f1235b7
[ "Apache-2.0" ]
100
2017-07-11T16:16:29.000Z
2022-03-11T15:07:54.000Z
from .client import TaskQueue
15
29
0.833333
4
30
6.25
1
0
0
0
0
0
0
0
0
0
0
0
0.133333
30
1
30
30
0.961538
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
5c4b2fefe240a58064b2f6f060d54a63e6c3f26c
80
py
Python
guet/config/errors.py
sturzl/guet
b8c453f07968b689b303e20e7a31b405c02c54ef
[ "Apache-2.0" ]
13
2018-12-21T22:47:28.000Z
2021-12-17T14:27:35.000Z
guet/config/errors.py
sturzl/guet
b8c453f07968b689b303e20e7a31b405c02c54ef
[ "Apache-2.0" ]
63
2018-08-30T11:19:12.000Z
2021-05-13T12:11:08.000Z
guet/config/errors.py
sturzl/guet
b8c453f07968b689b303e20e7a31b405c02c54ef
[ "Apache-2.0" ]
7
2019-05-21T13:52:37.000Z
2022-01-30T22:57:21.000Z
from guet import GuetError class AlreadyInitializedError(GuetError): pass
13.333333
41
0.8
8
80
8
0.875
0
0
0
0
0
0
0
0
0
0
0
0.1625
80
5
42
16
0.955224
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
3082b2a859713407c8ec287ebf6723ae7d202211
3,060
py
Python
python/binarytree-traversal/preorder_test.py
gurupratap-matharu/Exercism
a083c8d4bbc10d777524d917329a34e560201c9c
[ "MIT" ]
null
null
null
python/binarytree-traversal/preorder_test.py
gurupratap-matharu/Exercism
a083c8d4bbc10d777524d917329a34e560201c9c
[ "MIT" ]
null
null
null
python/binarytree-traversal/preorder_test.py
gurupratap-matharu/Exercism
a083c8d4bbc10d777524d917329a34e560201c9c
[ "MIT" ]
null
null
null
import unittest from preorder import Tree, TreeNode class TreeTests(unittest.TestCase): """Test suite for binary tree traversal using recursion and iteration method""" def test_preorder_recursive_traversal_for_null_binary_tree(self): root = TreeNode() self.assertEqual(Tree().preorder_recursive_traversal(root=root), [0]) def test_preorder_recursive_traversal_for_single_node_binary_tree(self): root = TreeNode(val=5, left=None, right=None) self.assertEqual(Tree().preorder_recursive_traversal(root=root), [5]) def test_preorder_recursive_traversal_for_simple_binary_tree(self): root = TreeNode(val=1, left=TreeNode(val=2)) self.assertEqual(Tree().preorder_recursive_traversal(root=root), [1, 2]) def test_preorder_recursive_traversal_for_medium_binary_tree(self): root = TreeNode(1, right=TreeNode(2, left=TreeNode(3))) self.assertEqual(Tree().preorder_recursive_traversal(root=root), [1, 2, 3]) def test_preorder_recursive_traversal_for_large_binary_tree(self): node_9 = TreeNode(9) node_11 = TreeNode(11) node_37 = TreeNode(37) node_21 = TreeNode(21, left=node_9) node_49 = TreeNode(49, right=node_11) node_17 = TreeNode(17, left=node_37) node_77 = TreeNode(77, left=node_49, right=node_17) node_7 = TreeNode(7, right=node_21) root = TreeNode(100, left=node_7, right=node_77) self.assertEqual( Tree().preorder_recursive_traversal(root=root), [100, 7, 21, 9, 77, 49, 11, 17, 37], ) def test_preorder_iterative_traversal_for_null_binary_tree(self): root = TreeNode() self.assertEqual(Tree().preorder_iterative_traversal(root=root), [0]) def test_preorder_iterative_traversal_for_single_node_binary_tree(self): root = TreeNode(val=5, left=None, right=None) self.assertEqual(Tree().preorder_iterative_traversal(root=root), [5]) def test_preorder_iterative_traversal_for_simple_binary_tree(self): root = TreeNode(val=1, left=TreeNode(val=2)) self.assertEqual(Tree().preorder_iterative_traversal(root=root), [1, 2]) def test_preorder_iterative_traversal_for_medium_binary_tree(self): root = TreeNode(1, 
right=TreeNode(2, left=TreeNode(3))) self.assertEqual(Tree().preorder_iterative_traversal(root=root), [1, 2, 3]) def test_preorder_iterative_traversal_for_large_binary_tree(self): node_9 = TreeNode(9) node_11 = TreeNode(11) node_37 = TreeNode(37) node_21 = TreeNode(21, left=node_9) node_49 = TreeNode(49, right=node_11) node_17 = TreeNode(17, left=node_37) node_77 = TreeNode(77, left=node_49, right=node_17) node_7 = TreeNode(7, right=node_21) root = TreeNode(100, left=node_7, right=node_77) self.assertEqual( Tree().preorder_iterative_traversal(root=root), [100, 7, 21, 9, 77, 49, 11, 17, 37], ) if __name__ == "__main__": unittest.main()
40.263158
83
0.688562
413
3,060
4.782082
0.125908
0.055696
0.075949
0.136709
0.919494
0.919494
0.858734
0.826329
0.749367
0.733165
0
0.061275
0.2
3,060
75
84
40.8
0.745507
0.023856
0
0.526316
0
0
0.002684
0
0
0
0
0
0.175439
1
0.175439
false
0
0.035088
0
0.22807
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
308344b006b15036478016fb582c869c0a54fc8d
2,150
py
Python
tests/test_upstreams/test_hacker_news.py
SudoQ/rumor
a02a0b8b4ee929d9cd41c33816b1533c24b9cdb3
[ "MIT" ]
1
2019-09-16T13:57:27.000Z
2019-09-16T13:57:27.000Z
tests/test_upstreams/test_hacker_news.py
SudoQ/rumor
a02a0b8b4ee929d9cd41c33816b1533c24b9cdb3
[ "MIT" ]
null
null
null
tests/test_upstreams/test_hacker_news.py
SudoQ/rumor
a02a0b8b4ee929d9cd41c33816b1533c24b9cdb3
[ "MIT" ]
null
null
null
from unittest.mock import MagicMock, patch import pytest from rumor.exceptions import UpstreamError from rumor.upstreams.hacker_news import (get_news_items, news_item_source_request) @patch('rumor.upstreams.hacker_news.requests') def test_get_news_items_ok(mock_requests): mock_response = MagicMock() mock_response.json.return_value = [1, 13, 24] mock_response.status_code = 200 mock_requests.get.return_value = mock_response target_api_url = 'https://some-url' results = get_news_items(target_api_url) assert results == [1, 13, 24] mock_requests.get.assert_called_once_with(f'{target_api_url}/v0/topstories.json') @patch('rumor.upstreams.hacker_news.requests') def test_get_news_items_error(mock_requests): mock_response = MagicMock() mock_response.status_code = 500 mock_requests.get.return_value = mock_response target_api_url = 'https://some-url' with pytest.raises(UpstreamError): get_news_items(target_api_url) mock_requests.get.assert_called_once_with(f'{target_api_url}/v0/topstories.json') @patch('rumor.upstreams.hacker_news.requests') def test_news_item_source_request_ok(mock_requests): mock_response = MagicMock() mock_response.json.return_value = {'foo': 'bar'} mock_response.status_code = 200 mock_requests.get.return_value = mock_response news_item_id = '4753' target_api_url = 'https://some-url' results = news_item_source_request(news_item_id, target_api_url) assert results == {'foo': 'bar'} mock_requests.get.assert_called_once_with(f'{target_api_url}/v0/item/{news_item_id}.json') @patch('rumor.upstreams.hacker_news.requests') def test_news_item_source_request_error(mock_requests): mock_response = MagicMock() mock_response.status_code = 500 mock_requests.get.return_value = mock_response news_item_id = '4753' target_api_url = 'https://some-url' with pytest.raises(UpstreamError): news_item_source_request(news_item_id, target_api_url) mock_requests.get.assert_called_once_with(f'{target_api_url}/v0/item/{news_item_id}.json')
31.617647
94
0.749302
304
2,150
4.898026
0.171053
0.112827
0.096709
0.080591
0.882471
0.86501
0.840833
0.83143
0.83143
0.83143
0
0.018569
0.148372
2,150
67
95
32.089552
0.794648
0
0
0.622222
0
0
0.179535
0.140465
0
0
0
0
0.133333
1
0.088889
false
0
0.088889
0
0.177778
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
30c1fbf52fa07c2fb65841f1adc873e5a79f3a93
123
py
Python
tests/test_dummy.py
hbredin/pyannote-database
11d8dcb72e54874fc9437cfbd7d944dcce7e9217
[ "MIT" ]
31
2018-05-24T11:48:20.000Z
2022-03-12T12:39:35.000Z
tests/test_dummy.py
hbredin/pyannote-database
11d8dcb72e54874fc9437cfbd7d944dcce7e9217
[ "MIT" ]
59
2016-11-17T13:13:11.000Z
2022-03-16T15:41:12.000Z
tests/test_dummy.py
hbredin/pyannote-database
11d8dcb72e54874fc9437cfbd7d944dcce7e9217
[ "MIT" ]
20
2017-07-11T12:43:44.000Z
2022-03-10T09:50:40.000Z
import pytest from pyannote.database import get_databases def test_dummy(): assert isinstance(get_databases(), list)
17.571429
44
0.788618
16
123
5.875
0.8125
0.255319
0
0
0
0
0
0
0
0
0
0
0.138211
123
6
45
20.5
0.886792
0
0
0
0
0
0
0
0
0
0
0
0.25
1
0.25
true
0
0.5
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
0
0
0
6
eb9e198030ca5b7518fd16d993637a03d898e777
31
py
Python
projects/plyplus/test.py
fleimgruber/python
2e735762c73651cffc027ca850b2a58d87d54b49
[ "Unlicense" ]
25
2021-10-30T19:54:59.000Z
2022-03-29T06:11:02.000Z
projects/plyplus/test.py
fleimgruber/python
2e735762c73651cffc027ca850b2a58d87d54b49
[ "Unlicense" ]
21
2021-10-19T01:09:38.000Z
2022-03-24T16:08:53.000Z
projects/plyplus/test.py
fleimgruber/python
2e735762c73651cffc027ca850b2a58d87d54b49
[ "Unlicense" ]
3
2022-01-25T20:25:13.000Z
2022-03-08T02:58:50.000Z
import examples import plyplus
10.333333
15
0.870968
4
31
6.75
0.75
0
0
0
0
0
0
0
0
0
0
0
0.129032
31
2
16
15.5
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
69576573117a59862010988dc8a7daa26708b1b8
106,207
py
Python
likeyoubot_blade2.py
dogfooter-master/dogfooter
e1e39375703fe3019af7976f97c44cf2cb7ca0fa
[ "MIT" ]
null
null
null
likeyoubot_blade2.py
dogfooter-master/dogfooter
e1e39375703fe3019af7976f97c44cf2cb7ca0fa
[ "MIT" ]
null
null
null
likeyoubot_blade2.py
dogfooter-master/dogfooter
e1e39375703fe3019af7976f97c44cf2cb7ca0fa
[ "MIT" ]
null
null
null
import likeyoubot_game as lybgame import likeyoubot_blade2_scene as lybscene from likeyoubot_configure import LYBConstant as lybconstant import time import sys import tkinter from tkinter import ttk from tkinter import font import copy class LYBBlade2(lybgame.LYBGame): work_list = [ '게임 시작', '로그인', '모험', '모두팔기', '장비교체', '자동분해', '우편함', '반격 던전', '영웅의 탑', '레이드', '일대일 대전', '팀 대전', '점령전', '친구', '명예의 전당', '알림', '[반복 시작]', '[반복 종료]', '[작업 대기]', '[작업 예약]', ''] blade2_icon_list = [ 'blade2_icon' ] hero_list = [ '검투사', '암살자', '마법사', '격투가', ] stone_list = [ '무기 승급석', '방어구 승급석', '장신구 승급석' ] gyeoltoo_list = [ '일대일 대전', '팀 대전', '점령전', ] item_equip_list = [ '무기', '방어구', '장신구', ] item_status_list = [ '레벨업 중', '강화 중', '정수', ] moheom_level_list = [ '일반', '정예', '악몽', '지옥' ] character_move_list = [ "↑", "↗", "→", "↘", "↓", "↙", "←", "↖" ] tier_list = [ 'T1', 'T2', 'T3', 'T4' ] auto_combat_list = [ 'AUTO', 'SKILL AUTO' ] def __init__(self, game_name, game_data_name, window): lybgame.LYBGame.__init__(self, lybconstant.LYB_GAME_BLADE2, lybconstant.LYB_GAME_DATA_BLADE2, window) def process(self, window_image): rc = super(LYBBlade2, self).process(window_image) if rc < 0: return rc return rc def custom_check(self, window_image, window_pixel): pb_name = 'confirm' (loc_x, loc_y), match_rate = self.locationOnWindowPart( self.window_image, self.resource_manager.pixel_box_dic[pb_name], custom_below_level=(200, 170, 100), custom_top_level=(230, 190, 120), custom_threshold=0.8, custom_flag=1, custom_rect=(540, 350, 590, 380) ) if loc_x != -1: self.logger.warn('확인: ' + str(match_rate)) self.mouse_click(pb_name) (loc_x, loc_y), match_rate = self.locationResourceOnWindowPart( self.window_image, 'jeomryoungjeon_combat_scene_result_loc', custom_top_level=(235, 235, 235), custom_below_level=(115, 115, 115), custom_threshold=0.7, custom_flag=1, custom_rect=(515, 350, 580, 380) ) if loc_x != -1: self.logger.warn('결과 보기: ' + str(match_rate)) self.mouse_click('jeomryoungjeon_combat_scene_result_0') # 패배! 
# (loc_x, loc_y), match_rate = self.locationResourceOnWindowPart( # self.window_image, # 'defeat_press_key_loc', # custom_below_level=(250, 250, 250), # custom_top_level=(255, 255, 255), # custom_threshold=0.7, # custom_flag=1, # custom_rect=(280, 190, 360, 230) # ) # if loc_x != -1: # self.logger.warn('전투 패배: ' + str(match_rate)) # self.mouse_click('defeat_press_key_0') return '' def get_screen_by_location(self, window_image): scene_name = self.scene_init_screen(window_image) if len(scene_name) > 0: return scene_name # scene_name = self.jeontoo_scene(window_image) # if len(scene_name) > 0: # return scene_name # scene_name = self.scene_google_play_account_select(window_image) # if len(scene_name) > 0: # return scene_name return '' # def jeontoo_scene(self, window_image): # (loc_x, loc_y), match_rate = self.locationResourceOnWindowPart( # self.window_image, # 'jeontoo_scene_loc', # custom_below_level=(100, 100, 100), # custom_top_level=(255, 255, 255), # custom_threshold=0.7, # custom_flag=1, # custom_rect=(5, 90, 80, 130) # ) # if match_rate > 0.7: # return 'jeontoo_scene' # return '' def scene_init_screen(self, window_image): loc_x = -1 loc_y = -1 if self.player_type == 'nox': for each_icon in LYBBlade2.blade2_icon_list: (loc_x, loc_y), match_rate = self.locationOnWindowPart( window_image, self.resource_manager.pixel_box_dic[each_icon], custom_threshold=0.8, custom_flag=1, custom_rect=(80, 110, 570, 300) ) # self.logger.debug(each_icon + ' ' + str((loc_x, loc_y)) + ' ' + str(match_rate)) if loc_x != -1: break elif self.player_type == 'momo': for each_icon in LYBBlade2.blade2_icon_list: (loc_x, loc_y), match_rate = self.locationOnWindowPart( window_image, self.resource_manager.pixel_box_dic[each_icon], custom_threshold=0.8, custom_flag=1, custom_rect=(30, 10, 610, 300) ) # self.logger.debug(each_icon + ' ' + str((loc_x, loc_y)) + ' ' + str(match_rate)) if loc_x != -1: break if loc_x == -1: return '' return 'init_screen_scene' def 
scene_google_play_account_select(self, window_image): loc_x_list = [] loc_y_list = [] (loc_x, loc_y) = lybgame.LYBGame.locationOnWindow( window_image, self.resource_manager.pixel_box_dic['google_play_letter'] ) loc_x_list.append(loc_x) loc_y_list.append(loc_y) for i in range(6): (loc_x, loc_y) = lybgame.LYBGame.locationOnWindow( window_image, self.resource_manager.pixel_box_dic['google_play_letter_' + str(i)] ) loc_x_list.append(loc_x) loc_y_list.append(loc_y) for each_loc in loc_x_list: if each_loc == -1: return '' else: continue return 'google_play_account_select_scene' def clear_scene(self): last_scene = self.scene_dic self.scene_dic = {} for scene_name, scene in last_scene.items(): if ('google_play_account_select_scene' in scene_name or 'logo_screen_scene' in scene_name or 'connect_account_scene' in scene_name ): self.scene_dic[scene_name] = last_scene[scene_name] def add_scene(self, scene_name): self.scene_dic[scene_name] = lybscene.LYBBlade2Scene(scene_name) self.scene_dic[scene_name].setLoggingQueue(self.logging_queue) self.scene_dic[scene_name].setGameObject(self) class LYBBlade2Tab(lybgame.LYBGameTab): def __init__(self, root_frame, configure, game_options, inner_frame_dics, width, height, game_name=lybconstant.LYB_GAME_BLADE2): lybgame.LYBGameTab.__init__(self, root_frame, configure, game_options, inner_frame_dics, width, height, game_name) def set_work_list(self): lybgame.LYBGameTab.set_work_list(self) for each_work in LYBBlade2.work_list: self.option_dic['work_list_listbox'].insert('end', each_work) self.configure.common_config[self.game_name]['work_list'].append(each_work) def set_option(self): # PADDING frame = ttk.Frame( master=self.master, relief=self.frame_relief ) frame.pack(pady=5) self.inner_frame_dic['options'] = ttk.Frame( master=self.master, relief=self.frame_relief ) self.option_dic['option_note'] = ttk.Notebook( master=self.inner_frame_dic['options'] ) self.inner_frame_dic['common_tab_frame'] = ttk.Frame( 
master=self.option_dic['option_note'], relief=self.frame_relief ) self.inner_frame_dic['common_tab_frame'].pack(anchor=tkinter.NW, fill=tkinter.BOTH, expand=True) self.option_dic['option_note'].add(self.inner_frame_dic['common_tab_frame'], text='일반') self.inner_frame_dic['work_tab_frame'] = ttk.Frame( master=self.option_dic['option_note'], relief=self.frame_relief ) self.inner_frame_dic['work_tab_frame'].pack(anchor=tkinter.NW, fill=tkinter.BOTH, expand=True) self.option_dic['option_note'].add(self.inner_frame_dic['work_tab_frame'], text='작업') self.inner_frame_dic['work_2_tab_frame'] = ttk.Frame( master=self.option_dic['option_note'], relief=self.frame_relief ) self.inner_frame_dic['work_2_tab_frame'].pack(anchor=tkinter.NW, fill=tkinter.BOTH, expand=True) self.option_dic['option_note'].add(self.inner_frame_dic['work_2_tab_frame'], text='작업2') self.inner_frame_dic['notify_tab_frame'] = ttk.Frame( master=self.option_dic['option_note'], relief=self.frame_relief ) self.inner_frame_dic['notify_tab_frame'].pack(anchor=tkinter.NW, fill=tkinter.BOTH, expand=True) self.option_dic['option_note'].add(self.inner_frame_dic['notify_tab_frame'], text='알림') # ------ # 일반 탭 좌측 frame_l = ttk.Frame(self.inner_frame_dic['common_tab_frame']) frame_label = ttk.LabelFrame(frame_l, text='설정') frame = ttk.Frame(frame_label) label = ttk.Label( master=frame, text=self.get_option_text("TAG 클릭 주기(초)") ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_tag_period'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_tag_period'].trace( 'w', lambda *args: self.callback_moheom_tag_period(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_tag_period') ) combobox_list = [] for i in range(0, 3601): combobox_list.append(str(i)) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_tag_period' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ 
lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_tag_period'] = 20 combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_tag_period'], state="readonly", height=10, width=5, font=lybconstant.LYB_FONT ) combobox.set( self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_tag_period']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ilil_quest'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ilil_quest'].trace( 'w', lambda *args: self.callback_ilil_quest(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ilil_quest') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ilil_quest' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ilil_quest'] = True check_box = ttk.Checkbutton( master=frame, text=self.get_option_text('일일 퀘스트 클릭'), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ilil_quest'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ilil_quest_continue'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ilil_quest_continue'].trace( 'w', lambda *args: self.callback_ilil_quest_continue(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ilil_quest_continue') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ilil_quest_continue' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ilil_quest_continue'] = True check_box = ttk.Checkbutton( master=frame, text=self.get_option_text('일일 퀘스트 연속 수행'), 
variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ilil_quest_continue'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_notice'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_notice'].trace( 'w', lambda *args: self.callback_raid_notice(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_notice') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_notice' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_notice'] = True check_box = ttk.Checkbutton( master=frame, text=self.get_option_text('레이드 알림 클릭'), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_notice'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jeomryoungjeon_notice'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jeomryoungjeon_notice'].trace( 'w', lambda *args: self.callback_jeomryoungjeon_notice(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jeomryoungjeon_notice') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jeomryoungjeon_notice' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jeomryoungjeon_notice'] = False check_box = ttk.Checkbutton( master=frame, text=self.get_option_text('점령전 알림 클릭'), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jeomryoungjeon_notice'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) # frame.pack(anchor=tkinter.NW) frame_label.pack(anchor=tkinter.NW, padx=5, pady=5) frame_l.pack(side=tkinter.LEFT, anchor=tkinter.NW) 
# 일반 탭 중간 frame_m = ttk.Frame(self.inner_frame_dic['common_tab_frame']) frame_m.pack(side=tkinter.LEFT, anchor=tkinter.NW) # 일반 탭 우측 frame_r = ttk.Frame(self.inner_frame_dic['common_tab_frame']) frame_r.pack(side=tkinter.LEFT, anchor=tkinter.NW) # 작업 탭 좌측 frame_l = ttk.Frame(self.inner_frame_dic['work_tab_frame']) frame_label = ttk.LabelFrame(frame_l, text='모험') frame = ttk.Frame(frame_label) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_level'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_level'].trace( 'w', lambda *args: self.callback_moheom_level(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_level') ) combobox_list = LYBBlade2.moheom_level_list if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_level' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_level'] = \ combobox_list[1] combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_level'], state="readonly", height=10, width=5, font=lybconstant.LYB_FONT ) combobox.set( self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_level']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_act'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_act'].trace( 'w', lambda *args: self.callback_moheom_act(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_act') ) combobox_list = [] for i in range(1, 6): combobox_list.append(str(i)) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_act' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_act'] = \ combobox_list[0] combobox = ttk.Combobox( master=frame, values=combobox_list, 
textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_act'], state="readonly", height=10, width=3, font=lybconstant.LYB_FONT ) combobox.set(self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_act']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) label = ttk.Label( master=frame, text="막" ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_stage'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_stage'].trace( 'w', lambda *args: self.callback_moheom_stage(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_stage') ) combobox_list = [] for i in range(1, 11): combobox_list.append(str(i)) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_stage' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_stage'] = \ combobox_list[0] combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_stage'], state="readonly", height=10, width=3, font=lybconstant.LYB_FONT ) combobox.set( self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_stage']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) label = ttk.Label( master=frame, text="스테이지" ) label.pack(side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) label = ttk.Label( master=frame, text=self.get_option_text("진행 횟수(0:무한)") ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_limit_count'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_limit_count'].trace( 'w', lambda *args: self.callback_moheom_limit_count(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_limit_count') ) combobox_list = [] for i in range(0, 1001): combobox_list.append(str(i)) if not 
lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_limit_count' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_limit_count'] = 0 combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_limit_count'], state="readonly", height=10, width=8, font=lybconstant.LYB_FONT ) combobox.set( self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_limit_count']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) label = ttk.Label( master=frame, text=self.get_option_text("봇 주기(초)(0:설정 안함)") ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_bot_period'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_bot_period'].trace( 'w', lambda *args: self.callback_moheom_bot_period(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_bot_period') ) combobox_list = [] for i in range(0, 100, 1): combobox_list.append("{0:.2f}".format(i * 0.01)) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_bot_period' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_bot_period'] = 0.00 combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_bot_period'], state="readonly", height=10, width=8, font=lybconstant.LYB_FONT ) combobox.set( self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_bot_period']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_repeat'] = tkinter.BooleanVar(frame) 
self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_repeat'].trace( 'w', lambda *args: self.callback_moheom_repeat(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_repeat') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_repeat' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_repeat'] = True check_box = ttk.Checkbutton( master=frame, text=self.get_option_text('반복 전투', width=10), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_repeat'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_triple'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_triple'].trace( 'w', lambda *args: self.callback_moheom_triple(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_triple') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_triple' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_triple'] = True check_box = ttk.Checkbutton( master=frame, text=self.get_option_text('3배 모험', width=10), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_triple'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_next'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_next'].trace( 'w', lambda *args: self.callback_moheom_next(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_next') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_next' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_next'] = True check_box = ttk.Checkbutton( master=frame, text='다음 지역', 
variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_next'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_use'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_use'].trace( 'w', lambda *args: self.callback_moheom_hero_use(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_use') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_use' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_use'] = True check_box = ttk.Checkbutton( master=frame, text=self.get_option_text('영웅 자동 선택 사용', width=20), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_use'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_retry'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_retry'].trace( 'w', lambda *args: self.callback_moheom_retry(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_retry') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_retry' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_retry'] = False check_box = ttk.Checkbutton( master=frame, text='다시 하기', variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_retry'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) label = ttk.Label( master=frame, text=self.get_option_text("영웅 선택 1") ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_1'] = 
tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_1'].trace( 'w', lambda *args: self.callback_moheom_hero_1(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_1') ) combobox_list = LYBBlade2.hero_list if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_1' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_1'] = \ combobox_list[0] combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_1'], state="readonly", height=10, width=8, font=lybconstant.LYB_FONT ) combobox.set( self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_1']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) label = ttk.Label( master=frame, text=self.get_option_text("영웅 선택 2") ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_2'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_2'].trace( 'w', lambda *args: self.callback_moheom_hero_2(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_2') ) combobox_list = LYBBlade2.hero_list if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_2' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_2'] = \ combobox_list[1] combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_2'], state="readonly", height=10, width=8, font=lybconstant.LYB_FONT ) combobox.set( self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_hero_2']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) 
frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) label = ttk.Label( master=frame, text=self.get_option_text("자동 방식", width=28) ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_auto'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_auto'].trace( 'w', lambda *args: self.callback_moheom_auto(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_auto') ) combobox_list = LYBBlade2.auto_combat_list if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_auto' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_auto'] = \ combobox_list[1] combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_auto'], state="readonly", height=10, width=10, font=lybconstant.LYB_FONT ) combobox.set( self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'moheom_auto']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame_label.pack(anchor=tkinter.NW, padx=5, pady=5) frame_label = ttk.LabelFrame(frame_l, text='자동분해') frame_label_inner = ttk.LabelFrame(frame_label, text='티어') frame = ttk.Frame(frame_label_inner) for i in range(len(LYBBlade2.tier_list)): self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_tier' + str(i)] = tkinter.BooleanVar( frame) if i == 0: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_tier' + '0'].trace( 'w', lambda *args: self.callback_jadong_bunhe_tier_0(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_tier' + '0') ) elif i == 1: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_tier' + '1'].trace( 'w', lambda *args: self.callback_jadong_bunhe_tier_1(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_tier' + '1') ) elif i == 2: 
self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_tier' + '2'].trace( 'w', lambda *args: self.callback_jadong_bunhe_tier_2(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_tier' + '2') ) elif i == 3: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_tier' + '3'].trace( 'w', lambda *args: self.callback_jadong_bunhe_tier_3(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_tier' + '3') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_tier' + str(i) in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_tier' + str(i)] = False check_box = ttk.Checkbutton( master=frame, text=LYBBlade2.tier_list[i], variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_tier' + str(i)], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame_label_inner.pack(anchor=tkinter.NW, padx=5, pady=5) frame_label_inner = ttk.LabelFrame(frame_label, text='등급') frame = ttk.Frame(frame_label_inner) for i in range(6): self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_rank' + str(i)] = tkinter.BooleanVar( frame) if i == 0: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_rank' + '0'].trace( 'w', lambda *args: self.callback_jadong_bunhe_rank_0(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_rank' + '0') ) elif i == 1: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_rank' + '1'].trace( 'w', lambda *args: self.callback_jadong_bunhe_rank_1(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_rank' + '1') ) elif i == 2: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_rank' + '2'].trace( 'w', lambda *args: self.callback_jadong_bunhe_rank_2(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_rank' + '2') ) elif i == 3: 
self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_rank' + '3'].trace( 'w', lambda *args: self.callback_jadong_bunhe_rank_3(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_rank' + '3') ) elif i == 4: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_rank' + '4'].trace( 'w', lambda *args: self.callback_jadong_bunhe_rank_4(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_rank' + '4') ) elif i == 5: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_rank' + '5'].trace( 'w', lambda *args: self.callback_jadong_bunhe_rank_5(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_rank' + '5') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_rank' + str(i) in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_rank' + str(i)] = False check_box = ttk.Checkbutton( master=frame, text='★' + str(i + 1), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_rank' + str(i)], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame_label_inner.pack(anchor=tkinter.NW, padx=5, pady=5) frame_label_inner = ttk.LabelFrame(frame_label, text='기타') frame = ttk.Frame(frame_label_inner) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_ganghwa'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_ganghwa'].trace( 'w', lambda *args: self.callback_jadong_bunhe_ganghwa(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_ganghwa') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_ganghwa' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_ganghwa'] = False check_box = ttk.Checkbutton( master=frame, text='강화', 
variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jadong_bunhe_ganghwa'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame_label_inner.pack(anchor=tkinter.NW, padx=5, pady=5) frame_label.pack(anchor=tkinter.NW, padx=5, pady=5) frame_l.pack(side=tkinter.LEFT, anchor=tkinter.NW) # 작업 탭 중간 frame_m = ttk.Frame(self.inner_frame_dic['work_tab_frame']) frame_label = ttk.LabelFrame(frame_m, text='일대일 대전') frame = ttk.Frame(frame_label) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_use'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_use'].trace( 'w', lambda *args: self.callback_ildeil_hero_use(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_use') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_use' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_use'] = True check_box = ttk.Checkbutton( master=frame, text=self.get_option_text('영웅 자동 선택 사용'), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_use'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) label = ttk.Label( master=frame, text=self.get_option_text("진행 횟수") ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_count'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_count'].trace( 'w', lambda *args: self.callback_ildeil_count(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_count') ) combobox_list = [] for i in range(1, 11): combobox_list.append(str(i)) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_count' in self.configure.common_config[self.game_name]: 
self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_count'] = \ combobox_list[2] combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_count'], state="readonly", height=10, width=3, font=lybconstant.LYB_FONT ) combobox.set( self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_count']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) label = ttk.Label( master=frame, text=self.get_option_text("영웅 선택 1", width=25) ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_1'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_1'].trace( 'w', lambda *args: self.callback_ildeil_hero_1(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_1') ) combobox_list = LYBBlade2.hero_list if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_1' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_1'] = \ combobox_list[0] combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_1'], state="readonly", height=10, width=8, font=lybconstant.LYB_FONT ) combobox.set( self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_1']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) label = ttk.Label( master=frame, text=self.get_option_text("영웅 선택 2", width=25) ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_2'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_2'].trace( 'w', lambda *args: 
self.callback_ildeil_hero_2(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_2') ) combobox_list = LYBBlade2.hero_list if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_2' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_2'] = \ combobox_list[1] combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_2'], state="readonly", height=10, width=8, font=lybconstant.LYB_FONT ) combobox.set( self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'ildeil_hero_2']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame_label.pack(anchor=tkinter.NW, padx=5, pady=5) frame_label = ttk.LabelFrame(frame_m, text='팀 대전') frame = ttk.Frame(frame_label) label = ttk.Label( master=frame, text=self.get_option_text("진행 횟수") ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'team_count'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'team_count'].trace( 'w', lambda *args: self.callback_team_count(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'team_count') ) combobox_list = [] for i in range(1, 11): combobox_list.append(str(i)) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'team_count' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'team_count'] = \ combobox_list[2] combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'team_count'], state="readonly", height=10, width=3, font=lybconstant.LYB_FONT ) combobox.set(self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'team_count']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) 
frame_label.pack(anchor=tkinter.NW, padx=5, pady=5) frame_label = ttk.LabelFrame(frame_m, text='점령전') frame = ttk.Frame(frame_label) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jeomryoungjeon_hero_use'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jeomryoungjeon_hero_use'].trace( 'w', lambda *args: self.callback_jeomryoungjeon_hero_use(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jeomryoungjeon_hero_use') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jeomryoungjeon_hero_use' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jeomryoungjeon_hero_use'] = False check_box = ttk.Checkbutton( master=frame, text=self.get_option_text('영웅 자동 선택 사용', width=25), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jeomryoungjeon_hero_use'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) label = ttk.Label( master=frame, text=self.get_option_text("영웅 선택", width=25) ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jeomryoungjeon_hero'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jeomryoungjeon_hero'].trace( 'w', lambda *args: self.callback_jeomryoungjeon_hero(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jeomryoungjeon_hero') ) combobox_list = LYBBlade2.hero_list if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jeomryoungjeon_hero' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jeomryoungjeon_hero'] = combobox_list[0] combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jeomryoungjeon_hero'], state="readonly", height=10, width=8, font=lybconstant.LYB_FONT ) combobox.set( 
self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jeomryoungjeon_hero']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) # frame_label.pack(anchor=tkinter.NW, padx=5, pady=5) frame_label = ttk.LabelFrame(frame_m, text='반격 던전') frame = ttk.Frame(frame_label) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_sotang'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_sotang'].trace( 'w', lambda *args: self.callback_bangyeok_dungeon_sotang(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_sotang') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_sotang' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_sotang'] = True check_box = ttk.Checkbutton( master=frame, text=self.get_option_text('소탕', width=25), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_sotang'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_hero_use'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_hero_use'].trace( 'w', lambda *args: self.callback_bangyeok_dungeon_hero_use(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_hero_use') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_hero_use' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_hero_use'] = False check_box = ttk.Checkbutton( master=frame, text=self.get_option_text('영웅 자동 선택 사용'), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_hero_use'], 
onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) label = ttk.Label( master=frame, text=self.get_option_text("영웅 선택", width=25) ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_hero'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_hero'].trace( 'w', lambda *args: self.callback_bangyeok_dungeon_hero(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_hero') ) combobox_list = LYBBlade2.hero_list if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_hero' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_hero'] = combobox_list[1] combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_hero'], state="readonly", height=10, width=8, font=lybconstant.LYB_FONT ) combobox.set(self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_hero']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) label = ttk.Label( master=frame, text=self.get_option_text("입장 레벨", width=28) ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_level'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_level'].trace( 'w', lambda *args: self.callback_bangyeok_dungeon_level(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_level') ) combobox_list = [] for i in range(0, 11): combobox_list.append(str(i)) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_level' in self.configure.common_config[ self.game_name]: 
self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_level'] = 6 combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_level'], state="readonly", height=10, width=5, font=lybconstant.LYB_FONT ) combobox.set(self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_level']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) label = ttk.Label( master=frame, text=self.get_option_text("방어 아이콘 클릭 주기(ms)", width=28) ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_response'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_response'].trace( 'w', lambda *args: self.callback_bangyeok_dungeon_response(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_response') ) combobox_list = [] for i in range(1, 2001): combobox_list.append(str(i)) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_response' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_response'] = 10 combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_response'], state="readonly", height=10, width=5, font=lybconstant.LYB_FONT ) combobox.set(self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'bangyeok_dungeon_response']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame_label.pack(anchor=tkinter.NW, padx=5, pady=5) frame_label = ttk.LabelFrame(frame_m, text='장비교체') frame = ttk.Frame(frame_label) label = ttk.Label( master=frame, text=self.get_option_text("영웅 선택", 
width=25) ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jangbi_change_set_hero'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jangbi_change_set_hero'].trace( 'w', lambda *args: self.callback_jangbi_change_set_hero(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jangbi_change_set_hero') ) combobox_list = LYBBlade2.hero_list if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jangbi_change_set_hero' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jangbi_change_set_hero'] = combobox_list[1] combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jangbi_change_set_hero'], state="readonly", height=10, width=8, font=lybconstant.LYB_FONT ) combobox.set(self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jangbi_change_set_hero']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) label = ttk.Label( master=frame, text=self.get_option_text("장비 세트 번호(0:안함)", width=28) ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jangbi_change_set'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jangbi_change_set'].trace( 'w', lambda *args: self.callback_jangbi_change_set(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jangbi_change_set') ) combobox_list = [] for i in range(0, 4): combobox_list.append(str(i)) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jangbi_change_set' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jangbi_change_set'] = 1 combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jangbi_change_set'], 
state="readonly", height=10, width=5, font=lybconstant.LYB_FONT ) combobox.set( self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'jangbi_change_set']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame_label.pack(anchor=tkinter.NW, padx=5, pady=5) frame_m.pack(side=tkinter.LEFT, anchor=tkinter.NW) # 작업 탭 우측 frame_r = ttk.Frame(self.inner_frame_dic['work_tab_frame']) frame_label = ttk.LabelFrame(frame_r, text='우편함') frame = ttk.Frame(frame_label) label = ttk.Label( master=frame, text=self.get_option_text("영웅 선택", width=27) ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'mail_select_hero'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'mail_select_hero'].trace( 'w', lambda *args: self.callback_mail_select_hero(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'mail_select_hero') ) combobox_list = LYBBlade2.hero_list if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'mail_select_hero' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'mail_select_hero'] = \ combobox_list[1] combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'mail_select_hero'], state="readonly", height=10, width=8, font=lybconstant.LYB_FONT ) combobox.set( self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'mail_select_hero']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) label = ttk.Label( master=frame, text=self.get_option_text("승급석 선택", width=19) ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'mail_select_stone'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'mail_select_stone'].trace( 'w', lambda *args: 
self.callback_mail_select_stone(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'mail_select_stone') ) combobox_list = LYBBlade2.stone_list if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'mail_select_stone' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'mail_select_stone'] = \ combobox_list[0] combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'mail_select_stone'], state="readonly", height=10, width=16, font=lybconstant.LYB_FONT ) combobox.set( self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'mail_select_stone']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame_label.pack(anchor=tkinter.NW, padx=5, pady=5) frame_label = ttk.LabelFrame(frame_r, text='레이드') frame = ttk.Frame(frame_label) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_hero_use'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_hero_use'].trace( 'w', lambda *args: self.callback_raid_hero_use(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_hero_use') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_hero_use' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_hero_use'] = False check_box = ttk.Checkbutton( master=frame, text=self.get_option_text('영웅 자동 선택 사용', width=25), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_hero_use'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) label = ttk.Label( master=frame, text=self.get_option_text("영웅 선택", width=27) ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_hero'] = tkinter.StringVar(frame) 
# --- Raid: hero-selection combobox ------------------------------------
# Writing the 'raid_hero' option variable fires the registered callback,
# which persists the new value (callback body not visible here).
self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_hero'].trace(
    'w',
    lambda *args: self.callback_raid_hero(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_hero')
)
combobox_list = LYBBlade2.hero_list
# First run: seed the persisted config with the first hero as default.
if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_hero' in self.configure.common_config[self.game_name]:
    self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_hero'] = \
        combobox_list[0]
combobox = ttk.Combobox(
    master=frame, values=combobox_list,
    textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_hero'],
    state="readonly", height=10, width=8, font=lybconstant.LYB_FONT
)
# Show the persisted value as the current selection.
combobox.set(self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_hero'])
combobox.pack(anchor=tkinter.W, side=tkinter.LEFT)
frame.pack(anchor=tkinter.NW)

# --- Raid: combat time-limit combobox (label text: seconds, 0 = none) --
frame = ttk.Frame(frame_label)
label = ttk.Label(
    master=frame,
    text=self.get_option_text("전투 제한 시간(초)(0:없음)")
)
label.pack(side=tkinter.LEFT)
self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_combat_limit'] = tkinter.StringVar(frame)
self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_combat_limit'].trace(
    'w',
    lambda *args: self.callback_raid_combat_limit(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_combat_limit')
)
# Selectable values: "0", "10", "20", ..., "3600" (10-second steps).
combobox_list = []
for i in range(0, 3601, 10):
    combobox_list.append(str(i))
# NOTE(review): the default stored here is the int 0 while the combobox
# values are strings — sibling options store strings; confirm consumers
# tolerate both types before normalizing.
if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_combat_limit' in self.configure.common_config[
        self.game_name]:
    self.configure.common_config[self.game_name][
        lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_combat_limit'] = 0
combobox = ttk.Combobox(
    master=frame, values=combobox_list,
    textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_combat_limit'],
    state="readonly", height=10, width=5, font=lybconstant.LYB_FONT
)
combobox.set(
    self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_combat_limit'])
combobox.pack(anchor=tkinter.W, side=tkinter.LEFT)
frame.pack(anchor=tkinter.NW)

# --- Raid sub-frame for the first boss (frame title: '난폭한 하랑') ----
frame_label_inner = ttk.LabelFrame(frame_label, text='난폭한 하랑')
frame = ttk.Frame(frame_label_inner)
# Checkbutton: create the raid room ourselves ('raid_make_room_0').
self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_make_room_0'] = tkinter.BooleanVar(frame)
self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_make_room_0'].trace(
    'w',
    lambda *args: self.callback_raid_make_room_0(args,
                                                 lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_make_room_0')
)
# First run: default to not making a room.
if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_make_room_0' in self.configure.common_config[
        self.game_name]:
    self.configure.common_config[self.game_name][
        lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_make_room_0'] = False
check_box = ttk.Checkbutton(
    master=frame,
    text=self.get_option_text('방 만들기', width=15),
    variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_make_room_0'],
    onvalue=True, offvalue=False
)
check_box.pack(anchor=tkinter.W, side=tkinter.LEFT)
# Checkbutton: replay with the same team members ('raid_team_0').
self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_team_0'] = tkinter.BooleanVar(frame)
self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_team_0'].trace(
    'w',
    lambda *args: self.callback_raid_team_0(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_team_0')
)
if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_team_0' in self.configure.common_config[self.game_name]:
    self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_team_0'] = False
check_box = ttk.Checkbutton(
    master=frame,
    text=self.get_option_text('팀원과 다시하기', width=19),
    variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_team_0'],
    onvalue=True, offvalue=False
)
check_box.pack(anchor=tkinter.W, side=tkinter.LEFT)
frame.pack(anchor=tkinter.NW)

# Entry-level row for this raid (label text: '입장 레벨').
frame = ttk.Frame(frame_label_inner)
label = ttk.Label(
    master=frame,
    text=self.get_option_text("입장 레벨", width=28)
)
label.pack(side=tkinter.LEFT)
self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_level'] = tkinter.StringVar(frame)
self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_level'].trace( 'w', lambda *args: self.callback_raid_level(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_level') ) combobox_list = [] for i in range(0, 11): combobox_list.append(str(i)) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_level' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_level'] = 3 combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_level'], state="readonly", height=10, width=5, font=lybconstant.LYB_FONT ) combobox.set(self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_level']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame_label_inner.pack(anchor=tkinter.NW, padx=5, pady=5) frame_label_inner = ttk.LabelFrame(frame_label, text='사르곤') frame = ttk.Frame(frame_label_inner) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_make_room_1'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_make_room_1'].trace( 'w', lambda *args: self.callback_raid_make_room_1(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_make_room_1') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_make_room_1' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_make_room_1'] = False check_box = ttk.Checkbutton( master=frame, text=self.get_option_text('방 만들기', width=15), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_make_room_1'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_team_1'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_team_1'].trace( 
'w', lambda *args: self.callback_raid_team_1(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_team_1') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_team_1' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_team_1'] = False check_box = ttk.Checkbutton( master=frame, text=self.get_option_text('팀원과 다시하기', width=19), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_team_1'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label_inner) label = ttk.Label( master=frame, text=self.get_option_text("입장 레벨", width=28) ) label.pack(side=tkinter.LEFT) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_level_1'] = tkinter.StringVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_level_1'].trace( 'w', lambda *args: self.callback_raid_level_1(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_level_1') ) combobox_list = [] for i in range(0, 11): combobox_list.append(str(i)) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_level_1' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_level_1'] = 3 combobox = ttk.Combobox( master=frame, values=combobox_list, textvariable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_level_1'], state="readonly", height=10, width=5, font=lybconstant.LYB_FONT ) combobox.set( self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'raid_level_1']) combobox.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame_label_inner.pack(anchor=tkinter.NW, padx=5, pady=5) frame_label.pack(anchor=tkinter.NW, padx=5, pady=5) frame_r.pack(side=tkinter.LEFT, anchor=tkinter.NW) # 탭 좌측 frame_l = ttk.Frame(self.inner_frame_dic['work_2_tab_frame']) frame_label = 
ttk.LabelFrame(frame_l, text='모두팔기') frame = ttk.Frame(frame_label) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_levelup'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_levelup'].trace( 'w', lambda *args: self.callback_sell_levelup(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_levelup') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_levelup' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_levelup'] = False check_box = ttk.Checkbutton( master=frame, text=self.get_option_text('모두팔기 대신 재료 레벨업하기'), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_levelup'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_bunhe'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_bunhe'].trace( 'w', lambda *args: self.callback_sell_bunhe(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_bunhe') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_bunhe' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_bunhe'] = False check_box = ttk.Checkbutton( master=frame, text=self.get_option_text('모두팔기 대신 자동 분해하기'), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_bunhe'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame_label_inner = ttk.LabelFrame(frame_label, text='작업을 수행할 영웅 선택') frame = ttk.Frame(frame_label_inner) for i in range(len(LYBBlade2.hero_list)): self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_hero' + str(i)] = tkinter.BooleanVar( frame) if i == 0: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 
'sell_item_hero' + '0'].trace( 'w', lambda *args: self.callback_sell_item_hero_0(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_hero' + '0') ) elif i == 1: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_hero' + '1'].trace( 'w', lambda *args: self.callback_sell_item_hero_1(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_hero' + '1') ) elif i == 2: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_hero' + '2'].trace( 'w', lambda *args: self.callback_sell_item_hero_2(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_hero' + '2') ) elif i == 3: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_hero' + '3'].trace( 'w', lambda *args: self.callback_sell_item_hero_3(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_hero' + '3') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_hero' + str(i) in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_hero' + str(i)] = True check_box = ttk.Checkbutton( master=frame, text=LYBBlade2.hero_list[i], variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_hero' + str(i)], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame_label_inner.pack(anchor=tkinter.NW, padx=5, pady=5) frame = ttk.Frame(frame_label) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock'].trace( 'w', lambda *args: self.callback_sell_holy_lock(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock' in self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock'] = False s = ttk.Style() 
s.configure('red_label.TCheckbutton', foreground='red') check_box = ttk.Checkbutton( master=frame, text=self.get_option_text('[신성한 빛] 아이템 잠금하기'), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock'], onvalue=True, offvalue=False, style='red_label.TCheckbutton' ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) # frame.pack(anchor=tkinter.NW) frame_label_inner = ttk.LabelFrame(frame_label, text='잠금할 [신성한 빛] 아이템 등급') frame = ttk.Frame(frame_label_inner) for i in range(6): self.option_dic[ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock_rank' + str(i)] = tkinter.BooleanVar(frame) if i == 0: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock_rank' + '0'].trace( 'w', lambda *args: self.callback_sell_holy_lock_rank_0(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock_rank' + '0') ) elif i == 1: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock_rank' + '1'].trace( 'w', lambda *args: self.callback_sell_holy_lock_rank_1(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock_rank' + '1') ) elif i == 2: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock_rank' + '2'].trace( 'w', lambda *args: self.callback_sell_holy_lock_rank_2(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock_rank' + '2') ) elif i == 3: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock_rank' + '3'].trace( 'w', lambda *args: self.callback_sell_holy_lock_rank_3(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock_rank' + '3') ) elif i == 4: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock_rank' + '4'].trace( 'w', lambda *args: self.callback_sell_holy_lock_rank_4(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock_rank' + '4') ) elif i == 5: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock_rank' + '5'].trace( 'w', lambda *args: self.callback_sell_holy_lock_rank_5(args, 
lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock_rank' + '5') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock_rank' + str(i) in \ self.configure.common_config[self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock_rank' + str(i)] = True check_box = ttk.Checkbutton( master=frame, text='★' + str(i + 1), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_holy_lock_rank' + str(i)], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) # frame_label_inner.pack(anchor=tkinter.NW, padx=5, pady=5) frame_label_inner = ttk.LabelFrame(frame_label, text='분류') frame = ttk.Frame(frame_label_inner) for i in range(len(LYBBlade2.item_equip_list)): self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_equip' + str(i)] = tkinter.BooleanVar( frame) if i == 0: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_equip' + '0'].trace( 'w', lambda *args: self.callback_sell_item_equip_0(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_equip' + '0') ) elif i == 1: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_equip' + '1'].trace( 'w', lambda *args: self.callback_sell_item_equip_1(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_equip' + '1') ) elif i == 2: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_equip' + '2'].trace( 'w', lambda *args: self.callback_sell_item_equip_2(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_equip' + '2') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_equip' + str(i) in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_equip' + str(i)] = True check_box = ttk.Checkbutton( master=frame, text=LYBBlade2.item_equip_list[i], variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_equip' + 
str(i)], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame_label_inner.pack(anchor=tkinter.NW, padx=5, pady=5) frame_label_inner = ttk.LabelFrame(frame_label, text='등급') frame = ttk.Frame(frame_label_inner) for i in range(7): self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_rank' + str(i)] = tkinter.BooleanVar( frame) if i == 0: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_rank' + '0'].trace( 'w', lambda *args: self.callback_sell_sell_item_rank_0(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_rank' + '0') ) elif i == 1: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_rank' + '1'].trace( 'w', lambda *args: self.callback_sell_sell_item_rank_1(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_rank' + '1') ) elif i == 2: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_rank' + '2'].trace( 'w', lambda *args: self.callback_sell_sell_item_rank_2(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_rank' + '2') ) elif i == 3: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_rank' + '3'].trace( 'w', lambda *args: self.callback_sell_sell_item_rank_3(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_rank' + '3') ) elif i == 4: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_rank' + '4'].trace( 'w', lambda *args: self.callback_sell_sell_item_rank_4(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_rank' + '4') ) elif i == 5: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_rank' + '5'].trace( 'w', lambda *args: self.callback_sell_sell_item_rank_5(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_rank' + '5') ) elif i == 6: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_rank' + '6'].trace( 'w', lambda *args: self.callback_sell_sell_item_rank_6(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_rank' + '6') ) if 
not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_rank' + str(i) in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_rank' + str(i)] = False check_box = ttk.Checkbutton( master=frame, text='★' + str(i + 1), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_rank' + str(i)], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame_label_inner.pack(anchor=tkinter.NW, padx=5, pady=5) frame_label_inner = ttk.LabelFrame(frame_label, text='상태') frame = ttk.Frame(frame_label_inner) for i in range(len(LYBBlade2.item_status_list)): self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_status' + str(i)] = tkinter.BooleanVar( frame) if i == 0: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_status' + '0'].trace( 'w', lambda *args: self.callback_sell_item_status_0(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_status' + '0') ) elif i == 1: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_status' + '1'].trace( 'w', lambda *args: self.callback_sell_item_status_1(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_status' + '1') ) elif i == 2: self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_status' + '2'].trace( 'w', lambda *args: self.callback_sell_item_status_2(args, lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_status' + '2') ) if not lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_status' + str(i) in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_status' + str(i)] = True check_box = ttk.Checkbutton( master=frame, text=LYBBlade2.item_status_list[i], variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_WORK + 'sell_item_status' + str(i)], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, 
side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame_label_inner.pack(anchor=tkinter.NW, padx=5, pady=5) frame_label.pack(anchor=tkinter.NW, padx=5, pady=5) frame_l.pack(side=tkinter.LEFT, anchor=tkinter.NW) # 탭 중간 frame_m = ttk.Frame(self.inner_frame_dic['work_2_tab_frame']) frame_m.pack(side=tkinter.LEFT, anchor=tkinter.NW) # 탭 우측 frame_r = ttk.Frame(self.inner_frame_dic['work_2_tab_frame']) frame_r.pack(side=tkinter.LEFT, anchor=tkinter.NW) # 알림 탭 좌 frame_l = ttk.Frame(self.inner_frame_dic['notify_tab_frame']) frame_label = ttk.Frame(frame_l) frame = ttk.Frame(frame_label) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_NOTIFY + 'jeontoo_defeat'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_NOTIFY + 'jeontoo_defeat'].trace( 'w', lambda *args: self.callback_notify_jeontoo_defeat(args, lybconstant.LYB_DO_STRING_BLADE2_NOTIFY + 'jeontoo_defeat') ) if not lybconstant.LYB_DO_STRING_BLADE2_NOTIFY + 'jeontoo_defeat' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_NOTIFY + 'jeontoo_defeat'] = True check_box = ttk.Checkbutton( master=frame, text=self.get_option_text('전투패배'), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_NOTIFY + 'jeontoo_defeat'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame = ttk.Frame(frame_label) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_NOTIFY + 'holy_item_lock'] = tkinter.BooleanVar(frame) self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_NOTIFY + 'holy_item_lock'].trace( 'w', lambda *args: self.callback_notify_holy_item_lock(args, lybconstant.LYB_DO_STRING_BLADE2_NOTIFY + 'holy_item_lock') ) if not lybconstant.LYB_DO_STRING_BLADE2_NOTIFY + 'holy_item_lock' in self.configure.common_config[ self.game_name]: self.configure.common_config[self.game_name][ lybconstant.LYB_DO_STRING_BLADE2_NOTIFY + 'holy_item_lock'] = True check_box = ttk.Checkbutton( 
master=frame, text=self.get_option_text('신성한 빛 아이템 잠금'), variable=self.option_dic[lybconstant.LYB_DO_STRING_BLADE2_NOTIFY + 'holy_item_lock'], onvalue=True, offvalue=False ) check_box.pack(anchor=tkinter.W, side=tkinter.LEFT) frame.pack(anchor=tkinter.NW) frame_label.pack(anchor=tkinter.NW, padx=5, pady=5) frame_l.pack(side=tkinter.LEFT, anchor=tkinter.NW) # 알림 탭 중 frame_m = ttk.Frame(self.inner_frame_dic['notify_tab_frame']) frame_m.pack(side=tkinter.LEFT, anchor=tkinter.NW) # 알림 탭 우 frame_r = ttk.Frame(self.inner_frame_dic['notify_tab_frame']) frame_r.pack(side=tkinter.LEFT, anchor=tkinter.NW) # ------ self.option_dic['option_note'].pack(anchor=tkinter.NW, fill=tkinter.BOTH, expand=True) self.inner_frame_dic['options'].pack(anchor=tkinter.NW, fill=tkinter.BOTH, expand=True) self.set_game_option() def callback_jadong_bunhe_ganghwa(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_jadong_bunhe_rank_5(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_jadong_bunhe_rank_4(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_jadong_bunhe_rank_3(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_jadong_bunhe_rank_2(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_jadong_bunhe_rank_1(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_jadong_bunhe_rank_0(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_jadong_bunhe_tier_3(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_jadong_bunhe_tier_2(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def 
callback_jadong_bunhe_tier_1(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_jadong_bunhe_tier_0(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_mail_select_stone(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_mail_select_hero(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_notify_holy_item_lock(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_notify_jeontoo_defeat(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_levelup(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_bunhe(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_item_hero_0(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_item_hero_1(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_item_hero_2(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_item_hero_3(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_holy_lock(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_holy_lock_rank_0(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_holy_lock_rank_1(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_holy_lock_rank_2(self, args, option_name): self.set_game_config(option_name, 
self.option_dic[option_name].get()) def callback_sell_holy_lock_rank_3(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_holy_lock_rank_4(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_holy_lock_rank_5(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_jangbi_change_set_hero(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_jangbi_change_set(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_bangyeok_dungeon_response(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_bangyeok_dungeon_hero_use(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_bangyeok_dungeon_sotang(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_bangyeok_dungeon_hero(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_bangyeok_dungeon_level(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_raid_team_0(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_raid_make_room_0(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_raid_team_1(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_raid_make_room_1(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_raid_hero_use(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_raid_hero(self, 
args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_raid_combat_limit(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_raid_level_1(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_raid_level(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_item_status_0(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_item_status_1(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_item_status_2(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_sell_item_rank_0(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_sell_item_rank_1(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_sell_item_rank_2(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_sell_item_rank_3(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_sell_item_rank_4(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_sell_item_rank_5(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_sell_item_rank_6(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_item_equip_0(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_sell_item_equip_1(self, args, option_name): self.set_game_config(option_name, 
self.option_dic[option_name].get()) def callback_sell_item_equip_2(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_jeomryoungjeon_hero_use(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_jeomryoungjeon_hero(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_team_count(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_ildeil_count(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_ildeil_hero_use(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_ildeil_hero_1(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_ildeil_hero_2(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_ilil_quest(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_ilil_quest_continue(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_jeomryoungjeon_notice(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_raid_notice(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_moheom_tag_period(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_moheom_next(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_moheom_triple(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_moheom_limit_count(self, args, option_name): 
self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_moheom_bot_period(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_moheom_level(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_moheom_act(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_moheom_stage(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_moheom_repeat(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_moheom_retry(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_moheom_hero_use(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_moheom_hero_1(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_moheom_hero_2(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_moheom_auto(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_main_quest_stringvar(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get()) def callback_main_quest_each_stringvar(self, args, option_name): self.set_game_config(option_name, self.option_dic[option_name].get())
46.176957
143
0.631437
13,180
106,207
4.722989
0.030349
0.099182
0.106154
0.145962
0.957236
0.949525
0.948176
0.932738
0.901203
0.858279
0
0.014716
0.269351
106,207
2,299
144
46.197042
0.78732
0.013351
0
0.477225
0
0
0.078388
0.013224
0
0
0
0
0
1
0.050318
false
0
0.004767
0
0.066737
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
15d180f8a461d3e19ceedfbc5c8e3b73efe7bda6
47
py
Python
crystalgraph/__init__.py
qai222/CrystalFiniteGraph
9908da936325a87a81c82773984b54deb26688b7
[ "MIT" ]
4
2021-08-23T12:19:08.000Z
2021-09-08T18:09:18.000Z
crystalgraph/__init__.py
qai222/CrystalGraph
9908da936325a87a81c82773984b54deb26688b7
[ "MIT" ]
null
null
null
crystalgraph/__init__.py
qai222/CrystalGraph
9908da936325a87a81c82773984b54deb26688b7
[ "MIT" ]
null
null
null
from crystalgraph.qgraph import LQG, UQG, LQGeq
47
47
0.829787
7
47
5.571429
1
0
0
0
0
0
0
0
0
0
0
0
0.106383
47
1
47
47
0.928571
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
d68255e827d608cd2ed5a702515c344f9668b306
145
py
Python
Fund/downloader.py
livi2000/FundSpider
c79407241fe189b61afc54dd2e5b73c906aae0b5
[ "MIT" ]
3
2016-08-17T05:46:34.000Z
2018-02-06T22:02:42.000Z
Fund/downloader.py
livi2000/FundSpider
c79407241fe189b61afc54dd2e5b73c906aae0b5
[ "MIT" ]
null
null
null
Fund/downloader.py
livi2000/FundSpider
c79407241fe189b61afc54dd2e5b73c906aae0b5
[ "MIT" ]
1
2018-06-26T11:03:36.000Z
2018-06-26T11:03:36.000Z
# -*- coding: utf-8 -*- from spider_base import SBDownloader # 目前看来下载器是比较泛用的,毕竟只是单纯的下载静态网页,没有什么动态处理 class FundDownloader(SBDownloader): pass
24.166667
38
0.772414
15
145
7.4
0.933333
0
0
0
0
0
0
0
0
0
0
0.007874
0.124138
145
6
39
24.166667
0.866142
0.4
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
d6d3c6992f31b3e028aff4b0157c91a60d1b464e
118
py
Python
learn2learn/vision/__init__.py
joemzhao/learn2learn
e161e0a9e0de513d64315c4ceaf2d8608e4cef4d
[ "MIT" ]
null
null
null
learn2learn/vision/__init__.py
joemzhao/learn2learn
e161e0a9e0de513d64315c4ceaf2d8608e4cef4d
[ "MIT" ]
null
null
null
learn2learn/vision/__init__.py
joemzhao/learn2learn
e161e0a9e0de513d64315c4ceaf2d8608e4cef4d
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 from . import datasets from . import models from . import transforms from . import benchmarks
16.857143
24
0.762712
16
118
5.625
0.625
0.444444
0
0
0
0
0
0
0
0
0
0.010101
0.161017
118
6
25
19.666667
0.89899
0.177966
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
d6e414b37b37652f5d12a00e26f9c9cdec0eed76
275
py
Python
Metrics/code/hinge.py
TannerGilbert/Machine-Learning-Explained
5309f44a38ce862f3f177e8d5de2e60eea44637b
[ "MIT" ]
24
2020-09-14T18:55:13.000Z
2022-03-22T22:14:30.000Z
Metrics/code/hinge.py
TannerGilbert/Machine-Learning-Explained
5309f44a38ce862f3f177e8d5de2e60eea44637b
[ "MIT" ]
null
null
null
Metrics/code/hinge.py
TannerGilbert/Machine-Learning-Explained
5309f44a38ce862f3f177e8d5de2e60eea44637b
[ "MIT" ]
6
2021-02-06T15:34:27.000Z
2022-01-31T23:16:07.000Z
import numpy as np class Hinge: def __call__(self, y: np.ndarray, y_pred: np.ndarray) -> np.float64: return self.loss(y, y_pred) def loss(self, y: np.ndarray, y_pred: np.ndarray) -> np.float64: return np.sum(np.maximum(0, 1 - y * y_pred)) / len(y)
27.5
72
0.629091
48
275
3.4375
0.416667
0.218182
0.084848
0.169697
0.521212
0.521212
0.521212
0.521212
0.521212
0.521212
0
0.028037
0.221818
275
9
73
30.555556
0.742991
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.166667
0.333333
1
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
6
ba3b53bcc705b0d4f1b36fa4b96a6eb60938082b
43
py
Python
basic/comments/admin.py
neechadi/django-basic-apps
3a90090857549ea4198a72c44f45f6edb238e2a8
[ "BSD-3-Clause" ]
548
2015-01-02T21:41:29.000Z
2022-03-23T09:10:04.000Z
basic/comments/admin.py
neechadi/django-basic-apps
3a90090857549ea4198a72c44f45f6edb238e2a8
[ "BSD-3-Clause" ]
4
2015-01-13T16:27:02.000Z
2016-11-01T01:51:31.000Z
basic/comments/admin.py
neechadi/django-basic-apps
3a90090857549ea4198a72c44f45f6edb238e2a8
[ "BSD-3-Clause" ]
182
2015-01-02T21:41:29.000Z
2021-08-09T07:01:07.000Z
from django.contrib.comments.admin import *
43
43
0.837209
6
43
6
1
0
0
0
0
0
0
0
0
0
0
0
0.069767
43
1
43
43
0.9
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
ba68a0a8d3e245e5c5a3ac3b7da445a0c374b2ea
353
py
Python
agents/random_agent.py
floringogianu/categorical-dqn
eb939785e0e2eea60bbd67abeaedf4a9990fb5ce
[ "MIT" ]
111
2017-07-27T13:19:21.000Z
2022-01-15T17:52:55.000Z
agents/random_agent.py
floringogianu/categorical-dqn
eb939785e0e2eea60bbd67abeaedf4a9990fb5ce
[ "MIT" ]
3
2017-12-05T07:18:23.000Z
2018-04-30T00:03:36.000Z
agents/random_agent.py
floringogianu/categorical-dqn
eb939785e0e2eea60bbd67abeaedf4a9990fb5ce
[ "MIT" ]
12
2017-07-31T13:46:25.000Z
2021-08-23T04:03:19.000Z
from .base_agent import BaseAgent class RandomAgent(BaseAgent): def __init__(self, action_space, cmdl): BaseAgent.__init__(self, action_space) self.name = "RND_agent" def evaluate_policy(self, state): return self.action_space.sample() def improve_policy(self, _state, _action, reward, state, done): pass
23.533333
67
0.688385
43
353
5.255814
0.55814
0.132743
0.199115
0.168142
0
0
0
0
0
0
0
0
0.21813
353
14
68
25.214286
0.818841
0
0
0
0
0
0.025496
0
0
0
0
0
0
1
0.333333
false
0.111111
0.111111
0.111111
0.666667
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
6
baa0c7cbab034dda58a144fb3fae0adcb1c3b72f
19,698
py
Python
tests/conftest.py
zreichert/zigzag
a729d33257bcb7c40e7fe8d08bbfb1dfffa25329
[ "Apache-2.0" ]
null
null
null
tests/conftest.py
zreichert/zigzag
a729d33257bcb7c40e7fe8d08bbfb1dfffa25329
[ "Apache-2.0" ]
null
null
null
tests/conftest.py
zreichert/zigzag
a729d33257bcb7c40e7fe8d08bbfb1dfffa25329
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- # ====================================================================================================================== # Imports # ====================================================================================================================== import pytest pytest_plugins = ['helpers_namespace'] # ====================================================================================================================== # Globals # ====================================================================================================================== DEFAULT_GLOBAL_PROPERTIES = \ """ <properties> <property name="BUILD_URL" value="BUILD_URL"/> <property name="BUILD_NUMBER" value="BUILD_NUMBER"/> <property name="RE_JOB_ACTION" value="RE_JOB_ACTION"/> <property name="RE_JOB_IMAGE" value="RE_JOB_IMAGE"/> <property name="RE_JOB_SCENARIO" value="RE_JOB_SCENARIO"/> <property name="RE_JOB_BRANCH" value="RE_JOB_BRANCH"/> <property name="RPC_RELEASE" value="RPC_RELEASE"/> <property name="RPC_PRODUCT_RELEASE" value="RPC_PRODUCT_RELEASE"/> <property name="OS_ARTIFACT_SHA" value="OS_ARTIFACT_SHA"/> <property name="PYTHON_ARTIFACT_SHA" value="PYTHON_ARTIFACT_SHA"/> <property name="APT_ARTIFACT_SHA" value="APT_ARTIFACT_SHA"/> <property name="REPO_URL" value="REPO_URL"/> <property name="JOB_NAME" value="JOB_NAME"/> <property name="MOLECULE_TEST_REPO" value="MOLECULE_TEST_REPO"/> <property name="MOLECULE_SCENARIO_NAME" value="MOLECULE_SCENARIO_NAME"/> </properties> """ DEFAULT_TESTCASE_PROPERTIES = \ """ <properties> <property name="test_id" value="1"/> <property name="start_time" value="2018-04-10T21:38:18Z"/> <property name="end_time" value="2018-04-10T21:38:19Z"/> </properties> """ # ====================================================================================================================== # Helpers # ====================================================================================================================== # noinspection 
PyUnresolvedReferences @pytest.helpers.register def merge_dicts(*args): """Given any number of dicts, shallow copy and merge into a new dict, precedence goes to key value pairs in latter dicts. Args: *args (list(dict)): A list of dictionaries to be merged. Returns: dict: A merged dictionary. """ result = {} for dictionary in args: result.update(dictionary) return result # ====================================================================================================================== # Fixtures # ====================================================================================================================== @pytest.fixture(scope='session') def single_passing_xml(tmpdir_factory): """JUnitXML sample representing a single passing test.""" filename = tmpdir_factory.mktemp('data').join('single_passing.xml').strpath junit_xml = \ """<?xml version="1.0" encoding="utf-8"?> <testsuite errors="0" failures="0" name="pytest" skips="0" tests="5" time="1.664"> {global_properties} <testcase classname="tests.test_default" file="tests/test_default.py" line="8" name="test_pass[ansible://localhost]" time="0.00372695922852"> {testcase_properties} </testcase> </testsuite> """.format(global_properties=DEFAULT_GLOBAL_PROPERTIES, testcase_properties=DEFAULT_TESTCASE_PROPERTIES) with open(filename, 'w') as f: f.write(junit_xml) return filename @pytest.fixture(scope='session') def single_fail_xml(tmpdir_factory): """JUnitXML sample representing a single failing test.""" filename = tmpdir_factory.mktemp('data').join('single_fail.xml').strpath junit_xml = \ """<?xml version="1.0" encoding="utf-8"?> <testsuite errors="0" failures="0" name="pytest" skips="0" tests="5" time="1.664"> {global_properties} <testcase classname="tests.test_default" file="tests/test_default.py" line="16" name="test_fail[ansible://localhost]" time="0.00335693359375"> {testcase_properties} <failure message="assert False">host = &lt;testinfra.host.Host object at 0x7f0921d98cd0&gt; def test_fail(host): &gt; 
assert False E assert False tests/test_default.py:18: AssertionError</failure> </testcase> </testsuite> """.format(global_properties=DEFAULT_GLOBAL_PROPERTIES, testcase_properties=DEFAULT_TESTCASE_PROPERTIES) with open(filename, 'w') as f: f.write(junit_xml) return filename @pytest.fixture(scope='session') def single_error_xml(tmpdir_factory): """JUnitXML sample representing a single erroring test.""" filename = tmpdir_factory.mktemp('data').join('single_error.xml').strpath junit_xml = \ """<?xml version="1.0" encoding="utf-8"?> <testsuite errors="0" failures="0" name="pytest" skips="0" tests="5" time="1.664"> {global_properties} <testcase classname="tests.test_default" file="tests/test_default.py" line="20" name="test_error[ansible://localhost]" time="0.00208067893982"> {testcase_properties} <error message="test setup failure">host = &lt;testinfra.host.Host object at 0x7f0921d98cd0&gt; @pytest.fixture def error_fixture(host): &gt; raise RuntimeError(&apos;oops&apos;) E RuntimeError: oops tests/test_default.py:10: RuntimeError</error> </testcase> </testsuite> """.format(global_properties=DEFAULT_GLOBAL_PROPERTIES, testcase_properties=DEFAULT_TESTCASE_PROPERTIES) with open(filename, 'w') as f: f.write(junit_xml) return filename @pytest.fixture(scope='session') def single_skip_xml(tmpdir_factory): """JUnitXML sample representing a single skipping test.""" filename = tmpdir_factory.mktemp('data').join('single_skip.xml').strpath junit_xml = \ """<?xml version="1.0" encoding="utf-8"?> <testsuite errors="0" failures="0" name="pytest" skips="0" tests="5" time="1.664"> {global_properties} <testcase classname="tests.test_default" file="tests/test_default.py" line="24" name="test_skip[ansible://localhost]" time="0.00197100639343"> {testcase_properties} <skipped message="unconditional skip" type="pytest.skip"> tests/test_default.py:24: &lt;py._xmlgen.raw object at 0x7f0921ff4d50&gt; </skipped> </testcase> </testsuite> """.format(global_properties=DEFAULT_GLOBAL_PROPERTIES, 
testcase_properties=DEFAULT_TESTCASE_PROPERTIES) with open(filename, 'w') as f: f.write(junit_xml) return filename @pytest.fixture(scope='session') def flat_all_passing_xml(tmpdir_factory): """JUnitXML sample representing multiple passing test cases.""" filename = tmpdir_factory.mktemp('data').join('flat_all_passing.xml').strpath junit_xml = \ """<?xml version="1.0" encoding="utf-8"?> <testsuite errors="0" failures="0" name="pytest" skips="0" tests="5" time="1.664"> {global_properties} <testcase classname="tests.test_default" file="tests/test_default.py" line="8" name="test_pass1[ansible://localhost]" time="0.00372695922852"> {testcase_properties} </testcase> <testcase classname="tests.test_default" file="tests/test_default.py" line="12" name="test_pass2[ansible://localhost]" time="0.00341415405273"> {testcase_properties} </testcase> <testcase classname="tests.test_default" file="tests/test_default.py" line="15" name="test_pass3[ansible://localhost]" time="0.00363945960999"> {testcase_properties} </testcase> <testcase classname="tests.test_default" file="tests/test_default.py" line="18" name="test_pass4[ansible://localhost]" time="0.00314617156982"> {testcase_properties} </testcase> <testcase classname="tests.test_default" file="tests/test_default.py" line="21" name="test_pass5[ansible://localhost]" time="0.00332307815552"> {testcase_properties} </testcase> </testsuite> """.format(global_properties=DEFAULT_GLOBAL_PROPERTIES, testcase_properties=DEFAULT_TESTCASE_PROPERTIES) with open(filename, 'w') as f: f.write(junit_xml) return filename @pytest.fixture(scope='session') def suite_all_passing_xml(tmpdir_factory): """JUnitXML sample representing multiple passing test cases in a test suite. 
(Tests within a Python class)""" filename = tmpdir_factory.mktemp('data').join('suite_all_passing.xml').strpath junit_xml = \ """<?xml version="1.0" encoding="utf-8"?> <testsuite errors="0" failures="0" name="pytest" skips="0" tests="5" time="1.664"> {global_properties} <testcase classname="tests.test_default.TestSuite" file="tests/test_default.py" line="8" name="test_pass1[ansible://localhost]" time="0.00372695922852"> {testcase_properties} </testcase> <testcase classname="tests.test_default.TestSuite" file="tests/test_default.py" line="12" name="test_pass2[ansible://localhost]" time="0.00341415405273"> {testcase_properties} </testcase> <testcase classname="tests.test_default.TestSuite" file="tests/test_default.py" line="15" name="test_pass3[ansible://localhost]" time="0.00363945960999"> {testcase_properties} </testcase> <testcase classname="tests.test_default.TestSuite" file="tests/test_default.py" line="18" name="test_pass4[ansible://localhost]" time="0.00314617156982"> {testcase_properties} </testcase> <testcase classname="tests.test_default.TestSuite" file="tests/test_default.py" line="21" name="test_pass5[ansible://localhost]" time="0.00332307815552"> {testcase_properties} </testcase> </testsuite> """.format(global_properties=DEFAULT_GLOBAL_PROPERTIES, testcase_properties=DEFAULT_TESTCASE_PROPERTIES) with open(filename, 'w') as f: f.write(junit_xml) return filename @pytest.fixture(scope='session') def flat_mix_status_xml(tmpdir_factory): """JUnitXML sample representing mixed status for multiple test cases.""" filename = tmpdir_factory.mktemp('data').join('flat_mix_status.xml').strpath junit_xml = \ """<?xml version="1.0" encoding="utf-8"?> <testsuite errors="1" failures="1" name="pytest" skips="1" tests="4" time="1.901"> {global_properties} <testcase classname="tests.test_default" file="tests/test_default.py" line="12" name="test_pass[ansible://localhost]" time="0.0034921169281"> {testcase_properties} </testcase> <testcase classname="tests.test_default" 
file="tests/test_default.py" line="16" name="test_fail[ansible://localhost]" time="0.00335693359375"> {testcase_properties} <failure message="assert False">host = &lt;testinfra.host.Host object at 0x7f0921d98cd0&gt; def test_fail(host): &gt; assert False E assert False tests/test_default.py:18: AssertionError</failure> </testcase> <testcase classname="tests.test_default" file="tests/test_default.py" line="20" name="test_error[ansible://localhost]" time="0.00208067893982"> {testcase_properties} <error message="test setup failure">host = &lt;testinfra.host.Host object at 0x7f0921d98cd0&gt; @pytest.fixture def error_fixture(host): &gt; raise RuntimeError(&apos;oops&apos;) E RuntimeError: oops tests/test_default.py:10: RuntimeError</error> </testcase> <testcase classname="tests.test_default" file="tests/test_default.py" line="24" name="test_skip[ansible://localhost]" time="0.00197100639343"> {testcase_properties} <skipped message="unconditional skip" type="pytest.skip"> tests/test_default.py:24: &lt;py._xmlgen.raw object at 0x7f0921ff4d50&gt; </skipped> </testcase> </testsuite> """.format(global_properties=DEFAULT_GLOBAL_PROPERTIES, testcase_properties=DEFAULT_TESTCASE_PROPERTIES) with open(filename, 'w') as f: f.write(junit_xml) return filename @pytest.fixture(scope='session') def bad_xml(tmpdir_factory): """JUnitXML sample representing invalid XML.""" filename = tmpdir_factory.mktemp('data').join('bad.xml').strpath junit_xml = "Totally Bogus Content" with open(filename, 'w') as f: f.write(junit_xml) return filename @pytest.fixture(scope='session') def bad_junit_root(tmpdir_factory): """JUnitXML sample representing XML that is missing all relevant content.""" filename = tmpdir_factory.mktemp('data').join('bad_junit_root.xml').strpath junit_xml = \ """<?xml version="1.0" encoding="utf-8"?> <bad> </bad> """ with open(filename, 'w') as f: f.write(junit_xml) return filename @pytest.fixture(scope='session') def missing_testcase_properties_xml(tmpdir_factory): """JUnitXML 
sample representing a test case that is missing the test case "properties" element.""" filename = tmpdir_factory.mktemp('data').join('missing_testcase_properties.xml').strpath junit_xml = \ """<?xml version="1.0" encoding="utf-8"?> <testsuite errors="0" failures="0" name="pytest" skips="0" tests="5" time="1.664"> {global_properties} <testcase classname="tests.test_default" file="tests/test_default.py" line="8" name="test_pass[ansible://localhost]" time="0.00372695922852"/> </testsuite> """.format(global_properties=DEFAULT_GLOBAL_PROPERTIES, testcase_properties=DEFAULT_TESTCASE_PROPERTIES) with open(filename, 'w') as f: f.write(junit_xml) return filename @pytest.fixture(scope='session') def missing_test_id_xml(tmpdir_factory): """JUnitXML sample representing a test case that has a missing test id property element.""" filename = tmpdir_factory.mktemp('data').join('missing_test_id.xml').strpath junit_xml = \ """<?xml version="1.0" encoding="utf-8"?> <testsuite errors="0" failures="0" name="pytest" skips="0" tests="5" time="1.664"> {global_properties} <testcase classname="tests.test_default" file="tests/test_default.py" line="8" name="test_pass[ansible://localhost]" time="0.00372695922852"/> <properties> <property name="start_time" value="2018-04-10T21:38:18Z"/> <property name="start_time" value="2018-04-10T21:38:18Z"/> <property name="end_time" value="2018-04-10T21:38:19Z"/> </properties> </testsuite> """.format(global_properties=DEFAULT_GLOBAL_PROPERTIES, testcase_properties=DEFAULT_TESTCASE_PROPERTIES) with open(filename, 'w') as f: f.write(junit_xml) return filename @pytest.fixture(scope='session') def missing_build_url_xml(tmpdir_factory): """JUnitXML sample representing a test suite that is missing the "BUILD_URL" property.""" filename = tmpdir_factory.mktemp('data').join('missing_build_url.xml').strpath junit_xml = \ """<?xml version="1.0" encoding="utf-8"?> <testsuite errors="0" failures="0" name="pytest" skips="0" tests="5" time="1.664"> <properties> <property 
name="BUILD_NUMBER" value="Unknown"/> <property name="BUILD_NUMBER" value="Unknown"/> <property name="RE_JOB_ACTION" value="Unknown"/> <property name="RE_JOB_IMAGE" value="Unknown"/> <property name="RE_JOB_SCENARIO" value="Unknown"/> <property name="RE_JOB_BRANCH" value="Unknown"/> <property name="RPC_RELEASE" value="Unknown"/> <property name="RPC_PRODUCT_RELEASE" value="Unknown"/> <property name="OS_ARTIFACT_SHA" value="Unknown"/> <property name="PYTHON_ARTIFACT_SHA" value="Unknown"/> <property name="APT_ARTIFACT_SHA" value="Unknown"/> <property name="REPO_URL" value="Unknown"/> </properties> <testcase classname="tests.test_default" file="tests/test_default.py" line="8" name="test_pass[ansible://localhost]" time="0.00372695922852"/> {testcase_properties} </testsuite> """.format(global_properties=DEFAULT_GLOBAL_PROPERTIES, testcase_properties=DEFAULT_TESTCASE_PROPERTIES) with open(filename, 'w') as f: f.write(junit_xml) return filename @pytest.fixture(scope='session') def classname_with_dashes_xml(tmpdir_factory): """JUnitXML sample representing a testcase that has a 'classname' attribute which contains dashes for the py.test filename.""" filename = tmpdir_factory.mktemp('data').join('classname_with_dashes.xml').strpath junit_xml = \ """<?xml version="1.0" encoding="utf-8"?> <testsuite errors="0" failures="0" name="pytest" skips="0" tests="5" time="1.664"> {global_properties} <testcase classname="test.tests.test_for_acs-150.TestForRPC10PlusPostDeploymentQCProcess" file="tests/test_for_acs-150.py" line="140" name="test_verify_kibana_horizon_access_with_no_ssh[_testinfra_host0]" time="0.00372695922852"> {testcase_properties} </testcase> </testsuite> """.format(global_properties=DEFAULT_GLOBAL_PROPERTIES, testcase_properties=DEFAULT_TESTCASE_PROPERTIES) with open(filename, 'w') as f: f.write(junit_xml) return filename @pytest.fixture(scope='session') def invalid_classname_xml(tmpdir_factory): """JUnitXML sample representing a testcase that has an invalid 'classname' 
attribute which is used to build the results hierarchy in the '_generate_module_hierarchy' function.""" filename = tmpdir_factory.mktemp('data').join('invalid_classname.xml').strpath junit_xml = \ """<?xml version="1.0" encoding="utf-8"?> <testsuite errors="0" failures="0" name="pytest" skips="0" tests="5" time="1.664"> {global_properties} <testcase classname="this is not a valid classname" file="tests/test_default.py" line="8" name="test_pass[ansible://localhost]" time="0.00372695922852"> {testcase_properties} </testcase> </testsuite> """.format(global_properties=DEFAULT_GLOBAL_PROPERTIES, testcase_properties=DEFAULT_TESTCASE_PROPERTIES) with open(filename, 'w') as f: f.write(junit_xml) return filename
43.197368
120
0.604833
2,139
19,698
5.386629
0.106592
0.039837
0.068044
0.043742
0.858532
0.839958
0.774952
0.761934
0.709599
0.702048
0
0.044048
0.220885
19,698
455
121
43.292308
0.706718
0.120824
0
0.675
0
0
0.088191
0.022235
0
0
0
0
0
1
0.125
false
0.05
0.008333
0
0.258333
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
baaa853c801e383a515ed6eb94540014f78e5d86
46
py
Python
simpleoncall/tasks/__init__.py
simpleoncall/simpleoncall
ffc247045c7ce357871899c84fdfc61f4add06a9
[ "MIT" ]
1
2016-01-11T21:37:44.000Z
2016-01-11T21:37:44.000Z
simpleoncall/tasks/__init__.py
simpleoncall/simpleoncall
ffc247045c7ce357871899c84fdfc61f4add06a9
[ "MIT" ]
48
2015-01-04T16:04:20.000Z
2015-01-25T20:53:49.000Z
simpleoncall/tasks/__init__.py
simpleoncall/simpleoncall
ffc247045c7ce357871899c84fdfc61f4add06a9
[ "MIT" ]
null
null
null
from simpleoncall.tasks.notificatons import *
23
45
0.847826
5
46
7.8
1
0
0
0
0
0
0
0
0
0
0
0
0.086957
46
1
46
46
0.928571
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
2424c314059fab73fa7f405e810fad7517eceecc
19,428
py
Python
source/services/taskService.py
hjimmy/glustermg
e43ad5f17b248fa543f0b5d4204baca3c8b18aab
[ "MulanPSL-1.0" ]
null
null
null
source/services/taskService.py
hjimmy/glustermg
e43ad5f17b248fa543f0b5d4204baca3c8b18aab
[ "MulanPSL-1.0" ]
null
null
null
source/services/taskService.py
hjimmy/glustermg
e43ad5f17b248fa543f0b5d4204baca3c8b18aab
[ "MulanPSL-1.0" ]
null
null
null
###added by jian.hou task operations import web import os import logging import logging.config import commands import re from scripts.common import XmlHandler, Globals, Utils db =Globals.db STATUS_CODE_SUCCESS = "0" STATUS_CODE_FAILURE = "1" STATUS_CODE_PART_SUCCESS = "2" STATUS_CODE_RUNNING = "3" STATUS_CODE_PAUSE = "4" STATUS_CODE_WARNING = "5" STATUS_CODE_COMMIT_PENDING = "6" STATUS_CODE_ERROR = "7" logging.config.fileConfig(Globals.LOG_CONF) logger = logging.getLogger("Task") ''' logger.debug("debug message") logger.info("info message") logger.warn("warn message") logger.error("error message") logger.critical("critical message") ''' def getTasks(clusterName): try: show_len = 100 cluster_id_info = db.select('cluster_info',where='name=$clusterName',what="id",vars=locals()) if len(cluster_id_info) == 0: code, reval = "20052", "Error cluster: "+clusterName+" not exist" else: task_info = db.select('task_info ,operation_info',where='task_info.operation_id=operation_info.id and task_info.cluster_name=$clusterName',what = "task_info.id as taskID,description,reference,operation_id,operation_info.id,operation_type,commitSupported,pauseSupported,stopSupported,percentageSupported",vars=locals()) tasksTag = createTasksTag() task_len = len(task_info) if task_len == 0: logger.info("There is no task info") return tasksTag.toxml() elif task_len > show_len: task_start = task_len - show_len task_info = db.select('task_info ,operation_info',where='task_info.operation_id=operation_info.id and task_info.cluster_name=$clusterName limit $task_start,$task_len ',what = "task_info.id as taskID,description,reference,operation_id,operation_info.id,operation_type,commitSupported,pauseSupported,stopSupported,percentageSupported",vars=locals()) for task in task_info: if task.operation_id == 2: migrate = Migrate_operation() (code, reval) = migrate.get_migrate_info(task.reference) (code,message) = getMigrateStatus(reval) percentCompleted = percentCompleted = '0.0' elif task.operation_id == 1: 
disk = Disk_operation() (code, reval) = disk.get_disk_info(task.reference) messages = reval.split('\n') message = messages[len(messages) - 1].strip()[:-1] code, message = getInitialStatus(reval) percentCompleted = '0.0' else: rebal = Rebalance_operation() (code, reval) = rebal.get_rebalance_info(task.reference) messages = reval.split('\n') message = messages[len(messages) - 1].strip() code, message = getRebalanceStatus(reval) percentCompleted = '0.0' task_info_xml = addTaskTag(tasksTag,task.taskID,task.commitSupported,task.description,task.taskID,task.pauseSupported,task.reference,code,message,percentCompleted,task.percentageSupported,task.stopSupported,task.operation_type) logger.info("Get all tasks info") return task_info_xml except Exception,e: code, reval = "24001", "Error when get all tasks info:" + str(e) logger.error(reval) params = [] params.append(clusterName) result = Utils.errorCode(code, reval, params) raise web.HTTPError(status = "400 Bad Request", data = result) def getTask(clusterName,taskID): try: cluster_id_info = db.select('cluster_info',where='name=$clusterName',what="id",vars=locals()) if len(cluster_id_info) == 0: code, reval = "20052", "Error cluster: "+clusterName+" not exist" else: task_info = db.select('task_info,operation_info',where='task_info.operation_id=operation_info.id and task_info.id=$taskID and task_info.cluster_name=$clusterName',what = "*",vars=locals()) if len(task_info) == 0: code, reval = "20056", "Error the task not exist" else: task =task_info[0] if task.operation_id == 2: task_info=task.reference.replace('#',' ') migrate = Migrate_operation() (status, output) = migrate.get_migrate_info(task.reference) (code,message) = getMigrateStatus(output) percentCompleted = percentCompleted = '0.0' elif task.operation_id == 1: disk = Disk_operation() (status, output) = disk.get_disk_info(task.reference) messages = output.split('\n') message = messages[len(messages) - 1].strip()[:-1] code, message = getInitialStatus(output) 
percentCompleted = '0.0' else: rebal = Rebalance_operation() (status, output) = rebal.get_rebalance_info(task.reference) messages = output.split('\n') message = messages[len(messages) - 1].strip() code, message = getRebalanceStatus(output) percentCompleted = '0.0' task_info_xml = taskTag(taskID,task.commitSupported,task.description,taskID,task.pauseSupported,task.reference,code,message,percentCompleted,task.percentageSupported,task.stopSupported,task.operation_type) logger.info("Get task info") return task_info_xml except Exception,e: code, reval = "24001", "Error when get task info:" + str(e) logger.error(reval) params = [] params.append(clusterName) params.append(taskID) result = Utils.errorCode(code, reval, params) raise web.HTTPError(status = "400 Bad Request", data = result) def managerTask(clusterName,taskID): try: data = web.input() cluster_id_info = db.select('cluster_info',where='name=$clusterName',what="id",vars=locals()) if len(cluster_id_info) == 0: code, reval = "20052", "Error cluster:"+clusterName+" not exist" else: task_info = db.select('task_info,operation_info',where='task_info.operation_id=operation_info.id and task_info.id=$taskID and task_info.cluster_name=$clusterName',what = "*",vars=locals()) if len(task_info) == 0: code, reval = "20056", "Error the task not exist" else: task =task_info[0] if data.operation == 'stop': if task.operation_id == 2: migrate = Migrate_operation() (code, reval) = migrate.stop_migrate(task.reference) if code == 0: return reval elif task.operation_id == 1: disk = Disk_operation() (code, reval) = disk.stop_disk(task.reference) if code == 0: return reval else: rebal = Rebalance_operation() (code, reval) = rebal.stop_rebalance(task.reference) if code == 0: return reval if data.operation == 'pause': if task.operation_id == 2: task_info=task.reference.replace('#',' ') migrate = Migrate_operation() (code, reval) = migrate.pause_migrate(task.reference) if code == 0: return reval elif task.operation_id == 1: disk = 
Disk_operation() (code,reval) = disk.pause_disk(task.reference) if code == 0: return reval else: rebal = Rebalance_operation() (code, reval) = rebal.pause_rebalance(task.reference) if code == 0: return reval if data.operation == 'resume': if task.operation_id == 2: task_info=task.reference.replace('#',' ') migrate = Migrate_operation() (code, reval) = migrate.resume_migrate(task.reference) if code == 0: return reval elif task.operation_id == 1: disk = Disk_operation() (code, reval) = disk.resume_disk(task.reference) if code == 0: return reval else: rebal = Rebalance_operation() (code, reval) = rebal.resume_rebalance(task.reference) if code == 0: return reval if data.operation == 'commit': if task.operation_id == 2: task_info=task.reference.replace('#',' ') migrate = Migrate_operation() (code, reval) = migrate.commit_migrate(task.reference) if code == 0: return reval elif task.operation_id == 1: disk = Disk_operation() (code, reval) = disk.commit_disk(task.reference) if code == 0: return reval else: rebal = Rebalance_operation() (code, reval) = rebal.commit_rebalance(task.reference) if code == 0: return reval else: code, reval = "24004", "Error Invalid Task Operation" except Exception,e: code, reval = "24003", "Error when manager a task:" + str(e) logger.error(reval) params = [] params.append(clusterName) params.append(taskID) result = Utils.errorCode(code, reval, params) raise web.HTTPError(status = "400 Bad Request", data = result) def deleteTask(clusterName,taskID): try: cluster_id_info = db.select('cluster_info',where='name=$clusterName',what="id",vars=locals()) if len(cluster_id_info) == 0: code, reval = "20052", "Error cluster: "+clusterName+" not exist" else: task_info = db.select('task_info,operation_info',where='task_info.operation_id=operation_info.id and task_info.id=$taskID and task_info.cluster_name=$clusterName',what = "*",vars=locals()) if len(task_info) == 0: code, reval = "20056", "Error the task not exist" else: is_task_delete = 
db.delete('task_info',where='id=$taskID',vars=locals()) if is_task_delete == 0: code, reval = "24005","Error when delete a task" else: return '' except Exception,e: code, reval = "24005", "Error when delete a task:" + str(e) logger.error(reval) params = [] params.append(clusterName) result = Utils.errorCode(code, reval, params) raise web.HTTPError(status = "400 Bad Request", data = result) def addTaskTag(tasksTag,name,commitSupported,description,id,pauseSupported,reference,code,message,percentCompleted,percentageSupported,stopSupported,type): responseDom = XmlHandler.ResponseXml() taskTag = responseDom.appendTagRoute("task") taskTag.appendChild(responseDom.createTag("name", name)) taskTag.appendChild(responseDom.createTag("commitSupported", commitSupported)) taskTag.appendChild(responseDom.createTag("description", description)) taskTag.appendChild(responseDom.createTag("id", id)) taskTag.appendChild(responseDom.createTag("reference", reference)) statusTag = responseDom.appendTagRoute("status") statusTag.appendChild(responseDom.createTag("code", code)) statusTag.appendChild(responseDom.createTag("message", message)) statusTag.appendChild(responseDom.createTag("percentCompleted", percentCompleted)) statusTag.appendChild(responseDom.createTag("percentageSupported", percentCompleted)) taskTag.appendChild(statusTag) taskTag.appendChild(responseDom.createTag("stopSupported", stopSupported)) taskTag.appendChild(responseDom.createTag("type", type)) tasksTag.appendChild(taskTag) return tasksTag.toxml() def taskTag(name,commitSupported,description,id,pauseSupported,reference,code,message,percentCompleted,percentageSupported,stopSupported,type): responseDom = XmlHandler.ResponseXml() taskTag = responseDom.appendTagRoute("task") taskTag.appendChild(responseDom.createTag("name", name)) taskTag.appendChild(responseDom.createTag("commitSupported", commitSupported)) taskTag.appendChild(responseDom.createTag("description", description)) 
taskTag.appendChild(responseDom.createTag("id", id)) taskTag.appendChild(responseDom.createTag("reference", reference)) statusTag = responseDom.appendTagRoute("status") statusTag.appendChild(responseDom.createTag("code", code)) statusTag.appendChild(responseDom.createTag("message", message)) statusTag.appendChild(responseDom.createTag("percentCompleted", percentCompleted)) statusTag.appendChild(responseDom.createTag("percentageSupported", percentCompleted)) taskTag.appendChild(statusTag) taskTag.appendChild(responseDom.createTag("stopSupported", stopSupported)) taskTag.appendChild(responseDom.createTag("type", type)) return taskTag.toxml() def createTasksTag(): responseDom = XmlHandler.ResponseXml() tasksTag = responseDom.appendTagRoute("tasks") return tasksTag def getRebalanceStatus(output): if re.match('^rebalance completed.*', output) != -1: code = STATUS_CODE_SUCCESS message = 'rebalance completed' elif re.match('.*in progress.*', output) != -1: code = STATUS_CODE_RUNNING message = 'rebalance is running' else: code = STATUS_CODE_FAILURE message = 'rebalance failed' logger.info(message) return code,message def getInitialStatus(output): if re.match('STATUS_CODE_SUCCESS', output): code = STATUS_CODE_SUCCESS message = 'initialize disk successfully' elif re.match('STATUS_CODE_RUNNING', output): code = STATUS_CODE_RUNNING message = 'initializing disk is running' elif re.match('STATUS_CODE_FAILURE', output): code = STATUS_CODE_FAILURE message = 'initialize disk failed' else: code = STATUS_CODE_FAILURE message = 'initialize disk failed' logger.info(message) return code,message def getMigrateStatus(message): if re.match("^Number of files migrated.*Migration complete $",message) or re.match("^Number of files migrated = 0 .*Current file=",message): code = STATUS_CODE_COMMIT_PENDING return code,message elif re.match("^Number of files migrated.*Current file=.*",message): code = STATUS_CODE_RUNNING return code,"Brick Migration Started." 
elif re.match("^replace brick has been paused.*",message) : code = STATUS_CODE_PAUSE return code,"Brick Migration Paused",message elif re.match("replace-brick not started on volume*",message): code = STATUS_CODE_SUCCESS return code,"Brick Migration Committed." else: code = STATUS_CODE_FAILURE return code,message class Disk_operation: def get_disk_info(self, reference): server_disk = reference.split(':') chkcmd = 'python '+ Globals.BACKEND_SCRIPT + '/get_format_device_status.py ' + server_disk[1] hostName = os.popen('hostname').read() if Utils.isLocalHost(server_disk[1].strip()): (status, output) = commands.getstatusoutput(chkcmd) else: (status, output) = Utils.executeOnServer(server_disk[0].strip(), chkcmd) return status,output def stop_disk(self, reference): return ('24004','Error: Stop/Pause/Resume/Commit is not supported in Disk Initialization') def pause_disk(self, clusterName, reference): return ('24004','Error: Stop/Pause/Resume/Commit is not supported in Disk Initialization') def resume_disk(self, clusterName, reference): return ('24004','Error: Stop/Pause/Resume/Commit is not supported in Disk Initialization') def commit_disk(self, clusterName, reference): return ('24004','Error: Stop/Pause/Resume/Commit is not supported in Disk Initialization') class Rebalance_operation: def get_rebalance_info(self,reference): chkcmd = 'gluster volume rebalance ' + reference + ' status' (status, output) = commands.getstatusoutput(chkcmd) return status,output def stop_rebalance(self, clusterName, reference): chkcmd = 'gluster volume rebalance ' + reference + ' stop' (status, output) = commands.getstatusoutput(chkcmd) if status == 0: return 0,"" return (status, output) def pause_rebalance(self,reference): return ('24004','Pause/Resume/Commit is not supported in Volume Rebalance') def resume_rebalance(self, clusterName, reference): return ('24004','Pause/Resume/Commit is not supported in Volume Rebalance') def commit_rebalance(self, clusterName, reference): return 
('24004','Pause/Resume/Commit is not supported in Volume Rebalance') class Migrate_operation: def get_migrate_info(self,taskReference): reference = taskReference.replace('#',' ') chkcmd = 'gluster volume replace-brick ' + reference + ' status' (status, output) = commands.getstatusoutput(chkcmd) return status, output def stop_migrate(self,taskReference): reference = taskReference.replace('#',' ') chkcmd = 'gluster volume replace-brick ' + reference + ' abort' (status, output) = commands.getstatusoutput(chkcmd) if status == 0: return 0,"" return (status, output) def pause_migrate(self,taskReference): reference = taskReference.replace('#',' ') chkcmd = 'gluster volume replace-brick ' + reference + ' pause' (status, output) = commands.getstatusoutput(chkcmd) if status == 0: return 0,"" return (status, output) def resume_migrate(self,taskReference): reference = taskReference.replace('#',' ') chkcmd = 'gluster volume replace-brick ' + reference + ' start' (status, output) = commands.getstatusoutput(chkcmd) if status == 0: return 0,"" return (status, output) def commit_migrate(self,taskReference): reference = taskReference.replace('#',' ') chkcmd = 'gluster volume replace-brick ' + reference + ' commit force' (status, output) = commands.getstatusoutput(chkcmd) if status == 0: return 0,"" return (status, output)
48.81407
363
0.605569
1,973
19,428
5.841358
0.099341
0.030542
0.059176
0.046161
0.803124
0.765553
0.734664
0.721562
0.697007
0.687028
0
0.013887
0.288347
19,428
397
364
48.937028
0.819688
0.001699
0
0.638587
0
0.005435
0.166485
0.045235
0
0
0
0
0
0
null
null
0
0.019022
null
null
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
6
24289877dbf235f16399eff9da44e602e2ecace8
110
py
Python
service_skills/helpers.py
muratyaman/service-skills
1f44c2644621fa52a0892c988693328d2311325c
[ "Unlicense" ]
null
null
null
service_skills/helpers.py
muratyaman/service-skills
1f44c2644621fa52a0892c988693328d2311325c
[ "Unlicense" ]
null
null
null
service_skills/helpers.py
muratyaman/service-skills
1f44c2644621fa52a0892c988693328d2311325c
[ "Unlicense" ]
null
null
null
import datetime def now(): return datetime.datetime.utcnow() def nowf(): return now().isoformat()
11
37
0.663636
13
110
5.615385
0.615385
0
0
0
0
0
0
0
0
0
0
0
0.2
110
9
38
12.222222
0.829545
0
0
0
0
0
0
0
0
0
0
0
0
1
0.4
true
0
0.2
0.4
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
1
1
0
0
6
243a25d0b25e8bee7ec8a0a926adf8db7ac273d8
199
py
Python
src/test/test_predict_model.py
tanminghui/changedetection-main
43899c9959b01da1ddee6b72f075b3fbd0b03423
[ "MIT" ]
1
2022-01-26T08:45:25.000Z
2022-01-26T08:45:25.000Z
src/test/test_predict_model.py
tanminghui/changedetection-main
43899c9959b01da1ddee6b72f075b3fbd0b03423
[ "MIT" ]
2
2022-01-18T04:51:11.000Z
2022-01-28T06:54:51.000Z
src/test/test_predict_model.py
tanminghui/changedetection-main
43899c9959b01da1ddee6b72f075b3fbd0b03423
[ "MIT" ]
null
null
null
from src.models.predict_model import SampleClass def test_factorial(): assert SampleClass.factorial(5) == 120 assert SampleClass.factorial(1) == 1 assert SampleClass.summation() == 15
22.111111
48
0.733668
24
199
6
0.666667
0.354167
0.361111
0
0
0
0
0
0
0
0
0.048193
0.165829
199
8
49
24.875
0.819277
0
0
0
0
0
0
0
0
0
0
0
0.6
1
0.2
true
0
0.2
0
0.4
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
1
0
0
0
0
0
0
6
24487036c8d3410fadb1d208b0687d9e6ce4fa59
31
py
Python
database/base/__init__.py
zhouhongf/transfer_database_direct
ca5f1428686d54f496aee55d6c48cfd9403cbcd5
[ "MIT" ]
1
2021-11-27T06:40:42.000Z
2021-11-27T06:40:42.000Z
database/base/__init__.py
zhouhongf/transfer_database_direct
ca5f1428686d54f496aee55d6c48cfd9403cbcd5
[ "MIT" ]
null
null
null
database/base/__init__.py
zhouhongf/transfer_database_direct
ca5f1428686d54f496aee55d6c48cfd9403cbcd5
[ "MIT" ]
null
null
null
from .transfer import Transfer
15.5
30
0.83871
4
31
6.5
0.75
0
0
0
0
0
0
0
0
0
0
0
0.129032
31
1
31
31
0.962963
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
245f97ae035050797135422f71e6b7be62cfdc7d
219
py
Python
sk6502/__init__.py
skoolkid/sk6502
a5c1bd1994f06826624e3328f2698707fed1c273
[ "0BSD" ]
7
2019-10-24T15:55:14.000Z
2021-01-26T17:44:49.000Z
sk6502/__init__.py
skoolkid/sk6502
a5c1bd1994f06826624e3328f2698707fed1c273
[ "0BSD" ]
null
null
null
sk6502/__init__.py
skoolkid/sk6502
a5c1bd1994f06826624e3328f2698707fed1c273
[ "0BSD" ]
1
2021-01-29T21:40:40.000Z
2021-01-29T21:40:40.000Z
from sk6502 import assembler from sk6502.ctlcomposer import ControlDirectiveComposer from sk6502 import ctlgenerator from sk6502.disassembler import Disassembler from sk6502.instructionutility import InstructionUtility
36.5
56
0.894977
23
219
8.521739
0.391304
0.255102
0.163265
0
0
0
0
0
0
0
0
0.100503
0.091324
219
5
57
43.8
0.884422
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
034973c1532c63d9fe781a740de0183ffa9279bb
44
py
Python
slate/utils/__init__.py
jay3332/Slate
491835effba0d4a22b08ec689cc530b561f94736
[ "MIT" ]
null
null
null
slate/utils/__init__.py
jay3332/Slate
491835effba0d4a22b08ec689cc530b561f94736
[ "MIT" ]
null
null
null
slate/utils/__init__.py
jay3332/Slate
491835effba0d4a22b08ec689cc530b561f94736
[ "MIT" ]
null
null
null
from .backoff import * from .queue import *
14.666667
22
0.727273
6
44
5.333333
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.181818
44
2
23
22
0.888889
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
036c1974b08054bd9bf481591088ab8b03f87c6c
29
py
Python
tests/test_bot/src/send_photo/__init__.py
Ketre3/vkquick
81b19111e2322d277bfbb89dae6a27fb70a9b8c7
[ "MIT" ]
null
null
null
tests/test_bot/src/send_photo/__init__.py
Ketre3/vkquick
81b19111e2322d277bfbb89dae6a27fb70a9b8c7
[ "MIT" ]
null
null
null
tests/test_bot/src/send_photo/__init__.py
Ketre3/vkquick
81b19111e2322d277bfbb89dae6a27fb70a9b8c7
[ "MIT" ]
null
null
null
from .main import send_photo
14.5
28
0.827586
5
29
4.6
1
0
0
0
0
0
0
0
0
0
0
0
0.137931
29
1
29
29
0.92
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
300762cc5a4b702a50667508bab923914db5ead3
94
py
Python
players/caboose.py
Mego/PyCTF
f850679515ae989e339a7f02b2d095846026a38f
[ "MIT" ]
null
null
null
players/caboose.py
Mego/PyCTF
f850679515ae989e339a7f02b2d095846026a38f
[ "MIT" ]
null
null
null
players/caboose.py
Mego/PyCTF
f850679515ae989e339a7f02b2d095846026a38f
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 import random print(random.choice('wmsdp')+random.choice('nesw'))
18.8
51
0.702128
13
94
5.076923
0.769231
0.363636
0
0
0
0
0
0
0
0
0
0.011905
0.106383
94
5
51
18.8
0.77381
0.223404
0
0
0
0
0.130435
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
6
302ff3285d178ef21f674d4851c7576615b62681
22,638
py
Python
kick/device2/asa/actions/asa_ha.py
CiscoDevNet/firepower-kickstart
37a36856fcdc661e8c51edaa694e48f74cc6fcb5
[ "Apache-2.0" ]
2
2020-02-10T23:36:57.000Z
2020-03-25T15:46:05.000Z
kick/device2/asa/actions/asa_ha.py
CiscoDevNet/firepower-kickstart
37a36856fcdc661e8c51edaa694e48f74cc6fcb5
[ "Apache-2.0" ]
1
2020-08-07T13:01:32.000Z
2020-08-07T13:01:32.000Z
kick/device2/asa/actions/asa_ha.py
CiscoDevNet/firepower-kickstart
37a36856fcdc661e8c51edaa694e48f74cc6fcb5
[ "Apache-2.0" ]
1
2020-02-19T13:58:35.000Z
2020-02-19T13:58:35.000Z
"""Copyright (c) 2017 Cisco Systems, Inc. Name: asa_ha.py Usage: Submodule for Legacy ASA HA. Author: raywa """ import re import time from unicon.eal.dialogs import Dialog from .constants import ASAHAStates, ASAHALinkNames class AsaHAActiveStandby(): """Class ASA active/standby HA """ def __init__(self, asa_instance1, asa_instance2, init_ha=True): """Initializer of AsaHAActiveStandby. By default, instance1 will be primary and instance2 will be secondary. :param asa_instance1: connection instance1 that created by AsaConfig :param asa_instance2: connection instance2 that created by AsaConfig :return: None """ self.asa_instance1 = asa_instance1 self.asa_instance2 = asa_instance2 self.primary = asa_instance1 self.secondary = asa_instance2 self.active = None self.standby = None if init_ha: # Configure primary unit self.config_as_primary(self.primary) folink_info = self.primary.topo.failover.failover_link if 'vlan' in folink_info and folink_info.vlan: folink_intf = '{hardware}.{vlan}'.format(hardware=folink_info.interface, vlan=folink_info.vlan) cmd = "interface %s\nno shutdown\n" % folink_info.interface cmd += "interface %s\nvlan %s" % (folink_intf, folink_info.vlan) self.primary.config(cmd, ctx = 'system') self.secondary.config(cmd, ctx='system') else: folink_intf = '{hardware}'.format(hardware=folink_info.interface) folink_ip = folink_info.ip_addr folink_mask = folink_info.netmask folink_standby = folink_info.ip_addr_standby folink_name = folink_info.get('link_name', ASAHALinkNames.DEFAULT_FAILOVER_LINK.value) self.config_folink(self.primary, folink_intf, folink_ip, folink_mask, folink_standby, folink_name) # Stateful link is optional. 
# Check existence in topo first if 'state_link' in self.primary.topo.failover: statelink_info = self.primary.topo.failover.state_link if 'vlan' in statelink_info and statelink_info.vlan: statelink_intf = '{hardware}.{vlan}'.format(hardware=statelink_info.interface, vlan=statelink_info.vlan) cmd = "interface %s\nno shutdown\n" % statelink_info.interface cmd += "interface %s\nvlan %s" % (statelink_intf, statelink_info.vlan) self.primary.config(cmd, ctx='system') self.secondary.config(cmd, ctx='system') else: statelink_intf = '{hardware}'.format(hardware=statelink_info.interface) if statelink_intf != folink_intf: state_link_name = statelink_info.get( 'link_name', ASAHALinkNames.DEFAULT_STATE_LINK.value) statelink_ip = statelink_info.get('ip_addr', None) statelink_mask = statelink_info.get('netmask', None) statelink_standby = statelink_info.get('ip_addr_standby', None) self.config_statelink( unit=self.primary, link_name=state_link_name, intf=statelink_intf, ip_addr=statelink_ip, ip_netmask=statelink_mask, ip_addr_standby=statelink_standby ) else: self.config_statelink( unit=self.primary, link_name=folink_name, ) # Configure secondary unit self.config_folink( self.secondary, folink_intf, folink_ip, folink_mask, folink_standby) if 'state_link' in self.secondary.topo.failover: if statelink_intf != folink_intf: cmd = 'interface {}'.format(statelink_intf) cmd += '\nno shutdown' self.secondary.config(cmd, exception_on_bad_config=True) self.config_as_secondary(self.secondary) # Enable HA self.enable_failover(self.secondary, check_status=False) self.enable_failover(self.primary, check_status=True) # Check HA status try: self.update_failover_roles() except RuntimeError: warning_msg = """ \nWARNING: Unable to determine primary/secondary or active/standby roles. This will impact further operations defined in this module. Double check your configuration and use the config methods provided in this module if something is missed. 
And then run update_failover_roles again.\n """ self.primary.logger.warning(warning_msg) pass @staticmethod def config_as_primary(unit): """Configure a unit to be primary :param unit: ASA instance to be configured as primary :return: None """ cmd = 'failover lan unit primary' unit.config(cmd, exception_on_bad_config=True) @staticmethod def config_as_secondary(unit): """Configure a unit to be secondary :param unit: ASA instance to be configured as secondary :return: None """ cmd = 'failover lan unit secondary' unit.config(cmd, exception_on_bad_config=True) @staticmethod def config_folink(unit, intf, ip_addr, ip_netmask, ip_addr_standby, link_name=None): """Configure HA failover link :param unit: unit that you want to apply the config :param intf: physical interface of failover link :param ip_addr: Primary IP address of failover link :param ip_netmask: Netmask of failover IP :param ip_addr_standby: Standby IP address of failover link :param link_name: Failover link name to be assigned :return: None """ link_name = link_name or ASAHALinkNames.DEFAULT_FAILOVER_LINK.value cmd = 'interface {intf}'.format(intf=intf) cmd += '\nno shutdown' cmd += '\nfailover lan interface {link_name} {intf}'.format(link_name=link_name, intf=intf) if ':' in ip_addr: cmd += '\nfailover interface ip folink {ip}/{mask} standby {standby}'.\ format(ip=ip_addr, mask=ip_netmask, standby=ip_addr_standby) else: cmd += '\nfailover interface ip folink {ip} {mask} standby {standby}'.\ format(ip=ip_addr, mask=ip_netmask, standby=ip_addr_standby) unit.config(cmd, exception_on_bad_config=True) @staticmethod def config_statelink( unit, link_name, intf=None, ip_addr=None, ip_netmask=None, ip_addr_standby=None): """Configure HA stateful link :param link_name: Stateful link name to be assigned :param unit: unit that you want to apply the config :param intf: physical interface of stateful link :param ip_addr: Primary IP address of stateful link :param ip_netmask: Netmask of stateful IP :param ip_addr_standby: 
Standby IP address of stateful link :return: None """ cmd = '' intf = intf or '' if intf: cmd += 'interface {intf}'.format(intf=intf) cmd += '\nno shutdown' cmd += '\nfailover link {link_name} {intf}'.format(link_name=link_name, intf=intf) if ip_addr and ip_netmask and ip_addr_standby: if ':' in ip_addr: cmd += '\nfailover interface ip statelink {ip}/{mask} standby {standby}'.\ format(ip=ip_addr, mask=ip_netmask, standby=ip_addr_standby) else: cmd += '\nfailover interface ip statelink {ip} {mask} standby {standby}'.\ format(ip=ip_addr, mask=ip_netmask, standby=ip_addr_standby) unit.config(cmd, exception_on_bad_config=True) def is_failover_formed(self): """Determine whether an HA pair is formed by checking if both active and standby units are actively in the pair. :return: True|False """ output = self.primary.execute('show failover | include host') if re.search('- ' + ASAHAStates.ACTIVE.value, output) \ and re.search('- ' + ASAHAStates.STANDBY.value, output): return True return False def enable_failover(self, unit, check_status=False, timeout=300): """Enable failover :param unit: asa instance where failover will be enabled :param check_status: option to check failover status. 
False by default :return: None """ unit.config('failover') if check_status: dialog = Dialog([ ['Beginning configuration replication', None, None, True, False], ['End Configuration Replication to mate', 'sendline()', None, False, False], ]) dialog.process(unit.asa_conn.spawn_id, timeout=timeout) start_time = time.time() while time.time() - start_time < timeout: if self.is_failover_formed(): break time.sleep(10) else: raise RuntimeError('HA pair not formed in {} sec'.format(timeout)) def switch_active(self, unit, timeout=180): """Switch active/standby units :param unit: unit to be switched to active :param timeout: timeout for units switching :return: None """ unit.config('failover active') start_time = time.time() while time.time() - start_time < timeout: if self.is_failover_formed(): break time.sleep(5) else: raise RuntimeError('HA pair not formed after switchover in {} sec'.format(timeout)) self.update_failover_roles() def update_failover_roles(self): """Update failver units roles Based on output of show failover, this method determines the roles of primary/secondary and active/standby units :return: None """ show_failover1 = self.asa_instance1.execute('show failover') role1 = re.search('This host: (.*) - (.*)', show_failover1) role2 = re.search('Other host: (.*) - (.*)', show_failover1) primary_found = secondary_found = False active_found = standby_found = False if role1 and role2: # Determine roles on instance 1 if role1.group(1).strip() == ASAHAStates.PRIMARY.value: primary_found = True self.primary = self.asa_instance1 elif role1.group(1).strip() == ASAHAStates.SECONDARY.value: secondary_found = True self.secondary = self.asa_instance1 if role1.group(2).strip() == ASAHAStates.ACTIVE.value: active_found = True self.active = self.asa_instance1 elif role1.group(2).strip() == ASAHAStates.STANDBY.value: standby_found = True self.standby = self.asa_instance1 # Determine roles on instance 2 if role2.group(1).strip() == ASAHAStates.PRIMARY.value: primary_found = True 
self.primary = self.asa_instance2 elif role2.group(1).strip() == ASAHAStates.SECONDARY.value: secondary_found = True self.secondary = self.asa_instance2 if role2.group(2).strip() == ASAHAStates.ACTIVE.value: active_found = True self.active = self.asa_instance2 elif role2.group(2).strip() == ASAHAStates.STANDBY.value: standby_found = True self.standby = self.asa_instance2 else: raise RuntimeError('Unable to determine units roles') if not primary_found: raise RuntimeError('Unable to find primary unit') if not secondary_found: raise RuntimeError('Unable to find secondary unit') if not active_found: raise RuntimeError('Unable to find active unit') if not standby_found: raise RuntimeError('Unable to find standby unit') def get_dataintf_status(self, nameif): """Get status of a non HA interface given its logical name :param nameif: logical name of the interface :return: link status in both active and standby units in a dict """ status = {} active, standby = self.active.execute('show failover').split('Other host') for line in active.splitlines(): match = re.search('Interface {}.*: (.*)'.format(nameif), line) if match: status['active'] = match.group(1) break for line in standby.splitlines(): match = re.search('Interface {}.*: (.*)'.format(nameif), line) if match: status['standby'] = match.group(1) break return status class AsaHAActiveActive(AsaHAActiveStandby): """Class ASA active/active HA """ def __init__(self, asa_instance1, asa_instance2, init_ha=True): """Initializer of AsaHAActiveStandby. By default, instance1 will be primary and instance2 will be secondary. 
:param asa_instance1: connection instance1 that created by AsaConfig :param asa_instance2: connection instance2 that created by AsaConfig :return: None """ self.asa_instance1 = asa_instance1 self.asa_instance2 = asa_instance2 self.primary = asa_instance1 self.secondary = asa_instance2 if init_ha: # Configure primary unit self.config_as_primary(self.primary) folink_info = self.primary.topo.failover.failover_link if 'vlan' in folink_info and folink_info.vlan: folink_intf = '{hardware}.{vlan}'.format(hardware=folink_info.interface, vlan=folink_info.vlan) cmd = "interface %s\nno shutdown\n" % folink_info.interface cmd += "interface %s\nvlan %s" % (folink_intf, folink_info.vlan) self.primary.config(cmd, ctx = 'system') self.secondary.config(cmd, ctx='system') else: folink_intf = '{hardware}'.format(hardware=folink_info.interface) folink_ip = folink_info.ip_addr folink_mask = folink_info.netmask folink_standby = folink_info.ip_addr_standby folink_name = folink_info.get('link_name', ASAHALinkNames.DEFAULT_FAILOVER_LINK.value) self.config_folink(self.primary, folink_intf, folink_ip, folink_mask, folink_standby, folink_name) # Stateful link is optional. 
# Check existence in topo first if 'state_link' in self.primary.topo.failover: if 'state_link' in self.primary.topo.failover: statelink_info = self.primary.topo.failover.state_link if 'vlan' in statelink_info and statelink_info.vlan: statelink_intf = '{hardware}.{vlan}'.format(hardware=statelink_info.interface, vlan=statelink_info.vlan) cmd = "interface %s\nno shutdown\n" % statelink_info.interface cmd += "interface %s\nvlan %s" % (statelink_intf, statelink_info.vlan) self.primary.config(cmd, ctx='system') self.secondary.config(cmd, ctx='system') else: statelink_intf = '{hardware}'.format(hardware=statelink_info.interface) if statelink_intf != folink_intf: state_link_name = statelink_info.get( 'link_name', ASAHALinkNames.DEFAULT_STATE_LINK.value) statelink_ip = statelink_info.get('ip_addr', None) statelink_mask = statelink_info.get('netmask', None) statelink_standby = statelink_info.get('ip_addr_standby', None) self.config_statelink( unit=self.primary, link_name=state_link_name, intf=statelink_intf, ip_addr=statelink_ip, ip_netmask=statelink_mask, ip_addr_standby=statelink_standby ) else: self.config_statelink( unit=self.primary, link_name=folink_name, ) # Configure secondary unit self.config_folink( self.secondary, folink_intf, folink_ip, folink_mask, folink_standby) if 'state_link' in self.secondary.topo.failover: if statelink_intf != folink_intf: cmd = 'interface {}'.format(statelink_intf) cmd += '\nno shutdown' self.secondary.config(cmd, exception_on_bad_config=True) self.config_as_secondary(self.secondary) # Configure failover groups for active/active self.config_failover_groups(self.primary) self.config_failover_groups(self.secondary) # Enable HA self.enable_failover(self.secondary, check_status=False) self.enable_failover(self.primary, check_status=True) # Join failover groups for each context configured in failover_groups for fo_grp in self.primary.topo.failover_groups: self.join_failover_group(unit=self.primary, context=fo_grp.ctx, 
failover_group_idx=fo_grp.idx) # Check HA status try: self.update_failover_roles() except RuntimeError: warning_msg = """ \nWARNING: Unable to determine primary/secondary or active/standby roles. This will impact further operations defined in this module. Double check your configuration and use the config methods provided in this module if something is missed. And then run update_failover_roles again.\n """ self.primary.logger.warning(warning_msg) pass def config_failover_groups(self, unit): """Configure 2 failover groups on primary or secondary in active/active configuration :param unit: ASA instance to be configured :return: None """ cmd = 'failover group 1' cmd += '\nprimary' cmd += '\npreempt' cmd += '\nexit' cmd += '\nfailover group 2' cmd += '\nsecondary' cmd += '\npreempt' cmd += '\nexit' unit.config(cmd, exception_on_bad_config=True) def join_failover_group(self, unit, context, failover_group_idx): """Configure to join a failover group for a context in active/active configuration :param unit: ASA instance to be configured :param context: ASA context :param failover_group_idx: 1 or 2 as an example :return: None """ unit.config('no failover', ctx='system') unit.config('context {}'.format(context), ctx='system') unit.config('join-failover-group {}'.format(failover_group_idx)) unit.config('exit') self.enable_failover(unit, check_status=True) def is_failover_formed(self): """Determine whether an HA pair is formed by checking if both primary and secondary are actively in the pair in active/active mode :return: True|False """ self.primary.change_to_context(ctx='system') output = self.primary.execute('show failover | include Group 1') if not re.search(ASAHAStates.ACTIVE.value, output) \ or not re.search(ASAHAStates.STANDBY.value, output): return False output = self.primary.execute('show failover | include Group 2') if not re.search(ASAHAStates.ACTIVE.value, output) \ or not re.search(ASAHAStates.STANDBY.value, output): return False return True def 
validate_failover_groups(self): """Determine whether failover groups are formed in active/active configuration :return: True|False """ flag = False unit = self.primary unit.change_to_context('system') unit.execute('show failover group 1') unit.execute('show failover group 2') output = unit.execute('show failover group 1 | include State') if re.search(ASAHAStates.ACTIVE.value, output) \ and re.search(ASAHAStates.STANDBY.value, output): flag = True output = unit.execute('show failover group 2 | include State') if re.search(ASAHAStates.ACTIVE.value, output) \ and re.search(ASAHAStates.STANDBY.value, output): flag = True return flag def update_failover_roles(self): """Update failver units roles Based on output of show failover, this method determines the roles of primary/secondary active/active :return: None """ show_failover1 = self.asa_instance1.execute('show failover') role1 = re.search('This host: (.*)', show_failover1) role2 = re.search('Other host: (.*)', show_failover1) primary_found = secondary_found = False if role1 and role2: # Determine roles on instance 1 if role1.group(1).strip() == ASAHAStates.PRIMARY.value: primary_found = True self.primary = self.asa_instance1 elif role1.group(1).strip() == ASAHAStates.SECONDARY.value: secondary_found = True self.secondary = self.asa_instance1 # Determine roles on instance 2 if role2.group(1).strip() == ASAHAStates.PRIMARY.value: primary_found = True self.primary = self.asa_instance2 elif role2.group(1).strip() == ASAHAStates.SECONDARY.value: secondary_found = True self.secondary = self.asa_instance2 else: raise RuntimeError('Unable to determine units roles') if not primary_found: raise RuntimeError('Unable to find primary unit') if not secondary_found: raise RuntimeError('Unable to find secondary unit')
40.862816
124
0.600009
2,537
22,638
5.190382
0.091052
0.030908
0.014809
0.013973
0.798603
0.772555
0.741343
0.735419
0.712637
0.703827
0
0.007091
0.314736
22,638
553
125
40.936709
0.841746
0.160792
0
0.706395
0
0
0.143872
0.002295
0
0
0
0
0
1
0.046512
false
0.005814
0.011628
0
0.084302
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
303f5c3c53ad238e36ad1309574430574eb17f0a
10,895
py
Python
website/tests/test_views.py
gbif-norway/resolver-docker
1119223d8c640627adb6a3c481f03d04cca39804
[ "Apache-2.0" ]
null
null
null
website/tests/test_views.py
gbif-norway/resolver-docker
1119223d8c640627adb6a3c481f03d04cca39804
[ "Apache-2.0" ]
5
2021-02-03T07:24:00.000Z
2022-02-03T13:57:44.000Z
website/tests/test_views.py
gbif-norway/resolver-docker
1119223d8c640627adb6a3c481f03d04cca39804
[ "Apache-2.0" ]
2
2020-11-24T14:23:55.000Z
2022-02-26T00:32:39.000Z
from website.models import ResolvableObject, Dataset from populator.models import Statistic import json from rest_framework.test import APITestCase from rest_framework.reverse import reverse class ResolverViewTests(APITestCase): def setUp(self): self.dataset = Dataset.objects.create(id='dataset_id', data={'label': 'My dataset', 'key': 'a', 'type': 'event'}) def test_displays_index(self): Statistic.objects.set_total_count() response = self.client.get('/') self.assertEqual(response.status_code, 200) def test_displays_404_given_non_existent_id(self): response = self.client.get(reverse('resolvableobject-detail', ['00000000-0000-0000-0000-000000000000'])) self.assertTrue(response.status_code == 404) def test_filters_do_not_break_with_paginator(self): Statistic.objects.set_total_count() response = self.client.get(reverse('resolvableobject-list') + '?offset=10&limit=20', HTTP_ACCEPT='application/ld+json') self.assertEqual(response.status_code, 200) def test_displays_all_results(self): for item in 'abcde': ResolvableObject.objects.create(id=item, data={'test': item}, dataset=self.dataset) Statistic.objects.set_total_count() # Total count must be manually pre-set when database is populated to return results here, too slow to calculate on the fly response = self.client.get(reverse('resolvableobject-list') + '?limit=10', HTTP_ACCEPT='application/ld+json') results = json.loads(response.content.decode('utf-8').lower()) self.assertEqual(len(results['results']), 5) def test_pagination(self): url = reverse('resolvableobject-list') for item in 'abcde': ResolvableObject.objects.create(id=item, data={'test': item}, dataset=self.dataset) Statistic.objects.create(name='total_count', value=5) response = self.client.get(url + '?offset=3&limit=2', HTTP_ACCEPT='application/ld+json') results = json.loads(response.content.decode('utf-8').lower()) self.assertEqual(len(results['results']), 2) self.assertEqual(results['results'][0]['dwc:test'], 'd') 
self.assertEqual(results['results'][1]['dwc:test'], 'e') def test_filters_with_pagination(self): for item in [('a', 'Galium',), ('b', 'Eudyptes'), ('c', 'Eudyptes'), ('d', 'Galium'), ('e', 'Eudyptes')]: ResolvableObject.objects.create(id=item[0], data={'id': item[0], 'scientificname': item[1]}, dataset=self.dataset) response = self.client.get(reverse('resolvableobject-list') + '?offset=1&limit=1&scientificname=Eudyptes', HTTP_ACCEPT='application/ld+json') results = json.loads(response.content.decode('utf-8').lower()) self.assertEqual(len(results['results']), 1) self.assertEqual(results['results'][0]['dwc:scientificname'], 'eudyptes') self.assertEqual(results['results'][0]['owl:sameas'], 'c') def _test_correct_count_with_filtering_and_pagination(self): for item in [('a', 'Galium',), ('b', 'Eudyptes'), ('c', 'Eudyptes'), ('d', 'Galium'), ('e', 'Eudyptes')]: ResolvableObject.objects.create(id=item[0], data={'id': item[0], 'scientificname': item[1]}, dataset=self.dataset) response = self.client.get(reverse('resolvableobject-list') + '?offset=1&limit=1&scientificname=Eudyptes', HTTP_ACCEPT='application/ld+json') results = json.loads(response.content.decode('utf-8').lower()) self.assertEqual(results['count'], 3) def test_filters_on_scientific_name(self): id = 'urn:uuid:5c0884ce-608c-4716-ba0e-cb389dca5580' ResolvableObject.objects.create(id=id, dataset=self.dataset, data={'id': id, 'basisOfRecord': 'preservedspecimen', 'scientificname': 'Galium odoratum'}) id = 'urn:uuid:6c0884ce-608c-4716-ba0e-cb389dca5581' ResolvableObject.objects.create(id=id, dataset=self.dataset, data={'id': id, 'basisOfRecord': 'preservedspecimen', 'scientificname': 'Eudyptes moseleyi'}) url = reverse('resolvableobject-list') response = self.client.get(url + '?scientificname=Galium%20odoratum', HTTP_ACCEPT='application/ld+json') results = json.loads(response.content.decode('utf-8').lower()) self.assertEqual(len(results['results']), 1) self.assertEqual(results['results'][0]['dwc:scientificname'], 
'galium odoratum') def test_filters_on_multiple(self): id = 'urn:uuid:5c0884ce-608c-4716-ba0e-cb389dca5580' ResolvableObject.objects.create(id=id, dataset=self.dataset, data={'id': id, 'basisOfRecord': 'preservedspecimen', 'scientificname': 'Galium odoratum'}) id = 'urn:uuid:6c0884ce-608c-4716-ba0e-cb389dca5581' ResolvableObject.objects.create(id=id, dataset=self.dataset, data={'id': id, 'basisOfRecord': 'preservedspecimen', 'scientificname': 'Eudyptes moseleyi'}) id = 'urn:uuid:7c0884ce-608c-4716-ba0e-cb389dca5582' ResolvableObject.objects.create(id=id, dataset=self.dataset, data={'id': id, 'basisOfRecord': 'preservedspecimen', 'scientificname': 'Eudyptes moseleyi'}) url = reverse('resolvableobject-list') response = self.client.get(url + '?scientificname=Eudyptes%20moseleyi', HTTP_ACCEPT='application/ld+json') results = json.loads(response.content.decode('utf-8').lower()) self.assertEqual(results['results'][0]['dwc:scientificname'], 'eudyptes moseleyi') self.assertEqual(results['results'][1]['dwc:scientificname'], 'eudyptes moseleyi') def _test_calculates_correct_counts_with_filter(self): id = 'urn:uuid:5c0884ce-608c-4716-ba0e-cb389dca5580' ResolvableObject.objects.create(id=id, dataset=self.dataset, data={'id': id, 'basisOfRecord': 'preservedspecimen', 'scientificname': 'Galium odoratum'}) id = 'urn:uuid:6c0884ce-608c-4716-ba0e-cb389dca5581' ResolvableObject.objects.create(id=id, dataset=self.dataset, data={'id': id, 'basisOfRecord': 'preservedspecimen', 'scientificname': 'Eudyptes moseleyi'}) id = 'urn:uuid:7c0884ce-608c-4716-ba0e-cb389dca5582' ResolvableObject.objects.create(id=id, dataset=self.dataset, data={'id': id, 'basisOfRecord': 'preservedspecimen', 'scientificname': 'Eudyptes moseleyi'}) url = reverse('resolvableobject-list') response = self.client.get(url + '?scientificname=Eudyptes%20moseleyi', HTTP_ACCEPT='application/ld+json') results = json.loads(response.content.decode('utf-8').lower()) self.assertEqual(len(results['results']), 2) 
self.assertEqual(results['count'], 2) def _test_calculates_correct_counts_without_filter(self): # FAILS id = 'urn:uuid:5c0884ce-608c-4716-ba0e-cb389dca5580' ResolvableObject.objects.create(id=id, dataset=self.dataset, data={'id': id, 'basisOfRecord': 'preservedspecimen', 'scientificname': 'Galium odoratum'}) id = 'urn:uuid:6c0884ce-608c-4716-ba0e-cb389dca5581' ResolvableObject.objects.create(id=id, dataset=self.dataset, data={'id': id, 'basisOfRecord': 'preservedspecimen', 'scientificname': 'Eudyptes moseleyi'}) id = 'urn:uuid:7c0884ce-608c-4716-ba0e-cb389dca5582' ResolvableObject.objects.create(id=id, dataset=self.dataset, data={'id': id, 'basisOfRecord': 'preservedspecimen', 'scientificname': 'Eudyptes moseleyi'}) url = reverse('resolvableobject-list') response = self.client.get('/', HTTP_ACCEPT='application/ld+json') results = json.loads(response.content.decode('utf-8').lower()) self.assertEqual(len(results['results']), 3) self.assertEqual(results['count'], 3) # Note: this fails at the moment until I can figure out a better way to count def test_renders_occurrence_json_ld(self): response_string = self._simple_request_occurrence('application/ld+json') expected_response = {'owl:sameas': 'urn:uuid:5c0884ce-608c-4716-ba0e-cb389dca5580', '@id': 'http://purl.org/gbifnorway/id/urn:uuid:5c0884ce-608c-4716-ba0e-cb389dca5580', 'dwc:basisofrecord': 'preservedspecimen', 'core-type': '', 'dataset': {'key': 'a', 'label': 'my dataset', 'type': 'event'}, '@context': {'dc': 'http://purl.org/dc/elements/1.1/', 'dwc': 'http://rs.tdwg.org/dwc/terms/', 'owl': 'https://www.w3.org/tr/owl-ref/'}} self.assertEqual(expected_response, json.loads(response_string)) def test_renders_dataset_json_ld(self): response_string = self._simple_request_dataset('application/ld+json') expected_response = {'dc:type': 'dataset', 'owl:sameas': 'https://doi.org/10.12345/abcdef', 'rdfs:label': 'my dataset name' , '@id': 'http://purl.org/gbifnorway/id/urn:uuid:5c0884ce-608c-4716-ba0e-cb389dca5580', 
'core-type': '', 'dataset': {'key': 'a', 'label': 'my dataset', 'type': 'event'}, '@context': {'dc': 'http://purl.org/dc/elements/1.1/', 'dwc': 'http://rs.tdwg.org/dwc/terms/', 'owl': 'https://www.w3.org/tr/owl-ref/', 'rdfs': 'https://www.w3.org/tr/rdf-schema/'}} self.assertEqual(expected_response, json.loads(response_string)) def test_renders_occurrence_rdf(self): response_string = self._simple_request_occurrence('application/rdf+xml') self.assertTrue('<owl:sameas>urn:uuid:5c0884ce-608c-4716-ba0e-cb389dca5580</owl:sameas>' in response_string) def test_renders_dataset_rdf(self): response_string = self._simple_request_dataset('application/rdf+xml') self.assertTrue('<owl:sameas>https://doi.org/10.12345/abcdef</owl:sameas>' in response_string) def _simple_request_occurrence(self, http_accept): id = 'urn:uuid:5c0884ce-608c-4716-ba0e-cb389dca5580' ResolvableObject.objects.create(id=id, dataset=self.dataset, data={'id': id, 'basisOfRecord': 'preservedspecimen'}) url = reverse('resolvableobject-detail', [id]) response = self.client.get(url, HTTP_ACCEPT=http_accept) return response.content.decode('utf-8').lower() def _simple_request_dataset(self, http_accept): id = 'urn:uuid:5c0884ce-608c-4716-ba0e-cb389dca5580' ResolvableObject.objects.create(id=id, dataset=self.dataset, data={'label': 'my dataset name', 'type': 'dataset', 'sameas': 'https://doi.org/10.12345/abcdef', 'id': id}) url = reverse('resolvableobject-detail', [id]) response = self.client.get(url, HTTP_ACCEPT=http_accept) return response.content.decode('utf-8').lower()
67.253086
177
0.658192
1,254
10,895
5.623604
0.143541
0.014748
0.038287
0.074731
0.837777
0.802751
0.774107
0.756523
0.704056
0.675978
0
0.051987
0.182561
10,895
161
178
67.670807
0.739838
0.018541
0
0.565217
0
0.028986
0.320891
0.1103
0
0
0
0
0.166667
1
0.130435
false
0
0.036232
0
0.188406
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
306164adbda010f4a4bfd0458ae58149fed01af5
215
py
Python
alegra/resources/__init__.py
okchaty/alegra
6c423b23a24650c9121da5f165f6f03669b98468
[ "MIT" ]
1
2022-03-31T03:44:50.000Z
2022-03-31T03:44:50.000Z
alegra/resources/__init__.py
okchaty/alegra
6c423b23a24650c9121da5f165f6f03669b98468
[ "MIT" ]
4
2020-03-24T17:54:03.000Z
2021-06-02T00:48:50.000Z
alegra/resources/__init__.py
okchaty/alegra
6c423b23a24650c9121da5f165f6f03669b98468
[ "MIT" ]
null
null
null
from alegra.resources.contact import Contact from alegra.resources.invoice import Invoice from alegra.resources.item import Item from alegra.resources.retention import Retention from alegra.resources.tax import Tax
35.833333
48
0.860465
30
215
6.166667
0.3
0.27027
0.513514
0
0
0
0
0
0
0
0
0
0.093023
215
5
49
43
0.948718
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
065764179f9e1f9242c7ee1dc6e5e8cb0ba2739d
1,039
py
Python
tests/conftest.py
connorkendrick/python-codepen
5fb0465698651281d2df260ec4e32bd31575d59a
[ "MIT" ]
4
2017-10-04T00:55:02.000Z
2017-10-10T06:17:06.000Z
tests/conftest.py
connorkendrick/python-codepen
5fb0465698651281d2df260ec4e32bd31575d59a
[ "MIT" ]
null
null
null
tests/conftest.py
connorkendrick/python-codepen
5fb0465698651281d2df260ec4e32bd31575d59a
[ "MIT" ]
3
2017-10-10T06:17:08.000Z
2022-03-02T20:23:20.000Z
"""This module provides the expected keys for all requests.""" from pytest import fixture @fixture(scope='module') def pen_keys(): return ['title', 'details', 'link', 'id', 'views', 'loves', 'comments', 'images', 'user'] @fixture(scope='module') def post_keys(): return ['title', 'content', 'link', 'views', 'loves', 'comments', 'user'] @fixture(scope='module') def collection_info_keys(): return ['title', 'details', 'link', 'id', 'views', 'loves', 'comments', 'images', 'user'] @fixture(scope='module') def collections_list_keys(): return ['title', 'details', 'id', 'url', 'penCount', 'loves', 'views', 'user'] @fixture(scope='module') def profile_keys(): return ['nicename', 'username', 'avatar', 'location', 'bio', 'pro', 'followers', 'following', 'links'] @fixture(scope='module') def follow_keys(): return ['nicename', 'username', 'avatar', 'url'] @fixture(scope='module') def user_tags_keys(): return ['tag', 'penCount', 'link', 'user']
28.861111
65
0.602502
115
1,039
5.356522
0.408696
0.136364
0.204545
0.238636
0.435065
0.25
0.25
0.25
0.25
0.25
0
0
0.179018
1,039
35
66
29.685714
0.722157
0.053898
0
0.407407
0
0
0.321392
0
0
0
0
0
0
1
0.259259
true
0
0.037037
0.259259
0.555556
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
1
1
0
0
6
0683ec3285ee1249f5143f7cd57d3344f8b8fe08
112
py
Python
src/setup.py
jeelabs/monty
add6da22dd446cda5e93e023e90520cfdb3fc712
[ "Unlicense" ]
11
2021-02-02T02:32:50.000Z
2021-12-30T12:55:41.000Z
src/setup.py
jeelabs/monty
add6da22dd446cda5e93e023e90520cfdb3fc712
[ "Unlicense" ]
75
2021-01-27T10:53:10.000Z
2021-06-30T10:59:49.000Z
src/setup.py
jeelabs/monty
add6da22dd446cda5e93e023e90520cfdb3fc712
[ "Unlicense" ]
1
2021-09-25T11:18:38.000Z
2021-09-25T11:18:38.000Z
import sys #print('####################################################', file=sys.stderr) print('-std=c++11')
22.4
79
0.339286
10
112
3.8
0.8
0
0
0
0
0
0
0
0
0
0
0.018868
0.053571
112
4
80
28
0.339623
0.232143
0
0
0
0
0.30303
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
6
06a04eaa9e2d0b5745713e9e05752a296d640048
77
py
Python
umass_toolkit/__init__.py
ranish1/dining-back
33655fcedbd4d48bb8d27e2c1e582fa5e00d46cc
[ "MIT" ]
13
2018-07-12T20:05:26.000Z
2021-04-02T18:11:23.000Z
umass_toolkit/__init__.py
ranish1/dining-back
33655fcedbd4d48bb8d27e2c1e582fa5e00d46cc
[ "MIT" ]
23
2018-06-24T19:54:20.000Z
2019-04-08T13:51:19.000Z
umass_toolkit/__init__.py
ranish1/dining-back
33655fcedbd4d48bb8d27e2c1e582fa5e00d46cc
[ "MIT" ]
6
2018-07-12T20:05:37.000Z
2018-08-27T11:28:46.000Z
from .dining import * from .people_finder import * from .deans_list import *
19.25
28
0.766234
11
77
5.181818
0.636364
0.350877
0
0
0
0
0
0
0
0
0
0
0.155844
77
3
29
25.666667
0.876923
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
2314150fdd643b91354e52fa0ab2058a3c44a266
409
py
Python
src/blog/redirect_views.py
Vitaldocz/blog
91997b542def86eee6ad58e25c4dab1ad6e68e99
[ "MIT" ]
null
null
null
src/blog/redirect_views.py
Vitaldocz/blog
91997b542def86eee6ad58e25c4dab1ad6e68e99
[ "MIT" ]
null
null
null
src/blog/redirect_views.py
Vitaldocz/blog
91997b542def86eee6ad58e25c4dab1ad6e68e99
[ "MIT" ]
null
null
null
from django.shortcuts import redirect def redirect_register(request): return redirect('accounts:register') def redirect_login(request): return redirect('accounts:login') def redirect_logout(request): return redirect('accounts:logout') def redirect_reset(request): return redirect('accounts:resetPassword') def redirect_forgot(request): return redirect('accounts:forgotPassword')
18.590909
46
0.767726
45
409
6.866667
0.355556
0.177994
0.339806
0.469256
0
0
0
0
0
0
0
0
0.132029
409
21
47
19.47619
0.870423
0
0
0
0
0
0.222494
0.110024
0
0
0
0
0
1
0.454545
false
0.181818
0.090909
0.454545
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
6
232cdfc34338efe77d723fcfc8f83a8cfa99074d
563
py
Python
coronapy/lib/color.py
Collector0/coronapy-cli
f60e61de074928834319b8029c02335cedcacdd2
[ "MIT" ]
45
2020-03-29T02:04:38.000Z
2021-12-04T20:09:17.000Z
coronapy/lib/color.py
Collector0/coronapy-cli
f60e61de074928834319b8029c02335cedcacdd2
[ "MIT" ]
15
2020-03-29T10:14:54.000Z
2020-10-02T19:56:40.000Z
coronapy/lib/color.py
Collector0/coronapy-cli
f60e61de074928834319b8029c02335cedcacdd2
[ "MIT" ]
15
2020-03-29T10:33:31.000Z
2021-08-08T16:54:36.000Z
from colorama import init init() def prRed(skk): return "\033[91m {}\033[00m".format(skk) def prGreen(skk): return "\033[92m {}\033[00m".format(skk) def prYellow(skk): return "\033[93m {}\033[00m".format(skk) def prLightPurple(skk): return "\033[94m {}\033[00m".format(skk) def prPurple(skk): return "\033[95m {}\033[00m".format(skk) def prCyan(skk): return "\033[96m {}\033[00m".format(skk) def prLightGray(skk): return "\033[97m {}\033[00m".format(skk) def prBlack(skk): return "\033[98m {}\033[00m".format(skk)
15.638889
44
0.630551
85
563
4.176471
0.305882
0.202817
0.270423
0.338028
0.35493
0
0
0
0
0
0
0.170576
0.166963
563
35
45
16.085714
0.586354
0
0
0
0
0
0.269982
0
0
0
0
0
0
1
0.444444
false
0
0.055556
0.444444
0.944444
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
6
235034ddcca88f62f94c8aa39c596c897db8564f
22,767
py
Python
Benchmark/bfunctions.py
SooLee/Benchmark
443d275f4951d986a52d6a49343177efd4d8fe49
[ "MIT" ]
null
null
null
Benchmark/bfunctions.py
SooLee/Benchmark
443d275f4951d986a52d6a49343177efd4d8fe49
[ "MIT" ]
1
2019-06-07T00:52:44.000Z
2019-06-07T00:52:44.000Z
Benchmark/bfunctions.py
SooLee/Benchmark
443d275f4951d986a52d6a49343177efd4d8fe49
[ "MIT" ]
1
2019-10-14T14:50:09.000Z
2019-10-14T14:50:09.000Z
from Benchmark.byteformat import B2GB, B2MB, MB2GB, GB2MB from Benchmark.classes import BenchmarkResult def encode_rnaseq_stranded(input_json): assert 'input_size_in_bytes' in input_json r = BenchmarkResult(size=300, mem=GB2MB(64), cpu=16, exclude_t=True) return(r.as_dict()) def encode_rnaseq_unstranded(input_json): assert 'input_size_in_bytes' in input_json r = BenchmarkResult(size=300, mem=GB2MB(64), cpu=16, exclude_t=True) return(r.as_dict()) def mergebed(input_json): assert 'input_size_in_bytes' in input_json insz = input_json['input_size_in_bytes'] assert 'input_bed' in insz input_size = sum(insz['input_bed']) total_size_in_gb = B2GB((input_size * 5) * 3) r = BenchmarkResult(size=total_size_in_gb, mem=1024, cpu=2) return(r.as_dict()) def encode_atacseq_aln(input_json): assert 'input_size_in_bytes' in input_json insz = input_json['input_size_in_bytes'] assert 'atac.fastqs' in insz assert 'atac.bowtie2_idx_tar' in insz input_fastq_size = sum(insz['atac.fastqs']) if input_json['parameters'].get('atac.paired_end', ''): nTechRep = len(insz['atac.fastqs']) / 2 # we assume one biological replicate else: nTechRep = len(insz['atac.fastqs']) print("nTechRep = " + str(nTechRep)) total_size_in_gb = B2GB((input_fastq_size * 5 + insz['atac.bowtie2_idx_tar'] * 2.5) * nTechRep) * 1.5 if 'parameters' in input_json and 'atac.bowtie2.cpu' in input_json['parameters']: cpu = input_json['parameters']['atac.bowtie2.cpu'] else: cpu = 2 mem = 6 + 2 * (nTechRep - 1) r = BenchmarkResult(size=total_size_in_gb, mem=GB2MB(mem), cpu=cpu, exclude_t=True) return(r.as_dict()) def encode_atacseq_postaln(input_json): assert 'input_size_in_bytes' in input_json insz = input_json['input_size_in_bytes'] assert 'atac.tas' in insz input_size = sum(insz['atac.tas']) nRep = len(insz['atac.tas']) total_size_in_gb = (B2GB(input_size * 55) + 16 * (nRep - 1)) * 2.5 cpu = 12 + 4 * (nRep - 1) mem = 8 + 4 * (nRep - 1) r = BenchmarkResult(size=total_size_in_gb, mem=GB2MB(mem), cpu=cpu, exclude_t=True) 
return(r.as_dict()) def encode_chipseq_aln_chip(input_json): assert 'input_size_in_bytes' in input_json insz = input_json['input_size_in_bytes'] assert 'chip.fastqs' in insz assert 'chip.bwa_idx_tar' in insz input_fastq_size = sum(insz['chip.fastqs']) total_size_in_gb = B2GB(input_fastq_size * 6 + insz['chip.bwa_idx_tar'] * 4) * 1.2 if 'parameters' in input_json and 'chip.bwa.cpu' in input_json['parameters']: cpu = input_json['parameters']['chip.bwa.cpu'] else: cpu = 2 r = BenchmarkResult(size=total_size_in_gb, mem=GB2MB(16), cpu=cpu if cpu >= 8 else 8, exclude_t=True) return(r.as_dict()) def encode_chipseq_aln_ctl(input_json): assert 'input_size_in_bytes' in input_json insz = input_json['input_size_in_bytes'] assert 'chip.ctl_fastqs' in insz assert 'chip.bwa_idx_tar' in insz input_fastq_size = sum(insz['chip.ctl_fastqs']) total_size_in_gb = B2GB(input_fastq_size * 6 + insz['chip.bwa_idx_tar'] * 4) * 1.2 if 'parameters' in input_json and 'chip.bwa_ctl.cpu' in input_json['parameters']: cpu = input_json['parameters']['chip.bwa_ctl.cpu'] else: cpu = 2 r = BenchmarkResult(size=total_size_in_gb, mem=GB2MB(10), cpu=cpu if cpu >= 8 else 8, exclude_t=True) return(r.as_dict()) def encode_chipseq_postaln(input_json): assert 'input_size_in_bytes' in input_json insz = input_json['input_size_in_bytes'] assert 'chip.tas' in insz assert 'chip.bam2ta_no_filt_R1.ta' in insz input_size = sum(insz['chip.tas']) + sum(insz.get('chip.ctl_tas', [0])) \ + sum(insz['chip.bam2ta_no_filt_R1.ta']) total_size_in_gb = B2GB(input_size * 35) * 3 if 'parameters' in input_json and 'chip.spp_cpu' in input_json['parameters']: cpu = input_json['parameters']['chip.spp_cpu'] else: cpu = 2 mem = GB2MB(cpu * 7) if 'parameters' in input_json and 'chip.pipeline_type' in input_json['parameters']: if input_json['parameters']['chip.pipeline_type'] == 'tf': mem *= 2.5 print("mem=" + str(mem)) r = BenchmarkResult(size=total_size_in_gb, mem=mem, cpu=cpu * 4, exclude_t=True) return(r.as_dict()) def 
encode_chipseq(input_json): assert 'input_size_in_bytes' in input_json insz = input_json['input_size_in_bytes'] assert 'chip.fastqs' in insz assert 'chip.bwa_idx_tar' in insz input_fastq_size = sum(insz['chip.fastqs']) + sum(insz.get('chip.ctl_fastqs', [0])) input_size = input_fastq_size + insz['chip.bwa_idx_tar'] output_size = input_fastq_size * 8 + insz['chip.bwa_idx_tar'] * 4 total_size_in_gb = B2GB(input_size + output_size) r = BenchmarkResult(size=total_size_in_gb, mem=30000, cpu=16) return(r.as_dict()) def encode_atacseq(input_json): assert 'input_size_in_bytes' in input_json insz = input_json['input_size_in_bytes'] assert 'atac.fastqs' in insz assert 'atac.bowtie2_idx_tar' in insz input_fastq_size = sum(insz['atac.fastqs']) input_size = input_fastq_size + insz['atac.bowtie2_idx_tar'] output_size = input_fastq_size * 10 total_size_in_gb = B2GB(input_size + output_size) if 'parameters' in input_json and 'atac.bowtie2.cpu' in input_json['parameters']: cpu = input_json['parameters']['atac.bowtie2.cpu'] + 2 else: cpu = 6 r = BenchmarkResult(size=total_size_in_gb, mem=16000, cpu=cpu) return(r.as_dict()) def md5(input_json): assert 'input_size_in_bytes' in input_json assert 'input_file' in input_json.get('input_size_in_bytes') input_in_bytes = input_json.get('input_size_in_bytes').get('input_file') input_size = B2GB(input_in_bytes) + 3 r = BenchmarkResult(size=input_size, mem=1024, cpu=1) return(r.as_dict()) def fastqc(input_json): assert 'input_size_in_bytes' in input_json assert 'input_fastq' in input_json.get('input_size_in_bytes') nthreads = 1 # default value according to the cwl if 'parameters' in input_json: if 'threads' in input_json.get('parameters'): nthreads = input_json.get('parameters').get('threads') input_in_bytes = input_json.get('input_size_in_bytes').get('input_fastq') input_size = B2GB(input_in_bytes) * 2 + 3 if input_size > 100: input_size = input_size + 20 mem = 300 * nthreads if mem < 1024: mem = 1024 r = BenchmarkResult(size=input_size, 
mem=mem, cpu=nthreads) return(r.as_dict()) def fastqc_0_11_4_1(input_json): assert 'input_size_in_bytes' in input_json assert 'input_fastq' in input_json.get('input_size_in_bytes') nthreads = 1 # default value according to the cwl if 'parameters' in input_json: if 'threads' in input_json.get('parameters'): nthreads = input_json.get('parameters').get('threads') input_in_bytes = input_json.get('input_size_in_bytes').get('input_fastq') input_size = B2GB(input_in_bytes) * 2 + 3 if input_size > 100: input_size = input_size + 20 mem = 300 * nthreads if mem < 1024: mem = 1024 r = BenchmarkResult(size=input_size, mem=mem, cpu=nthreads) return(r.as_dict()) def bwa_mem(input_json): assert 'input_size_in_bytes' in input_json assert 'fastq1' in input_json.get('input_size_in_bytes') assert 'fastq2' in input_json.get('input_size_in_bytes') assert 'bwa_index' in input_json.get('input_size_in_bytes') # cpu nthreads = 4 # default from cwl if 'parameters' in input_json: if 'nThreads' in input_json.get('parameters'): nthreads = input_json.get('parameters').get('nThreads') # space input_sizes = input_json.get('input_size_in_bytes') data_input_size = input_sizes.get('fastq1') + input_sizes.get('fastq2') total_input_size = data_input_size + input_sizes.get('bwa_index') output_bam_size = data_input_size * 2 intermediate_index_size = input_sizes.get('bwa_index') * 2 copied_input_size = data_input_size * 7 # copied and unzipped total_intermediate_size \ = intermediate_index_size + output_bam_size + copied_input_size total_output_size = output_bam_size additional_size_in_gb = 10 total_file_size_in_bp \ = total_input_size + total_intermediate_size + total_output_size total_size = B2GB(total_file_size_in_bp) + additional_size_in_gb # mem mem = B2MB(input_sizes.get('bwa_index') * 4) + (nthreads * 500) r = BenchmarkResult(size=total_size, mem=mem, cpu=nthreads) return(r.as_dict()) def insulation_scores_and_boundaries_caller(input_json): assert 'input_size_in_bytes' in input_json assert 
'mcoolfile' in input_json.get('input_size_in_bytes') # cpu nthreads = 1 # space input_sizes = input_json.get('input_size_in_bytes') data_input_size = input_sizes.get('mcoolfile') total_input_size = data_input_size output_bw_bed_size = data_input_size * 0.001 total_output_size = output_bw_bed_size additional_size_in_gb = 2 total_file_size_in_bp \ = total_input_size + total_output_size total_size = B2GB(total_file_size_in_bp) + additional_size_in_gb # mem mem = GB2MB(2) r = BenchmarkResult(size=total_size, mem=mem, cpu=nthreads) return(r.as_dict()) def pairsam_parse_sort(input_json): assert 'input_size_in_bytes' in input_json assert 'bam' in input_json.get('input_size_in_bytes') # cpu nthreads = 8 # default from cwl if 'parameters' in input_json: if 'nThreads' in input_json.get('parameters'): nthreads = input_json.get('parameters').get('nThreads') in_size = input_json.get('input_size_in_bytes') bamsize = B2GB(in_size.get('bam')) pairsamsize = bamsize * 10 # very rough number tmp_pairsamsize = pairsamsize total_size = bamsize + pairsamsize + tmp_pairsamsize mem = 48000 # very rough number r = BenchmarkResult(size=total_size, mem=mem, cpu=nthreads * 2) return(r.as_dict()) def pairsam_merge(input_json): assert 'input_size_in_bytes' in input_json assert 'input_pairsams' in input_json.get('input_size_in_bytes') in_size = input_json['input_size_in_bytes'] assert isinstance(in_size['input_pairsams'], list) # cpu nthreads = 8 # default from cwl if 'parameters' in input_json: if 'nThreads' in input_json.get('parameters'): nthreads = input_json.get('parameters').get('nThreads') # space input_size = B2GB(sum(in_size['input_pairsams'])) total_size = input_size * 3 total_safe_size = total_size * 2 # mem mem = 4000 # 32 cores: 1.8G/min (c4.8xlarge), 8 cores: 0.9G/min (r4.2xlarge) r = BenchmarkResult(size=total_safe_size, mem=mem, cpu=nthreads) return(r.as_dict()) def pairsam_markasdup(input_json): assert 'input_size_in_bytes' in input_json assert 'input_pairsam' in 
input_json.get('input_size_in_bytes') cpu = 1 # very rough estimate mem = 15000 # very rough estimate # space insize = B2GB(input_json['input_size_in_bytes']['input_pairsam']) outsize = insize intersize = outsize total_size = insize + outsize + intersize total_safe_size = total_size * 2 r = BenchmarkResult(size=total_safe_size, mem=mem, cpu=cpu) return(r.as_dict()) def pairsam_filter(input_json): assert 'input_size_in_bytes' in input_json assert 'input_pairsam' in input_json.get('input_size_in_bytes') cpu = 4 # very rough estimate mem = 16000 # very rough estimate # space insize = B2GB(input_json['input_size_in_bytes']['input_pairsam']) outbamsize = insize outpairssize = insize # to be safe outsize = outbamsize + outpairssize intersize = outsize total_size = insize + outsize + intersize total_safe_size = total_size * 2 r = BenchmarkResult(size=total_safe_size, mem=mem, cpu=cpu) return(r.as_dict()) def addfragtopairs(input_json): assert 'input_size_in_bytes' in input_json assert 'input_pairs' in input_json.get('input_size_in_bytes') cpu = 1 # very rough estimate mem = 1024 # very rough estimate # space insize = B2GB(input_json['input_size_in_bytes']['input_pairs']) outsize = insize * 2 intersize = outsize total_size = insize + outsize + intersize total_safe_size = total_size * 2 r = BenchmarkResult(size=total_safe_size, mem=mem, cpu=cpu) return(r.as_dict()) def pairs_patch(input_json): assert 'input_size_in_bytes' in input_json assert 'input_pairs' in input_json.get('input_size_in_bytes') cpu = 1 # very rough estimate mem = 1024 # very rough estimate # space insize = B2GB(input_json['input_size_in_bytes']['input_pairs']) outsize = insize * 2 intersize = outsize total_size = insize + outsize + intersize total_safe_size = total_size * 2 r = BenchmarkResult(size=total_safe_size, mem=mem, cpu=cpu) return(r.as_dict()) def pairsqc_single(input_json): assert 'input_size_in_bytes' in input_json assert 'input_pairs' in input_json.get('input_size_in_bytes') cpu = 1 # very 
rough estimate mem = 1024 # very rough estimate # space insize = B2GB(input_json['input_size_in_bytes']['input_pairs']) outsize = 0 intersize = 0 total_size = insize + outsize + intersize total_safe_size = total_size * 2 r = BenchmarkResult(size=total_safe_size, mem=mem, cpu=cpu) return(r.as_dict()) def hi_c_processing_partb(input_json): assert 'input_size_in_bytes' in input_json assert 'input_pairs' in input_json.get('input_size_in_bytes') in_size = input_json['input_size_in_bytes'] assert isinstance(in_size['input_pairs'], list) # cpu nthreads = 8 # default from cwl if 'parameters' in input_json: if 'ncores' in input_json.get('parameters'): nthreads = input_json.get('parameters').get('ncores') # space input_size = B2GB(sum(in_size['input_pairs'])) out_pairs_size = input_size out_cool_size = input_size out_hic_size = input_size out_size = out_pairs_size + out_cool_size + out_hic_size total_size = input_size + out_size total_safe_size = total_size * 2 # mem maxmem = MB2GB(14) # default from cwl if 'parameters' in input_json: if 'maxmem' in input_json.get('parameters'): maxmem = input_json.get('parameters').get('maxmem') if 'g' in maxmem: maxmem = GB2MB(int(maxmem.replace('g', ''))) elif 'm' in maxmem: maxmem = int(maxmem.replace('m', '')) else: raise Exception("proper maxmem string?") cooler_mem = GB2MB(nthreads * input_size) if cooler_mem > maxmem: mem = cooler_mem else: mem = maxmem r = BenchmarkResult(size=total_safe_size, mem=mem, cpu=nthreads) return(r.as_dict()) def hi_c_processing_partc(input_json): assert 'input_size_in_bytes' in input_json insize = input_json.get('input_size_in_bytes') assert 'input_cool' in insize assert 'input_hic' in insize nthreads = 1 # default value according to the cwl nres = 13 # default value according to the cwl if 'parameters' in input_json: if 'ncores' in input_json.get('parameters'): nthreads = input_json.get('parameters').get('ncores') if 'nres' in input_json.get('parameters'): nres = input_json.get('parameters').get('nres') 
input_size = insize['input_cool'] + insize['input_hic'] out_size = input_size * nres inter_size = out_size total_size = B2GB(input_size + out_size + inter_size) total_safe_size = total_size * 2 cpu = nthreads mem = B2MB(nthreads * input_size * 5) if mem < 1024: mem = 1024 r = BenchmarkResult(size=total_safe_size, mem=mem, cpu=cpu) return(r.as_dict()) def hi_c_processing_bam(input_json): assert 'input_size_in_bytes' in input_json in_size = input_json.get('input_size_in_bytes') assert 'input_bams' in input_json.get('input_size_in_bytes') assert 'chromsize' in input_json.get('input_size_in_bytes') assert isinstance(in_size['input_bams'], list) # cpu nthreads_parse_sort = 8 # default from cwl nthreads_merge = 8 # default from cwl if 'parameters' in input_json: param = input_json['parameters'] if 'nthreads_parse_sort' in param: nthreads_parse_sort = param['nthreads_parse_sort'] if 'nthreads_merge' in param: nthreads_merge = param['nthreads_merge'] # nthreads is the maximum of the two nthread parameters if nthreads_parse_sort > nthreads_merge: nthreads = nthreads_parse_sort else: nthreads = nthreads_merge bamsize = B2GB(sum(in_size['input_bams'])) other_inputsize = B2GB(in_size.get('chromsize')) pairsize = bamsize / 2 # rough number outsize = bamsize + pairsize tmp_pairsamsize = bamsize * 5 # input and output are copied once total_size = (bamsize + other_inputsize + outsize) * 2 + tmp_pairsamsize safe_total_size = total_size * 2 mem = 2000 # very rough number r = BenchmarkResult(size=safe_total_size, mem=mem, cpu=nthreads) return(r.as_dict()) def hi_c_processing_pairs(input_json): assert 'input_size_in_bytes' in input_json assert 'input_pairs' in input_json.get('input_size_in_bytes') in_size = input_json['input_size_in_bytes'] assert isinstance(in_size['input_pairs'], list) # cpu nthreads = 8 # default from cwl if 'parameters' in input_json: if 'nthreads' in input_json.get('parameters'): nthreads = input_json.get('parameters').get('nthreads') # space input_size = 
B2GB(sum(in_size['input_pairs'])) out_size = input_size * 1.5 intermediate_size = input_size * 10 total_size = input_size + out_size + intermediate_size total_safe_size = total_size * 1.4 # mem maxmem = GB2MB(14) # default from cwl if 'parameters' in input_json: if 'maxmem' in input_json.get('parameters'): maxmem = input_json.get('parameters').get('maxmem') if 'g' in maxmem: maxmem = GB2MB(int(maxmem.replace('g', ''))) elif 'm' in maxmem: maxmem = int(maxmem.replace('m', '')) else: raise Exception("proper maxmem string?") cooler_mem = GB2MB(nthreads * input_size) if cooler_mem > maxmem: mem = cooler_mem else: mem = maxmem r = BenchmarkResult(size=total_safe_size, mem=mem, cpu=nthreads) return(r.as_dict()) def hi_c_processing_pairs_nore(input_json): return(hi_c_processing_pairs(input_json)) def hi_c_processing_pairs_nonorm(input_json): return(hi_c_processing_pairs(input_json)) def hi_c_processing_pairs_nore_nonorm(input_json): return(hi_c_processing_pairs(input_json)) def repliseq_parta(input_json): assert 'input_size_in_bytes' in input_json assert 'fastq' in input_json.get('input_size_in_bytes') assert 'bwaIndex' in input_json.get('input_size_in_bytes') # cpu nthreads = 4 # default from cwl if 'parameters' in input_json: if 'nthreads' in input_json.get('parameters'): nthreads = input_json.get('parameters').get('nthreads') # space input_sizes = input_json.get('input_size_in_bytes') input_fastq = B2MB(input_sizes.get('fastq')) input_bwa = B2GB(input_sizes.get('bwaIndex')) if 'fastq2' in input_sizes: # pe total_space = input_fastq * 0.032 + input_bwa * 2.58 mem = input_fastq * 8.48 + nthreads * 329 + input_bwa * 1658 else: # se total_space = input_fastq * 0.018 + input_bwa * 2.54 mem = input_fastq * 9.73 + nthreads * 221 + input_bwa * 1658 r = BenchmarkResult(size=total_space * 1.5, mem=mem * 1.5, cpu=nthreads) return(r.as_dict()) def merge_fastq(input_json): assert 'input_size_in_bytes' in input_json assert 'input_fastqs' in input_json.get('input_size_in_bytes') 
in_size = input_json['input_size_in_bytes'] assert isinstance(in_size['input_fastqs'], list) # cpu nthreads = 1 # space input_size = B2GB(sum(in_size['input_fastqs'])) total_size = input_size * 3 total_safe_size = total_size * 2 # mem mem = 4000 # 32 cores: 1.8G/min (c4.8xlarge), 8 cores: 0.9G/min (r4.2xlarge) r = BenchmarkResult(size=total_safe_size, mem=mem, cpu=nthreads) return(r.as_dict()) def bamqc(input_json): assert 'input_size_in_bytes' in input_json assert 'bamfile' in input_json.get('input_size_in_bytes') in_size = input_json['input_size_in_bytes'] # cpu nthreads = 1 # space input_size = B2GB(in_size['bamfile']) total_size = input_size + 5 total_safe_size = total_size + 5 # mem mem = 4000 # 32 cores: 1.8G/min (c4.8xlarge), 8 cores: 0.9G/min (r4.2xlarge) r = BenchmarkResult(size=total_safe_size, mem=mem, cpu=nthreads) return(r.as_dict()) def compartments_caller(input_json): assert 'input_size_in_bytes' in input_json assert 'mcoolfile' in input_json.get('input_size_in_bytes') # cpu nthreads = 1 # space input_sizes = input_json.get('input_size_in_bytes') data_input_size = input_sizes.get('mcoolfile') total_input_size = data_input_size output_bw_size = data_input_size * 0.001 total_output_size = output_bw_size additional_size_in_gb = 2 total_file_size_in_bp \ = total_input_size + total_output_size total_size = B2GB(total_file_size_in_bp) + additional_size_in_gb # mem mem = GB2MB(2) r = BenchmarkResult(size=total_size, mem=mem, cpu=nthreads) return(r.as_dict())
32.900289
105
0.659024
3,236
22,767
4.318603
0.070457
0.111413
0.070054
0.092737
0.829696
0.795921
0.759928
0.732093
0.710912
0.671986
0
0.023442
0.237405
22,767
691
106
32.947902
0.781477
0.047745
0
0.627723
0
0
0.167438
0.002314
0
0
0
0
0.146535
1
0.065347
false
0
0.00396
0.005941
0.069307
0.00396
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
88f03befa06eb74d8748b5ba8759d02acc810f0a
126
py
Python
passbook/factors/captcha/admin.py
fossabot/passbook
cba17f6659404445ac3025f11657d89368cc8b4f
[ "MIT" ]
null
null
null
passbook/factors/captcha/admin.py
fossabot/passbook
cba17f6659404445ac3025f11657d89368cc8b4f
[ "MIT" ]
null
null
null
passbook/factors/captcha/admin.py
fossabot/passbook
cba17f6659404445ac3025f11657d89368cc8b4f
[ "MIT" ]
null
null
null
"""captcha factor admin""" from passbook.lib.admin import admin_autoregister admin_autoregister("passbook_factors_captcha")
21
49
0.825397
15
126
6.666667
0.6
0.34
0
0
0
0
0
0
0
0
0
0
0.079365
126
5
50
25.2
0.862069
0.15873
0
0
0
0
0.24
0.24
0
0
0
0
0
1
0
true
1
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
0
0
0
6
88f7ce351171bda34919f84a6353d51e34900ec6
12,540
py
Python
ElectroWeakAnalysis/ZEE/python/simpleCutBasedElectronIDSpring10_cfi.py
SWuchterl/cmssw
769b4a7ef81796579af7d626da6039dfa0347b8e
[ "Apache-2.0" ]
6
2017-09-08T14:12:56.000Z
2022-03-09T23:57:01.000Z
ElectroWeakAnalysis/ZEE/python/simpleCutBasedElectronIDSpring10_cfi.py
SWuchterl/cmssw
769b4a7ef81796579af7d626da6039dfa0347b8e
[ "Apache-2.0" ]
545
2017-09-19T17:10:19.000Z
2022-03-07T16:55:27.000Z
ElectroWeakAnalysis/ZEE/python/simpleCutBasedElectronIDSpring10_cfi.py
SWuchterl/cmssw
769b4a7ef81796579af7d626da6039dfa0347b8e
[ "Apache-2.0" ]
14
2017-10-04T09:47:21.000Z
2019-10-23T18:04:45.000Z
import FWCore.ParameterSet.Config as cms ## Electron ID Based on Simple Cuts: Spring10 MC tuned selections # # Instructions on how to use this file # ==================================== # # The selections that are implemented in this python cfg are # explained in this twiki page: # https://twiki.cern.ch/twiki/bin/view/CMS/SimpleCutBasedEleID # In summary, they come in 6 different tightness levels. For # each tightness, the user can select whether they want # combined isolation or relative isolations. # # In order to use this cfg file you have to include it from the # python directory that you have placed it, clone some selection # of your preference and run it in your sequence # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # from ElectroWeakAnalysis.WENu.simpleCutBasedElectronID_cfi import * # # simpleEleId_95relIso = simpleCutBasedElectronID.clone() # simpleEleId_95relIso.electronQuality = '_95relIso_' # mySequence = cms.Sequence(...+...+..+simpleEleId95relIso+...) 
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Warning: make sure that you use the correct tags for the # RecoEgamma/ElectronIdentification package # consult this twiki to obtain the latest information: # # https://twiki.cern.ch/twiki/bin/view/CMS/SimpleCutBasedEleID # # this version of the file needs # V00-03-07-03 RecoEgamma/ElectronIdentification simpleCutBasedElectronID = cms.EDProducer("EleIdCutBasedExtProducer", # import here your collections src = cms.InputTag("gsfElectrons"), #reducedBarrelRecHitCollection = cms.InputTag("ecalRecHit","EcalRecHitsEB"), #reducedEndcapRecHitCollection = cms.InputTag("ecalRecHit","EcalRecHitsEE"), # Spring10 uses these names: reducedBarrelRecHitCollection = cms.InputTag("reducedEcalRecHitsEB"), reducedEndcapRecHitCollection = cms.InputTag("reducedEcalRecHitsEE"), # if you want the vertices or the offline beam spot verticesCollection = cms.InputTag("offlineBeamSpot"), #dataMagneticFieldSetUp = cms.bool(False), #dcsTag = cms.InputTag("scalersRawToDigi"), algorithm = cms.string('eIDCB'), #electronIDType: robust for the simple Cut-Based #electronQuality: see later #electronVersion: use V03 with the offline beam spot electronIDType = cms.string('robust'), electronQuality = cms.string('test'), electronVersion = cms.string('V04'), #### #### Selections with Relative Isolation robust95relIsoEleIDCutsV04 = cms.PSet( barrel = cms.vdouble(1.5e-01, 1.0e-02, 8.0e-01, 7.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 1.5e-01, 2.0e+00, 1.2e-01, 9999., 9999., 9999., 9999., 0.0, -9999., 9999., 9999., 1, -1, 0.0, 0.0, ), # endcap = cms.vdouble(7.0e-02, 3.0e-02, 7.0e-01, 9999., -1, -1, 9999., 9999., 9999., 9999., 9999., 8.0e-02, # 6.0e-02, 5.0e-02, 9999., 9999., 9999., 9999., 0.0, -9999., 9999., 9999., 1, -1, 0.0, 0.0, ), endcap = cms.vdouble(7.0e-02, 3.0e-02, 7.0e-01, 1.0e-02, -1, -1, 9999., 9999., 9999., 9999., 9999., 8.0e-02, 6.0e-02, 5.0e-02, 9999., 9999., 9999., 9999., 0.0, -9999., 9999., 9999., 1, -1, 0.0, 0.0, ), 
), robust90relIsoEleIDCutsV04 = cms.PSet( barrel = cms.vdouble(1.2e-01, 1.0e-02, 8.0e-01, 7.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 1.2e-01, 9.0e-02, 1.0e-01, 9999., 9999., 9999., 9999., 0.0, -9999., 9999., 9999., 1, -1, 0.02, 0.02, ), # endcap = cms.vdouble(5.0e-02, 3.0e-02, 7.0e-01, 9999., -1, -1, 9999., 9999., 9999., 9999., 9999., 5.0e-02, # 6.0e-02, 3.0e-02, 9999., 9999., 9999., 9999., 0.0, -9999., 9999., 9999., 1, -1, 0.02, 0.02, ), endcap = cms.vdouble(5.0e-02, 3.0e-02, 7.0e-01, 9.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 5.0e-02, 6.0e-02, 3.0e-02, 9999., 9999., 9999., 9999., 0.0, -9999., 9999., 9999., 1, -1, 0.02, 0.02, ), ), robust85relIsoEleIDCutsV04 = cms.PSet( barrel = cms.vdouble(4.0e-02, 1.0e-02, 6.0e-02, 6.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 9.0e-02, 8.0e-02, 1.0e-01, 9999., 9999., 9999., 9999., 0.0, -9999., 9999., 9999., 1, -1, 0.02, 0.02, ), # endcap = cms.vdouble(2.5e-02, 3.0e-02, 4.0e-02, 9999., -1, -1, 9999., 9999., 9999., 9999., 9999., 5.0e-02, # 5.0e-02, 2.5e-02, 9999., 9999., 9999., 9999., 0.0, -9999., 9999., 9999., 1, -1, 0.02, 0.02, ), endcap = cms.vdouble(2.5e-02, 3.0e-02, 4.0e-02, 7.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 5.0e-02, 5.0e-02, 2.5e-02, 9999., 9999., 9999., 9999., 0.0, -9999., 9999., 9999., 1, -1, 0.02, 0.02, ), ), robust80relIsoEleIDCutsV04 = cms.PSet( barrel = cms.vdouble(4.0e-02, 1.0e-02, 6.0e-02, 4.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 9.0e-02, 7.0e-02, 1.0e-01, 9999., 9999., 9999., 9999., 0.0, -9999., 9999., 9999., 0, -1, 0.02, 0.02, ), # endcap = cms.vdouble(2.5e-02, 3.0e-02, 3.0e-02, 9999., -1, -1, 9999., 9999., 9999., 9999., 9999., 4.0e-02, # 5.0e-02, 2.5e-02, 9999., 9999., 9999., 9999., 0.0, -9999., 9999., 9999., 0, -1, 0.02, 0.02, ), endcap = cms.vdouble(2.5e-02, 3.0e-02, 3.0e-02, 7.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 4.0e-02, 5.0e-02, 2.5e-02, 9999., 9999., 9999., 9999., 0.0, -9999., 9999., 9999., 0, -1, 0.02, 0.02, ), ), # 70% point modified with 
restricting cuts to physical values robust70relIsoEleIDCutsV04 = cms.PSet( barrel = cms.vdouble(2.5e-02, 1.0e-02, 3.0e-02, 4.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 5.0e-02, 6.0e-02, 3.0e-02, 9999., 9999., 9999., 9999., 0.0, -9999., 9999., 9999., 0, -1, 0.02, 0.02, ), # endcap = cms.vdouble(2.5e-02, 3.0e-02, 2.0e-02, 9999., -1, -1, 9999., 9999., 9999., 9999., 9999., 2.5e-02, # 2.5e-02, 2.0e-02, 9999., 9999., 9999., 9999., 0.0, -9999., 9999., 9999., 0, -1, 0.02, 0.02, ), endcap = cms.vdouble(2.5e-02, 3.0e-02, 2.0e-02, 5.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 2.5e-02, 2.5e-02, 2.0e-02, 9999., 9999., 9999., 9999., 0.0, -9999., 9999., 9999., 0, -1, 0.02, 0.02, ), ), # 60% point modified with restricting cuts to physical values robust60relIsoEleIDCutsV04 = cms.PSet( barrel = cms.vdouble(2.5e-02, 1.0e-02, 2.5e-02, 4.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 4.0e-02, 4.0e-02, 3.0e-02, 9999., 9999., 9999., 9999., 0.0, -9999., 9999., 9999., 0, -1, 0.02, 0.02, ), # endcap = cms.vdouble(2.5e-02, 3.0e-02, 2.0e-02, 9999., -1, -1, 9999., 9999., 9999., 9999., 9999., 2.5e-02, # 2.0e-02, 2.0e-02, 9999., 9999., 9999., 9999., 0.0, -9999., 9999., 9999., 0, -1, 0.02, 0.02, ), endcap = cms.vdouble(2.5e-02, 3.0e-02, 2.0e-02, 5.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 2.5e-02, 2.0e-02, 2.0e-02, 9999., 9999., 9999., 9999., 0.0, -9999., 9999., 9999., 0, -1, 0.02, 0.02, ), ), #### #### Selections with Combined Isolation robust95cIsoEleIDCutsV04 = cms.PSet( barrel = cms.vdouble(1.5e-01, 1.0e-02, 8.0e-01, 7.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 1.5e-01, 0.0, -9999., 9999., 9999., 1, -1, 0.0, 0.0, ), # endcap = cms.vdouble(7.0e-02, 3.0e-02, 7.0e-01, 9999., -1, -1, 9999., 9999., 9999., 9999., 9999., 9999., # 9999., 9999., 9999., 9999., 9999., 1.0e-01, 0.0, -9999., 9999., 9999., 1, -1, 0.0, 0.0, ), endcap = cms.vdouble(7.0e-02, 3.0e-02, 7.0e-01, 1.0e-02, -1, -1, 9999., 9999., 9999., 9999., 9999., 9999., 
9999., 9999., 9999., 9999., 9999., 1.0e-01, 0.0, -9999., 9999., 9999., 1, -1, 0.0, 0.0, ), ), robust90cIsoEleIDCutsV04 = cms.PSet( barrel = cms.vdouble(1.2e-01, 1.0e-02, 8.0e-01, 7.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 1.0e-01, 0.0, -9999., 9999., 9999., 1, -1, 0.02, 0.02, ), # endcap = cms.vdouble(5.0e-02, 3.0e-02, 7.0e-01, 9999., -1, -1, 9999., 9999., 9999., 9999., 9999., 9999., # 9999., 9999., 9999., 9999., 9999., 7.0e-02, 0.0, -9999., 9999., 9999., 1, -1, 0.02, 0.02, ), endcap = cms.vdouble(5.0e-02, 3.0e-02, 7.0e-01, 9.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 7.0e-02, 0.0, -9999., 9999., 9999., 1, -1, 0.02, 0.02, ), ), robust85cIsoEleIDCutsV04 = cms.PSet( barrel = cms.vdouble(4.0e-02, 1.0e-02, 6.0e-02, 6.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9.0e-02, 0.0, -9999., 9999., 9999., 1, -1, 0.02, 0.02, ), # endcap = cms.vdouble(2.5e-02, 3.0e-02, 4.0e-02, 9999., -1, -1, 9999., 9999., 9999., 9999., 9999., 9999., # 9999., 9999., 9999., 9999., 9999., 6.0e-02, 0.0, -9999., 9999., 9999., 1, -1, 0.02, 0.02, ), endcap = cms.vdouble(2.5e-02, 3.0e-02, 4.0e-02, 7.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 6.0e-02, 0.0, -9999., 9999., 9999., 1, -1, 0.02, 0.02, ), ), robust80cIsoEleIDCutsV04 = cms.PSet( barrel = cms.vdouble(4.0e-02, 1.0e-02, 6.0e-02, 4.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 7.0e-02, 0.0, -9999., 9999., 9999., 0, -1, 0.02, 0.02, ), # endcap = cms.vdouble(2.5e-02, 3.0e-02, 3.0e-02, 9999., -1, -1, 9999., 9999., 9999., 9999., 9999., 9999., # 9999., 9999., 9999., 9999., 9999., 6.0e-02, 0.0, -9999., 9999., 9999., 0, -1, 0.02, 0.02, ), endcap = cms.vdouble(2.5e-02, 3.0e-02, 3.0e-02, 7.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 6.0e-02, 0.0, -9999., 9999., 9999., 0, 
-1, 0.02, 0.02, ), ), # 70% point modified with restricting cuts to physical values robust70cIsoEleIDCutsV04 = cms.PSet( barrel = cms.vdouble(2.5e-02, 1.0e-02, 3.0e-02, 4.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 4.0e-02, 0.0, -9999., 9999., 9999., 0, -1, 0.02, 0.02, ), # endcap = cms.vdouble(2.5e-02, 3.0e-02, 2.0e-02, 9999., -1, -1, 9999., 9999., 9999., 9999., 9999., 9999., # 9999., 9999., 9999., 9999., 9999., 3.0e-02, 0.0, -9999., 9999., 9999., 0, -1, 0.02, 0.02, ), endcap = cms.vdouble(2.5e-02, 3.0e-02, 2.0e-02, 5.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 3.0e-02, 0.0, -9999., 9999., 9999., 0, -1, 0.02, 0.02, ), ), # 60% point modified with restricting cuts to physical values robust60cIsoEleIDCutsV04 = cms.PSet( barrel = cms.vdouble(2.5e-02, 1.0e-02, 2.5e-02, 4.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 3.0e-02, 0.0, -9999., 9999., 9999., 0, -1, 0.02, 0.02, ), # endcap = cms.vdouble(2.5e-02, 3.0e-02, 2.0e-02, 9999., -1, -1, 9999., 9999., 9999., 9999., 9999., 9999., # 9999., 9999., 9999., 9999., 9999., 2.0e-02, 0.0, -9999., 9999., 9999., 0, -1, 0.02, 0.02, ), endcap = cms.vdouble(2.5e-02, 3.0e-02, 2.0e-02, 5.0e-03, -1, -1, 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 9999., 2.0e-02, 0.0, -9999., 9999., 9999., 0, -1, 0.02, 0.02, ), ), )
76
135
0.50008
1,895
12,540
3.306596
0.095515
0.482605
0.551548
0.505586
0.687041
0.686882
0.686882
0.686882
0.685605
0.669646
0
0.359228
0.281419
12,540
164
136
76.463415
0.336145
0.419936
0
0.216867
0
0
0.015166
0.003339
0
0
0
0
0
1
0
false
0
0.012048
0
0.012048
0
0
0
0
null
1
1
1
0
0
0
0
0
1
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
0038939c5f0db892859e51eaec9e1461508bf5e4
27
py
Python
bdi/__init__.py
keishinkickback/batch-document-inference
3529810b02d596dd3cb76df2068682919f2f1857
[ "MIT" ]
null
null
null
bdi/__init__.py
keishinkickback/batch-document-inference
3529810b02d596dd3cb76df2068682919f2f1857
[ "MIT" ]
null
null
null
bdi/__init__.py
keishinkickback/batch-document-inference
3529810b02d596dd3cb76df2068682919f2f1857
[ "MIT" ]
null
null
null
from .data import Document
13.5
26
0.814815
4
27
5.5
1
0
0
0
0
0
0
0
0
0
0
0
0.148148
27
1
27
27
0.956522
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
0042768d6138908c121761f91ad62ea71520820c
80
py
Python
Python/Tests/TestData/Grammar/FromFuture25.py
nanshuiyu/pytools
9f9271fe8cf564b4f94e9456d400f4306ea77c23
[ "Apache-2.0" ]
null
null
null
Python/Tests/TestData/Grammar/FromFuture25.py
nanshuiyu/pytools
9f9271fe8cf564b4f94e9456d400f4306ea77c23
[ "Apache-2.0" ]
null
null
null
Python/Tests/TestData/Grammar/FromFuture25.py
nanshuiyu/pytools
9f9271fe8cf564b4f94e9456d400f4306ea77c23
[ "Apache-2.0" ]
null
null
null
from __future__ import with_statement from __future__ import absolute_import
26.666667
39
0.8625
11
80
5.454545
0.636364
0.333333
0.533333
0
0
0
0
0
0
0
0
0
0.125
80
2
40
40
0.842857
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
1
null
null
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
6
cc9991cf06d63b6f2e499739b6bff885b48a5c2d
316
py
Python
ppq/samples/Imagenet/Utilities/Imagenet/__init__.py
openppl-public/ppq
0fdea7d4982bc57feb6bb8548c7f012707fbd607
[ "Apache-2.0" ]
100
2021-12-31T09:34:06.000Z
2022-03-25T02:54:51.000Z
ppq/samples/Imagenet/Utilities/Imagenet/__init__.py
openppl-public/ppq
0fdea7d4982bc57feb6bb8548c7f012707fbd607
[ "Apache-2.0" ]
12
2021-12-31T10:28:15.000Z
2022-03-31T07:08:44.000Z
ppq/samples/Imagenet/Utilities/Imagenet/__init__.py
openppl-public/ppq
0fdea7d4982bc57feb6bb8548c7f012707fbd607
[ "Apache-2.0" ]
21
2021-12-31T09:51:02.000Z
2022-03-30T12:21:55.000Z
from .imagenet_util import (evaluate_mmlab_module_with_imagenet, evaluate_onnx_module_with_imagenet, evaluate_ppq_module_with_imagenet, evaluate_torch_module_with_imagenet, load_imagenet_from_directory)
52.666667
65
0.594937
28
316
6
0.464286
0.238095
0.428571
0.464286
0
0
0
0
0
0
0
0
0.382911
316
5
66
63.2
0.861538
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.2
0
0.2
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
6