hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
2cd41717fb796999729d4531e606430b6c2f2dac
237
py
Python
tests/xoto3/dynamodb/write_versioned/keys_test.py
xoeye/xoto3
ef91cde3cce81e1ded311389358271d5c8eba02b
[ "MIT" ]
16
2020-05-23T15:23:38.000Z
2022-03-18T19:28:37.000Z
tests/xoto3/dynamodb/write_versioned/keys_test.py
xoeye/xoto3
ef91cde3cce81e1ded311389358271d5c8eba02b
[ "MIT" ]
9
2020-08-19T23:08:36.000Z
2021-10-06T17:16:35.000Z
tests/xoto3/dynamodb/write_versioned/keys_test.py
xoeye/xoto3
ef91cde3cce81e1ded311389358271d5c8eba02b
[ "MIT" ]
2
2020-12-12T08:23:53.000Z
2021-09-03T20:25:54.000Z
from xoto3.dynamodb.write_versioned.keys import hashable_key, hashable_key_to_key def test_xf_keys(): assert hashable_key(dict(id=1, group="steve")) == hashable_key( hashable_key_to_key(("group", "id"), ("steve", 1)) )
29.625
81
0.704641
35
237
4.428571
0.542857
0.354839
0.245161
0.283871
0.348387
0.348387
0
0
0
0
0
0.014851
0.147679
237
7
82
33.857143
0.752475
0
0
0
0
0
0.07173
0
0
0
0
0
0.2
1
0.2
true
0
0.2
0
0.4
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
fa220f37f7316f0dd095c12487c5eecdd5bd9edb
107
py
Python
video_based/models/__init__.py
Siyu-C/RobustForensics
344a35516cabc5fb5a8b0d4e4212341695480a18
[ "MIT" ]
22
2020-06-16T07:11:52.000Z
2022-01-27T00:05:54.000Z
video_based/models/__init__.py
Siyu-C/RobustForensics
344a35516cabc5fb5a8b0d4e4212341695480a18
[ "MIT" ]
3
2021-05-24T13:56:11.000Z
2021-09-14T12:48:26.000Z
video_based/models/__init__.py
Siyu-C/RobustForensics
344a35516cabc5fb5a8b0d4e4212341695480a18
[ "MIT" ]
7
2020-07-10T06:24:38.000Z
2021-11-27T12:04:49.000Z
from .slowfast import * def model_entry(config): return globals()[config['arch']](**config['kwargs'])
21.4
56
0.682243
13
107
5.538462
0.846154
0
0
0
0
0
0
0
0
0
0
0
0.121495
107
4
57
26.75
0.765957
0
0
0
0
0
0.093458
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
0
0
0
5
d75a702f61e5a374d3fc2e5e48ac219dfe845c38
26
py
Python
2744.py
BACCHUS-S/Baekjoon
05edab8ea7c7205ca67ef8fe34bf0abd22726a0e
[ "MIT" ]
1
2018-12-27T08:20:52.000Z
2018-12-27T08:20:52.000Z
yukicoder/yuki047.py
knuu/competitive-programming
16bc68fdaedd6f96ae24310d697585ca8836ab6e
[ "MIT" ]
null
null
null
yukicoder/yuki047.py
knuu/competitive-programming
16bc68fdaedd6f96ae24310d697585ca8836ab6e
[ "MIT" ]
1
2018-08-29T13:26:50.000Z
2018-08-29T13:26:50.000Z
print(input().swapcase())
13
25
0.692308
3
26
6
1
0
0
0
0
0
0
0
0
0
0
0
0.038462
26
1
26
26
0.72
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
d772ea400ff6c6b7c2ad1a4466683e482bf48182
150
py
Python
mytoolbox/mysubpackage/mymodule3.py
randolf-scholz/Sphinx-Autosummary-Recursion
e2f707fb0d18639ee077101892f67a76a909f4ec
[ "MIT" ]
39
2020-09-09T18:23:20.000Z
2022-03-12T09:42:54.000Z
mytoolbox/mysubpackage/mymodule3.py
randolf-scholz/Sphinx-Autosummary-Recursion
e2f707fb0d18639ee077101892f67a76a909f4ec
[ "MIT" ]
3
2020-08-19T17:09:48.000Z
2021-11-02T21:53:34.000Z
mytoolbox/mysubpackage/mymodule3.py
randolf-scholz/Sphinx-Autosummary-Recursion
e2f707fb0d18639ee077101892f67a76a909f4ec
[ "MIT" ]
51
2020-06-29T03:00:29.000Z
2022-03-28T13:40:15.000Z
""" Module containing a third class. """ from mytoolbox.mymodule1 import myClass1 class myClass3(myClass1): """This is the third class.""" pass
15
40
0.713333
19
150
5.631579
0.789474
0.186916
0
0
0
0
0
0
0
0
0
0.032
0.166667
150
9
41
16.666667
0.824
0.38
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
0
0
0
5
d78af9ddcc8857ab6d080ba83f5b8c4b5c328088
174
py
Python
tests/web_platform/CSS2/normal_flow/test_block_in_inline_append_002_ref.py
jonboland/colosseum
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
[ "BSD-3-Clause" ]
71
2015-04-13T09:44:14.000Z
2019-03-24T01:03:02.000Z
tests/web_platform/CSS2/normal_flow/test_block_in_inline_append_002_ref.py
jonboland/colosseum
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
[ "BSD-3-Clause" ]
35
2019-05-06T15:26:09.000Z
2022-03-28T06:30:33.000Z
tests/web_platform/CSS2/normal_flow/test_block_in_inline_append_002_ref.py
jonboland/colosseum
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
[ "BSD-3-Clause" ]
139
2015-05-30T18:37:43.000Z
2019-03-27T17:14:05.000Z
from tests.utils import W3CTestCase class TestBlockInInlineAppend002Ref(W3CTestCase): vars().update(W3CTestCase.find_tests(__file__, 'block-in-inline-append-002-ref'))
29
85
0.804598
20
174
6.75
0.85
0
0
0
0
0
0
0
0
0
0
0.05625
0.08046
174
5
86
34.8
0.7875
0
0
0
0
0
0.172414
0.172414
0
0
0
0
0
1
0
true
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
d78b8d56341d15f9ef94d38443ec5cfd970d933f
47
py
Python
custom_components/anna/__init__.py
CoMPaTech/anna-ha
a6d2d55009f8144c736419173b5d1e811fcfc281
[ "MIT" ]
1
2019-05-06T15:23:58.000Z
2019-05-06T15:23:58.000Z
custom_components/anna/__init__.py
CoMPaTech/anna-ha
a6d2d55009f8144c736419173b5d1e811fcfc281
[ "MIT" ]
null
null
null
custom_components/anna/__init__.py
CoMPaTech/anna-ha
a6d2d55009f8144c736419173b5d1e811fcfc281
[ "MIT" ]
null
null
null
"""Plugwise Anna component for HomeAssistant"""
47
47
0.787234
5
47
7.4
1
0
0
0
0
0
0
0
0
0
0
0
0.085106
47
1
47
47
0.860465
0.87234
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
ad11502b76fac0cca56222bef0b1a29b6164cd6a
51
py
Python
uno.py
calebpeffer/Maji_Quest
416ce98bdab6ce30241405f52bf188330f385852
[ "MIT" ]
null
null
null
uno.py
calebpeffer/Maji_Quest
416ce98bdab6ce30241405f52bf188330f385852
[ "MIT" ]
null
null
null
uno.py
calebpeffer/Maji_Quest
416ce98bdab6ce30241405f52bf188330f385852
[ "MIT" ]
null
null
null
import input_verification as iv print("hello")
7.285714
31
0.745098
7
51
5.285714
1
0
0
0
0
0
0
0
0
0
0
0
0.176471
51
6
32
8.5
0.880952
0
0
0
0
0
0.102041
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
ad16dd0f3686310f111a3bef0d039b46c6dc7a3b
2,907
py
Python
module/Npc_pos.py
koidemizu/pyxel_knights_ae
ddd88539a1fefb339ec19960e9a51d8a7a2fe106
[ "MIT" ]
1
2022-02-23T21:39:18.000Z
2022-02-23T21:39:18.000Z
module/Npc_pos.py
koidemizu/pyxel_knights_ae
ddd88539a1fefb339ec19960e9a51d8a7a2fe106
[ "MIT" ]
null
null
null
module/Npc_pos.py
koidemizu/pyxel_knights_ae
ddd88539a1fefb339ec19960e9a51d8a7a2fe106
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- #Npc_pos.py def npc_posx(): pos = { "0-0":[], "1-0":[], "2-0":[], "3-0":[], "4-0":[], "5-0":[13], "6-0":[12,12], "7-0":[107,208], "8-0":[], "9-0":[], "10-0":[], "0-1":[], "1-1":[], "2-1":[], "3-1":[], "4-1":[], "5-1":[], "6-1":[], "7-1":[], "8-1":[7], "9-1":[], "10-1":[], "0-2":[], "1-2":[], "2-2":[], "3-2":[], "4-2":[], "5-2":[], "6-2":[], "7-2":[401,508,612], "8-2":[313], "9-2":[], "10-2":[], "0-3":[], "1-3":[], "2-3":[], "3-3":[], "4-3":[], "5-3":[], "6-3":[], "7-3":[], "8-3":[13], "9-3":[], "10-3":[], "2-8":[1010, 1110], "0-5":[903, 804], "11-2":[1308, 1207], "11-3":[1307], "12-0":[707], "13-3":[1304], } return pos def npc_posy(): pos = { "0-0":[], "1-0":[], "2-0":[], "3-0":[], "4-0":[], "5-0":[9], "6-0":[4,10], "7-0":[107,212], "8-0":[], "9-0":[], "10-0":[], "0-1":[], "1-1":[], "2-1":[], "3-1":[], "4-1":[], "5-1":[], "6-1":[], "7-1":[], "8-1":[5], "9-1":[], "10-1":[], "0-2":[], "1-2":[], "2-2":[], "3-2":[], "4-2":[], "5-2":[], "6-2":[], "7-2":[406,510,607], "8-2":[307], "9-2":[], "10-2":[], "0-3":[], "1-3":[], "2-3":[], "3-3":[], "4-3":[], "5-3":[], "6-3":[], "7-3":[], "8-3":[13], "9-3":[], "10-3":[], "2-8":[1009, 1110], "0-5":[903, 803], "11-2":[1307, 1210], "11-3":[1308], "12-0":[703], "13-3":[1307], } return pos
25.955357
36
0.137255
258
2,907
1.534884
0.182171
0.030303
0.030303
0.030303
0.469697
0.469697
0.469697
0.469697
0.469697
0.469697
0
0.294894
0.609219
2,907
112
37
25.955357
0.053697
0.010664
0
0.740741
0
0
0.114368
0
0
0
0
0
0
1
0.018519
false
0
0
0
0.037037
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
1
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
ad28c5b1310beb34428688e56058d728d626306e
169
py
Python
sudokuless/exceptions.py
lixulun/sudokuless
9c8f017b675dea0953eb4bc06cd729f4ef6337d9
[ "MIT" ]
null
null
null
sudokuless/exceptions.py
lixulun/sudokuless
9c8f017b675dea0953eb4bc06cd729f4ef6337d9
[ "MIT" ]
null
null
null
sudokuless/exceptions.py
lixulun/sudokuless
9c8f017b675dea0953eb4bc06cd729f4ef6337d9
[ "MIT" ]
null
null
null
class FormatError(Exception): pass class InternalError(Exception): pass class ComputeError(Exception): pass class GameError(Exception): pass
15.363636
32
0.692308
16
169
7.3125
0.4375
0.444444
0.461538
0
0
0
0
0
0
0
0
0
0.236686
169
11
33
15.363636
0.906977
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
ad46e664922a47708b67f5ee083dd8ed2d0b8364
112
py
Python
vbb/language/admin.py
VillageBookBuilders/vbb-portal-backend
decdec392f7bd585b73e5554b20c17baea5d133d
[ "MIT" ]
1
2022-03-30T18:12:49.000Z
2022-03-30T18:12:49.000Z
vbb/language/admin.py
VillageBookBuilders/vbb-portal-backend
decdec392f7bd585b73e5554b20c17baea5d133d
[ "MIT" ]
22
2022-02-28T02:37:03.000Z
2022-03-28T02:32:35.000Z
vbb/language/admin.py
VillageBookBuilders/vbb-portal-backend
decdec392f7bd585b73e5554b20c17baea5d133d
[ "MIT" ]
null
null
null
from django.contrib import admin from vbb.language.models import Language admin.sites.site.register(Language)
18.666667
40
0.830357
16
112
5.8125
0.6875
0
0
0
0
0
0
0
0
0
0
0
0.098214
112
5
41
22.4
0.920792
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
d14359cc10417b857b9cf92c200ae061dc8fac71
3,979
py
Python
src/modules/preprocessors.py
gchhablani/toxic-spans-detection
5eeba0c069bef8c707d9c5fef8c6048c98d89ba5
[ "MIT" ]
11
2021-02-25T03:03:37.000Z
2021-10-18T03:51:23.000Z
src/modules/preprocessors.py
gchhablani/toxic-spans-detection
5eeba0c069bef8c707d9c5fef8c6048c98d89ba5
[ "MIT" ]
null
null
null
src/modules/preprocessors.py
gchhablani/toxic-spans-detection
5eeba0c069bef8c707d9c5fef8c6048c98d89ba5
[ "MIT" ]
5
2021-02-25T03:02:07.000Z
2021-05-18T15:59:01.000Z
from src.modules.tokenizers import * from src.modules.embeddings import * from src.utils.mapper import configmapper class Preprocessor: def preprocess(self): pass @configmapper.map("preprocessors", "glove") class GlovePreprocessor(Preprocessor): """GlovePreprocessor.""" def __init__(self, config): """ Args: config (src.utils.module.Config): configuration for preprocessor """ super(GlovePreprocessor, self).__init__() self.config = config self.tokenizer = configmapper.get_object( "tokenizers", self.config.main.preprocessor.tokenizer.name )(**self.config.main.preprocessor.tokenizer.init_params.as_dict()) self.tokenizer_params = ( self.config.main.preprocessor.tokenizer.init_vector_params.as_dict() ) self.tokenizer.initialize_vectors(**self.tokenizer_params) self.embeddings = configmapper.get_object( "embeddings", self.config.main.preprocessor.embedding.name )( self.tokenizer.text_field.vocab.vectors, self.tokenizer.text_field.vocab.stoi[self.tokenizer.text_field.pad_token], ) def preprocess(self, model_config, data_config): train_dataset = configmapper.get_object("datasets", data_config.main.name)( data_config.train, self.tokenizer ) val_dataset = configmapper.get_object("datasets", data_config.main.name)( data_config.val, self.tokenizer ) model = configmapper.get_object("models", model_config.name)( self.embeddings, **model_config.params.as_dict() ) return model, train_dataset, val_dataset @configmapper.map("preprocessors", "clozePreprocessor") class ClozePreprocessor(Preprocessor): """GlovePreprocessor.""" def __init__(self, config): """ Args: config (src.utils.module.Config): configuration for preprocessor """ super(ClozePreprocessor, self).__init__() self.config = config self.tokenizer = configmapper.get_object( "tokenizers", self.config.main.preprocessor.tokenizer.name ).from_pretrained( **self.config.main.preprocessor.tokenizer.init_params.as_dict() ) def preprocess(self, model_config, data_config): train_dataset = configmapper.get_object("datasets", 
data_config.main.name)( data_config.train, self.tokenizer ) val_dataset = configmapper.get_object("datasets", data_config.main.name)( data_config.val, self.tokenizer ) model = configmapper.get_object("models", model_config.name).from_pretrained( **model_config.params.as_dict() ) return model, train_dataset, val_dataset @configmapper.map("preprocessors", "transformersConcretenessPreprocessor") class TransformersConcretenessPreprocessor(Preprocessor): """BertConcretenessPreprocessor.""" def __init__(self, config): """ Args: config (src.utils.module.Config): configuration for preprocessor """ super(TransformersConcretenessPreprocessor, self).__init__() self.config = config self.tokenizer = configmapper.get_object( "tokenizers", self.config.main.preprocessor.tokenizer.name ).from_pretrained( **self.config.main.preprocessor.tokenizer.init_params.as_dict() ) def preprocess(self, model_config, data_config): train_dataset = configmapper.get_object("datasets", data_config.main.name)( data_config.train, self.tokenizer ) val_dataset = configmapper.get_object("datasets", data_config.main.name)( data_config.val, self.tokenizer ) model = configmapper.get_object("models", model_config.name)( **model_config.params.as_dict() ) return model, train_dataset, val_dataset
35.212389
86
0.661221
395
3,979
6.420253
0.141772
0.076893
0.10765
0.082019
0.757098
0.72082
0.705442
0.705442
0.705442
0.685331
0
0
0.231717
3,979
112
87
35.526786
0.829571
0.073385
0
0.493506
0
0
0.056736
0.010061
0
0
0
0
0
1
0.090909
false
0.012987
0.038961
0
0.220779
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
d1672b72a1f37c44bbfba6a7c3b907d00b96ed93
53
py
Python
datasets/exceptions.py
talebzeghmi/datasets
db04bdcdbc7b782eae54991571181badea5e4c7a
[ "Apache-2.0" ]
4
2021-11-01T06:05:41.000Z
2021-12-17T04:06:07.000Z
datasets/exceptions.py
talebzeghmi/datasets
db04bdcdbc7b782eae54991571181badea5e4c7a
[ "Apache-2.0" ]
9
2021-10-31T23:06:27.000Z
2022-02-25T17:47:39.000Z
datasets/exceptions.py
talebzeghmi/datasets
db04bdcdbc7b782eae54991571181badea5e4c7a
[ "Apache-2.0" ]
1
2021-12-03T22:12:49.000Z
2021-12-03T22:12:49.000Z
class InvalidOperationException(Exception): pass
17.666667
43
0.811321
4
53
10.75
1
0
0
0
0
0
0
0
0
0
0
0
0.132075
53
2
44
26.5
0.934783
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
d16bfeb10a7f1a097ab63adfd048d5956bd45741
41
py
Python
tests/examples/image/__init__.py
kostaleonard/mlops
236d3499535d6294768c15336180217829fb2ee3
[ "MIT" ]
1
2021-11-26T21:41:00.000Z
2021-11-26T21:41:00.000Z
tests/examples/image/__init__.py
kostaleonard/mlops
236d3499535d6294768c15336180217829fb2ee3
[ "MIT" ]
39
2021-11-18T20:01:34.000Z
2022-03-26T17:59:07.000Z
tests/examples/image/__init__.py
kostaleonard/mlops
236d3499535d6294768c15336180217829fb2ee3
[ "MIT" ]
null
null
null
"""Tests image-based example modules."""
20.5
40
0.707317
5
41
5.8
1
0
0
0
0
0
0
0
0
0
0
0
0.097561
41
1
41
41
0.783784
0.829268
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
0f40d21ca85634ec897aa46055db867481f8b09d
75
py
Python
watchanimeapi/__init__.py
michalecznik123/ani-cli
274f0b71d424349cb6581d63062a3ba5195af8b1
[ "Unlicense" ]
2
2022-03-20T13:13:22.000Z
2022-03-20T13:23:42.000Z
watchanimeapi/__init__.py
michalecznik123/ani-cli
274f0b71d424349cb6581d63062a3ba5195af8b1
[ "Unlicense" ]
null
null
null
watchanimeapi/__init__.py
michalecznik123/ani-cli
274f0b71d424349cb6581d63062a3ba5195af8b1
[ "Unlicense" ]
null
null
null
from . import ( models, common, ) from . import main as watchanime
12.5
32
0.64
9
75
5.333333
0.777778
0.416667
0
0
0
0
0
0
0
0
0
0
0.28
75
5
33
15
0.888889
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.4
0
0.4
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
0f5bf76cf9fc7198bf26a63b345d727789a692a4
219
py
Python
beerreviews/beers/admin.py
HE-Arc/BeerReviews
12b4af25b317683c22a93ea43bcb37bc7954f249
[ "Apache-2.0" ]
1
2018-02-28T17:05:21.000Z
2018-02-28T17:05:21.000Z
beerreviews/beers/admin.py
HE-Arc/BeerReviews
12b4af25b317683c22a93ea43bcb37bc7954f249
[ "Apache-2.0" ]
10
2018-03-07T15:10:09.000Z
2018-04-08T19:38:11.000Z
beerreviews/beers/admin.py
HE-Arc/BeerReviews
12b4af25b317683c22a93ea43bcb37bc7954f249
[ "Apache-2.0" ]
null
null
null
from django.contrib import admin from .models import Beer, Maker, Style, Review # Register your models here. admin.site.register(Beer) admin.site.register(Maker) admin.site.register(Style) admin.site.register(Review)
21.9
46
0.794521
32
219
5.4375
0.4375
0.206897
0.390805
0
0
0
0
0
0
0
0
0
0.100457
219
9
47
24.333333
0.883249
0.118721
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
0f5c3d0e0bb311a68bf23eb39632e2a6fdc81acc
64
py
Python
package/aistore/client/__init__.py
NVIDIA/ais-etl
e60e4c5a8be208379916fc245fd874f670336ce2
[ "MIT" ]
4
2020-08-08T19:39:33.000Z
2021-06-02T19:14:34.000Z
package/aistore/client/__init__.py
NVIDIA/ais-etl
e60e4c5a8be208379916fc245fd874f670336ce2
[ "MIT" ]
null
null
null
package/aistore/client/__init__.py
NVIDIA/ais-etl
e60e4c5a8be208379916fc245fd874f670336ce2
[ "MIT" ]
4
2020-10-28T19:49:15.000Z
2022-03-28T23:21:02.000Z
from .api import Client from .const import * from .msg import *
16
23
0.734375
10
64
4.7
0.6
0
0
0
0
0
0
0
0
0
0
0
0.1875
64
3
24
21.333333
0.903846
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
7e1e06774cf056d315e5c27fc8a2da92e7aa48c6
132
py
Python
state_machine/__init__.py
Hillerr/ocpp-op
f68ad19d31a1861825a2ec4660cc0a62db39e51e
[ "MIT" ]
1
2020-07-27T00:10:19.000Z
2020-07-27T00:10:19.000Z
state_machine/__init__.py
Hillerr/ocpp-op
f68ad19d31a1861825a2ec4660cc0a62db39e51e
[ "MIT" ]
1
2020-07-27T20:58:54.000Z
2020-07-27T20:58:54.000Z
state_machine/__init__.py
Hillerr/ocpp-op
f68ad19d31a1861825a2ec4660cc0a62db39e51e
[ "MIT" ]
1
2020-07-27T00:28:36.000Z
2020-07-27T00:28:36.000Z
from .state import State from .states_manager import StatesManager from .state_machine import StateMachine from .exceptions import *
33
41
0.848485
17
132
6.470588
0.529412
0.163636
0
0
0
0
0
0
0
0
0
0
0.113636
132
4
42
33
0.940171
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
7e3929b057b65ee500819a2d818cc9f52af1f879
66
py
Python
treemaker/__init__.py
SimonGreenhill/TreeMaker
4a6f22400561b21667ec34f34d8bda8636700fb6
[ "BSD-3-Clause" ]
4
2017-05-25T19:18:02.000Z
2021-04-28T16:15:43.000Z
treemaker/__init__.py
SimonGreenhill/TreeMaker
4a6f22400561b21667ec34f34d8bda8636700fb6
[ "BSD-3-Clause" ]
64
2017-03-03T00:38:05.000Z
2022-03-28T08:07:38.000Z
treemaker/__init__.py
SimonGreenhill/TreeMaker
4a6f22400561b21667ec34f34d8bda8636700fb6
[ "BSD-3-Clause" ]
3
2018-11-01T13:52:42.000Z
2019-06-19T02:37:53.000Z
from .treemaker import VERSION, Tree, TreeMaker, parse_args, main
33
65
0.80303
9
66
5.777778
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.121212
66
1
66
66
0.896552
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
7e62653b440a87d66a92d0e3ae6297d1f0ff56a8
96
py
Python
slack/settings.py
imsure-group/response
6429a4a1dd5420ce779600727a5cdd377d09396e
[ "MIT" ]
1
2019-06-11T13:30:50.000Z
2019-06-11T13:30:50.000Z
slack/settings.py
imsure-group/response
6429a4a1dd5420ce779600727a5cdd377d09396e
[ "MIT" ]
9
2020-03-24T18:14:57.000Z
2022-02-10T10:15:04.000Z
slack/settings.py
joshedney/slack-incident-bot
186b34bbc8f10c66adf76d39af1b6cf2be45d249
[ "MIT" ]
null
null
null
INCIDENT_REPORT_DIALOG = "incident-report-dialog" INCIDENT_EDIT_DIALOG = "incident-edit-dialog"
32
49
0.833333
12
96
6.333333
0.333333
0.552632
0.526316
0.736842
0
0
0
0
0
0
0
0
0.0625
96
2
50
48
0.844444
0
0
0
0
0
0.4375
0.229167
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
7e6b47aa5aa6bdb99149c55e12de6c5819b60a6a
221
py
Python
direct_cloud_upload/admin.py
koendewit/django-direct-cloud-upload
2dbc12be87f0dc048671e88adbeea6f7179fcc7f
[ "BSD-3-Clause" ]
6
2020-01-21T10:24:15.000Z
2021-05-18T10:10:01.000Z
direct_cloud_upload/admin.py
koendewit/django-direct-cloud-upload
2dbc12be87f0dc048671e88adbeea6f7179fcc7f
[ "BSD-3-Clause" ]
2
2021-04-18T20:33:47.000Z
2021-05-18T10:58:52.000Z
direct_cloud_upload/admin.py
koendewit/django-direct-cloud-upload
2dbc12be87f0dc048671e88adbeea6f7179fcc7f
[ "BSD-3-Clause" ]
1
2020-08-28T13:29:38.000Z
2020-08-28T13:29:38.000Z
from django.contrib.admin import ModelAdmin class DdcuAdminMixin(ModelAdmin): class Media: css = {'all': ("direct_cloud_upload/cloud_file_widget.css", )} js = ("direct_cloud_upload/ddcu_upload.js", )
31.571429
70
0.705882
27
221
5.518519
0.666667
0.201342
0.228188
0
0
0
0
0
0
0
0
0
0.171946
221
6
71
36.833333
0.814208
0
0
0
0
0
0.352941
0.339367
0
0
0
0
0
1
0
false
0
0.2
0
0.6
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
5
0e21936fb44126d6198903314b981a440fe963dc
18,596
py
Python
assertpy/numeric.py
santunioni/assertpy
c970c6612a80aa10769dc612324630d27019e1b5
[ "BSD-3-Clause" ]
138
2019-11-19T11:58:50.000Z
2022-03-29T21:30:33.000Z
assertpy/numeric.py
santunioni/assertpy
c970c6612a80aa10769dc612324630d27019e1b5
[ "BSD-3-Clause" ]
24
2019-11-17T21:13:22.000Z
2022-03-18T21:57:33.000Z
assertpy/numeric.py
santunioni/assertpy
c970c6612a80aa10769dc612324630d27019e1b5
[ "BSD-3-Clause" ]
23
2019-11-17T15:47:15.000Z
2022-03-18T21:25:51.000Z
# Copyright (c) 2015-2019, Activision Publishing, Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # 3. Neither the name of the copyright holder nor the names of its contributors # may be used to endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON # ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
from __future__ import division
import sys
import math
import numbers
import datetime

# pytest convention: hide this module's frames from assertion tracebacks so
# failures point at the calling test, not at the assertion machinery.
__tracebackhide__ = True


class NumericMixin(object):
    """Numeric assertions mixin.

    Provides numeric comparison assertions (zero, NaN/Inf, ordering, ranges,
    closeness) for an assertion-builder object.  Relies on the host class
    supplying ``self.val`` (the value under test), ``self.error(msg)``,
    ``self.is_equal_to`` / ``self.is_not_equal_to``, and the validators
    ``_validate_between_args`` / ``_validate_close_to_args`` — all defined
    elsewhere in the package.
    """

    # Date/time family types may only be ordered against the *exact same* type.
    _NUMERIC_COMPAREABLE = set([datetime.datetime, datetime.timedelta, datetime.date, datetime.time])
    # Complex numbers have no total ordering, so ordering assertions reject them.
    _NUMERIC_NON_COMPAREABLE = set([complex])

    def _validate_compareable(self, other):
        """Raise TypeError unless ``self.val`` and ``other`` can be ordered together."""
        self_type = type(self.val)
        other_type = type(other)

        if self_type in self._NUMERIC_NON_COMPAREABLE:
            raise TypeError('ordering is not defined for type <%s>' % self_type.__name__)
        if self_type in self._NUMERIC_COMPAREABLE:
            # Date/time values must be compared against the identical type
            # (e.g. datetime vs datetime, never datetime vs date).
            if other_type is not self_type:
                raise TypeError('given arg must be <%s>, but was <%s>' % (self_type.__name__, other_type.__name__))
            return
        if isinstance(self.val, numbers.Number):
            # Plain numbers may be compared against any other number type.
            if not isinstance(other, numbers.Number):
                raise TypeError('given arg must be a number, but was <%s>' % other_type.__name__)
            return
        raise TypeError('ordering is not defined for type <%s>' % self_type.__name__)

    def _validate_number(self):
        """Raise TypeError if val is not numeric."""
        if isinstance(self.val, numbers.Number) is False:
            raise TypeError('val is not numeric')

    def _validate_real(self):
        """Raise TypeError if val is not real number."""
        if isinstance(self.val, numbers.Real) is False:
            raise TypeError('val is not real number')

    def is_zero(self):
        """Asserts that val is numeric and is zero.

        Examples:
            Usage::

                assert_that(0).is_zero()

        Returns:
            AssertionBuilder: returns this instance to chain to the next assertion

        Raises:
            AssertionError: if val is **not** zero
        """
        self._validate_number()
        return self.is_equal_to(0)

    def is_not_zero(self):
        """Asserts that val is numeric and is *not* zero.

        Examples:
            Usage::

                assert_that(1).is_not_zero()
                assert_that(123.4).is_not_zero()

        Returns:
            AssertionBuilder: returns this instance to chain to the next assertion

        Raises:
            AssertionError: if val **is** zero
        """
        self._validate_number()
        return self.is_not_equal_to(0)

    def is_nan(self):
        """Asserts that val is real number and is ``NaN`` (not a number).

        Examples:
            Usage::

                assert_that(float('nan')).is_nan()
                assert_that(float('inf') * 0).is_nan()

        Returns:
            AssertionBuilder: returns this instance to chain to the next assertion

        Raises:
            AssertionError: if val is **not** NaN
        """
        self._validate_number()
        self._validate_real()
        if not math.isnan(self.val):
            return self.error('Expected <%s> to be <NaN>, but was not.' % self.val)
        return self

    def is_not_nan(self):
        """Asserts that val is real number and is *not* ``NaN`` (not a number).

        Examples:
            Usage::

                assert_that(0).is_not_nan()
                assert_that(123.4).is_not_nan()
                assert_that(float('inf')).is_not_nan()

        Returns:
            AssertionBuilder: returns this instance to chain to the next assertion

        Raises:
            AssertionError: if val **is** NaN
        """
        self._validate_number()
        self._validate_real()
        if math.isnan(self.val):
            return self.error('Expected not <NaN>, but was.')
        return self

    def is_inf(self):
        """Asserts that val is real number and is ``Inf`` (infinity).

        Examples:
            Usage::

                assert_that(float('inf')).is_inf()
                assert_that(float('inf') * 1).is_inf()

        Returns:
            AssertionBuilder: returns this instance to chain to the next assertion

        Raises:
            AssertionError: if val is **not** Inf
        """
        self._validate_number()
        self._validate_real()
        if not math.isinf(self.val):
            return self.error('Expected <%s> to be <Inf>, but was not.' % self.val)
        return self

    def is_not_inf(self):
        """Asserts that val is real number and is *not* ``Inf`` (infinity).

        Examples:
            Usage::

                assert_that(0).is_not_inf()
                assert_that(123.4).is_not_inf()
                assert_that(float('nan')).is_not_inf()

        Returns:
            AssertionBuilder: returns this instance to chain to the next assertion

        Raises:
            AssertionError: if val **is** Inf
        """
        self._validate_number()
        self._validate_real()
        if math.isinf(self.val):
            return self.error('Expected not <Inf>, but was.')
        return self

    def is_greater_than(self, other):
        """Asserts that val is numeric and is greater than other.

        Args:
            other: the other date, expected to be less than val

        Examples:
            Usage::

                assert_that(1).is_greater_than(0)
                assert_that(123.4).is_greater_than(111.1)

            For dates, behavior is identical to
            :meth:`~assertpy.date.DateMixin.is_after`::

                import datetime

                today = datetime.datetime.now()
                yesterday = today - datetime.timedelta(days=1)

                assert_that(today).is_greater_than(yesterday)

        Returns:
            AssertionBuilder: returns this instance to chain to the next assertion

        Raises:
            AssertionError: if val is **not** greater than other
        """
        self._validate_compareable(other)
        if self.val <= other:
            # datetimes are rendered with a fixed human-readable format.
            if type(self.val) is datetime.datetime:
                return self.error('Expected <%s> to be greater than <%s>, but was not.' % (
                    self.val.strftime('%Y-%m-%d %H:%M:%S'), other.strftime('%Y-%m-%d %H:%M:%S')))
            else:
                return self.error('Expected <%s> to be greater than <%s>, but was not.' % (self.val, other))
        return self

    def is_greater_than_or_equal_to(self, other):
        """Asserts that val is numeric and is greater than or equal to other.

        Args:
            other: the other date, expected to be less than or equal to val

        Examples:
            Usage::

                assert_that(1).is_greater_than_or_equal_to(0)
                assert_that(1).is_greater_than_or_equal_to(1)
                assert_that(123.4).is_greater_than_or_equal_to(111.1)

            For dates, behavior is identical to
            :meth:`~assertpy.date.DateMixin.is_after` *except* when equal::

                import datetime

                today = datetime.datetime.now()
                yesterday = today - datetime.timedelta(days=1)

                assert_that(today).is_greater_than_or_equal_to(yesterday)
                assert_that(today).is_greater_than_or_equal_to(today)

        Returns:
            AssertionBuilder: returns this instance to chain to the next assertion

        Raises:
            AssertionError: if val is **not** greater than or equal to other
        """
        self._validate_compareable(other)
        if self.val < other:
            if type(self.val) is datetime.datetime:
                return self.error('Expected <%s> to be greater than or equal to <%s>, but was not.' % (
                    self.val.strftime('%Y-%m-%d %H:%M:%S'), other.strftime('%Y-%m-%d %H:%M:%S')))
            else:
                return self.error('Expected <%s> to be greater than or equal to <%s>, but was not.' % (self.val, other))
        return self

    def is_less_than(self, other):
        """Asserts that val is numeric and is less than other.

        Args:
            other: the other date, expected to be greater than val

        Examples:
            Usage::

                assert_that(0).is_less_than(1)
                assert_that(123.4).is_less_than(555.5)

            For dates, behavior is identical to
            :meth:`~assertpy.date.DateMixin.is_before`::

                import datetime

                today = datetime.datetime.now()
                yesterday = today - datetime.timedelta(days=1)

                assert_that(yesterday).is_less_than(today)

        Returns:
            AssertionBuilder: returns this instance to chain to the next assertion

        Raises:
            AssertionError: if val is **not** less than other
        """
        self._validate_compareable(other)
        if self.val >= other:
            if type(self.val) is datetime.datetime:
                return self.error('Expected <%s> to be less than <%s>, but was not.' % (
                    self.val.strftime('%Y-%m-%d %H:%M:%S'), other.strftime('%Y-%m-%d %H:%M:%S')))
            else:
                return self.error('Expected <%s> to be less than <%s>, but was not.' % (self.val, other))
        return self

    def is_less_than_or_equal_to(self, other):
        """Asserts that val is numeric and is less than or equal to other.

        Args:
            other: the other date, expected to be greater than or equal to val

        Examples:
            Usage::

                assert_that(1).is_less_than_or_equal_to(0)
                assert_that(1).is_less_than_or_equal_to(1)
                assert_that(123.4).is_less_than_or_equal_to(100.0)

            For dates, behavior is identical to
            :meth:`~assertpy.date.DateMixin.is_before` *except* when equal::

                import datetime

                today = datetime.datetime.now()
                yesterday = today - datetime.timedelta(days=1)

                assert_that(yesterday).is_less_than_or_equal_to(today)
                assert_that(today).is_less_than_or_equal_to(today)

        Returns:
            AssertionBuilder: returns this instance to chain to the next assertion

        Raises:
            AssertionError: if val is **not** less than or equal to other
        """
        self._validate_compareable(other)
        if self.val > other:
            if type(self.val) is datetime.datetime:
                return self.error('Expected <%s> to be less than or equal to <%s>, but was not.' % (
                    self.val.strftime('%Y-%m-%d %H:%M:%S'), other.strftime('%Y-%m-%d %H:%M:%S')))
            else:
                return self.error('Expected <%s> to be less than or equal to <%s>, but was not.' % (self.val, other))
        return self

    def is_positive(self):
        """Asserts that val is numeric and is greater than zero.

        Examples:
            Usage::

                assert_that(1).is_positive()
                assert_that(123.4).is_positive()

        Returns:
            AssertionBuilder: returns this instance to chain to the next assertion

        Raises:
            AssertionError: if val is **not** positive
        """
        # Delegates to is_greater_than, which also validates comparability.
        return self.is_greater_than(0)

    def is_negative(self):
        """Asserts that val is numeric and is less than zero.

        Examples:
            Usage::

                assert_that(-1).is_negative()
                assert_that(-123.4).is_negative()

        Returns:
            AssertionBuilder: returns this instance to chain to the next assertion

        Raises:
            AssertionError: if val is **not** negative
        """
        return self.is_less_than(0)

    def is_between(self, low, high):
        """Asserts that val is numeric and is between low and high.

        Args:
            low: the low value
            high: the high value

        Examples:
            Usage::

                assert_that(1).is_between(0, 2)
                assert_that(123.4).is_between(111.1, 222.2)

            For dates, works as expected::

                import datetime

                today = datetime.datetime.now()
                middle = today - datetime.timedelta(hours=12)
                yesterday = today - datetime.timedelta(days=1)

                assert_that(middle).is_between(yesterday, today)

        Returns:
            AssertionBuilder: returns this instance to chain to the next assertion

        Raises:
            AssertionError: if val is **not** between low and high
        """
        val_type = type(self.val)
        # _validate_between_args is provided by the host assertion builder.
        self._validate_between_args(val_type, low, high)

        # Inclusive bounds: low <= val <= high passes.
        if self.val < low or self.val > high:
            if val_type is datetime.datetime:
                return self.error('Expected <%s> to be between <%s> and <%s>, but was not.' % (
                    self.val.strftime('%Y-%m-%d %H:%M:%S'), low.strftime('%Y-%m-%d %H:%M:%S'), high.strftime('%Y-%m-%d %H:%M:%S')))
            else:
                return self.error('Expected <%s> to be between <%s> and <%s>, but was not.' % (self.val, low, high))
        return self

    def is_not_between(self, low, high):
        """Asserts that val is numeric and is *not* between low and high.

        Args:
            low: the low value
            high: the high value

        Examples:
            Usage::

                assert_that(1).is_not_between(2, 3)
                assert_that(1.1).is_not_between(2.2, 3.3)

        Returns:
            AssertionBuilder: returns this instance to chain to the next assertion

        Raises:
            AssertionError: if val **is** between low and high
        """
        val_type = type(self.val)
        self._validate_between_args(val_type, low, high)

        # Exact logical negation of is_between (bounds remain inclusive).
        if self.val >= low and self.val <= high:
            if val_type is datetime.datetime:
                return self.error('Expected <%s> to not be between <%s> and <%s>, but was.' % (
                    self.val.strftime('%Y-%m-%d %H:%M:%S'), low.strftime('%Y-%m-%d %H:%M:%S'), high.strftime('%Y-%m-%d %H:%M:%S')))
            else:
                return self.error('Expected <%s> to not be between <%s> and <%s>, but was.' % (self.val, low, high))
        return self

    def is_close_to(self, other, tolerance):
        """Asserts that val is numeric and is close to other within tolerance.

        Args:
            other: the other value, expected to be close to val within tolerance
            tolerance: the tolerance

        Examples:
            Usage::

                assert_that(123).is_close_to(100, 25)
                assert_that(123.4).is_close_to(123, 0.5)

            For dates, works as expected::

                import datetime

                today = datetime.datetime.now()
                yesterday = today - datetime.timedelta(days=1)

                assert_that(today).is_close_to(yesterday, datetime.timedelta(hours=36))

        Returns:
            AssertionBuilder: returns this instance to chain to the next assertion

        Raises:
            AssertionError: if val is **not** close to other within tolerance
        """
        # _validate_close_to_args is provided by the host assertion builder.
        self._validate_close_to_args(self.val, other, tolerance)

        # Closeness is inclusive: other - tolerance <= val <= other + tolerance.
        if self.val < (other-tolerance) or self.val > (other+tolerance):
            if type(self.val) is datetime.datetime:
                # Render a timedelta tolerance as H:MM:SS for the error message.
                tolerance_seconds = tolerance.days * 86400 + tolerance.seconds + tolerance.microseconds / 1000000
                h, rem = divmod(tolerance_seconds, 3600)
                m, s = divmod(rem, 60)
                return self.error('Expected <%s> to be close to <%s> within tolerance <%d:%02d:%02d>, but was not.' % (
                    self.val.strftime('%Y-%m-%d %H:%M:%S'), other.strftime('%Y-%m-%d %H:%M:%S'), h, m, s))
            else:
                return self.error('Expected <%s> to be close to <%s> within tolerance <%s>, but was not.' % (self.val, other, tolerance))
        return self

    def is_not_close_to(self, other, tolerance):
        """Asserts that val is numeric and is *not* close to other within tolerance.

        Args:
            other: the other value
            tolerance: the tolerance

        Examples:
            Usage::

                assert_that(123).is_not_close_to(100, 22)
                assert_that(123.4).is_not_close_to(123, 0.1)

        Returns:
            AssertionBuilder: returns this instance to chain to the next assertion

        Raises:
            AssertionError: if val **is** close to other within tolerance
        """
        self._validate_close_to_args(self.val, other, tolerance)

        # Exact logical negation of is_close_to.
        if self.val >= (other-tolerance) and self.val <= (other+tolerance):
            if type(self.val) is datetime.datetime:
                tolerance_seconds = tolerance.days * 86400 + tolerance.seconds + tolerance.microseconds / 1000000
                h, rem = divmod(tolerance_seconds, 3600)
                m, s = divmod(rem, 60)
                return self.error('Expected <%s> to not be close to <%s> within tolerance <%d:%02d:%02d>, but was.' % (
                    self.val.strftime('%Y-%m-%d %H:%M:%S'), other.strftime('%Y-%m-%d %H:%M:%S'), h, m, s))
            else:
                return self.error('Expected <%s> to not be close to <%s> within tolerance <%s>, but was.' % (self.val, other, tolerance))
        return self
36.178988
165
0.587062
2,399
18,596
4.42476
0.10296
0.031653
0.022798
0.026943
0.809515
0.779934
0.736034
0.704757
0.66293
0.609797
0
0.01552
0.310497
18,596
513
166
36.249513
0.812354
0.492525
0
0.457746
0
0.014085
0.20652
0
0
0
0
0
0
1
0.133803
false
0
0.035211
0
0.457746
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
0e2eab7a4cc8d7ffa1b83d8a423e57fc326b1735
34
py
Python
Segmentation/slic.py
Joevaen/Scikit-image_On_CT
e3bf0eeadc50691041b4b7c44a19d07546a85001
[ "Apache-2.0" ]
null
null
null
Segmentation/slic.py
Joevaen/Scikit-image_On_CT
e3bf0eeadc50691041b4b7c44a19d07546a85001
[ "Apache-2.0" ]
null
null
null
Segmentation/slic.py
Joevaen/Scikit-image_On_CT
e3bf0eeadc50691041b4b7c44a19d07546a85001
[ "Apache-2.0" ]
null
null
null
# 在Color-(x,y,z)空间中使用k-均值聚类对图像进行分割
34
34
0.764706
6
34
4.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.029412
34
1
34
34
0.787879
0.941176
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
0e33a5f9edee75437608ef090671a547e81e7b32
5,105
py
Python
tests/test_nightking.py
itamaro/gcp-night-king
ae5cfee35a1ff73c1dac6f7058a966bb7e1ce45a
[ "Apache-2.0" ]
2
2019-02-02T07:13:59.000Z
2019-05-25T22:26:53.000Z
tests/test_nightking.py
itamaro/gcp-night-king
ae5cfee35a1ff73c1dac6f7058a966bb7e1ce45a
[ "Apache-2.0" ]
7
2019-10-19T18:26:13.000Z
2021-06-05T18:30:44.000Z
tests/test_nightking.py
itamaro/gcp-night-king
ae5cfee35a1ff73c1dac6f7058a966bb7e1ce45a
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*-
# Copyright 2017 Itamar Ostricher

"""Unit tests for the night-king lurker Pub/Sub message callback.

FIX: the original tests passed ``auto_spec=True`` to every ``mocker.patch``
call.  That is not a recognized ``mock.patch`` keyword (the real one is
``autospec``), so mock silently attached it as a plain attribute on the
created mock and no spec checking ever happened.  The bogus kwarg is removed
here; switching to a real ``autospec=True`` would also require adding the
bound instance (``mock.ANY``) as the first argument of every
``assert_called_once_with`` below, which is a larger behavioral change.
"""

import json
from unittest import mock

from apiclient import errors
import pytest

from nightking import lurker


@pytest.fixture(scope='module')
def callback():
    """Build the Pub/Sub callback under test once for the whole module."""
    return lurker.make_callback('path', 'project')


class MockPubSubMessage:
    """A mock class for Pub/Sub messages"""

    def __init__(self, data):
        self.data = data
        self.acked = False

    def ack(self):
        self.acked = True


def test_resurrect(mocker, callback):
    """Test that when a VM is in TERMINATED state it is restarted."""
    mocker.patch.object(lurker.GoogleCloud, '__init__', return_value=None)
    mocker.patch.object(lurker.GoogleCloud, 'get_instance',
                        return_value={'status': 'TERMINATED'})
    mocker.patch.object(lurker.GoogleCloud, 'start_instance')
    message = MockPubSubMessage('{"name": "foo", "zone": "bar"}')
    callback(message)
    assert message.acked is True
    lurker.GoogleCloud.get_instance.assert_called_once_with('bar', 'foo')
    lurker.GoogleCloud.start_instance.assert_called_once_with('bar', 'foo')


def test_resurrect_realistic_flow(mocker, callback):
    """Test that when a VM is RUNNING -> STOPPING -> TERMINATED,
    it is restarted eventually."""
    mocker.patch.object(lurker.GoogleCloud, '__init__', return_value=None)
    mocker.patch.object(
        lurker.GoogleCloud, 'get_instance',
        side_effect=[
            {'status': 'RUNNING'},
            {'status': 'RUNNING'},
            {'status': 'RUNNING'},
            {'status': 'STOPPING'},
            {'status': 'TERMINATED'},
        ])
    mocker.patch.object(lurker.GoogleCloud, 'start_instance')
    mocker.patch('time.sleep')  # skip the wait
    message = MockPubSubMessage('{"name": "foo", "zone": "bar"}')
    callback(message)
    assert message.acked is True
    assert 5 == lurker.GoogleCloud.get_instance.call_count
    lurker.GoogleCloud.start_instance.assert_called_once_with('bar', 'foo')


def test_resurrect_still_running(mocker, callback):
    """Test that when a VM is in RUNNING state for a while nothing happens."""
    mocker.patch.object(lurker.GoogleCloud, '__init__', return_value=None)
    mocker.patch.object(lurker.GoogleCloud, 'get_instance',
                        return_value={'status': 'RUNNING'})
    mocker.patch.object(lurker.GoogleCloud, 'start_instance')
    mocker.patch('time.sleep')  # skip the wait
    message = MockPubSubMessage('{"name": "foo", "zone": "bar"}')
    callback(message)
    assert message.acked is True
    assert lurker.GoogleCloud.get_instance.call_count > 2
    lurker.GoogleCloud.start_instance.assert_not_called()


def test_resurrect_stopping(mocker, callback):
    """Test that when a VM is in STOPPING state it will be restarted
    after it reaches the TERMINATED state."""
    mocker.patch.object(lurker.GoogleCloud, '__init__', return_value=None)
    mocker.patch.object(
        lurker.GoogleCloud, 'get_instance',
        side_effect=[{'status': 'STOPPING'}, {'status': 'TERMINATED'}])
    mocker.patch.object(lurker.GoogleCloud, 'start_instance')
    mocker.patch('time.sleep')  # skip the wait
    message = MockPubSubMessage('{"name": "foo", "zone": "bar"}')
    callback(message)
    assert message.acked is True
    assert 2 == lurker.GoogleCloud.get_instance.call_count
    lurker.GoogleCloud.start_instance.assert_called_once_with('bar', 'foo')


def test_resurrect_no_such_vm(mocker, callback):
    """Test that when there's no such VM nothing happens."""
    mocker.patch.object(lurker.GoogleCloud, '__init__', return_value=None)
    mocker.patch.object(lurker.GoogleCloud, 'get_instance',
                        side_effect=errors.HttpError)
    mocker.patch.object(lurker.GoogleCloud, 'start_instance')
    message = MockPubSubMessage('{"name": "foo", "zone": "bar"}')
    callback(message)
    assert message.acked is True
    lurker.GoogleCloud.get_instance.assert_called_once_with('bar', 'foo')
    lurker.GoogleCloud.start_instance.assert_not_called()


def test_invalid_json(mocker, callback):
    """Test that invalid JSON doesn't crash the service, and message is ACKed."""
    mocker.patch('nightking.lurker.resurrect_instance')
    message = MockPubSubMessage('foo')
    callback(message)
    assert message.acked is True
    lurker.resurrect_instance.assert_not_called()


def test_missing_fields(mocker, callback):
    """Test that missing JSON doesn't crash the service, and message is ACKed."""
    mocker.spy(lurker, 'resurrect_instance')
    mocker.patch.object(lurker.GoogleCloud, '__init__', return_value=None)
    mocker.patch.object(lurker.GoogleCloud, 'get_instance')
    message = MockPubSubMessage('{"foo": "bar"}')
    callback(message)
    assert message.acked is True
    lurker.resurrect_instance.assert_called_once()
    lurker.GoogleCloud.get_instance.assert_not_called()
38.674242
79
0.710872
636
5,105
5.504717
0.174528
0.135961
0.082548
0.111682
0.779206
0.752642
0.730648
0.730648
0.721794
0.673236
0
0.001869
0.161606
5,105
131
80
38.969466
0.816122
0.125563
0
0.583333
0
0
0.139487
0.007938
0
0
0
0
0.208333
1
0.104167
false
0
0.052083
0.010417
0.177083
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
0e5adc46602db0b18921f89167e4b888d87d0051
180
py
Python
uchat_backend/chatapp/admin.py
harshkothari410/uChat
2367894dd0aa60ec1db68dd91925090cbbd37087
[ "MIT" ]
null
null
null
uchat_backend/chatapp/admin.py
harshkothari410/uChat
2367894dd0aa60ec1db68dd91925090cbbd37087
[ "MIT" ]
null
null
null
uchat_backend/chatapp/admin.py
harshkothari410/uChat
2367894dd0aa60ec1db68dd91925090cbbd37087
[ "MIT" ]
null
null
null
from django.contrib import admin

# FIX: replaced `from .models import *` with explicit imports — wildcard
# imports hide which names are used and can silently shadow other names.
from .models import ChatRoom, ChatRoomMember, Message

# Register the chat models so they are manageable in the Django admin site.
admin.site.register(ChatRoom)
admin.site.register(Message)
admin.site.register(ChatRoomMember)
22.5
35
0.811111
24
180
6.083333
0.541667
0.184932
0.349315
0
0
0
0
0
0
0
0
0
0.094444
180
8
35
22.5
0.895706
0.144444
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.4
0
0.4
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
0ea2436142669342fa30d04d041a31fa7dcc8bd3
194
py
Python
src/demo_tezos_domains_big_map/types/name_registry/big_map/store_records_key.py
arrijabba/dipdup-py
fa90bfd889c473966e0d5aed98cec90a575fcb90
[ "MIT" ]
39
2021-04-13T10:53:27.000Z
2022-02-11T00:53:44.000Z
src/demo_tezos_domains_big_map/types/name_registry/big_map/store_records_key.py
arrijabba/dipdup-py
fa90bfd889c473966e0d5aed98cec90a575fcb90
[ "MIT" ]
113
2021-06-01T18:16:42.000Z
2022-03-28T06:12:58.000Z
src/demo_tezos_domains_big_map/types/name_registry/big_map/store_records_key.py
arrijabba/dipdup-py
fa90bfd889c473966e0d5aed98cec90a575fcb90
[ "MIT" ]
16
2021-05-26T07:04:40.000Z
2022-03-29T06:50:25.000Z
# generated by datamodel-codegen: # filename: store_records_key.json from __future__ import annotations from pydantic import BaseModel class StoreRecordsKey(BaseModel): __root__: str
17.636364
37
0.793814
22
194
6.545455
0.863636
0
0
0
0
0
0
0
0
0
0
0
0.154639
194
10
38
19.4
0.878049
0.345361
0
0
1
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
0eaa7166844007b8b5738452b6c6f0893e7bafcc
45
py
Python
twitoff/__init__.py
xianshiw/lambdaschool_u3_project_twitoff
d6cb0dfe213800c9e0e4513b6411e9793420b410
[ "MIT" ]
null
null
null
twitoff/__init__.py
xianshiw/lambdaschool_u3_project_twitoff
d6cb0dfe213800c9e0e4513b6411e9793420b410
[ "MIT" ]
3
2021-09-08T01:50:16.000Z
2022-03-12T00:23:29.000Z
twitoff/__init__.py
xianshiw/lambdaschool_u3_project_twitoff
d6cb0dfe213800c9e0e4513b6411e9793420b410
[ "MIT" ]
null
null
null
"""Twitoff package entry point: build the application object on import."""

from .app import create_app

# Module-level application instance, created once when the package is imported
# (e.g. what a WSGI server points at).
APP = create_app()
15
27
0.8
8
45
4.25
0.5
0.529412
0
0
0
0
0
0
0
0
0
0
0.111111
45
3
28
15
0.85
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
7ed7cd7957ce41d1dff568b3fe0acb526e0eca64
377
py
Python
web/modules/auth/controllers/__init__.py
devetek/Omni
3a0676f307bd1814da925e1a184743c517ec9307
[ "Apache-2.0" ]
4
2019-04-30T11:07:11.000Z
2019-06-10T03:03:37.000Z
web/modules/auth/controllers/__init__.py
devetek/Omni
3a0676f307bd1814da925e1a184743c517ec9307
[ "Apache-2.0" ]
8
2019-07-17T17:13:09.000Z
2022-02-26T15:40:01.000Z
web/modules/auth/controllers/__init__.py
devetek/Omni
3a0676f307bd1814da925e1a184743c517ec9307
[ "Apache-2.0" ]
null
null
null
"""Auth module controllers: re-export the public controller classes."""

from .authorization import AuthorizationController
from .isauthenticated import IsAuthenticatedController
from .logout import LogoutController
from .refresh import RefreshController
from .registration import RegistrationController

# Explicit public API of this package (same names and order as before).
__all__ = [
    "RegistrationController",
    "AuthorizationController",
    "RefreshController",
    "IsAuthenticatedController",
    "LogoutController",
]
41.888889
80
0.838196
26
377
12
0.5
0
0
0
0
0
0
0
0
0
0
0
0.103448
377
8
81
47.125
0.923077
0
0
0
0
0
0.27321
0.185676
0
0
0
0
0
1
0
false
0
0.714286
0
0.714286
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
7edc9d1fa704170063a98d651327aefd6b4cbaa4
60
py
Python
corider/__init__.py
LukasHedegaard/co-rider
9fdc7f207ea87f89ea463b62daa2e59e12d11586
[ "Apache-2.0" ]
1
2021-05-24T09:23:28.000Z
2021-05-24T09:23:28.000Z
corider/__init__.py
LukasHedegaard/co-rider
9fdc7f207ea87f89ea463b62daa2e59e12d11586
[ "Apache-2.0" ]
null
null
null
corider/__init__.py
LukasHedegaard/co-rider
9fdc7f207ea87f89ea463b62daa2e59e12d11586
[ "Apache-2.0" ]
null
null
null
from .config import Config, Configs, Strategy # noqa: F401
30
59
0.75
8
60
5.625
0.875
0
0
0
0
0
0
0
0
0
0
0.06
0.166667
60
1
60
60
0.84
0.166667
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
7d21308e1649bb4b3da121256cc992ed1298fe80
28
py
Python
nqs/layer.py
stubbi/netket
7391466077a4694e8f12c649730a81bf634f695e
[ "Apache-2.0" ]
1
2019-11-28T10:26:04.000Z
2019-11-28T10:26:04.000Z
nqs/layer.py
stubbi/nqs
7391466077a4694e8f12c649730a81bf634f695e
[ "Apache-2.0" ]
null
null
null
nqs/layer.py
stubbi/nqs
7391466077a4694e8f12c649730a81bf634f695e
[ "Apache-2.0" ]
null
null
null
from ._C_nqs.layer import *
14
27
0.75
5
28
3.8
1
0
0
0
0
0
0
0
0
0
0
0
0.142857
28
1
28
28
0.791667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
ada2e613561908ad4969f7839d1e56588a4b77a5
41
py
Python
pysyft_proto/__init__.py
IonesioJunior/proto
699cef5de30c4bc84c1b3394e5c5d0465132d5b8
[ "Apache-2.0" ]
null
null
null
pysyft_proto/__init__.py
IonesioJunior/proto
699cef5de30c4bc84c1b3394e5c5d0465132d5b8
[ "Apache-2.0" ]
null
null
null
pysyft_proto/__init__.py
IonesioJunior/proto
699cef5de30c4bc84c1b3394e5c5d0465132d5b8
[ "Apache-2.0" ]
null
null
null
from pysyft_proto.proto import proto_info
41
41
0.902439
7
41
5
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.073171
41
1
41
41
0.921053
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
adae2d7f0402dd37124239c0f21dd92fcd2bd4fb
42
py
Python
pythonDesafios/aula12.py
mateusdev7/desafios-python
6160ddc84548c7af7f5775f9acabe58238f83008
[ "MIT" ]
null
null
null
pythonDesafios/aula12.py
mateusdev7/desafios-python
6160ddc84548c7af7f5775f9acabe58238f83008
[ "MIT" ]
null
null
null
pythonDesafios/aula12.py
mateusdev7/desafios-python
6160ddc84548c7af7f5775f9acabe58238f83008
[ "MIT" ]
null
null
null
# Explained the idea of the if, elif, and else statements
42
42
0.714286
8
42
3.75
1
0
0
0
0
0
0
0
0
0
0
0
0.214286
42
1
42
42
0.909091
0.97619
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
adb1eedf569bccee1be912206381fd8e41bb0de9
121
py
Python
intro/admin.py
Kianoosh76/webelopers-scoreboard
550d6a525ce1b1b9a4a7e31bd02882bbc21a275d
[ "MIT" ]
null
null
null
intro/admin.py
Kianoosh76/webelopers-scoreboard
550d6a525ce1b1b9a4a7e31bd02882bbc21a275d
[ "MIT" ]
2
2021-06-08T19:29:43.000Z
2022-03-11T23:37:47.000Z
intro/admin.py
Kianoosh76/webelopers-scoreboard
550d6a525ce1b1b9a4a7e31bd02882bbc21a275d
[ "MIT" ]
1
2018-05-02T15:11:49.000Z
2018-05-02T15:11:49.000Z
from django.contrib import admin # Register your models here. from intro.models import Staff admin.site.register(Staff)
20.166667
32
0.809917
18
121
5.444444
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.123967
121
6
33
20.166667
0.924528
0.214876
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
ade8ed1834a773d32759b647b0a0aa5e7a0169be
189
py
Python
tests/unittests/broken_functions/import_error/main.py
gohar94/azure-functions-python-worker
4322e53ddbcc1eea40c1b061b42653336d9003f6
[ "MIT" ]
277
2018-01-25T23:13:03.000Z
2022-02-22T06:12:04.000Z
tests/unittests/broken_functions/import_error/main.py
gohar94/azure-functions-python-worker
4322e53ddbcc1eea40c1b061b42653336d9003f6
[ "MIT" ]
731
2018-01-18T18:54:38.000Z
2022-03-29T00:01:46.000Z
tests/unittests/broken_functions/import_error/main.py
YunchuWang/azure-functions-python-worker
1f23e038a506c6412e4efbf07eb471a6afab0c2a
[ "MIT" ]
109
2018-01-18T02:22:57.000Z
2022-02-15T18:59:54.000Z
# Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. from sys import __nonexistent # should raise ImportError def main(req): __nonexistent()
23.625
59
0.761905
23
189
6.086957
0.956522
0
0
0
0
0
0
0
0
0
0
0
0.169312
189
7
60
27
0.89172
0.603175
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
5
bc05fd47deade80de35c0b07f6af9767b06b39d6
72
py
Python
slashtags/http/__init__.py
Onii-Chan-Discord/phen-cogs
e0d8dc38a4b68c6a59ab73f6affc079d30009779
[ "MIT" ]
105
2020-08-11T02:14:38.000Z
2022-03-21T13:17:14.000Z
slashtags/http/__init__.py
Kami-DiscordBot/phen-cogs
c4dbf2cee8d49b0bff8853f850b76247d8a7fdbc
[ "MIT" ]
101
2020-10-03T22:55:03.000Z
2022-03-21T01:48:55.000Z
slashtags/http/__init__.py
Kami-DiscordBot/phen-cogs
c4dbf2cee8d49b0bff8853f850b76247d8a7fdbc
[ "MIT" ]
81
2020-09-11T10:24:59.000Z
2022-03-31T22:22:58.000Z
from .httpclient import SlashHTTP # noqa from .models import * # noqa
24
41
0.736111
9
72
5.888889
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.194444
72
2
42
36
0.913793
0.125
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
70b5c9cea862969f2fed429489e9cff9b70e9556
145
py
Python
example/orm/foo.py
m-housh/cnx_base_microservice
d72ad4a480078730729745d61744601fdcb3a72a
[ "MIT" ]
1
2017-03-28T08:53:57.000Z
2017-03-28T08:53:57.000Z
example/orm/foo.py
m-housh/cnx_base_microservice
d72ad4a480078730729745d61744601fdcb3a72a
[ "MIT" ]
null
null
null
example/orm/foo.py
m-housh/cnx_base_microservice
d72ad4a480078730729745d61744601fdcb3a72a
[ "MIT" ]
null
null
null
from sqlalchemy import Column, String

from ..db import DbModel


class Foo(DbModel):
    """Example ORM model with a single unique, required string column."""

    # Up to 40 characters; must be unique and non-null at the database level.
    bar = Column(String(40), unique=True, nullable=False)
16.111111
57
0.731034
20
145
5.3
0.75
0.226415
0
0
0
0
0
0
0
0
0
0.016529
0.165517
145
8
58
18.125
0.859504
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
70e3aaf33f2712ab3aea8ca4bbd2d0db2b43da87
44,477
py
Python
languages/python/asm/arm.py
errir503/asmdot
87a8fa9f273e86d5d5be94bf2bd61924bab7f52f
[ "MIT" ]
1
2021-07-07T08:14:21.000Z
2021-07-07T08:14:21.000Z
languages/python/asm/arm.py
errir503/asmdot
87a8fa9f273e86d5d5be94bf2bd61924bab7f52f
[ "MIT" ]
null
null
null
languages/python/asm/arm.py
errir503/asmdot
87a8fa9f273e86d5d5be94bf2bd61924bab7f52f
[ "MIT" ]
1
2021-07-19T18:56:48.000Z
2021-07-19T18:56:48.000Z
import struct from enum import Enum, Flag from typing import NewType Reg = NewType("Reg", int) setattr(Reg, "r0", Reg(0)) setattr(Reg, "r1", Reg(1)) setattr(Reg, "r2", Reg(2)) setattr(Reg, "r3", Reg(3)) setattr(Reg, "r4", Reg(4)) setattr(Reg, "r5", Reg(5)) setattr(Reg, "r6", Reg(6)) setattr(Reg, "r7", Reg(7)) setattr(Reg, "r8", Reg(8)) setattr(Reg, "r9", Reg(9)) setattr(Reg, "r10", Reg(10)) setattr(Reg, "r11", Reg(11)) setattr(Reg, "r12", Reg(12)) setattr(Reg, "r13", Reg(13)) setattr(Reg, "r14", Reg(14)) setattr(Reg, "r15", Reg(15)) setattr(Reg, "a1", Reg(0)) setattr(Reg, "a2", Reg(1)) setattr(Reg, "a3", Reg(2)) setattr(Reg, "a4", Reg(3)) setattr(Reg, "v1", Reg(4)) setattr(Reg, "v2", Reg(5)) setattr(Reg, "v3", Reg(6)) setattr(Reg, "v4", Reg(7)) setattr(Reg, "v5", Reg(8)) setattr(Reg, "v6", Reg(9)) setattr(Reg, "v7", Reg(10)) setattr(Reg, "v8", Reg(11)) setattr(Reg, "ip", Reg(12)) setattr(Reg, "sp", Reg(13)) setattr(Reg, "lr", Reg(14)) setattr(Reg, "pc", Reg(15)) setattr(Reg, "wr", Reg(7)) setattr(Reg, "sb", Reg(9)) setattr(Reg, "sl", Reg(10)) setattr(Reg, "fp", Reg(11)) class RegList(int, Flag): """A list of ARM registers, where each register corresponds to a single bit.""" R0 = 0 R1 = 1 R2 = 2 R3 = 3 R4 = 4 R5 = 5 R6 = 6 R7 = 7 R8 = 8 R9 = 9 R10 = 10 R11 = 11 R12 = 12 R13 = 13 R14 = 14 R15 = 15 A1 = 0 A2 = 1 A3 = 2 A4 = 3 V1 = 4 V2 = 5 V3 = 6 V4 = 7 V5 = 8 V6 = 9 V7 = 10 V8 = 11 IP = 12 SP = 13 LR = 14 PC = 15 WR = 7 SB = 9 SL = 10 FP = 11 Coprocessor = NewType("Coprocessor", int) setattr(Coprocessor, "cp0", Coprocessor(0)) setattr(Coprocessor, "cp1", Coprocessor(1)) setattr(Coprocessor, "cp2", Coprocessor(2)) setattr(Coprocessor, "cp3", Coprocessor(3)) setattr(Coprocessor, "cp4", Coprocessor(4)) setattr(Coprocessor, "cp5", Coprocessor(5)) setattr(Coprocessor, "cp6", Coprocessor(6)) setattr(Coprocessor, "cp7", Coprocessor(7)) setattr(Coprocessor, "cp8", Coprocessor(8)) setattr(Coprocessor, "cp9", Coprocessor(9)) setattr(Coprocessor, "cp10", Coprocessor(10)) 
setattr(Coprocessor, "cp11", Coprocessor(11)) setattr(Coprocessor, "cp12", Coprocessor(12)) setattr(Coprocessor, "cp13", Coprocessor(13)) setattr(Coprocessor, "cp14", Coprocessor(14)) setattr(Coprocessor, "cp15", Coprocessor(15)) class Condition(int, Enum): """Condition for an ARM instruction to be executed.""" EQ = 0 NE = 1 HS = 2 LO = 3 MI = 4 PL = 5 VS = 6 VC = 7 HI = 8 LS = 9 GE = 10 LT = 11 GT = 12 LE = 13 AL = 14 UN = 15 CS = 2 CC = 3 class Mode(int, Enum): """Processor mode.""" USR = 16 FIQ = 17 IRQ = 18 SVC = 19 ABT = 23 UND = 27 SYS = 31 class Shift(int, Enum): """Kind of a shift.""" LSL = 0 LSR = 1 ASR = 2 ROR = 3 RRX = 3 class Rotation(int, Enum): """Kind of a right rotation.""" NOP = 0 ROR8 = 1 ROR16 = 2 ROR24 = 3 class FieldMask(int, Flag): """Field mask bits.""" C = 1 X = 2 S = 4 F = 8 class InterruptFlags(int, Flag): """Interrupt flags.""" F = 1 I = 2 A = 4 class Addressing(int, Enum): """Addressing type.""" PostIndexed = 0 PreIndexed = 1 Offset = 1 class OffsetMode(int, Enum): """Offset adding or subtracting mode.""" Subtract = 0 Add = 1 class ArmAssembler: """Assembler that targets the arm architecture.""" def __init__(self, size: int) -> None: assert size > 0 self.size = size self.buf = bytearray(size) self.pos = 0 def adc(self, cond: Condition, update_cprs: bool, rn: Reg, rd: Reg, update_condition: bool) -> None: """Emits an 'adc' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((((10485760 or cond) or (update_cprs << 20)) or (rn << 16)) or (rd << 12)) or (update_condition << 20))) self.pos += 4 def add(self, cond: Condition, update_cprs: bool, rn: Reg, rd: Reg, update_condition: bool) -> None: """Emits an 'add' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((((8388608 or cond) or (update_cprs << 20)) or (rn << 16)) or (rd << 12)) or (update_condition << 20))) self.pos += 4 def and_(self, cond: Condition, update_cprs: bool, rn: Reg, rd: Reg, update_condition: bool) -> None: """Emits an 'and' instruction.""" 
struct.pack_into("<I", self.buf, self.pos, (((((0 or cond) or (update_cprs << 20)) or (rn << 16)) or (rd << 12)) or (update_condition << 20))) self.pos += 4 def eor(self, cond: Condition, update_cprs: bool, rn: Reg, rd: Reg, update_condition: bool) -> None: """Emits an 'eor' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((((2097152 or cond) or (update_cprs << 20)) or (rn << 16)) or (rd << 12)) or (update_condition << 20))) self.pos += 4 def orr(self, cond: Condition, update_cprs: bool, rn: Reg, rd: Reg, update_condition: bool) -> None: """Emits an 'orr' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((((25165824 or cond) or (update_cprs << 20)) or (rn << 16)) or (rd << 12)) or (update_condition << 20))) self.pos += 4 def rsb(self, cond: Condition, update_cprs: bool, rn: Reg, rd: Reg, update_condition: bool) -> None: """Emits a 'rsb' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((((6291456 or cond) or (update_cprs << 20)) or (rn << 16)) or (rd << 12)) or (update_condition << 20))) self.pos += 4 def rsc(self, cond: Condition, update_cprs: bool, rn: Reg, rd: Reg, update_condition: bool) -> None: """Emits a 'rsc' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((((14680064 or cond) or (update_cprs << 20)) or (rn << 16)) or (rd << 12)) or (update_condition << 20))) self.pos += 4 def sbc(self, cond: Condition, update_cprs: bool, rn: Reg, rd: Reg, update_condition: bool) -> None: """Emits a 'sbc' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((((12582912 or cond) or (update_cprs << 20)) or (rn << 16)) or (rd << 12)) or (update_condition << 20))) self.pos += 4 def sub(self, cond: Condition, update_cprs: bool, rn: Reg, rd: Reg, update_condition: bool) -> None: """Emits a 'sub' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((((4194304 or cond) or (update_cprs << 20)) or (rn << 16)) or (rd << 12)) or (update_condition << 20))) self.pos += 4 def bkpt(self, immed: int) -> None: """Emits a 'bkpt' 
instruction.""" struct.pack_into("<I", self.buf, self.pos, ((3776970864 or ((immed and 65520) << 8)) or ((immed and 15) << 0))) self.pos += 4 def b(self, cond: Condition) -> None: """Emits a 'b' instruction.""" struct.pack_into("<I", self.buf, self.pos, (167772160 or cond)) self.pos += 4 def bic(self, cond: Condition, update_cprs: bool, rn: Reg, rd: Reg, update_condition: bool) -> None: """Emits a 'bic' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((((29360128 or cond) or (update_cprs << 20)) or (rn << 16)) or (rd << 12)) or (update_condition << 20))) self.pos += 4 def blx(self, cond: Condition) -> None: """Emits a 'blx' instruction.""" struct.pack_into("<I", self.buf, self.pos, (19922736 or cond)) self.pos += 4 def bx(self, cond: Condition) -> None: """Emits a 'bx' instruction.""" struct.pack_into("<I", self.buf, self.pos, (19922704 or cond)) self.pos += 4 def bxj(self, cond: Condition) -> None: """Emits a 'bxj' instruction.""" struct.pack_into("<I", self.buf, self.pos, (19922720 or cond)) self.pos += 4 def blxun(self) -> None: """Emits a 'blxun' instruction.""" struct.pack_into("<I", self.buf, self.pos, 4194304000) self.pos += 4 def clz(self, cond: Condition, rd: Reg) -> None: """Emits a 'clz' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((24055568 or cond) or (rd << 12))) self.pos += 4 def cmn(self, cond: Condition, rn: Reg) -> None: """Emits a 'cmn' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((24117248 or cond) or (rn << 16))) self.pos += 4 def cmp(self, cond: Condition, rn: Reg) -> None: """Emits a 'cmp' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((22020096 or cond) or (rn << 16))) self.pos += 4 def cpy(self, cond: Condition, rd: Reg) -> None: """Emits a 'cpy' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((27262976 or cond) or (rd << 12))) self.pos += 4 def cps(self, mode: Mode) -> None: """Emits a 'cps' instruction.""" struct.pack_into("<I", self.buf, self.pos, (4043440128 or (mode << 
0))) self.pos += 4 def cpsie(self, iflags: InterruptFlags) -> None: """Emits a 'cpsie' instruction.""" struct.pack_into("<I", self.buf, self.pos, (4043833344 or (iflags << 6))) self.pos += 4 def cpsid(self, iflags: InterruptFlags) -> None: """Emits a 'cpsid' instruction.""" struct.pack_into("<I", self.buf, self.pos, (4044095488 or (iflags << 6))) self.pos += 4 def cpsie_mode(self, iflags: InterruptFlags, mode: Mode) -> None: """Emits a 'cpsie_mode' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((4043964416 or (iflags << 6)) or (mode << 0))) self.pos += 4 def cpsid_mode(self, iflags: InterruptFlags, mode: Mode) -> None: """Emits a 'cpsid_mode' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((4044226560 or (iflags << 6)) or (mode << 0))) self.pos += 4 def ldc(self, cond: Condition, write: bool, rn: Reg, cpnum: Coprocessor, offset_mode: OffsetMode, addressing_mode: Addressing) -> None: """Emits a 'ldc' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((((202375168 or cond) or (write << 21)) or (rn << 16)) or (cpnum << 8)) or (addressing_mode << 23)) or (offset_mode << 11))) self.pos += 4 def ldm(self, cond: Condition, rn: Reg, offset_mode: OffsetMode, addressing_mode: Addressing, registers: RegList, write: bool, copy_spsr: bool) -> None: """Emits a 'ldm' instruction.""" assert ((copy_spsr == 1) ^ (write == (registers and 32768))) struct.pack_into("<I", self.buf, self.pos, ((((((((135266304 or cond) or (rn << 16)) or (addressing_mode << 23)) or (offset_mode << 11)) or (addressing_mode << 23)) or registers) or (copy_spsr << 21)) or (write << 10))) self.pos += 4 def ldr(self, cond: Condition, write: bool, rn: Reg, rd: Reg, offset_mode: OffsetMode, addressing_mode: Addressing) -> None: """Emits a 'ldr' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((((68157440 or cond) or (write << 21)) or (rn << 16)) or (rd << 12)) or (addressing_mode << 23)) or (offset_mode << 11))) self.pos += 4 def ldrb(self, cond: Condition, write: 
bool, rn: Reg, rd: Reg, offset_mode: OffsetMode, addressing_mode: Addressing) -> None: """Emits a 'ldrb' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((((72351744 or cond) or (write << 21)) or (rn << 16)) or (rd << 12)) or (addressing_mode << 23)) or (offset_mode << 11))) self.pos += 4 def ldrbt(self, cond: Condition, rn: Reg, rd: Reg, offset_mode: OffsetMode) -> None: """Emits a 'ldrbt' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((74448896 or cond) or (rn << 16)) or (rd << 12)) or (offset_mode << 23))) self.pos += 4 def ldrd(self, cond: Condition, write: bool, rn: Reg, rd: Reg, offset_mode: OffsetMode, addressing_mode: Addressing) -> None: """Emits a 'ldrd' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((((208 or cond) or (write << 21)) or (rn << 16)) or (rd << 12)) or (addressing_mode << 23)) or (offset_mode << 11))) self.pos += 4 def ldrex(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'ldrex' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((26218399 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def ldrh(self, cond: Condition, write: bool, rn: Reg, rd: Reg, offset_mode: OffsetMode, addressing_mode: Addressing) -> None: """Emits a 'ldrh' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((((1048752 or cond) or (write << 21)) or (rn << 16)) or (rd << 12)) or (addressing_mode << 23)) or (offset_mode << 11))) self.pos += 4 def ldrsb(self, cond: Condition, write: bool, rn: Reg, rd: Reg, offset_mode: OffsetMode, addressing_mode: Addressing) -> None: """Emits a 'ldrsb' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((((1048784 or cond) or (write << 21)) or (rn << 16)) or (rd << 12)) or (addressing_mode << 23)) or (offset_mode << 11))) self.pos += 4 def ldrsh(self, cond: Condition, write: bool, rn: Reg, rd: Reg, offset_mode: OffsetMode, addressing_mode: Addressing) -> None: """Emits a 'ldrsh' instruction.""" struct.pack_into("<I", self.buf, self.pos, 
((((((1048816 or cond) or (write << 21)) or (rn << 16)) or (rd << 12)) or (addressing_mode << 23)) or (offset_mode << 11))) self.pos += 4 def ldrt(self, cond: Condition, rn: Reg, rd: Reg, offset_mode: OffsetMode) -> None: """Emits a 'ldrt' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((70254592 or cond) or (rn << 16)) or (rd << 12)) or (offset_mode << 23))) self.pos += 4 def cdp(self, cond: Condition, cpnum: Coprocessor) -> None: """Emits a 'cdp' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((234881024 or cond) or (cpnum << 8))) self.pos += 4 def mcr(self, cond: Condition, rd: Reg, cpnum: Coprocessor) -> None: """Emits a 'mcr' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((234881040 or cond) or (rd << 12)) or (cpnum << 8))) self.pos += 4 def mrc(self, cond: Condition, rd: Reg, cpnum: Coprocessor) -> None: """Emits a 'mrc' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((235929616 or cond) or (rd << 12)) or (cpnum << 8))) self.pos += 4 def mcrr(self, cond: Condition, rn: Reg, rd: Reg, cpnum: Coprocessor) -> None: """Emits a 'mcrr' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((205520896 or cond) or (rn << 16)) or (rd << 12)) or (cpnum << 8))) self.pos += 4 def mla(self, cond: Condition, update_cprs: bool, rn: Reg, rd: Reg, update_condition: bool) -> None: """Emits a 'mla' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((((2097296 or cond) or (update_cprs << 20)) or (rn << 12)) or (rd << 16)) or (update_condition << 20))) self.pos += 4 def mov(self, cond: Condition, update_cprs: bool, rd: Reg, update_condition: bool) -> None: """Emits a 'mov' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((27262976 or cond) or (update_cprs << 20)) or (rd << 12)) or (update_condition << 20))) self.pos += 4 def mrrc(self, cond: Condition, rn: Reg, rd: Reg, cpnum: Coprocessor) -> None: """Emits a 'mrrc' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((206569472 or cond) or 
(rn << 16)) or (rd << 12)) or (cpnum << 8))) self.pos += 4 def mrs(self, cond: Condition, rd: Reg) -> None: """Emits a 'mrs' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((17760256 or cond) or (rd << 12))) self.pos += 4 def mul(self, cond: Condition, update_cprs: bool, rd: Reg, update_condition: bool) -> None: """Emits a 'mul' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((144 or cond) or (update_cprs << 20)) or (rd << 16)) or (update_condition << 20))) self.pos += 4 def mvn(self, cond: Condition, update_cprs: bool, rd: Reg, update_condition: bool) -> None: """Emits a 'mvn' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((31457280 or cond) or (update_cprs << 20)) or (rd << 12)) or (update_condition << 20))) self.pos += 4 def msr_imm(self, cond: Condition, fieldmask: FieldMask) -> None: """Emits a 'msr_imm' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((52490240 or cond) or (fieldmask << 16))) self.pos += 4 def msr_reg(self, cond: Condition, fieldmask: FieldMask) -> None: """Emits a 'msr_reg' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((18935808 or cond) or (fieldmask << 16))) self.pos += 4 def pkhbt(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'pkhbt' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((109051920 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def pkhtb(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'pkhtb' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((109051984 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def pld(self, rn: Reg, offset_mode: OffsetMode) -> None: """Emits a 'pld' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((4115722240 or (rn << 16)) or (offset_mode << 23))) self.pos += 4 def qadd(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'qadd' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((16777296 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def 
qadd16(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'qadd16' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((102764304 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def qadd8(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'qadd8' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((102764432 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def qaddsubx(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'qaddsubx' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((102764336 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def qdadd(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'qdadd' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((20971600 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def qdsub(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'qdsub' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((23068752 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def qsub(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'qsub' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((18874448 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def qsub16(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'qsub16' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((102764400 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def qsub8(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'qsub8' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((102764528 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def qsubaddx(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'qsubaddx' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((102764368 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def rev(self, cond: Condition, rd: Reg) -> None: """Emits a 'rev' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((113184560 or cond) 
or (rd << 12))) self.pos += 4 def rev16(self, cond: Condition, rd: Reg) -> None: """Emits a 'rev16' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((113184688 or cond) or (rd << 12))) self.pos += 4 def revsh(self, cond: Condition, rd: Reg) -> None: """Emits a 'revsh' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((117378992 or cond) or (rd << 12))) self.pos += 4 def rfe(self, write: bool, rn: Reg, offset_mode: OffsetMode, addressing_mode: Addressing) -> None: """Emits a 'rfe' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((4161800704 or (write << 21)) or (rn << 16)) or (addressing_mode << 23)) or (offset_mode << 11))) self.pos += 4 def sadd16(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'sadd16' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((101715728 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def sadd8(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'sadd8' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((101715856 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def saddsubx(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'saddsubx' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((101715760 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def sel(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'sel' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((109055920 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def setendbe(self) -> None: """Emits a 'setendbe' instruction.""" struct.pack_into("<I", self.buf, self.pos, 4043375104) self.pos += 4 def setendle(self) -> None: """Emits a 'setendle' instruction.""" struct.pack_into("<I", self.buf, self.pos, 4043374592) self.pos += 4 def shadd16(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'shadd16' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((103812880 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def shadd8(self, 
cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'shadd8' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((103813008 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def shaddsubx(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'shaddsubx' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((103812912 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def shsub16(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'shsub16' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((103812976 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def shsub8(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'shsub8' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((103813104 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def shsubaddx(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'shsubaddx' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((103812944 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def smlabb(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'smlabb' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((16777344 or cond) or (rn << 12)) or (rd << 16))) self.pos += 4 def smlabt(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'smlabt' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((16777376 or cond) or (rn << 12)) or (rd << 16))) self.pos += 4 def smlatb(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'smlatb' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((16777408 or cond) or (rn << 12)) or (rd << 16))) self.pos += 4 def smlatt(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'smlatt' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((16777440 or cond) or (rn << 12)) or (rd << 16))) self.pos += 4 def smlad(self, cond: Condition, exchange: bool, rn: Reg, rd: Reg) -> None: """Emits a 'smlad' instruction.""" struct.pack_into("<I", self.buf, 
self.pos, ((((117440528 or cond) or (exchange << 5)) or (rn << 12)) or (rd << 16))) self.pos += 4 def smlal(self, cond: Condition, update_cprs: bool, update_condition: bool) -> None: """Emits a 'smlal' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((14680208 or cond) or (update_cprs << 20)) or (update_condition << 20))) self.pos += 4 def smlalbb(self, cond: Condition) -> None: """Emits a 'smlalbb' instruction.""" struct.pack_into("<I", self.buf, self.pos, (20971648 or cond)) self.pos += 4 def smlalbt(self, cond: Condition) -> None: """Emits a 'smlalbt' instruction.""" struct.pack_into("<I", self.buf, self.pos, (20971680 or cond)) self.pos += 4 def smlaltb(self, cond: Condition) -> None: """Emits a 'smlaltb' instruction.""" struct.pack_into("<I", self.buf, self.pos, (20971712 or cond)) self.pos += 4 def smlaltt(self, cond: Condition) -> None: """Emits a 'smlaltt' instruction.""" struct.pack_into("<I", self.buf, self.pos, (20971744 or cond)) self.pos += 4 def smlald(self, cond: Condition, exchange: bool) -> None: """Emits a 'smlald' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((121634832 or cond) or (exchange << 5))) self.pos += 4 def smlawb(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'smlawb' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((18874496 or cond) or (rn << 12)) or (rd << 16))) self.pos += 4 def smlawt(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'smlawt' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((18874560 or cond) or (rn << 12)) or (rd << 16))) self.pos += 4 def smlsd(self, cond: Condition, exchange: bool, rn: Reg, rd: Reg) -> None: """Emits a 'smlsd' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((117440592 or cond) or (exchange << 5)) or (rn << 12)) or (rd << 16))) self.pos += 4 def smlsld(self, cond: Condition, exchange: bool) -> None: """Emits a 'smlsld' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((121634896 or cond) or 
(exchange << 5))) self.pos += 4 def smmla(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'smmla' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((122683408 or cond) or (rn << 12)) or (rd << 16))) self.pos += 4 def smmls(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'smmls' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((122683600 or cond) or (rn << 12)) or (rd << 16))) self.pos += 4 def smmul(self, cond: Condition, rd: Reg) -> None: """Emits a 'smmul' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((122744848 or cond) or (rd << 16))) self.pos += 4 def smuad(self, cond: Condition, exchange: bool, rd: Reg) -> None: """Emits a 'smuad' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((117501968 or cond) or (exchange << 5)) or (rd << 16))) self.pos += 4 def smulbb(self, cond: Condition, rd: Reg) -> None: """Emits a 'smulbb' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((23068800 or cond) or (rd << 16))) self.pos += 4 def smulbt(self, cond: Condition, rd: Reg) -> None: """Emits a 'smulbt' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((23068832 or cond) or (rd << 16))) self.pos += 4 def smultb(self, cond: Condition, rd: Reg) -> None: """Emits a 'smultb' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((23068864 or cond) or (rd << 16))) self.pos += 4 def smultt(self, cond: Condition, rd: Reg) -> None: """Emits a 'smultt' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((23068896 or cond) or (rd << 16))) self.pos += 4 def smull(self, cond: Condition, update_cprs: bool, update_condition: bool) -> None: """Emits a 'smull' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((12583056 or cond) or (update_cprs << 20)) or (update_condition << 20))) self.pos += 4 def smulwb(self, cond: Condition, rd: Reg) -> None: """Emits a 'smulwb' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((18874528 or cond) or (rd << 16))) self.pos += 4 def 
smulwt(self, cond: Condition, rd: Reg) -> None: """Emits a 'smulwt' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((18874592 or cond) or (rd << 16))) self.pos += 4 def smusd(self, cond: Condition, exchange: bool, rd: Reg) -> None: """Emits a 'smusd' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((117502032 or cond) or (exchange << 5)) or (rd << 16))) self.pos += 4 def srs(self, write: bool, mode: Mode, offset_mode: OffsetMode, addressing_mode: Addressing) -> None: """Emits a 'srs' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((4165797120 or (write << 21)) or (mode << 0)) or (addressing_mode << 23)) or (offset_mode << 11))) self.pos += 4 def ssat(self, cond: Condition, rd: Reg) -> None: """Emits a 'ssat' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((105906192 or cond) or (rd << 12))) self.pos += 4 def ssat16(self, cond: Condition, rd: Reg) -> None: """Emits a 'ssat16' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((111152944 or cond) or (rd << 12))) self.pos += 4 def ssub16(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'ssub16' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((101715824 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def ssub8(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'ssub8' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((101715952 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def ssubaddx(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'ssubaddx' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((101715792 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def stc(self, cond: Condition, write: bool, rn: Reg, cpnum: Coprocessor, offset_mode: OffsetMode, addressing_mode: Addressing) -> None: """Emits a 'stc' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((((201326592 or cond) or (write << 21)) or (rn << 16)) or (cpnum << 8)) or (addressing_mode << 23)) or 
(offset_mode << 11))) self.pos += 4 def stm(self, cond: Condition, rn: Reg, offset_mode: OffsetMode, addressing_mode: Addressing, registers: RegList, write: bool, user_mode: bool) -> None: """Emits a 'stm' instruction.""" assert ((user_mode == 0) or (write == 0)) struct.pack_into("<I", self.buf, self.pos, ((((((((134217728 or cond) or (rn << 16)) or (addressing_mode << 23)) or (offset_mode << 11)) or (addressing_mode << 23)) or registers) or (user_mode << 21)) or (write << 10))) self.pos += 4 def str(self, cond: Condition, write: bool, rn: Reg, rd: Reg, offset_mode: OffsetMode, addressing_mode: Addressing) -> None: """Emits a 'str' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((((67108864 or cond) or (write << 21)) or (rn << 16)) or (rd << 12)) or (addressing_mode << 23)) or (offset_mode << 11))) self.pos += 4 def strb(self, cond: Condition, write: bool, rn: Reg, rd: Reg, offset_mode: OffsetMode, addressing_mode: Addressing) -> None: """Emits a 'strb' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((((71303168 or cond) or (write << 21)) or (rn << 16)) or (rd << 12)) or (addressing_mode << 23)) or (offset_mode << 11))) self.pos += 4 def strbt(self, cond: Condition, rn: Reg, rd: Reg, offset_mode: OffsetMode) -> None: """Emits a 'strbt' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((73400320 or cond) or (rn << 16)) or (rd << 12)) or (offset_mode << 23))) self.pos += 4 def strd(self, cond: Condition, write: bool, rn: Reg, rd: Reg, offset_mode: OffsetMode, addressing_mode: Addressing) -> None: """Emits a 'strd' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((((240 or cond) or (write << 21)) or (rn << 16)) or (rd << 12)) or (addressing_mode << 23)) or (offset_mode << 11))) self.pos += 4 def strex(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'strex' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((25169808 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def strh(self, cond: 
Condition, write: bool, rn: Reg, rd: Reg, offset_mode: OffsetMode, addressing_mode: Addressing) -> None: """Emits a 'strh' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((((176 or cond) or (write << 21)) or (rn << 16)) or (rd << 12)) or (addressing_mode << 23)) or (offset_mode << 11))) self.pos += 4 def strt(self, cond: Condition, rn: Reg, rd: Reg, offset_mode: OffsetMode) -> None: """Emits a 'strt' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((69206016 or cond) or (rn << 16)) or (rd << 12)) or (offset_mode << 23))) self.pos += 4 def swi(self, cond: Condition) -> None: """Emits a 'swi' instruction.""" struct.pack_into("<I", self.buf, self.pos, (251658240 or cond)) self.pos += 4 def swp(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'swp' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((16777360 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def swpb(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits a 'swpb' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((20971664 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def sxtab(self, cond: Condition, rn: Reg, rd: Reg, rotate: Rotation) -> None: """Emits a 'sxtab' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((111149168 or cond) or (rn << 16)) or (rd << 12)) or (rotate << 10))) self.pos += 4 def sxtab16(self, cond: Condition, rn: Reg, rd: Reg, rotate: Rotation) -> None: """Emits a 'sxtab16' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((109052016 or cond) or (rn << 16)) or (rd << 12)) or (rotate << 10))) self.pos += 4 def sxtah(self, cond: Condition, rn: Reg, rd: Reg, rotate: Rotation) -> None: """Emits a 'sxtah' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((112197744 or cond) or (rn << 16)) or (rd << 12)) or (rotate << 10))) self.pos += 4 def sxtb(self, cond: Condition, rd: Reg, rotate: Rotation) -> None: """Emits a 'sxtb' instruction.""" struct.pack_into("<I", self.buf, self.pos, 
(((112132208 or cond) or (rd << 12)) or (rotate << 10))) self.pos += 4 def sxtb16(self, cond: Condition, rd: Reg, rotate: Rotation) -> None: """Emits a 'sxtb16' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((110035056 or cond) or (rd << 12)) or (rotate << 10))) self.pos += 4 def sxth(self, cond: Condition, rd: Reg, rotate: Rotation) -> None: """Emits a 'sxth' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((113180784 or cond) or (rd << 12)) or (rotate << 10))) self.pos += 4 def teq(self, cond: Condition, rn: Reg) -> None: """Emits a 'teq' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((19922944 or cond) or (rn << 16))) self.pos += 4 def tst(self, cond: Condition, rn: Reg) -> None: """Emits a 'tst' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((17825792 or cond) or (rn << 16))) self.pos += 4 def uadd16(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits an 'uadd16' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((105910032 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def uadd8(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits an 'uadd8' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((105910160 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def uaddsubx(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits an 'uaddsubx' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((105910064 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def uhadd16(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits an 'uhadd16' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((108007184 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def uhadd8(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits an 'uhadd8' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((108007312 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def uhaddsubx(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits an 'uhaddsubx' 
instruction.""" struct.pack_into("<I", self.buf, self.pos, (((108007216 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def uhsub16(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits an 'uhsub16' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((108007280 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def uhsub8(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits an 'uhsub8' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((108007408 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def uhsubaddx(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits an 'uhsubaddx' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((108007248 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def umaal(self, cond: Condition) -> None: """Emits an 'umaal' instruction.""" struct.pack_into("<I", self.buf, self.pos, (4194448 or cond)) self.pos += 4 def umlal(self, cond: Condition, update_cprs: bool, update_condition: bool) -> None: """Emits an 'umlal' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((10485904 or cond) or (update_cprs << 20)) or (update_condition << 20))) self.pos += 4 def umull(self, cond: Condition, update_cprs: bool, update_condition: bool) -> None: """Emits an 'umull' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((8388752 or cond) or (update_cprs << 20)) or (update_condition << 20))) self.pos += 4 def uqadd16(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits an 'uqadd16' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((106958608 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def uqadd8(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits an 'uqadd8' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((106958736 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def uqaddsubx(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits an 'uqaddsubx' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((106958640 or 
cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def uqsub16(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits an 'uqsub16' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((106958704 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def uqsub8(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits an 'uqsub8' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((106958832 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def uqsubaddx(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits an 'uqsubaddx' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((106958672 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def usad8(self, cond: Condition, rd: Reg) -> None: """Emits an 'usad8' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((125890576 or cond) or (rd << 16))) self.pos += 4 def usada8(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits an 'usada8' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((125829136 or cond) or (rn << 12)) or (rd << 16))) self.pos += 4 def usat(self, cond: Condition, rd: Reg) -> None: """Emits an 'usat' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((115343376 or cond) or (rd << 12))) self.pos += 4 def usat16(self, cond: Condition, rd: Reg) -> None: """Emits an 'usat16' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((115347248 or cond) or (rd << 12))) self.pos += 4 def usub16(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits an 'usub16' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((105910128 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def usub8(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits an 'usub8' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((105910256 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def usubaddx(self, cond: Condition, rn: Reg, rd: Reg) -> None: """Emits an 'usubaddx' instruction.""" struct.pack_into("<I", self.buf, self.pos, 
(((105910096 or cond) or (rn << 16)) or (rd << 12))) self.pos += 4 def uxtab(self, cond: Condition, rn: Reg, rd: Reg, rotate: Rotation) -> None: """Emits an 'uxtab' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((115343472 or cond) or (rn << 16)) or (rd << 12)) or (rotate << 10))) self.pos += 4 def uxtab16(self, cond: Condition, rn: Reg, rd: Reg, rotate: Rotation) -> None: """Emits an 'uxtab16' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((113246320 or cond) or (rn << 16)) or (rd << 12)) or (rotate << 10))) self.pos += 4 def uxtah(self, cond: Condition, rn: Reg, rd: Reg, rotate: Rotation) -> None: """Emits an 'uxtah' instruction.""" struct.pack_into("<I", self.buf, self.pos, ((((116392048 or cond) or (rn << 16)) or (rd << 12)) or (rotate << 10))) self.pos += 4 def uxtb(self, cond: Condition, rd: Reg, rotate: Rotation) -> None: """Emits an 'uxtb' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((116326512 or cond) or (rd << 12)) or (rotate << 10))) self.pos += 4 def uxtb16(self, cond: Condition, rd: Reg, rotate: Rotation) -> None: """Emits an 'uxtb16' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((114229360 or cond) or (rd << 12)) or (rotate << 10))) self.pos += 4 def uxth(self, cond: Condition, rd: Reg, rotate: Rotation) -> None: """Emits an 'uxth' instruction.""" struct.pack_into("<I", self.buf, self.pos, (((117375088 or cond) or (rd << 12)) or (rotate << 10))) self.pos += 4
44.971689
227
0.564494
6,312
44,477
3.925697
0.082858
0.091247
0.090964
0.097462
0.784818
0.779249
0.759756
0.754954
0.716292
0.431535
0
0.077035
0.242328
44,477
988
228
45.017206
0.658269
0.113025
0
0.251956
0
0
0.012127
0
0
0
0
0
0.004695
1
0.253521
false
0
0.004695
0
0.402191
0
0
0
0
null
0
0
0
0
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
5
cb1246b9f94f60b956f1ba23c480c0c75472e7e8
681
py
Python
api/decorators.py
kinpa200296/cmdpy
3ce1e2c2c8803ad296d9b7c3ac0be5100938632e
[ "MIT" ]
null
null
null
api/decorators.py
kinpa200296/cmdpy
3ce1e2c2c8803ad296d9b7c3ac0be5100938632e
[ "MIT" ]
null
null
null
api/decorators.py
kinpa200296/cmdpy
3ce1e2c2c8803ad296d9b7c3ac0be5100938632e
[ "MIT" ]
null
null
null
class CmdDecorator(object):
    """Decorator that tags a callable with command-dispatch metadata.

    The tagged attributes (``_cmd_prefix``, ``_cmd_name``, ``_cmd_aliases``)
    are read later by whatever discovers command handlers; the wrapped
    function itself is returned unmodified otherwise.
    """

    def __init__(self, cmd_name, cmd_aliases, cmd_prefix):
        self.cmd_prefix = cmd_prefix
        self.cmd_name = cmd_name
        self.cmd_aliases = cmd_aliases

    def __call__(self, func):
        # Stamp the metadata onto the callable in place and hand it back —
        # no wrapper function is created.
        for attr, value in (("_cmd_prefix", self.cmd_prefix),
                            ("_cmd_name", self.cmd_name),
                            ("_cmd_aliases", self.cmd_aliases)):
            setattr(func, attr, value)
        return func


def cmd_helper(cmd_name, cmd_aliases=None):
    """Return a decorator marking a function as the 'help' handler for *cmd_name*."""
    return CmdDecorator(cmd_name, cmd_aliases, 'help')


def cmd_func(cmd_name, cmd_aliases=None):
    """Return a decorator marking a function as the 'do' handler for *cmd_name*."""
    return CmdDecorator(cmd_name, cmd_aliases, 'do')


def cmd_completer(cmd_name, cmd_aliases=None):
    """Return a decorator marking a function as the 'complete' handler for *cmd_name*."""
    return CmdDecorator(cmd_name, cmd_aliases, 'complete')
27.24
58
0.712188
96
681
4.625
0.197917
0.173423
0.18018
0.268018
0.477477
0.378378
0.378378
0.378378
0.378378
0.378378
0
0
0.198238
681
24
59
28.375
0.813187
0
0
0
0
0
0.020558
0
0
0
0
0
0
1
0.3125
false
0
0
0.1875
0.625
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
cb17de83c065c526ea2709e9cbafed8d95e42a9c
100
py
Python
Class 1/Chap01/hello-working.py
gratienceacademy/python_basics
102a740d9c9fae45a3947b14a204f7bae33af813
[ "Apache-2.0" ]
null
null
null
Class 1/Chap01/hello-working.py
gratienceacademy/python_basics
102a740d9c9fae45a3947b14a204f7bae33af813
[ "Apache-2.0" ]
null
null
null
Class 1/Chap01/hello-working.py
gratienceacademy/python_basics
102a740d9c9fae45a3947b14a204f7bae33af813
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python3
# Copyright 2009-2017 BHG http://bw.org/

# Minimal example script: prints a single greeting and exits.
print('Hello, World. To Aliens!!')
20
40
0.68
16
100
4.25
1
0
0
0
0
0
0
0
0
0
0
0.102273
0.12
100
4
41
25
0.670455
0.6
0
0
0
0
0.657895
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
cb18c7154d5b430e3e1c52a0de0c5f502c48c9cd
24,489
py
Python
tests/views/view_test_case_test.py
BMeu/Aerarium
119946cead727ef68b5ecea339990d982c006391
[ "MIT" ]
null
null
null
tests/views/view_test_case_test.py
BMeu/Aerarium
119946cead727ef68b5ecea339990d982c006391
[ "MIT" ]
139
2018-12-26T07:54:31.000Z
2021-06-01T23:14:45.000Z
tests/views/view_test_case_test.py
BMeu/Aerarium
119946cead727ef68b5ecea339990d982c006391
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from typing import Set from werkzeug.exceptions import NotFound from app.userprofile import Permission from app.userprofile import permission_required from app.userprofile import permission_required_all from app.userprofile import permission_required_one_of from app.userprofile import Role from app.userprofile import User from tests.views import ViewTestCase class ViewTestCaseTest(ViewTestCase): # region Route Accessing def test_get_with_correct_status(self) -> None: """ Test accessing a URL via HTTP GET and expecting the correct status. Expected result: The response data is returned. No error is raised. """ self.create_and_login_user() data = self.get('/user/login') self.assertIn('Logout', data) def test_get_with_incorrect_status(self) -> None: """ Test accessing a URL via HTTP GET and expecting an incorrect status. Expected result: The response data is returned. No error is raised. """ self.create_and_login_user() with self.assertRaises(self.failureException): self.get('/user/login', expected_status=404) def test_post_with_correct_status(self) -> None: """ Test accessing a URL via HTTP POST and expecting the correct status. Expected result: The response data is returned. No error is raised. """ email = 'jane@doe.com' name = 'Jane Doe' password = 'ABC123!' self.create_user(email, name, password) data = self.post('/user/login', data=dict( email=email, password=password, )) self.assertIn('Welcome', data) self.assertNotIn('Log In', data) def test_post_with_correct_status_and_no_data(self) -> None: """ Test accessing a URL via HTTP POST and expecting the correct status, but passing no data. Expected result: The response data is returned. No error is raised. """ email = 'jane@doe.com' name = 'Jane Doe' password = 'ABC123!' 
self.create_user(email, name, password) data = self.post('/user/login') self.assertNotIn('Welcome', data) self.assertIn('Log In', data) def test_post_with_incorrect_status(self) -> None: """ Test accessing a URL via HTTP POST and expecting an incorrect status. Expected result: The response data is returned. No error is raised. """ email = 'jane@doe.com' name = 'Jane Doe' password = 'ABC123!' self.create_user(email, name, password) with self.assertRaises(self.failureException): self.post('/user/login', expected_status=404, data=dict( email=email, password=password, )) def test_check_allowed_methods_without_assertion_failures(self) -> None: """ Test checking the allowed and prohibited methods of a URL when all assertions hold. Expected result: No error is raised. 'OPTIONS' and 'HEAD' are automatically added to the allowed methods. """ allowed_methods = ['GET', 'POST', 'PUT', 'OPTIONS', 'HEAD'] self.app.add_url_rule('/example', 'example', self.example_route, methods=allowed_methods) self.check_allowed_methods('/example', {'GET', 'POST', 'PUT'}) def test_check_allowed_methods_with_unexpectedly_allowed_method(self) -> None: """ Test checking the allowed and prohibited methods of a URL when there is a method that is allowed but shouldn't be. Expected result: An assertion error is raised with a message describing the failure. """ self.app.add_url_rule('/example', 'example', self.example_route, methods=['POST', 'PUT']) # PUT is allowed, but should not be allowed. with self.assertRaises(self.failureException) as exception_cm: self.check_allowed_methods('/example', {'POST'}) self.assertEqual('405 != 200 : PUT /example is allowed, but should not be.', str(exception_cm.exception)) def test_check_allowed_methods_with_unexpectedly_prohibited_method(self) -> None: """ Test checking the allowed and prohibited methods of a URL when there is a method that is prohibited but shouldn't be. Expected result: An assertion error is raised with a message describing the failure. 
""" self.app.add_url_rule('/example', 'example', self.example_route, methods=['POST']) # PUT is not allowed, but should be. with self.assertRaises(self.failureException) as exception_cm: self.check_allowed_methods('/example', {'POST', 'PUT'}) # The error object does not have a `msg` attribute if it is raised from `assertNotEqual`. self.assertEqual('405 == 405 : PUT /example is not allowed, but should be.', str(exception_cm.exception)) def test_check_allowed_methods_with_default_methods(self) -> None: """ Test checking the allowed and prohibited methods of a URL when not specifying the allowed methods. Expected result: 'GET' is automatically allowed and thus, will not raise an error. """ self.app.add_url_rule('/example', 'example', self.example_route, methods=['GET']) self.check_allowed_methods('/example') def test_check_allowed_methods_without_options(self) -> None: """ Test checking the allowed and prohibited methods of a URL when not automatically adding 'OPTIONS' to the allowed methods. Expected result: An error is raised that 'OPTIONS' is allowed, but should not be. """ self.app.add_url_rule('/example', 'example', self.example_route, methods=['GET', 'OPTIONS']) with self.assertRaises(self.failureException) as exception_cm: self.check_allowed_methods('/example', {'GET'}, allow_options=False) self.assertEqual('405 != 200 : OPTIONS /example is allowed, but should not be.', str(exception_cm.exception)) def test_get_status_code_for_method_delete(self) -> None: """ Test that accessing a URL via DELETE. Expected result: The correct status code is returned. """ self.app.add_url_rule('/delete', 'delete', self.example_route, methods=['DELETE']) status_code = self._get_status_code_for_method('/delete', 'DELETE') self.assertEqual(200, status_code) def test_get_status_code_for_method_get(self) -> None: """ Test that accessing a URL via GET. Expected result: The correct status code is returned. 
""" self.app.add_url_rule('/get', 'get', self.example_route, methods=['GET']) status_code = self._get_status_code_for_method('/get', 'GET') self.assertEqual(200, status_code) def test_get_status_code_for_method_head(self) -> None: """ Test that accessing a URL via HEAD. Expected result: The correct status code is returned. """ self.app.add_url_rule('/head', 'head', self.example_route, methods=['HEAD']) status_code = self._get_status_code_for_method('/head', 'HEAD') self.assertEqual(200, status_code) def test_get_status_code_for_method_options(self) -> None: """ Test that accessing a URL via OPTIONS. Expected result: The correct status code is returned. """ self.app.add_url_rule('/options', 'options', self.example_route, methods=['OPTIONS']) status_code = self._get_status_code_for_method('/options', 'OPTIONS') self.assertEqual(200, status_code) def test_get_status_code_for_method_patch(self) -> None: """ Test that accessing a URL via PATCH. Expected result: The correct status code is returned. """ self.app.add_url_rule('/patch', 'patch', self.example_route, methods=['PATCH']) status_code = self._get_status_code_for_method('/patch', 'PATCH') self.assertEqual(200, status_code) def test_get_status_code_for_method_post(self) -> None: """ Test that accessing a URL via POST. Expected result: The correct status code is returned. """ self.app.add_url_rule('/post', 'post', self.example_route, methods=['POST']) status_code = self._get_status_code_for_method('/post', 'POST') self.assertEqual(200, status_code) def test_get_status_code_for_method_put(self) -> None: """ Test that accessing a URL via PUT. Expected result: The correct status code is returned. """ self.app.add_url_rule('/put', 'put', self.example_route, methods=['PUT']) status_code = self._get_status_code_for_method('/put', 'PUT') self.assertEqual(200, status_code) def test_get_status_code_for_method_invalid_method(self) -> None: """ Test that accessing a URL via an invalid method. 
Expected result: A value error is raised. """ with self.assertRaises(ValueError) as exception_cm: self._get_status_code_for_method('/invalid', 'INVALID') self.assertEqual('Invalid HTTP method INVALID', str(exception_cm.exception)) # endregion # region Permissions @staticmethod def _get_messages_for_inaccessible_url(permissions: Set[Permission]) -> Set[str]: """ Get the failure message for each given permission when a URL should be accessible with the permission, but it is not. The HTTP method in the message will always be 'GET', the URL '/example'. :param permissions: The permissions for which the failure messages will be created. :return: A set of all messages, one for each permission. """ messages = set() for permission in permissions: messages.add(f'403 == 403 : GET /example must be accessible with permission {permission}, but it is not.') return messages @staticmethod def _get_messages_for_accessible_url(permissions: Set[Permission]) -> Set[str]: """ Get the failure message for each given permission when a URL should not be accessible with the permission, but it is. The HTTP method in the message will always be 'GET', the URL '/example', the actual status code 200. :param permissions: The permissions for which the failure messages will be created. :return: A set of all message, one for each permissions. """ messages = set() for permission in permissions: messages.add(f'403 != 200 : GET /example must not be accessible with permission {permission}, but it is.') return messages def test_assert_no_permission_required_if_no_permission_is_required(self) -> None: """ Test the assertion `assert_no_permission_required` if the accessed URL does in fact not require any permissions. Expected result: No errors are raised. 
""" self.app.add_url_rule('/example', 'example', self.example_route, methods=['GET', 'POST']) self.assert_no_permission_required('/example') self.assert_no_permission_required('/example', method='POST') def test_assert_no_permission_required_but_url_requires_a_permission(self) -> None: """ Test the assertion `assert_no_permission_required` if the accessed URL requires a permission. Expected result: An error is raised for any permission other than the required one. """ decorator = permission_required(Permission.EditUser) decorated_view = decorator(self.example_route) self.app.add_url_rule('/example', 'example', decorated_view) with self.assertRaises(self.failureException) as exception_cm: self.assert_no_permission_required('/example') # The assertion can fail for any of these permissions since the assertion uses sets. expected_messages = self._get_messages_for_inaccessible_url({ Permission(0), Permission.EditRole, Permission.EditGlobalSettings, Permission.EditRole | Permission.EditGlobalSettings }) self.assertIn(str(exception_cm.exception), expected_messages) def test_assert_permission_required_with_correct_permissions(self) -> None: """ Test the assertion `assert_permission_required` with the permission that the accessed URL requires. Expected result: No errors are raised. """ decorator = permission_required(Permission.EditRole) decorated_view = decorator(self.example_route) self.app.add_url_rule('/example', 'example', decorated_view, methods=['GET', 'POST']) self.assert_permission_required('/example', Permission.EditRole) self.assert_permission_required('/example', Permission.EditRole, method='POST') def test_assert_permission_required_with_incorrect_permissions(self) -> None: """ Test the assertion `assert_permission_required` with a different permission than the one required by the URL. Expected result: An error is raised than the different one. 
""" decorator = permission_required(Permission.EditRole) decorated_view = decorator(self.example_route) self.app.add_url_rule('/example', 'example', decorated_view) with self.assertRaises(self.failureException) as exception_cm: self.assert_permission_required('/example', Permission.EditUser) # The assertion can fail for any of these permissions since the assertion uses sets. expected_messages = self._get_messages_for_inaccessible_url({ Permission.EditUser, Permission.EditUser | Permission.EditGlobalSettings, }) self.assertIn(str(exception_cm.exception), expected_messages) def test_assert_permission_required_without_required_permissions(self) -> None: """ Test the assertion `assert_permission_required` assuming that a URL requires a permission, while in fact, it does not. Expected result: An error is raised for any other permission than the assumed one. """ self.app.add_url_rule('/example', 'example', self.example_route) with self.assertRaises(self.failureException) as exception_cm: self.assert_permission_required('/example', Permission.EditUser) expected_messages = self._get_messages_for_accessible_url({ Permission(0), Permission.EditRole, Permission.EditGlobalSettings, Permission.EditRole | Permission.EditGlobalSettings, }) self.assertIn(str(exception_cm.exception), expected_messages) def test_assert_permission_required_one_of_with_correct_permissions(self) -> None: """ Test the assertion `assert_permission_required_one_of` with the same permissions that are required by the URL. Expected result: No errors are raised. 
""" decorator = permission_required_one_of(Permission.EditRole, Permission.EditUser) decorated_view = decorator(self.example_route) self.app.add_url_rule('/example', 'example', decorated_view, methods=['GET', 'POST']) self.assert_permission_required_one_of('/example', Permission.EditRole, Permission.EditUser) self.assert_permission_required_one_of('/example', Permission.EditRole, Permission.EditUser, method='POST') def test_assert_permission_required_one_of_with_too_many_permissions(self) -> None: """ Test the assertion `assert_permission_required_one_of` with more permissions than actually required by the URL. Expected result: An error is raised for the permission that is given in the assertion, but in in fact not required. """ decorator = permission_required_one_of(Permission.EditRole, Permission.EditUser) decorated_view = decorator(self.example_route) self.app.add_url_rule('/example', 'example', decorated_view) with self.assertRaises(self.failureException) as exception_cm: self.assert_permission_required_one_of('/example', Permission.EditRole, Permission.EditUser, Permission.EditGlobalSettings) expected_messages = self._get_messages_for_inaccessible_url({ Permission.EditGlobalSettings, }) self.assertIn(str(exception_cm.exception), expected_messages) def test_assert_permission_required_one_of_with_too_few_permissions(self) -> None: """ Test the assertion `assert_permission_required_one_of` with fewer permissions than actually required by the URL. Expected result: An error is raised for the permission that is in fact required, but not given in the assertion. 
""" decorator = permission_required_one_of(Permission.EditRole, Permission.EditUser, Permission.EditGlobalSettings) decorated_view = decorator(self.example_route) self.app.add_url_rule('/example', 'example', decorated_view) with self.assertRaises(self.failureException) as exception_cm: self.assert_permission_required_one_of('/example', Permission.EditRole, Permission.EditUser) expected_messages = self._get_messages_for_accessible_url({ Permission.EditGlobalSettings, }) self.assertIn(str(exception_cm.exception), expected_messages) def test_assert_permission_required_all_with_correct_permissions(self) -> None: """ Test the assertion `assert_permission_required_all` with the same permissions that are required by the URL. Expected result: No errors are raised. """ decorator = permission_required_all(Permission.EditRole, Permission.EditUser) decorated_view = decorator(self.example_route) self.app.add_url_rule('/example', 'example', decorated_view, methods=['GET', 'POST']) self.assert_permission_required_all('/example', Permission.EditRole, Permission.EditUser) self.assert_permission_required_all('/example', Permission.EditRole, Permission.EditUser, method='POST') def test_assert_permission_required_all_with_too_many_permissions(self) -> None: """ Test the assertion `assert_permission_required_all` with more permissions than actually required by the URL. Expected result: An error is raised that the URL is accessible with the permissions that are actually required. 
""" decorator = permission_required_all(Permission.EditRole, Permission.EditUser) decorated_view = decorator(self.example_route) self.app.add_url_rule('/example', 'example', decorated_view) with self.assertRaises(self.failureException) as exception_cm: self.assert_permission_required_all('/example', Permission.EditRole, Permission.EditUser, Permission.EditGlobalSettings) expected_messages = self._get_messages_for_accessible_url({ Permission.EditUser | Permission.EditRole, }) self.assertIn(str(exception_cm.exception), expected_messages) def test_assert_permission_required_all_with_too_few_permissions(self) -> None: """ Test the assertion `assert_permission_required_all` with fewer permissions than actually required by the URL. Expected result: An error is raised that the URL is not accessible with the permissions given in the assertion. """ decorator = permission_required_all(Permission.EditRole, Permission.EditUser, Permission.EditGlobalSettings) decorated_view = decorator(self.example_route) self.app.add_url_rule('/example', 'example', decorated_view) with self.assertRaises(self.failureException) as exception_cm: self.assert_permission_required_all('/example', Permission.EditRole, Permission.EditUser) expected_messages = self._get_messages_for_inaccessible_url({ Permission.EditUser | Permission.EditRole, }) self.assertIn(str(exception_cm.exception), expected_messages) # endregion # region Application Entities def test_create_user_without_role(self) -> None: """ Test creating a new user without a role. Expected result: The user is created with the given parameters and without a role. The user is saved on the DB. 
""" email = 'john@doe.com' name = 'John Doe' password = '123ABC$' user = self.create_user(email, name, password) self.assertIsNotNone(user) self.assertEqual(email, user.email) self.assertEqual(name, user.name) self.assertTrue(user.check_password(password)) self.assertIsNone(user.role) self.assertEqual(user, User.load_from_id(user.id)) def test_create_user_with_role(self) -> None: """ Test creating a new user with a given role. Expected result: The user is created with the given parameters and the role. The user is saved on the DB. """ role = self.create_role(Permission.EditUser, Permission.EditRole) email = 'john@doe.com' name = 'John Doe' password = '123ABC$' user = self.create_user(email, name, password, role) self.assertIsNotNone(user) self.assertEqual(email, user.email) self.assertEqual(name, user.name) self.assertTrue(user.check_password(password)) self.assertEqual(role, user.role) self.assertEqual(user, User.load_from_id(user.id)) def test_create_and_login_user(self) -> None: """ Test creating a new user and logging them in. Expected result: The user is created and logged in. """ user = self.create_and_login_user() self.assertIsNotNone(user) self.assertEqual('doe@example.com', user.email) self.assertEqual('Jane Doe', user.name) self.assertTrue(user.check_password('ABC123!')) self.assertIsNone(user.role) self.assertEqual(user, User.load_from_id(user.id)) # Check if the login was successful by checking if the login page is shown. response = self.client.get('/user/login', follow_redirects=True) data = response.get_data(as_text=True) self.assertNotIn('<h1>Log In</h1>', data) self.assertIn('<h1>Dashboard</h1>', data) def test_create_role(self) -> None: """ Test creating a new role. Expected result: The role is created with the given permissions. 
""" name = 'Administrator' role = self.create_role(Permission.EditUser, Permission.EditRole, name=name) self.assertIsNotNone(role) self.assertEqual(name, role.name) self.assertTrue(role.has_permissions_all(Permission.EditUser, Permission.EditRole)) self.assertFalse(role.has_permissions_one_of(Permission.EditGlobalSettings)) self.assertEqual(role, Role.load_from_id(role.id)) # endregion # region Routes def test_aborting_route(self) -> None: """ Test the aborting route handler for a 404 error. Expected result: The NotFound error is raised. """ with self.assertRaises(NotFound): self.aborting_route(404) def test_example_route(self) -> None: """ Test the example route handle. Expected result: 'Hello, world!' is returned. """ self.assertEqual('Hello, world!', self.example_route()) # endregion # region Other Helper Methods def test_get_false(self) -> None: """ Test getting `False`. Expected Result: `False`. """ self.assertFalse(self.get_false()) # endregion
38.44427
120
0.659357
2,887
24,489
5.390371
0.079321
0.056677
0.02776
0.019213
0.810436
0.7767
0.718866
0.692842
0.629161
0.619586
0
0.004971
0.25244
24,489
636
121
38.504717
0.845087
0.26808
0
0.503817
0
0
0.08669
0
0
0
0
0
0.347328
1
0.145038
false
0.057252
0.034351
0
0.19084
0
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
5
cb2506e7e42be4e4f4b29c4f30e15fd536a55961
5,969
py
Python
napari_clusters_plotter/_tests/test_clustering.py
BiAPoL/napari-clusters-plotter
e9df50f2938384764dbe28753e19093e1ed37a05
[ "BSD-3-Clause" ]
16
2021-11-15T16:04:42.000Z
2022-03-30T21:29:32.000Z
napari_clusters_plotter/_tests/test_clustering.py
BiAPoL/napari-clusters-plotter
e9df50f2938384764dbe28753e19093e1ed37a05
[ "BSD-3-Clause" ]
45
2021-11-16T10:06:27.000Z
2022-03-31T16:04:38.000Z
napari_clusters_plotter/_tests/test_clustering.py
BiAPoL/napari-clusters-plotter
e9df50f2938384764dbe28753e19093e1ed37a05
[ "BSD-3-Clause" ]
2
2021-12-08T17:30:05.000Z
2022-01-21T13:01:49.000Z
"""Tests for the clustering widget and clustering algorithms of napari-clusters-plotter.

Fix: ``np.NaN`` was an alias removed in NumPy 2.0; use the canonical ``np.nan``.
Also drops a leftover debug ``print`` in ``test_gaussian_mixture_model``.
"""
import numpy as np
from sklearn import datasets


def test_clustering_widget(make_napari_viewer):
    """The ClusteringWidget can be added as a dock widget to a napari viewer."""
    import napari_clusters_plotter as ncp

    viewer = make_napari_viewer()
    widget_list = ncp.napari_experimental_provide_dock_widget()
    n_wdgts = len(viewer.window._dock_widgets)

    for widget in widget_list:
        _widget = widget(viewer)
        if isinstance(_widget, ncp._clustering.ClusteringWidget):
            plot_widget = _widget

    viewer.window.add_dock_widget(plot_widget)
    assert len(viewer.window._dock_widgets) == n_wdgts + 1


def test_kmeans_clustering():
    """KMeans recovers two blobs and propagates NaN rows as NaN predictions."""
    # create an example dataset
    n_samples = 20
    n_centers = 2
    data = datasets.make_blobs(
        n_samples=n_samples,
        random_state=1,
        centers=n_centers,
        cluster_std=0.3,
        n_features=2,
    )
    true_class = data[1]
    measurements = data[0]

    from napari_clusters_plotter._clustering import kmeans_clustering

    # kmeans_clustering returns (str, np.ndarray), where the first item is algorithm name
    result = kmeans_clustering(
        measurements,
        cluster_number=n_centers,
        iterations=50,
    )

    assert len(np.unique(result[1])) == 2
    # Cluster label assignment is arbitrary; for this seed it is inverted.
    assert np.array_equal(1 - true_class, result[1])

    # Corrupt one sample: a NaN measurement must yield a NaN prediction.
    true_class[n_samples // 2] = -1
    measurements[n_samples // 2, :] = np.nan

    result = kmeans_clustering(
        measurements,
        cluster_number=n_centers,
        iterations=50,
    )

    assert np.isnan(result[1][n_samples // 2])
    assert np.array_equal(
        result[1][~np.isnan(result[1])], 1 - true_class[~np.isnan(result[1])]
    )


def test_hdbscan_clustering():
    """HDBSCAN separates the two moons and propagates NaN rows as NaN predictions."""
    # create an example dataset
    n_samples = 100
    data = datasets.make_moons(n_samples=n_samples, random_state=1, noise=0.05)
    true_class = data[1]
    measurements = data[0]

    from napari_clusters_plotter._clustering import hdbscan_clustering

    min_cluster_size = 5
    min_samples = 2  # number of samples that should be included in one cluster

    # hdbscan_clustering returns (str, np.ndarray), where the first item is algorithm name
    result = hdbscan_clustering(
        measurements,
        min_cluster_size=min_cluster_size,
        min_samples=min_samples,
    )

    assert len(np.unique(result[1])) == 2
    assert np.array_equal(true_class, result[1])

    # Corrupt one sample: a NaN measurement must yield a NaN prediction.
    true_class[n_samples // 2] = -1
    measurements[n_samples // 2, :] = np.nan

    result = hdbscan_clustering(
        measurements,
        min_cluster_size=min_cluster_size,
        min_samples=min_samples,
    )

    assert np.isnan(result[1][n_samples // 2])
    assert np.array_equal(
        result[1][~np.isnan(result[1])], true_class[~np.isnan(result[1])]
    )


def test_gaussian_mixture_model():
    """GMM recovers two blobs (up to label permutation) and handles NaN rows."""
    # create an example dataset
    n_samples = 20
    n_centers = 2
    data = datasets.make_blobs(
        n_samples=n_samples,
        random_state=1,
        centers=n_centers,
        cluster_std=0.3,
        n_features=2,
    )
    true_class = data[1]
    measurements = data[0]

    from napari_clusters_plotter._clustering import gaussian_mixture_model

    # gaussian_mixture_model returns (str, np.ndarray), where the first item is algorithm name
    result = gaussian_mixture_model(measurements, cluster_number=2)

    assert len(np.unique(result[1])) == n_centers
    # Cluster labels may be assigned either way round.
    assert np.array_equal(true_class, (result[1])) or np.array_equal(
        1 - true_class, (result[1])
    )

    # Test bad data
    true_class[n_samples // 2] = -1
    measurements[n_samples // 2, :] = np.nan

    result = gaussian_mixture_model(measurements, cluster_number=2)

    assert np.isnan(result[1][n_samples // 2])

    true_result = true_class[~np.isnan(result[1])].astype(bool)
    result = result[1][~np.isnan(result[1])].astype(bool)
    assert np.array_equal(result, 1 - true_result) or np.array_equal(
        result, true_result
    )


def test_agglomerative_clustering():
    """Agglomerative clustering recovers two blobs and handles NaN rows."""
    # create an example dataset
    n_samples = 20
    n_centers = 2
    data = datasets.make_blobs(
        n_samples=n_samples,
        random_state=1,
        centers=n_centers,
        cluster_std=0.3,
        n_features=2,
    )
    true_class = data[1]
    measurements = data[0]

    from napari_clusters_plotter._clustering import agglomerative_clustering

    result = agglomerative_clustering(measurements, cluster_number=2, n_neighbors=2)

    assert len(np.unique(result[1])) == n_centers
    # Cluster labels may be assigned either way round.
    assert np.array_equal(true_class, (result[1])) or np.array_equal(
        1 - true_class, (result[1])
    )

    # Test bad data
    true_class[n_samples // 2] = -1
    measurements[n_samples // 2, :] = np.nan

    result = agglomerative_clustering(measurements, cluster_number=2, n_neighbors=2)

    assert np.isnan(result[1][n_samples // 2])

    true_class = true_class[~np.isnan(result[1])]
    result = result[1][~np.isnan(result[1])]
    assert np.array_equal(true_class, result) or np.array_equal(1 - true_class, result)


def test_mean_shift():
    """Mean shift recovers two blobs and handles NaN rows."""
    # create an example dataset
    n_samples = 20
    n_centers = 2
    data = datasets.make_blobs(
        n_samples=n_samples,
        random_state=1,
        centers=n_centers,
        cluster_std=0.3,
        n_features=2,
    )
    true_class = data[1]
    measurements = data[0]

    from napari_clusters_plotter._clustering import mean_shift

    result = mean_shift(measurements, quantile=0.5, n_samples=50)

    assert len(np.unique(result[1])) == n_centers
    # Cluster labels may be assigned either way round.
    assert np.array_equal(true_class, result[1]) or np.array_equal(
        1 - true_class, result[1]
    )

    # Test bad data
    true_class[n_samples // 2] = -1
    measurements[n_samples // 2, :] = np.nan

    result = mean_shift(measurements, quantile=0.5, n_samples=50)

    assert np.isnan(result[1][n_samples // 2])
    assert np.array_equal(
        result[1][~np.isnan(result[1])], 1 - true_class[~np.isnan(result[1])]
    )


if __name__ == "__main__":
    test_gaussian_mixture_model()
26.766816
94
0.668789
815
5,969
4.631902
0.12638
0.064901
0.047682
0.055629
0.811126
0.792053
0.767947
0.726887
0.69457
0.684768
0
0.028126
0.225666
5,969
222
95
26.887387
0.78862
0.081421
0
0.62
0
0
0.001463
0
0
0
0
0
0.14
1
0.04
false
0
0.053333
0
0.093333
0.006667
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
cb33ac79dda4e6b5104456f64ff7795fe559e155
12,776
py
Python
pybind/slxos/v17r_2_00/hardware/profile/tcam/limit/__init__.py
extremenetworks/pybind
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
[ "Apache-2.0" ]
null
null
null
pybind/slxos/v17r_2_00/hardware/profile/tcam/limit/__init__.py
extremenetworks/pybind
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
[ "Apache-2.0" ]
null
null
null
pybind/slxos/v17r_2_00/hardware/profile/tcam/limit/__init__.py
extremenetworks/pybind
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
[ "Apache-2.0" ]
1
2021-11-05T22:15:42.000Z
2021-11-05T22:15:42.000Z
from operator import attrgetter import pyangbind.lib.xpathhelper as xpathhelper from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType from pyangbind.lib.base import PybindBase from decimal import Decimal from bitarray import bitarray import __builtin__ class limit(PybindBase): """ This class was auto-generated by the PythonClass plugin for PYANG from YANG module brocade-hardware - based on the path /hardware/profile/tcam/limit. Each member element of the container is represented as a class variable - with a specific YANG type. """ __slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__l2l3v4_app','__v6_app','__flex_acl',) _yang_name = 'limit' _rest_name = 'limit' _pybind_generated_by = 'container' def __init__(self, *args, **kwargs): path_helper_ = kwargs.pop("path_helper", None) if path_helper_ is False: self._path_helper = False elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper): self._path_helper = path_helper_ elif hasattr(self, "_parent"): path_helper_ = getattr(self._parent, "_path_helper", False) self._path_helper = path_helper_ else: self._path_helper = False extmethods = kwargs.pop("extmethods", None) if extmethods is False: self._extmethods = False elif extmethods is not None and isinstance(extmethods, dict): self._extmethods = extmethods elif hasattr(self, "_parent"): extmethods = getattr(self._parent, "_extmethods", None) self._extmethods = extmethods else: self._extmethods = False self.__l2l3v4_app = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0 .. 
12288']}), is_leaf=True, yang_name="l2l3v4-app", rest_name="l2l3v4-app", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'layer 2/3 IPv4 application set'}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='uint32', is_config=True) self.__flex_acl = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0 .. 24576']}), is_leaf=True, yang_name="flex-acl", rest_name="flex-acl", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'flex ACL/user defined keys in ACL'}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='uint32', is_config=True) self.__v6_app = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0 .. 
12288']}), is_leaf=True, yang_name="v6-app", rest_name="v6-app", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'IPv6 application set'}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='uint32', is_config=True) load = kwargs.pop("load", None) if args: if len(args) > 1: raise TypeError("cannot create a YANG container with >1 argument") all_attr = True for e in self._pyangbind_elements: if not hasattr(args[0], e): all_attr = False break if not all_attr: raise ValueError("Supplied object did not have the correct attributes") for e in self._pyangbind_elements: nobj = getattr(args[0], e) if nobj._changed() is False: continue setmethod = getattr(self, "_set_%s" % e) if load is None: setmethod(getattr(args[0], e)) else: setmethod(getattr(args[0], e), load=load) def _path(self): if hasattr(self, "_parent"): return self._parent._path()+[self._yang_name] else: return [u'hardware', u'profile', u'tcam', u'limit'] def _rest_path(self): if hasattr(self, "_parent"): if self._rest_name: return self._parent._rest_path()+[self._rest_name] else: return self._parent._rest_path() else: return [u'hardware', u'profile', u'tcam', u'limit'] def _get_l2l3v4_app(self): """ Getter method for l2l3v4_app, mapped from YANG variable /hardware/profile/tcam/limit/l2l3v4_app (uint32) """ return self.__l2l3v4_app def _set_l2l3v4_app(self, v, load=False): """ Setter method for l2l3v4_app, mapped from YANG variable /hardware/profile/tcam/limit/l2l3v4_app (uint32) If this variable is read-only (config: false) in the source YANG file, then _set_l2l3v4_app is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_l2l3v4_app() directly. 
""" if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0 .. 12288']}), is_leaf=True, yang_name="l2l3v4-app", rest_name="l2l3v4-app", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'layer 2/3 IPv4 application set'}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='uint32', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """l2l3v4_app must be of a type compatible with uint32""", 'defined-type': "uint32", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0 .. 12288']}), is_leaf=True, yang_name="l2l3v4-app", rest_name="l2l3v4-app", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'layer 2/3 IPv4 application set'}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='uint32', is_config=True)""", }) self.__l2l3v4_app = t if hasattr(self, '_set'): self._set() def _unset_l2l3v4_app(self): self.__l2l3v4_app = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0 .. 
12288']}), is_leaf=True, yang_name="l2l3v4-app", rest_name="l2l3v4-app", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'layer 2/3 IPv4 application set'}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='uint32', is_config=True) def _get_v6_app(self): """ Getter method for v6_app, mapped from YANG variable /hardware/profile/tcam/limit/v6_app (uint32) """ return self.__v6_app def _set_v6_app(self, v, load=False): """ Setter method for v6_app, mapped from YANG variable /hardware/profile/tcam/limit/v6_app (uint32) If this variable is read-only (config: false) in the source YANG file, then _set_v6_app is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_v6_app() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0 .. 12288']}), is_leaf=True, yang_name="v6-app", rest_name="v6-app", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'IPv6 application set'}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='uint32', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """v6_app must be of a type compatible with uint32""", 'defined-type': "uint32", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0 .. 
12288']}), is_leaf=True, yang_name="v6-app", rest_name="v6-app", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'IPv6 application set'}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='uint32', is_config=True)""", }) self.__v6_app = t if hasattr(self, '_set'): self._set() def _unset_v6_app(self): self.__v6_app = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0 .. 12288']}), is_leaf=True, yang_name="v6-app", rest_name="v6-app", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'IPv6 application set'}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='uint32', is_config=True) def _get_flex_acl(self): """ Getter method for flex_acl, mapped from YANG variable /hardware/profile/tcam/limit/flex_acl (uint32) """ return self.__flex_acl def _set_flex_acl(self, v, load=False): """ Setter method for flex_acl, mapped from YANG variable /hardware/profile/tcam/limit/flex_acl (uint32) If this variable is read-only (config: false) in the source YANG file, then _set_flex_acl is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_flex_acl() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0 .. 
24576']}), is_leaf=True, yang_name="flex-acl", rest_name="flex-acl", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'flex ACL/user defined keys in ACL'}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='uint32', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """flex_acl must be of a type compatible with uint32""", 'defined-type': "uint32", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0 .. 24576']}), is_leaf=True, yang_name="flex-acl", rest_name="flex-acl", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'flex ACL/user defined keys in ACL'}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='uint32', is_config=True)""", }) self.__flex_acl = t if hasattr(self, '_set'): self._set() def _unset_flex_acl(self): self.__flex_acl = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0 .. 
24576']}), is_leaf=True, yang_name="flex-acl", rest_name="flex-acl", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'flex ACL/user defined keys in ACL'}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='uint32', is_config=True) l2l3v4_app = __builtin__.property(_get_l2l3v4_app, _set_l2l3v4_app) v6_app = __builtin__.property(_get_v6_app, _set_v6_app) flex_acl = __builtin__.property(_get_flex_acl, _set_flex_acl) _pyangbind_elements = {'l2l3v4_app': l2l3v4_app, 'v6_app': v6_app, 'flex_acl': flex_acl, }
66.196891
580
0.722996
1,755
12,776
5.011396
0.103704
0.043206
0.044571
0.042297
0.794201
0.759864
0.747584
0.74531
0.74008
0.736441
0
0.037148
0.136115
12,776
192
581
66.541667
0.759717
0.120304
0
0.429688
0
0.023438
0.329874
0.120848
0
0
0
0
0
1
0.09375
false
0
0.0625
0
0.289063
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
cb34f07301b181cb6a7d9e9cb567d4d9ad232999
197
py
Python
kanga/stats/mean_array_summary.py
kushagragpt99/kanga
1f2343d15965148ae7c1dea95168c31a87b27c4f
[ "MIT" ]
1
2022-02-03T08:50:35.000Z
2022-02-03T08:50:35.000Z
kanga/stats/mean_array_summary.py
kushagragpt99/kanga
1f2343d15965148ae7c1dea95168c31a87b27c4f
[ "MIT" ]
null
null
null
kanga/stats/mean_array_summary.py
kushagragpt99/kanga
1f2343d15965148ae7c1dea95168c31a87b27c4f
[ "MIT" ]
null
null
null
from functools import reduce
from operator import add


# x is a numpy array of 3 dimensions, (chain, MC iteration, parameter)
def mean_array_summary(x, g):
    """Return the average of g applied to each chain of x."""
    accumulated = reduce(add, (g(chain) for chain in x))
    return accumulated / len(x)
28.142857
70
0.736041
34
197
4.205882
0.735294
0
0
0
0
0
0
0
0
0
0
0.006098
0.167513
197
6
71
32.833333
0.865854
0.345178
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.5
0.25
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
5
cb41eeae770bb47375efceacac4db4431c0d4f94
1,241
py
Python
test_main.py
cesarbruschetta/cloud-cost
9bf5a20bebe73646dcc599c80addac413d32e218
[ "BSD-2-Clause" ]
null
null
null
test_main.py
cesarbruschetta/cloud-cost
9bf5a20bebe73646dcc599c80addac413d32e218
[ "BSD-2-Clause" ]
null
null
null
test_main.py
cesarbruschetta/cloud-cost
9bf5a20bebe73646dcc599c80addac413d32e218
[ "BSD-2-Clause" ]
null
null
null
import unittest

from main import *


class CloudCostTests(unittest.TestCase):
    """Sanity checks: every CloudCost estimate is positive and well-shaped."""

    def test_lambda_execution(self):
        cc = CloudCost()
        self.assertGreater(cc.lambda_execution(), 0)

    def test_app_execution(self):
        cc = CloudCost()
        # Cost must stay positive across a range of volumes.
        for volume in (1, 50, 100, 1000, 5000):
            self.assertGreater(cc.app_execution(volume), 0)

    def test_month(self):
        cc = CloudCost()
        cases = ((1, 1), (1, 2), (1, 4), (50, 4), (100, 4), (1000, 4), (50000, 4))
        for first, second in cases:
            self.assertGreater(cc.month(first, second), 0)

    def test_year(self):
        cc = CloudCost()
        # A year is always reported as twelve monthly figures.
        for volume in (1, 50, 100, 1000, 5000):
            self.assertEqual(12, len(cc.year(volume)))


if __name__ == "__main__":
    unittest.main()
31.025
53
0.629331
165
1,241
4.612121
0.193939
0.290407
0.324573
0.262812
0.727989
0.701708
0.113009
0
0
0
0
0.078044
0.225625
1,241
39
54
31.820513
0.71384
0
0
0.129032
0
0
0.006446
0
0
0
0
0
0.580645
1
0.129032
false
0
0.064516
0
0.225806
0
0
0
0
null
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
5
cb6d7f4753b3b88a179bbd54dc4cc182f8761846
43
py
Python
venv/Lib/site-packages/win32com/test/__init__.py
ajayiagbebaku/NFL-Model
afcc67a85ca7138c58c3334d45988ada2da158ed
[ "MIT" ]
1,078
2016-07-19T02:48:30.000Z
2022-03-30T21:22:34.000Z
venv/Lib/site-packages/win32com/test/__init__.py
ajayiagbebaku/NFL-Model
afcc67a85ca7138c58c3334d45988ada2da158ed
[ "MIT" ]
576
2017-05-21T12:36:48.000Z
2022-03-30T13:47:03.000Z
venv/Lib/site-packages/win32com/test/__init__.py
ajayiagbebaku/NFL-Model
afcc67a85ca7138c58c3334d45988ada2da158ed
[ "MIT" ]
269
2017-05-21T04:44:47.000Z
2022-03-31T16:18:13.000Z
# Empty file to designate a Python package
21.5
42
0.790698
7
43
4.857143
1
0
0
0
0
0
0
0
0
0
0
0
0.186047
43
1
43
43
0.971429
0.930233
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
cb886f52ef70a3684b4a85618336a86eee42eae2
118
py
Python
streamlined/middlewares/setup.py
pengzhengyi/Streamlined
59743f44e349318e51e1db4e72c7d1b5992f25be
[ "MIT" ]
null
null
null
streamlined/middlewares/setup.py
pengzhengyi/Streamlined
59743f44e349318e51e1db4e72c7d1b5992f25be
[ "MIT" ]
5
2021-09-11T07:46:55.000Z
2022-03-12T02:03:54.000Z
streamlined/middlewares/setup.py
pengzhengyi/Streamlined
59743f44e349318e51e1db4e72c7d1b5992f25be
[ "MIT" ]
null
null
null
from .action import Action
# NOTE(review): Context appears unused in this module — possibly a re-export
# for package consumers; confirm before removing.
from .middleware import Context


class Setup(Action):
    """Marker Action subclass representing the setup phase; adds no behavior."""

    pass


# Canonical name of the Setup action, as reported by Action.get_name().
SETUP = Setup.get_name()
11.8
31
0.737288
16
118
5.375
0.625
0
0
0
0
0
0
0
0
0
0
0
0.186441
118
9
32
13.111111
0.895833
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0.2
0.4
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
5
cb8927d94d2b64199108e8ee9408ef28f60d5f7b
83
py
Python
widark/widget/components/spacer.py
knowark/widark
ea3199c2665587d53168963dc1b9c07929e2baff
[ "MIT" ]
1
2020-09-22T14:34:20.000Z
2020-09-22T14:34:20.000Z
widark/widget/components/spacer.py
knowark/widark
ea3199c2665587d53168963dc1b9c07929e2baff
[ "MIT" ]
4
2020-06-12T02:16:38.000Z
2020-06-15T16:43:17.000Z
widark/widget/components/spacer.py
knowark/widark
ea3199c2665587d53168963dc1b9c07929e2baff
[ "MIT" ]
null
null
null
from ..widget import Widget


class Spacer(Widget):
    """Spacer blank object."""
    # Fix: the original docstring opened with four quotes (""""...), leaving a
    # stray '"' character at the start of the documented text.
13.833333
30
0.674699
10
83
5.6
0.7
0
0
0
0
0
0
0
0
0
0
0
0.180723
83
5
31
16.6
0.823529
0.240964
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
cb9b725ef865623ac51a2df10e2197f8934994fb
141
py
Python
py_roboat_enviro/__init__.py
drewmee/py-roboat-enviro
2e836bc75aaddce042b5771f50f32148f3c4e10a
[ "MIT" ]
null
null
null
py_roboat_enviro/__init__.py
drewmee/py-roboat-enviro
2e836bc75aaddce042b5771f50f32148f3c4e10a
[ "MIT" ]
null
null
null
py_roboat_enviro/__init__.py
drewmee/py-roboat-enviro
2e836bc75aaddce042b5771f50f32148f3c4e10a
[ "MIT" ]
null
null
null
from pkg_resources import get_distribution

from .roboat_enviro_backend import *

# Package version, resolved from the installed distribution's metadata.
# NOTE(review): pkg_resources is deprecated in modern setuptools;
# importlib.metadata.version is the stdlib replacement — confirm the
# minimum supported Python before switching.
__version__ = get_distribution("py-roboat-enviro").version
23.5
58
0.836879
18
141
6.055556
0.611111
0.275229
0
0
0
0
0
0
0
0
0
0
0.092199
141
5
59
28.2
0.851563
0
0
0
0
0
0.113475
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
cbb743b65e1c6a2bd12c720b04e817d3e7a04a9f
194
py
Python
iiotsapps/graphql/farms/schemas/farm.py
CorpofloTechCommunity/Intelligent-IOT-system-
99af2efe1e8c7623ed76df246e29cfb5654474af
[ "MIT" ]
null
null
null
iiotsapps/graphql/farms/schemas/farm.py
CorpofloTechCommunity/Intelligent-IOT-system-
99af2efe1e8c7623ed76df246e29cfb5654474af
[ "MIT" ]
1
2020-08-02T11:58:22.000Z
2020-08-02T11:58:22.000Z
iiotsapps/graphql/farms/schemas/farm.py
CorpofloTechCommunity/Intelligent-IOT-system-
99af2efe1e8c7623ed76df246e29cfb5654474af
[ "MIT" ]
2
2020-07-31T11:08:14.000Z
2020-08-19T10:46:43.000Z
import graphene

from ..queries import FarmQuery
from ..mutations import FarmMutation


class FarmQueries(FarmQuery):
    """Aggregated GraphQL queries for farms; inherits everything from FarmQuery."""

    pass


class FarmMutations(FarmMutation, graphene.ObjectType):
    """Aggregated GraphQL mutations for farms, exposed as a graphene ObjectType."""

    pass
14.923077
55
0.783505
20
194
7.6
0.6
0
0
0
0
0
0
0
0
0
0
0
0.154639
194
12
56
16.166667
0.926829
0
0
0.285714
0
0
0
0
0
0
0
0
0
1
0
true
0.285714
0.428571
0
0.714286
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
5
cbc3f833ca4475c1bc4c5fd8fc83dc0cebeadf0d
200
py
Python
transpydata/config/__init__.py
salpreh/transpydata
88cecf53f93b316e8fd95983c28a918ca2f41cf4
[ "Apache-2.0" ]
null
null
null
transpydata/config/__init__.py
salpreh/transpydata
88cecf53f93b316e8fd95983c28a918ca2f41cf4
[ "Apache-2.0" ]
4
2020-12-09T17:56:21.000Z
2021-01-25T22:03:12.000Z
transpydata/config/__init__.py
salpreh/transpydata
88cecf53f93b316e8fd95983c28a918ca2f41cf4
[ "Apache-2.0" ]
null
null
null
from .IConfigurable import IConfigurable from .IProcessor import IProcessor from .IResourceAware import IResourceAware from .LoggableMixin import LoggableMixin from .IDataService import IDataService
28.571429
42
0.87
20
200
8.7
0.35
0
0
0
0
0
0
0
0
0
0
0
0.105
200
6
43
33.333333
0.972067
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
1dac58ddb4c3d40635705259cead9733b86b7447
239
py
Python
tgt_grease/core/__init__.py
jairamd22/grease
7ebf3df71d5c80a8ed9df44d9b64b735a9d0f899
[ "MIT" ]
44
2017-09-29T00:53:44.000Z
2020-12-20T13:43:49.000Z
tgt_grease/core/__init__.py
jairamd22/grease
7ebf3df71d5c80a8ed9df44d9b64b735a9d0f899
[ "MIT" ]
39
2017-09-29T10:26:10.000Z
2019-05-02T21:07:59.000Z
tgt_grease/core/__init__.py
jairamd22/grease
7ebf3df71d5c80a8ed9df44d9b64b735a9d0f899
[ "MIT" ]
26
2017-09-28T18:00:39.000Z
2021-10-17T15:14:39.000Z
from .Configuration import Configuration from .Notifier import Notifications from .Logging import Logging from .Importer import ImportTool from .Connectivity import Mongo from .InversionOfControl import GreaseContainer from . import Types
29.875
47
0.853556
27
239
7.555556
0.481481
0
0
0
0
0
0
0
0
0
0
0
0.117155
239
7
48
34.142857
0.966825
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
1db6a54f049f0adb665d891033ada935076c50af
98
py
Python
mbench/util/__init__.py
sepmein/mBench
5f2d7115ac8178ca0e076cfceebc75fa8d46127d
[ "BSD-3-Clause" ]
1
2022-03-21T14:18:11.000Z
2022-03-21T14:18:11.000Z
mbench/util/__init__.py
sepmein/mBench
5f2d7115ac8178ca0e076cfceebc75fa8d46127d
[ "BSD-3-Clause" ]
6
2022-03-04T11:37:52.000Z
2022-03-31T19:13:43.000Z
mbench/util/__init__.py
sepmein/mBench
5f2d7115ac8178ca0e076cfceebc75fa8d46127d
[ "BSD-3-Clause" ]
null
null
null
from .np_looper import np_looper from .interpolate import missing_data as missing_data_interpolate
49
65
0.887755
15
98
5.466667
0.533333
0.195122
0
0
0
0
0
0
0
0
0
0
0.091837
98
2
65
49
0.921348
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
1dc5c38cf65551df9b2e928bc1290a80eb89c871
3,216
py
Python
snoboy/memory.py
Osmose/snoboy
cbe85091e3129fd7271629c650523eb868eaeed1
[ "MIT" ]
9
2015-01-17T20:07:33.000Z
2021-07-21T06:22:31.000Z
snoboy/memory.py
Osmose/snoboy
cbe85091e3129fd7271629c650523eb868eaeed1
[ "MIT" ]
1
2017-04-24T23:14:25.000Z
2017-04-24T23:15:03.000Z
snoboy/memory.py
Osmose/snoboy
cbe85091e3129fd7271629c650523eb868eaeed1
[ "MIT" ]
5
2016-08-17T16:19:23.000Z
2018-02-06T02:57:18.000Z
from array import array from itertools import repeat from snoboy import cart main_ram = array('B', repeat(0, 0x2000)) def read(loc): """Read a location in memory.""" if loc <= 0x00FF: # Restart and Interrupt Vectors return cart.cart_data[loc] elif loc <= 0x014F: # Cartridge Header Area return cart.cart_data[loc] elif loc <= 0x3FFF: # Cartridge ROM Bank 0 return cart.cart_data[loc] elif loc <= 0x7FFF: # Cartridge ROM Switchable Bank pass elif loc <= 0x97FF: # Character RAM pass elif loc <= 0x9BFF: # BG Map Data 1 pass elif loc <= 0x9FFF: # BG Map Data 2 pass elif loc <= 0xBFFF: # Cartridge RAM pass elif loc <= 0xCFFF: # Internal RAM Bank 0 # 0xD000 = Internal RAM Switchable Bank (CGB only) return main_ram[loc - 0xC000] elif loc <= 0xFDFF: # Echo RAM pass elif loc <= 0xFE9F: # OAM RAM pass elif loc <= 0xFEFF: # Unusable memory pass elif loc <= 0xFF7F: # Hardware I/O Registers pass elif loc <= 0xFFFE: # Zero Page pass elif loc == 0xFFFF: # Interrupt enable flag pass else: raise IndexError print "Memory location 0x%x not implemented" % loc return 0 def write(loc, value): """Write to a location in memory.""" if loc <= 0x00FF: # Restart and Interrupt Vectors print "Memory location 0x%x not implemented" % loc elif loc <= 0x014F: # Cartridge Header Area print "Memory location 0x%x not implemented" % loc elif loc <= 0x3FFF: # Cartridge ROM Bank 0 print "Memory location 0x%x not implemented" % loc elif loc <= 0x7FFF: # Cartridge ROM Switchable Bank print "Memory location 0x%x not implemented" % loc elif loc <= 0x97FF: # Character RAM print "Memory location 0x%x not implemented" % loc elif loc <= 0x9BFF: # BG Map Data 1 print "Memory location 0x%x not implemented" % loc elif loc <= 0x9FFF: # BG Map Data 2 print "Memory location 0x%x not implemented" % loc elif loc <= 0xBFFF: # Cartridge RAM print "Memory location 0x%x not implemented" % loc elif loc <= 0xCFFF: # Internal RAM Bank 0 # 0xD000 = Internal RAM Switchable Bank (CGB only) main_ram[loc - 0xC000] = value elif loc <= 
0xFDFF: # Echo RAM print "Memory location 0x%x not implemented" % loc elif loc <= 0xFE9F: # OAM RAM print "Memory location 0x%x not implemented" % loc elif loc <= 0xFEFF: # Unusable memory print "Memory location 0x%x not implemented" % loc elif loc <= 0xFF7F: # Hardware I/O Registers print "Memory location 0x%x not implemented" % loc elif loc <= 0xFFFE: # Zero Page print "Memory location 0x%x not implemented" % loc elif loc == 0xFFFF: # Interrupt enable flag print "Interrupt enable flag not imlemented(Memory locatin 0xFFFF)" else: raise IndexError
28.210526
75
0.581157
398
3,216
4.680905
0.208543
0.105207
0.085883
0.15781
0.830381
0.708535
0.677939
0.544283
0.45357
0.45357
0
0.04841
0.344838
3,216
113
76
28.460177
0.835785
0.196206
0
0.861111
0
0
0.227053
0
0
0
0.082126
0
0
0
null
null
0.152778
0.041667
null
null
0.208333
0
0
0
null
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
5
1ddd5e927cc937856b73b0f4c79e249a3c309487
172
py
Python
gefapi/routes/api/__init__.py
ConservationInternational/trends.earth-API
120042c6eb17734404f739d1764718c8332c9e03
[ "X11" ]
null
null
null
gefapi/routes/api/__init__.py
ConservationInternational/trends.earth-API
120042c6eb17734404f739d1764718c8332c9e03
[ "X11" ]
4
2020-04-27T12:46:56.000Z
2021-02-25T12:29:53.000Z
gefapi/routes/api/__init__.py
ConservationInternational/trends.earth-API
120042c6eb17734404f739d1764718c8332c9e03
[ "X11" ]
null
null
null
# GENERIC Error
from flask import jsonify


# GENERIC Error
def error(status=400, detail='Bad Request'):
    """Build a generic JSON error response paired with its HTTP status code."""
    payload = {'status': status, 'detail': detail}
    return jsonify(payload), status
17.2
64
0.709302
22
172
5.545455
0.590909
0.196721
0
0
0
0
0
0
0
0
0
0.020833
0.162791
172
9
65
19.111111
0.826389
0.156977
0
0
0
0
0.161972
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
0
0
0
5
1dded323fe561cffc86e76b0c4f241ab473f87b9
176
py
Python
aspc/menu/templatetags/menu_tag.py
aspc/mainsite
a6ccee0bb921147b7f630d65e01371e451aa3c54
[ "MIT" ]
8
2015-09-27T07:57:32.000Z
2018-10-28T06:08:40.000Z
aspc/menu/templatetags/menu_tag.py
aspc/mainsite
a6ccee0bb921147b7f630d65e01371e451aa3c54
[ "MIT" ]
132
2015-01-17T01:22:09.000Z
2018-11-13T22:05:32.000Z
aspc/menu/templatetags/menu_tag.py
aspc/mainsite
a6ccee0bb921147b7f630d65e01371e451aa3c54
[ "MIT" ]
20
2015-01-16T04:32:30.000Z
2018-09-03T22:55:05.000Z
from django import template
from HTMLParser import HTMLParser

# Module-level registry and a single shared parser instance (HTMLParser is
# stateless for unescaping, so one instance is enough).
register = template.Library()
h = HTMLParser()


@register.filter
def clean_item_name(s):
    """Template filter: decode HTML entities in a menu item name."""
    decoded = h.unescape(s)
    return decoded
19.555556
33
0.778409
24
176
5.625
0.666667
0.266667
0
0
0
0
0
0
0
0
0
0
0.136364
176
9
34
19.555556
0.888158
0
0
0
0
0
0
0
0
0
0
0
0
1
0.142857
false
0
0.285714
0.142857
0.571429
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
5
38051bd4f1f82c268407f5c2479ed8d889a71c7b
1,070
py
Python
tests/thing/test_thing_connector.py
janluak/aws-iot
8f3a4d83c40bb950b4f49e8b7afcc3383a30661e
[ "Apache-2.0" ]
null
null
null
tests/thing/test_thing_connector.py
janluak/aws-iot
8f3a4d83c40bb950b4f49e8b7afcc3383a30661e
[ "Apache-2.0" ]
null
null
null
tests/thing/test_thing_connector.py
janluak/aws-iot
8f3a4d83c40bb950b4f49e8b7afcc3383a30661e
[ "Apache-2.0" ]
null
null
null
from pathlib import Path
from os import environ
from pytest import fail


def test_connection(test_env_real, caplog):
    """Connect explicitly, publish one message, and require a clean log."""
    from aws_iot.thing import IoTThingConnector

    connector = IoTThingConnector(
        environ["TestThingName"],
        environ["AWS_REGION"],
        endpoint=environ["IOT_ENDPOINT"],
        cert_path=Path(Path(__file__).parent, "../certs"),
    )
    connector.connect()
    publish_ok = connector.publish("test_topic", {"some_payload": "some_value"})
    assert publish_ok is True
    del connector
    # Any captured log message is treated as a failure.
    if len(caplog.messages) != 0:
        fail(str(caplog.messages))


def test_connection_context_manager(test_env_real, caplog):
    """Same publish round-trip, but driven through the context manager."""
    from aws_iot.thing import IoTThingConnector

    with IoTThingConnector(
        environ["TestThingName"],
        environ["AWS_REGION"],
        endpoint=environ["IOT_ENDPOINT"],
        cert_path=Path(Path(__file__).parent, "../certs"),
    ) as connector:
        publish_ok = connector.publish("test_topic", {"some_payload": "some_value"})
        assert publish_ok is True
    # Leaving the ``with`` block must have disconnected the client.
    assert connector.connected is False
    if len(caplog.messages) != 0:
        fail(str(caplog.messages))
28.157895
70
0.660748
131
1,070
5.167939
0.366412
0.047267
0.050222
0.050222
0.776957
0.776957
0.776957
0.776957
0.776957
0.655835
0
0.002395
0.219626
1,070
37
71
28.918919
0.808383
0
0
0.6
0
0
0.140187
0
0
0
0
0
0.1
1
0.066667
false
0
0.166667
0
0.233333
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
697e94b8691b378e7e098b7e186125a4ac1f76a8
84
py
Python
app/tasks.py
yoophi/api-template-celery
6b4c1c317ff3bfe99ca963f81e4ae1e9b1f86579
[ "MIT" ]
null
null
null
app/tasks.py
yoophi/api-template-celery
6b4c1c317ff3bfe99ca963f81e4ae1e9b1f86579
[ "MIT" ]
null
null
null
app/tasks.py
yoophi/api-template-celery
6b4c1c317ff3bfe99ca963f81e4ae1e9b1f86579
[ "MIT" ]
null
null
null
# Build the Celery application for this package, then pull in the task
# definitions so they register against it.
from . import create_celery

celery = create_celery()

# NOTE(review): this import is placed after ``celery`` is created —
# presumably the task modules reference it at import time; confirm
# against app.main.tasks before reordering.
from app.main.tasks import *
14
28
0.761905
12
84
5.166667
0.583333
0.387097
0
0
0
0
0
0
0
0
0
0
0.154762
84
5
29
16.8
0.873239
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
69a7948b9607eb3767f6590aa4486e4ec4910427
114
py
Python
tests/plugin1.py
koder-ua/agent
e151d30fa75ce2ea27f7f30691db431cd43a6b22
[ "Apache-2.0" ]
null
null
null
tests/plugin1.py
koder-ua/agent
e151d30fa75ce2ea27f7f30691db431cd43a6b22
[ "Apache-2.0" ]
null
null
null
tests/plugin1.py
koder-ua/agent
e151d30fa75ce2ea27f7f30691db431cd43a6b22
[ "Apache-2.0" ]
null
null
null
mod_name = "pl1" __version__ = (1, 0) def rpc_add(x, y): return x + y def rpc_mul(x, y): return x * y
10.363636
20
0.561404
22
114
2.590909
0.590909
0.140351
0.280702
0.315789
0.350877
0
0
0
0
0
0
0.037037
0.289474
114
10
21
11.4
0.666667
0
0
0
0
0
0.026316
0
0
0
0
0
0
1
0.333333
false
0
0
0.333333
0.666667
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
5
69c0cbe5a0e5965d807bc655bea8f7c71d97bd1a
19,971
py
Python
affinity/works.py
timkphd/examples
04c162ec890a1c9ba83498b275fbdc81a4704062
[ "Unlicense" ]
5
2020-11-01T00:29:22.000Z
2022-01-24T19:09:47.000Z
affinity/works.py
timkphd/examples
04c162ec890a1c9ba83498b275fbdc81a4704062
[ "Unlicense" ]
1
2022-02-09T01:59:47.000Z
2022-02-09T01:59:47.000Z
affinity/works.py
timkphd/examples
04c162ec890a1c9ba83498b275fbdc81a4704062
[ "Unlicense" ]
null
null
null
#!/usr/bin/env python # coding: utf-8 # In[ ]: import pandas as pd t="IntelMPI/Intel t=99.83 cpu-bind=NONE n=16 cpus-per-task=4 OMP_PROC_BIND=close OMP_NUM_THREADS=4 " #Type Compiler mpi cpu-bind OMP_PROC_BIND threads tasks time global cases,thedirs cases=[] thedirs=[] def topd(t,ofn): global cases t=t.replace("\n"," ") rtype="None" mpi="None" compiler="None" if t.find("ifort") > -1 : compiler="ifort" if t.find("gfortran") > -1 : compiler="gfortran" if t.find("IntelMPI/Intel") > -1: compiler="ifort" mpi="intel" if t.find("OpenMPI/Intel") > -1: compiler="ifort" mpi="openmpi" if t.find("IntelMPI/GNU") > -1: compiler="gfortran" mpi="intel" if t.find("OpenMPI/GNU") > -1: compiler="gfortran" mpi="openmpi" x=t.split(" t=") x=x[1] x=x.split(" ") mtime=float(x[0]) try: x=t.split(" cpu-bind=") x=x[1] x=x.split(" ") cpubind=x[0] except: cpubind="None" try: x=t.split(" n=") x=x[1] x=x.split(" ") n=x[0] except: n=1 try: x=t.split(" cpus-per-task=") x=x[1] x=x.split(" ") cpus_per_task=int(x[0]) except: cpus_per_task=0 try: x=t.split(" OMP_PROC_BIND=") x=x[1] x=x.split(" ") omp_proc_bind=x[0] except: omp_proc_bind="None" try: x=t.split(" OMP_NUM_THREADS=") x=x[1] x=x.split(" ") omp_num_threads=int(x[0]) except: omp_num_threads=0 if(mpi == "None"): mtype="openmp" if(mpi != "None" and omp_num_threads > 0 ): mtype="hybrid" if(mpi != "None" and omp_num_threads ==0 ): mtype="mpi" if omp_num_threads == 0 : cores=n else: cores=int(n)*int(omp_num_threads) return[mtime,mtype,compiler,mpi,cpubind,cpus_per_task,omp_proc_bind,n,omp_num_threads,cores,ofn] def pdappend(df,line): a_series = pd.Series(line, index = df.columns) return df.append(a_series, ignore_index=True) head=['time','type','compiler','mpi','cpubind','cpus_per_task','omp_proc_bind','tasks','omp_num_threads','cores','file'] results=pd.DataFrame(columns=head) # In[ ]: from os.path import exists def nfname(path,ex=""): if exists(path+ex): print(path + " EXISTS") return(path+"_a") else: print(path) return(path) # In[ ]: def 
doit(hdir,base): global results global cases global thedirs thedirs.append(hdir) nplots=0 for ver in ["stf_ii","stf_ig","stf_og","stf_oi"] : #for ver in ["stf_og"] : import os import numpy as np from plsub import myplot import matplotlib.pyplot as plt command="grep cpus-per-task "+hdir+"/script* | tail -1" #print(command) c=os.popen(command,"r") lines=c.read() #print("lines",lines) cpt="NONE" if len(lines) > 0 : cpt=lines.replace("#SBATCH --","") cpt=cpt.strip() #print(cpt) command="grep -l " +ver+ " " +hdir+"/2*" c=os.popen(command,"r") files=c.read() #print(files) cores=np.array(range(0,64)) sums=np.zeros(64) mins=np.zeros(64)+1e6 maxs=np.zeros(64)-1e6 icnt=np.zeros(64) nf=0 files=files.split() #print(files) nt=len(files) heat=np.zeros([nt,64]) print() for f in files: #print(f) nf=nf+1 infile=open(f,"r") dat=infile.readlines() isums=np.zeros(64) imins=np.zeros(64)+1e6 imaxs=np.zeros(64)-1e6 for d in dat: d=d.split() l=float(d[4]) c=int(d[6]) % 64 #print(c,l) isums[c]=isums[c]+l heat[nf-1,c]=heat[nf-1,c]+l icnt[c]=1 for c in range(0,64) : if isums[c] < imins[c] : imins[c]=isums[c] if isums[c] > imaxs[c] : imaxs[c]=isums[c] sums=sums+isums for c in range(0,64) : if imins[c] < 1e5: if imins[c] < mins[c]: mins[c]=imins[c] if imaxs[c] > 0.0: if imaxs[c] > maxs[c]: maxs[c]=imaxs[c] #print(f) sums=sums/nf cores=[] fmins=[] faves=[] fmaxs=[] #print(sums) for c in range(0,64): if sums[c] > 0.0: cores.append(c) fmins.append(mins[c]) faves.append(sums[c]) fmaxs.append(maxs[c]) #print(c,mins[c],sums[c],maxs[c]) #print(len(cores),fmins,faves,fmaxs) asets=[[cores,fmins,"min"],[cores,faves,"ave"],[cores,fmaxs,"max"]] #myplot(sets=asets,doxkcd=False,width=0,do_sym="y",bl="Physical Core",sl="Load",topl=title,outname=ver+"-"+edir,yr="90,105",xr="0,64") command="grep srun "+hdir+"/script* | tail -1" c=os.popen(command,"r") lines=c.read() #print(lines) settings="cpu-bind=NONE" if lines.find("rank") > -1 : settings="cpu-bind=rank" if lines.find("socket") > -1 : 
settings="cpu-bind=sockets" if lines.find("core") > -1 : settings="cpu-bind=cores" #command="grep 'run time' "+hdir+"/slurm-*.out | awk {'print $NF }'" lines=lines.split(" -n") lines=lines[1] lines=lines.split() ccount=lines[0] #print(ccount) command="grep 'run time' "+hdir+"/slurm-*.out" c=os.popen(command,"r") times=c.readlines() #print(times) j=0 for t in times: t=t.split("=") t=t[1] ttt=t.split() t=float(ttt[0]) t="%5.2f" % (t) times[j]=t j=j+1 #print(times) if ver.find("ii") > -1: cset="IntelMPI/Intel t="+times[0]+" " if ver.find("ig") > -1: cset="IntelMPI/GNU t="+times[1]+" " if ver.find("og") > -1: cset="OpenMPI/GNU t="+times[2]+" " if ver.find("oi") > -1: cset="OpenMPI/Intel t="+times[3]+" " title=cset+settings+" n="+ccount title=title+" "+cpt settings=settings.replace("cpu-bind=","") #get OMP_ command="grep 'OMP_' "+hdir+"/env*" c=os.popen(command,"r") omp=c.read() print("bonk",omp) if len(omp) > 0: omp=omp.replace("\n"," ") print("OMP=",omp) title=title+"\n"+omp print(title) ofn=base+"/"+ver+"_"+settings+"_"+ccount+"_"+cpt ofn=nfname(ofn,".pdf") myplot(sets=asets,doxkcd=False,width=0,do_sym="y",bl="Physical Core",sl="Load",topl=title,outname=ofn,xr="0,64") t2=title.replace("\n"," ") cases.append(t2) results=pdappend(results,topd(title,ofn)) nplots=nplots+1 fig, ax = plt.subplots( figsize=(12, 6), subplot_kw={'xticks': [], 'yticks': []}) fig.subplots_adjust(hspace=100.0, wspace=50) zmax=np.max(heat) zmin=np.min(heat) x=np.array(range(0,nt)) x=x/(nt-1) cores=np.array(range(0,64)) #print(x) #print(cores) from scipy import interpolate #nh= interpolate.interp2d(cores, x, heat) #nh= interpolate.RectBivariateSpline(cores, x, heat,kx=1,ky=1) #nh= interpolate.RectBivariateSpline(x, cores, heat,kx=1,ky=1,s=0) nh=interpolate.RegularGridInterpolator((x,cores), heat,method="nearest") #print(nh) newx=np.array(range(0,64))/64.0 nheat=np.zeros([64,64]) for i in range(0,64): for j in range(0,64): yi=cores[i] xi=newx[j] #print(xi,yi) #pts=np.array([xi,yi]) 
#print(nh([[4,4]])) pts = np.array([[xi, yi]]) nheat[j][i]=nh(pts) #c = ax.pcolormesh(x, cores, heat, cmap='RdBu', vmin=zmin, vmax=zmax) #heat[3]=0 c = ax.pcolormesh(cores, x, heat, cmap='rainbow', vmin=zmin, vmax=zmax,shading='nearest') #c = ax.pcolormesh(cores, newx, nheat, cmap='rainbow', vmin=zmin, vmax=zmax,shading='nearest') #c=plt.imshow(heat,cmap='rainbow',interpolation="none") c=plt.imshow(nheat,cmap='rainbow',interpolation="none") ax.set_title(title) ax.set_ylabel('Relative Time') ax.set_xlabel('Physical Core') ax.set_xticks(range(0,65,4)) yticks=range(0,65,4) yticks=np.array(yticks)/65 ax.set_yticks(range(0,65,4)) #ax.set_size=(20,2) #ax.axis([x.max(), x.min(), cores.min(), cores.max()]) fig.colorbar(c, ax=ax) #plt.show() outname=base+"/"+"h_"+ver+"_"+settings+"_"+ccount+"_"+cpt outname=nfname(outname,".pdf") plt.savefig(outname+".pdf") nplots=nplots+1 print(title) # for h in heat[:] : # print(h) return(nplots) # In[ ]: tplots=0 #base="/home/tkaiser2/bench/affinity/mpi/thu/thu" #for set in ["32","64","128"]: #base="/home/tkaiser2/bench/affinity/hybrid/thu/spread" #for set in ["16","32","64"]: base="/home/tkaiser2/bench/affinity/redo/hybrid/close" for set in ["16","16o","32","32o","64o"]: #for set in ["32"]: for ver in ["none","cores","sockets","rank"]: #for ver in ["none"]: hdir=base+"/"+set+"/"+ver print(hdir) try: heat=doit(hdir,base) tplots=tplots+heat except: print(set,ver,"failed") print(tplots) # In[ ]: cases # In[ ]: print(tplots) print(results) # In[ ]: tplots=0 #base="/home/tkaiser2/bench/affinity/mpi/thu/thu" #for set in ["32","64","128"]: base="/home/tkaiser2/bench/affinity/redo/hybrid/spread" for set in ["16","16o","32","32o","64o"]: #for set in ["16"]: #base="/home/tkaiser2/bench/affinity/hybrid/thu/close" #for set in ["16","32","64"]: for ver in ["none","cores","sockets","rank"]: hdir=base+"/"+set+"/"+ver print(hdir) try: heat=doit(hdir,base) tplots=tplots+heat except: print(set,ver,"failed") print(tplots) # In[ ]: tplots=0 
base="/home/tkaiser2/bench/affinity/redo/mpi" for set in ["32","64","128"]: #for set in ["128"]: #base="/home/tkaiser2/bench/affinity/hybrid/thu/spread" #for set in ["16","32","64"]: #base="/home/tkaiser2/bench/affinity/hybrid/thu/close" #for set in ["16","32","64"]: for ver in ["none","cores","sockets","rank"]: hdir=base+"/"+set+"/"+ver print(hdir) try: heat=doit(hdir,base) tplots=tplots+heat except: print(set,ver,"failed") print(tplots) # In[ ]: def doomp(hdir,base,bind): nplots=0 global results global cases global thedirs thedirs.append(hdir) for ver in ["stf_ii","stf_ig"] : import os import numpy as np from plsub import myplot import matplotlib.pyplot as plt command="grep -l " +ver+ " " +hdir+"/2*" #print("COMMAND",command) c=os.popen(command,"r") files=c.read() cores=np.array(range(0,64)) sums=np.zeros(64) mins=np.zeros(64)+1e6 maxs=np.zeros(64)-1e6 icnt=np.zeros(64) nf=0 files=files.split() #print(files) nt=len(files) heat=np.zeros([nt,64]) for f in files: #print(f) nf=nf+1 infile=open(f,"r") dat=infile.readlines() isums=np.zeros(64) imins=np.zeros(64)+1e6 imaxs=np.zeros(64)-1e6 for d in dat: d=d.split() l=float(d[4]) c=int(d[6]) % 64 #print(c,l) isums[c]=isums[c]+l heat[nf-1,c]=heat[nf-1,c]+l icnt[c]=1 for c in range(0,64) : if isums[c] < imins[c] : imins[c]=isums[c] if isums[c] > imaxs[c] : imaxs[c]=isums[c] sums=sums+isums for c in range(0,64) : if imins[c] < 1e5: if imins[c] < mins[c]: mins[c]=imins[c] if imaxs[c] > 0.0: if imaxs[c] > maxs[c]: maxs[c]=imaxs[c] sums=sums/nf #print(sums) cores=[] fmins=[] faves=[] fmaxs=[] for c in range(0,64): if sums[c] > 0.0: cores.append(c) fmins.append(mins[c]) faves.append(sums[c]) fmaxs.append(maxs[c]) #print(c,mins[c],sums[c],maxs[c]) # print(len(cores),fmins,faves,fmaxs) asets=[[cores,fmins,"min"],[cores,faves,"ave"],[cores,fmaxs,"max"]] #myplot(sets=asets,doxkcd=False,width=0,do_sym="y",bl="Physical Core",sl="Load",topl=title,outname=ver+"-"+edir,yr="90,105",xr="0,64") command="grep cpus-per-task "+hdir+"/script* 
| tail -1" print(command) c=os.popen(command,"r") lines=c.read() #print(lines) settings="NONE" settings=lines.replace("#SBATCH --","") settings=settings.strip() #print(settings) if lines.find("rank") > -1 : settings="cpu-bind=rank" if lines.find("socket") > -1 : settings="cpu-bind=sockets" if lines.find("core") > -1 : settings="cpu-bind=cores" #command="grep 'run time' "+hdir+"/slurm-*.out | awk {'print $NF }'" #print(settings+"01") count=0 try: lines=lines.split("-n") lines=lines[1] lines=lines.split() ccount=lines[0] except: #print("count failed") ccount="0" #print(settings+"02") #print("count=",ccount) command="grep 'run time' "+hdir+"/slurm-*.out" #print(command) c=os.popen(command,"r") times=c.readlines() #print(times) j=0 for t in times: t=t.split("=") #print("t=",t) t=t[1] ttt=t.split() t=float(ttt[0]) t="%5.2f" % (t) times[j]=t j=j+1 #print(times) #print(settings+"03") cset="CSET" #print("ver",ver) if ver.find("ii") > -1: cset="ifort t="+times[0]+" " if ver.find("ig") > -1: cset="gfortran t="+times[1]+" " if ver.find("og") > -1: cset="OpenMPI/GNU t="+times[2]+" " if ver.find("oi") > -1: cset="OpenMPI/Intel t="+times[3]+" " #print(cset,settings,ccount) title=cset+settings #print(title) #settings=settings.replace("cpu-bind=","") #print(settings+"04") #get OMP_ command="grep 'OMP_' "+hdir+"/env*" c=os.popen(command,"r") omp=c.read() #print("bonk",omp) if len(omp) > 0: omp=omp.replace("\n"," ") #print("OMP=",omp) title=title+"\n"+omp #print(title) ofn=base+"/"+ver+"_"+settings+"_"+ccount+"_"+bind ofn=nfname(ofn,".pdf") #print(ofn) myplot(sets=asets,doxkcd=False,width=0,do_sym="y",bl="Physical Core",sl="Load",topl=title,outname=ofn,xr="0,64") t2=title.replace("\n"," ") cases.append(t2) results=pdappend(results,topd(title,ofn)) nplots=nplots+1 fig, ax = plt.subplots( figsize=(12, 6), subplot_kw={'xticks': [], 'yticks': []}) fig.subplots_adjust(hspace=100.0, wspace=50) zmax=np.max(heat) zmin=np.min(heat) x=np.array(range(0,nt)) x=x/(nt-1) 
cores=np.array(range(0,64)) #print(x) #print(cores) from scipy import interpolate #nh= interpolate.interp2d(cores, x, heat) #nh= interpolate.RectBivariateSpline(cores, x, heat,kx=1,ky=1) #nh= interpolate.RectBivariateSpline(x, cores, heat,kx=1,ky=1,s=0) nh=interpolate.RegularGridInterpolator((x,cores), heat,method="nearest") #print(nh) newx=np.array(range(0,64))/64.0 nheat=np.zeros([64,64]) for i in range(0,64): for j in range(0,64): yi=cores[i] xi=newx[j] #print(xi,yi) #pts=np.array([xi,yi]) #print(nh([[4,4]])) pts = np.array([[xi, yi]]) nheat[j][i]=nh(pts) #c = ax.pcolormesh(x, cores, heat, cmap='RdBu', vmin=zmin, vmax=zmax) #heat[3]=0 c = ax.pcolormesh(cores, x, heat, cmap='rainbow', vmin=zmin, vmax=zmax,shading='nearest') #c = ax.pcolormesh(cores, newx, nheat, cmap='rainbow', vmin=zmin, vmax=zmax,shading='nearest') #c=plt.imshow(heat,cmap='rainbow',interpolation="none") c=plt.imshow(nheat,cmap='rainbow',interpolation="none") ax.set_title(title) ax.set_ylabel('Relative Time') ax.set_xlabel('Physical Core') ax.set_xticks(range(0,65,4)) yticks=range(0,65,4) yticks=np.array(yticks)/65 ax.set_yticks(range(0,65,4)) #ax.set_size=(20,2) #ax.axis([x.max(), x.min(), cores.min(), cores.max()]) fig.colorbar(c, ax=ax) #plt.show() outname=nfname(outname,".pdf") outname=base+"/"+"h_"+ver+"_"+settings+"_"+ccount+"_"+bind plt.savefig(outname+".pdf") nplots=nplots+1 # for h in heat[:] : # print(h) return(nplots) # In[ ]: cases # In[ ]: tplots=0 base="/home/tkaiser2/bench/affinity/redo/omp/128" tplots=0 for set in ["128"]: for ver in ["close","none","spread"]: hdir=base+"/"+set+"/"+ver print(hdir) try: heat=doomp(hdir,base,ver) tplots=tplots+heat except: print(set,ver,"failed") t1=tplots # In[ ]: print(t1) print(results) # In[ ]: base="/home/tkaiser2/bench/affinity/redo/omp/64" for set in ["128","64"]: for ver in ["close","none","spread"]: hdir=base+"/"+set+"/"+ver print(hdir) try: heat=doomp(hdir,base,ver) tplots=tplots+heat except: print(set,ver,"failed") t3=tplots # In[3]: # 
In[ ]: base="/home/tkaiser2/bench/affinity/redo/omp/32" for set in ["32"]: for ver in ["close","none","spread"]: hdir=base+"/"+set+"/"+ver print(hdir) try: heat=doomp(hdir,base,ver) tplots=tplots+heat except: print(set,ver,"failed") t3=tplots # In[3]: # In[ ]: def trimf(x): return x.replace("/home/tkaiser2/bench/affinity","") def trimthur(x): return x.replace("/thu","") def trimslash(x): return x.replace("/","_") def trimone(x): return x[1:] results['file']=results.apply(lambda row:trimf(row['file']),axis=1) results['file']=results.apply(lambda row:trimthur(row['file']),axis=1) results['file']=results.apply(lambda row:trimslash(row['file']),axis=1) results['file']=results.apply(lambda row:trimone(row['file']),axis=1) pd.set_option('display.max_rows', None) pd.set_option('display.max_columns', None) pd.set_option('display.max_colwidth', None) results # In[ ]: bytime=results.sort_values(by="time") # In[ ]: mpi=bytime[(bytime['type']=='mpi')] # In[ ]: hybrid=bytime[(bytime['type']=='hybrid')] # In[ ]: openmp=bytime[(bytime['type']=='openmp')] # In[ ]: results.to_csv('results.csv', index=False) mpi.to_csv('mpi.csv', index=False) hybrid.to_csv('hybrid.csv', index=False) openmp.to_csv('openmp.csv', index=False) # In[ ]: bytime # In[ ]: bytime.to_csv('bytime.csv', index=False) # In[ ]: cases # In[ ]: len(cases)
26.770777
142
0.513645
2,717
19,971
3.73721
0.105263
0.014182
0.012606
0.032007
0.78117
0.757042
0.731633
0.706618
0.687118
0.64418
0
0.032863
0.300636
19,971
745
143
26.806711
0.694136
0.15873
0
0.745868
0
0.002066
0.116557
0.017157
0
0
0
0
0
1
0.018595
false
0
0.024793
0.008264
0.053719
0.055785
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
69cc4dc2dcb75c9efc49169a4ba853a70c4a1c3f
46
py
Python
pyfan/devel/__init__.py
FanWangEcon/pyfan
126e91c0c6d930f1c335a07396d1d2145b247cea
[ "MIT" ]
1
2020-08-02T21:43:39.000Z
2020-08-02T21:43:39.000Z
pyfan/devel/__init__.py
FanWangEcon/pyfan
126e91c0c6d930f1c335a07396d1d2145b247cea
[ "MIT" ]
null
null
null
pyfan/devel/__init__.py
FanWangEcon/pyfan
126e91c0c6d930f1c335a07396d1d2145b247cea
[ "MIT" ]
1
2020-12-02T06:45:20.000Z
2020-12-02T06:45:20.000Z
# modules for program development and testing
23
45
0.826087
6
46
6.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.152174
46
1
46
46
0.974359
0.934783
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
3859a6ca38c9d69e9d0b2ab5dd331077fe5a7f36
1,935
py
Python
tests/test_enforce_import_order.py
mmrahorovic/finn
d1cc9cf94f1c33354cc169c5a6517314d0e94e3b
[ "BSD-3-Clause" ]
283
2019-09-26T10:09:34.000Z
2022-03-09T16:36:23.000Z
tests/test_enforce_import_order.py
mmrahorovic/finn
d1cc9cf94f1c33354cc169c5a6517314d0e94e3b
[ "BSD-3-Clause" ]
238
2019-10-04T12:20:26.000Z
2022-03-31T04:50:53.000Z
tests/test_enforce_import_order.py
mmrahorovic/finn
d1cc9cf94f1c33354cc169c5a6517314d0e94e3b
[ "BSD-3-Clause" ]
144
2019-09-23T13:46:14.000Z
2022-03-18T12:55:07.000Z
# Copyright (c) 2020, Xilinx # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # * Neither the name of FINN nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # IMPORTANT: # - do not move this file to subfolders # pytest must discover it before other FINN tests, can be checked with: # pytest --collect-only # - do not change the order of imports below # this is to workaround the onnx 1.6.0/pytorch bug # https://github.com/onnx/onnx/issues/2394#issuecomment-581638840 import onnx # noqa import torch # noqa def test_enforce_import_order(): assert True
43.977273
80
0.771059
283
1,935
5.261484
0.575972
0.024177
0.022834
0.030893
0.123573
0.091336
0.091336
0.091336
0.091336
0.091336
0
0.012547
0.176227
1,935
43
81
45
0.921581
0.919897
0
0
0
0
0
0
0
0
0
0
0.25
1
0.25
true
0
0.75
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
5
38670e6df1e429a3ff2671880f00fbe077a7ee3b
73
py
Python
pi/seat.py
jabbate19/Light
17dd3d9fbe03b94a2115eefb09b58f6029a02b02
[ "MIT" ]
2
2021-11-08T01:00:15.000Z
2021-11-08T16:37:35.000Z
pi/seat.py
jabbate19/LightTable
17dd3d9fbe03b94a2115eefb09b58f6029a02b02
[ "MIT" ]
2
2021-11-09T00:10:02.000Z
2021-11-10T04:25:31.000Z
pi/seat.py
jabbate19/Light
17dd3d9fbe03b94a2115eefb09b58f6029a02b02
[ "MIT" ]
null
null
null
#import neopixel


class Seat:
    """Placeholder for a seat on the light table.

    ``pixels`` is accepted for forward compatibility with the NeoPixel
    strip (import commented out above) but is currently unused.
    """

    def __init__(self, pixels):
        # Intentionally empty: no state is kept yet.
        pass
18.25
31
0.657534
9
73
4.888889
1
0
0
0
0
0
0
0
0
0
0
0
0.260274
73
4
32
18.25
0.814815
0.205479
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0.333333
0
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
5
387598487ccd33af11493c38859743edeacfeab7
436
py
Python
client/notifications/windowsnotify.py
lhupfeldt/borgbackup_notifications_multi_target
ef0e8461bbc80535a4fa541c6c00077e57f05572
[ "BSD-3-Clause" ]
null
null
null
client/notifications/windowsnotify.py
lhupfeldt/borgbackup_notifications_multi_target
ef0e8461bbc80535a4fa541c6c00077e57f05572
[ "BSD-3-Clause" ]
null
null
null
client/notifications/windowsnotify.py
lhupfeldt/borgbackup_notifications_multi_target
ef0e8461bbc80535a4fa541c6c00077e57f05572
[ "BSD-3-Clause" ]
null
null
null
# Copyright (c) 2016 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.

from .interface import STOCK_DIALOG_INFO, STOCK_DIALOG_ERROR

# Backend state placeholders, mirroring the notification interface.
_ignore_errors = False
_notification = None
_notify = None


def init(program_name, ignore_errors=False):
    """Initialise the Windows notification backend (stub)."""
    # TODO
    pass


def notify(summary, body, msg_type, expire_timeout=-1):
    """Display a desktop notification (stub, does nothing yet)."""
    # TODO
    return


def clear():
    """Dismiss any currently visible notification (stub)."""
    # TODO
    return
18.166667
73
0.724771
61
436
4.983607
0.770492
0.072368
0.111842
0
0
0
0
0
0
0
0
0.014327
0.199541
436
23
74
18.956522
0.856734
0.321101
0
0.2
0
0
0
0
0
0
0
0.043478
0
1
0.3
false
0.1
0.1
0.2
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
1
0
1
1
0
0
5
389765cb28af63f1d06cdb79d9c105e3c2bb8c88
15,155
py
Python
cohesity_management_sdk/controllers/cluster_controller.py
cohesity/management-sdk-python
867d8c0c40dd317cdb017902c895527da7ae31c0
[ "Apache-2.0" ]
18
2019-09-24T17:35:53.000Z
2022-03-25T08:08:47.000Z
cohesity_management_sdk/controllers/cluster_controller.py
cohesity/management-sdk-python
867d8c0c40dd317cdb017902c895527da7ae31c0
[ "Apache-2.0" ]
18
2019-03-29T19:32:29.000Z
2022-01-03T23:16:45.000Z
cohesity_management_sdk/controllers/cluster_controller.py
cohesity/management-sdk-python
867d8c0c40dd317cdb017902c895527da7ae31c0
[ "Apache-2.0" ]
16
2019-02-27T06:54:12.000Z
2021-11-16T18:10:24.000Z
# -*- coding: utf-8 -*- # Copyright 2021 Cohesity Inc. import logging from cohesity_management_sdk.api_helper import APIHelper from cohesity_management_sdk.configuration import Configuration from cohesity_management_sdk.controllers.base_controller import BaseController from cohesity_management_sdk.http.auth.auth_manager import AuthManager from cohesity_management_sdk.models.apps_config import AppsConfig from cohesity_management_sdk.models.basic_cluster_info import BasicClusterInfo from cohesity_management_sdk.models.cluster import Cluster from cohesity_management_sdk.models.cluster_status_result import ClusterStatusResult from cohesity_management_sdk.exceptions.request_error_error_exception import RequestErrorErrorException class ClusterController(BaseController): """A Controller to access Endpoints in the cohesity_management_sdk API.""" def __init__(self, config=None, client=None, call_back=None): super(ClusterController, self).__init__(client, call_back) self.logger = logging.getLogger(__name__) self.config = config def get_basic_cluster_info(self): """Does a GET request to /public/basicClusterInfo. All Active Directory domains that are currently joined to the Cohesity Cluster are returned. In addition, the default LOCAL domain on the Cohesity Cluster is returned as the first element of the domains array in the response. Returns: BasicClusterInfo: Response from the API. Success Raises: APIException: When an error occurs while fetching the data from the remote API. This exception includes the HTTP Response code, an error message, and the HTTP body that was received in the request. 
""" try: self.logger.info('get_basic_cluster_info called.') # Prepare query URL self.logger.info('Preparing query URL for get_basic_cluster_info.') _url_path = '/public/basicClusterInfo' _query_builder = self.config.get_base_uri() _query_builder += _url_path _query_url = APIHelper.clean_url(_query_builder) # Prepare headers self.logger.info('Preparing headers for get_basic_cluster_info.') _headers = {'accept': 'application/json'} # Prepare and execute request self.logger.info( 'Preparing and executing request for get_basic_cluster_info.') _request = self.http_client.get(_query_url, headers=_headers) _context = self.execute_request(_request, name='get_basic_cluster_info') # Endpoint and global error handling using HTTP status codes. self.logger.info('Validating response for get_basic_cluster_info.') if _context.response.status_code == 0: raise RequestErrorErrorException('Error', _context) self.validate_response(_context) # Return appropriate type return APIHelper.json_deserialize(_context.response.raw_body, BasicClusterInfo.from_dictionary) except Exception as e: self.logger.error(e, exc_info=True) raise def get_cluster(self, fetch_stats=None, fetch_time_series_schema=None, include_minimum_nodes_info=None): """Does a GET request to /public/cluster. Returns information about this Cohesity Cluster. Args: fetch_stats (bool, optional): If 'true', also get statistics about the Cohesity Cluster. fetch_time_series_schema (bool, optional): Specifies whether to get time series schema info of the cluster. include_minimum_nodes_info (bool, optional): Specifies whether to include info about minimum failure domains needed to support based on fault tolerance configured and EC configuration on all storage domains. Returns: Cluster: Response from the API. Successful Response Raises: APIException: When an error occurs while fetching the data from the remote API. This exception includes the HTTP Response code, an error message, and the HTTP body that was received in the request. 
""" try: self.logger.info('get_cluster called.') # Prepare query URL self.logger.info('Preparing query URL for get_cluster.') _url_path = '/public/cluster' _query_builder = self.config.get_base_uri() _query_builder += _url_path _query_parameters = { 'fetchStats': fetch_stats, 'fetchTimeSeriesSchema': fetch_time_series_schema, 'includeMinimumNodesInfo': include_minimum_nodes_info } _query_builder = APIHelper.append_url_with_query_parameters( _query_builder, _query_parameters, Configuration.array_serialization) _query_url = APIHelper.clean_url(_query_builder) # Prepare headers self.logger.info('Preparing headers for get_cluster.') _headers = {'accept': 'application/json'} # Prepare and execute request self.logger.info( 'Preparing and executing request for get_cluster.') _request = self.http_client.get(_query_url, headers=_headers) AuthManager.apply(_request, self.config) _context = self.execute_request(_request, name='get_cluster') # Endpoint and global error handling using HTTP status codes. self.logger.info('Validating response for get_cluster.') if _context.response.status_code == 0: raise RequestErrorErrorException('Error', _context) self.validate_response(_context) # Return appropriate type return APIHelper.json_deserialize(_context.response.raw_body, Cluster.from_dictionary) except Exception as e: self.logger.error(e, exc_info=True) raise def update_cluster(self, body=None): """Does a PUT request to /public/cluster. Returns the updated Cluster configuration. Args: body (UpdateClusterParams, optional): Update Cluster Parameter. Returns: Cluster: Response from the API. Success Raises: APIException: When an error occurs while fetching the data from the remote API. This exception includes the HTTP Response code, an error message, and the HTTP body that was received in the request. 
""" try: self.logger.info('update_cluster called.') # Prepare query URL self.logger.info('Preparing query URL for update_cluster.') _url_path = '/public/cluster' _query_builder = self.config.get_base_uri() _query_builder += _url_path _query_url = APIHelper.clean_url(_query_builder) # Prepare headers self.logger.info('Preparing headers for update_cluster.') _headers = { 'accept': 'application/json', 'content-type': 'application/json; charset=utf-8' } # Prepare and execute request self.logger.info( 'Preparing and executing request for update_cluster.') _request = self.http_client.put( _query_url, headers=_headers, parameters=APIHelper.json_serialize(body)) AuthManager.apply(_request, self.config) _context = self.execute_request(_request, name='update_cluster') # Endpoint and global error handling using HTTP status codes. self.logger.info('Validating response for update_cluster.') if _context.response.status_code == 0: raise RequestErrorErrorException('Error', _context) self.validate_response(_context) # Return appropriate type return APIHelper.json_deserialize(_context.response.raw_body, Cluster.from_dictionary) except Exception as e: self.logger.error(e, exc_info=True) raise def get_app_settings(self): """Does a GET request to /public/cluster/appSettings. Returns the app settings for the cluster. Returns: AppsConfig: Response from the API. Success Raises: APIException: When an error occurs while fetching the data from the remote API. This exception includes the HTTP Response code, an error message, and the HTTP body that was received in the request. 
""" try: self.logger.info('get_app_settings called.') # Prepare query URL self.logger.info('Preparing query URL for get_app_settings.') _url_path = '/public/cluster/appSettings' _query_builder = self.config.get_base_uri() _query_builder += _url_path _query_url = APIHelper.clean_url(_query_builder) # Prepare headers self.logger.info('Preparing headers for get_app_settings.') _headers = {'accept': 'application/json'} # Prepare and execute request self.logger.info( 'Preparing and executing request for get_app_settings.') _request = self.http_client.get(_query_url, headers=_headers) AuthManager.apply(_request, self.config) _context = self.execute_request(_request, name='get_app_settings') # Endpoint and global error handling using HTTP status codes. self.logger.info('Validating response for get_app_settings.') if _context.response.status_code == 0: raise RequestErrorErrorException('Error', _context) self.validate_response(_context) # Return appropriate type return APIHelper.json_deserialize( _context.response.raw_body, AppsConfig.from_dictionary) except Exception as e: self.logger.error(e, exc_info=True) raise def update_app_settings(self, body=None): """Does a PUT request to /public/cluster/appSettings. Returns the updated app settings. Args: body (AppsConfig): Update App Settings Parameter. Returns: AppsConfig: Response from the API. Success Raises: APIException: When an error occurs while fetching the data from the remote API. This exception includes the HTTP Response code, an error message, and the HTTP body that was received in the request. 
""" try: self.logger.info('update_app_settings called.') # Prepare query URL self.logger.info( 'Preparing query URL for update_app_settings.') _url_path = '/public/cluster/appSettings' _query_builder = self.config.get_base_uri() _query_builder += _url_path _query_url = APIHelper.clean_url(_query_builder) # Prepare headers self.logger.info( 'Preparing headers for update_app_settings.') _headers = { 'accept': 'application/json', 'content-type': 'application/json; charset=utf-8' } # Prepare and execute request self.logger.info( 'Preparing and executing request for update_app_settings.' ) _request = self.http_client.put( _query_url, headers=_headers, parameters=APIHelper.json_serialize(body)) AuthManager.apply(_request, self.config) _context = self.execute_request(_request, name='update_app_settings') # Endpoint and global error handling using HTTP status codes. self.logger.info( 'Validating response for update_app_settings.') if _context.response.status_code == 0: raise RequestErrorErrorException('Error', _context) self.validate_response(_context) # Return appropriate type return APIHelper.json_deserialize( _context.response.raw_body, AppsConfig.from_dictionary) except Exception as e: self.logger.error(e, exc_info=True) raise def get_cluster_status(self): """Does a GET request to /public/cluster/status. Sends a request to get the status of every node that is part of the current Cluster. Returns: ClusterStatusResult: Response from the API. Success Raises: APIException: When an error occurs while fetching the data from the remote API. This exception includes the HTTP Response code, an error message, and the HTTP body that was received in the request. 
""" try: self.logger.info('get_cluster_status called.') # Prepare query URL self.logger.info('Preparing query URL for get_cluster_status.') _url_path = '/public/cluster/status' _query_builder = self.config.get_base_uri() _query_builder += _url_path _query_url = APIHelper.clean_url(_query_builder) # Prepare headers self.logger.info('Preparing headers for get_cluster_status.') _headers = {'accept': 'application/json'} # Prepare and execute request self.logger.info( 'Preparing and executing request for get_cluster_status.') _request = self.http_client.get(_query_url, headers=_headers) AuthManager.apply(_request, self.config) _context = self.execute_request(_request, name='get_cluster_status') # Endpoint and global error handling using HTTP status codes. self.logger.info('Validating response for get_cluster_status.') if _context.response.status_code == 0: raise RequestErrorErrorException('Error', _context) self.validate_response(_context) # Return appropriate type return APIHelper.json_deserialize( _context.response.raw_body, ClusterStatusResult.from_dictionary) except Exception as e: self.logger.error(e, exc_info=True) raise
40.092593
103
0.612537
1,590
15,155
5.607547
0.120126
0.041498
0.047106
0.046433
0.770301
0.746299
0.727681
0.712203
0.704576
0.699417
0
0.001268
0.32326
15,155
377
104
40.198939
0.868077
0.272385
0
0.609375
0
0
0.168019
0.027037
0
0
0
0
0
1
0.036458
false
0
0.052083
0
0.125
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
38c95f0394952960db6ad25f551a780eeaed8abf
62
py
Python
00.Unsorted/helloworld.py
cuicaihao/Data_Science_Python
ca4cb64bf9afc1011c192586362d0dd036e9441e
[ "MIT" ]
2
2018-04-26T12:11:41.000Z
2018-10-09T19:37:57.000Z
00.Unsorted/helloworld.py
cuicaihao/Data_Science_Python
ca4cb64bf9afc1011c192586362d0dd036e9441e
[ "MIT" ]
null
null
null
00.Unsorted/helloworld.py
cuicaihao/Data_Science_Python
ca4cb64bf9afc1011c192586362d0dd036e9441e
[ "MIT" ]
4
2018-10-09T19:37:59.000Z
2021-01-23T11:31:16.000Z
import numpy as np print("hello world") print(np.__version__)
15.5
21
0.774194
10
62
4.4
0.8
0
0
0
0
0
0
0
0
0
0
0
0.112903
62
3
22
20.666667
0.8
0
0
0
0
0
0.177419
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0.666667
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
38ccde58e2af7d5c7978a750de4187c9dff205f2
361
py
Python
Opdrachten Les 8/8_4 (containers).py
Achraf-Ben/Programming
004b92dd88c0b99a2fcdbf750a783ebe786a62fb
[ "MIT" ]
null
null
null
Opdrachten Les 8/8_4 (containers).py
Achraf-Ben/Programming
004b92dd88c0b99a2fcdbf750a783ebe786a62fb
[ "MIT" ]
null
null
null
Opdrachten Les 8/8_4 (containers).py
Achraf-Ben/Programming
004b92dd88c0b99a2fcdbf750a783ebe786a62fb
[ "MIT" ]
1
2019-04-29T16:45:43.000Z
2019-04-29T16:45:43.000Z
Geordend Muteerbaar Iterable Dubbele waarden toegestaan Tuple JA NEE JA JA Dictionary JA JA JA NEE Set NEE JA NEE NEE List JA JA JA JA
60.166667
90
0.301939
26
361
4.192308
0.461538
0.220183
0.165138
0
0
0
0
0
0
0
0
0
0.698061
361
5
91
72.2
1
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
1
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
5
38d40f17686797313986af13eab9d905df060769
57
py
Python
solaris/eval/__init__.py
rbavery/solaris
0d7bd1439a96c243d7810fcddf776b7e635a05ea
[ "Apache-2.0" ]
367
2019-05-05T22:09:39.000Z
2022-03-27T10:05:16.000Z
solaris/eval/__init__.py
avanetten/solaris
998735d7661a7faa7bfde8968396e823a22ad1ec
[ "Apache-2.0" ]
396
2019-04-30T21:51:12.000Z
2022-03-31T09:21:09.000Z
solaris/eval/__init__.py
avanetten/solaris
998735d7661a7faa7bfde8968396e823a22ad1ec
[ "Apache-2.0" ]
120
2019-06-29T20:20:08.000Z
2022-03-10T07:37:57.000Z
from . import base, iou, scot, challenges, pixel, vector
28.5
56
0.736842
8
57
5.25
1
0
0
0
0
0
0
0
0
0
0
0
0.157895
57
1
57
57
0.875
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
38d697591624ba6b55cd3fad47f78207a174da85
6,833
py
Python
tests/test_TreeMesh1D.py
bond-anton/BDMesh
e72f1ec96828c41274b82ba67fd06b44fa8b511d
[ "Apache-2.0" ]
null
null
null
tests/test_TreeMesh1D.py
bond-anton/BDMesh
e72f1ec96828c41274b82ba67fd06b44fa8b511d
[ "Apache-2.0" ]
7
2017-07-21T21:42:55.000Z
2017-08-02T10:14:19.000Z
tests/test_TreeMesh1D.py
bond-anton/BDMesh
e72f1ec96828c41274b82ba67fd06b44fa8b511d
[ "Apache-2.0" ]
null
null
null
import numpy as np import unittest from BDMesh import Mesh1D, TreeMesh1D class TestTreeMesh1D(unittest.TestCase): def setUp(self): self.root_mesh = Mesh1D(0.0, 10.0) self.tree = TreeMesh1D(self.root_mesh) def test_constructor(self): with self.assertRaises(TypeError): TreeMesh1D(1) self.assertEqual(self.tree.tree, {0: [self.root_mesh]}) self.assertEqual(self.tree.levels, [0]) self.assertEqual(self.tree.root_mesh, self.root_mesh) def test_add_mesh(self): # adding not overlapping meshes mesh1 = Mesh1D(5, 15) self.tree.add_mesh(mesh=mesh1, level=1) self.assertEqual(self.tree.tree, {0: [self.root_mesh], 1: [mesh1]}) self.assertEqual(self.tree.levels, [0, 1]) mesh2 = Mesh1D(1, 4) self.tree.add_mesh(mesh=mesh2, level=1) self.assertEqual(self.tree.tree, {0: [self.root_mesh], 1: [mesh1, mesh2]}) self.assertEqual(self.tree.levels, [0, 1]) mesh3 = Mesh1D(6, 9) self.tree.add_mesh(mesh=mesh3, level=5) self.assertEqual(self.tree.tree, {0: [self.root_mesh], 1: [mesh1, mesh2], 5: [mesh3]}) self.assertEqual(self.tree.levels, [0, 1, 5]) # adding overlapping meshes mesh4 = Mesh1D(12, 17) self.tree.add_mesh(mesh=mesh4, level=1) mesh4.merge_with(mesh1) self.assertEqual(self.tree.tree, {0: [self.root_mesh], 1: [mesh4, mesh2], 5: [mesh3]}) mesh4 = Mesh1D(3, 6) self.tree.add_mesh(mesh=mesh4, level=1) self.assertEqual(self.tree.tree, {0: [self.root_mesh], 1: [mesh1], 5: [mesh3]}) self.assertEqual(self.tree.levels, [0, 1, 5]) self.assertEqual(mesh1.physical_boundary_1, 1) self.assertEqual(mesh1.physical_boundary_2, 17) # testing exceptions self.tree.add_mesh(mesh=mesh1, level=1.5) with self.assertRaises(TypeError): self.tree.add_mesh(mesh=mesh1, level='1') with self.assertRaises(TypeError): self.tree.add_mesh(mesh='a', level=1) # test mesh level search self.assertEqual(self.tree.get_mesh_level(self.root_mesh), 0) self.assertEqual(self.tree.get_mesh_level(mesh1), 1) self.assertEqual(self.tree.get_mesh_level(mesh4), -1) with self.assertRaises(TypeError): self.tree.get_mesh_level(2) def 
test_get_children(self): mesh = Mesh1D(1, 7) self.tree.add_mesh(mesh=mesh, level=1) self.tree.add_mesh(mesh=Mesh1D(1, 6), level=2) self.tree.add_mesh(mesh=Mesh1D(2, 3), level=3) self.tree.add_mesh(mesh=Mesh1D(4, 5), level=3) children = self.tree.get_children(mesh) self.assertEqual(children, {2: [Mesh1D(1, 6)], 3: [Mesh1D(2, 3), Mesh1D(4, 5)]}) self.tree.add_mesh(mesh=Mesh1D(4.5, 7.5), level=3) children = self.tree.get_children(mesh) self.assertEqual(children, {2: [Mesh1D(1, 6)], 3: [Mesh1D(2, 3)]}) # testing exceptions with self.assertRaises(TypeError): self.tree.get_children(mesh='a') def test_delete(self): mesh = Mesh1D(1, 7) self.tree.add_mesh(mesh=mesh, level=1) self.tree.add_mesh(mesh=Mesh1D(1, 6), level=2) self.tree.add_mesh(mesh=Mesh1D(2, 3), level=3) self.tree.add_mesh(mesh=Mesh1D(4, 5), level=3) children = self.tree.get_children(mesh) self.assertEqual(children, {2: [Mesh1D(1, 6)], 3: [Mesh1D(2, 3), Mesh1D(4, 5)]}) self.tree.del_mesh(Mesh1D(4, 5)) children = self.tree.get_children(mesh) self.assertEqual(children, {2: [Mesh1D(1, 6)], 3: [Mesh1D(2, 3)]}) self.tree.add_mesh(mesh=Mesh1D(4, 5), level=3) children = self.tree.get_children(mesh) self.assertEqual(children, {2: [Mesh1D(1, 6)], 3: [Mesh1D(2, 3), Mesh1D(4, 5)]}) self.tree.del_mesh(Mesh1D(1, 6)) self.assertEqual(self.tree.tree, {0: [self.root_mesh], 1: [Mesh1D(1, 7)]}) self.assertFalse(self.tree.del_mesh(self.root_mesh)) self.assertFalse(self.tree.del_mesh(Mesh1D(100, 110))) with self.assertRaises(TypeError): self.tree.del_mesh(1) def test_remove_coarse_duplicates(self): mesh = Mesh1D(1, 9) self.tree.add_mesh(mesh=mesh, level=1) self.tree.add_mesh(mesh=Mesh1D(1, 7), level=2) self.tree.add_mesh(mesh=Mesh1D(1, 7), level=3) self.tree.add_mesh(mesh=Mesh1D(8, 9), level=3) children = self.tree.get_children(mesh) self.assertEqual(children, {2: [Mesh1D(1, 7)], 3: [Mesh1D(1, 7), Mesh1D(8, 9)]}) self.tree.remove_coarse_duplicates() children = self.tree.get_children(mesh) self.assertEqual(children, {3: [Mesh1D(1, 
7), Mesh1D(8, 9)]}) self.assertEqual(self.tree.tree, {0: [self.root_mesh], 1: [mesh], 3: [Mesh1D(1, 7), Mesh1D(8, 9)]}) def test_flatten(self): # adding not overlapping meshes mesh1 = Mesh1D(5, 15) self.tree.add_mesh(mesh=mesh1, level=1) self.assertEqual(self.tree.tree, {0: [self.root_mesh], 1: [mesh1]}) self.assertEqual(self.tree.levels, [0, 1]) mesh2 = Mesh1D(1, 4) self.tree.add_mesh(mesh=mesh2, level=1) self.assertEqual(self.tree.tree, {0: [self.root_mesh], 1: [mesh1, mesh2]}) self.assertEqual(self.tree.levels, [0, 1]) mesh3 = Mesh1D(6, 9) self.tree.add_mesh(mesh=mesh3, level=5) self.assertEqual(self.tree.tree, {0: [self.root_mesh], 1: [mesh1, mesh2], 5: [mesh3]}) self.assertEqual(self.tree.levels, [0, 1, 5]) # adding overlapping meshes mesh4 = Mesh1D(12, 17) self.tree.add_mesh(mesh=mesh4, level=1) flattened = self.tree.flatten() self.assertTrue(isinstance(flattened, Mesh1D)) flat_grid = np.array([0.0, 1.0, 4.0, 5.0, 6.0, 9.0, 10.0, 12.0, 15.0, 17.0]) np.testing.assert_allclose(flattened.physical_nodes, flat_grid) def test_recalculate(self): # adding not overlapping meshes mesh1 = Mesh1D(5, 15) self.tree.add_mesh(mesh=mesh1, level=1) self.assertEqual(self.tree.tree, {0: [self.root_mesh], 1: [mesh1]}) self.assertEqual(self.tree.levels, [0, 1]) mesh2 = Mesh1D(1, 4) self.tree.add_mesh(mesh=mesh2, level=1) self.assertEqual(self.tree.tree, {0: [self.root_mesh], 1: [mesh1, mesh2]}) self.assertEqual(self.tree.levels, [0, 1]) mesh3 = Mesh1D(6, 9) self.tree.add_mesh(mesh=mesh3, level=5) self.assertEqual(self.tree.tree, {0: [self.root_mesh], 1: [mesh1, mesh2], 5: [mesh3]}) self.assertEqual(self.tree.levels, [0, 1, 5]) # adding overlapping meshes self.tree.tree[0] = None self.tree.recalculate_levels() self.assertEqual(self.tree.tree, {0: [mesh1, mesh2], 4: [mesh3]})
46.168919
107
0.617737
995
6,833
4.148744
0.078392
0.151163
0.138081
0.167151
0.82001
0.779554
0.741764
0.692587
0.656492
0.61095
0
0.070083
0.223182
6,833
147
108
46.482993
0.707611
0.033367
0
0.571429
0
0
0.000455
0
0
0
0
0
0.388889
1
0.063492
false
0
0.02381
0
0.095238
0
0
0
0
null
0
0
1
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
2a02b84bcede4c3fce2c881119a5b126ce9c1ce9
128
py
Python
pyconcz_2016/announcements/admin.py
pyvec/cz.pycon.org-2016
b4affabcf2b1cdd629a2dc67dba671b3414b3682
[ "MIT" ]
10
2016-01-27T08:37:41.000Z
2018-04-26T08:33:44.000Z
pyconcz_2016/announcements/admin.py
pyvec/cz.pycon.org-2016
b4affabcf2b1cdd629a2dc67dba671b3414b3682
[ "MIT" ]
101
2015-11-15T11:20:33.000Z
2019-04-03T15:17:47.000Z
pyconcz_2016/announcements/admin.py
pyvec/cz.pycon.org-2016
b4affabcf2b1cdd629a2dc67dba671b3414b3682
[ "MIT" ]
10
2015-11-15T21:35:53.000Z
2017-01-25T14:30:27.000Z
from django.contrib import admin from pyconcz_2016.announcements.models import Announcement admin.site.register(Announcement)
21.333333
58
0.859375
16
128
6.8125
0.75
0
0
0
0
0
0
0
0
0
0
0.034188
0.085938
128
5
59
25.6
0.897436
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
2a16a0a592b36642ebd543aca278b31985f1feaf
7,962
py
Python
tests/test_api.py
FindingJohnny/simplisafe-python
e660938c8280dd47d5a90c49a39f77bba27e8a30
[ "MIT" ]
null
null
null
tests/test_api.py
FindingJohnny/simplisafe-python
e660938c8280dd47d5a90c49a39f77bba27e8a30
[ "MIT" ]
null
null
null
tests/test_api.py
FindingJohnny/simplisafe-python
e660938c8280dd47d5a90c49a39f77bba27e8a30
[ "MIT" ]
null
null
null
"""Define tests for the System object.""" # pylint: disable=protected-access from datetime import datetime, timedelta import aiohttp from aresponses import ResponsesMockServer import pytest from simplipy import API from simplipy.errors import ( InvalidCredentialsError, PendingAuthorizationError, RequestError, ) from .common import ( TEST_CLIENT_ID, TEST_EMAIL, TEST_PASSWORD, TEST_REFRESH_TOKEN, TEST_SUBSCRIPTION_ID, TEST_SYSTEM_ID, TEST_USER_ID, load_fixture, ) @pytest.mark.asyncio async def test_401_bad_credentials(aresponses): """Test that an InvalidCredentialsError is raised with a 401 upon login.""" aresponses.add( "api.simplisafe.com", "/v1/api/token", "post", aresponses.Response(text="Unauthorized", status=401), ) async with aiohttp.ClientSession() as session: with pytest.raises(InvalidCredentialsError): await API.login_via_credentials( TEST_EMAIL, TEST_PASSWORD, client_id=TEST_CLIENT_ID, session=session ) @pytest.mark.asyncio async def test_401_refresh_token_failure( aresponses, v2_server, v2_subscriptions_response ): """Test that a InvalidCredentialsError is raised with refresh token failure.""" async with v2_server: v2_server.add( "api.simplisafe.com", f"/v1/users/{TEST_USER_ID}/subscriptions", "get", aresponses.Response(text=v2_subscriptions_response, status=200), ) v2_server.add( "api.simplisafe.com", f"/v1/subscriptions/{TEST_SUBSCRIPTION_ID}/settings", "get", aresponses.Response(text="Unauthorized", status=401), ) v2_server.add( "api.simplisafe.com", "/v1/api/token", "post", aresponses.Response(text="Unauthorized", status=401), ) async with aiohttp.ClientSession() as session: with pytest.raises(InvalidCredentialsError): simplisafe = await API.login_via_credentials( TEST_EMAIL, TEST_PASSWORD, client_id=TEST_CLIENT_ID, session=session ) systems = await simplisafe.get_systems() system = systems[TEST_SYSTEM_ID] await system.update() @pytest.mark.asyncio async def test_401_refresh_token_success( aresponses, v2_server, v2_subscriptions_response ): """Test 
that a successful refresh token carries out the original request.""" async with v2_server: v2_server.add( "api.simplisafe.com", f"/v1/users/{TEST_USER_ID}/subscriptions", "get", aresponses.Response(text="Unauthorized", status=401), ) v2_server.add( "api.simplisafe.com", "/v1/api/token", "post", aresponses.Response( text=load_fixture("api_token_response.json"), status=200 ), ) v2_server.add( "api.simplisafe.com", "/v1/api/authCheck", "get", aresponses.Response( text=load_fixture("auth_check_response.json"), status=200 ), ) v2_server.add( "api.simplisafe.com", f"/v1/users/{TEST_USER_ID}/subscriptions", "get", aresponses.Response(text=v2_subscriptions_response, status=200,), ) v2_server.add( "api.simplisafe.com", f"/v1/subscriptions/{TEST_SUBSCRIPTION_ID}/settings", "get", aresponses.Response( text=load_fixture("v2_settings_response.json"), status=200 ), ) async with aiohttp.ClientSession() as session: simplisafe = await API.login_via_credentials( TEST_EMAIL, TEST_PASSWORD, client_id=TEST_CLIENT_ID, session=session ) assert simplisafe.client_id == TEST_CLIENT_ID systems = await simplisafe.get_systems() system = systems[TEST_SYSTEM_ID] await system.update() assert simplisafe.refresh_token == TEST_REFRESH_TOKEN @pytest.mark.asyncio async def test_403_bad_credentials(aresponses): """Test that an InvalidCredentialsError is raised with a 403 upon login.""" aresponses.add( "api.simplisafe.com", "/v1/api/token", "post", aresponses.Response(text="Unauthorized", status=403), ) async with aiohttp.ClientSession() as session: with pytest.raises(InvalidCredentialsError): await API.login_via_credentials( TEST_EMAIL, TEST_PASSWORD, client_id=TEST_CLIENT_ID, session=session ) @pytest.mark.asyncio async def test_bad_request(aresponses, v2_server): """Test that a RequestError is raised on a non-existent endpoint.""" async with v2_server: v2_server.add( "api.simplisafe.com", "/v1/api/fakeEndpoint", "get", aresponses.Response(text="Not Found", status=404), ) async with 
aiohttp.ClientSession() as session: simplisafe = await API.login_via_credentials( TEST_EMAIL, TEST_PASSWORD, client_id=TEST_CLIENT_ID, session=session ) with pytest.raises(RequestError): await simplisafe.request("get", "api/fakeEndpoint") @pytest.mark.asyncio async def test_expired_token_refresh(aresponses, v2_server): """Test that a refresh token is used correctly.""" async with v2_server: v2_server.add( "api.simplisafe.com", "/v1/api/token", "post", aresponses.Response( text=load_fixture("api_token_response.json"), status=200 ), ) v2_server.add( "api.simplisafe.com", "/v1/api/authCheck", "get", aresponses.Response( text=load_fixture("auth_check_response.json"), status=200 ), ) v2_server.add( "api.simplisafe.com", "/v1/api/token", "post", aresponses.Response( text=load_fixture("api_token_response.json"), status=200 ), ) v2_server.add( "api.simplisafe.com", "/v1/api/authCheck", "get", aresponses.Response( text=load_fixture("auth_check_response.json"), status=200 ), ) async with aiohttp.ClientSession() as session: simplisafe = await API.login_via_credentials( TEST_EMAIL, TEST_PASSWORD, client_id=TEST_CLIENT_ID, session=session ) simplisafe._access_token_expire = datetime.now() - timedelta(hours=1) await simplisafe.request("post", "api/token") @pytest.mark.asyncio async def test_mfa(aresponses): """Test that a successful MFA flow throws the correct exception.""" aresponses.add( "api.simplisafe.com", "/v1/api/token", "post", aresponses.Response( text=load_fixture("mfa_required_response.json"), status=401 ), ) aresponses.add( "api.simplisafe.com", "/v1/api/mfa/challenge", "post", aresponses.Response( text=load_fixture("mfa_challenge_response.json"), status=200 ), ) aresponses.add( "api.simplisafe.com", "/v1/api/token", "post", aresponses.Response( text=load_fixture("mfa_authorization_pending_response.json"), status=200 ), ) async with aiohttp.ClientSession() as session: with pytest.raises(PendingAuthorizationError): await API.login_via_credentials( TEST_EMAIL, 
TEST_PASSWORD, client_id=None, session=session )
31.595238
88
0.597965
826
7,962
5.564165
0.138015
0.036554
0.062663
0.074413
0.766754
0.762402
0.74195
0.704091
0.704091
0.666014
0
0.021028
0.301181
7,962
251
89
31.721116
0.804996
0.008666
0
0.672897
0
0
0.156009
0.066378
0
0
0
0
0.009346
1
0
false
0.037383
0.03271
0
0.03271
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
2a3133d09e2f4191f4f29e844b01c21c79522e40
327
py
Python
dinossauro.py
evandropierot2021/PEC-186-NOITE
7e8fc296f78ee79e16539319dc86c35159cae756
[ "MIT" ]
null
null
null
dinossauro.py
evandropierot2021/PEC-186-NOITE
7e8fc296f78ee79e16539319dc86c35159cae756
[ "MIT" ]
null
null
null
dinossauro.py
evandropierot2021/PEC-186-NOITE
7e8fc296f78ee79e16539319dc86c35159cae756
[ "MIT" ]
null
null
null
print("Parabéns!!!!!!!!!!") print("Aqui vai um dinossauro!") print(''' ___ / __) .-^^^-/ / __/ / <__.|_|-|_| E aqui vai um bolo! hummmmmmmmm .......... iiiiiiiiii ########### =========== ########### =========== ########### ''')
14.217391
35
0.269113
15
327
5.133333
0.666667
0.181818
0.233766
0
0
0
0
0
0
0
0
0
0.388379
327
22
36
14.863636
0.385
0
0
0.294118
0
0
0.892966
0
0
0
0
0
0
1
0
true
0
0
0
0
0.176471
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
2a5a92d219faab2e9f9f76a2ddd3c1a425be73b1
46
py
Python
SG/__init__.py
yezz123/BFSG
57c0ef53a71c3bf1052a08d0cbb070ad891ac62d
[ "MIT" ]
11
2021-02-14T02:27:20.000Z
2021-11-12T01:33:04.000Z
SG/__init__.py
Collector0/BFSG
57c0ef53a71c3bf1052a08d0cbb070ad891ac62d
[ "MIT" ]
15
2021-04-20T18:48:32.000Z
2022-03-25T00:25:57.000Z
SG/__init__.py
yezz123/BFSG
57c0ef53a71c3bf1052a08d0cbb070ad891ac62d
[ "MIT" ]
2
2021-04-14T11:14:45.000Z
2021-04-14T11:14:45.000Z
from SG.BFSG import BruteForceStringGenerator
23
45
0.891304
5
46
8.2
1
0
0
0
0
0
0
0
0
0
0
0
0.086957
46
1
46
46
0.97619
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
2a7b35837fcb7c7d31bcfa25429b47359d8b343e
110
py
Python
rokuctl/actions/__init__.py
robobrobro/rokuctl
b7e2b22983c65d1578fdbf389b12c0c1966b1c95
[ "MIT" ]
null
null
null
rokuctl/actions/__init__.py
robobrobro/rokuctl
b7e2b22983c65d1578fdbf389b12c0c1966b1c95
[ "MIT" ]
1
2019-03-31T15:16:39.000Z
2019-03-31T15:16:39.000Z
rokuctl/actions/__init__.py
robobrobro/rokuctl
b7e2b22983c65d1578fdbf389b12c0c1966b1c95
[ "MIT" ]
null
null
null
from .base import Action from .discover import Discover from .info import Info from .keypress import Keypress
22
30
0.818182
16
110
5.625
0.4375
0
0
0
0
0
0
0
0
0
0
0
0.145455
110
4
31
27.5
0.957447
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
aa5bd6d0f67816df44beefb3c3aa035a7958dfaa
51
py
Python
Modulo1/ingles.py
EUD-curso-python/control_de_flujo-ipsuarezc
7c82b4b01d1d02f53984ed730904759eb5219c61
[ "MIT" ]
null
null
null
Modulo1/ingles.py
EUD-curso-python/control_de_flujo-ipsuarezc
7c82b4b01d1d02f53984ed730904759eb5219c61
[ "MIT" ]
null
null
null
Modulo1/ingles.py
EUD-curso-python/control_de_flujo-ipsuarezc
7c82b4b01d1d02f53984ed730904759eb5219c61
[ "MIT" ]
null
null
null
from saludo import ingles, espanol print(ingles())
17
34
0.784314
7
51
5.714286
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.117647
51
3
35
17
0.888889
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
aa618d649da9d5ce2c007c2d1867b1c0c5ee1b20
19,198
py
Python
tests/test_model_goal.py
andraune/Run4IT_BackEnd
a481427a0d1189a1f08c42e7ac1b452af6bbfc8d
[ "MIT" ]
1
2022-03-29T06:11:20.000Z
2022-03-29T06:11:20.000Z
tests/test_model_goal.py
andraune/run4it_backend
a481427a0d1189a1f08c42e7ac1b452af6bbfc8d
[ "MIT" ]
null
null
null
tests/test_model_goal.py
andraune/run4it_backend
a481427a0d1189a1f08c42e7ac1b452af6bbfc8d
[ "MIT" ]
null
null
null
import pytest import datetime as dt from run4it.api.goal import GoalModel, GoalCategoryModel from run4it.api.workout import WorkoutCategoryModel, WorkoutModel @pytest.mark.usefixtures('db') class TestGoalCategoryModel: def test_get_by_id(self): new_item = GoalCategoryModel("Run Kms") new_item.save() retrieved_item = GoalCategoryModel.get_by_id(new_item.id) assert(retrieved_item == new_item) def test_category_name_not_unique(self, db): item = GoalCategoryModel("Distance", "km", 1) item.save() item_new = GoalCategoryModel("Distance", "km", 2) item_new.save() num_items = db.session.query(GoalCategoryModel).count() assert(num_items == 2) def test_category_name_and_workout_category_unique(self, db): item = GoalCategoryModel("Distance", "km", 1) item.save() try: item_new = GoalCategoryModel("Distance", "km", 1) item_new.save() except: db.session.rollback() num_items = db.session.query(GoalCategoryModel).count() assert(num_items == 1) def test_category_unit(self, db): item = GoalCategoryModel("Run Kms", "km") item.save() assert(item.unit == 'km') def test_category_workout_category_link(self, db): WorkoutCategoryModel("Running", True).save() item1 = GoalCategoryModel("Run Kms", "km", None) item1.save() item2 = GoalCategoryModel("Run Kms", "km", 1) item2.save() assert(item1.workout_category is None) assert(item2.workout_category.id == 1) @pytest.mark.usefixtures('db') class TestGoalModel: def setup(self): WorkoutCategoryModel("Running", True).save() GoalCategoryModel("RunKms", "km", 1).save(commit=False) GoalCategoryModel("WeightTarget").save() def test_setup(self, db): assert(db.session.query(GoalCategoryModel).count() == 2) def test_get_by_id(self): category = GoalCategoryModel.get_by_id(1) new_goal = GoalModel(1, category) new_goal.save() retrieved_goal = GoalModel.get_by_id(new_goal.id) assert(retrieved_goal == new_goal) def test_goal_category_workout_id(self): category1 = GoalCategoryModel.get_by_id(1) category2 = GoalCategoryModel.get_by_id(2) 
assert(category1.workout_category_id == 1) assert(category2.workout_category_id is None) def test_goal_category_link(self): category = GoalCategoryModel.get_by_id(1) new_goal = GoalModel(1, category) new_goal.save() assert(new_goal.category.id == 1) assert(new_goal.category.name == 'RunKms') assert(new_goal.category_unit == 'km') def test_goal_category_unit_none_gives_empty_string(self): category = GoalCategoryModel.get_by_id(2) new_goal = GoalModel(1, category) new_goal.save() assert(new_goal.category.id == 2) assert(new_goal.category_unit == '') def test_two_goals_with_same_category_and_profile(self): category = GoalCategoryModel.get_by_id(1) new_goal1 = GoalModel(1, category) new_goal1.save() new_goal2 = GoalModel(1, category) new_goal2.save() assert(GoalModel.query.count() == 2) def test_default_start_date_and_end_date(self): category = GoalCategoryModel.get_by_id(1) now = dt.datetime.utcnow() item = GoalModel(1, category) item.save() assert((now - dt.timedelta(minutes=1)) < item.start_at) assert((now + dt.timedelta(minutes=1)) > item.start_at) assert((item.end_at - item.start_at).seconds <= 1) assert(item.duration == 1) def test_start_date_duration(self): category = GoalCategoryModel.get_by_id(1) start_at = dt.datetime.utcnow() + dt.timedelta(days=3) end_at = start_at + dt.timedelta(days=7) item = GoalModel(1, category, start_at, end_at) item.save() assert(item.start_at == start_at) assert(item.end_at == end_at) assert(item.duration == 7.0000) def test_misc_durations(self): category = GoalCategoryModel.get_by_id(1) start_at = dt.datetime(2020, 1, 1, 0, 0, 0) end_at = dt.datetime(2020, 1, 1, 13, 0, 0) item = GoalModel(1, category, start_at, end_at) expected_duration = round(13*60*60 / 86400, 4) assert(item.duration == expected_duration) end_at = dt.datetime(2021, 1, 1, 13, 0, 0) item = GoalModel(1, category, start_at, end_at) expected_duration = 366 + round(13*60*60 / 86400, 4) assert(item.duration == expected_duration) def test_profile_id(self): category = 
GoalCategoryModel.get_by_id(1) item = GoalModel(3, category) assert(item.profile_id == 3) def test_values(self): category = GoalCategoryModel.get_by_id(1) item = GoalModel(3, category, start_value=1, current_value=2, target_value=3) assert(item.start_value==1) assert(item.current_value==2) assert(item.target_value==3) @pytest.mark.usefixtures('db') class TestGoalModelUpdateFromWorkouts: profile_id = 1 def setup(self): WorkoutCategoryModel("Running", True).save(commit=False) WorkoutCategoryModel("Fitness", False).save() self.running_workout_category = WorkoutCategoryModel.get_by_id(1) self.fitness_workout_category = WorkoutCategoryModel.get_by_id(2) GoalCategoryModel("Cumulative distance", "km", 1).save(commit=False) GoalCategoryModel("Number of workouts", "#", 1).save(commit=False) GoalCategoryModel("Number of workouts", "#", 2).save(commit=False) GoalCategoryModel("Cumulative climb", "m", 1).save(commit=False) GoalCategoryModel("Weight loss", "kg", None).save() self.cumulative_distance_goal_category = GoalCategoryModel.get_by_id(1) self.num_of_running_workouts_goal_category = GoalCategoryModel.get_by_id(2) self.num_of_fitness_workouts_goal_category = GoalCategoryModel.get_by_id(3) self.cumulative_climb_goal_category = GoalCategoryModel.get_by_id(4) self.weight_loss_goal_category = GoalCategoryModel.get_by_id(5) two_days_ago = dt.datetime.utcnow() - dt.timedelta(days=2) two_days_from_now = dt.datetime.utcnow() + dt.timedelta(days=2) GoalModel(self.profile_id, self.cumulative_distance_goal_category, two_days_ago, two_days_from_now, 0, 100, 0).save(commit=False) GoalModel(self.profile_id, self.num_of_running_workouts_goal_category, two_days_ago, two_days_from_now, 0, 10, 0).save(commit=False) GoalModel(self.profile_id, self.num_of_fitness_workouts_goal_category, two_days_ago, two_days_from_now, 0, 10, 0).save(commit=False) GoalModel(self.profile_id, self.cumulative_climb_goal_category, two_days_ago, two_days_from_now, 0, 500, 0).save(commit=False) 
GoalModel(self.profile_id, self.weight_loss_goal_category, two_days_ago, two_days_from_now, 80, 70, 78).save() self.cumulative_distance_goal = GoalModel.get_by_id(1) self.running_workouts_goal = GoalModel.get_by_id(2) self.fitness_workouts_goal = GoalModel.get_by_id(3) self.cumulative_climb_goal = GoalModel.get_by_id(4) self.weight_loss_goal = GoalModel.get_by_id(5) def test_setup(self): assert(self.cumulative_distance_goal_category.workout_category_name == "Running") assert(self.num_of_running_workouts_goal_category.workout_category_name == "Running") assert(self.num_of_fitness_workouts_goal_category.workout_category_name == "Fitness") assert(self.cumulative_climb_goal_category.workout_category_name == "Running") assert(self.weight_loss_goal_category.workout_category_name == "") assert(self.cumulative_distance_goal.category_name == "Cumulative distance") assert(self.running_workouts_goal.category_name == "Number of workouts") assert(self.fitness_workouts_goal.category_name == "Number of workouts") assert(self.cumulative_climb_goal.category_name == "Cumulative climb") assert(self.weight_loss_goal.category_name == "Weight loss") assert(self.cumulative_distance_goal.start_value == 0) assert(self.running_workouts_goal.start_value == 0) assert(self.fitness_workouts_goal.start_value == 0) assert(self.cumulative_climb_goal.start_value == 0) assert(self.weight_loss_goal.start_value == 80) def test_running_workout_updates_running_goals(self): workout = WorkoutModel(self.profile_id, self.running_workout_category, "Run workout", dt.datetime.utcnow(), 1000, 3600, 10) workout.save() self.cumulative_distance_goal.update_from_workout(workout) self.running_workouts_goal.update_from_workout(workout) self.fitness_workouts_goal.update_from_workout(workout) self.cumulative_climb_goal.update_from_workout(workout) self.weight_loss_goal.update_from_workout(workout) assert(self.cumulative_distance_goal.current_value == 1) # goal has km, workout has m for distance 
assert(self.running_workouts_goal.current_value == 1) assert(self.fitness_workouts_goal.current_value == 0) assert(self.cumulative_climb_goal.current_value == 10) # workout has m for climb (elevation gain) assert(self.weight_loss_goal.current_value == 78) def test_fitness_workout_updates_fitness_goal(self): workout = WorkoutModel(self.profile_id, self.fitness_workout_category, "Fitness workout", dt.datetime.utcnow(), 0, 3600, 0) workout.save() self.cumulative_distance_goal.update_from_workout(workout) self.running_workouts_goal.update_from_workout(workout) self.fitness_workouts_goal.update_from_workout(workout) self.cumulative_climb_goal.update_from_workout(workout) self.weight_loss_goal.update_from_workout(workout) assert(self.cumulative_distance_goal.current_value == 0) assert(self.running_workouts_goal.current_value == 0) assert(self.fitness_workouts_goal.current_value == 1) assert(self.cumulative_climb_goal.current_value == 0) assert(self.weight_loss_goal.current_value == 78) def test_several_running_workouts(self): workout_1 = WorkoutModel(self.profile_id, self.running_workout_category, "Running workout", dt.datetime.utcnow(), 10000, 3600, 5) workout_1.save(commit=False) workout_2 = WorkoutModel(self.profile_id, self.running_workout_category, "Running workout", dt.datetime.utcnow(), 20000, 5000, 15) workout_2.save() self.cumulative_distance_goal.update_from_workout(workout_1) self.running_workouts_goal.update_from_workout(workout_1) self.cumulative_climb_goal.update_from_workout(workout_1) self.cumulative_distance_goal.update_from_workout(workout_2) self.running_workouts_goal.update_from_workout(workout_2) self.cumulative_climb_goal.update_from_workout(workout_2) assert(self.cumulative_distance_goal.current_value == 30) assert(self.running_workouts_goal.current_value == 2) assert(self.cumulative_climb_goal.current_value == 20) def test_several_fitness_workouts(self): workout_1 = WorkoutModel(self.profile_id, self.fitness_workout_category, "Fitness workout", 
dt.datetime.utcnow(), 0, 3600, 0) workout_1.save(commit=False) workout_2 = WorkoutModel(self.profile_id, self.fitness_workout_category, "Fitness workout", dt.datetime.utcnow(), 0, 3600, 0) workout_2.save() self.fitness_workouts_goal.update_from_workout(workout_1) self.fitness_workouts_goal.update_from_workout(workout_2) assert(self.fitness_workouts_goal.current_value == 2) def test_workout_for_different_profile_does_not_update(self): workout = WorkoutModel(self.profile_id+1, self.running_workout_category, "Running workout", dt.datetime.utcnow(), 10000, 3600, 10) workout.save() self.cumulative_distance_goal.update_from_workout(workout) self.running_workouts_goal.update_from_workout(workout) self.cumulative_climb_goal.update_from_workout(workout) assert(self.cumulative_distance_goal.current_value == 0) assert(self.running_workouts_goal.current_value == 0) assert(self.cumulative_climb_goal.current_value == 0) def test_workout_before_start_date_does_not_update(self): workout = WorkoutModel(self.profile_id, self.fitness_workout_category, "Fitness workout", dt.datetime.utcnow() - dt.timedelta(days=3), 0, 3600, 0) workout.save() self.fitness_workouts_goal.update_from_workout(workout) assert(self.fitness_workouts_goal.current_value == 0) def test_workout_after_start_date_does_not_update(self): workout = WorkoutModel(self.profile_id, self.fitness_workout_category, "Fitness workout", dt.datetime.utcnow() + dt.timedelta(days=3), 0, 3600, 0) workout.save() self.fitness_workouts_goal.update_from_workout(workout) assert(self.fitness_workouts_goal.current_value == 0) def test_running_workout_remove_updates_running_goals(self): add_workout = WorkoutModel(self.profile_id, self.running_workout_category, "Run workout", dt.datetime.utcnow(), 1000, 3600, 10) add_workout.save(commit=False) remove_workout = WorkoutModel(self.profile_id, self.running_workout_category, "Run workout", dt.datetime.utcnow(), 500, 3600, 3) remove_workout.save() 
self.cumulative_distance_goal.update_from_workout(add_workout) self.running_workouts_goal.update_from_workout(add_workout) self.fitness_workouts_goal.update_from_workout(add_workout) self.cumulative_climb_goal.update_from_workout(add_workout) self.weight_loss_goal.update_from_workout(add_workout) self.cumulative_distance_goal.remove_from_workout(remove_workout) self.running_workouts_goal.remove_from_workout(remove_workout) self.fitness_workouts_goal.remove_from_workout(remove_workout) self.cumulative_climb_goal.remove_from_workout(remove_workout) self.weight_loss_goal.remove_from_workout(remove_workout) assert(self.cumulative_distance_goal.current_value == 0.5) assert(self.running_workouts_goal.current_value == 0) assert(self.fitness_workouts_goal.current_value == 0) assert(self.cumulative_climb_goal.current_value == 7) assert(self.weight_loss_goal.current_value == 78) def test_fitness_workout_remove_updates_fitness_goals(self): add_workout = WorkoutModel(self.profile_id, self.fitness_workout_category, "Fitness workout", dt.datetime.utcnow(), 0, 3600, 0) add_workout.save(commit=False) remove_workout = WorkoutModel(self.profile_id, self.fitness_workout_category, "Fitness workout", dt.datetime.utcnow(), 0, 1000, 0) remove_workout.save() self.cumulative_distance_goal.update_from_workout(add_workout) self.running_workouts_goal.update_from_workout(add_workout) self.fitness_workouts_goal.update_from_workout(add_workout) self.cumulative_climb_goal.update_from_workout(add_workout) self.weight_loss_goal.update_from_workout(add_workout) self.cumulative_distance_goal.remove_from_workout(remove_workout) self.running_workouts_goal.remove_from_workout(remove_workout) self.fitness_workouts_goal.remove_from_workout(remove_workout) self.cumulative_climb_goal.remove_from_workout(remove_workout) self.weight_loss_goal.remove_from_workout(remove_workout) assert(self.cumulative_distance_goal.current_value == 0) assert(self.running_workouts_goal.current_value == 0) 
assert(self.fitness_workouts_goal.current_value == 0) assert(self.cumulative_climb_goal.current_value == 0) assert(self.weight_loss_goal.current_value == 78) def test_several_running_workout_remove_updates_running_goals(self): add_workout = WorkoutModel(self.profile_id, self.running_workout_category, "Run workout", dt.datetime.utcnow(), 2100, 3600, 10) add_workout.save(commit=False) remove_workout = WorkoutModel(self.profile_id, self.running_workout_category, "Run workout", dt.datetime.utcnow(), 500, 3600, 3) remove_workout.save() self.cumulative_distance_goal.update_from_workout(add_workout) self.cumulative_climb_goal.update_from_workout(add_workout) self.cumulative_distance_goal.remove_from_workout(remove_workout) self.cumulative_climb_goal.remove_from_workout(remove_workout) self.cumulative_distance_goal.remove_from_workout(remove_workout) # remove again self.cumulative_climb_goal.remove_from_workout(remove_workout) # remove again assert(self.cumulative_distance_goal.current_value == 1.1) assert(self.cumulative_climb_goal.current_value == 4) def test_several_fitness_workout_remove_updates_fitness_goals(self): add_workout = WorkoutModel(self.profile_id, self.fitness_workout_category, "Fitness workout", dt.datetime.utcnow(), 0, 3600, 0) add_workout.save(commit=False) remove_workout = WorkoutModel(self.profile_id, self.fitness_workout_category, "Fitness workout", dt.datetime.utcnow(), 0, 1000, 0) remove_workout.save() self.fitness_workouts_goal.update_from_workout(add_workout) self.fitness_workouts_goal.update_from_workout(add_workout) # added again self.fitness_workouts_goal.update_from_workout(add_workout) # added again self.fitness_workouts_goal.remove_from_workout(remove_workout) self.fitness_workouts_goal.remove_from_workout(remove_workout) # removed again assert(self.fitness_workouts_goal.current_value == 1) def test_remove_too_much_running_doesnt_give_negative_values(self): remove_workout = WorkoutModel(self.profile_id, self.running_workout_category, "Run 
workout", dt.datetime.utcnow(), 500, 3600, 3) remove_workout.save() self.cumulative_distance_goal.remove_from_workout(remove_workout) self.cumulative_climb_goal.remove_from_workout(remove_workout) assert(self.cumulative_distance_goal.current_value == 0) assert(self.cumulative_climb_goal.current_value == 0) def test_remove_too_much_fitness_doesnt_give_negative_values(self): remove_workout = WorkoutModel(self.profile_id, self.fitness_workout_category, "Fitness workout", dt.datetime.utcnow(), 0, 1000, 0) remove_workout.save() self.fitness_workouts_goal.remove_from_workout(remove_workout) assert(self.fitness_workouts_goal.current_value == 0) def test_workout_for_another_profile_doesnt_remove(self): workout_add = WorkoutModel(self.profile_id, self.fitness_workout_category, "Fitness workout", dt.datetime.utcnow(), 0, 3600, 0) workout_add.save() workout_remove = WorkoutModel(self.profile_id+1, self.fitness_workout_category, "Fitness workout", dt.datetime.utcnow() - dt.timedelta(days=3), 0, 3600, 0) workout_remove.save() self.fitness_workouts_goal.update_from_workout(workout_add) self.fitness_workouts_goal.remove_from_workout(workout_remove) assert(self.fitness_workouts_goal.current_value == 1) def test_workout_before_start_date_does_not_remove(self): workout_add = WorkoutModel(self.profile_id, self.fitness_workout_category, "Fitness workout", dt.datetime.utcnow(), 0, 3600, 0) workout_add.save() workout_remove = WorkoutModel(self.profile_id, self.fitness_workout_category, "Fitness workout", dt.datetime.utcnow() - dt.timedelta(days=3), 0, 3600, 0) workout_remove.save() self.fitness_workouts_goal.update_from_workout(workout_add) self.fitness_workouts_goal.remove_from_workout(workout_remove) assert(self.fitness_workouts_goal.current_value == 1) def test_workout_after_start_date_does_not_remove(self): workout_add = WorkoutModel(self.profile_id, self.fitness_workout_category, "Fitness workout", dt.datetime.utcnow(), 0, 3600, 0) workout_add.save() workout_remove = 
WorkoutModel(self.profile_id, self.fitness_workout_category, "Fitness workout", dt.datetime.utcnow() + dt.timedelta(days=3), 0, 3600, 0) workout_remove.save() self.fitness_workouts_goal.update_from_workout(workout_add) self.fitness_workouts_goal.update_from_workout(workout_remove) assert(self.fitness_workouts_goal.current_value == 1)
51.331551
371
0.785915
2,686
19,198
5.261355
0.058079
0.049038
0.041608
0.062412
0.846448
0.806468
0.757996
0.706553
0.650368
0.618667
0
0.024958
0.108813
19,198
374
371
51.331551
0.80104
0.007501
0
0.471299
0
0
0.036121
0
0
0
0
0
0.247734
1
0.10574
false
0
0.012085
0
0.129909
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
aaad6995d40e37373700155a55832cfe7f0d5a7b
553
py
Python
tests/games/test_gcd.py
znhv/winsio
4d4e69961285ea3dcebc5ad6358e2d753d6b4f9d
[ "MIT" ]
null
null
null
tests/games/test_gcd.py
znhv/winsio
4d4e69961285ea3dcebc5ad6358e2d753d6b4f9d
[ "MIT" ]
null
null
null
tests/games/test_gcd.py
znhv/winsio
4d4e69961285ea3dcebc5ad6358e2d753d6b4f9d
[ "MIT" ]
null
null
null
from brainstorm.games import gcd def test_generate_question(): actual = gcd.generate_question() assert len(actual) == 2 def test_generate_question_type(): actual = gcd.generate_question() assert isinstance(actual[0], int) assert isinstance(actual[1], int) assert isinstance(actual, tuple) def test_generate_round(): actual = gcd.generate_round() assert len(actual) == 2 def test_generate_round_type(): actual = gcd.generate_round() assert isinstance(actual[0], str) assert isinstance(actual[1], str)
21.269231
37
0.710669
71
553
5.338028
0.295775
0.211082
0.290237
0.121372
0.443272
0.163588
0.163588
0
0
0
0
0.013245
0.180832
553
25
38
22.12
0.8234
0
0
0.375
1
0
0
0
0
0
0
0
0.4375
1
0.25
false
0
0.0625
0
0.3125
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
1
0
1
0
0
0
0
0
0
0
5
aabb92da25602bd2d714987edb76616b12779aaa
17,707
py
Python
hoomd/md/pytest/test_methods.py
XT-Lee/hoomd-blue
0188f56f32c4a3efe0e74a3dc27397d6ec3469b0
[ "BSD-3-Clause" ]
null
null
null
hoomd/md/pytest/test_methods.py
XT-Lee/hoomd-blue
0188f56f32c4a3efe0e74a3dc27397d6ec3469b0
[ "BSD-3-Clause" ]
null
null
null
hoomd/md/pytest/test_methods.py
XT-Lee/hoomd-blue
0188f56f32c4a3efe0e74a3dc27397d6ec3469b0
[ "BSD-3-Clause" ]
null
null
null
import hoomd from hoomd.conftest import pickling_check import pytest from copy import deepcopy from collections import namedtuple paramtuple = namedtuple( 'paramtuple', ['setup_params', 'extra_params', 'changed_params', 'has_rattle', 'method']) def _method_base_params(): method_base_params_list = [] # Start with valid parameters to get the keys and placeholder values langevin_setup_params = {'kT': hoomd.variant.Constant(2.0)} langevin_extra_params = {'alpha': None, 'tally_reservoir_energy': False} langevin_changed_params = { 'kT': hoomd.variant.Ramp(1, 2, 1000000, 2000000), 'alpha': None, 'tally_reservoir_energy': True } langevin_has_rattle = True method_base_params_list.extend([ paramtuple(langevin_setup_params, langevin_extra_params, langevin_changed_params, langevin_has_rattle, hoomd.md.methods.Langevin) ]) brownian_setup_params = {'kT': hoomd.variant.Constant(2.0)} brownian_extra_params = {'alpha': None} brownian_changed_params = { 'kT': hoomd.variant.Ramp(1, 2, 1000000, 2000000), 'alpha': 0.125 } brownian_has_rattle = True method_base_params_list.extend([ paramtuple(brownian_setup_params, brownian_extra_params, brownian_changed_params, brownian_has_rattle, hoomd.md.methods.Brownian) ]) constant_s = [ hoomd.variant.Constant(1.0), hoomd.variant.Constant(2.0), hoomd.variant.Constant(3.0), hoomd.variant.Constant(0.125), hoomd.variant.Constant(.25), hoomd.variant.Constant(.5) ] ramp_s = [ hoomd.variant.Ramp(1.0, 4.0, 1000, 10000), hoomd.variant.Ramp(2.0, 4.0, 1000, 10000), hoomd.variant.Ramp(3.0, 4.0, 1000, 10000), hoomd.variant.Ramp(0.125, 4.0, 1000, 10000), hoomd.variant.Ramp(.25, 4.0, 1000, 10000), hoomd.variant.Ramp(.5, 4.0, 1000, 10000) ] npt_setup_params = { 'kT': hoomd.variant.Constant(2.0), 'tau': 2.0, 'S': constant_s, 'tauS': 2.0, 'box_dof': [True, True, True, False, False, False], 'couple': 'xyz' } npt_extra_params = { 'rescale_all': False, 'gamma': 0.0, 'translational_thermostat_dof': (0.0, 0.0), 'rotational_thermostat_dof': (0.0, 0.0), 'barostat_dof': 
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0) } npt_changed_params = { 'kT': hoomd.variant.Ramp(1, 2, 1000000, 2000000), 'tau': 10.0, 'S': ramp_s, 'tauS': 10.0, 'box_dof': [True, False, False, False, True, False], 'couple': 'none', 'rescale_all': True, 'gamma': 2.0, 'translational_thermostat_dof': (0.125, 0.5), 'rotational_thermostat_dof': (0.5, 0.25), 'barostat_dof': (1.0, 2.0, 4.0, 6.0, 8.0, 10.0) } npt_has_rattle = False method_base_params_list.extend([ paramtuple(npt_setup_params, npt_extra_params, npt_changed_params, npt_has_rattle, hoomd.md.methods.NPT) ]) nvt_setup_params = {'kT': hoomd.variant.Constant(2.0), 'tau': 2.0} nvt_extra_params = {} nvt_changed_params = { 'kT': hoomd.variant.Ramp(1, 2, 1000000, 2000000), 'tau': 10.0, 'translational_thermostat_dof': (0.125, 0.5), 'rotational_thermostat_dof': (0.5, 0.25) } nvt_has_rattle = False method_base_params_list.extend([ paramtuple(nvt_setup_params, nvt_extra_params, nvt_changed_params, nvt_has_rattle, hoomd.md.methods.NVT) ]) nve_setup_params = {} nve_extra_params = {} nve_changed_params = {} nve_has_rattle = True method_base_params_list.extend([ paramtuple(nve_setup_params, nve_extra_params, nve_changed_params, nve_has_rattle, hoomd.md.methods.NVE) ]) return method_base_params_list @pytest.fixture(scope="function", params=_method_base_params(), ids=(lambda x: x[4].__name__)) def method_base_params(request): return deepcopy(request.param) def check_instance_attrs(instance, attr_dict, set_attrs=False): for attr, value in attr_dict.items(): if set_attrs: setattr(instance, attr, value) if hasattr(value, "__iter__") and not isinstance(value, str): assert all(v == instance_v for v, instance_v in zip(value, getattr(instance, attr))) else: assert getattr(instance, attr) == value def test_attributes(method_base_params): all_ = hoomd.filter.All() method = method_base_params.method(**method_base_params.setup_params, filter=all_) assert method.filter is all_ check_instance_attrs(method, method_base_params.setup_params) 
check_instance_attrs(method, method_base_params.extra_params) type_A = hoomd.filter.Type(['A']) method.filter = type_A assert method.filter is type_A check_instance_attrs(method, method_base_params.changed_params, True) def test_attributes_attached(simulation_factory, two_particle_snapshot_factory, method_base_params): all_ = hoomd.filter.All() method = method_base_params.method(**method_base_params.setup_params, filter=all_) sim = simulation_factory(two_particle_snapshot_factory()) sim.operations.integrator = hoomd.md.Integrator(0.005, methods=[method]) sim.run(0) assert method.filter is all_ check_instance_attrs(method, method_base_params.setup_params) check_instance_attrs(method, method_base_params.extra_params) type_A = hoomd.filter.Type(['A']) with pytest.raises(AttributeError): # filter cannot be set after scheduling method.filter = type_A check_instance_attrs(method, method_base_params.changed_params, True) def test_rattle_attributes(method_base_params): if not method_base_params.has_rattle: pytest.skip("RATTLE method is not implemented for this method") all_ = hoomd.filter.All() gyroid = hoomd.md.manifold.Gyroid(N=1) method = method_base_params.method(**method_base_params.setup_params, filter=all_, manifold_constraint=gyroid) assert method.manifold_constraint == gyroid assert method.tolerance == 1e-6 sphere = hoomd.md.manifold.Sphere(r=10) with pytest.raises(AttributeError): method.manifold_constraint = sphere assert method.manifold_constraint == gyroid method.tolerance = 1e-5 assert method.tolerance == 1e-5 def test_rattle_attributes_attached(simulation_factory, two_particle_snapshot_factory, method_base_params): if not method_base_params.has_rattle: pytest.skip("RATTLE integrator is not implemented for this method") all_ = hoomd.filter.All() gyroid = hoomd.md.manifold.Gyroid(N=1) method = method_base_params.method(**method_base_params.setup_params, filter=all_, manifold_constraint=gyroid) sim = simulation_factory(two_particle_snapshot_factory()) 
sim.operations.integrator = hoomd.md.Integrator(0.005, methods=[method]) sim.run(0) assert method.filter is all_ assert method.manifold_constraint == gyroid assert method.tolerance == 1e-6 check_instance_attrs(method, method_base_params.setup_params) check_instance_attrs(method, method_base_params.extra_params) type_A = hoomd.filter.Type(['A']) with pytest.raises(AttributeError): # filter cannot be set after scheduling method.filter = type_A sphere = hoomd.md.manifold.Sphere(r=10) with pytest.raises(AttributeError): # manifold cannot be set after scheduling method.manifold_constraint = sphere assert method.manifold_constraint == gyroid method.tolerance = 1e-5 assert method.tolerance == 1e-5 check_instance_attrs(method, method_base_params.changed_params, True) def test_rattle_missing_manifold(method_base_params): if not method_base_params.has_rattle: pytest.skip("RATTLE method is not implemented for this method") all_ = hoomd.filter.All() with pytest.raises(TypeError): method_base_params.method(**method_base_params.setup_params, filter=all_, tolerance=1e-5) def test_nph_attributes_attached_3d(simulation_factory, two_particle_snapshot_factory): """Test attributes of the NPH integrator after attaching in 3D.""" all_ = hoomd.filter.All() constant_s = [ hoomd.variant.Constant(1.0), hoomd.variant.Constant(2.0), hoomd.variant.Constant(3.0), hoomd.variant.Constant(0.125), hoomd.variant.Constant(.25), hoomd.variant.Constant(.5) ] nph = hoomd.md.methods.NPH(filter=all_, S=constant_s, tauS=2.0, couple='xyz') sim = simulation_factory(two_particle_snapshot_factory()) sim.operations.integrator = hoomd.md.Integrator(0.005, methods=[nph]) sim.run(0) assert nph.filter == all_ assert len(nph.S) == 6 for i in range(6): assert nph.S[i] is constant_s[i] assert nph.tauS == 2.0 assert nph.couple == 'xyz' type_A = hoomd.filter.Type(['A']) with pytest.raises(AttributeError): # filter cannot be set after scheduling nph.filter = type_A assert nph.filter == all_ nph.tauS = 10.0 assert 
nph.tauS == 10.0 nph.box_dof = [True, False, False, False, True, False] assert nph.box_dof == [True, False, False, False, True, False] nph.couple = 'none' assert nph.couple == 'none' nph.rescale_all = True assert nph.rescale_all nph.gamma = 2.0 assert nph.gamma == 2.0 assert nph.barostat_dof == (0.0, 0.0, 0.0, 0.0, 0.0, 0.0) nph.barostat_dof = (1.0, 2.0, 4.0, 6.0, 8.0, 10.0) assert nph.barostat_dof == (1.0, 2.0, 4.0, 6.0, 8.0, 10.0) ramp_s = [ hoomd.variant.Ramp(1.0, 4.0, 1000, 10000), hoomd.variant.Ramp(2.0, 4.0, 1000, 10000), hoomd.variant.Ramp(3.0, 4.0, 1000, 10000), hoomd.variant.Ramp(0.125, 4.0, 1000, 10000), hoomd.variant.Ramp(.25, 4.0, 1000, 10000), hoomd.variant.Ramp(.5, 4.0, 1000, 10000) ] nph.S = ramp_s assert len(nph.S) == 6 for _ in range(5): sim.run(1) for i in range(6): assert nph.S[i] is ramp_s[i] def test_npt_thermalize_thermostat_and_barostat_dof( simulation_factory, two_particle_snapshot_factory): """Tests that NPT.thermalize_thermostat_and_barostat_dof can be called.""" all_ = hoomd.filter.All() constant_t = hoomd.variant.Constant(2.0) constant_s = [1, 2, 3, 0.125, 0.25, 0.5] npt = hoomd.md.methods.NPT(filter=all_, kT=constant_t, tau=2.0, S=constant_s, tauS=2.0, box_dof=[True, True, True, True, True, True], couple='xyz') sim = simulation_factory(two_particle_snapshot_factory()) sim.operations.integrator = hoomd.md.Integrator(0.005, methods=[npt]) sim.run(0) npt.thermalize_thermostat_and_barostat_dof() xi, eta = npt.translational_thermostat_dof assert xi != 0.0 assert eta == 0.0 xi_rot, eta_rot = npt.rotational_thermostat_dof assert xi_rot == 0.0 assert eta_rot == 0.0 for v in npt.barostat_dof: assert v != 0.0 def test_npt_thermalize_thermostat_and_barostat_aniso_dof( simulation_factory, two_particle_snapshot_factory): """Tests that NPT.thermalize_thermostat_and_barostat_dof can be called.""" all_ = hoomd.filter.All() constant_t = hoomd.variant.Constant(2.0) constant_s = [1, 2, 3, 0.125, 0.25, 0.5] npt = hoomd.md.methods.NPT(filter=all_, 
kT=constant_t, tau=2.0, S=constant_s, tauS=2.0, box_dof=[True, True, True, True, True, True], couple='xyz') snap = two_particle_snapshot_factory() if snap.communicator.rank == 0: snap.particles.moment_inertia[:] = [[1, 1, 1], [2, 0, 0]] sim = simulation_factory(snap) sim.operations.integrator = hoomd.md.Integrator(0.005, methods=[npt], aniso=True) sim.run(0) npt.thermalize_thermostat_and_barostat_dof() xi, eta = npt.translational_thermostat_dof assert xi != 0.0 assert eta == 0.0 xi_rot, eta_rot = npt.rotational_thermostat_dof assert xi_rot != 0.0 assert eta_rot == 0.0 for v in npt.barostat_dof: assert v != 0.0 def test_nph_thermalize_barostat_dof(simulation_factory, two_particle_snapshot_factory): """Tests that NPT.thermalize_thermostat_and_barostat_dof can be called.""" all_ = hoomd.filter.All() constant_s = [1, 2, 3, 0.125, 0.25, 0.5] nph = hoomd.md.methods.NPH(filter=all_, S=constant_s, tauS=2.0, box_dof=[True, True, True, True, True, True], couple='xyz') sim = simulation_factory(two_particle_snapshot_factory()) sim.operations.integrator = hoomd.md.Integrator(0.005, methods=[nph]) sim.run(0) nph.thermalize_barostat_dof() for v in nph.barostat_dof: assert v != 0.0 def test_npt_attributes_attached_2d(simulation_factory, two_particle_snapshot_factory): """Test attributes of the NPT integrator specific to 2D simulations.""" all_ = hoomd.filter.All() npt = hoomd.md.methods.NPT(filter=all_, kT=1.0, tau=2.0, S=2.0, tauS=2.0, couple='xy') assert npt.box_dof == [True, True, True, False, False, False] assert npt.couple == 'xy' sim = simulation_factory(two_particle_snapshot_factory(dimensions=2)) sim.operations.integrator = hoomd.md.Integrator(0.005, methods=[npt]) sim.run(0) # after attaching in 2d, only some coupling modes and box dof are valid assert npt.box_dof == [True, True, False, False, False, False] assert npt.couple == 'xy' with pytest.raises(ValueError): npt.couple = 'xyz' with pytest.raises(ValueError): npt.couple = 'xz' with pytest.raises(ValueError): 
npt.couple = 'yz' npt.couple = 'none' assert npt.couple == 'none' npt.box_dof = [True, True, True, True, True, True] assert npt.box_dof == [True, True, False, True, False, False] def test_nvt_thermalize_thermostat_dof(simulation_factory, two_particle_snapshot_factory): """Tests that NVT.thermalize_thermostat_dof can be called.""" all_ = hoomd.filter.All() constant = hoomd.variant.Constant(2.0) nvt = hoomd.md.methods.NVT(filter=all_, kT=constant, tau=2.0) sim = simulation_factory(two_particle_snapshot_factory()) sim.operations.integrator = hoomd.md.Integrator(0.005, methods=[nvt]) sim.run(0) nvt.thermalize_thermostat_dof() xi, eta = nvt.translational_thermostat_dof assert xi != 0.0 assert eta == 0.0 xi_rot, eta_rot = nvt.rotational_thermostat_dof assert xi_rot == 0.0 assert eta_rot == 0.0 def test_nvt_thermalize_thermostat_aniso_dof(simulation_factory, two_particle_snapshot_factory): """Tests that NVT.thermalize_thermostat_dof can be called.""" all_ = hoomd.filter.All() constant = hoomd.variant.Constant(2.0) nvt = hoomd.md.methods.NVT(filter=all_, kT=constant, tau=2.0) snap = two_particle_snapshot_factory() if snap.communicator.rank == 0: snap.particles.moment_inertia[:] = [[1, 1, 1], [2, 0, 0]] sim = simulation_factory(snap) sim.operations.integrator = hoomd.md.Integrator(0.005, methods=[nvt], aniso=True) sim.run(0) nvt.thermalize_thermostat_dof() xi, eta = nvt.translational_thermostat_dof assert xi != 0.0 assert eta == 0.0 xi_rot, eta_rot = nvt.rotational_thermostat_dof assert xi_rot != 0.0 assert eta_rot == 0.0 def test_pickling(method_base_params, simulation_factory, two_particle_snapshot_factory): method = method_base_params.method(**method_base_params.setup_params, filter=hoomd.filter.All()) pickling_check(method) sim = simulation_factory(two_particle_snapshot_factory()) if (method_base_params.method == hoomd.md.methods.Berendsen and sim.device.communicator.num_ranks > 1): pytest.skip("Berendsen method does not support multiple processor " "configurations.") 
integrator = hoomd.md.Integrator(0.05, methods=[method]) sim.operations.integrator = integrator sim.run(0) pickling_check(method)
34.382524
80
0.613825
2,259
17,707
4.578132
0.085879
0.009669
0.063431
0.007735
0.800812
0.770547
0.740863
0.711468
0.690872
0.657513
0
0.045863
0.277179
17,707
514
81
34.449416
0.762169
0.041622
0
0.608586
0
0
0.040995
0.011991
0
0
0
0
0.136364
1
0.040404
false
0
0.012626
0.002525
0.058081
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
aac84a456f9ef16c42d5ea9d4aa223fc71725cbf
22
py
Python
test/test_jaxNN.py
yuvrajiro/jaxNN
96996862e804e8cfe0071667ed7b37e841976b6f
[ "MIT" ]
1
2022-01-15T19:11:01.000Z
2022-01-15T19:11:01.000Z
test/test_jaxNN.py
yuvrajiro/jaxNN
96996862e804e8cfe0071667ed7b37e841976b6f
[ "MIT" ]
null
null
null
test/test_jaxNN.py
yuvrajiro/jaxNN
96996862e804e8cfe0071667ed7b37e841976b6f
[ "MIT" ]
null
null
null
print("Hello Rahul")
7.333333
20
0.681818
3
22
5
1
0
0
0
0
0
0
0
0
0
0
0
0.136364
22
3
20
7.333333
0.789474
0
0
0
0
0
0.52381
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
2a9b753c454fa1caf2de3625247af202955ded34
92
py
Python
taurex/core.py
rychallener/TauREx3_public
eb0eeeeca8f47e5e7d64d8d70b43a3af370b7677
[ "BSD-3-Clause" ]
null
null
null
taurex/core.py
rychallener/TauREx3_public
eb0eeeeca8f47e5e7d64d8d70b43a3af370b7677
[ "BSD-3-Clause" ]
null
null
null
taurex/core.py
rychallener/TauREx3_public
eb0eeeeca8f47e5e7d64d8d70b43a3af370b7677
[ "BSD-3-Clause" ]
null
null
null
from taurex.data.fittable import fitparam, Fittable from taurex.output.output import Output
30.666667
51
0.847826
13
92
6
0.538462
0.25641
0
0
0
0
0
0
0
0
0
0
0.097826
92
2
52
46
0.939759
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
2aa436c0078de4765362cbdba27fd87cf9c0d648
146
py
Python
src/pandas_profiling/report/presentation/flavours/html/html.py
damirazo/pandas-profiling
e436694befc25463073652b4abddc9b9537a555d
[ "MIT" ]
2
2020-01-30T15:01:18.000Z
2020-01-30T15:01:19.000Z
src/pandas_profiling/report/presentation/flavours/html/html.py
damirazo/pandas-profiling
e436694befc25463073652b4abddc9b9537a555d
[ "MIT" ]
null
null
null
src/pandas_profiling/report/presentation/flavours/html/html.py
damirazo/pandas-profiling
e436694befc25463073652b4abddc9b9537a555d
[ "MIT" ]
null
null
null
from pandas_profiling.report.presentation.core.html import HTML class HTMLHTML(HTML): def render(self): return self.content["html"]
20.857143
63
0.732877
19
146
5.578947
0.789474
0
0
0
0
0
0
0
0
0
0
0
0.164384
146
6
64
24.333333
0.868852
0
0
0
0
0
0.027397
0
0
0
0
0
0
1
0.25
false
0
0.25
0.25
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
630c88853bbcda10691e021e9e0169d86f7dcde5
125
py
Python
backend/backend/util/validators.py
snk4tr/style-me
8d24f43b7ca99a942082f59badf3d8372b0d7302
[ "MIT" ]
3
2019-03-18T19:44:38.000Z
2019-09-09T10:51:35.000Z
backend/backend/util/validators.py
snk4tr/style-me
8d24f43b7ca99a942082f59badf3d8372b0d7302
[ "MIT" ]
null
null
null
backend/backend/util/validators.py
snk4tr/style-me
8d24f43b7ca99a942082f59badf3d8372b0d7302
[ "MIT" ]
null
null
null
import numpy as np async def image_is_valid(image: np.ndarray): return image.shape[2] == 3 and image.dtype == np.uint8
20.833333
58
0.712
22
125
3.954545
0.772727
0
0
0
0
0
0
0
0
0
0
0.029126
0.176
125
5
59
25
0.815534
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
6320814f96c730603366b662c5b176273bb94997
239
py
Python
ddpg_curiosity_mc_her/common/__init__.py
yeocy/CDMCH-Pytorch
3fbe00d7607a2081d923e1a4f5dbb413e43dfbae
[ "MIT" ]
12
2019-12-16T16:15:51.000Z
2021-12-14T00:31:35.000Z
ddpg_curiosity_mc_her/common/__init__.py
CDMCH/ddpg-with-curiosity-and-multi-criteria-her
49c3aabe73e19aeec06cde0c3e0b6ab239d04467
[ "MIT" ]
1
2022-01-21T18:14:02.000Z
2022-01-21T18:14:02.000Z
ddpg_curiosity_mc_her/common/__init__.py
CDMCH/ddpg-with-curiosity-and-multi-criteria-her
49c3aabe73e19aeec06cde0c3e0b6ab239d04467
[ "MIT" ]
3
2021-01-18T17:13:53.000Z
2022-03-30T11:11:47.000Z
# flake8: noqa F403 from ddpg_curiosity_mc_her.common.console_util import * from ddpg_curiosity_mc_her.common.dataset import Dataset from ddpg_curiosity_mc_her.common.math_util import * from ddpg_curiosity_mc_her.common.misc_util import *
39.833333
56
0.861925
39
239
4.897436
0.384615
0.167539
0.356021
0.397906
0.691099
0.691099
0.397906
0.397906
0
0
0
0.018265
0.083682
239
5
57
47.8
0.853881
0.07113
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
2d4871941c71e4676994e2a2fb9c5f305f35180e
134
py
Python
venv/Lib/site-packages/pybrain3/rl/environments/twoplayergames/gomokuplayers/__init__.py
ishatserka/MachineLearningAndDataAnalysisCoursera
e82e772df2f4aec162cb34ac6127df10d14a625a
[ "MIT" ]
null
null
null
venv/Lib/site-packages/pybrain3/rl/environments/twoplayergames/gomokuplayers/__init__.py
ishatserka/MachineLearningAndDataAnalysisCoursera
e82e772df2f4aec162cb34ac6127df10d14a625a
[ "MIT" ]
null
null
null
venv/Lib/site-packages/pybrain3/rl/environments/twoplayergames/gomokuplayers/__init__.py
ishatserka/MachineLearningAndDataAnalysisCoursera
e82e772df2f4aec162cb34ac6127df10d14a625a
[ "MIT" ]
null
null
null
from .randomplayer import RandomGomokuPlayer from .killing import KillingGomokuPlayer from .moduledecision import ModuleDecidingPlayer
44.666667
48
0.895522
12
134
10
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.08209
134
3
48
44.666667
0.97561
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
2d9dda88d97baec97da8d11d0215b84dfcba70c1
25
py
Python
plant_classification/test.py
laramaktub/plant-classification-tf-train
49ed70f687829759a7811fb6c60310388e6d100e
[ "MIT" ]
null
null
null
plant_classification/test.py
laramaktub/plant-classification-tf-train
49ed70f687829759a7811fb6c60310388e6d100e
[ "MIT" ]
null
null
null
plant_classification/test.py
laramaktub/plant-classification-tf-train
49ed70f687829759a7811fb6c60310388e6d100e
[ "MIT" ]
1
2018-11-30T20:07:09.000Z
2018-11-30T20:07:09.000Z
import api api.train(3)
6.25
12
0.72
5
25
3.6
0.8
0
0
0
0
0
0
0
0
0
0
0.047619
0.16
25
3
13
8.333333
0.809524
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
2de917ce991adcd124046e202c75914c45be5e30
75
py
Python
labSNMP/wrapper/__init__.py
ypsun-umd/labSNMP
3278b320d0074601e4e8a219e590cd9d1333b78a
[ "BSD-2-Clause" ]
null
null
null
labSNMP/wrapper/__init__.py
ypsun-umd/labSNMP
3278b320d0074601e4e8a219e590cd9d1333b78a
[ "BSD-2-Clause" ]
null
null
null
labSNMP/wrapper/__init__.py
ypsun-umd/labSNMP
3278b320d0074601e4e8a219e590cd9d1333b78a
[ "BSD-2-Clause" ]
null
null
null
#!/usr/bin/env python3 # # Last Change: Tue May 22, 2018 at 10:50 AM -0400
18.75
49
0.666667
15
75
3.333333
1
0
0
0
0
0
0
0
0
0
0
0.245902
0.186667
75
3
50
25
0.57377
0.92
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
9303e52d78fc09f7e9d71ef2305210ce996a3679
39
py
Python
tests/__init__.py
stkterry/collatzpy
1adcaac312fed9761e4de79bd6e97841787630d8
[ "MIT" ]
null
null
null
tests/__init__.py
stkterry/collatzpy
1adcaac312fed9761e4de79bd6e97841787630d8
[ "MIT" ]
null
null
null
tests/__init__.py
stkterry/collatzpy
1adcaac312fed9761e4de79bd6e97841787630d8
[ "MIT" ]
null
null
null
"""Unit test package for collatzpy."""
19.5
38
0.692308
5
39
5.4
1
0
0
0
0
0
0
0
0
0
0
0
0.128205
39
1
39
39
0.794118
0.820513
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
93706fb929d252faf0f030f8f85f7d190685ce76
335
py
Python
secrets.py
jakelerner-czi/vscode
48cefaf3a95766ad53e213637f2acd3a159fa39d
[ "MIT" ]
null
null
null
secrets.py
jakelerner-czi/vscode
48cefaf3a95766ad53e213637f2acd3a159fa39d
[ "MIT" ]
null
null
null
secrets.py
jakelerner-czi/vscode
48cefaf3a95766ad53e213637f2acd3a159fa39d
[ "MIT" ]
null
null
null
# ruleid: detected-aws-secret-access-key aws_secret_access_key = "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY" # ruleid: detected-aws-secret-access-key aws_secret_access_key = "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY" test_twilio_api_key = "SKa9b74728319b15614f09ded410ff5d30" test_twilio_secret = "1gj0hk0Tjb7IoN5ZLwmyGfd70muqpqh8"
33.5
66
0.850746
35
335
7.828571
0.4
0.131387
0.218978
0.262774
0.642336
0.642336
0.642336
0.642336
0.642336
0.642336
0
0.102236
0.065672
335
9
67
37.222222
0.773163
0.229851
0
0.5
0
0
0.572549
0.572549
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
1
1
0
0
0
0
0
1
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
fa7399187baa3faf8ad8eea1578a8f5932bb2c37
111
py
Python
tests/magnet_cli/test_main.py
sasano8/magnet-migrade
b5669b34a6a3b845df8df96dfedaf967df6b88e2
[ "MIT" ]
null
null
null
tests/magnet_cli/test_main.py
sasano8/magnet-migrade
b5669b34a6a3b845df8df96dfedaf967df6b88e2
[ "MIT" ]
4
2021-03-24T23:38:22.000Z
2021-03-31T07:24:30.000Z
tests/magnet_cli/test_main.py
sasano8/magnet-migrade
b5669b34a6a3b845df8df96dfedaf967df6b88e2
[ "MIT" ]
null
null
null
import inspect from typer.testing import CliRunner # from magnet.__main__ import app # runner = CliRunner()
13.875
35
0.774775
14
111
5.857143
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.162162
111
7
36
15.857143
0.88172
0.468468
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
fa868d324f49994322a00f891a662de9b3c24f09
59
py
Python
iris/cython/irispy/__init__.py
openhumanoids/iris-distro
bb98593d70dacd388b8b51d224104d996bee0fd9
[ "BSD-2-Clause" ]
null
null
null
iris/cython/irispy/__init__.py
openhumanoids/iris-distro
bb98593d70dacd388b8b51d224104d996bee0fd9
[ "BSD-2-Clause" ]
null
null
null
iris/cython/irispy/__init__.py
openhumanoids/iris-distro
bb98593d70dacd388b8b51d224104d996bee0fd9
[ "BSD-2-Clause" ]
null
null
null
from iriscore import Polyhedron, Ellipsoid, inflate_region
29.5
58
0.864407
7
59
7.142857
1
0
0
0
0
0
0
0
0
0
0
0
0.101695
59
1
59
59
0.943396
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
fa87864e59b1c2d679954504bb807f5a96929806
66
py
Python
test.py
connor-makowski/spych
81b4a9a59d99b2ddd062060c615d63eec5869067
[ "MIT" ]
null
null
null
test.py
connor-makowski/spych
81b4a9a59d99b2ddd062060c615d63eec5869067
[ "MIT" ]
null
null
null
test.py
connor-makowski/spych
81b4a9a59d99b2ddd062060c615d63eec5869067
[ "MIT" ]
null
null
null
""" Test all functions in spych """ from spych.core import spych
11
28
0.712121
10
66
4.7
0.8
0
0
0
0
0
0
0
0
0
0
0
0.181818
66
5
29
13.2
0.87037
0.409091
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
fab8f535d69c94bc2117e751f6835fe5ab719fd5
110
py
Python
kissim/viewer/__init__.py
volkamerlab/kissim
35198a5efd4b651dd3952bf26ac5098fd1c4dfaa
[ "MIT" ]
15
2020-06-23T14:46:07.000Z
2022-02-03T04:23:56.000Z
kissim/viewer/__init__.py
volkamerlab/kissim
35198a5efd4b651dd3952bf26ac5098fd1c4dfaa
[ "MIT" ]
66
2020-11-05T11:45:21.000Z
2021-12-15T12:11:20.000Z
kissim/viewer/__init__.py
volkamerlab/kissim
35198a5efd4b651dd3952bf26ac5098fd1c4dfaa
[ "MIT" ]
3
2021-02-27T12:56:27.000Z
2022-02-03T04:23:57.000Z
from .structure import StructureViewer from .kinase import KinaseViewer from .pair import StructurePairViewer
27.5
38
0.863636
12
110
7.916667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.109091
110
3
39
36.666667
0.969388
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
fac5aa662f1ec94533fda01e1b398440536b180e
217
py
Python
zbxepics/zbxconfig/apiobjects/__init__.py
sasaki77/zabbix-epics-py
c3371f5b0834492fddabb41317c99126571c59e4
[ "MIT" ]
null
null
null
zbxepics/zbxconfig/apiobjects/__init__.py
sasaki77/zabbix-epics-py
c3371f5b0834492fddabb41317c99126571c59e4
[ "MIT" ]
null
null
null
zbxepics/zbxconfig/apiobjects/__init__.py
sasaki77/zabbix-epics-py
c3371f5b0834492fddabb41317c99126571c59e4
[ "MIT" ]
null
null
null
from .hostgroup import HostGroup from .host import Host from .item import Item from .application import Application from .trigger import Trigger from .hostinterface import HostInterface from .template import Template
27.125
40
0.83871
28
217
6.5
0.321429
0
0
0
0
0
0
0
0
0
0
0
0.129032
217
7
41
31
0.962963
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
87a2fbd74aa2894826db4f0959753950925694d8
147
py
Python
locale/pot/api/core/_autosummary/pyvista-Camera-thickness-1.py
tkoyama010/pyvista-doc-translations
23bb813387b7f8bfe17e86c2244d5dd2243990db
[ "MIT" ]
4
2020-08-07T08:19:19.000Z
2020-12-04T09:51:11.000Z
locale/pot/api/core/_autosummary/pyvista-Camera-thickness-1.py
tkoyama010/pyvista-doc-translations
23bb813387b7f8bfe17e86c2244d5dd2243990db
[ "MIT" ]
19
2020-08-06T00:24:30.000Z
2022-03-30T19:22:24.000Z
locale/pot/api/core/_autosummary/pyvista-Camera-thickness-1.py
tkoyama010/pyvista-doc-translations
23bb813387b7f8bfe17e86c2244d5dd2243990db
[ "MIT" ]
1
2021-03-09T07:50:40.000Z
2021-03-09T07:50:40.000Z
import pyvista pl = pyvista.Plotter() pl.camera.thickness # Expected: ## 1000.0 pl.camera.thickness = 100 pl.camera.thickness # Expected: ## 100.0
14.7
25
0.734694
21
147
5.142857
0.47619
0.222222
0.472222
0.462963
0
0
0
0
0
0
0
0.093023
0.122449
147
9
26
16.333333
0.744186
0.217687
0
0.4
0
0
0
0
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
87abb09ab57fda84fdb3b7724f337761bb72c8c8
58
py
Python
moview/ui/__init__.py
pankajp/moview
0b6f9b6c000246cc1a33f0f7637c6db5c182dd93
[ "MIT" ]
1
2019-10-25T12:40:06.000Z
2019-10-25T12:40:06.000Z
moview/ui/__init__.py
pankajp/moview
0b6f9b6c000246cc1a33f0f7637c6db5c182dd93
[ "MIT" ]
null
null
null
moview/ui/__init__.py
pankajp/moview
0b6f9b6c000246cc1a33f0f7637c6db5c182dd93
[ "MIT" ]
null
null
null
# initialize the qt api before any ui loading import qtpy
19.333333
45
0.793103
10
58
4.6
1
0
0
0
0
0
0
0
0
0
0
0
0.189655
58
2
46
29
0.978723
0.741379
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
87added5da3f00be8a0fa2033f318b77346edcba
5,848
py
Python
migrations/versions/28d6724efeb8_add_board_management_module_tables.py
louking/members
ee204211812e00945f9e2b09cfa130cc9d3e6558
[ "Apache-2.0" ]
1
2020-12-07T02:52:01.000Z
2020-12-07T02:52:01.000Z
migrations/versions/28d6724efeb8_add_board_management_module_tables.py
louking/members
ee204211812e00945f9e2b09cfa130cc9d3e6558
[ "Apache-2.0" ]
496
2020-02-12T15:48:26.000Z
2022-03-23T11:17:27.000Z
migrations/versions/28d6724efeb8_add_board_management_module_tables.py
louking/members
ee204211812e00945f9e2b09cfa130cc9d3e6558
[ "Apache-2.0" ]
null
null
null
"""add board management module tables Revision ID: 28d6724efeb8 Revises: b890ed51cfa6 Create Date: 2020-06-04 16:13:09.220452 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '28d6724efeb8' down_revision = 'b890ed51cfa6' branch_labels = None depends_on = None def upgrade(engine_name): globals()["upgrade_%s" % engine_name]() def downgrade(engine_name): globals()["downgrade_%s" % engine_name]() def upgrade_(): # ### commands auto generated by Alembic - please adjust! ### op.create_table('doctemplate', sa.Column('id', sa.Integer(), nullable=False), sa.Column('interest_id', sa.Integer(), nullable=True), sa.Column('templatename', sa.String(length=32), nullable=True), sa.Column('template', sa.Text(), nullable=True), sa.Column('version_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['interest_id'], ['localinterest.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('meeting', sa.Column('id', sa.Integer(), nullable=False), sa.Column('interest_id', sa.Integer(), nullable=True), sa.Column('purpose', sa.String(length=512), nullable=True), sa.Column('date', sa.Date(), nullable=True), sa.Column('version_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['interest_id'], ['localinterest.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('tag', sa.Column('id', sa.Integer(), nullable=False), sa.Column('interest_id', sa.Integer(), nullable=True), sa.Column('tag', sa.String(length=32), nullable=True), sa.Column('description', sa.String(length=512), nullable=True), sa.Column('version_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['interest_id'], ['localinterest.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('agendaitem', sa.Column('id', sa.Integer(), nullable=False), sa.Column('interest_id', sa.Integer(), nullable=True), sa.Column('meeting_id', sa.Integer(), nullable=True), sa.Column('order', sa.Integer(), nullable=True), sa.Column('title', sa.String(length=128), 
nullable=True), sa.Column('agendaitem', sa.Text(), nullable=True), sa.Column('version_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['interest_id'], ['localinterest.id'], ), sa.ForeignKeyConstraint(['meeting_id'], ['meeting.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('invite', sa.Column('id', sa.Integer(), nullable=False), sa.Column('interest_id', sa.Integer(), nullable=True), sa.Column('user_id', sa.Integer(), nullable=True), sa.Column('meeting_id', sa.Integer(), nullable=True), sa.Column('attending', sa.Boolean(), nullable=True), sa.Column('version_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['interest_id'], ['localinterest.id'], ), sa.ForeignKeyConstraint(['meeting_id'], ['meeting.id'], ), sa.ForeignKeyConstraint(['user_id'], ['localuser.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('localuser_tag', sa.Column('localuser_id', sa.Integer(), nullable=True), sa.Column('tag_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['localuser_id'], ['localuser.id'], ), sa.ForeignKeyConstraint(['tag_id'], ['tag.id'], ) ) op.create_table('position_tag', sa.Column('position_id', sa.Integer(), nullable=True), sa.Column('tag_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['position_id'], ['position.id'], ), sa.ForeignKeyConstraint(['tag_id'], ['tag.id'], ) ) op.create_table('statusreport', sa.Column('id', sa.Integer(), nullable=False), sa.Column('interest_id', sa.Integer(), nullable=True), sa.Column('meeting_id', sa.Integer(), nullable=True), sa.Column('order', sa.Integer(), nullable=True), sa.Column('title', sa.String(length=128), nullable=True), sa.Column('statusreport', sa.Text(), nullable=True), sa.Column('version_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['interest_id'], ['localinterest.id'], ), sa.ForeignKeyConstraint(['meeting_id'], ['meeting.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('actionitem', sa.Column('id', sa.Integer(), nullable=False), 
sa.Column('interest_id', sa.Integer(), nullable=True), sa.Column('meeting_id', sa.Integer(), nullable=True), sa.Column('agendaitem_id', sa.Integer(), nullable=True), sa.Column('order', sa.Integer(), nullable=True), sa.Column('assignee_id', sa.Integer(), nullable=True), sa.Column('action', sa.Text(), nullable=True), sa.Column('status', sa.Enum('open', 'inprogress', 'closed'), nullable=True), sa.Column('comments', sa.Text(), nullable=True), sa.Column('version_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['agendaitem_id'], ['agendaitem.id'], ), sa.ForeignKeyConstraint(['assignee_id'], ['localuser.id'], ), sa.ForeignKeyConstraint(['interest_id'], ['localinterest.id'], ), sa.ForeignKeyConstraint(['meeting_id'], ['meeting.id'], ), sa.PrimaryKeyConstraint('id') ) # ### end Alembic commands ### def downgrade_(): # ### commands auto generated by Alembic - please adjust! ### op.drop_table('actionitem') op.drop_table('statusreport') op.drop_table('position_tag') op.drop_table('localuser_tag') op.drop_table('invite') op.drop_table('agendaitem') op.drop_table('tag') op.drop_table('meeting') op.drop_table('doctemplate') # ### end Alembic commands ### def upgrade_users(): # ### commands auto generated by Alembic - please adjust! ### pass # ### end Alembic commands ### def downgrade_users(): # ### commands auto generated by Alembic - please adjust! ### pass # ### end Alembic commands ###
38.473684
80
0.663304
701
5,848
5.422254
0.134094
0.103131
0.156538
0.173639
0.749013
0.726388
0.719547
0.701131
0.644041
0.644041
0
0.012375
0.143297
5,848
151
81
38.728477
0.746307
0.080369
0
0.461538
0
0
0.196348
0
0
0
0
0
0
1
0.051282
false
0.017094
0.017094
0
0.068376
0
0
0
0
null
0
0
1
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
87c81608072836e6ceafe76088dd3d7745776fe7
204
py
Python
transifex/api/jsonapi/__init__.py
transifex/transifex-python
d467e82bba7f0d620a021cf9e7e58c987ba2fbb5
[ "Apache-2.0" ]
14
2020-04-10T20:54:59.000Z
2022-03-07T16:13:22.000Z
transifex/api/jsonapi/__init__.py
transifex/transifex-python
d467e82bba7f0d620a021cf9e7e58c987ba2fbb5
[ "Apache-2.0" ]
60
2020-04-14T12:41:06.000Z
2022-03-29T06:38:09.000Z
transifex/api/jsonapi/__init__.py
transifex/transifex-python
d467e82bba7f0d620a021cf9e7e58c987ba2fbb5
[ "Apache-2.0" ]
6
2021-01-01T10:28:11.000Z
2021-06-10T09:50:26.000Z
from .apis import JsonApi # noqa from .exceptions import (DoesNotExist, JsonApiException, # noqa MultipleObjectsReturned, NotSingleItem) from .resources import Resource # noqa
40.8
64
0.705882
18
204
8
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.240196
204
4
65
51
0.929032
0.068627
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.75
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
87d168a4a35381da4f4f296dec8c56523a1387c1
351
py
Python
hooks/post_gen_project.py
alextd/cookiecutter-rimworld-mod-development
771cdf4e75d89af22bae19d650cf46f8d1b24dd3
[ "MIT" ]
null
null
null
hooks/post_gen_project.py
alextd/cookiecutter-rimworld-mod-development
771cdf4e75d89af22bae19d650cf46f8d1b24dd3
[ "MIT" ]
null
null
null
hooks/post_gen_project.py
alextd/cookiecutter-rimworld-mod-development
771cdf4e75d89af22bae19d650cf46f8d1b24dd3
[ "MIT" ]
3
2018-09-08T10:33:16.000Z
2020-04-20T17:45:41.000Z
import os #About is always made os.makedirs("{{cookiecutter.mod_name}}/Defs", exist_ok=True) os.makedirs("{{cookiecutter.mod_name}}/Patches", exist_ok=True) os.makedirs("{{cookiecutter.mod_name}}/Languages", exist_ok=True) os.makedirs("{{cookiecutter.mod_name}}/Sounds", exist_ok=True) os.makedirs("{{cookiecutter.mod_name}}/Textures", exist_ok=True)
43.875
65
0.766382
51
351
5.078431
0.352941
0.19305
0.42471
0.482625
0.72973
0.617761
0.617761
0.617761
0
0
0
0
0.045584
351
8
66
43.875
0.773134
0.05698
0
0
0
0
0.495468
0.495468
0
0
0
0
0
1
0
true
0
0.166667
0
0.166667
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
e20b09310c3844f1fbb6198b6a1d0c13406b38f9
94
py
Python
dummy_test_module/not_imported.py
vgmartinez/luigi
b5ad3eba1501bdc25e91e98901bc781128f2d8a7
[ "Apache-2.0" ]
2
2017-12-12T00:09:35.000Z
2020-06-16T12:29:48.000Z
dummy_test_module/not_imported.py
vgmartinez/luigi
b5ad3eba1501bdc25e91e98901bc781128f2d8a7
[ "Apache-2.0" ]
5
2015-01-22T06:54:59.000Z
2021-01-13T23:09:09.000Z
dummy_test_module/not_imported.py
vgmartinez/luigi
b5ad3eba1501bdc25e91e98901bc781128f2d8a7
[ "Apache-2.0" ]
1
2021-11-01T15:11:20.000Z
2021-11-01T15:11:20.000Z
import luigi class UnimportedTask(luigi.Task): def complete(self): return False
13.428571
33
0.691489
11
94
5.909091
0.909091
0
0
0
0
0
0
0
0
0
0
0
0.234043
94
6
34
15.666667
0.902778
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.5
0.25
1.25
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
0
0
0
5
e216929a83c030bc9e5b33f507573ed14643f7a5
101
py
Python
module/test.py
misizeji/python_study_notes
b0f5a224ac65f962c6255ab7cf8a1912704d1617
[ "MIT" ]
null
null
null
module/test.py
misizeji/python_study_notes
b0f5a224ac65f962c6255ab7cf8a1912704d1617
[ "MIT" ]
1
2018-05-23T06:35:37.000Z
2018-05-23T06:36:12.000Z
module/test.py
misizeji/python_study_notes
b0f5a224ac65f962c6255ab7cf8a1912704d1617
[ "MIT" ]
null
null
null
#!/usr/bin/python3 # Filename: test.py # import module import support support.print_func("Runoob")
12.625
28
0.742574
14
101
5.285714
0.857143
0
0
0
0
0
0
0
0
0
0
0.011236
0.118812
101
7
29
14.428571
0.820225
0.485149
0
0
0
0
0.122449
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
357acb457395c8bd780850c33f7ebfc3617ffbbd
497
py
Python
dataproperty/_interface.py
thombashi/DataProperty
ed894483882ef81bd7796c2de4a966b548167c65
[ "MIT" ]
18
2016-02-20T04:16:07.000Z
2022-01-05T20:24:51.000Z
dataproperty/_interface.py
thombashi/DataProperty
ed894483882ef81bd7796c2de4a966b548167c65
[ "MIT" ]
13
2016-03-20T12:11:50.000Z
2020-01-01T06:43:49.000Z
dataproperty/_interface.py
thombashi/DataProperty
ed894483882ef81bd7796c2de4a966b548167c65
[ "MIT" ]
5
2016-05-24T21:09:27.000Z
2019-12-02T21:00:00.000Z
""" .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ import abc class DataPeropertyInterface(metaclass=abc.ABCMeta): __slots__ = () @abc.abstractproperty def align(self): # pragma: no cover pass @abc.abstractproperty def decimal_places(self): # pragma: no cover pass @abc.abstractproperty def typecode(self): # pragma: no cover pass @abc.abstractproperty def typename(self): # pragma: no cover pass
19.115385
63
0.645875
52
497
6.076923
0.461538
0.240506
0.278481
0.21519
0.474684
0.408228
0.408228
0.408228
0
0
0
0
0.251509
497
25
64
19.88
0.849462
0.265594
0
0.533333
0
0
0
0
0
0
0
0
0
1
0.266667
false
0.266667
0.066667
0
0.466667
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
5
35a41e651d473f25886cccb375827902547f76c0
110
py
Python
src/py/statiskit/pgm/__init__.py
StatisKit/PGM
1a82025003a705c668a9ff0ce170457ff40d37c2
[ "Apache-2.0" ]
1
2021-06-10T04:25:00.000Z
2021-06-10T04:25:00.000Z
src/py/statiskit/pgm/__init__.py
StatisKit/PGM
1a82025003a705c668a9ff0ce170457ff40d37c2
[ "Apache-2.0" ]
null
null
null
src/py/statiskit/pgm/__init__.py
StatisKit/PGM
1a82025003a705c668a9ff0ce170457ff40d37c2
[ "Apache-2.0" ]
5
2017-05-02T06:20:42.000Z
2021-03-15T18:34:12.000Z
from graph import * from distribution import * from process import * from estimation import * from io import *
22
26
0.781818
15
110
5.733333
0.466667
0.465116
0
0
0
0
0
0
0
0
0
0
0.172727
110
5
27
22
0.945055
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5