hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
602a749ac576513e76eee6bac1e2e15eeb11e64d
55
py
Python
src/pyEanGenerator/EanGenerator/__init__.py
Gegelascience/pyean
64c0d321fb798d88020f76562a6bfa9a5c360b48
[ "MIT" ]
null
null
null
src/pyEanGenerator/EanGenerator/__init__.py
Gegelascience/pyean
64c0d321fb798d88020f76562a6bfa9a5c360b48
[ "MIT" ]
null
null
null
src/pyEanGenerator/EanGenerator/__init__.py
Gegelascience/pyean
64c0d321fb798d88020f76562a6bfa9a5c360b48
[ "MIT" ]
null
null
null
from .EanGenerator import Ean13Generator, Ean8Generator
55
55
0.890909
5
55
9.8
1
0
0
0
0
0
0
0
0
0
0
0.058824
0.072727
55
1
55
55
0.901961
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
603f2102e7dda9c2e54c1c70af7b98c94abe350a
26
py
Python
python/testData/inheritors/dotted/A.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/inheritors/dotted/A.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/inheritors/dotted/A.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
class A(object): pass
8.666667
16
0.615385
4
26
4
1
0
0
0
0
0
0
0
0
0
0
0
0.269231
26
3
17
8.666667
0.842105
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
606002a45a42299834fc7057b11a717c9e8c96fa
108
py
Python
tests/data/make_jit.py
jlarmstrongiv/libtorchjs
d2466337db2707157f8b402dbefd80e84466e3a1
[ "BSD-3-Clause" ]
20
2019-01-08T00:45:44.000Z
2021-05-21T23:38:26.000Z
tests/data/make_jit.py
jlarmstrongiv/libtorchjs
d2466337db2707157f8b402dbefd80e84466e3a1
[ "BSD-3-Clause" ]
5
2020-02-19T14:39:00.000Z
2021-04-18T20:33:49.000Z
tests/data/make_jit.py
jlarmstrongiv/libtorchjs
d2466337db2707157f8b402dbefd80e84466e3a1
[ "BSD-3-Clause" ]
4
2019-07-14T10:50:08.000Z
2021-04-10T20:19:52.000Z
import torch def mul2(x): return x * 2 torch.jit.trace(mul2, torch.randn(3, 3)).save("mul2.pt")
18
58
0.611111
19
108
3.473684
0.684211
0
0
0
0
0
0
0
0
0
0
0.070588
0.212963
108
6
58
18
0.705882
0
0
0
0
0
0.067308
0
0
0
0
0
0
1
0.25
false
0
0.25
0.25
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
609d149f4effb22ae367216072dcceb4aa2a51c0
184
py
Python
causaldag/structure_learning/dag/__init__.py
jcai117/causaldag
94933db7086d21dad1f9d0ea64d538b0365c8add
[ "BSD-3-Clause" ]
3
2021-09-12T13:41:23.000Z
2022-03-24T14:44:04.000Z
causaldag/structure_learning/dag/__init__.py
jcai117/causaldag
94933db7086d21dad1f9d0ea64d538b0365c8add
[ "BSD-3-Clause" ]
null
null
null
causaldag/structure_learning/dag/__init__.py
jcai117/causaldag
94933db7086d21dad1f9d0ea64d538b0365c8add
[ "BSD-3-Clause" ]
2
2021-02-03T04:05:41.000Z
2021-09-12T13:41:26.000Z
from .gsp import permutation2dag, gsp, jci_gsp, igsp, unknown_target_igsp, min_degree_alg_amat, update_minimal_imap, sparsest_permutation from .pcalg import pcalg from .ges import ges
46
137
0.842391
28
184
5.214286
0.678571
0
0
0
0
0
0
0
0
0
0
0.006061
0.103261
184
3
138
61.333333
0.878788
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
7154761d6381d18414b7b9dd026607519aa0d886
43
py
Python
ABC_A/ABC003_A.py
ryosuke0825/atcoder_python
185cdbe7db44ecca1aaf357858d16d31ce515ddb
[ "MIT" ]
null
null
null
ABC_A/ABC003_A.py
ryosuke0825/atcoder_python
185cdbe7db44ecca1aaf357858d16d31ce515ddb
[ "MIT" ]
null
null
null
ABC_A/ABC003_A.py
ryosuke0825/atcoder_python
185cdbe7db44ecca1aaf357858d16d31ce515ddb
[ "MIT" ]
null
null
null
n = int(input()) print(int((n+1)/2*10000))
14.333333
25
0.581395
9
43
2.777778
0.777778
0
0
0
0
0
0
0
0
0
0
0.179487
0.093023
43
2
26
21.5
0.461538
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
5
7170886e836d736a054d996e037b4619b2d5958a
226
py
Python
crosswalk/hosts/mx/hierarchy.py
mikemalinowski/crosswalk
9e1c49fcfd3a4a38e24e59660d06c2d903cc3ff4
[ "MIT" ]
1
2019-03-09T12:55:31.000Z
2019-03-09T12:55:31.000Z
crosswalk/hosts/pm/hierarchy.py
mikemalinowski/crosswalk
9e1c49fcfd3a4a38e24e59660d06c2d903cc3ff4
[ "MIT" ]
null
null
null
crosswalk/hosts/pm/hierarchy.py
mikemalinowski/crosswalk
9e1c49fcfd3a4a38e24e59660d06c2d903cc3ff4
[ "MIT" ]
null
null
null
import crosswalk def children(element, wildcard=None, recursive=None): return None def parent(element, index=0): return None def add_child(element, child): return None def set_parent(element, parent): return None
11.894737
53
0.752212
32
226
5.25
0.46875
0.238095
0.232143
0
0
0
0
0
0
0
0
0.005263
0.159292
226
18
54
12.555556
0.878947
0
0
0.444444
0
0
0
0
0
0
0
0
0
1
0.444444
false
0
0.111111
0.444444
1
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
7172a57607b1a690f135ec16d28f5f62e303a957
251
py
Python
froide/bounce/signals.py
manonthemat/froide
698c49935eaf2e922f3c9f6a46af0fd545ccbbbb
[ "MIT" ]
null
null
null
froide/bounce/signals.py
manonthemat/froide
698c49935eaf2e922f3c9f6a46af0fd545ccbbbb
[ "MIT" ]
null
null
null
froide/bounce/signals.py
manonthemat/froide
698c49935eaf2e922f3c9f6a46af0fd545ccbbbb
[ "MIT" ]
null
null
null
from django.dispatch import Signal user_email_bounced = Signal(providing_args=['bounce', 'should_deactivate']) email_bounced = Signal(providing_args=['bounce', 'should_deactivate']) email_unsubscribed = Signal(providing_args=['email', 'reference'])
35.857143
75
0.792829
29
251
6.551724
0.517241
0.236842
0.3
0.284211
0.584211
0.584211
0.584211
0.584211
0.584211
0
0
0
0.071713
251
6
76
41.833333
0.815451
0
0
0
0
0
0.239044
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
718fa5a23d6369ef32268fbf62c13995bf1055a4
42
py
Python
pacmen_env/gym_foo/envs/__init__.py
lich14/CDS
1f50446fb17e0793c91157e0cdf8754d4a040e5c
[ "Apache-2.0" ]
35
2021-06-04T01:54:42.000Z
2022-03-25T07:08:41.000Z
CDS_GRF/env/gym-foo/gym_foo/envs/__init__.py
ltzheng/CDS
397282147498647a9f26577adfa451e8478de76d
[ "Apache-2.0" ]
6
2021-12-23T12:11:20.000Z
2022-03-01T08:12:32.000Z
CDS_GRF/env/gym-foo/gym_foo/envs/__init__.py
ltzheng/CDS
397282147498647a9f26577adfa451e8478de76d
[ "Apache-2.0" ]
10
2021-06-13T08:07:29.000Z
2022-01-23T03:33:50.000Z
from gym_foo.envs.pac_men import CustomEnv
42
42
0.880952
8
42
4.375
1
0
0
0
0
0
0
0
0
0
0
0
0.071429
42
1
42
42
0.897436
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
71a64fda08400fe346b34da25855379831494d5e
305
py
Python
django_workflow_system/api/tests/factories/workflows/__init__.py
eikonomega/django-workflow-system
dc0e8807263266713d3d7fa46e240e8d72db28d1
[ "MIT" ]
2
2022-01-28T12:35:42.000Z
2022-03-23T16:06:05.000Z
django_workflow_system/api/tests/factories/workflows/__init__.py
eikonomega/django-workflow-system
dc0e8807263266713d3d7fa46e240e8d72db28d1
[ "MIT" ]
10
2021-04-27T20:26:32.000Z
2021-07-21T15:34:31.000Z
django_workflow_system/api/tests/factories/workflows/__init__.py
eikonomega/django-workflow-system
dc0e8807263266713d3d7fa46e240e8d72db28d1
[ "MIT" ]
1
2021-11-13T14:30:34.000Z
2021-11-13T14:30:34.000Z
from .assignment import * from .authors import * from .metadata import * from .engagement import * from .json_schema import * from .step import * from .subscription import * from .workflow_collection import * from .workflows import * from .workflow_collection_image import * from .recommendation import *
25.416667
40
0.783607
37
305
6.351351
0.405405
0.425532
0.153191
0.238298
0
0
0
0
0
0
0
0
0.144262
305
11
41
27.727273
0.900383
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
71a8000418f5266f64dbbc393acb655738b7fa5e
127
py
Python
src/meru/exceptions.py
noname-cyber/meru
cf06f3bae01b1721babeb2f8bcbda165c638091e
[ "MIT" ]
null
null
null
src/meru/exceptions.py
noname-cyber/meru
cf06f3bae01b1721babeb2f8bcbda165c638091e
[ "MIT" ]
null
null
null
src/meru/exceptions.py
noname-cyber/meru
cf06f3bae01b1721babeb2f8bcbda165c638091e
[ "MIT" ]
null
null
null
class ActionException(Exception): pass class PingTimeout(Exception): pass class MeruException(Exception): pass
11.545455
33
0.732283
12
127
7.75
0.5
0.419355
0.387097
0
0
0
0
0
0
0
0
0
0.19685
127
10
34
12.7
0.911765
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
71da650e84bf8791c71bc3610914d61009cfd920
66
py
Python
phoenix/outages/utils.py
kiwicom/phoenix
19bfbd23387e7d054780cfba9dbb385525aa7d80
[ "MIT" ]
8
2019-03-03T22:46:06.000Z
2021-12-14T20:26:28.000Z
phoenix/outages/utils.py
kiwicom/phoenix
19bfbd23387e7d054780cfba9dbb385525aa7d80
[ "MIT" ]
250
2018-11-21T09:21:11.000Z
2021-09-22T17:48:07.000Z
phoenix/outages/utils.py
kiwicom/phoenix
19bfbd23387e7d054780cfba9dbb385525aa7d80
[ "MIT" ]
1
2021-02-16T08:01:04.000Z
2021-02-16T08:01:04.000Z
def format_datetime(dt): return dt.strftime("%Y-%m-%d %H:%M")
22
40
0.621212
12
66
3.333333
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.136364
66
2
41
33
0.701754
0
0
0
0
0
0.212121
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
e0b074dfea47675a998f2ee09c96d8c1e7228a20
2,883
py
Python
test/known_issues/test_augment_with_executor.py
jimbozhang/lhotse
85a5c8e9d9827f457ba33432bfe37f44ca28365a
[ "Apache-2.0" ]
1
2020-10-02T02:42:25.000Z
2020-10-02T02:42:25.000Z
test/known_issues/test_augment_with_executor.py
jimbozhang/lhotse
85a5c8e9d9827f457ba33432bfe37f44ca28365a
[ "Apache-2.0" ]
2
2020-11-05T11:44:17.000Z
2021-04-08T11:38:48.000Z
test/known_issues/test_augment_with_executor.py
jimbozhang/lhotse
85a5c8e9d9827f457ba33432bfe37f44ca28365a
[ "Apache-2.0" ]
null
null
null
import multiprocessing import sys from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor from functools import partial from tempfile import TemporaryDirectory import pytest from lhotse import CutSet, Fbank from lhotse.testing.fixtures import RandomCutTestCase torchaudio = pytest.importorskip('torchaudio', minversion='0.7.1') class TestAugmentationWithExecutor(RandomCutTestCase): @pytest.mark.parametrize( 'exec_type', [ # Multithreading works ThreadPoolExecutor, # Multiprocessing works, but only when using the "spawn" context (in testing) pytest.param( partial(ProcessPoolExecutor, mp_context=multiprocessing.get_context("spawn")), marks=pytest.mark.skipif( sys.version_info[0] == 3 and sys.version_info[1] < 7, reason="The mp_context argument is introduced in Python 3.7" ) ), ] ) def test_wav_augment_with_executor(self, exec_type): cut = self.with_cut(sampling_rate=16000, num_samples=16000) with TemporaryDirectory() as d, \ exec_type(max_workers=4) as ex: cut_set_speed = CutSet.from_cuts( cut.with_id(str(i)) for i in range(100) ).perturb_speed(1.1) # perturb_speed uses torchaudio SoX effect that could hang # Just test that it runs and does not hang. 
cut_set_speed_feats = cut_set_speed.compute_and_store_features( extractor=Fbank(), storage_path=d, executor=ex ) @pytest.mark.parametrize( 'exec_type', [ # Multithreading works ThreadPoolExecutor, # Multiprocessing works, but only when using the "spawn" context (in testing) pytest.param( partial(ProcessPoolExecutor, mp_context=multiprocessing.get_context("spawn")), marks=pytest.mark.skipif( sys.version_info[0] == 3 and sys.version_info[1] < 7, reason="The mp_context argument is introduced in Python 3.7" ) ), ] ) def test_wav_augment_with_executor(self, exec_type): cut = self.with_cut(sampling_rate=16000, num_samples=16000) with TemporaryDirectory() as d, \ exec_type(max_workers=4) as ex: cut_set_volume = CutSet.from_cuts( cut.with_id(str(i)) for i in range(100) ).perturb_volume(0.125) # perturb_volume uses torchaudio SoX effect that could hang # Just test that it runs and does not hang. cut_set_volume_feats = cut_set_volume.compute_and_store_features( extractor=Fbank(), storage_path=d, executor=ex )
39.493151
96
0.610128
324
2,883
5.237654
0.320988
0.028285
0.032999
0.029464
0.74013
0.74013
0.74013
0.74013
0.74013
0.74013
0
0.024785
0.314256
2,883
72
97
40.041667
0.833586
0.135969
0
0.566667
0
0
0.058421
0
0
0
0
0
0
1
0.033333
false
0
0.15
0
0.2
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
e0c22a0dda664578591f28d034256ddcc240e7f1
60
py
Python
snappy/__init__.py
connor-mccarthy/snappy
58b890ddffe46ea2de37c40bf8ab971788649b17
[ "MIT" ]
null
null
null
snappy/__init__.py
connor-mccarthy/snappy
58b890ddffe46ea2de37c40bf8ab971788649b17
[ "MIT" ]
null
null
null
snappy/__init__.py
connor-mccarthy/snappy
58b890ddffe46ea2de37c40bf8ab971788649b17
[ "MIT" ]
null
null
null
from snappy.lambda_handler import set_handler # noqa: F401
30
59
0.816667
9
60
5.222222
0.888889
0
0
0
0
0
0
0
0
0
0
0.057692
0.133333
60
1
60
60
0.846154
0.166667
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
e0d222398fd24e0c2066400c6b0c553e03164d4b
232
py
Python
meli_challenge/core/input/__init__.py
rafaelleinio/meli-challenge
72391cc24a502a199523f204779d4fe928c7b10d
[ "MIT" ]
1
2021-02-28T02:12:37.000Z
2021-02-28T02:12:37.000Z
meli_challenge/core/input/__init__.py
rafaelleinio/meli-challenge
72391cc24a502a199523f204779d4fe928c7b10d
[ "MIT" ]
null
null
null
meli_challenge/core/input/__init__.py
rafaelleinio/meli-challenge
72391cc24a502a199523f204779d4fe928c7b10d
[ "MIT" ]
null
null
null
"""input module.""" from meli_challenge.core.input.csv_input import CsvInput from meli_challenge.core.input.input import Input from meli_challenge.core.input.json_input import JsonInput __all__ = ["CsvInput", "JsonInput", "Input"]
33.142857
58
0.797414
32
232
5.5
0.375
0.136364
0.289773
0.357955
0.443182
0
0
0
0
0
0
0
0.086207
232
6
59
38.666667
0.830189
0.056034
0
0
0
0
0.103286
0
0
0
0
0
0
1
0
false
0
0.75
0
0.75
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
460d54cc3e33921ad6dbd7e08ed88e9b9a4d8d09
122
py
Python
proxy_pool/__init__.py
ggqshr/proxy_pool
469bd96e7525d1bfbbcf4a349618d1a6e63b1596
[ "MIT" ]
null
null
null
proxy_pool/__init__.py
ggqshr/proxy_pool
469bd96e7525d1bfbbcf4a349618d1a6e63b1596
[ "MIT" ]
null
null
null
proxy_pool/__init__.py
ggqshr/proxy_pool
469bd96e7525d1bfbbcf4a349618d1a6e63b1596
[ "MIT" ]
null
null
null
from .ip_pool import IpPool from .xun_proxy import XunProxy from .data5u_proxy import Data5UProxy name = "ggq_proxy_pool"
24.4
37
0.827869
19
122
5.052632
0.631579
0.229167
0
0
0
0
0
0
0
0
0
0.018692
0.122951
122
4
38
30.5
0.878505
0
0
0
0
0
0.114754
0
0
0
0
0
0
1
0
false
0
0.75
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
460f21e9ad47afaf2f8ef480b3daae23e96a3831
118
py
Python
docker/sds-api/spine-directory-service/sds/request/http_headers.py
NHSDigital/elecronic-prescriptions-service-api
41fb4b02cfdb674b2a787dd2d8dd698a8c7a59b0
[ "MIT" ]
2
2020-08-18T09:23:09.000Z
2020-11-23T11:43:27.000Z
docker/sds-api/spine-directory-service/sds/request/http_headers.py
NHSDigital/elecronic-prescriptions-service-api
41fb4b02cfdb674b2a787dd2d8dd698a8c7a59b0
[ "MIT" ]
179
2020-07-01T08:53:50.000Z
2022-03-11T14:18:39.000Z
docker/sds-api/spine-directory-service/sds/request/http_headers.py
NHSDigital/elecronic-prescriptions-service-api
41fb4b02cfdb674b2a787dd2d8dd698a8c7a59b0
[ "MIT" ]
3
2020-07-22T14:00:41.000Z
2021-12-15T15:15:06.000Z
class HttpHeaders: X_CORRELATION_ID = "X-Correlation-ID" CONTENT_TYPE = "Content-Type" ACCEPT = 'Accept'
19.666667
41
0.686441
14
118
5.571429
0.571429
0.307692
0.358974
0
0
0
0
0
0
0
0
0
0.20339
118
5
42
23.6
0.829787
0
0
0
0
0
0.290598
0
0
0
0
0
0
1
0
false
0
0
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
5
1cb1485ae94b8feac0d35ccc4e36abe4d3ff64c7
5,269
py
Python
tests/query_runner/test_basesql_queryrunner.py
zero1number/redash
caabc4afa4e60e273782a46d84099857821c6500
[ "BSD-2-Clause" ]
20,680
2015-11-16T15:38:37.000Z
2022-03-31T21:43:43.000Z
tests/query_runner/test_basesql_queryrunner.py
zero1number/redash
caabc4afa4e60e273782a46d84099857821c6500
[ "BSD-2-Clause" ]
3,934
2015-11-16T14:46:49.000Z
2022-03-31T13:22:31.000Z
tests/query_runner/test_basesql_queryrunner.py
zero1number/redash
caabc4afa4e60e273782a46d84099857821c6500
[ "BSD-2-Clause" ]
4,147
2015-11-17T15:57:23.000Z
2022-03-31T11:57:43.000Z
import unittest from redash.query_runner import BaseSQLQueryRunner, BaseQueryRunner from redash.utils import gen_query_hash class TestBaseSQLQueryRunner(unittest.TestCase): def setUp(self): self.query_runner = BaseSQLQueryRunner({}) def test_apply_auto_limit_origin_no_limit_1(self): origin_query_text = "SELECT 2" query_text = self.query_runner.apply_auto_limit(origin_query_text, True) self.assertEqual("SELECT 2 LIMIT 1000", query_text) def test_apply_auto_limit_origin_have_limit_1(self): origin_query_text = "SELECT 2 LIMIT 100" query_text = self.query_runner.apply_auto_limit(origin_query_text, True) self.assertEqual(origin_query_text, query_text) def test_apply_auto_limit_origin_have_limit_2(self): origin_query_text = "SELECT * FROM fake WHERE id IN (SELECT id FROM fake_2 LIMIT 200) LIMIT 200" query_text = self.query_runner.apply_auto_limit(origin_query_text, True) self.assertEqual(origin_query_text, query_text) def test_apply_auto_limit_origin_no_limit_2(self): origin_query_text = "SELECT * FROM fake WHERE id IN (SELECT id FROM fake_2 LIMIT 200)" query_text = self.query_runner.apply_auto_limit(origin_query_text, True) self.assertEqual(origin_query_text + " LIMIT 1000", query_text) def test_apply_auto_limit_non_select_query(self): origin_query_text = ("create table execution_times as " "(select id, retrieved_at, data_source_id, query, runtime, query_hash " "from query_results order by 1 desc)") query_text = self.query_runner.apply_auto_limit(origin_query_text, True) self.assertEqual(origin_query_text, query_text) def test_apply_auto_limit_error_query(self): origin_query_text = "dklsk jdhsajhdiwc kkdsakjdwi mdklsjal" query_text = self.query_runner.apply_auto_limit(origin_query_text, True) self.assertEqual(origin_query_text, query_text) def test_apply_auto_limit_multi_query_add_limit_1(self): origin_query_text = ("insert into execution_times (id, retrieved_at, data_source_id, query, runtime, query_hash) " "select id, retrieved_at, data_source_id, query, runtime, 
query_hash from query_results " "where id > (select max(id) from execution_times);\n" "select max(id), 'execution_times' as table_name from execution_times " "union all " "select max(id), 'query_results' as table_name from query_results") query_text = self.query_runner.apply_auto_limit(origin_query_text, True) self.assertEqual(origin_query_text + " LIMIT 1000", query_text) def test_apply_auto_limit_multi_query_add_limit_2(self): origin_query_text = "use database demo;\n" \ "select * from data" query_text = self.query_runner.apply_auto_limit(origin_query_text, True) self.assertEqual(origin_query_text + " LIMIT 1000", query_text) def test_apply_auto_limit_multi_query_end_with_punc(self): origin_query_text = ("select * from table1;\n" "select * from table2") query_text = self.query_runner.apply_auto_limit(origin_query_text, True) self.assertEqual("select * from table1;\nselect * from table2 LIMIT 1000", query_text) def test_apply_auto_limit_multi_query_last_not_select(self): origin_query_text = ("select * from table1;\n" "CREATE TABLE Persons (PersonID int)") query_text = self.query_runner.apply_auto_limit(origin_query_text, True) self.assertEqual(origin_query_text, query_text) def test_apply_auto_limit_last_command_comment(self): origin_query_text = "select * from raw_events; # comment" query_text = self.query_runner.apply_auto_limit(origin_query_text, True) self.assertEqual("select * from raw_events LIMIT 1000", query_text) def test_apply_auto_limit_last_command_comment_2(self): origin_query_text = "select * from raw_events; -- comment" query_text = self.query_runner.apply_auto_limit(origin_query_text, True) self.assertEqual("select * from raw_events LIMIT 1000", query_text) def test_apply_auto_limit_inline_comment(self): origin_query_text = "select * from raw_events -- comment" query_text = self.query_runner.apply_auto_limit(origin_query_text, True) self.assertEqual("select * from raw_events LIMIT 1000", query_text) def test_gen_query_hash_baseSQL(self): 
origin_query_text = "select *" expected_query_text = "select * LIMIT 1000" base_runner = BaseQueryRunner({}) self.assertEqual(base_runner.gen_query_hash(expected_query_text), self.query_runner.gen_query_hash(origin_query_text, True)) def test_gen_query_hash_NoneSQL(self): origin_query_text = "select *" base_runner = BaseQueryRunner({}) self.assertEqual(gen_query_hash(origin_query_text), base_runner.gen_query_hash(origin_query_text, True)) if __name__ == '__main__': unittest.main()
51.15534
122
0.70279
698
5,269
4.875358
0.130372
0.177197
0.171907
0.099912
0.803703
0.729356
0.709668
0.709374
0.640024
0.637379
0
0.016082
0.221105
5,269
102
123
51.656863
0.813109
0
0
0.37037
0
0
0.229266
0
0
0
0
0
0.185185
1
0.197531
false
0
0.037037
0
0.246914
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
1cc01ffc7419903ce7d2e0ad53f099c7e2bda696
202
py
Python
deepspace/tabular/__init__.py
bigwater/deepspace
6766d05c3bb8529142aee415134513b0521e7f4f
[ "BSD-2-Clause" ]
null
null
null
deepspace/tabular/__init__.py
bigwater/deepspace
6766d05c3bb8529142aee415134513b0521e7f4f
[ "BSD-2-Clause" ]
null
null
null
deepspace/tabular/__init__.py
bigwater/deepspace
6766d05c3bb8529142aee415134513b0521e7f4f
[ "BSD-2-Clause" ]
null
null
null
from .dense_skipco import DenseSkipCoFactory from .one_layer import OneLayerFactory from .feed_forward import FeedForwardFactory from .supervised_reg_auto_encoder import SupervisedRegAutoEncoderFactory
40.4
72
0.90099
22
202
8
0.727273
0
0
0
0
0
0
0
0
0
0
0
0.079208
202
4
73
50.5
0.946237
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
1ce51c8c83f3d3a114712782553fff122dc57ef6
293
py
Python
admit/bdp/FeatureList_BDP.py
astroumd/admit
bbf3d79bb6e1a6f7523553ed8ede0d358d106f2c
[ "MIT" ]
4
2017-03-01T17:26:28.000Z
2022-03-03T19:23:06.000Z
admit/bdp/FeatureList_BDP.py
teuben/admit
1cae54d1937c9af3f719102838df716e7e6d655c
[ "MIT" ]
48
2016-10-04T01:25:33.000Z
2021-09-08T14:51:10.000Z
admit/bdp/FeatureList_BDP.py
teuben/admit
1cae54d1937c9af3f719102838df716e7e6d655c
[ "MIT" ]
2
2016-11-10T14:10:22.000Z
2017-03-30T18:58:05.000Z
import admit.util.bdp_types as bt from Table_BDP import Table_BDP from Image_BDP import Image_BDP class FeatureList_BDP(Image_BDP,Table_BDP): def __init__(self,xmlFile=None): Table_BDP.__init__(self,xmlFile) Image_BDP.__init__(self,xmlFile) self._version= "0.1.0"
29.3
43
0.74744
46
293
4.26087
0.434783
0.163265
0.229592
0.183673
0
0
0
0
0
0
0
0.012295
0.167235
293
9
44
32.555556
0.790984
0
0
0
0
0
0.017065
0
0
0
0
0
0
1
0.125
false
0
0.375
0
0.625
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
1cfaa3b473c961f1ae49643b7fb63a548a0da10a
1,159
py
Python
keras-onnx/onnx_keras/layers.py
jwj04ok/ONNX_Convertor
067a17e16dfc8aa80e36f44c4523959daf7359f5
[ "MIT" ]
33
2020-06-09T21:05:35.000Z
2022-02-24T01:48:45.000Z
keras-onnx/onnx_keras/layers.py
jwj04ok/ONNX_Convertor
067a17e16dfc8aa80e36f44c4523959daf7359f5
[ "MIT" ]
17
2020-07-14T19:44:09.000Z
2022-02-10T10:03:01.000Z
keras-onnx/onnx_keras/layers.py
jwj04ok/ONNX_Convertor
067a17e16dfc8aa80e36f44c4523959daf7359f5
[ "MIT" ]
16
2020-06-17T22:56:11.000Z
2021-12-21T05:44:32.000Z
# This file is generated by generate_layers.py from .merg_layers import Add from .merg_layers import Subtract from .merg_layers import Multiply from .merg_layers import Concatenate from .norm_layers import BatchNormalization from .conv_layers import Conv2D from .conv_layers import Conv2DTranspose from .conv_layers import ZeroPadding2D from .conv_layers import DepthwiseConv2D from .conv_layers import UpSampling2D from .conv_layers import Cropping2D from .conv_layers import Cropping1D from .conv_layers import SeparableConv2D from .pool_layers import MaxPooling2D from .pool_layers import AveragePooling2D from .pool_layers import GlobalAveragePooling2D from .pool_layers import GlobalMaxPooling2D from .core_layers import Flatten from .core_layers import Activation from .core_layers import Dense from .core_layers import Reshape from .core_layers import Dropout from .core_layers import Permute from .aact_layers import LeakyReLU from .aact_layers import ReLU from .aact_layers import PReLU from .aact_layers import Softmax from .aact_layers import Elu from .lamd_layers import Lambda from .recurrent_layers import LSTM from .recurrent_layers import GRU
35.121212
47
0.858499
163
1,159
5.907975
0.300614
0.386293
0.116303
0.166147
0
0
0
0
0
0
0
0.011673
0.113028
1,159
32
48
36.21875
0.925097
0.037964
0
0
1
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
1cfbfba27f9332ef91c5770af4f03d04402530b8
37
py
Python
src/mediaeval2020/dataloaders/__init__.py
dbis-uibk/MediaEval2020
c1ba6b55ce91347556020b7a14fc317aae1dac9a
[ "BSD-3-Clause" ]
2
2022-03-09T07:09:18.000Z
2022-03-31T07:29:43.000Z
src/mediaeval2020/dataloaders/__init__.py
dbis-uibk/MediaEval2020
c1ba6b55ce91347556020b7a14fc317aae1dac9a
[ "BSD-3-Clause" ]
null
null
null
src/mediaeval2020/dataloaders/__init__.py
dbis-uibk/MediaEval2020
c1ba6b55ce91347556020b7a14fc317aae1dac9a
[ "BSD-3-Clause" ]
null
null
null
"""Dataloaders for mediaeval2020."""
18.5
36
0.72973
3
37
9
1
0
0
0
0
0
0
0
0
0
0
0.117647
0.081081
37
1
37
37
0.676471
0.810811
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
1cffcb8741e84b3be6f4a959acd82ae2e8446e4c
1,265
py
Python
src/scs_core/data/regression.py
south-coast-science/scs_core
81ad4010abb37ca935f3a31ac805639ef53b1bcf
[ "MIT" ]
3
2019-03-12T01:59:58.000Z
2020-09-12T07:27:42.000Z
src/scs_core/data/regression.py
south-coast-science/scs_core
81ad4010abb37ca935f3a31ac805639ef53b1bcf
[ "MIT" ]
1
2018-04-20T07:58:38.000Z
2021-03-27T08:52:45.000Z
src/scs_core/data/regression.py
south-coast-science/scs_core
81ad4010abb37ca935f3a31ac805639ef53b1bcf
[ "MIT" ]
4
2017-09-29T13:08:43.000Z
2019-10-09T09:13:58.000Z
""" Created on 14 Oct 2016 @author: Bruno Beloff (bruno.beloff@southcoastscience.com) """ from abc import ABC, abstractmethod from scs_core.data.datetime import LocalizedDatetime # -------------------------------------------------------------------------------------------------------------------- class Regression(ABC): """ classdocs """ # ---------------------------------------------------------------------------------------------------------------- @abstractmethod def has_midpoint(self): pass @abstractmethod def has_regression(self): pass @abstractmethod def append(self, rec: LocalizedDatetime, value): pass @abstractmethod def reset(self): pass # ---------------------------------------------------------------------------------------------------------------- @abstractmethod def midpoint(self, ndigits=None): pass # ---------------------------------------------------------------------------------------------------------------- @abstractmethod def min(self, ndigits=None): pass @abstractmethod def mid(self, ndigits=None): pass @abstractmethod def max(self, ndigits=None): pass
20.079365
118
0.389723
81
1,265
6.049383
0.444444
0.277551
0.3
0.155102
0.220408
0.220408
0
0
0
0
0
0.0059
0.196047
1,265
62
119
20.403226
0.47591
0.433992
0
0.592593
0
0
0
0
0
0
0
0
0
1
0.296296
false
0.296296
0.074074
0
0.407407
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
5
e80a2c81cd2e64dbc0c34cd7607f55e55b913a2b
2,266
py
Python
src/dictionaryupdate.py
att/Distributed-Dictionary-Learning
c6915477dac1ffe7d9a732dd31333d540f5bd00b
[ "MIT" ]
5
2020-07-25T18:44:54.000Z
2022-03-27T14:14:20.000Z
src/dictionaryupdate.py
att/Distributed-Dictionary-Learning
c6915477dac1ffe7d9a732dd31333d540f5bd00b
[ "MIT" ]
null
null
null
src/dictionaryupdate.py
att/Distributed-Dictionary-Learning
c6915477dac1ffe7d9a732dd31333d540f5bd00b
[ "MIT" ]
3
2019-09-04T08:04:09.000Z
2022-03-27T14:14:26.000Z
import numpy as np


def l2_update_dict(olddict, X, coef):
    """Update the dictionary for KSVD.

    Parameters
    ----------
    olddict : ndarray
        Dictionary from the previous iteration; columns are atoms.
    X : ndarray
        Data matrix (signals as columns).
    coef : ndarray
        Sparse coding of the data against which to optimize the dictionary.
        NOTE: rows belonging to used atoms are updated in place.

    Returns
    -------
    newdict : ndarray
        Updated dictionary; each updated column has unit l2 norm.
    coef : ndarray
        Updated sparse coefficient matrix (the same array object passed in).
    """
    K = coef.shape[0]
    newdict = np.copy(olddict)
    for k in range(K):
        # Columns of X that actually use atom k (nonzero already means "!= 0").
        indexes = np.nonzero(coef[k, :])[0]
        if len(indexes) == 0:
            continue
        # Fancy indexing copies, so zeroing row k here does not touch `coef`.
        tempcoef = coef[:, indexes]
        tempcoef[k, :] = 0
        # Residual of the selected columns with atom k's contribution removed.
        ER = X[:, indexes] - np.dot(newdict, tempcoef)
        # Only the leading singular triplet is used, so the thin SVD is
        # sufficient and much cheaper than the full factorization.
        U, s, Vh = np.linalg.svd(ER, full_matrices=False)
        newdict[:, k] = U[:, 0]
        coef[k, indexes] = s[0] * Vh[0, :]
    return newdict, coef


def nnl2_update_dict(olddict, X, coef):
    """Update the dictionary for NN-KSVD (non-negative KSVD).

    Parameters
    ----------
    olddict : ndarray
        Dictionary from the previous iteration; columns are atoms.
    X : ndarray
        Data matrix (signals as columns).
    coef : ndarray
        Sparse coding of the data against which to optimize the dictionary.
        NOTE: rows belonging to used atoms are updated in place.

    Returns
    -------
    newdict : ndarray
        Updated dictionary; each updated column is non-negative with unit l2 norm.
    coef : ndarray
        Updated sparse coefficient matrix (the same array object passed in).
    """
    alteritern = 10  # alternating non-negative refinement steps per atom
    K = coef.shape[0]
    newdict = np.copy(olddict)
    for k in range(K):
        indexes = np.nonzero(coef[k, :])[0]
        if len(indexes) == 0:
            continue
        tempcoef = coef[:, indexes]
        tempcoef[k, :] = 0
        ER = X[:, indexes] - np.dot(newdict, tempcoef)
        U, s, Vh = np.linalg.svd(ER, full_matrices=False)
        u = U[:, 0]
        v = s[0] * Vh[0, :]
        # SVD sign is arbitrary: if the coefficient vector came out entirely
        # negative, flip both factors so clipping below keeps something.
        if np.all(v < 0):
            u = -u
            v = -v
        u = (u > 0) * u
        v = (v > 0) * v
        # Alternating projected least squares onto the non-negative orthant.
        # NOTE(review): if u or v clips to all zeros, the divisions below
        # produce NaN/inf — same as the original behavior, left unchanged.
        for _ in range(alteritern):
            u = np.dot(ER, v) / np.dot(v, v)
            u = (u > 0) * u
            v = np.dot(u, ER) / np.dot(u, u)
            v = (v > 0) * v
        normu = np.linalg.norm(u)
        # Normalize the atom and fold its scale into the coefficients.
        newdict[:, k] = u / normu
        coef[k, indexes] = normu * v
    return newdict, coef
27.975309
81
0.461606
271
2,266
3.845018
0.225092
0.028791
0.008637
0.034549
0.777351
0.769674
0.769674
0.756238
0.756238
0.671785
0
0.018954
0.417917
2,266
81
82
27.975309
0.771039
0.260371
0
0.585366
0
0
0
0
0
0
0
0
0
1
0.04878
false
0
0.02439
0
0.121951
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
e820e5680c3fed8cc8938312cb03a43d74320514
64,685
py
Python
nova/objects/flavor.py
bopopescu/nova-token
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
[ "Apache-2.0" ]
null
null
null
nova/objects/flavor.py
bopopescu/nova-token
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
[ "Apache-2.0" ]
null
null
null
nova/objects/flavor.py
bopopescu/nova-token
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
[ "Apache-2.0" ]
2
2017-07-20T17:31:34.000Z
2020-07-24T02:42:19.000Z
begin_unit comment|'# Copyright 2013 Red Hat, Inc' nl|'\n' comment|'#' nl|'\n' comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may' nl|'\n' comment|'# not use this file except in compliance with the License. You may obtain' nl|'\n' comment|'# a copy of the License at' nl|'\n' comment|'#' nl|'\n' comment|'# http://www.apache.org/licenses/LICENSE-2.0' nl|'\n' comment|'#' nl|'\n' comment|'# Unless required by applicable law or agreed to in writing, software' nl|'\n' comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT' nl|'\n' comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the' nl|'\n' comment|'# License for the specific language governing permissions and limitations' nl|'\n' comment|'# under the License.' nl|'\n' nl|'\n' name|'from' name|'oslo_db' name|'import' name|'exception' name|'as' name|'db_exc' newline|'\n' name|'from' name|'oslo_db' op|'.' name|'sqlalchemy' name|'import' name|'utils' name|'as' name|'sqlalchemyutils' newline|'\n' name|'from' name|'oslo_log' name|'import' name|'log' name|'as' name|'logging' newline|'\n' name|'from' name|'sqlalchemy' name|'import' name|'or_' newline|'\n' name|'from' name|'sqlalchemy' op|'.' name|'orm' name|'import' name|'joinedload' newline|'\n' name|'from' name|'sqlalchemy' op|'.' name|'sql' op|'.' name|'expression' name|'import' name|'asc' newline|'\n' name|'from' name|'sqlalchemy' op|'.' name|'sql' name|'import' name|'func' newline|'\n' name|'from' name|'sqlalchemy' op|'.' name|'sql' name|'import' name|'text' newline|'\n' name|'from' name|'sqlalchemy' op|'.' name|'sql' name|'import' name|'true' newline|'\n' nl|'\n' name|'from' name|'nova' name|'import' name|'db' newline|'\n' name|'from' name|'nova' op|'.' name|'db' op|'.' name|'sqlalchemy' name|'import' name|'api' name|'as' name|'db_api' newline|'\n' name|'from' name|'nova' op|'.' name|'db' op|'.' name|'sqlalchemy' op|'.' 
name|'api' name|'import' name|'require_context' newline|'\n' name|'from' name|'nova' op|'.' name|'db' op|'.' name|'sqlalchemy' name|'import' name|'api_models' newline|'\n' name|'from' name|'nova' op|'.' name|'db' op|'.' name|'sqlalchemy' name|'import' name|'models' name|'as' name|'main_models' newline|'\n' name|'from' name|'nova' name|'import' name|'exception' newline|'\n' name|'from' name|'nova' op|'.' name|'i18n' name|'import' name|'_LW' newline|'\n' name|'from' name|'nova' name|'import' name|'objects' newline|'\n' name|'from' name|'nova' op|'.' name|'objects' name|'import' name|'base' newline|'\n' name|'from' name|'nova' op|'.' name|'objects' name|'import' name|'fields' newline|'\n' nl|'\n' nl|'\n' DECL|variable|LOG name|'LOG' op|'=' name|'logging' op|'.' name|'getLogger' op|'(' name|'__name__' op|')' newline|'\n' DECL|variable|OPTIONAL_FIELDS name|'OPTIONAL_FIELDS' op|'=' op|'[' string|"'extra_specs'" op|',' string|"'projects'" op|']' newline|'\n' DECL|variable|DEPRECATED_FIELDS name|'DEPRECATED_FIELDS' op|'=' op|'[' string|"'deleted'" op|',' string|"'deleted_at'" op|']' newline|'\n' nl|'\n' nl|'\n' DECL|function|_dict_with_extra_specs name|'def' name|'_dict_with_extra_specs' op|'(' name|'flavor_model' op|')' op|':' newline|'\n' indent|' ' name|'extra_specs' op|'=' op|'{' name|'x' op|'[' string|"'key'" op|']' op|':' name|'x' op|'[' string|"'value'" op|']' nl|'\n' name|'for' name|'x' name|'in' name|'flavor_model' op|'[' string|"'extra_specs'" op|']' op|'}' newline|'\n' name|'return' name|'dict' op|'(' name|'flavor_model' op|',' name|'extra_specs' op|'=' name|'extra_specs' op|')' newline|'\n' nl|'\n' nl|'\n' comment|'# NOTE(danms): There are some issues with the oslo_db context manager' nl|'\n' comment|'# decorators with static methods. We pull these out for now and can' nl|'\n' comment|'# move them back into the actual staticmethods on the object when those' nl|'\n' comment|'# issues are resolved.' nl|'\n' dedent|'' op|'@' name|'db_api' op|'.' 
name|'api_context_manager' op|'.' name|'reader' newline|'\n' DECL|function|_get_projects_from_db name|'def' name|'_get_projects_from_db' op|'(' name|'context' op|',' name|'flavorid' op|')' op|':' newline|'\n' indent|' ' name|'db_flavor' op|'=' name|'context' op|'.' name|'session' op|'.' name|'query' op|'(' name|'api_models' op|'.' name|'Flavors' op|')' op|'.' name|'filter_by' op|'(' name|'flavorid' op|'=' name|'flavorid' op|')' op|'.' name|'options' op|'(' name|'joinedload' op|'(' string|"'projects'" op|')' op|')' op|'.' name|'first' op|'(' op|')' newline|'\n' name|'if' name|'not' name|'db_flavor' op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' name|'FlavorNotFound' op|'(' name|'flavor_id' op|'=' name|'flavorid' op|')' newline|'\n' dedent|'' name|'return' op|'[' name|'x' op|'[' string|"'project_id'" op|']' name|'for' name|'x' name|'in' name|'db_flavor' op|'[' string|"'projects'" op|']' op|']' newline|'\n' nl|'\n' nl|'\n' dedent|'' op|'@' name|'db_api' op|'.' name|'api_context_manager' op|'.' name|'writer' newline|'\n' DECL|function|_flavor_add_project name|'def' name|'_flavor_add_project' op|'(' name|'context' op|',' name|'flavor_id' op|',' name|'project_id' op|')' op|':' newline|'\n' indent|' ' name|'project' op|'=' name|'api_models' op|'.' name|'FlavorProjects' op|'(' op|')' newline|'\n' name|'project' op|'.' name|'update' op|'(' op|'{' string|"'flavor_id'" op|':' name|'flavor_id' op|',' nl|'\n' string|"'project_id'" op|':' name|'project_id' op|'}' op|')' newline|'\n' name|'try' op|':' newline|'\n' indent|' ' name|'project' op|'.' name|'save' op|'(' name|'context' op|'.' name|'session' op|')' newline|'\n' dedent|'' name|'except' name|'db_exc' op|'.' name|'DBDuplicateEntry' op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' 
name|'FlavorAccessExists' op|'(' name|'flavor_id' op|'=' name|'flavor_id' op|',' nl|'\n' name|'project_id' op|'=' name|'project_id' op|')' newline|'\n' nl|'\n' nl|'\n' dedent|'' dedent|'' op|'@' name|'db_api' op|'.' name|'api_context_manager' op|'.' name|'writer' newline|'\n' DECL|function|_flavor_del_project name|'def' name|'_flavor_del_project' op|'(' name|'context' op|',' name|'flavor_id' op|',' name|'project_id' op|')' op|':' newline|'\n' indent|' ' name|'result' op|'=' name|'context' op|'.' name|'session' op|'.' name|'query' op|'(' name|'api_models' op|'.' name|'FlavorProjects' op|')' op|'.' name|'filter_by' op|'(' name|'project_id' op|'=' name|'project_id' op|')' op|'.' name|'filter_by' op|'(' name|'flavor_id' op|'=' name|'flavor_id' op|')' op|'.' name|'delete' op|'(' op|')' newline|'\n' name|'if' name|'result' op|'==' number|'0' op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' name|'FlavorAccessNotFound' op|'(' name|'flavor_id' op|'=' name|'flavor_id' op|',' nl|'\n' name|'project_id' op|'=' name|'project_id' op|')' newline|'\n' nl|'\n' nl|'\n' dedent|'' dedent|'' op|'@' name|'db_api' op|'.' name|'api_context_manager' op|'.' name|'writer' newline|'\n' DECL|function|_flavor_extra_specs_add name|'def' name|'_flavor_extra_specs_add' op|'(' name|'context' op|',' name|'flavor_id' op|',' name|'specs' op|',' name|'max_retries' op|'=' number|'10' op|')' op|':' newline|'\n' indent|' ' name|'writer' op|'=' name|'db_api' op|'.' name|'api_context_manager' op|'.' name|'writer' newline|'\n' name|'for' name|'attempt' name|'in' name|'range' op|'(' name|'max_retries' op|')' op|':' newline|'\n' indent|' ' name|'try' op|':' newline|'\n' indent|' ' name|'spec_refs' op|'=' name|'context' op|'.' name|'session' op|'.' name|'query' op|'(' nl|'\n' name|'api_models' op|'.' name|'FlavorExtraSpecs' op|')' op|'.' name|'filter_by' op|'(' name|'flavor_id' op|'=' name|'flavor_id' op|')' op|'.' name|'filter' op|'(' name|'api_models' op|'.' name|'FlavorExtraSpecs' op|'.' 
name|'key' op|'.' name|'in_' op|'(' nl|'\n' name|'specs' op|'.' name|'keys' op|'(' op|')' op|')' op|')' op|'.' name|'all' op|'(' op|')' newline|'\n' nl|'\n' name|'existing_keys' op|'=' name|'set' op|'(' op|')' newline|'\n' name|'for' name|'spec_ref' name|'in' name|'spec_refs' op|':' newline|'\n' indent|' ' name|'key' op|'=' name|'spec_ref' op|'[' string|'"key"' op|']' newline|'\n' name|'existing_keys' op|'.' name|'add' op|'(' name|'key' op|')' newline|'\n' name|'with' name|'writer' op|'.' name|'savepoint' op|'.' name|'using' op|'(' name|'context' op|')' op|':' newline|'\n' indent|' ' name|'spec_ref' op|'.' name|'update' op|'(' op|'{' string|'"value"' op|':' name|'specs' op|'[' name|'key' op|']' op|'}' op|')' newline|'\n' nl|'\n' dedent|'' dedent|'' name|'for' name|'key' op|',' name|'value' name|'in' name|'specs' op|'.' name|'items' op|'(' op|')' op|':' newline|'\n' indent|' ' name|'if' name|'key' name|'in' name|'existing_keys' op|':' newline|'\n' indent|' ' name|'continue' newline|'\n' dedent|'' name|'spec_ref' op|'=' name|'api_models' op|'.' name|'FlavorExtraSpecs' op|'(' op|')' newline|'\n' name|'with' name|'writer' op|'.' name|'savepoint' op|'.' name|'using' op|'(' name|'context' op|')' op|':' newline|'\n' indent|' ' name|'spec_ref' op|'.' name|'update' op|'(' op|'{' string|'"key"' op|':' name|'key' op|',' string|'"value"' op|':' name|'value' op|',' nl|'\n' string|'"flavor_id"' op|':' name|'flavor_id' op|'}' op|')' newline|'\n' name|'context' op|'.' name|'session' op|'.' name|'add' op|'(' name|'spec_ref' op|')' newline|'\n' nl|'\n' dedent|'' dedent|'' name|'return' name|'specs' newline|'\n' dedent|'' name|'except' name|'db_exc' op|'.' name|'DBDuplicateEntry' op|':' newline|'\n' comment|'# a concurrent transaction has been committed,' nl|'\n' comment|'# try again unless this was the last attempt' nl|'\n' indent|' ' name|'if' name|'attempt' op|'==' name|'max_retries' op|'-' number|'1' op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' 
name|'FlavorExtraSpecUpdateCreateFailed' op|'(' nl|'\n' name|'id' op|'=' name|'flavor_id' op|',' name|'retries' op|'=' name|'max_retries' op|')' newline|'\n' nl|'\n' nl|'\n' dedent|'' dedent|'' dedent|'' dedent|'' op|'@' name|'db_api' op|'.' name|'api_context_manager' op|'.' name|'writer' newline|'\n' DECL|function|_flavor_extra_specs_del name|'def' name|'_flavor_extra_specs_del' op|'(' name|'context' op|',' name|'flavor_id' op|',' name|'key' op|')' op|':' newline|'\n' indent|' ' name|'result' op|'=' name|'context' op|'.' name|'session' op|'.' name|'query' op|'(' name|'api_models' op|'.' name|'FlavorExtraSpecs' op|')' op|'.' name|'filter_by' op|'(' name|'flavor_id' op|'=' name|'flavor_id' op|')' op|'.' name|'filter_by' op|'(' name|'key' op|'=' name|'key' op|')' op|'.' name|'delete' op|'(' op|')' newline|'\n' name|'if' name|'result' op|'==' number|'0' op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' name|'FlavorExtraSpecsNotFound' op|'(' nl|'\n' name|'extra_specs_key' op|'=' name|'key' op|',' name|'flavor_id' op|'=' name|'flavor_id' op|')' newline|'\n' nl|'\n' nl|'\n' dedent|'' dedent|'' op|'@' name|'db_api' op|'.' name|'api_context_manager' op|'.' name|'writer' newline|'\n' DECL|function|_flavor_create name|'def' name|'_flavor_create' op|'(' name|'context' op|',' name|'values' op|')' op|':' newline|'\n' indent|' ' name|'specs' op|'=' name|'values' op|'.' name|'get' op|'(' string|"'extra_specs'" op|')' newline|'\n' name|'db_specs' op|'=' op|'[' op|']' newline|'\n' name|'if' name|'specs' op|':' newline|'\n' indent|' ' name|'for' name|'k' op|',' name|'v' name|'in' name|'specs' op|'.' name|'items' op|'(' op|')' op|':' newline|'\n' indent|' ' name|'db_spec' op|'=' name|'api_models' op|'.' name|'FlavorExtraSpecs' op|'(' op|')' newline|'\n' name|'db_spec' op|'[' string|"'key'" op|']' op|'=' name|'k' newline|'\n' name|'db_spec' op|'[' string|"'value'" op|']' op|'=' name|'v' newline|'\n' name|'db_specs' op|'.' 
name|'append' op|'(' name|'db_spec' op|')' newline|'\n' nl|'\n' dedent|'' dedent|'' name|'projects' op|'=' name|'values' op|'.' name|'get' op|'(' string|"'projects'" op|')' newline|'\n' name|'db_projects' op|'=' op|'[' op|']' newline|'\n' name|'if' name|'projects' op|':' newline|'\n' indent|' ' name|'for' name|'project' name|'in' name|'set' op|'(' name|'projects' op|')' op|':' newline|'\n' indent|' ' name|'db_project' op|'=' name|'api_models' op|'.' name|'FlavorProjects' op|'(' op|')' newline|'\n' name|'db_project' op|'[' string|"'project_id'" op|']' op|'=' name|'project' newline|'\n' name|'db_projects' op|'.' name|'append' op|'(' name|'db_project' op|')' newline|'\n' nl|'\n' dedent|'' dedent|'' name|'values' op|'[' string|"'extra_specs'" op|']' op|'=' name|'db_specs' newline|'\n' name|'values' op|'[' string|"'projects'" op|']' op|'=' name|'db_projects' newline|'\n' name|'db_flavor' op|'=' name|'api_models' op|'.' name|'Flavors' op|'(' op|')' newline|'\n' name|'db_flavor' op|'.' name|'update' op|'(' name|'values' op|')' newline|'\n' nl|'\n' name|'try' op|':' newline|'\n' indent|' ' name|'db_flavor' op|'.' name|'save' op|'(' name|'context' op|'.' name|'session' op|')' newline|'\n' dedent|'' name|'except' name|'db_exc' op|'.' name|'DBDuplicateEntry' name|'as' name|'e' op|':' newline|'\n' indent|' ' name|'if' string|"'flavorid'" name|'in' name|'e' op|'.' name|'columns' op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' name|'FlavorIdExists' op|'(' name|'flavor_id' op|'=' name|'values' op|'[' string|"'flavorid'" op|']' op|')' newline|'\n' dedent|'' name|'raise' name|'exception' op|'.' name|'FlavorExists' op|'(' name|'name' op|'=' name|'values' op|'[' string|"'name'" op|']' op|')' newline|'\n' dedent|'' name|'except' name|'Exception' name|'as' name|'e' op|':' newline|'\n' indent|' ' name|'raise' name|'db_exc' op|'.' 
name|'DBError' op|'(' name|'e' op|')' newline|'\n' nl|'\n' dedent|'' name|'return' name|'_dict_with_extra_specs' op|'(' name|'db_flavor' op|')' newline|'\n' nl|'\n' nl|'\n' dedent|'' op|'@' name|'db_api' op|'.' name|'api_context_manager' op|'.' name|'writer' newline|'\n' DECL|function|_flavor_destroy name|'def' name|'_flavor_destroy' op|'(' name|'context' op|',' name|'flavor_id' op|'=' name|'None' op|',' name|'name' op|'=' name|'None' op|')' op|':' newline|'\n' indent|' ' name|'query' op|'=' name|'context' op|'.' name|'session' op|'.' name|'query' op|'(' name|'api_models' op|'.' name|'Flavors' op|')' newline|'\n' nl|'\n' name|'if' name|'flavor_id' name|'is' name|'not' name|'None' op|':' newline|'\n' indent|' ' name|'query' op|'=' name|'query' op|'.' name|'filter' op|'(' name|'api_models' op|'.' name|'Flavors' op|'.' name|'id' op|'==' name|'flavor_id' op|')' newline|'\n' dedent|'' name|'else' op|':' newline|'\n' indent|' ' name|'query' op|'=' name|'query' op|'.' name|'filter' op|'(' name|'api_models' op|'.' name|'Flavors' op|'.' name|'name' op|'==' name|'name' op|')' newline|'\n' dedent|'' name|'result' op|'=' name|'query' op|'.' name|'first' op|'(' op|')' newline|'\n' nl|'\n' name|'if' name|'not' name|'result' op|':' newline|'\n' indent|' ' name|'if' name|'flavor_id' name|'is' name|'not' name|'None' op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' name|'FlavorNotFound' op|'(' name|'flavor_id' op|'=' name|'flavor_id' op|')' newline|'\n' dedent|'' name|'else' op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' name|'FlavorNotFoundByName' op|'(' name|'flavor_name' op|'=' name|'name' op|')' newline|'\n' nl|'\n' dedent|'' dedent|'' name|'context' op|'.' name|'session' op|'.' name|'query' op|'(' name|'api_models' op|'.' name|'FlavorProjects' op|')' op|'.' name|'filter_by' op|'(' name|'flavor_id' op|'=' name|'result' op|'.' name|'id' op|')' op|'.' name|'delete' op|'(' op|')' newline|'\n' name|'context' op|'.' name|'session' op|'.' 
name|'query' op|'(' name|'api_models' op|'.' name|'FlavorExtraSpecs' op|')' op|'.' name|'filter_by' op|'(' name|'flavor_id' op|'=' name|'result' op|'.' name|'id' op|')' op|'.' name|'delete' op|'(' op|')' newline|'\n' name|'context' op|'.' name|'session' op|'.' name|'delete' op|'(' name|'result' op|')' newline|'\n' nl|'\n' nl|'\n' dedent|'' op|'@' name|'db_api' op|'.' name|'main_context_manager' op|'.' name|'reader' newline|'\n' DECL|function|_ensure_migrated name|'def' name|'_ensure_migrated' op|'(' name|'context' op|')' op|':' newline|'\n' indent|' ' name|'result' op|'=' name|'context' op|'.' name|'session' op|'.' name|'query' op|'(' name|'main_models' op|'.' name|'InstanceTypes' op|')' op|'.' name|'filter_by' op|'(' name|'deleted' op|'=' number|'0' op|')' op|'.' name|'count' op|'(' op|')' newline|'\n' name|'if' name|'result' op|':' newline|'\n' indent|' ' name|'LOG' op|'.' name|'warning' op|'(' name|'_LW' op|'(' string|"'Main database contains %(count)i unmigrated flavors'" op|')' op|',' nl|'\n' op|'{' string|"'count'" op|':' name|'result' op|'}' op|')' newline|'\n' dedent|'' name|'return' name|'result' op|'==' number|'0' newline|'\n' nl|'\n' nl|'\n' comment|'# TODO(berrange): Remove NovaObjectDictCompat' nl|'\n' dedent|'' op|'@' name|'base' op|'.' name|'NovaObjectRegistry' op|'.' name|'register' newline|'\n' name|'class' name|'Flavor' op|'(' name|'base' op|'.' name|'NovaPersistentObject' op|',' name|'base' op|'.' name|'NovaObject' op|',' nl|'\n' DECL|class|Flavor name|'base' op|'.' name|'NovaObjectDictCompat' op|')' op|':' newline|'\n' comment|'# Version 1.0: Initial version' nl|'\n' comment|'# Version 1.1: Added save_projects(), save_extra_specs(), removed' nl|'\n' comment|'# remoteable from save()' nl|'\n' DECL|variable|VERSION indent|' ' name|'VERSION' op|'=' string|"'1.1'" newline|'\n' nl|'\n' DECL|variable|fields name|'fields' op|'=' op|'{' nl|'\n' string|"'id'" op|':' name|'fields' op|'.' 
name|'IntegerField' op|'(' op|')' op|',' nl|'\n' string|"'name'" op|':' name|'fields' op|'.' name|'StringField' op|'(' name|'nullable' op|'=' name|'True' op|')' op|',' nl|'\n' string|"'memory_mb'" op|':' name|'fields' op|'.' name|'IntegerField' op|'(' op|')' op|',' nl|'\n' string|"'vcpus'" op|':' name|'fields' op|'.' name|'IntegerField' op|'(' op|')' op|',' nl|'\n' string|"'root_gb'" op|':' name|'fields' op|'.' name|'IntegerField' op|'(' op|')' op|',' nl|'\n' string|"'ephemeral_gb'" op|':' name|'fields' op|'.' name|'IntegerField' op|'(' op|')' op|',' nl|'\n' string|"'flavorid'" op|':' name|'fields' op|'.' name|'StringField' op|'(' op|')' op|',' nl|'\n' string|"'swap'" op|':' name|'fields' op|'.' name|'IntegerField' op|'(' op|')' op|',' nl|'\n' string|"'rxtx_factor'" op|':' name|'fields' op|'.' name|'FloatField' op|'(' name|'nullable' op|'=' name|'True' op|',' name|'default' op|'=' number|'1.0' op|')' op|',' nl|'\n' string|"'vcpu_weight'" op|':' name|'fields' op|'.' name|'IntegerField' op|'(' name|'nullable' op|'=' name|'True' op|')' op|',' nl|'\n' string|"'disabled'" op|':' name|'fields' op|'.' name|'BooleanField' op|'(' op|')' op|',' nl|'\n' string|"'is_public'" op|':' name|'fields' op|'.' name|'BooleanField' op|'(' op|')' op|',' nl|'\n' string|"'extra_specs'" op|':' name|'fields' op|'.' name|'DictOfStringsField' op|'(' op|')' op|',' nl|'\n' string|"'projects'" op|':' name|'fields' op|'.' name|'ListOfStringsField' op|'(' op|')' op|',' nl|'\n' op|'}' newline|'\n' nl|'\n' DECL|member|__init__ name|'def' name|'__init__' op|'(' name|'self' op|',' op|'*' name|'args' op|',' op|'**' name|'kwargs' op|')' op|':' newline|'\n' indent|' ' name|'super' op|'(' name|'Flavor' op|',' name|'self' op|')' op|'.' name|'__init__' op|'(' op|'*' name|'args' op|',' op|'**' name|'kwargs' op|')' newline|'\n' name|'self' op|'.' name|'_orig_extra_specs' op|'=' op|'{' op|'}' newline|'\n' name|'self' op|'.' name|'_orig_projects' op|'=' op|'[' op|']' newline|'\n' name|'self' op|'.' 
name|'_in_api' op|'=' name|'False' newline|'\n' nl|'\n' dedent|'' op|'@' name|'property' newline|'\n' DECL|member|in_api name|'def' name|'in_api' op|'(' name|'self' op|')' op|':' newline|'\n' indent|' ' name|'if' name|'self' op|'.' name|'_in_api' op|':' newline|'\n' indent|' ' name|'return' name|'True' newline|'\n' dedent|'' name|'else' op|':' newline|'\n' indent|' ' name|'try' op|':' newline|'\n' indent|' ' name|'if' string|"'id'" name|'in' name|'self' op|':' newline|'\n' indent|' ' name|'self' op|'.' name|'_flavor_get_from_db' op|'(' name|'self' op|'.' name|'_context' op|',' name|'self' op|'.' name|'id' op|')' newline|'\n' dedent|'' name|'else' op|':' newline|'\n' indent|' ' name|'flavor' op|'=' name|'self' op|'.' name|'_flavor_get_by_flavor_id_from_db' op|'(' nl|'\n' name|'self' op|'.' name|'_context' op|',' nl|'\n' name|'self' op|'.' name|'flavorid' op|')' newline|'\n' comment|'# Fix us up so we can use our real id' nl|'\n' name|'self' op|'.' name|'id' op|'=' name|'flavor' op|'[' string|"'id'" op|']' newline|'\n' dedent|'' name|'self' op|'.' name|'_in_api' op|'=' name|'True' newline|'\n' dedent|'' name|'except' name|'exception' op|'.' name|'FlavorNotFound' op|':' newline|'\n' indent|' ' name|'pass' newline|'\n' dedent|'' name|'return' name|'self' op|'.' name|'_in_api' newline|'\n' nl|'\n' dedent|'' dedent|'' op|'@' name|'staticmethod' newline|'\n' DECL|member|_from_db_object name|'def' name|'_from_db_object' op|'(' name|'context' op|',' name|'flavor' op|',' name|'db_flavor' op|',' name|'expected_attrs' op|'=' name|'None' op|')' op|':' newline|'\n' indent|' ' name|'if' name|'expected_attrs' name|'is' name|'None' op|':' newline|'\n' indent|' ' name|'expected_attrs' op|'=' op|'[' op|']' newline|'\n' dedent|'' name|'flavor' op|'.' name|'_context' op|'=' name|'context' newline|'\n' name|'for' name|'name' op|',' name|'field' name|'in' name|'flavor' op|'.' name|'fields' op|'.' 
name|'items' op|'(' op|')' op|':' newline|'\n' indent|' ' name|'if' name|'name' name|'in' name|'OPTIONAL_FIELDS' op|':' newline|'\n' indent|' ' name|'continue' newline|'\n' dedent|'' name|'if' name|'name' name|'in' name|'DEPRECATED_FIELDS' name|'and' name|'name' name|'not' name|'in' name|'db_flavor' op|':' newline|'\n' indent|' ' name|'continue' newline|'\n' dedent|'' name|'value' op|'=' name|'db_flavor' op|'[' name|'name' op|']' newline|'\n' name|'if' name|'isinstance' op|'(' name|'field' op|',' name|'fields' op|'.' name|'IntegerField' op|')' op|':' newline|'\n' indent|' ' name|'value' op|'=' name|'value' name|'if' name|'value' name|'is' name|'not' name|'None' name|'else' number|'0' newline|'\n' dedent|'' name|'flavor' op|'[' name|'name' op|']' op|'=' name|'value' newline|'\n' nl|'\n' comment|'# NOTE(danms): This is to support processing the API flavor' nl|'\n' comment|'# model, which does not have these deprecated fields. When we' nl|'\n' comment|'# remove compatibility with the old InstanceType model, we can' nl|'\n' comment|'# remove this as well.' nl|'\n' dedent|'' name|'if' name|'any' op|'(' name|'f' name|'not' name|'in' name|'db_flavor' name|'for' name|'f' name|'in' name|'DEPRECATED_FIELDS' op|')' op|':' newline|'\n' indent|' ' name|'flavor' op|'.' name|'deleted_at' op|'=' name|'None' newline|'\n' name|'flavor' op|'.' name|'deleted' op|'=' name|'False' newline|'\n' nl|'\n' dedent|'' name|'if' string|"'extra_specs'" name|'in' name|'expected_attrs' op|':' newline|'\n' indent|' ' name|'flavor' op|'.' 
name|'extra_specs' op|'=' name|'db_flavor' op|'[' string|"'extra_specs'" op|']' newline|'\n' nl|'\n' dedent|'' name|'if' string|"'projects'" name|'in' name|'expected_attrs' op|':' newline|'\n' indent|' ' name|'if' string|"'projects'" name|'in' name|'db_flavor' op|':' newline|'\n' indent|' ' name|'flavor' op|'[' string|"'projects'" op|']' op|'=' op|'[' name|'x' op|'[' string|"'project_id'" op|']' nl|'\n' name|'for' name|'x' name|'in' name|'db_flavor' op|'[' string|"'projects'" op|']' op|']' newline|'\n' dedent|'' name|'else' op|':' newline|'\n' indent|' ' name|'flavor' op|'.' name|'_load_projects' op|'(' op|')' newline|'\n' nl|'\n' dedent|'' dedent|'' name|'flavor' op|'.' name|'obj_reset_changes' op|'(' op|')' newline|'\n' name|'return' name|'flavor' newline|'\n' nl|'\n' dedent|'' op|'@' name|'staticmethod' newline|'\n' op|'@' name|'db_api' op|'.' name|'api_context_manager' op|'.' name|'reader' newline|'\n' DECL|member|_flavor_get_query_from_db name|'def' name|'_flavor_get_query_from_db' op|'(' name|'context' op|')' op|':' newline|'\n' indent|' ' name|'query' op|'=' name|'context' op|'.' name|'session' op|'.' name|'query' op|'(' name|'api_models' op|'.' name|'Flavors' op|')' op|'.' name|'options' op|'(' name|'joinedload' op|'(' string|"'extra_specs'" op|')' op|')' newline|'\n' name|'if' name|'not' name|'context' op|'.' name|'is_admin' op|':' newline|'\n' indent|' ' name|'the_filter' op|'=' op|'[' name|'api_models' op|'.' name|'Flavors' op|'.' name|'is_public' op|'==' name|'true' op|'(' op|')' op|']' newline|'\n' name|'the_filter' op|'.' name|'extend' op|'(' op|'[' nl|'\n' name|'api_models' op|'.' name|'Flavors' op|'.' name|'projects' op|'.' name|'any' op|'(' name|'project_id' op|'=' name|'context' op|'.' name|'project_id' op|')' nl|'\n' op|']' op|')' newline|'\n' name|'query' op|'=' name|'query' op|'.' 
name|'filter' op|'(' name|'or_' op|'(' op|'*' name|'the_filter' op|')' op|')' newline|'\n' dedent|'' name|'return' name|'query' newline|'\n' nl|'\n' dedent|'' op|'@' name|'staticmethod' newline|'\n' op|'@' name|'require_context' newline|'\n' DECL|member|_flavor_get_from_db name|'def' name|'_flavor_get_from_db' op|'(' name|'context' op|',' name|'id' op|')' op|':' newline|'\n' indent|' ' string|'"""Returns a dict describing specific flavor."""' newline|'\n' name|'result' op|'=' name|'Flavor' op|'.' name|'_flavor_get_query_from_db' op|'(' name|'context' op|')' op|'.' name|'filter_by' op|'(' name|'id' op|'=' name|'id' op|')' op|'.' name|'first' op|'(' op|')' newline|'\n' name|'if' name|'not' name|'result' op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' name|'FlavorNotFound' op|'(' name|'flavor_id' op|'=' name|'id' op|')' newline|'\n' dedent|'' name|'return' name|'db_api' op|'.' name|'_dict_with_extra_specs' op|'(' name|'result' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'staticmethod' newline|'\n' op|'@' name|'require_context' newline|'\n' DECL|member|_flavor_get_by_name_from_db name|'def' name|'_flavor_get_by_name_from_db' op|'(' name|'context' op|',' name|'name' op|')' op|':' newline|'\n' indent|' ' string|'"""Returns a dict describing specific flavor."""' newline|'\n' name|'result' op|'=' name|'Flavor' op|'.' name|'_flavor_get_query_from_db' op|'(' name|'context' op|')' op|'.' name|'filter_by' op|'(' name|'name' op|'=' name|'name' op|')' op|'.' name|'first' op|'(' op|')' newline|'\n' name|'if' name|'not' name|'result' op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' name|'FlavorNotFoundByName' op|'(' name|'flavor_name' op|'=' name|'name' op|')' newline|'\n' dedent|'' name|'return' name|'db_api' op|'.' 
name|'_dict_with_extra_specs' op|'(' name|'result' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'staticmethod' newline|'\n' op|'@' name|'require_context' newline|'\n' DECL|member|_flavor_get_by_flavor_id_from_db name|'def' name|'_flavor_get_by_flavor_id_from_db' op|'(' name|'context' op|',' name|'flavor_id' op|')' op|':' newline|'\n' indent|' ' string|'"""Returns a dict describing specific flavor_id."""' newline|'\n' name|'result' op|'=' name|'Flavor' op|'.' name|'_flavor_get_query_from_db' op|'(' name|'context' op|')' op|'.' name|'filter_by' op|'(' name|'flavorid' op|'=' name|'flavor_id' op|')' op|'.' name|'order_by' op|'(' name|'asc' op|'(' name|'api_models' op|'.' name|'Flavors' op|'.' name|'id' op|')' op|')' op|'.' name|'first' op|'(' op|')' newline|'\n' name|'if' name|'not' name|'result' op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' name|'FlavorNotFound' op|'(' name|'flavor_id' op|'=' name|'flavor_id' op|')' newline|'\n' dedent|'' name|'return' name|'db_api' op|'.' name|'_dict_with_extra_specs' op|'(' name|'result' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'staticmethod' newline|'\n' DECL|member|_get_projects_from_db name|'def' name|'_get_projects_from_db' op|'(' name|'context' op|',' name|'flavorid' op|')' op|':' newline|'\n' indent|' ' name|'return' name|'_get_projects_from_db' op|'(' name|'context' op|',' name|'flavorid' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'base' op|'.' name|'remotable' newline|'\n' DECL|member|_load_projects name|'def' name|'_load_projects' op|'(' name|'self' op|')' op|':' newline|'\n' indent|' ' name|'try' op|':' newline|'\n' indent|' ' name|'self' op|'.' name|'projects' op|'=' name|'self' op|'.' name|'_get_projects_from_db' op|'(' name|'self' op|'.' name|'_context' op|',' nl|'\n' name|'self' op|'.' name|'flavorid' op|')' newline|'\n' dedent|'' name|'except' name|'exception' op|'.' name|'FlavorNotFound' op|':' newline|'\n' indent|' ' name|'self' op|'.' 
name|'projects' op|'=' op|'[' name|'x' op|'[' string|"'project_id'" op|']' name|'for' name|'x' name|'in' nl|'\n' name|'db' op|'.' name|'flavor_access_get_by_flavor_id' op|'(' name|'self' op|'.' name|'_context' op|',' nl|'\n' name|'self' op|'.' name|'flavorid' op|')' op|']' newline|'\n' dedent|'' name|'self' op|'.' name|'obj_reset_changes' op|'(' op|'[' string|"'projects'" op|']' op|')' newline|'\n' nl|'\n' DECL|member|obj_load_attr dedent|'' name|'def' name|'obj_load_attr' op|'(' name|'self' op|',' name|'attrname' op|')' op|':' newline|'\n' comment|'# NOTE(danms): Only projects could be lazy-loaded right now' nl|'\n' indent|' ' name|'if' name|'attrname' op|'!=' string|"'projects'" op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' name|'ObjectActionError' op|'(' nl|'\n' name|'action' op|'=' string|"'obj_load_attr'" op|',' name|'reason' op|'=' string|"'unable to load %s'" op|'%' name|'attrname' op|')' newline|'\n' nl|'\n' dedent|'' name|'self' op|'.' name|'_load_projects' op|'(' op|')' newline|'\n' nl|'\n' DECL|member|obj_reset_changes dedent|'' name|'def' name|'obj_reset_changes' op|'(' name|'self' op|',' name|'fields' op|'=' name|'None' op|',' name|'recursive' op|'=' name|'False' op|')' op|':' newline|'\n' indent|' ' name|'super' op|'(' name|'Flavor' op|',' name|'self' op|')' op|'.' name|'obj_reset_changes' op|'(' name|'fields' op|'=' name|'fields' op|',' nl|'\n' name|'recursive' op|'=' name|'recursive' op|')' newline|'\n' name|'if' name|'fields' name|'is' name|'None' name|'or' string|"'extra_specs'" name|'in' name|'fields' op|':' newline|'\n' indent|' ' name|'self' op|'.' name|'_orig_extra_specs' op|'=' op|'(' name|'dict' op|'(' name|'self' op|'.' name|'extra_specs' op|')' nl|'\n' name|'if' name|'self' op|'.' 
name|'obj_attr_is_set' op|'(' string|"'extra_specs'" op|')' nl|'\n' name|'else' op|'{' op|'}' op|')' newline|'\n' dedent|'' name|'if' name|'fields' name|'is' name|'None' name|'or' string|"'projects'" name|'in' name|'fields' op|':' newline|'\n' indent|' ' name|'self' op|'.' name|'_orig_projects' op|'=' op|'(' name|'list' op|'(' name|'self' op|'.' name|'projects' op|')' nl|'\n' name|'if' name|'self' op|'.' name|'obj_attr_is_set' op|'(' string|"'projects'" op|')' nl|'\n' name|'else' op|'[' op|']' op|')' newline|'\n' nl|'\n' DECL|member|obj_what_changed dedent|'' dedent|'' name|'def' name|'obj_what_changed' op|'(' name|'self' op|')' op|':' newline|'\n' indent|' ' name|'changes' op|'=' name|'super' op|'(' name|'Flavor' op|',' name|'self' op|')' op|'.' name|'obj_what_changed' op|'(' op|')' newline|'\n' name|'if' op|'(' string|"'extra_specs'" name|'in' name|'self' name|'and' nl|'\n' name|'self' op|'.' name|'extra_specs' op|'!=' name|'self' op|'.' name|'_orig_extra_specs' op|')' op|':' newline|'\n' indent|' ' name|'changes' op|'.' name|'add' op|'(' string|"'extra_specs'" op|')' newline|'\n' dedent|'' name|'if' string|"'projects'" name|'in' name|'self' name|'and' name|'self' op|'.' name|'projects' op|'!=' name|'self' op|'.' name|'_orig_projects' op|':' newline|'\n' indent|' ' name|'changes' op|'.' name|'add' op|'(' string|"'projects'" op|')' newline|'\n' dedent|'' name|'return' name|'changes' newline|'\n' nl|'\n' dedent|'' op|'@' name|'classmethod' newline|'\n' DECL|member|_obj_from_primitive name|'def' name|'_obj_from_primitive' op|'(' name|'cls' op|',' name|'context' op|',' name|'objver' op|',' name|'primitive' op|')' op|':' newline|'\n' indent|' ' name|'self' op|'=' name|'super' op|'(' name|'Flavor' op|',' name|'cls' op|')' op|'.' name|'_obj_from_primitive' op|'(' name|'context' op|',' name|'objver' op|',' nl|'\n' name|'primitive' op|')' newline|'\n' name|'changes' op|'=' name|'self' op|'.' 
name|'obj_what_changed' op|'(' op|')' newline|'\n' name|'if' string|"'extra_specs'" name|'not' name|'in' name|'changes' op|':' newline|'\n' comment|'# This call left extra_specs "clean" so update our tracker' nl|'\n' indent|' ' name|'self' op|'.' name|'_orig_extra_specs' op|'=' op|'(' name|'dict' op|'(' name|'self' op|'.' name|'extra_specs' op|')' nl|'\n' name|'if' name|'self' op|'.' name|'obj_attr_is_set' op|'(' string|"'extra_specs'" op|')' nl|'\n' name|'else' op|'{' op|'}' op|')' newline|'\n' dedent|'' name|'if' string|"'projects'" name|'not' name|'in' name|'changes' op|':' newline|'\n' comment|'# This call left projects "clean" so update our tracker' nl|'\n' indent|' ' name|'self' op|'.' name|'_orig_projects' op|'=' op|'(' name|'list' op|'(' name|'self' op|'.' name|'projects' op|')' nl|'\n' name|'if' name|'self' op|'.' name|'obj_attr_is_set' op|'(' string|"'projects'" op|')' nl|'\n' name|'else' op|'[' op|']' op|')' newline|'\n' dedent|'' name|'return' name|'self' newline|'\n' nl|'\n' dedent|'' op|'@' name|'base' op|'.' name|'remotable_classmethod' newline|'\n' DECL|member|get_by_id name|'def' name|'get_by_id' op|'(' name|'cls' op|',' name|'context' op|',' name|'id' op|')' op|':' newline|'\n' indent|' ' name|'try' op|':' newline|'\n' indent|' ' name|'db_flavor' op|'=' name|'cls' op|'.' name|'_flavor_get_from_db' op|'(' name|'context' op|',' name|'id' op|')' newline|'\n' dedent|'' name|'except' name|'exception' op|'.' name|'FlavorNotFound' op|':' newline|'\n' indent|' ' name|'db_flavor' op|'=' name|'db' op|'.' name|'flavor_get' op|'(' name|'context' op|',' name|'id' op|')' newline|'\n' dedent|'' name|'return' name|'cls' op|'.' name|'_from_db_object' op|'(' name|'context' op|',' name|'cls' op|'(' name|'context' op|')' op|',' name|'db_flavor' op|',' nl|'\n' name|'expected_attrs' op|'=' op|'[' string|"'extra_specs'" op|']' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'base' op|'.' 
name|'remotable_classmethod' newline|'\n' DECL|member|get_by_name name|'def' name|'get_by_name' op|'(' name|'cls' op|',' name|'context' op|',' name|'name' op|')' op|':' newline|'\n' indent|' ' name|'try' op|':' newline|'\n' indent|' ' name|'db_flavor' op|'=' name|'cls' op|'.' name|'_flavor_get_by_name_from_db' op|'(' name|'context' op|',' name|'name' op|')' newline|'\n' dedent|'' name|'except' name|'exception' op|'.' name|'FlavorNotFoundByName' op|':' newline|'\n' indent|' ' name|'db_flavor' op|'=' name|'db' op|'.' name|'flavor_get_by_name' op|'(' name|'context' op|',' name|'name' op|')' newline|'\n' dedent|'' name|'return' name|'cls' op|'.' name|'_from_db_object' op|'(' name|'context' op|',' name|'cls' op|'(' name|'context' op|')' op|',' name|'db_flavor' op|',' nl|'\n' name|'expected_attrs' op|'=' op|'[' string|"'extra_specs'" op|']' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'base' op|'.' name|'remotable_classmethod' newline|'\n' DECL|member|get_by_flavor_id name|'def' name|'get_by_flavor_id' op|'(' name|'cls' op|',' name|'context' op|',' name|'flavor_id' op|',' name|'read_deleted' op|'=' name|'None' op|')' op|':' newline|'\n' indent|' ' name|'try' op|':' newline|'\n' indent|' ' name|'db_flavor' op|'=' name|'cls' op|'.' name|'_flavor_get_by_flavor_id_from_db' op|'(' name|'context' op|',' nl|'\n' name|'flavor_id' op|')' newline|'\n' dedent|'' name|'except' name|'exception' op|'.' name|'FlavorNotFound' op|':' newline|'\n' indent|' ' name|'db_flavor' op|'=' name|'db' op|'.' name|'flavor_get_by_flavor_id' op|'(' name|'context' op|',' name|'flavor_id' op|',' nl|'\n' name|'read_deleted' op|')' newline|'\n' dedent|'' name|'return' name|'cls' op|'.' 
name|'_from_db_object' op|'(' name|'context' op|',' name|'cls' op|'(' name|'context' op|')' op|',' name|'db_flavor' op|',' nl|'\n' name|'expected_attrs' op|'=' op|'[' string|"'extra_specs'" op|']' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'staticmethod' newline|'\n' DECL|member|_flavor_add_project name|'def' name|'_flavor_add_project' op|'(' name|'context' op|',' name|'flavor_id' op|',' name|'project_id' op|')' op|':' newline|'\n' indent|' ' name|'return' name|'_flavor_add_project' op|'(' name|'context' op|',' name|'flavor_id' op|',' name|'project_id' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'staticmethod' newline|'\n' DECL|member|_flavor_del_project name|'def' name|'_flavor_del_project' op|'(' name|'context' op|',' name|'flavor_id' op|',' name|'project_id' op|')' op|':' newline|'\n' indent|' ' name|'return' name|'_flavor_del_project' op|'(' name|'context' op|',' name|'flavor_id' op|',' name|'project_id' op|')' newline|'\n' nl|'\n' DECL|member|_add_access dedent|'' name|'def' name|'_add_access' op|'(' name|'self' op|',' name|'project_id' op|')' op|':' newline|'\n' indent|' ' name|'if' name|'self' op|'.' name|'in_api' op|':' newline|'\n' indent|' ' name|'self' op|'.' name|'_flavor_add_project' op|'(' name|'self' op|'.' name|'_context' op|',' name|'self' op|'.' name|'id' op|',' name|'project_id' op|')' newline|'\n' dedent|'' name|'else' op|':' newline|'\n' indent|' ' name|'db' op|'.' name|'flavor_access_add' op|'(' name|'self' op|'.' name|'_context' op|',' name|'self' op|'.' name|'flavorid' op|',' name|'project_id' op|')' newline|'\n' nl|'\n' dedent|'' dedent|'' op|'@' name|'base' op|'.' name|'remotable' newline|'\n' DECL|member|add_access name|'def' name|'add_access' op|'(' name|'self' op|',' name|'project_id' op|')' op|':' newline|'\n' indent|' ' name|'if' string|"'projects'" name|'in' name|'self' op|'.' name|'obj_what_changed' op|'(' op|')' op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' 
name|'ObjectActionError' op|'(' name|'action' op|'=' string|"'add_access'" op|',' nl|'\n' name|'reason' op|'=' string|"'projects modified'" op|')' newline|'\n' dedent|'' name|'self' op|'.' name|'_add_access' op|'(' name|'project_id' op|')' newline|'\n' name|'self' op|'.' name|'_load_projects' op|'(' op|')' newline|'\n' nl|'\n' DECL|member|_remove_access dedent|'' name|'def' name|'_remove_access' op|'(' name|'self' op|',' name|'project_id' op|')' op|':' newline|'\n' indent|' ' name|'if' name|'self' op|'.' name|'in_api' op|':' newline|'\n' indent|' ' name|'self' op|'.' name|'_flavor_del_project' op|'(' name|'self' op|'.' name|'_context' op|',' name|'self' op|'.' name|'id' op|',' name|'project_id' op|')' newline|'\n' dedent|'' name|'else' op|':' newline|'\n' indent|' ' name|'db' op|'.' name|'flavor_access_remove' op|'(' name|'self' op|'.' name|'_context' op|',' name|'self' op|'.' name|'flavorid' op|',' name|'project_id' op|')' newline|'\n' nl|'\n' dedent|'' dedent|'' op|'@' name|'base' op|'.' name|'remotable' newline|'\n' DECL|member|remove_access name|'def' name|'remove_access' op|'(' name|'self' op|',' name|'project_id' op|')' op|':' newline|'\n' indent|' ' name|'if' string|"'projects'" name|'in' name|'self' op|'.' name|'obj_what_changed' op|'(' op|')' op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' name|'ObjectActionError' op|'(' name|'action' op|'=' string|"'remove_access'" op|',' nl|'\n' name|'reason' op|'=' string|"'projects modified'" op|')' newline|'\n' dedent|'' name|'self' op|'.' name|'_remove_access' op|'(' name|'project_id' op|')' newline|'\n' name|'self' op|'.' 
name|'_load_projects' op|'(' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'staticmethod' newline|'\n' DECL|member|_flavor_create name|'def' name|'_flavor_create' op|'(' name|'context' op|',' name|'updates' op|')' op|':' newline|'\n' indent|' ' name|'return' name|'_flavor_create' op|'(' name|'context' op|',' name|'updates' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'staticmethod' newline|'\n' DECL|member|_ensure_migrated name|'def' name|'_ensure_migrated' op|'(' name|'context' op|')' op|':' newline|'\n' indent|' ' name|'return' name|'_ensure_migrated' op|'(' name|'context' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'base' op|'.' name|'remotable' newline|'\n' DECL|member|create name|'def' name|'create' op|'(' name|'self' op|')' op|':' newline|'\n' indent|' ' name|'if' name|'self' op|'.' name|'obj_attr_is_set' op|'(' string|"'id'" op|')' op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' name|'ObjectActionError' op|'(' name|'action' op|'=' string|"'create'" op|',' nl|'\n' name|'reason' op|'=' string|"'already created'" op|')' newline|'\n' nl|'\n' comment|'# NOTE(danms): Once we have made it past a point where we know' nl|'\n' comment|'# all flavors have been migrated, we can remove this. Ideally' nl|'\n' comment|'# in Ocata with a blocker migration to be sure.' nl|'\n' dedent|'' name|'if' name|'not' name|'self' op|'.' name|'_ensure_migrated' op|'(' name|'self' op|'.' name|'_context' op|')' op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' name|'ObjectActionError' op|'(' nl|'\n' name|'action' op|'=' string|"'create'" op|',' nl|'\n' name|'reason' op|'=' string|"'main database still contains flavors'" op|')' newline|'\n' nl|'\n' dedent|'' name|'updates' op|'=' name|'self' op|'.' 
name|'obj_get_changes' op|'(' op|')' newline|'\n' name|'expected_attrs' op|'=' op|'[' op|']' newline|'\n' name|'for' name|'attr' name|'in' name|'OPTIONAL_FIELDS' op|':' newline|'\n' indent|' ' name|'if' name|'attr' name|'in' name|'updates' op|':' newline|'\n' indent|' ' name|'expected_attrs' op|'.' name|'append' op|'(' name|'attr' op|')' newline|'\n' dedent|'' dedent|'' name|'db_flavor' op|'=' name|'self' op|'.' name|'_flavor_create' op|'(' name|'self' op|'.' name|'_context' op|',' name|'updates' op|')' newline|'\n' name|'self' op|'.' name|'_from_db_object' op|'(' name|'self' op|'.' name|'_context' op|',' name|'self' op|',' name|'db_flavor' op|',' nl|'\n' name|'expected_attrs' op|'=' name|'expected_attrs' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'base' op|'.' name|'remotable' newline|'\n' DECL|member|save_projects name|'def' name|'save_projects' op|'(' name|'self' op|',' name|'to_add' op|'=' name|'None' op|',' name|'to_delete' op|'=' name|'None' op|')' op|':' newline|'\n' indent|' ' string|'"""Add or delete projects.\n\n :param:to_add: A list of projects to add\n :param:to_delete: A list of projects to remove\n """' newline|'\n' nl|'\n' name|'to_add' op|'=' name|'to_add' name|'if' name|'to_add' name|'is' name|'not' name|'None' name|'else' op|'[' op|']' newline|'\n' name|'to_delete' op|'=' name|'to_delete' name|'if' name|'to_delete' name|'is' name|'not' name|'None' name|'else' op|'[' op|']' newline|'\n' nl|'\n' name|'for' name|'project_id' name|'in' name|'to_add' op|':' newline|'\n' indent|' ' name|'self' op|'.' name|'_add_access' op|'(' name|'project_id' op|')' newline|'\n' dedent|'' name|'for' name|'project_id' name|'in' name|'to_delete' op|':' newline|'\n' indent|' ' name|'self' op|'.' name|'_remove_access' op|'(' name|'project_id' op|')' newline|'\n' dedent|'' name|'self' op|'.' 
name|'obj_reset_changes' op|'(' op|'[' string|"'projects'" op|']' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'staticmethod' newline|'\n' DECL|member|_flavor_extra_specs_add name|'def' name|'_flavor_extra_specs_add' op|'(' name|'context' op|',' name|'flavor_id' op|',' name|'specs' op|',' name|'max_retries' op|'=' number|'10' op|')' op|':' newline|'\n' indent|' ' name|'return' name|'_flavor_extra_specs_add' op|'(' name|'context' op|',' name|'flavor_id' op|',' name|'specs' op|',' name|'max_retries' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'staticmethod' newline|'\n' DECL|member|_flavor_extra_specs_del name|'def' name|'_flavor_extra_specs_del' op|'(' name|'context' op|',' name|'flavor_id' op|',' name|'key' op|')' op|':' newline|'\n' indent|' ' name|'return' name|'_flavor_extra_specs_del' op|'(' name|'context' op|',' name|'flavor_id' op|',' name|'key' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'base' op|'.' name|'remotable' newline|'\n' DECL|member|save_extra_specs name|'def' name|'save_extra_specs' op|'(' name|'self' op|',' name|'to_add' op|'=' name|'None' op|',' name|'to_delete' op|'=' name|'None' op|')' op|':' newline|'\n' indent|' ' string|'"""Add or delete extra_specs.\n\n :param:to_add: A dict of new keys to add/update\n :param:to_delete: A list of keys to remove\n """' newline|'\n' nl|'\n' name|'if' name|'self' op|'.' name|'in_api' op|':' newline|'\n' indent|' ' name|'add_fn' op|'=' name|'self' op|'.' name|'_flavor_extra_specs_add' newline|'\n' name|'del_fn' op|'=' name|'self' op|'.' name|'_flavor_extra_specs_del' newline|'\n' name|'ident' op|'=' name|'self' op|'.' name|'id' newline|'\n' dedent|'' name|'else' op|':' newline|'\n' indent|' ' name|'add_fn' op|'=' name|'db' op|'.' name|'flavor_extra_specs_update_or_create' newline|'\n' name|'del_fn' op|'=' name|'db' op|'.' name|'flavor_extra_specs_delete' newline|'\n' name|'ident' op|'=' name|'self' op|'.' 
name|'flavorid' newline|'\n' nl|'\n' dedent|'' name|'to_add' op|'=' name|'to_add' name|'if' name|'to_add' name|'is' name|'not' name|'None' name|'else' op|'{' op|'}' newline|'\n' name|'to_delete' op|'=' name|'to_delete' name|'if' name|'to_delete' name|'is' name|'not' name|'None' name|'else' op|'[' op|']' newline|'\n' nl|'\n' name|'if' name|'to_add' op|':' newline|'\n' indent|' ' name|'add_fn' op|'(' name|'self' op|'.' name|'_context' op|',' name|'ident' op|',' name|'to_add' op|')' newline|'\n' nl|'\n' dedent|'' name|'for' name|'key' name|'in' name|'to_delete' op|':' newline|'\n' indent|' ' name|'del_fn' op|'(' name|'self' op|'.' name|'_context' op|',' name|'ident' op|',' name|'key' op|')' newline|'\n' dedent|'' name|'self' op|'.' name|'obj_reset_changes' op|'(' op|'[' string|"'extra_specs'" op|']' op|')' newline|'\n' nl|'\n' DECL|member|save dedent|'' name|'def' name|'save' op|'(' name|'self' op|')' op|':' newline|'\n' indent|' ' name|'updates' op|'=' name|'self' op|'.' name|'obj_get_changes' op|'(' op|')' newline|'\n' name|'projects' op|'=' name|'updates' op|'.' name|'pop' op|'(' string|"'projects'" op|',' name|'None' op|')' newline|'\n' name|'extra_specs' op|'=' name|'updates' op|'.' name|'pop' op|'(' string|"'extra_specs'" op|',' name|'None' op|')' newline|'\n' name|'if' name|'updates' op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' name|'ObjectActionError' op|'(' nl|'\n' name|'action' op|'=' string|"'save'" op|',' name|'reason' op|'=' string|"'read-only fields were changed'" op|')' newline|'\n' nl|'\n' dedent|'' name|'if' name|'extra_specs' name|'is' name|'not' name|'None' op|':' newline|'\n' indent|' ' name|'deleted_keys' op|'=' op|'(' name|'set' op|'(' name|'self' op|'.' name|'_orig_extra_specs' op|'.' name|'keys' op|'(' op|')' op|')' op|'-' nl|'\n' name|'set' op|'(' name|'extra_specs' op|'.' name|'keys' op|'(' op|')' op|')' op|')' newline|'\n' name|'added_keys' op|'=' name|'self' op|'.' 
name|'extra_specs' newline|'\n' dedent|'' name|'else' op|':' newline|'\n' indent|' ' name|'added_keys' op|'=' name|'deleted_keys' op|'=' name|'None' newline|'\n' nl|'\n' dedent|'' name|'if' name|'projects' name|'is' name|'not' name|'None' op|':' newline|'\n' indent|' ' name|'deleted_projects' op|'=' name|'set' op|'(' name|'self' op|'.' name|'_orig_projects' op|')' op|'-' name|'set' op|'(' name|'projects' op|')' newline|'\n' name|'added_projects' op|'=' name|'set' op|'(' name|'projects' op|')' op|'-' name|'set' op|'(' name|'self' op|'.' name|'_orig_projects' op|')' newline|'\n' dedent|'' name|'else' op|':' newline|'\n' indent|' ' name|'added_projects' op|'=' name|'deleted_projects' op|'=' name|'None' newline|'\n' nl|'\n' comment|'# NOTE(danms): The first remotable method we call will reset' nl|'\n' comment|'# our of the original values for projects and extra_specs. Thus,' nl|'\n' comment|'# we collect the added/deleted lists for both above and /then/' nl|'\n' comment|'# call these methods to update them.' nl|'\n' nl|'\n' dedent|'' name|'if' name|'added_keys' name|'or' name|'deleted_keys' op|':' newline|'\n' indent|' ' name|'self' op|'.' name|'save_extra_specs' op|'(' name|'self' op|'.' name|'extra_specs' op|',' name|'deleted_keys' op|')' newline|'\n' nl|'\n' dedent|'' name|'if' name|'added_projects' name|'or' name|'deleted_projects' op|':' newline|'\n' indent|' ' name|'self' op|'.' name|'save_projects' op|'(' name|'added_projects' op|',' name|'deleted_projects' op|')' newline|'\n' nl|'\n' dedent|'' dedent|'' op|'@' name|'staticmethod' newline|'\n' DECL|member|_flavor_destroy name|'def' name|'_flavor_destroy' op|'(' name|'context' op|',' name|'flavor_id' op|'=' name|'None' op|',' name|'name' op|'=' name|'None' op|')' op|':' newline|'\n' indent|' ' name|'return' name|'_flavor_destroy' op|'(' name|'context' op|',' name|'flavor_id' op|'=' name|'flavor_id' op|',' name|'name' op|'=' name|'name' op|')' newline|'\n' nl|'\n' dedent|'' op|'@' name|'base' op|'.' 
name|'remotable' newline|'\n' DECL|member|destroy name|'def' name|'destroy' op|'(' name|'self' op|')' op|':' newline|'\n' comment|'# NOTE(danms): Historically the only way to delete a flavor' nl|'\n' comment|'# is via name, which is not very precise. We need to be able to' nl|'\n' comment|'# support the light construction of a flavor object and subsequent' nl|'\n' comment|'# delete request with only our name filled out. However, if we have' nl|'\n' comment|"# our id property, we should instead delete with that since it's" nl|'\n' comment|'# far more specific.' nl|'\n' indent|' ' name|'try' op|':' newline|'\n' indent|' ' name|'if' string|"'id'" name|'in' name|'self' op|':' newline|'\n' indent|' ' name|'self' op|'.' name|'_flavor_destroy' op|'(' name|'self' op|'.' name|'_context' op|',' name|'flavor_id' op|'=' name|'self' op|'.' name|'id' op|')' newline|'\n' dedent|'' name|'else' op|':' newline|'\n' indent|' ' name|'self' op|'.' name|'_flavor_destroy' op|'(' name|'self' op|'.' name|'_context' op|',' name|'name' op|'=' name|'self' op|'.' name|'name' op|')' newline|'\n' dedent|'' dedent|'' name|'except' name|'exception' op|'.' name|'FlavorNotFound' op|':' newline|'\n' indent|' ' name|'db' op|'.' name|'flavor_destroy' op|'(' name|'self' op|'.' name|'_context' op|',' name|'self' op|'.' name|'name' op|')' newline|'\n' nl|'\n' nl|'\n' dedent|'' dedent|'' dedent|'' op|'@' name|'db_api' op|'.' name|'api_context_manager' op|'.' name|'reader' newline|'\n' DECL|function|_flavor_get_all_from_db name|'def' name|'_flavor_get_all_from_db' op|'(' name|'context' op|',' name|'inactive' op|',' name|'filters' op|',' name|'sort_key' op|',' name|'sort_dir' op|',' nl|'\n' name|'limit' op|',' name|'marker' op|')' op|':' newline|'\n' indent|' ' string|'"""Returns all flavors.\n """' newline|'\n' name|'filters' op|'=' name|'filters' name|'or' op|'{' op|'}' newline|'\n' nl|'\n' name|'query' op|'=' name|'Flavor' op|'.' 
name|'_flavor_get_query_from_db' op|'(' name|'context' op|')' newline|'\n' nl|'\n' name|'if' string|"'min_memory_mb'" name|'in' name|'filters' op|':' newline|'\n' indent|' ' name|'query' op|'=' name|'query' op|'.' name|'filter' op|'(' nl|'\n' name|'api_models' op|'.' name|'Flavors' op|'.' name|'memory_mb' op|'>=' name|'filters' op|'[' string|"'min_memory_mb'" op|']' op|')' newline|'\n' nl|'\n' dedent|'' name|'if' string|"'min_root_gb'" name|'in' name|'filters' op|':' newline|'\n' indent|' ' name|'query' op|'=' name|'query' op|'.' name|'filter' op|'(' nl|'\n' name|'api_models' op|'.' name|'Flavors' op|'.' name|'root_gb' op|'>=' name|'filters' op|'[' string|"'min_root_gb'" op|']' op|')' newline|'\n' nl|'\n' dedent|'' name|'if' string|"'disabled'" name|'in' name|'filters' op|':' newline|'\n' indent|' ' name|'query' op|'=' name|'query' op|'.' name|'filter' op|'(' nl|'\n' name|'api_models' op|'.' name|'Flavors' op|'.' name|'disabled' op|'==' name|'filters' op|'[' string|"'disabled'" op|']' op|')' newline|'\n' nl|'\n' dedent|'' name|'if' string|"'is_public'" name|'in' name|'filters' name|'and' name|'filters' op|'[' string|"'is_public'" op|']' name|'is' name|'not' name|'None' op|':' newline|'\n' indent|' ' name|'the_filter' op|'=' op|'[' name|'api_models' op|'.' name|'Flavors' op|'.' name|'is_public' op|'==' name|'filters' op|'[' string|"'is_public'" op|']' op|']' newline|'\n' name|'if' name|'filters' op|'[' string|"'is_public'" op|']' name|'and' name|'context' op|'.' name|'project_id' name|'is' name|'not' name|'None' op|':' newline|'\n' indent|' ' name|'the_filter' op|'.' name|'extend' op|'(' op|'[' name|'api_models' op|'.' name|'Flavors' op|'.' name|'projects' op|'.' name|'any' op|'(' nl|'\n' name|'project_id' op|'=' name|'context' op|'.' name|'project_id' op|')' op|']' op|')' newline|'\n' dedent|'' name|'if' name|'len' op|'(' name|'the_filter' op|')' op|'>' number|'1' op|':' newline|'\n' indent|' ' name|'query' op|'=' name|'query' op|'.' 
name|'filter' op|'(' name|'or_' op|'(' op|'*' name|'the_filter' op|')' op|')' newline|'\n' dedent|'' name|'else' op|':' newline|'\n' indent|' ' name|'query' op|'=' name|'query' op|'.' name|'filter' op|'(' name|'the_filter' op|'[' number|'0' op|']' op|')' newline|'\n' dedent|'' dedent|'' name|'marker_row' op|'=' name|'None' newline|'\n' name|'if' name|'marker' name|'is' name|'not' name|'None' op|':' newline|'\n' indent|' ' name|'marker_row' op|'=' name|'Flavor' op|'.' name|'_flavor_get_query_from_db' op|'(' name|'context' op|')' op|'.' name|'filter_by' op|'(' name|'flavorid' op|'=' name|'marker' op|')' op|'.' name|'first' op|'(' op|')' newline|'\n' name|'if' name|'not' name|'marker_row' op|':' newline|'\n' indent|' ' name|'raise' name|'exception' op|'.' name|'MarkerNotFound' op|'(' name|'marker' op|')' newline|'\n' nl|'\n' dedent|'' dedent|'' name|'query' op|'=' name|'sqlalchemyutils' op|'.' name|'paginate_query' op|'(' name|'query' op|',' name|'api_models' op|'.' name|'Flavors' op|',' nl|'\n' name|'limit' op|',' nl|'\n' op|'[' name|'sort_key' op|',' string|"'id'" op|']' op|',' nl|'\n' name|'marker' op|'=' name|'marker_row' op|',' nl|'\n' name|'sort_dir' op|'=' name|'sort_dir' op|')' newline|'\n' name|'return' op|'[' name|'_dict_with_extra_specs' op|'(' name|'i' op|')' name|'for' name|'i' name|'in' name|'query' op|'.' name|'all' op|'(' op|')' op|']' newline|'\n' nl|'\n' nl|'\n' dedent|'' op|'@' name|'base' op|'.' name|'NovaObjectRegistry' op|'.' name|'register' newline|'\n' DECL|class|FlavorList name|'class' name|'FlavorList' op|'(' name|'base' op|'.' name|'ObjectListBase' op|',' name|'base' op|'.' name|'NovaObject' op|')' op|':' newline|'\n' DECL|variable|VERSION indent|' ' name|'VERSION' op|'=' string|"'1.1'" newline|'\n' nl|'\n' DECL|variable|fields name|'fields' op|'=' op|'{' nl|'\n' string|"'objects'" op|':' name|'fields' op|'.' name|'ListOfObjectsField' op|'(' string|"'Flavor'" op|')' op|',' nl|'\n' op|'}' newline|'\n' nl|'\n' op|'@' name|'base' op|'.' 
name|'remotable_classmethod' newline|'\n' DECL|member|get_all name|'def' name|'get_all' op|'(' name|'cls' op|',' name|'context' op|',' name|'inactive' op|'=' name|'False' op|',' name|'filters' op|'=' name|'None' op|',' nl|'\n' name|'sort_key' op|'=' string|"'flavorid'" op|',' name|'sort_dir' op|'=' string|"'asc'" op|',' name|'limit' op|'=' name|'None' op|',' name|'marker' op|'=' name|'None' op|')' op|':' newline|'\n' indent|' ' name|'try' op|':' newline|'\n' indent|' ' name|'api_db_flavors' op|'=' name|'_flavor_get_all_from_db' op|'(' name|'context' op|',' nl|'\n' name|'inactive' op|'=' name|'inactive' op|',' nl|'\n' name|'filters' op|'=' name|'filters' op|',' nl|'\n' name|'sort_key' op|'=' name|'sort_key' op|',' nl|'\n' name|'sort_dir' op|'=' name|'sort_dir' op|',' nl|'\n' name|'limit' op|'=' name|'limit' op|',' nl|'\n' name|'marker' op|'=' name|'marker' op|')' newline|'\n' comment|'# NOTE(danms): If we were asked for a marker and found it in' nl|'\n' comment|'# results from the API DB, we must continue our pagination with' nl|'\n' comment|'# just the limit (if any) to the main DB.' nl|'\n' name|'marker' op|'=' name|'None' newline|'\n' dedent|'' name|'except' name|'exception' op|'.' name|'MarkerNotFound' op|':' newline|'\n' indent|' ' name|'api_db_flavors' op|'=' op|'[' op|']' newline|'\n' nl|'\n' dedent|'' name|'if' name|'limit' name|'is' name|'not' name|'None' op|':' newline|'\n' indent|' ' name|'limit_more' op|'=' name|'limit' op|'-' name|'len' op|'(' name|'api_db_flavors' op|')' newline|'\n' dedent|'' name|'else' op|':' newline|'\n' indent|' ' name|'limit_more' op|'=' name|'None' newline|'\n' nl|'\n' dedent|'' name|'if' name|'limit_more' name|'is' name|'None' name|'or' name|'limit_more' op|'>' number|'0' op|':' newline|'\n' indent|' ' name|'db_flavors' op|'=' name|'db' op|'.' 
name|'flavor_get_all' op|'(' name|'context' op|',' name|'inactive' op|'=' name|'inactive' op|',' nl|'\n' name|'filters' op|'=' name|'filters' op|',' name|'sort_key' op|'=' name|'sort_key' op|',' nl|'\n' name|'sort_dir' op|'=' name|'sort_dir' op|',' name|'limit' op|'=' name|'limit_more' op|',' nl|'\n' name|'marker' op|'=' name|'marker' op|')' newline|'\n' dedent|'' name|'else' op|':' newline|'\n' indent|' ' name|'db_flavors' op|'=' op|'[' op|']' newline|'\n' dedent|'' name|'return' name|'base' op|'.' name|'obj_make_list' op|'(' name|'context' op|',' name|'cls' op|'(' name|'context' op|')' op|',' name|'objects' op|'.' name|'Flavor' op|',' nl|'\n' name|'api_db_flavors' op|'+' name|'db_flavors' op|',' nl|'\n' name|'expected_attrs' op|'=' op|'[' string|"'extra_specs'" op|']' op|')' newline|'\n' nl|'\n' nl|'\n' dedent|'' dedent|'' op|'@' name|'db_api' op|'.' name|'main_context_manager' op|'.' name|'reader' newline|'\n' DECL|function|_get_main_db_flavor_ids name|'def' name|'_get_main_db_flavor_ids' op|'(' name|'context' op|',' name|'limit' op|')' op|':' newline|'\n' comment|"# NOTE(danms): We don't need this imported at runtime, so" nl|'\n' comment|'# keep it separate here' nl|'\n' indent|' ' name|'from' name|'nova' op|'.' name|'db' op|'.' name|'sqlalchemy' name|'import' name|'models' newline|'\n' name|'return' op|'[' name|'x' op|'[' number|'0' op|']' name|'for' name|'x' name|'in' name|'context' op|'.' name|'session' op|'.' name|'query' op|'(' name|'models' op|'.' name|'InstanceTypes' op|'.' name|'id' op|')' op|'.' nl|'\n' name|'filter_by' op|'(' name|'deleted' op|'=' number|'0' op|')' op|'.' nl|'\n' name|'limit' op|'(' name|'limit' op|')' op|']' newline|'\n' nl|'\n' nl|'\n' dedent|'' op|'@' name|'db_api' op|'.' name|'main_context_manager' op|'.' 
name|'writer' newline|'\n' DECL|function|_destroy_flavor_hard name|'def' name|'_destroy_flavor_hard' op|'(' name|'context' op|',' name|'name' op|')' op|':' newline|'\n' comment|"# NOTE(danms): We don't need this imported at runtime, so" nl|'\n' comment|'# keep it separate here' nl|'\n' indent|' ' name|'from' name|'nova' op|'.' name|'db' op|'.' name|'sqlalchemy' name|'import' name|'models' newline|'\n' name|'context' op|'.' name|'session' op|'.' name|'query' op|'(' name|'models' op|'.' name|'InstanceTypes' op|')' op|'.' name|'filter_by' op|'(' name|'name' op|'=' name|'name' op|')' op|'.' name|'delete' op|'(' op|')' newline|'\n' nl|'\n' nl|'\n' DECL|function|migrate_flavors dedent|'' name|'def' name|'migrate_flavors' op|'(' name|'ctxt' op|',' name|'count' op|',' name|'hard_delete' op|'=' name|'False' op|')' op|':' newline|'\n' indent|' ' name|'main_db_ids' op|'=' name|'_get_main_db_flavor_ids' op|'(' name|'ctxt' op|',' name|'count' op|')' newline|'\n' name|'if' name|'not' name|'main_db_ids' op|':' newline|'\n' indent|' ' name|'return' number|'0' op|',' number|'0' newline|'\n' nl|'\n' dedent|'' name|'count_all' op|'=' name|'len' op|'(' name|'main_db_ids' op|')' newline|'\n' name|'count_hit' op|'=' number|'0' newline|'\n' nl|'\n' name|'for' name|'flavor_id' name|'in' name|'main_db_ids' op|':' newline|'\n' indent|' ' name|'try' op|':' newline|'\n' indent|' ' name|'flavor' op|'=' name|'Flavor' op|'.' name|'get_by_id' op|'(' name|'ctxt' op|',' name|'flavor_id' op|')' newline|'\n' name|'flavor_values' op|'=' op|'{' name|'field' op|':' name|'getattr' op|'(' name|'flavor' op|',' name|'field' op|')' nl|'\n' name|'for' name|'field' name|'in' name|'flavor' op|'.' name|'fields' op|'}' newline|'\n' name|'flavor' op|'.' 
name|'_flavor_create' op|'(' name|'ctxt' op|',' name|'flavor_values' op|')' newline|'\n' name|'count_hit' op|'+=' number|'1' newline|'\n' name|'if' name|'hard_delete' op|':' newline|'\n' indent|' ' name|'_destroy_flavor_hard' op|'(' name|'ctxt' op|',' name|'flavor' op|'.' name|'name' op|')' newline|'\n' dedent|'' name|'else' op|':' newline|'\n' indent|' ' name|'db' op|'.' name|'flavor_destroy' op|'(' name|'ctxt' op|',' name|'flavor' op|'.' name|'name' op|')' newline|'\n' dedent|'' dedent|'' name|'except' name|'exception' op|'.' name|'FlavorNotFound' op|':' newline|'\n' indent|' ' name|'LOG' op|'.' name|'warning' op|'(' name|'_LW' op|'(' string|"'Flavor id %(id)i disappeared during migration'" op|')' op|',' nl|'\n' op|'{' string|"'id'" op|':' name|'flavor_id' op|'}' op|')' newline|'\n' dedent|'' name|'except' op|'(' name|'exception' op|'.' name|'FlavorExists' op|',' name|'exception' op|'.' name|'FlavorIdExists' op|')' name|'as' name|'e' op|':' newline|'\n' indent|' ' name|'LOG' op|'.' name|'error' op|'(' name|'str' op|'(' name|'e' op|')' op|')' newline|'\n' nl|'\n' dedent|'' dedent|'' name|'return' name|'count_all' op|',' name|'count_hit' newline|'\n' nl|'\n' nl|'\n' DECL|function|_adjust_autoincrement dedent|'' name|'def' name|'_adjust_autoincrement' op|'(' name|'context' op|',' name|'value' op|')' op|':' newline|'\n' indent|' ' name|'engine' op|'=' name|'db_api' op|'.' name|'get_api_engine' op|'(' op|')' newline|'\n' name|'if' name|'engine' op|'.' name|'name' op|'==' string|"'postgresql'" op|':' newline|'\n' comment|'# NOTE(danms): If we migrated some flavors in the above function,' nl|'\n' comment|"# then we will have confused postgres' sequence for the autoincrement" nl|'\n' comment|'# primary key. MySQL does not care about this, but since postgres does,' nl|'\n' comment|'# we need to reset this to avoid a failure on the next flavor creation.' nl|'\n' indent|' ' name|'engine' op|'.' 
name|'execute' op|'(' nl|'\n' name|'text' op|'(' string|"'ALTER SEQUENCE flavors_id_seq RESTART WITH %i;'" op|'%' op|'(' nl|'\n' name|'value' op|')' op|')' op|')' newline|'\n' nl|'\n' nl|'\n' dedent|'' dedent|'' op|'@' name|'db_api' op|'.' name|'api_context_manager' op|'.' name|'reader' newline|'\n' DECL|function|_get_max_flavor_id name|'def' name|'_get_max_flavor_id' op|'(' name|'context' op|')' op|':' newline|'\n' indent|' ' name|'max_id' op|'=' name|'context' op|'.' name|'session' op|'.' name|'query' op|'(' name|'func' op|'.' name|'max' op|'(' name|'api_models' op|'.' name|'Flavors' op|'.' name|'id' op|')' op|')' op|'.' name|'one' op|'(' op|')' op|'[' number|'0' op|']' newline|'\n' name|'return' name|'max_id' name|'or' number|'0' newline|'\n' nl|'\n' nl|'\n' DECL|function|migrate_flavor_reset_autoincrement dedent|'' name|'def' name|'migrate_flavor_reset_autoincrement' op|'(' name|'ctxt' op|',' name|'count' op|')' op|':' newline|'\n' indent|' ' name|'max_id' op|'=' name|'_get_max_flavor_id' op|'(' name|'ctxt' op|')' newline|'\n' name|'_adjust_autoincrement' op|'(' name|'ctxt' op|',' name|'max_id' op|'+' number|'1' op|')' newline|'\n' name|'return' number|'0' op|',' number|'0' newline|'\n' dedent|'' endmarker|'' end_unit
12.511605
162
0.597573
9,582
64,685
3.924024
0.046024
0.169149
0.092553
0.065532
0.866303
0.814069
0.756569
0.696064
0.643059
0.579495
0
0.000769
0.115823
64,685
5,169
163
12.514026
0.656654
0
0
0.960534
0
0.000387
0.369174
0.017346
0
0
0
0
0
0
null
null
0.000193
0.00445
null
null
0
0
0
0
null
0
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
5
1c0022632125c586628c375e523f8a08d3babe0c
71
py
Python
trekipsum/dialog/__init__.py
infinitewarp/trekipsum
1401d5cedef5ac20e9941708d846911c7ec9ca09
[ "MIT" ]
1
2019-09-25T19:31:07.000Z
2019-09-25T19:31:07.000Z
trekipsum/dialog/__init__.py
infinitewarp/trekipsum
1401d5cedef5ac20e9941708d846911c7ec9ca09
[ "MIT" ]
2
2017-08-23T15:01:36.000Z
2018-01-26T20:24:03.000Z
trekipsum/dialog/__init__.py
infinitewarp/trekipsum
1401d5cedef5ac20e9941708d846911c7ec9ca09
[ "MIT" ]
1
2018-01-25T15:17:13.000Z
2018-01-25T15:17:13.000Z
from .sqlite import DialogChooser as SqliteRandomChooser # noqa: F401
35.5
70
0.816901
8
71
7.25
1
0
0
0
0
0
0
0
0
0
0
0.04918
0.140845
71
1
71
71
0.901639
0.140845
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
1c1e94a6b87cd3fde319757c1600fa4d0a6ba15b
50
py
Python
archives/views/__init__.py
by46/archives
b97e2e2d5f413a7702a8980bcb07bd5b46d5a3f8
[ "MIT" ]
null
null
null
archives/views/__init__.py
by46/archives
b97e2e2d5f413a7702a8980bcb07bd5b46d5a3f8
[ "MIT" ]
null
null
null
archives/views/__init__.py
by46/archives
b97e2e2d5f413a7702a8980bcb07bd5b46d5a3f8
[ "MIT" ]
null
null
null
from . import catalogue from .hooks import hook
16.666667
24
0.76
7
50
5.428571
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.2
50
2
25
25
0.95
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
1c2ad84bd58333fca1a453e809b5571f7a6e794d
149
py
Python
average_scores.py
Kunalpod/codewars
8dc1af2f3c70e209471045118fd88b3ea1e627e5
[ "MIT" ]
null
null
null
average_scores.py
Kunalpod/codewars
8dc1af2f3c70e209471045118fd88b3ea1e627e5
[ "MIT" ]
null
null
null
average_scores.py
Kunalpod/codewars
8dc1af2f3c70e209471045118fd88b3ea1e627e5
[ "MIT" ]
null
null
null
#Kunal Gautam #Codewars : @Kunalpod #Problem name: Average Scores #Problem level: 7 kyu def average(array): return round(sum(array)/len(array))
18.625
39
0.731544
21
149
5.190476
0.809524
0
0
0
0
0
0
0
0
0
0
0.007874
0.147651
149
7
40
21.285714
0.850394
0.536913
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
1c3e1ac1ffbcaae6a149c06bab5c2378e1b7ba27
5,010
py
Python
bitflow-script/generated/python2/BitflowLexer.py
bitflow-stream/bitflow-script
b8427c02f65820108ca08356f9483e400fcbcaee
[ "Apache-2.0" ]
null
null
null
bitflow-script/generated/python2/BitflowLexer.py
bitflow-stream/bitflow-script
b8427c02f65820108ca08356f9483e400fcbcaee
[ "Apache-2.0" ]
null
null
null
bitflow-script/generated/python2/BitflowLexer.py
bitflow-stream/bitflow-script
b8427c02f65820108ca08356f9483e400fcbcaee
[ "Apache-2.0" ]
null
null
null
# Generated from Bitflow.g4 by ANTLR 4.7.1 # encoding: utf-8 from __future__ import print_function from antlr4 import * from io import StringIO import sys def serializedATN(): with StringIO() as buf: buf.write(u"\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2") buf.write(u"\23|\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") buf.write(u"\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t") buf.write(u"\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22") buf.write(u"\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\5\3\6\3\6\3\7\3\7") buf.write(u"\3\b\3\b\3\t\3\t\3\n\3\n\3\13\3\13\3\f\3\f\3\f\3\f\3") buf.write(u"\f\3\f\3\r\3\r\7\rC\n\r\f\r\16\rF\13\r\3\r\3\r\3\r\7") buf.write(u"\rK\n\r\f\r\16\rN\13\r\3\r\3\r\3\r\7\rS\n\r\f\r\16\r") buf.write(u"V\13\r\3\r\5\rY\n\r\3\16\6\16\\\n\16\r\16\16\16]\3\17") buf.write(u"\3\17\7\17b\n\17\f\17\16\17e\13\17\3\17\3\17\3\17\3\17") buf.write(u"\3\20\3\20\3\20\5\20n\n\20\3\20\3\20\3\21\3\21\3\21\5") buf.write(u"\21u\n\21\3\21\3\21\3\22\3\22\3\22\3\22\5DLT\2\23\3\3") buf.write(u"\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16") buf.write(u"\33\17\35\20\37\21!\22#\23\3\2\4\n\2\'(,-/<AAC\\^^aa") buf.write(u"c|\4\2\f\f\17\17\2\u0084\2\3\3\2\2\2\2\5\3\2\2\2\2\7") buf.write(u"\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3") buf.write(u"\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3") buf.write(u"\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3") buf.write(u"\2\2\2\2!\3\2\2\2\2#\3\2\2\2\3%\3\2\2\2\5\'\3\2\2\2\7") buf.write(u")\3\2\2\2\t+\3\2\2\2\13.\3\2\2\2\r\60\3\2\2\2\17\62\3") buf.write(u"\2\2\2\21\64\3\2\2\2\23\66\3\2\2\2\258\3\2\2\2\27:\3") buf.write(u"\2\2\2\31X\3\2\2\2\33[\3\2\2\2\35_\3\2\2\2\37m\3\2\2") buf.write(u"\2!t\3\2\2\2#x\3\2\2\2%&\7}\2\2&\4\3\2\2\2\'(\7\177\2") buf.write(u"\2(\6\3\2\2\2)*\7=\2\2*\b\3\2\2\2+,\7/\2\2,-\7@\2\2-") buf.write(u"\n\3\2\2\2./\7*\2\2/\f\3\2\2\2\60\61\7+\2\2\61\16\3\2") buf.write(u"\2\2\62\63\7?\2\2\63\20\3\2\2\2\64\65\7.\2\2\65\22\3") 
buf.write(u"\2\2\2\66\67\7]\2\2\67\24\3\2\2\289\7_\2\29\26\3\2\2") buf.write(u"\2:;\7d\2\2;<\7c\2\2<=\7v\2\2=>\7e\2\2>?\7j\2\2?\30\3") buf.write(u"\2\2\2@D\7$\2\2AC\13\2\2\2BA\3\2\2\2CF\3\2\2\2DE\3\2") buf.write(u"\2\2DB\3\2\2\2EG\3\2\2\2FD\3\2\2\2GY\7$\2\2HL\7)\2\2") buf.write(u"IK\13\2\2\2JI\3\2\2\2KN\3\2\2\2LM\3\2\2\2LJ\3\2\2\2M") buf.write(u"O\3\2\2\2NL\3\2\2\2OY\7)\2\2PT\7b\2\2QS\13\2\2\2RQ\3") buf.write(u"\2\2\2SV\3\2\2\2TU\3\2\2\2TR\3\2\2\2UW\3\2\2\2VT\3\2") buf.write(u"\2\2WY\7b\2\2X@\3\2\2\2XH\3\2\2\2XP\3\2\2\2Y\32\3\2\2") buf.write(u"\2Z\\\t\2\2\2[Z\3\2\2\2\\]\3\2\2\2][\3\2\2\2]^\3\2\2") buf.write(u"\2^\34\3\2\2\2_c\7%\2\2`b\n\3\2\2a`\3\2\2\2be\3\2\2\2") buf.write(u"ca\3\2\2\2cd\3\2\2\2df\3\2\2\2ec\3\2\2\2fg\5\37\20\2") buf.write(u"gh\3\2\2\2hi\b\17\2\2i\36\3\2\2\2jn\t\3\2\2kl\7\17\2") buf.write(u"\2ln\7\f\2\2mj\3\2\2\2mk\3\2\2\2no\3\2\2\2op\b\20\2\2") buf.write(u"p \3\2\2\2qu\7\"\2\2rs\7^\2\2su\7u\2\2tq\3\2\2\2tr\3") buf.write(u"\2\2\2uv\3\2\2\2vw\b\21\2\2w\"\3\2\2\2xy\7\13\2\2yz\3") buf.write(u"\2\2\2z{\b\22\2\2{$\3\2\2\2\13\2DLTX]cmt\3\b\2\2") return buf.getvalue() class BitflowLexer(Lexer): atn = ATNDeserializer().deserialize(serializedATN()) decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] OPEN = 1 CLOSE = 2 EOP = 3 NEXT = 4 OPEN_PARAMS = 5 CLOSE_PARAMS = 6 EQ = 7 SEP = 8 OPEN_HINTS = 9 CLOSE_HINTS = 10 BATCH = 11 STRING = 12 IDENTIFIER = 13 COMMENT = 14 NEWLINE = 15 WHITESPACE = 16 TAB = 17 channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] modeNames = [ u"DEFAULT_MODE" ] literalNames = [ u"<INVALID>", u"'{'", u"'}'", u"';'", u"'->'", u"'('", u"')'", u"'='", u"','", u"'['", u"']'", u"'batch'", u"'\t'" ] symbolicNames = [ u"<INVALID>", u"OPEN", u"CLOSE", u"EOP", u"NEXT", u"OPEN_PARAMS", u"CLOSE_PARAMS", u"EQ", u"SEP", u"OPEN_HINTS", u"CLOSE_HINTS", u"BATCH", u"STRING", u"IDENTIFIER", u"COMMENT", u"NEWLINE", u"WHITESPACE", u"TAB" ] ruleNames = [ u"OPEN", u"CLOSE", u"EOP", u"NEXT", u"OPEN_PARAMS", u"CLOSE_PARAMS", u"EQ", 
u"SEP", u"OPEN_HINTS", u"CLOSE_HINTS", u"BATCH", u"STRING", u"IDENTIFIER", u"COMMENT", u"NEWLINE", u"WHITESPACE", u"TAB" ] grammarFileName = u"Bitflow.g4" def __init__(self, input=None, output=sys.stdout): super(BitflowLexer, self).__init__(input, output=output) self.checkVersion("4.7.1") self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) self._actions = None self._predicates = None
46.388889
103
0.530539
1,261
5,010
2.080888
0.179223
0.133384
0.088034
0.060976
0.349848
0.288872
0.228277
0.180259
0.140244
0.132622
0
0.23396
0.172455
5,010
107
104
46.82243
0.398939
0.011178
0
0
1
0.438202
0.499899
0.432411
0
0
0
0
0
1
0.022472
false
0
0.044944
0
0.370787
0.011236
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
1
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
1c527dcb1cbd05eb3791bdd15c463efe0d6ec249
74
py
Python
scripts/setup.py
yat011/singk_qt
b0d09fc3d1b5e18e10331356e4a22e75c7fa3c01
[ "MIT" ]
null
null
null
scripts/setup.py
yat011/singk_qt
b0d09fc3d1b5e18e10331356e4a22e75c7fa3c01
[ "MIT" ]
null
null
null
scripts/setup.py
yat011/singk_qt
b0d09fc3d1b5e18e10331356e4a22e75c7fa3c01
[ "MIT" ]
null
null
null
from distutils.core import setup import py2exe setup(console=['main'])
18.5
33
0.756757
10
74
5.6
0.8
0
0
0
0
0
0
0
0
0
0
0.015625
0.135135
74
4
34
18.5
0.859375
0
0
0
0
0
0.055556
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
98dea635909b2da0e4d0530ee0b53398c8690a49
77
py
Python
nnunet/network_architecture/transunet/__init__.py
nntrongnghia/TDSI21-Shoulder-Muscle-Segmentation
29f0f83d93e4fdd8127261283dcf9242d9914ba6
[ "Apache-2.0" ]
1
2022-02-13T15:07:40.000Z
2022-02-13T15:07:40.000Z
nnunet/network_architecture/transunet/__init__.py
nntrongnghia/TDSI21-Shoulder-Muscle-Segmentation
29f0f83d93e4fdd8127261283dcf9242d9914ba6
[ "Apache-2.0" ]
null
null
null
nnunet/network_architecture/transunet/__init__.py
nntrongnghia/TDSI21-Shoulder-Muscle-Segmentation
29f0f83d93e4fdd8127261283dcf9242d9914ba6
[ "Apache-2.0" ]
2
2022-01-17T16:33:38.000Z
2022-01-17T17:13:26.000Z
print("TransUNet implementation from https://github.com/Beckschen/TransUNet")
77
77
0.831169
9
77
7.111111
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.038961
77
1
77
77
0.864865
0
0
0
0
0
0.871795
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
98f42fd69ebe7ddfc10267dcc2d3ee72578a6d8b
2,139
py
Python
trader/sequence/abstract_book_manager.py
rev3ks/Crypto-Liquid-Market-Maker
82b2c8cfd0488d12fde71691147b5f54bce93179
[ "MIT" ]
19
2018-05-04T02:36:29.000Z
2021-12-26T22:17:34.000Z
trader/sequence/abstract_book_manager.py
rev3ks/Crypto-Liquid-Market-Maker
82b2c8cfd0488d12fde71691147b5f54bce93179
[ "MIT" ]
18
2018-04-28T02:35:20.000Z
2019-11-22T04:26:47.000Z
trader/sequence/abstract_book_manager.py
rev3ks/Crypto-Liquid-Market-Maker
82b2c8cfd0488d12fde71691147b5f54bce93179
[ "MIT" ]
5
2018-04-27T03:04:34.000Z
2022-01-10T14:34:50.000Z
import abc from trader.exchange.api_wrapper.exchange_api import ExchangeApi from trader.exchange.api_wrapper.noop_trader import NoopApi from trader.exchange.persisted_book import Book class AbstractBookManager(abc.ABC): @abc.abstractmethod def load_book(self): pass @abc.abstractmethod def close_book(self): pass @abc.abstractmethod def add_orders(self, side, count, first_size, first_price, size_change): pass @abc.abstractmethod def send_orders(self): pass @abc.abstractmethod def add_and_send_order(self, side, size, price): pass @abc.abstractmethod def post_at_best_post_only(self, order): pass @abc.abstractmethod def send_order(self, order): pass @abc.abstractmethod def update_order(self, order, match_size): pass @abc.abstractmethod def cancel_all_orders(self): pass @abc.abstractmethod def cancel_order_list(self, order_list): pass @abc.abstractmethod def cancel_sent_order(self, order): pass @abc.abstractmethod def look_for_order(self, match): pass @abc.abstractmethod def cancel_order_by_attribute(self, side, size): pass @abc.abstractmethod def send_trade_sequence(self, filled_order): """ Sends trades in a sequence in response to a filled trade. The first size will be the terms minimum size and each trade following will be larger according terms size increase. :param filled_order: order to respond to :return: None """ pass @abc.abstractmethod def set_adjust_queue(self, adjust_queue): """ used to set an queue to send messages from outside to the inside of a thread running the send_trade_sequence method. The message will be used to pause or stop the thread. :param adjust_queue: a queue to carry messages :return: None """ self.adjust_queue = adjust_queue @abc.abstractmethod def get_terms_min_size(self): return self.terms.min_size @abc.abstractmethod def get_terms_size_change(self): return self.terms.size_change @abc.abstractmethod def is_trading_api_test(self): return self.trading_api.test
22.28125
78
0.73352
301
2,139
5.019934
0.299003
0.202515
0.238253
0.222369
0.352747
0.193911
0.050298
0
0
0
0
0
0.194951
2,139
95
79
22.515789
0.877468
0.215989
0
0.542373
0
0
0
0
0
0
0
0
0
1
0.305085
false
0.237288
0.067797
0.050847
0.440678
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
5
c71243ec0c84bf1d8efee1ca4ef5ed16a7edb391
48
py
Python
ichnaea/api/__init__.py
crankycoder/ichnaea
fb54000e92c605843b7a41521e36fd648c11ae94
[ "Apache-2.0" ]
348
2015-01-13T11:48:07.000Z
2022-03-31T08:33:07.000Z
ichnaea/api/__init__.py
crankycoder/ichnaea
fb54000e92c605843b7a41521e36fd648c11ae94
[ "Apache-2.0" ]
1,274
2015-01-02T18:15:56.000Z
2022-03-23T15:29:08.000Z
ichnaea/api/__init__.py
crankycoder/ichnaea
fb54000e92c605843b7a41521e36fd648c11ae94
[ "Apache-2.0" ]
149
2015-01-04T21:15:07.000Z
2021-12-10T06:05:09.000Z
""" Implementation of the public HTTP APIs. """
12
39
0.6875
6
48
5.5
1
0
0
0
0
0
0
0
0
0
0
0
0.166667
48
3
40
16
0.825
0.8125
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
c74297b1b291bcb8655cb660b6c0294ac4af5045
1,085
py
Python
tradgram/caseFiles/migrations/0005_auto_20181201_2249.py
didils/tradgram
4868ca082ab78a1b5b96f25ee9f958567bd1bb1e
[ "MIT" ]
null
null
null
tradgram/caseFiles/migrations/0005_auto_20181201_2249.py
didils/tradgram
4868ca082ab78a1b5b96f25ee9f958567bd1bb1e
[ "MIT" ]
4
2020-06-05T20:21:46.000Z
2021-09-08T00:57:42.000Z
tradgram/caseFiles/migrations/0005_auto_20181201_2249.py
didils/tradgram
4868ca082ab78a1b5b96f25ee9f958567bd1bb1e
[ "MIT" ]
null
null
null
# Generated by Django 2.0.8 on 2018-12-01 13:49 from django.db import migrations import imagekit.models.fields class Migration(migrations.Migration): dependencies = [ ('caseFiles', '0004_auto_20181201_2245'), ] operations = [ migrations.AlterField( model_name='casefile', name='file_page2', field=imagekit.models.fields.ProcessedImageField(blank=True, null=True, upload_to=''), ), migrations.AlterField( model_name='casefile', name='file_page3', field=imagekit.models.fields.ProcessedImageField(blank=True, null=True, upload_to=''), ), migrations.AlterField( model_name='casefile', name='file_page4', field=imagekit.models.fields.ProcessedImageField(blank=True, null=True, upload_to=''), ), migrations.AlterField( model_name='casefile', name='file_page5', field=imagekit.models.fields.ProcessedImageField(blank=True, null=True, upload_to=''), ), ]
31
98
0.61106
108
1,085
6
0.407407
0.108025
0.154321
0.179012
0.703704
0.703704
0.703704
0.634259
0.634259
0.634259
0
0.044081
0.268203
1,085
34
99
31.911765
0.77204
0.041475
0
0.571429
1
0
0.100193
0.022158
0
0
0
0
0
1
0
false
0
0.071429
0
0.178571
0
0
0
0
null
0
0
1
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
c75e2966f162b5819898a0791a3c766143525134
153
py
Python
venv/Lib/site-packages/pandas/tests/extension/json/__init__.py
arnoyu-hub/COMP0016miemie
59af664dcf190eab4f93cefb8471908717415fea
[ "MIT" ]
null
null
null
venv/Lib/site-packages/pandas/tests/extension/json/__init__.py
arnoyu-hub/COMP0016miemie
59af664dcf190eab4f93cefb8471908717415fea
[ "MIT" ]
null
null
null
venv/Lib/site-packages/pandas/tests/extension/json/__init__.py
arnoyu-hub/COMP0016miemie
59af664dcf190eab4f93cefb8471908717415fea
[ "MIT" ]
null
null
null
from pandas.tests.extension.json.array import ( JSONArray, JSONDtype, make_data, ) __all__ = ["JSONArray", "JSONDtype", "make_data"]
19.125
50
0.653595
16
153
5.875
0.75
0.382979
0.468085
0.553191
0
0
0
0
0
0
0
0
0.215686
153
7
51
21.857143
0.783333
0
0
0
0
0
0.184932
0
0
0
0
0
0
1
0
false
0
0.166667
0
0.166667
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
c77969bc14eede6b58977cf63f6b26b25b4c2123
4,077
py
Python
z2/part3/updated_part2_batch/jm/parser_errors_2/414514510.py
kozakusek/ipp-2020-testy
09aa008fa53d159672cc7cbf969a6b237e15a7b8
[ "MIT" ]
1
2020-04-16T12:13:47.000Z
2020-04-16T12:13:47.000Z
z2/part3/updated_part2_batch/jm/parser_errors_2/414514510.py
kozakusek/ipp-2020-testy
09aa008fa53d159672cc7cbf969a6b237e15a7b8
[ "MIT" ]
18
2020-03-06T17:50:15.000Z
2020-05-19T14:58:30.000Z
z2/part3/updated_part2_batch/jm/parser_errors_2/414514510.py
kozakusek/ipp-2020-testy
09aa008fa53d159672cc7cbf969a6b237e15a7b8
[ "MIT" ]
18
2020-03-06T17:45:13.000Z
2020-06-09T19:18:31.000Z
from part1 import ( gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new, ) """ scenario: test_random_actions uuid: 414514510 """ """ random actions, total chaos """ board = gamma_new(5, 5, 4, 6) assert board is not None assert gamma_move(board, 1, 0, 0) == 1 assert gamma_golden_possible(board, 1) == 0 assert gamma_move(board, 2, 2, 0) == 1 assert gamma_golden_move(board, 2, 0, 0) == 1 assert gamma_move(board, 4, 3, 3) == 1 assert gamma_move(board, 1, 0, 3) == 1 assert gamma_move(board, 1, 0, 1) == 1 assert gamma_move(board, 3, 4, 0) == 1 assert gamma_move(board, 3, 1, 1) == 1 assert gamma_move(board, 4, 1, 4) == 1 assert gamma_move(board, 1, 2, 0) == 0 assert gamma_move(board, 1, 3, 2) == 1 assert gamma_free_fields(board, 1) == 16 assert gamma_move(board, 2, 2, 2) == 1 board832229873 = gamma_board(board) assert board832229873 is not None assert board832229873 == (".4...\n" "1..4.\n" "..21.\n" "13...\n" "2.2.3\n") del board832229873 board832229873 = None assert gamma_move(board, 3, 0, 4) == 1 assert gamma_move(board, 4, 4, 2) == 1 assert gamma_move(board, 1, 2, 1) == 1 assert gamma_move(board, 1, 4, 1) == 1 assert gamma_move(board, 2, 3, 4) == 1 assert gamma_move(board, 4, 0, 3) == 0 assert gamma_move(board, 4, 0, 3) == 0 assert gamma_move(board, 1, 2, 1) == 0 assert gamma_move(board, 1, 1, 2) == 1 assert gamma_move(board, 2, 4, 3) == 1 assert gamma_move(board, 3, 4, 4) == 1 assert gamma_move(board, 3, 0, 3) == 0 assert gamma_golden_move(board, 3, 2, 1) == 1 assert gamma_move(board, 4, 3, 1) == 1 assert gamma_move(board, 1, 0, 0) == 0 assert gamma_move(board, 1, 2, 1) == 0 assert gamma_move(board, 2, 0, 1) == 0 assert gamma_move(board, 2, 0, 4) == 0 assert gamma_move(board, 3, 4, 4) == 0 assert gamma_move(board, 4, 0, 1) == 0 assert gamma_move(board, 1, 2, 0) == 0 assert gamma_move(board, 2, 2, 0) == 0 assert gamma_move(board, 2, 2, 0) == 0 assert gamma_move(board, 3, 2, 3) == 1 assert 
gamma_free_fields(board, 3) == 5 assert gamma_move(board, 1, 1, 4) == 0 assert gamma_golden_possible(board, 1) == 1 assert gamma_move(board, 2, 0, 2) == 1 assert gamma_move(board, 3, 4, 1) == 0 assert gamma_move(board, 4, 0, 3) == 0 assert gamma_move(board, 4, 0, 0) == 0 board915398393 = gamma_board(board) assert board915398393 is not None assert board915398393 == ("34.23\n" "1.342\n" "21214\n" "13341\n" "2.2.3\n") del board915398393 board915398393 = None assert gamma_move(board, 1, 1, 4) == 0 assert gamma_move(board, 2, 3, 1) == 0 board836323401 = gamma_board(board) assert board836323401 is not None assert board836323401 == ("34.23\n" "1.342\n" "21214\n" "13341\n" "2.2.3\n") del board836323401 board836323401 = None assert gamma_move(board, 3, 3, 1) == 0 assert gamma_move(board, 4, 4, 0) == 0 assert gamma_move(board, 1, 4, 4) == 0 assert gamma_move(board, 1, 1, 4) == 0 assert gamma_busy_fields(board, 1) == 5 assert gamma_move(board, 2, 3, 1) == 0 assert gamma_move(board, 3, 3, 2) == 0 assert gamma_move(board, 4, 3, 1) == 0 assert gamma_move(board, 1, 3, 1) == 0 assert gamma_move(board, 1, 2, 2) == 0 assert gamma_golden_move(board, 1, 3, 2) == 0 assert gamma_move(board, 2, 0, 3) == 0 assert gamma_move(board, 2, 3, 0) == 1 assert gamma_move(board, 3, 0, 1) == 0 assert gamma_move(board, 3, 0, 1) == 0 assert gamma_golden_move(board, 3, 4, 3) == 0 board926808166 = gamma_board(board) assert board926808166 is not None assert board926808166 == ("34.23\n" "1.342\n" "21214\n" "13341\n" "2.223\n") del board926808166 board926808166 = None assert gamma_move(board, 4, 4, 2) == 0 assert gamma_move(board, 1, 0, 1) == 0 assert gamma_move(board, 2, 1, 2) == 0 assert gamma_move(board, 2, 2, 2) == 0 assert gamma_move(board, 3, 0, 1) == 0 assert gamma_move(board, 4, 0, 1) == 0 assert gamma_move(board, 4, 0, 4) == 0 assert gamma_move(board, 1, 3, 1) == 0 assert gamma_move(board, 2, 0, 0) == 0 assert gamma_free_fields(board, 2) == 2 gamma_delete(board)
28.51049
46
0.659554
744
4,077
3.475806
0.063172
0.310518
0.365429
0.487239
0.728538
0.702243
0.613302
0.354602
0.29041
0.289637
0
0.16442
0.181015
4,077
142
47
28.711268
0.610063
0
0
0.270492
0
0
0.035097
0
0
0
0
0
0.672131
1
0
false
0
0.008197
0
0.008197
0
0
0
0
null
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
5
c7a86e7da0f40d6ce5e8c453fff0e849bb0930d2
390
py
Python
modules/tests/test_flights.py
joemulray/ostorybook-jarvis
c08502b843691207f68270e621032fe4299350d7
[ "MIT" ]
3
2018-01-25T18:00:50.000Z
2019-02-20T03:22:07.000Z
modules/tests/test_flights.py
joemulray/ostorybook-jarvis
c08502b843691207f68270e621032fe4299350d7
[ "MIT" ]
null
null
null
modules/tests/test_flights.py
joemulray/ostorybook-jarvis
c08502b843691207f68270e621032fe4299350d7
[ "MIT" ]
null
null
null
import modules def test_flights(): assert ('flights' == modules.process_query('Find me cheap flights')[0]) assert ('flights' == modules.process_query('Find me a flight home')[0]) assert ('flights' == modules.process_query('Can you find me a flight from <Location> to <Location>')[0]) assert ('flights' != modules.process_query('Flights to <Location> from <Location>')[0])
43.333333
108
0.689744
53
390
4.981132
0.377358
0.19697
0.30303
0.409091
0.541667
0.541667
0.287879
0
0
0
0
0.012048
0.148718
390
8
109
48.75
0.783133
0
0
0
0
0
0.412821
0
0
0
0
0
0.666667
1
0.166667
true
0
0.166667
0
0.333333
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
1
0
0
0
0
0
0
5
c7b1d2a8a949a049702260e062c2cb35261f1aea
223
py
Python
tseries_patterns/ml/hmm/__init__.py
tr8dr/patterns
757a0b9d4936a0c6af633af6f16c0ca8ee676bb0
[ "MIT" ]
127
2020-07-12T21:48:20.000Z
2022-03-27T21:12:26.000Z
tseries_patterns/ml/hmm/__init__.py
kumprj/tseries-patterns
99c5279d1a06e4ab0fe92f2a04102d09ae6300c7
[ "MIT" ]
11
2020-08-08T05:17:16.000Z
2022-02-23T13:29:23.000Z
tseries_patterns/ml/hmm/__init__.py
kumprj/tseries-patterns
99c5279d1a06e4ab0fe92f2a04102d09ae6300c7
[ "MIT" ]
46
2020-07-22T20:50:55.000Z
2021-12-16T00:57:50.000Z
from .HMM import HMM from .GaussianHMM import GaussianHMM from .HMM2State import HMM2State from .HMM3State import HMM3State from .HMMExponential2State import HMMExponential2State from .WalkforwardHMM import WalkforwardHMM
27.875
54
0.860987
24
223
8
0.333333
0
0
0
0
0
0
0
0
0
0
0.030303
0.112108
223
7
55
31.857143
0.939394
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
c7b5c597d2ae731abfddeac6a7dceafceccda0bb
174
py
Python
PythonStarter/7_3_2.py
fsbd1285228/PythonCodes
e71fcd695e19e3d286ec249113791d0729bec751
[ "MIT" ]
null
null
null
PythonStarter/7_3_2.py
fsbd1285228/PythonCodes
e71fcd695e19e3d286ec249113791d0729bec751
[ "MIT" ]
null
null
null
PythonStarter/7_3_2.py
fsbd1285228/PythonCodes
e71fcd695e19e3d286ec249113791d0729bec751
[ "MIT" ]
null
null
null
'''Exercist 7.2''' import numpy x = numpy.arange(6.0) y = x**2 z = x**3 print 'The first array is :\n', x print 'The second array is :\n', y print 'The third array is :\n', z
21.75
34
0.62069
37
174
2.918919
0.540541
0.222222
0.222222
0
0
0
0
0
0
0
0
0.042553
0.189655
174
8
35
21.75
0.723404
0
0
0
0
0
0.426752
0
0
0
0
0
0
0
null
null
0
0.142857
null
null
0.428571
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
5
1bf5569e276bfb15df60c3ce1cf095739060828e
32
py
Python
Python/30-infinite-loops.py
strawman2511/Learning
21ee7bdad376060503fdc0a739fed2d7bd40f9b9
[ "MIT" ]
1
2022-03-16T23:25:54.000Z
2022-03-16T23:25:54.000Z
Python/30-infinite-loops.py
strawman2511/Learning
21ee7bdad376060503fdc0a739fed2d7bd40f9b9
[ "MIT" ]
null
null
null
Python/30-infinite-loops.py
strawman2511/Learning
21ee7bdad376060503fdc0a739fed2d7bd40f9b9
[ "MIT" ]
null
null
null
while True: print("Python")
10.666667
19
0.625
4
32
5
1
0
0
0
0
0
0
0
0
0
0
0
0.21875
32
2
20
16
0.8
0
0
0
0
0
0.1875
0
0
0
0
0
0
1
0
true
0
0
0
0
0.5
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
400534810b56739810dc2d7dacb25b14769fccdc
593
py
Python
ECC_main/platform/platformBase.py
dongh9508/ECC-main
904110b70ba3e459d92c6d21a5ad1693b4ee726a
[ "MIT" ]
2
2019-01-23T00:04:18.000Z
2019-02-01T10:09:15.000Z
ECC_main/platform/platformBase.py
dongh9508/ECC-main
904110b70ba3e459d92c6d21a5ad1693b4ee726a
[ "MIT" ]
26
2018-07-11T07:59:46.000Z
2021-02-08T20:21:46.000Z
ECC_main/platform/platformBase.py
dongh9508/ECC-main
904110b70ba3e459d92c6d21a5ad1693b4ee726a
[ "MIT" ]
2
2017-10-01T15:55:55.000Z
2017-10-01T18:27:11.000Z
from abc import ABCMeta, abstractmethod class PlatformBase(metaclass=ABCMeta): @abstractmethod def slash_command(request, func): pass @abstractmethod def lazy_slash_command(request, func): pass @abstractmethod def platform(): pass @abstractmethod def _get_chat_id(json_body): pass @abstractmethod def _get_user_id(json_body): pass @abstractmethod def _get_user_name(json_body): pass @abstractmethod def _get_json_list(request_body): pass
19.766667
42
0.617201
60
593
5.783333
0.4
0.342939
0.363112
0.276657
0.564842
0.564842
0.472622
0.21902
0
0
0
0
0.323777
593
30
43
19.766667
0.865337
0
0
0.608696
0
0
0
0
0
0
0
0
0
1
0.304348
false
0.304348
0.043478
0
0.391304
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
5
400be34b47ce85ef5674a1307f43fda532124949
340
py
Python
builder/views.py
jtrussell/kf-cube-builder
8d90ddc76e6b052b98ff91273b1891a2b394a6be
[ "MIT" ]
null
null
null
builder/views.py
jtrussell/kf-cube-builder
8d90ddc76e6b052b98ff91273b1891a2b394a6be
[ "MIT" ]
null
null
null
builder/views.py
jtrussell/kf-cube-builder
8d90ddc76e6b052b98ff91273b1891a2b394a6be
[ "MIT" ]
null
null
null
from django.shortcuts import render def index(request): return render(request, 'builder/form_page.html', {}) def results(request): return render(request, 'builder/results_page.html', {}) def error(request): return render(request, 'builder/error_page.html', { 'errorMessage': 'That username does\'t exist!' })
20
59
0.685294
41
340
5.609756
0.512195
0.169565
0.247826
0.33913
0.430435
0
0
0
0
0
0
0
0.173529
340
16
60
21.25
0.818505
0
0
0
0
0
0.297935
0.20649
0
0
0
0
0
1
0.333333
false
0
0.111111
0.333333
0.777778
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
403a4698f1216bbbc111b2c149244cfaa3f342e8
45
py
Python
guillotina/contrib/redis/exceptions.py
rboixaderg/guillotina
fcae65c2185222272f3b8fee4bc2754e81e0e983
[ "BSD-2-Clause" ]
173
2017-03-10T18:26:12.000Z
2022-03-03T06:48:56.000Z
guillotina/contrib/redis/exceptions.py
rboixaderg/guillotina
fcae65c2185222272f3b8fee4bc2754e81e0e983
[ "BSD-2-Clause" ]
921
2017-03-08T14:04:43.000Z
2022-03-30T10:28:56.000Z
guillotina/contrib/redis/exceptions.py
rboixaderg/guillotina
fcae65c2185222272f3b8fee4bc2754e81e0e983
[ "BSD-2-Clause" ]
60
2017-03-16T19:59:44.000Z
2022-03-03T06:48:59.000Z
class NoRedisConfigured(Exception): pass
15
35
0.777778
4
45
8.75
1
0
0
0
0
0
0
0
0
0
0
0
0.155556
45
2
36
22.5
0.921053
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
403fb7ba9f31f7045891c065163e860b9240edf3
174
py
Python
dask/bytes/__init__.py
TryTestspace/dask
86d4f7d8c6d48ec6c4b1de1b6cfd2d3f4e5a4c1b
[ "BSD-3-Clause" ]
1
2021-06-06T10:39:50.000Z
2021-06-06T10:39:50.000Z
dask/bytes/__init__.py
TryTestspace/dask
86d4f7d8c6d48ec6c4b1de1b6cfd2d3f4e5a4c1b
[ "BSD-3-Clause" ]
1
2021-04-30T20:41:53.000Z
2021-04-30T20:41:53.000Z
dask/bytes/__init__.py
TryTestspace/dask
86d4f7d8c6d48ec6c4b1de1b6cfd2d3f4e5a4c1b
[ "BSD-3-Clause" ]
1
2021-03-28T04:50:43.000Z
2021-03-28T04:50:43.000Z
from __future__ import print_function, division, absolute_import from ..utils import ignoring from .core import read_bytes, open_files, open_text_files from . import local
24.857143
64
0.827586
25
174
5.36
0.64
0
0
0
0
0
0
0
0
0
0
0
0.126437
174
6
65
29
0.881579
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
4065fd9b9086cfb7da2c0f0f68d30accbaf13030
66
py
Python
appz.py
joyfulflyer/billboard-reader
4cd2a6346954e886c7aaa352ffa396d8c9ff0648
[ "MIT" ]
null
null
null
appz.py
joyfulflyer/billboard-reader
4cd2a6346954e886c7aaa352ffa396d8c9ff0648
[ "MIT" ]
3
2021-06-01T23:42:24.000Z
2021-08-09T17:19:36.000Z
appz.py
joyfulflyer/billboard-reader
4cd2a6346954e886c7aaa352ffa396d8c9ff0648
[ "MIT" ]
null
null
null
from . import reader from reader import getSongsFlaskApp as app
22
43
0.80303
9
66
5.888889
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.181818
66
2
44
33
0.981481
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
407065abc520a4ed8aef6525e26c88cb0be626c7
20,864
py
Python
src/fmthdl.py
jeppeter/py-obcode
c2cdcb198b3cbc19f87dafd2a63e05901c11fcd9
[ "BSD-Source-Code" ]
null
null
null
src/fmthdl.py
jeppeter/py-obcode
c2cdcb198b3cbc19f87dafd2a63e05901c11fcd9
[ "BSD-Source-Code" ]
null
null
null
src/fmthdl.py
jeppeter/py-obcode
c2cdcb198b3cbc19f87dafd2a63e05901c11fcd9
[ "BSD-Source-Code" ]
null
null
null
#! /usr/bin/env python import random import json import logging import re import sys import os ##importdebugstart sys.path.append(os.path.abspath(os.path.dirname(__file__))) from strparser import * ##importdebugend ##extractcode_start class Utf8Encode(object): def __dict_utf8(self,val): newdict =dict() for k in val.keys(): newk = self.__encode_utf8(k) newv = self.__encode_utf8(val[k]) newdict[newk] = newv return newdict def __list_utf8(self,val): newlist = [] for k in val: newk = self.__encode_utf8(k) newlist.append(newk) return newlist def __encode_utf8(self,val): retval = val if sys.version[0]=='2' and isinstance(val,unicode): retval = val.encode('utf8') elif isinstance(val,dict): retval = self.__dict_utf8(val) elif isinstance(val,list): retval = self.__list_utf8(val) return retval def __init__(self,val): self.__val = self.__encode_utf8(val) return def __str__(self): return self.__val def __repr__(self): return self.__val def get_val(self): return self.__val def format_line(l, tab=0): retstr = '' retstr += format_tabs(tab) retstr += '%s\n'%(l) return retstr def format_comment_line(l): s = '' idx = 0 while idx < len(l): if l[idx] == '*': s += '\\*' elif l[idx] == '/': s += '\\/' else: s += l[idx] idx += 1 return s def format_debug_line(l,tab=0,debug=0): rets = '' if debug >= 3: rets += format_line('/* %s */'%(format_comment_line(l)), tab) return rets def format_xor_encode_function(nameprefix='prefix',namelen=10,tabs=0,debug=0): funcstr = '' funcname = '%s_%s'%(nameprefix,get_random_name(namelen)) if debug >= 3: funcstr += format_line('/*********************************************',tabs) funcstr += format_line('* to make xor encoded functions', tabs) funcstr += format_line('* it will be simple ,may be it will give more complex', tabs) funcstr += format_line('*********************************************/',tabs) funcstr += format_debug_line('variable:namelen %d variable:prefix [%s]'%(namelen,nameprefix), tabs,debug) funcstr += format_line('int %s(unsigned char* 
pbuf,int size,unsigned char* pxorcode, int xorsize)'%(funcname),tabs) funcstr += format_line('{',tabs) funcstr += format_line('int i,curi;',tabs+1) funcstr += format_line('',tabs) funcstr += format_line('for (i=0;i<size;i++){', tabs + 1) funcstr += format_line('curi = (i % xorsize);',tabs + 2) funcstr += format_line('pbuf[i] = (unsigned char)(pbuf[i] ^ pxorcode[curi]);', tabs + 2) funcstr += format_line('}', tabs + 1) funcstr += format_line('', tabs) funcstr += format_line('return size;',tabs + 1) funcstr += format_line('}',tabs) return funcstr,funcname def format_xor_decode_function(nameprefix='prefix_',namelen=10, tabs=0, debug=0): funcstr = '' funcname = '%s_%s'%(nameprefix,get_random_name(namelen)) if debug >= 3: funcstr += format_line('/*********************************************',tabs) funcstr += format_line('* to make xor decoded functions', tabs) funcstr += format_line('* it will be simple ,may be it will give more complex', tabs) funcstr += format_line('*********************************************/',tabs) funcstr += format_debug_line('variable:namelen %d variable:prefix [%s]'%(namelen,nameprefix), tabs,debug) funcstr += format_line('int %s(unsigned char* pbuf,int size,unsigned char* pxorcode, int xorsize)'%(funcname),tabs) funcstr += format_line('{',tabs) funcstr += format_line('int i,curi;',tabs+1) funcstr += format_line('',tabs) funcstr += format_line('for (i=0;i<size;i++){', tabs + 1) funcstr += format_line('curi = (i % xorsize);',tabs + 2) funcstr += format_line('pbuf[i] = (unsigned char)(pbuf[i] ^ pxorcode[curi]);', tabs + 2) funcstr += format_line('}', tabs + 1) funcstr += format_line('', tabs) funcstr += format_line('return size;',tabs + 1) funcstr += format_line('}',tabs) return funcstr,funcname def get_xor_code(cnum=16): xorcode = [] for i in range(cnum): xorcode.append(random.randint(0,255)) return xorcode def format_key_ctr_function(xorcode,nameprefix='prefix', namelen=10, numturns=30, tabs=0,debug=0): funcstr = '' funcname = 
'%s_%s'%(nameprefix,get_random_name(namelen)) presentxor = [] funcstr += format_line('int %s(unsigned char* pbuf,int size)'%(funcname),tabs) funcstr += format_line('{',tabs) codestr = '' for i in range(len(xorcode)): if i > 0: codestr += ',' codestr += '0x%02x'%(xorcode[i]) funcstr += format_debug_line('keys %s size %d'%(codestr,len(xorcode)), tabs + 1 , debug) funcstr += format_line('',tabs) for i in range(len(xorcode)): if (i%5) == 0: funcstr += format_line('',tabs) curnum = random.randint(0, 255) funcstr += format_line('if ( %d < size) {'%(i), tabs + 1) funcstr += format_line('pbuf[%d] = %d;'%(i,curnum), tabs + 2) funcstr += format_line('}',tabs + 1) presentxor.append(curnum) funcstr += format_line('',tabs) funcstr += format_debug_line('variable:numturns %d'%(numturns), tabs + 1, debug) for i in range(numturns): if (i%5) == 0 and i > 0: funcstr += format_line('',tabs) curi = random.randint(0, len(xorcode)-1) curj = random.randint(0, len(xorcode)-1) funcstr += format_line('if (%d < size && %d < size){'%(curi,curj), tabs + 1) funcstr += format_debug_line('%d = %d ^ %d'%((presentxor[curi] ^ presentxor[curj]) & 0xff, presentxor[curi],presentxor[curj]), tabs + 2,debug) presentxor[curi] = (presentxor[curi] ^ presentxor[curj]) & 0xff funcstr += format_line('pbuf[%d] = (unsigned char)(pbuf[%d] ^ pbuf[%d]);'%(curi, curi, curj),tabs + 2) funcstr += format_line('}', tabs + 1) for i in range(len(xorcode)): if (i%3) == 0: funcstr += format_line('', tabs) curi = random.randint(0, len(xorcode)-1) curv = presentxor[i] ^ presentxor[curi] curv = xorcode[i] ^ curv funcstr += format_line('if (%d < size){'%(curi), tabs + 1) funcstr += format_debug_line('%d = %d ^ %d'%((presentxor[curi] ^ presentxor[curj]) & 0xff, presentxor[curi],presentxor[curj]), tabs + 2,debug) presentxor[i] = (presentxor[i] ^ presentxor[curi]) & 0xff funcstr += format_debug_line('%d = %d ^ %d'%((presentxor[i] ^ curv) & 0xff, presentxor[curi],curv), tabs + 2,debug) presentxor[i] = (presentxor[i] ^ curv) & 0xff 
assert(presentxor[i] == xorcode[i]) funcstr += format_line('pbuf[%d] = (unsigned char)(pbuf[%d] ^ pbuf[%d]);'%(i ,i,curi), tabs+2) funcstr += format_line('pbuf[%d] = (unsigned char)(pbuf[%d] ^ %d);'%(i,i,curv), tabs+2) funcstr += format_line('}', tabs + 1) funcstr += format_line('',tabs) funcstr += format_line('return %d < size ? %d : size;'%(len(xorcode),len(xorcode)), tabs+1) funcstr += format_line('}',tabs) return funcstr,funcname def format_key_dtr_function(xorcode,nameprefix='prefix', namelen=10, numturns=30, tabs=0,debug=0): funcstr = '' funcname = '%s_%s'%(nameprefix,get_random_name(namelen)) funcstr += format_line('void %s(unsigned char* pbuf,int size)'%(funcname),tabs) funcstr += format_line('{',tabs) funcstr += format_debug_line('variable:nameprefix %s variable:namelen %d variable:numturns %d'%(nameprefix,namelen,numturns), tabs+1,debug) funcstr += format_line('',tabs) storecode = [] for x in xorcode: storecode.append(x) for i in range(numturns): if (i%5) == 0: funcstr += format_line('',tabs) curi = random.randint(0, len(xorcode)-1) curj = random.randint(0, len(xorcode)-1) funcstr += format_line('if (%d < size && %d < size){'%(curi,curj), tabs+1) funcstr += format_debug_line('%d = %d ^ %d'%((storecode[curi] ^ storecode[curj]),storecode[curi],storecode[curj]), tabs + 2, debug) funcstr += format_line('pbuf[%d] = (unsigned char)(pbuf[%d] ^ pbuf[%d]);'%(curi, curi, curj),tabs + 2) funcstr += format_line('}',tabs + 1) funcstr += format_line('',tabs) funcstr += format_line('return;',tabs + 1) funcstr += format_line('}',tabs) return funcstr,funcname def format_printf_func(l,tabs): #return format_line(l,tabs) return '' def format_bytes_set_function(sbyte,nameprefix='prefix', namelen=10, numturns=30, tabs=0,debug=0,line=None): funcname = '%s_%s'%(nameprefix,get_random_name(namelen)) funcstr = '' funcstr += format_line('int %s(unsigned char* pbuf, int size)'%(funcname),tabs) funcstr += format_line('{', tabs) funcstr += format_printf_func('int i;', tabs + 1) if line 
is not None: funcstr += format_debug_line('first at line [%d]'%(line), tabs + 1 , debug) funcstr += format_debug_line('variable:nameprefix %s variable:namelen %d variable:numturns %d length(%d)'%(nameprefix,namelen,numturns,len(sbyte)), tabs + 1, debug) ss = '' for c in sbyte: if len(ss) > 0: ss += ',0x%02x'%(c) else: ss += 'sbyte [0x%02x'%(c) ss += ']' funcstr += format_debug_line('%s'%(ss), tabs + 1, debug) funcstr += format_printf_func('for (i=0;i<size;i++){', tabs + 1) funcstr += format_printf_func('printf("[%d]=[%d:0x%x]\\n",i,pbuf[i],pbuf[i]);', tabs + 2) funcstr += format_printf_func('}', tabs + 1) clbits = [] leastmask = [] for i in range(len(sbyte)): clbits.append(random.randint(0,255)) leastmask.append(0) ss = '' for c in clbits: if len(ss) > 0: ss += ',0x%x:%d'%(c,c) else: ss += 'clbits [0x%x:%d'%(c,c) ss += ']' funcstr += format_printf_func('/* %s */'%(ss), tabs+1) for i in range(len(sbyte)): curnum = clear_bit(sbyte[i], clbits[i]) funcstr += format_line('',tabs+1) funcstr += format_debug_line('pbuf[%d] & 0x%x ?=> 0x%x'%(i,curnum,sbyte[i]), tabs + 1, debug) funcstr += format_line('if (size > %d){'%(i), tabs + 1) funcstr += format_printf_func('printf("[%d][%%d:0x%%x] & [%%d:0x%%x] = [%%d:0x%%x] [target:0x%x:%d]\\n",pbuf[%d], pbuf[%d], %d,%d, (pbuf[%d] & %d), (pbuf[%d] & %d));'%(i,sbyte[i],sbyte[i],i,i,curnum,curnum,i,curnum,i,curnum), tabs+2) funcstr += format_line('pbuf[%d] = (unsigned char)(pbuf[%d] & 0x%x);'%(i,i,curnum), tabs + 2) funcstr += format_line('}',tabs + 1) # we need not to set the number directly ,but just used for i in range(numturns): cidx = random.randint(0, len(sbyte) - 1) cbit = random.randint(0, 255) if random.randint(0,1) == 1: cnum = clear_bit(sbyte[cidx], cbit) leastmask[cidx] = leastmask[cidx] | cnum funcstr += format_line('', tabs + 1) funcstr += format_line('if ( size > %d){'%(cidx), tabs + 1) funcstr += format_printf_func('printf("||[%d][%%d:0x%%x] | [%%d:0x%%x] = [%%d:0x%%x] 
[target:0x%x:%d]\\n",pbuf[%d],pbuf[%d],%d,%d,(pbuf[%d] | %d), (pbuf[%d] | %d));'%(cidx,sbyte[cidx],sbyte[cidx],cidx,cidx,cnum,cnum,cidx,cnum,cidx,cnum), tabs + 2) funcstr += format_line('pbuf[%d] = (unsigned char)(pbuf[%d] | 0x%x);'%(cidx,cidx, cnum), tabs + 2) funcstr += format_line('}', tabs + 1) else: cnum = expand_bit(sbyte[cidx], cbit) funcstr += format_line('', tabs + 1) funcstr += format_line('if ( size > %d){'%(cidx), tabs + 1) funcstr += format_printf_func('printf("&&[%d][%%d:0x%%x] & [%%d:0x%%x] = [%%d:0x%%x] [target:0x%x:%d]\\n",pbuf[%d],pbuf[%d],%d,%d,(pbuf[%d] & %d), (pbuf[%d] & %d));'%(cidx,sbyte[cidx],sbyte[cidx],cidx,cidx,cnum,cnum,cidx,cnum,cidx,cnum), tabs + 2) funcstr += format_line('pbuf[%d] = (unsigned char)(pbuf[%d] & 0x%x);'%(cidx,cidx, cnum), tabs + 2) funcstr += format_line('}', tabs + 1) # now we should filled the number for i in range(len(sbyte)): cnum = sbyte[i] & (~leastmask[i]) if cnum > 0: funcstr += format_line('',tabs+1) funcstr += format_debug_line('pbuf[%d] | 0x%x ?=> 0x%x'%(i,cnum, sbyte[i]), tabs + 1, debug) funcstr += format_line('if (size > %d){'%(i), tabs + 1) funcstr += format_printf_func('printf("[%d] [%%d:0x%%x] | [%%d:0x%%x] = [%%d:0x%%x] [target:0x%x:%d]\\n", pbuf[%d], pbuf[%d], %d ,%d , (pbuf[%d] | %d), (pbuf[%d] | %d));'%(i,sbyte[i],sbyte[i],i,i,cnum,cnum,i,cnum,i,cnum), tabs + 2) funcstr += format_line('pbuf[%d] = (unsigned char)(pbuf[%d] | 0x%x);'%(i,i,cnum), tabs + 2) funcstr += format_line('}',tabs + 1) funcstr += format_line('', tabs + 1) funcstr += format_line('return size;', tabs + 1) funcstr += format_line('}', tabs) return funcstr,funcname def format_bytes_xor_function(sbyte,abyte,abytefunc,bbyte,bbytefunc,nameprefix='prefix', namelen=10, numturns=30, tabs=0,debug=0, line=None): assert(len(sbyte) == len(abyte)) assert(len(abyte) == len(bbyte)) funcname = '%s_%s'%(nameprefix,get_random_name(namelen)) bname = '%s_%s'%(nameprefix, get_random_name(namelen)) retname = '%s_%s'%(nameprefix, get_random_name(namelen)) 
funcstr = '' funcstr += format_line('int %s(unsigned char* pbuf, int size)'%(funcname),tabs) funcstr += format_line('{', tabs) if line is not None: funcstr += format_debug_line('first at line [%d]'%(line),tabs + 1, debug) funcstr += format_debug_line('variable:nameprefix %s variable:namelen %d variable:numturns %d length(%d)'%(nameprefix,namelen,numturns,len(sbyte)), tabs + 1, debug) ss = '' for c in sbyte: if len(ss) > 0: ss += ',0x%x'%(c) else: ss += 'sbyte [0x%x'%(c) if len(ss) > 0: ss += ']' funcstr += format_debug_line('%s'%(ss), tabs + 1, debug) if len(sbyte) >= 4 and sbyte[-1] == 0 and sbyte[-2] == 0 and sbyte[-3] == 0 and sbyte[-4] == 0: funcstr += format_debug_line('var wstring:[%s]'%(quote_string(uni32_to_string(sbyte))), tabs + 1, debug) elif len(sbyte) >= 2 and sbyte[-1] == 0 and sbyte[-2] == 0: funcstr += format_debug_line('var wstring:[%s]'%(quote_string(uni16_to_string(sbyte))), tabs + 1, debug) else: funcstr += format_debug_line('var string:[%s]'%(quote_string(ints_to_string(sbyte))), tabs + 1, debug) funcstr += format_line('unsigned char %s[%d];'%(bname, len(sbyte)), tabs + 1) funcstr += format_line('int ret;', tabs + 1) funcstr += format_line('', tabs + 1) funcstr += format_line('ret = %s(pbuf,size);'%(abytefunc), tabs + 1) funcstr += format_line('if ( ret < 0) {' , tabs + 1) funcstr += format_line('return ret;', tabs + 2) funcstr += format_line('}', tabs + 1) funcstr += format_line('', tabs + 1) funcstr += format_line('ret = %s(%s,%d);'%(bbytefunc, bname, len(sbyte)), tabs + 1) funcstr += format_line('if ( ret < 0) {' , tabs + 1) funcstr += format_line('return ret;', tabs + 2) funcstr += format_line('}', tabs + 1) # now to give the value for i in range(numturns): cidx = random.randint(0, len(sbyte) - 1) didx = random.randint(0, len(sbyte) - 1) hdl = random.randint(0, 5) funcstr += format_line('', tabs + 1) if hdl == 0: # to make abyte[cidx] = abyte[cidx] & bbyte[didx] funcstr += format_debug_line('abyte[%d] = abyte[%d] & 
bbyte[%d]'%(cidx,cidx,didx), tabs + 1, debug) funcstr += format_debug_line('0x%x & 0x%x = 0x%0x'%(abyte[cidx],bbyte[didx], (abyte[cidx] & bbyte[didx])), tabs + 1, debug) funcstr += format_line('if ( %d < size && %d < size ) {'%(cidx, didx), tabs + 1 ) funcstr += format_line('pbuf[%d] = (unsigned char)(pbuf[%d] & %s[%d]);'%(cidx, cidx, bname, didx), tabs + 2) funcstr += format_line('}', tabs + 1) abyte[cidx] = abyte[cidx] & bbyte[didx] elif hdl == 1: # to make abyte[cidx] = abyte[cidx] | bbyte[didx] funcstr += format_debug_line('abyte[%d] = abyte[%d] | bbyte[%d]'%(cidx,cidx,didx), tabs + 1, debug) funcstr += format_debug_line('0x%x | 0x%x = 0x%0x'%(abyte[cidx],bbyte[didx], (abyte[cidx] | bbyte[didx])), tabs + 1, debug) funcstr += format_line('if ( %d < size && %d < size ) {'%(cidx, didx), tabs + 1 ) funcstr += format_line('pbuf[%d] = (unsigned char)(pbuf[%d] | %s[%d]);'%(cidx, cidx, bname, didx), tabs + 2) funcstr += format_line('}', tabs + 1) abyte[cidx] = abyte[cidx] | bbyte[didx] elif hdl == 2: # to make bbyte[didx] = abyte[cidx] & bbyte[didx] funcstr += format_debug_line('bbyte[%d] = abyte[%d] & bbyte[%d]'%(didx,cidx,didx), tabs + 1, debug) funcstr += format_debug_line('0x%x & 0x%x = 0x%0x'%(abyte[cidx],bbyte[didx], (abyte[cidx] & bbyte[didx])), tabs + 1, debug) funcstr += format_line('if ( %d < size && %d < size ) {'%(cidx, didx), tabs + 1 ) funcstr += format_line('%s[%d] = (unsigned char)(pbuf[%d] & %s[%d]);'%(bname,didx, cidx, bname, didx), tabs + 2) funcstr += format_line('}', tabs + 1) bbyte[didx] = abyte[cidx] & bbyte[didx] elif hdl == 3: # to make bbyte[didx] = abyte[cidx] | bbyte[didx] funcstr += format_debug_line('bbyte[%d] = abyte[%d] | bbyte[%d]'%(didx,cidx,didx), tabs + 1, debug) funcstr += format_debug_line('0x%x & 0x%x = 0x%0x'%(abyte[cidx],bbyte[didx], (abyte[cidx] | bbyte[didx])), tabs + 1, debug) funcstr += format_line('if ( %d < size && %d < size ) {'%(cidx, didx), tabs + 1 ) funcstr += format_line('%s[%d] = (unsigned char)(pbuf[%d] | 
%s[%d]);'%(bname,didx, cidx, bname, didx), tabs + 2) funcstr += format_line('}', tabs + 1) bbyte[didx] = abyte[cidx] | bbyte[didx] elif hdl == 4: # to make abyte[cidx] = abyte[cidx] ^ bbyte[didx] funcstr += format_debug_line('abyte[%d] = abyte[%d] ^ bbyte[%d]'%(cidx,cidx,didx), tabs + 1, debug) funcstr += format_debug_line('0x%x ^ 0x%x = 0x%0x'%(abyte[cidx],bbyte[didx], (abyte[cidx] ^ bbyte[didx])), tabs + 1, debug) funcstr += format_line('if ( %d < size && %d < size ) {'%(cidx, didx), tabs + 1 ) funcstr += format_line('pbuf[%d] = (unsigned char)(pbuf[%d] ^ %s[%d]);'%(cidx, cidx, bname, didx), tabs + 2) funcstr += format_line('}', tabs + 1) abyte[cidx] = abyte[cidx] ^ bbyte[didx] elif hdl == 5: # to make bbyte[didx] = abyte[cidx] ^ bbyte[didx] funcstr += format_debug_line('bbyte[%d] = abyte[%d] ^ bbyte[%d]'%(didx,cidx,didx), tabs + 1, debug) funcstr += format_debug_line('0x%x ^ 0x%x = 0x%0x'%(abyte[cidx],bbyte[didx], (abyte[cidx] ^ bbyte[didx])), tabs + 1, debug) funcstr += format_line('if ( %d < size && %d < size ) {'%(cidx, didx), tabs + 1 ) funcstr += format_line('%s[%d] = (unsigned char)(pbuf[%d] ^ %s[%d]);'%(bname,didx, cidx, bname, didx), tabs + 2) funcstr += format_line('}', tabs + 1) bbyte[didx] = abyte[cidx] ^ bbyte[didx] else: raise Exception('unexpected random valud [%d]'%(hdl)) for i in range(len(sbyte)): hdl = random.randint(0, 1) funcstr += format_line('',tabs + 1) if hdl == 0: cnum = sbyte[i] ^ abyte[i] funcstr += format_debug_line('pbuf[%d] = (abyte[%d])0x%x ^ 0x%x'%(i,i,abyte[i], cnum),tabs + 1, debug) funcstr += format_line('if (%d < size){'%(i), tabs + 1) funcstr += format_line('pbuf[%d] = (unsigned char)(pbuf[%d] ^ 0x%x);'%(i,i,cnum),tabs + 2) funcstr += format_line('}',tabs + 1) elif hdl == 1: cnum = sbyte[i] ^ bbyte[i] funcstr += format_debug_line('pbuf[%d] = (bbyte[%d])0x%x ^ 0x%x'%(i,i,bbyte[i], cnum),tabs + 1, debug) funcstr += format_line('if (%d < size){'%(i), tabs + 1) funcstr += format_line('pbuf[%d] = (unsigned char)(%s[%d] ^ 
0x%x);'%(i,bname,i,cnum),tabs + 2) funcstr += format_line('}',tabs + 1) else: raise Exception('unexpected random valud [%d]'%(hdl)) funcstr += format_line('',tabs + 1) funcstr += format_line('return size;', tabs + 1) funcstr += format_line('}',tabs) return funcstr, funcname ##extractcode_end
48.976526
259
0.562883
2,844
20,864
4.012307
0.065049
0.190255
0.184734
0.108579
0.798791
0.773639
0.756113
0.733941
0.698799
0.67952
0
0.021349
0.234423
20,864
426
260
48.976526
0.693045
0.024204
0
0.509537
0
0.013624
0.192763
0.022713
0
0
0.00118
0
0.008174
1
0.049046
false
0
0.019074
0.010899
0.119891
0.027248
0
0
0
null
0
1
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
407134bc2f10896590c4ce1fc32aa475b238114c
196
py
Python
customer/admin.py
lautarianoo/django_shop
9bc575df8b7af5452bd15cc3cf4fb375be6384bd
[ "MIT" ]
null
null
null
customer/admin.py
lautarianoo/django_shop
9bc575df8b7af5452bd15cc3cf4fb375be6384bd
[ "MIT" ]
null
null
null
customer/admin.py
lautarianoo/django_shop
9bc575df8b7af5452bd15cc3cf4fb375be6384bd
[ "MIT" ]
null
null
null
from django.contrib import admin from .models import Customer, CompanySeller, ShippingAddress admin.site.register(Customer) admin.site.register(CompanySeller) admin.site.register(ShippingAddress)
32.666667
60
0.852041
23
196
7.26087
0.478261
0.161677
0.305389
0
0
0
0
0
0
0
0
0
0.066327
196
6
61
32.666667
0.912568
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.4
0
0.4
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
407a9244e727197497bf01ebcc8c36a1738cb867
2,613
py
Python
test/test_chain.py
micetti/TweetsOnBlocks
e9355a0d35c0bfe1755d80fc64ae2d781c762e30
[ "MIT" ]
null
null
null
test/test_chain.py
micetti/TweetsOnBlocks
e9355a0d35c0bfe1755d80fc64ae2d781c762e30
[ "MIT" ]
null
null
null
test/test_chain.py
micetti/TweetsOnBlocks
e9355a0d35c0bfe1755d80fc64ae2d781c762e30
[ "MIT" ]
null
null
null
from blockchain.chain import Chain from blockchain.block import Block class TestChain: def test_chain_constructor(self): test_chain = Chain() assert len(test_chain.chain) == 1 assert type(test_chain.get_latest_block()) == Block assert test_chain.get_latest_block().data == 'first Block' assert test_chain.get_latest_block().index == 0 def test_add_new_block(self): test_chain = Chain() data = 'New Block' test_chain.add_new_block(data) assert len(test_chain.chain) == 2 assert type(test_chain.get_latest_block()) == Block assert test_chain.get_previous_block().data == 'first Block' assert test_chain.get_latest_block().data == 'New Block' assert test_chain.get_previous_block().index == 0 assert test_chain.get_latest_block().index == 1 def test_all_blocks_are_valid(self): test_chain = Chain() test_chain.add_new_block('Block one') test_chain.add_new_block('Block two') valid = test_chain.all_blocks_valid() assert valid is True def test_changed_index_or_deleted_block_detected(self): test_chain = Chain() test_chain.add_new_block('Block one') test_chain.add_new_block('Block two') del test_chain.chain[1] valid = test_chain.all_blocks_valid() assert valid is False def test_changed_previous_hash_detected(self): test_chain = Chain() test_chain.add_new_block('Block one') test_chain.add_new_block('Block two') test_chain.chain[1].previous_hash = '76af6354wronghash' valid = test_chain.all_blocks_valid() assert valid is False def test_changed_hash_detected(self): test_chain = Chain() test_chain.add_new_block('Block one') test_chain.add_new_block('Block two') test_chain.chain[1].hash = '76af6354wronghash' valid = test_chain.all_blocks_valid() assert valid is False def test_changed_time_stamp_detected(self): test_chain = Chain() test_chain.add_new_block('Block one') test_chain.add_new_block('Block two') test_chain.chain[1].time_stamp = '2017-08-14T06:16:23.225877' valid = test_chain.all_blocks_valid() assert valid is False def test_changed_data_detected(self): test_chain = Chain() 
test_chain.add_new_block('Block one') test_chain.add_new_block('Block two') test_chain.chain[1].data = 'Block 42' valid = test_chain.all_blocks_valid() assert valid is False
31.107143
69
0.667432
358
2,613
4.51676
0.139665
0.239332
0.12987
0.120594
0.796537
0.75572
0.75572
0.704391
0.698207
0.672851
0
0.022144
0.239571
2,613
83
70
31.481928
0.791646
0
0
0.55
0
0
0.082664
0.00995
0
0
0
0
0.266667
1
0.133333
false
0
0.033333
0
0.183333
0
0
0
0
null
1
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
40841b864aeda8b827c43907fdf948d724c837f9
4,135
py
Python
tests/darwin/cli_functions_test.py
sachasamama/darwin-py
d263132fa3526e0a5ddf4e5cf0450b1ba9d40ef6
[ "MIT" ]
28
2019-10-23T09:05:45.000Z
2022-03-07T16:28:36.000Z
tests/darwin/cli_functions_test.py
sachasamama/darwin-py
d263132fa3526e0a5ddf4e5cf0450b1ba9d40ef6
[ "MIT" ]
91
2019-09-20T10:31:25.000Z
2022-03-30T15:27:17.000Z
tests/darwin/cli_functions_test.py
sachasamama/darwin-py
d263132fa3526e0a5ddf4e5cf0450b1ba9d40ef6
[ "MIT" ]
11
2020-04-02T08:31:37.000Z
2021-12-20T22:57:38.000Z
from unittest.mock import call, patch import pytest import responses from rich.console import Console from tests.fixtures import * from darwin.cli_functions import upload_data from darwin.client import Client from darwin.config import Config from darwin.dataset import RemoteDataset def describe_upload_data(): @pytest.fixture def remote_dataset(dataset_slug: str, local_config_file: Config): client = Client(local_config_file) return RemoteDataset(client=client, team="v7", name="TEST_DATASET", slug=dataset_slug, dataset_id=1) @pytest.fixture def request_upload_endpoint(team_slug: str, dataset_slug: str): return f"http://localhost/api/teams/{team_slug}/datasets/{dataset_slug}/data" @pytest.mark.usefixtures("file_read_write_test") @responses.activate def default_non_verbose( team_slug: str, dataset_slug: str, remote_dataset: RemoteDataset, request_upload_endpoint: str ): request_upload_response = { "blocked_items": [ {"dataset_item_id": 1, "filename": "test_1.jpg", "path": "/", "reason": "ALREADY_EXISTS"}, {"dataset_item_id": 2, "filename": "test_2.jpg", "path": "/", "reason": "UNKNOWN_TAGS"}, ], "items": [{"dataset_item_id": 3, "filename": "test_3.jpg", "path": "/"}], } responses.add(responses.PUT, request_upload_endpoint, json=request_upload_response, status=200) with patch.object(Client, "get_remote_dataset", return_value=remote_dataset) as get_remote_dataset_mock: with patch.object(Console, "print", return_value=None) as print_mock: upload_data( f"{team_slug}/{dataset_slug}", ["test_1.jpg", "test_2.jpg", "test_3.jpg"], [], 0, None, None, False, False, ) get_remote_dataset_mock.assert_called_once() assert call("Skipped 1 files already in the dataset.\n", style="warning") in print_mock.call_args_list assert ( call("2 files couldn't be uploaded because an error occurred.\n", style="error") in print_mock.call_args_list ) assert call('Re-run with "--verbose" for further details') in print_mock.call_args_list @pytest.mark.usefixtures("file_read_write_test") 
@responses.activate def with_verbose_flag( team_slug: str, dataset_slug: str, remote_dataset: RemoteDataset, request_upload_endpoint: str ): request_upload_response = { "blocked_items": [ {"dataset_item_id": 1, "filename": "test_1.jpg", "path": "/", "reason": "ALREADY_EXISTS"}, {"dataset_item_id": 2, "filename": "test_2.jpg", "path": "/", "reason": "UNKNOWN_TAGS"}, ], "items": [{"dataset_item_id": 3, "filename": "test_3.jpg", "path": "/"}], } responses.add(responses.PUT, request_upload_endpoint, json=request_upload_response, status=200) with patch.object(Client, "get_remote_dataset", return_value=remote_dataset) as get_remote_dataset_mock: with patch.object(Console, "print", return_value=None) as print_mock: upload_data( f"{team_slug}/{dataset_slug}", ["test_1.jpg", "test_2.jpg", "test_3.jpg"], [], 0, None, None, False, True, ) get_remote_dataset_mock.assert_called_once() assert call("Skipped 1 files already in the dataset.\n", style="warning") in print_mock.call_args_list assert ( call("2 files couldn't be uploaded because an error occurred.\n", style="error") in print_mock.call_args_list ) assert call('Re-run with "--verbose" for further details') not in print_mock.call_args_list
43.526316
118
0.592019
476
4,135
4.863445
0.226891
0.061771
0.033693
0.038877
0.754644
0.754644
0.723974
0.723974
0.723974
0.723974
0
0.010985
0.295526
4,135
94
119
43.989362
0.783728
0
0
0.679012
0
0
0.2237
0.012576
0
0
0
0
0.098765
1
0.061728
false
0
0.111111
0.012346
0.197531
0.098765
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
40a1da6680f2e46695cf02669bdfb2d50798566a
861
py
Python
tests/test_run.py
0just0/ibench
72b5c42d202b71ba11a09e1f3323186cc6aa5275
[ "MIT" ]
null
null
null
tests/test_run.py
0just0/ibench
72b5c42d202b71ba11a09e1f3323186cc6aa5275
[ "MIT" ]
null
null
null
tests/test_run.py
0just0/ibench
72b5c42d202b71ba11a09e1f3323186cc6aa5275
[ "MIT" ]
1
2021-11-24T04:24:00.000Z
2021-11-24T04:24:00.000Z
# Copyright (C) 2016-2018 Intel Corporation # # SPDX-License-Identifier: MIT import subprocess def test_run_plugin(): subprocess.check_call('IBENCH_PLUGINS="os sys" python -m ibench run -b cholesky --size test --file foo', shell=True) def test_run(): subprocess.check_call('python -m ibench run', shell=True) def test_run_simple(): subprocess.check_call('python -m ibench run -b cholesky --size test --file foo', shell=True) def test_run_sizes(): subprocess.check_call('python -m ibench run -b fft --size test --file foo', shell=True) subprocess.check_call('python -m ibench run -b fft --size small --file foo', shell=True) subprocess.check_call('python -m ibench run -b fft --size large --file foo', shell=True) def test_run_groups(): subprocess.check_call('python -m ibench run -b linalg --size test --file foo', shell=True)
37.434783
120
0.718931
134
861
4.492537
0.283582
0.174419
0.22093
0.186047
0.734219
0.702658
0.656146
0.554817
0.463455
0.463455
0
0.010929
0.149826
861
22
121
39.136364
0.811475
0.081301
0
0
0
0
0.456163
0
0
0
0
0
0
1
0.384615
true
0
0.076923
0
0.461538
0
0
0
0
null
0
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
0
0
5
40ce4507c346b824c9ee80f1b42ad09283580db7
1,010
py
Python
tests/conftest.py
lykius/hesiod
091ba1b06cfa870133415fc1df6efdd8e50a2cfe
[ "MIT" ]
19
2020-12-11T15:40:55.000Z
2022-01-17T16:55:13.000Z
tests/conftest.py
lykius/hesiod
091ba1b06cfa870133415fc1df6efdd8e50a2cfe
[ "MIT" ]
null
null
null
tests/conftest.py
lykius/hesiod
091ba1b06cfa870133415fc1df6efdd8e50a2cfe
[ "MIT" ]
null
null
null
from pathlib import Path import pytest @pytest.fixture def cwd() -> Path: return Path(".").absolute() @pytest.fixture def base_cfg_dir(cwd: Path) -> Path: return cwd / "tests/configs/bases" @pytest.fixture def simple_run_file(cwd: Path) -> Path: return cwd / "tests/configs/runs/simple.yaml" @pytest.fixture def complex_run_file(cwd: Path) -> Path: return cwd / "tests/configs/runs/complex.yaml" @pytest.fixture def wrong_run_file(cwd: Path) -> Path: return cwd / "tests/configs/runs/wrong.yaml" @pytest.fixture def no_run_name_run_file(cwd: Path) -> Path: return cwd / "tests/configs/runs/no_run_name.yaml" @pytest.fixture def cifar100_cfg_file(cwd: Path) -> Path: return cwd / "tests/configs/bases/dataset/cifar/cifar100.yaml" @pytest.fixture def complex_template_file(cwd: Path) -> Path: return cwd / "tests/configs/templates/complex.yaml" @pytest.fixture def simple_template_file(cwd: Path) -> Path: return cwd / "tests/configs/templates/simple.yaml"
20.612245
66
0.719802
146
1,010
4.842466
0.205479
0.165488
0.203678
0.192362
0.656294
0.503536
0.503536
0.503536
0.393211
0.393211
0
0.006961
0.146535
1,010
48
67
21.041667
0.813225
0
0
0.310345
0
0
0.260396
0.240594
0
0
0
0
0
1
0.310345
false
0
0.068966
0.310345
0.689655
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
40d0d27b368d55bff7390319356bc22470a657e8
21
py
Python
hello.py
snidu001/rest-api
75aa4368546de306fa3ac32ff437b29adff6ff02
[ "MIT" ]
null
null
null
hello.py
snidu001/rest-api
75aa4368546de306fa3ac32ff437b29adff6ff02
[ "MIT" ]
7
2019-12-05T00:00:31.000Z
2022-02-10T10:19:20.000Z
hello.py
snidu001/rest-api
75aa4368546de306fa3ac32ff437b29adff6ff02
[ "MIT" ]
null
null
null
print("How are you")
10.5
20
0.666667
4
21
3.5
1
0
0
0
0
0
0
0
0
0
0
0
0.142857
21
1
21
21
0.777778
0
0
0
0
0
0.52381
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
906b42c5ddda3e5fe8e1a3f8127a54abc949a901
1,390
py
Python
test/test_perpetual_snake_draft.py
pahwaranger/draft
154f929293bd357db02702aa4a4939c91e9f79b7
[ "MIT" ]
null
null
null
test/test_perpetual_snake_draft.py
pahwaranger/draft
154f929293bd357db02702aa4a4939c91e9f79b7
[ "MIT" ]
null
null
null
test/test_perpetual_snake_draft.py
pahwaranger/draft
154f929293bd357db02702aa4a4939c91e9f79b7
[ "MIT" ]
null
null
null
import pytest from draft import perpetual_snake_draft class TestPerpetualSnakeDraft: @pytest.mark.parametrize("df", ["draft_zero.csv"], indirect=["df"]) def test_draft_zero(self, df): fairness, assignment = perpetual_snake_draft.get_fairest_attempt(perpetual_snake_draft.permute_draft_order(df)) assert fairness == 0 @pytest.mark.parametrize("df", ["draft_one.csv"], indirect=["df"]) def test_draft_one(self, df): fairness, assignment = perpetual_snake_draft.get_fairest_attempt(perpetual_snake_draft.permute_draft_order(df)) assert fairness == 0 @pytest.mark.parametrize("df", ["draft_two.csv"], indirect=["df"]) def test_draft_two(self, df): fairness, assignment = perpetual_snake_draft.get_fairest_attempt(perpetual_snake_draft.permute_draft_order(df)) assert fairness == 4.242640687119285 @pytest.mark.parametrize("df", ["draft_three.csv"], indirect=["df"]) def test_draft_three(self, df): fairness, assignment = perpetual_snake_draft.get_fairest_attempt(perpetual_snake_draft.permute_draft_order(df)) assert fairness == 3.0 @pytest.mark.parametrize("df", ["draft_four.csv"], indirect=["df"]) def test_draft_four(self, df): fairness, assignment = perpetual_snake_draft.get_fairest_attempt(perpetual_snake_draft.permute_draft_order(df)) assert fairness == 2.0
44.83871
119
0.730935
178
1,390
5.38764
0.191011
0.160584
0.217935
0.119917
0.863399
0.805005
0.644421
0.644421
0.644421
0.644421
0
0.018565
0.147482
1,390
30
120
46.333333
0.790717
0
0
0.304348
0
0
0.064029
0
0
0
0
0
0.217391
1
0.217391
false
0
0.086957
0
0.347826
0
0
0
0
null
0
1
0
1
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
5
909287dde4c76ddaed6de62368ba5e0bba248ba0
443
py
Python
ParallelPyMetaMap/main/__init__.py
biomedicalinformaticsgroup/ParallelPyMetaMap
7d78cc026d914de81803a794f833223f1dab2df9
[ "MIT" ]
null
null
null
ParallelPyMetaMap/main/__init__.py
biomedicalinformaticsgroup/ParallelPyMetaMap
7d78cc026d914de81803a794f833223f1dab2df9
[ "MIT" ]
null
null
null
ParallelPyMetaMap/main/__init__.py
biomedicalinformaticsgroup/ParallelPyMetaMap
7d78cc026d914de81803a794f833223f1dab2df9
[ "MIT" ]
null
null
null
from ParallelPyMetaMap.main.removeNonAscii import removeNonAscii from ParallelPyMetaMap.main.concept2dict import concept2dict from ParallelPyMetaMap.main.annotation_func import annotation_func from ParallelPyMetaMap.main.parralelism_metamap import ppmm from ParallelPyMetaMap.main.output_files import output_files from ParallelPyMetaMap.main.df_semantictypes import df_semantictypes from ParallelPyMetaMap.main.df_semgroups import df_semgroups
63.285714
68
0.907449
51
443
7.705882
0.313725
0.374046
0.445293
0.137405
0
0
0
0
0
0
0
0.004808
0.060948
443
7
69
63.285714
0.939904
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
90b603285f517e4d861f06c6f0fafbac78c2b669
263
py
Python
src/trunk/apps/templates/initd.py
yannikbehr/seiscomp3
ebb44c77092555eef7786493d00ac4efc679055f
[ "Naumen", "Condor-1.1", "MS-PL" ]
null
null
null
src/trunk/apps/templates/initd.py
yannikbehr/seiscomp3
ebb44c77092555eef7786493d00ac4efc679055f
[ "Naumen", "Condor-1.1", "MS-PL" ]
null
null
null
src/trunk/apps/templates/initd.py
yannikbehr/seiscomp3
ebb44c77092555eef7786493d00ac4efc679055f
[ "Naumen", "Condor-1.1", "MS-PL" ]
1
2021-09-15T08:13:27.000Z
2021-09-15T08:13:27.000Z
import seiscomp3.Kernel class Module(seiscomp3.Kernel.Module): def __init__(self, env): seiscomp3.Kernel.Module.__init__(self, env, env.moduleName(__file__)) def supportsAliases(self): # The default handler does not support aliases return True
23.909091
73
0.752852
33
263
5.636364
0.636364
0.241935
0.225806
0
0
0
0
0
0
0
0
0.013514
0.155894
263
10
74
26.3
0.824324
0.1673
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.166667
0.166667
0.833333
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
90b7a303b3f9624b507786a45a64cf6613f1a985
2,627
py
Python
test/test_data/test_datasets.py
ZJCV/Non-local
fbd8d4377c7526f482081d7268eac0df474196f9
[ "Apache-2.0" ]
13
2020-11-07T08:53:49.000Z
2021-12-29T02:48:20.000Z
test/test_data/test_datasets.py
ZJCV/Non-local
fbd8d4377c7526f482081d7268eac0df474196f9
[ "Apache-2.0" ]
3
2021-05-17T10:19:23.000Z
2021-07-22T05:57:17.000Z
test/test_data/test_datasets.py
ZJCV/Non-local
fbd8d4377c7526f482081d7268eac0df474196f9
[ "Apache-2.0" ]
2
2021-06-15T02:39:50.000Z
2021-09-30T05:03:29.000Z
# -*- coding: utf-8 -*- """ @date: 2020/10/9 下午8:34 @file: test_datasets.py @author: zj @description: """ from tsn.config import cfg from tsn.data.datasets.build import build_dataset from tsn.data.transforms.build import build_transform def test_ucf101_rgb(): cfg.merge_from_file('configs/tsn_r50_ucf101_rgb_raw_seg_1x1x3.yaml') cfg.DATASETS.NUM_CLIPS = 8 transform = build_transform(cfg, is_train=True) dataset = build_dataset(cfg, transform=transform, is_train=True) image, target = dataset.__getitem__(20) print(image.shape) print(target) assert image.shape == (3, 8, 224, 224) def test_ucf101_rgbdiff(): cfg.merge_from_file('configs/tsn_r50_ucf101_rgbdiff_raw_seg_1x1x3.yaml') transform = build_transform(cfg, is_train=True) dataset = build_dataset(cfg, transform=transform, is_train=True) image, target = dataset.__getitem__(20) print(image.shape) print(target) assert image.shape == (3, 15, 224, 224) def test_hmdb51_rgb(): cfg.merge_from_file('configs/tsn_r50_hmdb51_rgb_raw_seg_1x1x3.yaml') cfg.DATASETS.NUM_CLIPS = 8 transform = build_transform(cfg, is_train=True) dataset = build_dataset(cfg, transform=transform, is_train=True) image, target = dataset.__getitem__(20) print(image.shape) print(target) assert image.shape == (3, 8, 224, 224) def test_hmdb51_rgbdiff(): cfg.merge_from_file('configs/tsn_r50_hmdb51_rgbdiff_raw_seg_1x1x3.yaml') transform = build_transform(cfg, is_train=True) dataset = build_dataset(cfg, transform=transform, is_train=True) image, target = dataset.__getitem__(20) print(image.shape) print(target) assert image.shape == (3, 15, 224, 224) def test_jester_rgb(): cfg.merge_from_file('configs/tsn_r50_jester_rgb_raw_seg_1x1x3.yaml') cfg.DATASETS.NUM_CLIPS = 8 transform = build_transform(cfg, is_train=True) dataset = build_dataset(cfg, transform=transform, is_train=True) image, target = dataset.__getitem__(20) print(image.shape) print(target) assert image.shape == (3, 8, 224, 224) def test_jester_rgbdiff(): 
cfg.merge_from_file('configs/tsn_r50_jester_rgbdiff_raw_seg_1x1x3.yaml') transform = build_transform(cfg, is_train=True) dataset = build_dataset(cfg, transform=transform, is_train=True) image, target = dataset.__getitem__(20) print(image.shape) print(target) assert image.shape == (3, 15, 224, 224) if __name__ == '__main__': test_ucf101_rgb() test_ucf101_rgbdiff() test_hmdb51_rgb() test_hmdb51_rgbdiff() test_jester_rgb() test_jester_rgbdiff()
27.082474
76
0.722497
373
2,627
4.729223
0.152815
0.047619
0.07483
0.054422
0.834467
0.820862
0.820862
0.820862
0.684807
0.684807
0
0.062329
0.163304
2,627
96
77
27.364583
0.740218
0.036924
0
0.639344
0
0
0.114988
0.111816
0
0
0
0
0.098361
1
0.098361
false
0
0.04918
0
0.147541
0.196721
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
2906c28e1f1c54e5b5b3178b807f6d37f290b241
58
py
Python
datasets/__init__.py
nota-github/ssd_tf2
e69260cad8595749ca38ee89a0f0bc0619b717e4
[ "MIT" ]
8
2020-12-24T06:21:42.000Z
2022-03-20T01:40:54.000Z
datasets/__init__.py
nota-github/ssd_tf2
e69260cad8595749ca38ee89a0f0bc0619b717e4
[ "MIT" ]
null
null
null
datasets/__init__.py
nota-github/ssd_tf2
e69260cad8595749ca38ee89a0f0bc0619b717e4
[ "MIT" ]
2
2021-09-04T07:30:25.000Z
2022-03-16T12:06:13.000Z
from .voc import VOCDataset from .coco import COCODataset
29
29
0.827586
8
58
6
0.75
0
0
0
0
0
0
0
0
0
0
0
0.137931
58
2
29
29
0.96
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
29226528b940150df9c3afe571d541c5dc154f48
77
py
Python
scripts/dbpedia/dbpedia/basic.py
aitrek/quepy3
977452585bd04765a1e3d30d3d354b73d4a261cf
[ "BSD-3-Clause" ]
1
2022-03-20T06:37:30.000Z
2022-03-20T06:37:30.000Z
scripts/dbpedia/dbpedia/basic.py
aitrek/quepy3
977452585bd04765a1e3d30d3d354b73d4a261cf
[ "BSD-3-Clause" ]
null
null
null
scripts/dbpedia/dbpedia/basic.py
aitrek/quepy3
977452585bd04765a1e3d30d3d354b73d4a261cf
[ "BSD-3-Clause" ]
null
null
null
# coding: utf-8 """ Basic queries for dbpedia quepy. """ from dsl import *
9.625
32
0.649351
11
77
4.545455
1
0
0
0
0
0
0
0
0
0
0
0.016393
0.207792
77
7
33
11
0.803279
0.61039
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
29263158b10efb99a6059069ccac08153f4c8c61
270
py
Python
src/simapi_web/rest_api/admin.py
RichieBrady/SimAPI-Python
9bd07e9fd0b3b4f37c968b061791ef9c1c6d213c
[ "MIT" ]
1
2020-04-08T19:40:10.000Z
2020-04-08T19:40:10.000Z
src/simapi_web/rest_api/admin.py
RichieBrady/SimAPI-Python
9bd07e9fd0b3b4f37c968b061791ef9c1c6d213c
[ "MIT" ]
8
2020-04-12T08:52:21.000Z
2021-09-22T18:34:19.000Z
src/simapi_web/rest_api/admin.py
RichieBrady/SimAPI-Python
9bd07e9fd0b3b4f37c968b061791ef9c1c6d213c
[ "MIT" ]
2
2020-04-08T08:52:15.000Z
2021-04-24T12:44:39.000Z
from django.contrib import admin from . import models # Register your models here. admin.site.register(models.User) admin.site.register(models.FmuModel) admin.site.register(models.Input) admin.site.register(models.Output) admin.site.register(models.ContainerHostNames)
27
46
0.822222
37
270
6
0.405405
0.202703
0.382883
0.518018
0
0
0
0
0
0
0
0
0.07037
270
9
47
30
0.884462
0.096296
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.285714
0
0.285714
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
294d8cccd7adc33a8fa4ec68397d0d3ebc19a7c8
271
py
Python
lib/exceptions.py
OneIdentity/safeguard-sessions-plugin-onelogin
ade31df93a8eeadf4269da712b5b65ddc3085621
[ "MIT" ]
null
null
null
lib/exceptions.py
OneIdentity/safeguard-sessions-plugin-onelogin
ade31df93a8eeadf4269da712b5b65ddc3085621
[ "MIT" ]
null
null
null
lib/exceptions.py
OneIdentity/safeguard-sessions-plugin-onelogin
ade31df93a8eeadf4269da712b5b65ddc3085621
[ "MIT" ]
null
null
null
class PluginError(Exception): pass class OneLoginClientError(PluginError): pass class UserNotFound(PluginError): pass class FactorNotFound(PluginError): pass class TimeOutError(PluginError): pass class APIResponseError(PluginError): pass
12.318182
39
0.745387
24
271
8.416667
0.375
0.222772
0.39604
0
0
0
0
0
0
0
0
0
0.188192
271
22
40
12.318182
0.918182
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
29816e37d95eafd46ff49906d1280e8bb2e0e368
727
py
Python
recipe_parser/recipes/dr.py
tyler-a-cox/recipe-parsing
fa883f66a39063cf72912527628b082cda455e76
[ "MIT" ]
null
null
null
recipe_parser/recipes/dr.py
tyler-a-cox/recipe-parsing
fa883f66a39063cf72912527628b082cda455e76
[ "MIT" ]
null
null
null
recipe_parser/recipes/dr.py
tyler-a-cox/recipe-parsing
fa883f66a39063cf72912527628b082cda455e76
[ "MIT" ]
null
null
null
from ._schema import DefaultSchema class Dr(DefaultSchema): @classmethod def host(cls): return "dr.dk" def title(self): return self.schema.title() def total_time(self): return self.schema.total_time() def yields(self): return self.schema.yields() def image(self): return self.schema.image() def language(self): meta_language = self.soup.find( "meta", attrs={"name": lambda x: x and x.lower() == "language", "content": True}, ) return meta_language.get("content") def ingredients(self): return self.schema.ingredients() def instructions(self): return self.schema.instructions()
21.382353
85
0.598349
83
727
5.180723
0.39759
0.139535
0.195349
0.27907
0
0
0
0
0
0
0
0
0.280605
727
33
86
22.030303
0.82218
0
0
0
0
0
0.048143
0
0
0
0
0
0
1
0.347826
false
0
0.043478
0.304348
0.782609
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
465bbdf377eb91a6d7fa0b9bb6d789d9fdab976a
189
py
Python
O4/__init__.py
ShAlireza/ML-Tries
4516be7a3275c9bdedd7bd258800be384b6b34f0
[ "MIT" ]
null
null
null
O4/__init__.py
ShAlireza/ML-Tries
4516be7a3275c9bdedd7bd258800be384b6b34f0
[ "MIT" ]
null
null
null
O4/__init__.py
ShAlireza/ML-Tries
4516be7a3275c9bdedd7bd258800be384b6b34f0
[ "MIT" ]
null
null
null
from .utils import csv_file, categorical_dataframe, prepare_data from ._23_select_meaningful_features.SBS import SBS __all__ = ('csv_file', 'categorical_dataframe', 'prepare_data', 'SBS')
37.8
70
0.809524
25
189
5.56
0.6
0.100719
0.258993
0.388489
0.546763
0.546763
0
0
0
0
0
0.011628
0.089947
189
4
71
47.25
0.796512
0
0
0
0
0
0.232804
0.111111
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
466c83f0f68fbce139fd0ba939a3a34f6f158644
1,063
py
Python
jterritory/exceptions/method.py
jameysharp/jterritory
b41e53ce04fe63db8a5943d2808e4fa9f9b15b32
[ "Apache-2.0" ]
5
2021-05-14T18:50:11.000Z
2021-05-23T03:08:55.000Z
jterritory/exceptions/method.py
jameysharp/jterritory
b41e53ce04fe63db8a5943d2808e4fa9f9b15b32
[ "Apache-2.0" ]
null
null
null
jterritory/exceptions/method.py
jameysharp/jterritory
b41e53ce04fe63db8a5943d2808e4fa9f9b15b32
[ "Apache-2.0" ]
null
null
null
from __future__ import annotations from typing import Optional from . import MethodError class ServerUnavailable(MethodError): pass class ServerFail(MethodError): description: Optional[str] class ServerPartialFail(MethodError): pass class UnknownMethod(MethodError): pass class InvalidArguments(MethodError): description: Optional[str] class InvalidResultReference(MethodError): pass class Forbidden(MethodError): pass class AccountNotFound(MethodError): pass class AccountNotSupportedByMethod(MethodError): pass class AccountReadOnly(MethodError): pass class RequestTooLarge(MethodError): pass class CannotCalculateChanges(MethodError): pass class StateMismatch(MethodError): pass class FromAccountNotFound(MethodError): pass class FromAccountNotSupportedByMethod(MethodError): pass class AnchorNotFound(MethodError): pass class UnsupportedSort(MethodError): pass class UnsupportedFilter(MethodError): pass class TooManyChanges(MethodError): pass
13.123457
51
0.764817
91
1,063
8.89011
0.32967
0.315204
0.39555
0.081582
0.093943
0
0
0
0
0
0
0
0.174976
1,063
80
52
13.2875
0.922463
0
0
0.463415
0
0
0
0
0
0
0
0
0
1
0
true
0.414634
0.073171
0
0.585366
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
1
0
0
5
467a538c28944cd7c022906526287ae5c942a0b7
162
py
Python
chatServer/admin.py
odeke-em/restAssured
72d5c3a3fe9fd090f067a4332f2a1df15370b1bb
[ "MIT" ]
1
2016-11-20T17:54:02.000Z
2016-11-20T17:54:02.000Z
chatServer/admin.py
odeke-em/restAssured
72d5c3a3fe9fd090f067a4332f2a1df15370b1bb
[ "MIT" ]
null
null
null
chatServer/admin.py
odeke-em/restAssured
72d5c3a3fe9fd090f067a4332f2a1df15370b1bb
[ "MIT" ]
null
null
null
from django.contrib import admin from chatServer.models import * admin.site.register(Message) admin.site.register(MessageMarker) admin.site.register(Receipient)
23.142857
34
0.833333
21
162
6.428571
0.571429
0.2
0.377778
0
0
0
0
0
0
0
0
0
0.074074
162
6
35
27
0.9
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.4
0
0.4
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
46acbda3033470eab088040369aa3421cadcdff4
2,452
py
Python
permissions/department.py
CloudCIX/membership
a7a62918c7d7c65dd1bf2068431dbf2ec2573e4b
[ "Apache-2.0" ]
null
null
null
permissions/department.py
CloudCIX/membership
a7a62918c7d7c65dd1bf2068431dbf2ec2573e4b
[ "Apache-2.0" ]
null
null
null
permissions/department.py
CloudCIX/membership
a7a62918c7d7c65dd1bf2068431dbf2ec2573e4b
[ "Apache-2.0" ]
null
null
null
# stdlib from typing import Optional # lib from cloudcix_rest.exceptions import Http403 from rest_framework.request import Request # local from membership.models import Department, User class Permissions: @staticmethod def create(request: Request) -> Optional[Http403]: """ The request to create a new Department record is valid if; - The requesting User's Member is self-managed - The requesting User is an administrator of their Member """ # The requesting User's Member is self-managed if not request.user.member['self_managed']: return Http403(error_code='membership_department_create_201') # The requesting User is an administrator of their Member if not request.user.administrator: return Http403(error_code='membership_department_create_202') return None @staticmethod def update(request: Request) -> Optional[Http403]: """ The request to update a Department record is valid if; - The requesting User's Member is self-managed - The requesting User is an administrator of their Member """ # The requesting User's Member is self-managed if not request.user.member['self_managed']: return Http403(error_code='membership_department_update_201') # The requesting User is an administrator of their Member if not request.user.administrator: return Http403(error_code='membership_department_update_202') return None @staticmethod def delete(request: Request, obj: Department) -> Optional[Http403]: """ The request to delete a Department record is valid if; - The requesting User's Member is self-managed - The requesting User is an administrator of their Member - The specified Department is empty """ # The requesting User's Member is self-managed if not request.user.member['self_managed']: return Http403(error_code='membership_department_delete_201') # The requesting User is an administrator of their Member if not request.user.administrator: return Http403(error_code='membership_department_delete_202') # The specified Department is empty if User.objects.filter(department=obj).exists(): 
return Http403(error_code='membership_department_delete_203') return None
36.597015
73
0.682708
298
2,452
5.506711
0.184564
0.095064
0.124314
0.093845
0.813528
0.729433
0.729433
0.628275
0.628275
0.628275
0
0.029476
0.252855
2,452
66
74
37.151515
0.866266
0.356444
0
0.428571
0
0
0.18018
0.155232
0
0
0
0
0
1
0.107143
false
0
0.142857
0
0.642857
0
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
5
46b5d9659284fc76390dfdb3dc0b54fb96b6d010
206
py
Python
prettyqr/error.py
olorin/prettyqr
6206c32ce6f3a8c91ee5f25b4d71b49d4c0b7cde
[ "MIT" ]
2
2018-05-13T14:42:36.000Z
2021-12-17T15:08:15.000Z
prettyqr/error.py
olorin/prettyqr
6206c32ce6f3a8c91ee5f25b4d71b49d4c0b7cde
[ "MIT" ]
4
2021-03-18T20:25:28.000Z
2022-01-13T00:42:14.000Z
prettyqr/error.py
olorin/prettyqr
6206c32ce6f3a8c91ee5f25b4d71b49d4c0b7cde
[ "MIT" ]
null
null
null
class PrettyException(Exception): "Semantic errors in prettyqr." pass class ImageNotSquareException(PrettyException): pass class BaseImageSmallerThanQRCodeException(PrettyException): pass
20.6
59
0.791262
16
206
10.1875
0.625
0.110429
0
0
0
0
0
0
0
0
0
0
0.150485
206
9
60
22.888889
0.931429
0.135922
0
0.428571
0
0
0.135922
0
0
0
0
0
0
1
0
true
0.428571
0
0
0.428571
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
d3bc2e8c84b574db78e425e12f4437a0fab243f6
164
py
Python
subproc/__init__.py
luxmeter/subproc
bb76d7a2ef63a91189a2b72792d6d5334f20d78f
[ "Apache-2.0" ]
null
null
null
subproc/__init__.py
luxmeter/subproc
bb76d7a2ef63a91189a2b72792d6d5334f20d78f
[ "Apache-2.0" ]
null
null
null
subproc/__init__.py
luxmeter/subproc
bb76d7a2ef63a91189a2b72792d6d5334f20d78f
[ "Apache-2.0" ]
null
null
null
from .subproc import DEVNULL, run, run_cmds, run_cmds_redirected, run_redirected __all__ = ['run', 'run_cmds', 'run_redirected', 'run_cmds_redirected', 'DEVNULL']
41
81
0.768293
22
164
5.181818
0.363636
0.245614
0.175439
0.22807
0
0
0
0
0
0
0
0
0.097561
164
3
82
54.666667
0.77027
0
0
0
0
0
0.310976
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
31024f429d501c05f80ed0db1ace43bfa32e0114
86
py
Python
novafitness_sds018/__init__.py
FEEprojects/novafitness-sds018
1ea85baea8fb7666a3369586b113f1edd3312172
[ "MIT" ]
2
2019-01-24T10:58:38.000Z
2021-03-16T20:33:11.000Z
novafitness_sds018/__init__.py
FEEprojects/novafitness-sds018
1ea85baea8fb7666a3369586b113f1edd3312172
[ "MIT" ]
null
null
null
novafitness_sds018/__init__.py
FEEprojects/novafitness-sds018
1ea85baea8fb7666a3369586b113f1edd3312172
[ "MIT" ]
null
null
null
from .novafitness_sds018 import NovafitnessReading, Novafitness, NovafitnessException
43
85
0.895349
7
86
10.857143
0.857143
0
0
0
0
0
0
0
0
0
0
0.0375
0.069767
86
1
86
86
0.9125
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
3129d941048ad90a5b940f17927ae9125c7448ce
75
py
Python
states/__init__.py
sd65/ssm-diff
f141212276cbb6865a66949b4acd307f35862d38
[ "MIT" ]
1
2021-11-14T21:14:20.000Z
2021-11-14T21:14:20.000Z
states/__init__.py
sd65/ssm-diff
f141212276cbb6865a66949b4acd307f35862d38
[ "MIT" ]
7
2019-06-24T19:07:18.000Z
2020-07-02T18:46:42.000Z
states/__init__.py
sd65/ssm-diff
f141212276cbb6865a66949b4acd307f35862d38
[ "MIT" ]
4
2019-07-30T14:56:47.000Z
2021-02-01T19:57:29.000Z
from .storage import YAMLFile, ParameterStore from .engine import DiffBase
25
45
0.84
9
75
7
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.12
75
2
46
37.5
0.954545
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
312c0e9577f108b94b325522ea5d16103ac9183b
241
py
Python
grammar/ptl/decl.py
xurxodiz/cardwalker
110e391d71854aaaa6d2b231d6c29f7d4f2d1dfa
[ "MIT" ]
1
2018-07-22T22:52:12.000Z
2018-07-22T22:52:12.000Z
grammar/ptl/decl.py
xurxodiz/cardwalker
110e391d71854aaaa6d2b231d6c29f7d4f2d1dfa
[ "MIT" ]
null
null
null
grammar/ptl/decl.py
xurxodiz/cardwalker
110e391d71854aaaa6d2b231d6c29f7d4f2d1dfa
[ "MIT" ]
null
null
null
from pyparsing import * import act loyaltymod = Forward().setParseAction(act.loyaltymod) ptmod = Forward().setParseAction(act.ptmod) loyaltystart = Forward().setParseAction(act.loyaltystart) ptstart = Forward().setParseAction(act.ptstart)
26.777778
57
0.79668
25
241
7.68
0.4
0.4375
0.5
0
0
0
0
0
0
0
0
0
0.082988
241
9
58
26.777778
0.868778
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
3137733a779df89392191c3b812a27d4a8e532c4
267
py
Python
rest_api/src/restapi/mixins.py
supermonkeyparadise/python
ce8799d643dd7af2b1cb5bc9179bcd83be9769d6
[ "MIT" ]
null
null
null
rest_api/src/restapi/mixins.py
supermonkeyparadise/python
ce8799d643dd7af2b1cb5bc9179bcd83be9769d6
[ "MIT" ]
null
null
null
rest_api/src/restapi/mixins.py
supermonkeyparadise/python
ce8799d643dd7af2b1cb5bc9179bcd83be9769d6
[ "MIT" ]
null
null
null
from django.http import JsonResponse class JsonResponseMixin(object): def render_to_json_response(self, context, **response_kwargs): return JsonResponse(self.get_data(context), **response_kwargs) def get_data(self, context): return context
26.7
70
0.745318
32
267
6
0.59375
0.114583
0.21875
0
0
0
0
0
0
0
0
0
0.168539
267
9
71
29.666667
0.864865
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.166667
0.333333
1
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
31467481191e5773e833bc50d4424c8751c803ff
2,922
py
Python
mecache/core.py
AberSheeran/mecache
1e4995bae16320c6d26ad9b0b0200a092a42eea0
[ "MIT" ]
4
2019-04-01T12:11:27.000Z
2019-04-09T11:14:33.000Z
mecache/core.py
AberSheeran/mecache
1e4995bae16320c6d26ad9b0b0200a092a42eea0
[ "MIT" ]
null
null
null
mecache/core.py
AberSheeran/mecache
1e4995bae16320c6d26ad9b0b0200a092a42eea0
[ "MIT" ]
1
2020-07-30T07:37:01.000Z
2020-07-30T07:37:01.000Z
import time import pickle import hmac from hashlib import sha1 from functools import wraps class Aboriginal: @staticmethod def keyf(*args, **kwargs): key = pickle.dumps([args, kwargs]) mac = hmac.new(b'mecache', msg=key, digestmod=sha1) return str(mac.hexdigest()) class BaseCache(Aboriginal): def get_cache(self, func, key, max_time): raise NotImplementedError("You must overwrite 'get_cache'!") def set_cache(self, result, func, key, max_time): raise NotImplementedError("You must overwrite 'set_cache'!") def cache(self, max_time, keyf=None): """ if *args or **kwargs can't be pickle.dumps, use keyf to transform them example: def calc(*args, **kwargs): return str(args)+str(kwargs) c = Cache() @c.cache(max_time=10, keyf=calc) def add(x, y): return x + y """ def timeout(func): @wraps(func) def warpper(*args, **kwargs): if keyf is None: key = self.keyf(*args, **kwargs) else: key = keyf(*args, **kwargs) # get result from cache result = self.get_cache(func, key, max_time) if result is None: # cache invalid result = func(*args, **kwargs) # set new cache self.set_cache(result, func, key, max_time) return result return warpper return timeout class AioBaseCache(Aboriginal): async def get_cache(self, func, key, max_time): raise NotImplementedError("You must overwrite 'get_cache'!") async def set_cache(self, result, func, key, max_time): raise NotImplementedError("You must overwrite 'set_cache'!") def cache(self, max_time, keyf=None): """ if *args or **kwargs can't be pickle.dumps, use keyf to transform them example: def calc(*args, **kwargs): return str(args)+str(kwargs) c = Cache() @c.cache(max_time=10, keyf=calc) async def add(x, y): return await do.something """ def timeout(func): @wraps(func) async def warpper(*args, **kwargs): if keyf is None: key = self.keyf(*args, **kwargs) else: key = keyf(*args, **kwargs) # get result from cache result = await self.get_cache(func, key, max_time) if result is None: # cache invalid result = await func(*args, **kwargs) # set new cache 
await self.set_cache(result, func, key, max_time) return result return warpper return timeout
29.515152
78
0.52601
333
2,922
4.543544
0.201201
0.079313
0.052875
0.074025
0.797092
0.748182
0.715135
0.715135
0.715135
0.715135
0
0.003286
0.375086
2,922
98
79
29.816327
0.825301
0.207734
0
0.509804
0
0
0.060959
0
0
0
0
0
0
1
0.156863
false
0
0.098039
0
0.45098
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
3165b190e8f17df5618b7a166bf04f02aaa9e80d
121
py
Python
atest/robot/running/enable_ctrl_c_event.py
bhirsz/robotframework
d62ee5091ed932aee8fc12ae5e340a5b19288f05
[ "ECL-2.0", "Apache-2.0" ]
7,073
2015-01-01T17:19:16.000Z
2022-03-31T22:01:29.000Z
atest/robot/running/enable_ctrl_c_event.py
bhirsz/robotframework
d62ee5091ed932aee8fc12ae5e340a5b19288f05
[ "ECL-2.0", "Apache-2.0" ]
2,412
2015-01-02T09:29:05.000Z
2022-03-31T13:10:46.000Z
atest/robot/running/enable_ctrl_c_event.py
bhirsz/robotframework
d62ee5091ed932aee8fc12ae5e340a5b19288f05
[ "ECL-2.0", "Apache-2.0" ]
2,298
2015-01-03T02:47:15.000Z
2022-03-31T02:00:16.000Z
try: from ctypes import windll windll.kernel32.SetConsoleCtrlHandler(None, False) except ImportError: pass
15.125
54
0.743802
13
121
6.923077
0.923077
0
0
0
0
0
0
0
0
0
0
0.020619
0.198347
121
7
55
17.285714
0.907216
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.2
0.4
0
0.4
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
0
0
0
5
316727da0660dbed8d5f83dc5317f376c0f05bc1
177
py
Python
depsland/data_struct/__init__.py
Likianta/depsland
695c55acea755bc5ad5da5cf1d2f5990baaa5a7a
[ "MIT" ]
null
null
null
depsland/data_struct/__init__.py
Likianta/depsland
695c55acea755bc5ad5da5cf1d2f5990baaa5a7a
[ "MIT" ]
null
null
null
depsland/data_struct/__init__.py
Likianta/depsland
695c55acea755bc5ad5da5cf1d2f5990baaa5a7a
[ "MIT" ]
null
null
null
from . import special_versions from .packge_info import PackageInfo from .requester import Requester from .requirement import Requirement from .venv_options import VenvOptions
25.285714
37
0.853107
22
177
6.727273
0.545455
0
0
0
0
0
0
0
0
0
0
0
0.118644
177
6
38
29.5
0.948718
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
31976c47657409305368cf5781f97e6e67e84d0a
50
py
Python
fewrel/__init__.py
bloomberg/emnlp21_fewrel
894bee155196f99116e2ff12a01e2a942f861242
[ "MIT" ]
5
2021-11-04T20:55:42.000Z
2022-02-26T08:03:23.000Z
fewrel/__init__.py
bloomberg/emnlp21_fewrel
894bee155196f99116e2ff12a01e2a942f861242
[ "MIT" ]
null
null
null
fewrel/__init__.py
bloomberg/emnlp21_fewrel
894bee155196f99116e2ff12a01e2a942f861242
[ "MIT" ]
4
2021-11-09T05:13:06.000Z
2022-02-26T13:10:37.000Z
from . import models from . import fewshot_re_kit
16.666667
28
0.8
8
50
4.75
0.75
0.526316
0
0
0
0
0
0
0
0
0
0
0.16
50
2
29
25
0.904762
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
31bc2e0bd6364f613e1275bf1be59d62d2c776ba
197
py
Python
cabot/metricsapp/models/__init__.py
Affirm/cabot
c648d50364efe8506cfbb849d38e328afdaa4e37
[ "MIT" ]
12
2016-04-01T18:49:05.000Z
2020-10-21T18:25:23.000Z
cabot/metricsapp/models/__init__.py
Affirm/cabot
c648d50364efe8506cfbb849d38e328afdaa4e37
[ "MIT" ]
125
2016-01-19T23:52:11.000Z
2021-04-08T00:30:36.000Z
cabot/metricsapp/models/__init__.py
Affirm/cabot
c648d50364efe8506cfbb849d38e328afdaa4e37
[ "MIT" ]
8
2015-12-01T14:54:02.000Z
2020-01-13T11:06:40.000Z
from .base import MetricsSourceBase, MetricsStatusCheckBase from .elastic import ElasticsearchSource, ElasticsearchStatusCheck from .grafana import GrafanaInstance, GrafanaDataSource, GrafanaPanel
49.25
69
0.883249
16
197
10.875
0.75
0
0
0
0
0
0
0
0
0
0
0
0.081218
197
3
70
65.666667
0.961326
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
31cbcd52cbda60c239eff04d0ca9972de8f1a938
108
py
Python
xnmt/test/utils.py
mukund-v/xnmt
d9e227b04c8ec54b4bb136e827bcfaf14cd11e3a
[ "Apache-2.0" ]
null
null
null
xnmt/test/utils.py
mukund-v/xnmt
d9e227b04c8ec54b4bb136e827bcfaf14cd11e3a
[ "Apache-2.0" ]
null
null
null
xnmt/test/utils.py
mukund-v/xnmt
d9e227b04c8ec54b4bb136e827bcfaf14cd11e3a
[ "Apache-2.0" ]
null
null
null
def has_cython(): try: from xnmt.cython import xnmt_cython return True except: return False
15.428571
39
0.685185
15
108
4.8
0.733333
0.277778
0
0
0
0
0
0
0
0
0
0
0.259259
108
6
40
18
0.9
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
true
0
0.166667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
1
0
0
5
9ed4c9481d1f988354d656f11f9d98ee7291a5c8
55
py
Python
src/bookmark/tag/__init__.py
microservice-experiment-flask-0hsn/pocket-ws-flask
e7582a6ebe4b554070f183e43042c87762633085
[ "MIT" ]
null
null
null
src/bookmark/tag/__init__.py
microservice-experiment-flask-0hsn/pocket-ws-flask
e7582a6ebe4b554070f183e43042c87762633085
[ "MIT" ]
null
null
null
src/bookmark/tag/__init__.py
microservice-experiment-flask-0hsn/pocket-ws-flask
e7582a6ebe4b554070f183e43042c87762633085
[ "MIT" ]
null
null
null
from .domain import Tag from .api import blueprint_tag
18.333333
30
0.818182
9
55
4.888889
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.145455
55
2
31
27.5
0.93617
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
9ede2c2b1fb03b803184ad0361695515a113541f
126
py
Python
services/controller/src/plz/controller/results/__init__.py
prodo-ai/plz
46d179fca5730b7ed2f236d53d78c42358aed72b
[ "MIT" ]
29
2018-04-14T20:05:41.000Z
2019-04-15T09:02:40.000Z
services/controller/src/plz/controller/results/__init__.py
neomatrix369/plz
12f05a8d071e9c1976c444d34161530ffa73eeae
[ "MIT" ]
23
2018-04-14T23:32:32.000Z
2019-06-07T21:38:58.000Z
services/controller/src/plz/controller/results/__init__.py
neomatrix369/plz
12f05a8d071e9c1976c444d34161530ffa73eeae
[ "MIT" ]
3
2018-09-19T15:08:21.000Z
2019-03-22T12:21:07.000Z
from .local import LocalResultsStorage # noqa: F401 (unused) from .results_base import ResultsStorage # noqa: F401 (unused)
42
63
0.777778
15
126
6.466667
0.666667
0.164948
0.28866
0
0
0
0
0
0
0
0
0.055556
0.142857
126
2
64
63
0.842593
0.309524
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
9ef9ebb50bc1a5ca4c6d6d16bbe5864b7bd76e9c
3,724
py
Python
packages/pyright-internal/src/tests/samples/arrowCallable3.py
PylanceBot/pyright
66204014fef40dc4ff9d4822ba0095f3073d49bf
[ "MIT" ]
1
2022-02-14T04:03:29.000Z
2022-02-14T04:03:29.000Z
packages/pyright-internal/src/tests/samples/arrowCallable3.py
PylanceBot/pyright
66204014fef40dc4ff9d4822ba0095f3073d49bf
[ "MIT" ]
null
null
null
packages/pyright-internal/src/tests/samples/arrowCallable3.py
PylanceBot/pyright
66204014fef40dc4ff9d4822ba0095f3073d49bf
[ "MIT" ]
null
null
null
# This sample tests deeply-nested parentheses, which can cause problems # in the parser for arrow callables. from typing import Any def func(*args: Any): ... def func2(*args: Any) -> str: ... def func3(a: str | None, b: str | None, x: str | None, y: str | None, z1: int, z2: int): func( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( "text " + func2( ( "null" if a is None else a ) + ( "null" if b is None else b ) ) ) + " " ) + ( "null" if x is None else x ) ) + " " ) + ("null" if y is None else y) ) + " " ) + func2( (("null" if ((y is None)) else str(y))) ) ) + " " ) + ("null" if y is None else y) ) + " => " ) + str(z1) ) + " " ) + str(z2) ) + " " ) + str(type(z1)) ) ) x: (((((((((((((() -> None) -> None) -> None) -> None) -> None) -> None) -> None) -> None) -> None) -> None) -> None) -> None) -> None)
44.333333
135
0.104458
115
3,724
3.382609
0.347826
0.246787
0.339332
0.411311
0.269923
0.269923
0.226221
0.133676
0.133676
0.133676
0
0.015126
0.840226
3,724
83
136
44.86747
0.638655
0.027927
0
0.236111
0
0
0.010782
0
0
0
0
0
0
0
null
null
0
0.013889
null
null
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
5
73256e00ddb3876659ba2a8a94e72ff1272f1e51
437
py
Python
ariadne/contrib/__init__.py
MeTaNoV/inception-external-recommender
41d894c05053720d2b37510568d11d36433c3cf9
[ "Apache-2.0" ]
30
2019-12-03T21:41:44.000Z
2022-02-23T16:11:24.000Z
ariadne/contrib/__init__.py
MeTaNoV/inception-external-recommender
41d894c05053720d2b37510568d11d36433c3cf9
[ "Apache-2.0" ]
31
2019-10-22T19:59:09.000Z
2022-02-07T19:41:42.000Z
ariadne/contrib/__init__.py
MeTaNoV/inception-external-recommender
41d894c05053720d2b37510568d11d36433c3cf9
[ "Apache-2.0" ]
11
2019-11-01T06:30:40.000Z
2022-03-21T16:01:07.000Z
from ariadne.contrib.adapters import AdapterSequenceTagger from ariadne.contrib.jieba import JiebaSegmenter from ariadne.contrib.nltk import NltkStemmer from ariadne.contrib.sbert import SbertSentenceClassifier from ariadne.contrib.sklearn import SklearnSentenceClassifier, SklearnMentionDetector from ariadne.contrib.spacy import SpacyNerClassifier, SpacyPosClassifier from ariadne.contrib.stringmatcher import LevenshteinStringMatcher
54.625
85
0.894737
44
437
8.886364
0.454545
0.196931
0.322251
0
0
0
0
0
0
0
0
0
0.06865
437
7
86
62.428571
0.960688
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
7348ddefe8db2334e25e2cabecc2f2cf5ec9ded5
114
py
Python
costar_task_plan/python/costar_task_plan/robotics/perception/__init__.py
cpaxton/costar_plan
be5c12f9d0e9d7078e6a5c283d3be059e7f3d040
[ "Apache-2.0" ]
66
2018-10-31T04:58:53.000Z
2022-03-17T02:32:25.000Z
costar_task_plan/python/costar_task_plan/robotics/perception/__init__.py
cpaxton/costar_plan
be5c12f9d0e9d7078e6a5c283d3be059e7f3d040
[ "Apache-2.0" ]
8
2018-10-23T21:19:25.000Z
2018-12-03T02:08:41.000Z
costar_task_plan/python/costar_task_plan/robotics/perception/__init__.py
cpaxton/costar_plan
be5c12f9d0e9d7078e6a5c283d3be059e7f3d040
[ "Apache-2.0" ]
25
2018-10-19T00:54:17.000Z
2021-10-10T08:28:15.000Z
from .transform_integrator import TransformIntegator from .urdf_to_collision_object import CollisionObjectManager
38
60
0.912281
12
114
8.333333
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.070175
114
2
61
57
0.943396
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
b4383875b089c239ea13a6848e41eed0f0ab8c02
67
py
Python
utils/__init__.py
LividWo/Perturbed-Masking
f57859261a7daa24de6310ad6ea8cce977d52fde
[ "MIT" ]
94
2020-05-24T03:57:08.000Z
2022-03-16T08:47:27.000Z
utils/__init__.py
LividWo/Perturbed-Masking
f57859261a7daa24de6310ad6ea8cce977d52fde
[ "MIT" ]
9
2020-06-22T03:52:20.000Z
2021-09-08T06:53:42.000Z
utils/__init__.py
LividWo/Perturbed-Masking
f57859261a7daa24de6310ad6ea8cce977d52fde
[ "MIT" ]
13
2020-06-14T03:07:39.000Z
2022-02-24T08:56:45.000Z
from .tokens import UToken from .conlludataset import ConllUDataset
33.5
40
0.865672
8
67
7.25
0.625
0
0
0
0
0
0
0
0
0
0
0
0.104478
67
2
40
33.5
0.966667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
b4391bd6012aa042e2e78de4bd4dc8296c8f4b3e
27
py
Python
keras/train.py
nynicg/graduation
b02cfc083b37c4511fa48be0ebc8528e21d4159b
[ "FTL" ]
null
null
null
keras/train.py
nynicg/graduation
b02cfc083b37c4511fa48be0ebc8528e21d4159b
[ "FTL" ]
null
null
null
keras/train.py
nynicg/graduation
b02cfc083b37c4511fa48be0ebc8528e21d4159b
[ "FTL" ]
null
null
null
import snnn snnn.train()
9
12
0.703704
4
27
4.75
0.75
0
0
0
0
0
0
0
0
0
0
0
0.185185
27
3
13
9
0.863636
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
b441e7c82bb9214f8c4c89ccd5df6b7f168c015a
118
wsgi
Python
flaskapp.wsgi
dcollan/Gator-Grub
e46b81ab93dc5296679ee8bb6a203c2d1a2c69f3
[ "MIT" ]
null
null
null
flaskapp.wsgi
dcollan/Gator-Grub
e46b81ab93dc5296679ee8bb6a203c2d1a2c69f3
[ "MIT" ]
null
null
null
flaskapp.wsgi
dcollan/Gator-Grub
e46b81ab93dc5296679ee8bb6a203c2d1a2c69f3
[ "MIT" ]
null
null
null
import sys sys.path.insert(0, '/var/www/html/csc648-03-sp21-Team02/application') from main import app as application
23.6
69
0.779661
20
118
4.6
0.85
0
0
0
0
0
0
0
0
0
0
0.093458
0.09322
118
4
70
29.5
0.766355
0
0
0
0
0
0.398305
0.398305
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
b4608ced0546db35235eabb8a5c7d5b8bc3dad83
89
py
Python
ulidr/__init__.py
RonquilloAeon/ulidr
8406ece22ff3d47c3c360928769cfa85adaec36a
[ "MIT" ]
null
null
null
ulidr/__init__.py
RonquilloAeon/ulidr
8406ece22ff3d47c3c360928769cfa85adaec36a
[ "MIT" ]
null
null
null
ulidr/__init__.py
RonquilloAeon/ulidr
8406ece22ff3d47c3c360928769cfa85adaec36a
[ "MIT" ]
null
null
null
from ._rust_ulid import generate_ulid __version__ = "0.1.0" __all__ = ["generate_ulid"]
17.8
37
0.752809
13
89
4.230769
0.692308
0.436364
0
0
0
0
0
0
0
0
0
0.038462
0.123596
89
4
38
22.25
0.666667
0
0
0
1
0
0.202247
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
b47379248dad547b3811d27d09660f72e71494ed
192
py
Python
contact/urls.py
Sahil1515/VeriListo
7488df770196dc416bf5a5ef0ff29055d0154f74
[ "MIT" ]
null
null
null
contact/urls.py
Sahil1515/VeriListo
7488df770196dc416bf5a5ef0ff29055d0154f74
[ "MIT" ]
null
null
null
contact/urls.py
Sahil1515/VeriListo
7488df770196dc416bf5a5ef0ff29055d0154f74
[ "MIT" ]
null
null
null
from django.urls import path from . import views urlpatterns = [ path('',views.contact,name='contact'), path('send_mail_by_user',views.send_mail_by_user,name='send_mail_by_user'), ]
21.333333
79
0.734375
29
192
4.551724
0.448276
0.181818
0.227273
0.318182
0
0
0
0
0
0
0
0
0.125
192
9
80
21.333333
0.785714
0
0
0
0
0
0.213542
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
b4818ce695a52ff6cbc3f1cd492575e1570ecafd
105
py
Python
week1.py
alicia3106/csws-week1
3506e2bd288e1427e0a1b87dc25f1e5185a484bd
[ "Apache-2.0" ]
null
null
null
week1.py
alicia3106/csws-week1
3506e2bd288e1427e0a1b87dc25f1e5185a484bd
[ "Apache-2.0" ]
null
null
null
week1.py
alicia3106/csws-week1
3506e2bd288e1427e0a1b87dc25f1e5185a484bd
[ "Apache-2.0" ]
null
null
null
message = "Hello python world" print(message) message = "Hello python crash course world" print(message)
21
43
0.771429
14
105
5.785714
0.5
0.296296
0.444444
0
0
0
0
0
0
0
0
0
0.133333
105
5
44
21
0.89011
0
0
0.5
0
0
0.462264
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
5
b483908674abf4a64f4c506fc37adea0482f1c0f
79
py
Python
easyTCP2/Server/__init__.py
dsal3389/easyTCP2
f595442c9e56e36cbc067df95675f4fa1f7b889f
[ "MIT" ]
1
2019-03-28T02:10:52.000Z
2019-03-28T02:10:52.000Z
easyTCP2/Server/__init__.py
dsal3389/easyTCP2
f595442c9e56e36cbc067df95675f4fa1f7b889f
[ "MIT" ]
null
null
null
easyTCP2/Server/__init__.py
dsal3389/easyTCP2
f595442c9e56e36cbc067df95675f4fa1f7b889f
[ "MIT" ]
null
null
null
from .Server import Server from .Client import Client from .Groups import Group
26.333333
26
0.822785
12
79
5.416667
0.5
0
0
0
0
0
0
0
0
0
0
0
0.139241
79
3
27
26.333333
0.955882
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
81e7d8781d7961f182d1dd71188b8f3d4f6f18be
12
py
Python
Ex_Files_Python_EssT/Exercise Files/Chap09/teste.py
cinthiatengan/Basic-Python
9dbc280af2e9af9b79421018be814d0f178d8097
[ "MIT" ]
null
null
null
Ex_Files_Python_EssT/Exercise Files/Chap09/teste.py
cinthiatengan/Basic-Python
9dbc280af2e9af9b79421018be814d0f178d8097
[ "MIT" ]
null
null
null
Ex_Files_Python_EssT/Exercise Files/Chap09/teste.py
cinthiatengan/Basic-Python
9dbc280af2e9af9b79421018be814d0f178d8097
[ "MIT" ]
null
null
null
print('OLA')
12
12
0.666667
2
12
4
1
0
0
0
0
0
0
0
0
0
0
0
0
12
1
12
12
0.666667
0
0
0
0
0
0.230769
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
c31ed96af97354d17c41b803d86885e194942ea0
247
py
Python
profiles_api/admin.py
gouemoolaf28/profiles-rest-api
a4b1f2ef136bc445df0923930e3c42af0de7e800
[ "MIT" ]
null
null
null
profiles_api/admin.py
gouemoolaf28/profiles-rest-api
a4b1f2ef136bc445df0923930e3c42af0de7e800
[ "MIT" ]
7
2020-06-06T01:35:34.000Z
2022-02-10T10:17:36.000Z
profiles_api/admin.py
gouemoolaf28/profiles-rest-api
a4b1f2ef136bc445df0923930e3c42af0de7e800
[ "MIT" ]
null
null
null
from django.contrib import admin #from .models import UserProfile from profiles_api import models # Register your models here. # admin.site.register(UserProfile) admin.site.register(models.UserProfile) admin.site.register(models.ProfileFeedItem)
27.444444
43
0.82996
32
247
6.375
0.4375
0.132353
0.25
0.27451
0.333333
0
0
0
0
0
0
0
0.089069
247
8
44
30.875
0.906667
0.368421
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
c33a7f1cd832956cc2f72cb5203978bbc71d475d
18,897
py
Python
sdk/python/pulumi_aws/organizations/delegated_administrator.py
chivandikwa/pulumi-aws
19c08bf9dcb90544450ffa4eec7bf6751058fde2
[ "ECL-2.0", "Apache-2.0" ]
1
2021-11-10T16:33:40.000Z
2021-11-10T16:33:40.000Z
sdk/python/pulumi_aws/organizations/delegated_administrator.py
chivandikwa/pulumi-aws
19c08bf9dcb90544450ffa4eec7bf6751058fde2
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
sdk/python/pulumi_aws/organizations/delegated_administrator.py
chivandikwa/pulumi-aws
19c08bf9dcb90544450ffa4eec7bf6751058fde2
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities __all__ = ['DelegatedAdministratorArgs', 'DelegatedAdministrator'] @pulumi.input_type class DelegatedAdministratorArgs: def __init__(__self__, *, account_id: pulumi.Input[str], service_principal: pulumi.Input[str]): """ The set of arguments for constructing a DelegatedAdministrator resource. :param pulumi.Input[str] account_id: The account ID number of the member account in the organization to register as a delegated administrator. :param pulumi.Input[str] service_principal: The service principal of the AWS service for which you want to make the member account a delegated administrator. """ pulumi.set(__self__, "account_id", account_id) pulumi.set(__self__, "service_principal", service_principal) @property @pulumi.getter(name="accountId") def account_id(self) -> pulumi.Input[str]: """ The account ID number of the member account in the organization to register as a delegated administrator. """ return pulumi.get(self, "account_id") @account_id.setter def account_id(self, value: pulumi.Input[str]): pulumi.set(self, "account_id", value) @property @pulumi.getter(name="servicePrincipal") def service_principal(self) -> pulumi.Input[str]: """ The service principal of the AWS service for which you want to make the member account a delegated administrator. 
""" return pulumi.get(self, "service_principal") @service_principal.setter def service_principal(self, value: pulumi.Input[str]): pulumi.set(self, "service_principal", value) @pulumi.input_type class _DelegatedAdministratorState: def __init__(__self__, *, account_id: Optional[pulumi.Input[str]] = None, arn: Optional[pulumi.Input[str]] = None, delegation_enabled_date: Optional[pulumi.Input[str]] = None, email: Optional[pulumi.Input[str]] = None, joined_method: Optional[pulumi.Input[str]] = None, joined_timestamp: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, service_principal: Optional[pulumi.Input[str]] = None, status: Optional[pulumi.Input[str]] = None): """ Input properties used for looking up and filtering DelegatedAdministrator resources. :param pulumi.Input[str] account_id: The account ID number of the member account in the organization to register as a delegated administrator. :param pulumi.Input[str] arn: The Amazon Resource Name (ARN) of the delegated administrator's account. :param pulumi.Input[str] delegation_enabled_date: The date when the account was made a delegated administrator. :param pulumi.Input[str] email: The email address that is associated with the delegated administrator's AWS account. :param pulumi.Input[str] joined_method: The method by which the delegated administrator's account joined the organization. :param pulumi.Input[str] joined_timestamp: The date when the delegated administrator's account became a part of the organization. :param pulumi.Input[str] name: The friendly name of the delegated administrator's account. :param pulumi.Input[str] service_principal: The service principal of the AWS service for which you want to make the member account a delegated administrator. :param pulumi.Input[str] status: The status of the delegated administrator's account in the organization. 
""" if account_id is not None: pulumi.set(__self__, "account_id", account_id) if arn is not None: pulumi.set(__self__, "arn", arn) if delegation_enabled_date is not None: pulumi.set(__self__, "delegation_enabled_date", delegation_enabled_date) if email is not None: pulumi.set(__self__, "email", email) if joined_method is not None: pulumi.set(__self__, "joined_method", joined_method) if joined_timestamp is not None: pulumi.set(__self__, "joined_timestamp", joined_timestamp) if name is not None: pulumi.set(__self__, "name", name) if service_principal is not None: pulumi.set(__self__, "service_principal", service_principal) if status is not None: pulumi.set(__self__, "status", status) @property @pulumi.getter(name="accountId") def account_id(self) -> Optional[pulumi.Input[str]]: """ The account ID number of the member account in the organization to register as a delegated administrator. """ return pulumi.get(self, "account_id") @account_id.setter def account_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "account_id", value) @property @pulumi.getter def arn(self) -> Optional[pulumi.Input[str]]: """ The Amazon Resource Name (ARN) of the delegated administrator's account. """ return pulumi.get(self, "arn") @arn.setter def arn(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "arn", value) @property @pulumi.getter(name="delegationEnabledDate") def delegation_enabled_date(self) -> Optional[pulumi.Input[str]]: """ The date when the account was made a delegated administrator. """ return pulumi.get(self, "delegation_enabled_date") @delegation_enabled_date.setter def delegation_enabled_date(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "delegation_enabled_date", value) @property @pulumi.getter def email(self) -> Optional[pulumi.Input[str]]: """ The email address that is associated with the delegated administrator's AWS account. 
""" return pulumi.get(self, "email") @email.setter def email(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "email", value) @property @pulumi.getter(name="joinedMethod") def joined_method(self) -> Optional[pulumi.Input[str]]: """ The method by which the delegated administrator's account joined the organization. """ return pulumi.get(self, "joined_method") @joined_method.setter def joined_method(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "joined_method", value) @property @pulumi.getter(name="joinedTimestamp") def joined_timestamp(self) -> Optional[pulumi.Input[str]]: """ The date when the delegated administrator's account became a part of the organization. """ return pulumi.get(self, "joined_timestamp") @joined_timestamp.setter def joined_timestamp(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "joined_timestamp", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ The friendly name of the delegated administrator's account. """ return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @property @pulumi.getter(name="servicePrincipal") def service_principal(self) -> Optional[pulumi.Input[str]]: """ The service principal of the AWS service for which you want to make the member account a delegated administrator. """ return pulumi.get(self, "service_principal") @service_principal.setter def service_principal(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "service_principal", value) @property @pulumi.getter def status(self) -> Optional[pulumi.Input[str]]: """ The status of the delegated administrator's account in the organization. 
""" return pulumi.get(self, "status") @status.setter def status(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "status", value) class DelegatedAdministrator(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, account_id: Optional[pulumi.Input[str]] = None, service_principal: Optional[pulumi.Input[str]] = None, __props__=None): """ Provides a resource to manage an [AWS Organizations Delegated Administrator](https://docs.aws.amazon.com/organizations/latest/APIReference/API_RegisterDelegatedAdministrator.html). ## Example Usage ```python import pulumi import pulumi_aws as aws example = aws.organizations.DelegatedAdministrator("example", account_id="AWS ACCOUNT ID", service_principal="Service principal") ``` ## Import `aws_organizations_delegated_administrator` can be imported by using the account ID and its service principal, e.g., ```sh $ pulumi import aws:organizations/delegatedAdministrator:DelegatedAdministrator example 123456789012/config.amazonaws.com ``` :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] account_id: The account ID number of the member account in the organization to register as a delegated administrator. :param pulumi.Input[str] service_principal: The service principal of the AWS service for which you want to make the member account a delegated administrator. """ ... @overload def __init__(__self__, resource_name: str, args: DelegatedAdministratorArgs, opts: Optional[pulumi.ResourceOptions] = None): """ Provides a resource to manage an [AWS Organizations Delegated Administrator](https://docs.aws.amazon.com/organizations/latest/APIReference/API_RegisterDelegatedAdministrator.html). 
## Example Usage ```python import pulumi import pulumi_aws as aws example = aws.organizations.DelegatedAdministrator("example", account_id="AWS ACCOUNT ID", service_principal="Service principal") ``` ## Import `aws_organizations_delegated_administrator` can be imported by using the account ID and its service principal, e.g., ```sh $ pulumi import aws:organizations/delegatedAdministrator:DelegatedAdministrator example 123456789012/config.amazonaws.com ``` :param str resource_name: The name of the resource. :param DelegatedAdministratorArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. """ ... def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(DelegatedAdministratorArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, account_id: Optional[pulumi.Input[str]] = None, service_principal: Optional[pulumi.Input[str]] = None, __props__=None): if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = DelegatedAdministratorArgs.__new__(DelegatedAdministratorArgs) if account_id is None and not opts.urn: raise TypeError("Missing required property 'account_id'") __props__.__dict__["account_id"] = account_id if service_principal is None and not opts.urn: raise TypeError("Missing required property 'service_principal'") 
__props__.__dict__["service_principal"] = service_principal __props__.__dict__["arn"] = None __props__.__dict__["delegation_enabled_date"] = None __props__.__dict__["email"] = None __props__.__dict__["joined_method"] = None __props__.__dict__["joined_timestamp"] = None __props__.__dict__["name"] = None __props__.__dict__["status"] = None super(DelegatedAdministrator, __self__).__init__( 'aws:organizations/delegatedAdministrator:DelegatedAdministrator', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, account_id: Optional[pulumi.Input[str]] = None, arn: Optional[pulumi.Input[str]] = None, delegation_enabled_date: Optional[pulumi.Input[str]] = None, email: Optional[pulumi.Input[str]] = None, joined_method: Optional[pulumi.Input[str]] = None, joined_timestamp: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, service_principal: Optional[pulumi.Input[str]] = None, status: Optional[pulumi.Input[str]] = None) -> 'DelegatedAdministrator': """ Get an existing DelegatedAdministrator resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] account_id: The account ID number of the member account in the organization to register as a delegated administrator. :param pulumi.Input[str] arn: The Amazon Resource Name (ARN) of the delegated administrator's account. :param pulumi.Input[str] delegation_enabled_date: The date when the account was made a delegated administrator. :param pulumi.Input[str] email: The email address that is associated with the delegated administrator's AWS account. 
:param pulumi.Input[str] joined_method: The method by which the delegated administrator's account joined the organization. :param pulumi.Input[str] joined_timestamp: The date when the delegated administrator's account became a part of the organization. :param pulumi.Input[str] name: The friendly name of the delegated administrator's account. :param pulumi.Input[str] service_principal: The service principal of the AWS service for which you want to make the member account a delegated administrator. :param pulumi.Input[str] status: The status of the delegated administrator's account in the organization. """ opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _DelegatedAdministratorState.__new__(_DelegatedAdministratorState) __props__.__dict__["account_id"] = account_id __props__.__dict__["arn"] = arn __props__.__dict__["delegation_enabled_date"] = delegation_enabled_date __props__.__dict__["email"] = email __props__.__dict__["joined_method"] = joined_method __props__.__dict__["joined_timestamp"] = joined_timestamp __props__.__dict__["name"] = name __props__.__dict__["service_principal"] = service_principal __props__.__dict__["status"] = status return DelegatedAdministrator(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name="accountId") def account_id(self) -> pulumi.Output[str]: """ The account ID number of the member account in the organization to register as a delegated administrator. """ return pulumi.get(self, "account_id") @property @pulumi.getter def arn(self) -> pulumi.Output[str]: """ The Amazon Resource Name (ARN) of the delegated administrator's account. """ return pulumi.get(self, "arn") @property @pulumi.getter(name="delegationEnabledDate") def delegation_enabled_date(self) -> pulumi.Output[str]: """ The date when the account was made a delegated administrator. 
""" return pulumi.get(self, "delegation_enabled_date") @property @pulumi.getter def email(self) -> pulumi.Output[str]: """ The email address that is associated with the delegated administrator's AWS account. """ return pulumi.get(self, "email") @property @pulumi.getter(name="joinedMethod") def joined_method(self) -> pulumi.Output[str]: """ The method by which the delegated administrator's account joined the organization. """ return pulumi.get(self, "joined_method") @property @pulumi.getter(name="joinedTimestamp") def joined_timestamp(self) -> pulumi.Output[str]: """ The date when the delegated administrator's account became a part of the organization. """ return pulumi.get(self, "joined_timestamp") @property @pulumi.getter def name(self) -> pulumi.Output[str]: """ The friendly name of the delegated administrator's account. """ return pulumi.get(self, "name") @property @pulumi.getter(name="servicePrincipal") def service_principal(self) -> pulumi.Output[str]: """ The service principal of the AWS service for which you want to make the member account a delegated administrator. """ return pulumi.get(self, "service_principal") @property @pulumi.getter def status(self) -> pulumi.Output[str]: """ The status of the delegated administrator's account in the organization. """ return pulumi.get(self, "status")
43.844548
188
0.665238
2,176
18,897
5.556985
0.08364
0.065498
0.081045
0.072775
0.803837
0.766788
0.718492
0.689216
0.674744
0.620906
0
0.001747
0.242525
18,897
430
189
43.946512
0.843021
0.359898
0
0.483051
1
0
0.109122
0.028298
0
0
0
0
0
1
0.161017
false
0.004237
0.021186
0
0.283898
0
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
c35c0e2e3d2137535858db4788e0ffd1e985b97d
68
py
Python
Strain/test1.py
chrishoen/Plots
d7fac99b0c80ebfc9148442576b1f14c54f2e88b
[ "MIT" ]
null
null
null
Strain/test1.py
chrishoen/Plots
d7fac99b0c80ebfc9148442576b1f14c54f2e88b
[ "MIT" ]
null
null
null
Strain/test1.py
chrishoen/Plots
d7fac99b0c80ebfc9148442576b1f14c54f2e88b
[ "MIT" ]
null
null
null
import matplotlib.pyplot as plt import numpy as np print("hello")
11.333333
31
0.764706
11
68
4.727273
0.818182
0
0
0
0
0
0
0
0
0
0
0
0.161765
68
5
32
13.6
0.912281
0
0
0
0
0
0.074627
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
5ed92d0fbbc74dcaa8eba144fb1888c504c1d64f
9,324
py
Python
test/test_squash.py
max-wittig/smartsquash
3f68f2f3f3909316017126941ae2d5373593025c
[ "MIT" ]
2
2020-03-09T13:17:29.000Z
2020-03-10T14:09:25.000Z
test/test_squash.py
max-wittig/smartsquash
3f68f2f3f3909316017126941ae2d5373593025c
[ "MIT" ]
1
2020-03-11T08:59:03.000Z
2020-03-11T22:00:16.000Z
test/test_squash.py
max-wittig/smartsquash
3f68f2f3f3909316017126941ae2d5373593025c
[ "MIT" ]
null
null
null
"""Pytest suite for the smartsquash squash/rebase-planning helpers."""

import re
from typing import Callable, Dict, List, Set

import git

from smartsquash import helpers, sq, squash


def test_get_commits_in_between(repository: git.Repo, commit_files: Callable):
    """Commits strictly between two endpoints are returned in order."""
    commit_1: git.Commit = commit_files(repository, ["test.txt"], "whatever")
    commit_2: git.Commit = commit_files(
        repository, ["test.txt", "stuff.txt"], "some other content"
    )
    commit_3: git.Commit = commit_files(
        repository, ["something.txt"], "even other content"
    )
    commit_4: git.Commit = commit_files(
        repository, ["another-one.txt"], "even other content"
    )
    commits: List[git.Commit] = sq.retrieve_commits(repository, "master")
    in_between: List[git.Commit] = squash.get_commits_in_between(
        commit_4, commit_1, commits
    )
    assert len(in_between) == 2
    assert in_between[0] == commit_2
    assert in_between[1] == commit_3


def test_has_relevant_files_changed_in_between_1(
    repository: git.Repo, commit_files: Callable
):
    """No overlap in touched files between the endpoints -> False."""
    commit_1 = commit_files(repository, ["test.txt"], "Some content")
    commit_2 = commit_files(repository, ["whatever.txt"], "Some content")
    commit_3 = commit_files(repository, ["another.txt"], "Some content")
    commit_4 = commit_files(repository, ["test.txt"], "Other content")
    commits = [commit_4, commit_3, commit_2, commit_1]
    commit_changed_files: Dict[str, Set[str]] = {
        commit_1.hexsha: {"test.txt"},
        commit_2.hexsha: {"whatever.txt"},
        commit_3.hexsha: {"another.txt"},
        commit_4.hexsha: {"test.txt"},
    }
    assert (
        squash.has_relevant_files_changed_in_between(
            commit_4, commit_1, commits, commit_changed_files
        )
        is False
    )


def test_has_relevant_files_changed_in_between_2(
    repository: git.Repo, commit_files: Callable
):
    """Intermediate commits touching the same files -> True; adjacent -> False."""
    commit_1 = commit_files(repository, ["test.txt"], "Some content")
    commit_2 = commit_files(repository, ["whatever.txt"], "Some content")
    commit_3 = commit_files(repository, ["another.txt", "test.txt"], "Some content")
    commit_4 = commit_files(repository, ["another.txt", "test.txt"], "Some")
    commit_5 = commit_files(repository, ["test.txt"], "Other content")
    commits = [commit_5, commit_4, commit_3, commit_2, commit_1]
    commit_changed_files: Dict[str, Set[str]] = {
        commit_1.hexsha: {"test.txt"},
        commit_2.hexsha: {"whatever.txt"},
        commit_3.hexsha: {"another.txt", "test.txt"},
        commit_4.hexsha: {"another.txt", "test.txt"},
        commit_5.hexsha: {"test.txt"},
    }
    assert (
        squash.has_relevant_files_changed_in_between(
            commit_5, commit_1, commits, commit_changed_files
        )
        is True
    )
    assert (
        squash.has_relevant_files_changed_in_between(
            commit_4, commit_3, commits, commit_changed_files
        )
        is False
    )


def test_commit_diff_empty(repository: git.Repo, commit_files: Callable):
    """A commit that reverts a file to earlier content diffs empty against it."""
    content = "old content"
    filename = "test.txt"
    # commit file to repo
    commit_a = commit_files(repository, [filename], content)
    # now write something else in file
    commit_b = commit_files(repository, [filename], "new content")
    # change file back
    commit_c = commit_files(repository, [filename], content)
    assert squash.commit_diff_empty(commit_a, commit_c)
    assert not squash.commit_diff_empty(commit_a, commit_b)
    assert not squash.commit_diff_empty(commit_b, commit_c)


def test_has_same_files():
    """File-set comparison ignores order but not membership."""
    files_a: List[str] = ["test.txt", "README.md"]
    files_b: List[str] = ["README.md", "test.txt"]
    assert squash.has_same_files(files_a, files_b)
    files_b.append("Stuff.whatever")
    assert not squash.has_same_files(files_a, files_b)
    files_c: List[str] = ["1.txt", "2.txt", "3.txt"]
    assert not squash.has_same_files(files_a, files_c)


def test_get_rebase_data_1(repository: git.Repo, commit_files: Callable):
    """Four commits to the same file collapse into pick + three fixups."""
    commit_files(repository, ["test.txt"], "whatever")
    commit_files(repository, ["test.txt"], "some other content")
    commit_files(repository, ["test.txt"], "even other content")
    commit_files(repository, ["test.txt"], "wwaaaaa")
    commits = helpers.retrieve_commits(repository, "master")
    is_rebase, rebase_data = squash.get_rebase_data(commits)
    plan_lines = rebase_data.split("\n")
    assert is_rebase is True
    assert re.match(r"^pick\s.{7}\s.*", plan_lines[0])
    assert re.match(r"^fixup\s.{7}\s.*", plan_lines[1])
    assert re.match(r"^fixup\s.{7}\s.*", plan_lines[2])
    assert re.match(r"^fixup\s.{7}\s.*", plan_lines[3])


def test_get_rebase_data_2(repository: git.Repo, commit_files: Callable):
    """Differing file sets in the middle commit prevent any squashing."""
    commit_files(repository, ["test.txt"], "whatever")
    commit_files(repository, ["test.txt", "stuff.txt"], "some other content")
    commit_files(repository, ["test.txt"], "even other content")
    commits = helpers.retrieve_commits(repository, "master")
    is_rebase, rebase_data = squash.get_rebase_data(commits)
    plan_lines = rebase_data.split("\n")
    assert is_rebase is False
    assert re.match(r"^pick\s.{7}\s.*", plan_lines[0])
    assert re.match(r"^pick\s.{7}\s.*", plan_lines[1])
    assert re.match(r"^pick\s.{7}\s.*", plan_lines[2])


def test_get_rebase_data_3(repository: git.Repo, commit_files: Callable):
    """Only the pair with identical file sets becomes a fixup."""
    commit_files(repository, ["test.txt", "stuff.txt"], "whatever")
    commit_files(repository, ["test.txt", "stuff.txt"], "some other content")
    commit_files(repository, ["test.txt"], "even other content")
    commits = helpers.retrieve_commits(repository, "master")
    is_rebase, rebase_data = squash.get_rebase_data(commits)
    plan_lines = rebase_data.split("\n")
    assert is_rebase is True
    assert re.match(r"^pick\s.{7}\s.*", plan_lines[0])
    assert re.match(r"^fixup\s.{7}\s.*", plan_lines[1])
    assert re.match(r"^pick\s.{7}\s.*", plan_lines[2])


def test_get_rebase_data_4(repository: git.Repo, commit_files: Callable):
    """A matching later commit is reordered behind its partner as a fixup."""
    commit_1 = commit_files(repository, ["test.txt"], "whatever")
    commit_2 = commit_files(repository, ["whatever.img"], "some other content")
    commit_3 = commit_files(repository, ["test.txt"], "even other content")
    commits = helpers.retrieve_commits(repository, "master")
    is_rebase, rebase_data = squash.get_rebase_data(commits)
    plan_lines = rebase_data.split("\n")
    assert is_rebase is True
    assert re.match(rf"^pick\s{commit_1.hexsha[:7]}\s.*", plan_lines[0])
    assert re.match(rf"^fixup\s{commit_3.hexsha[:7]}\s.*", plan_lines[1])
    assert re.match(rf"^pick\s{commit_2.hexsha[:7]}\s.*", plan_lines[2])


def test_get_rebase_data_5(repository: git.Repo, commit_files: Callable):
    """Mixed history: one fixup pair, remaining commits stay as picks."""
    commit_1 = commit_files(repository, ["test.txt"], "whatever")
    commit_2 = commit_files(repository, ["whatever.img"], "some other content")
    commit_3 = commit_files(repository, ["test.txt"], "whatever2")
    commit_4 = commit_files(
        repository, ["whatever.img", "test.txt"], "some other content2"
    )
    commit_5 = commit_files(repository, ["test.txt"], "even other content")
    commits = helpers.retrieve_commits(repository, "master")
    is_rebase, rebase_data = squash.get_rebase_data(commits)
    plan_lines = rebase_data.split("\n")
    assert is_rebase is True
    assert re.match(rf"^pick\s{commit_1.hexsha[:7]}\s.*", plan_lines[0])
    assert re.match(rf"^fixup\s{commit_3.hexsha[:7]}\s.*", plan_lines[1])
    assert re.match(rf"^pick\s{commit_2.hexsha[:7]}\s.*", plan_lines[2])
    assert re.match(rf"^pick\s{commit_4.hexsha[:7]}\s.*", plan_lines[3])
    assert re.match(rf"^pick\s{commit_5.hexsha[:7]}\s.*", plan_lines[4])


def test_get_rebase_data_6(repository: git.Repo, commit_files: Callable):
    """No commit pair shares an identical file set, so nothing is squashed."""
    commit_1 = commit_files(
        repository, ["test.txt", "1.txt", "5.txt", "gdf.txt"], "whatever"
    )
    commit_2 = commit_files(repository, ["5.txt"], "some other content")
    commit_3 = commit_files(repository, ["test.txt"], "whatever2")
    commit_4 = commit_files(repository, ["5.txt", "test.txt"], "some other content2")
    commit_5 = commit_files(repository, ["test.txt"], "even other content")
    commits = helpers.retrieve_commits(repository, "master")
    is_rebase, rebase_data = squash.get_rebase_data(commits)
    plan_lines = rebase_data.split("\n")
    assert is_rebase is False
    assert re.match(rf"^pick\s{commit_1.hexsha[:7]}\s.*", plan_lines[0])
    assert re.match(rf"^pick\s{commit_2.hexsha[:7]}\s.*", plan_lines[1])
    assert re.match(rf"^pick\s{commit_3.hexsha[:7]}\s.*", plan_lines[2])
    assert re.match(rf"^pick\s{commit_4.hexsha[:7]}\s.*", plan_lines[3])
    assert re.match(rf"^pick\s{commit_5.hexsha[:7]}\s.*", plan_lines[4])


def test_get_commit_message_empty(
    repository: git.Repo, commit_files: Callable, make_files: Callable
):
    """A commit created with an empty message yields an empty string."""
    make_files(repository, ["test.txt"], "test")
    repository.index.add(["test.txt"])
    repository.git.commit("--allow-empty-message", "-m", "")
    commits: List[git.Commit] = helpers.retrieve_commits(
        repository, "master", reverse=False
    )
    assert squash.get_commit_message(commits[0]) == ""


def test_get_commit_message(repository: git.Repo, commit_files: Callable):
    """The extracted message matches the commit's own message attribute."""
    commit: git.Commit = commit_files(
        repository, ["test.txt"], "content", message="Test"
    )
    assert squash.get_commit_message(commit) == commit.message
43.981132
85
0.683934
1,325
9,324
4.57434
0.071698
0.087444
0.138591
0.094374
0.872463
0.804653
0.797063
0.725128
0.671341
0.635539
0
0.017382
0.160875
9,324
211
86
44.189573
0.757285
0.0074
0
0.43956
0
0
0.193601
0.047454
0
0
0
0
0.236264
1
0.071429
false
0
0.021978
0
0.093407
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
5eeb52e5ccdb4e6ec1ca42f0a4fe0e0dbc21fd04
330
py
Python
immvis/messagehandler/response/response_builder.py
imdavi/ImmVisWebsocketsServerPython
6eb9e1166e97525ab1f252ef6ff176515f8dbbe8
[ "MIT" ]
null
null
null
immvis/messagehandler/response/response_builder.py
imdavi/ImmVisWebsocketsServerPython
6eb9e1166e97525ab1f252ef6ff176515f8dbbe8
[ "MIT" ]
null
null
null
immvis/messagehandler/response/response_builder.py
imdavi/ImmVisWebsocketsServerPython
6eb9e1166e97525ab1f252ef6ff176515f8dbbe8
[ "MIT" ]
null
null
null
"""Abstract base for serializing action outcomes into response strings."""

from abc import ABC, abstractmethod

from ..actions.action_result import ActionResult


class ResponseBuilder(ABC):
    """Contract for converting an ActionResult or an error into a response."""

    @abstractmethod
    def build_response_from_result(self, action_result: ActionResult) -> str:
        """Build the response string for a completed action."""
        pass

    @abstractmethod
    def build_response_from_error(self, error: Exception) -> str:
        """Build the response string reporting *error* to the client."""
        pass
25.384615
77
0.730303
37
330
6.297297
0.486486
0.145923
0.188841
0.257511
0.291845
0
0
0
0
0
0
0
0.2
330
12
78
27.5
0.882576
0
0
0.444444
0
0
0
0
0
0
0
0
0
1
0.222222
false
0.222222
0.222222
0
0.555556
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
5
6f14ee83f7983ff5856195a697eda6d8464b17bf
220
py
Python
app/api/config.py
daghan/graph-explorer
0a01f3525cf618b45b92b092b10f48d70706f28c
[ "MIT" ]
30
2018-06-18T13:45:10.000Z
2021-11-22T19:10:24.000Z
app/api/config.py
daghan/graph-explorer
0a01f3525cf618b45b92b092b10f48d70706f28c
[ "MIT" ]
1
2022-02-10T22:51:29.000Z
2022-02-10T22:51:29.000Z
app/api/config.py
gavinJLing/graphDataExplorer
f06501598e49649e68c7261a973dfd89ae1a2ea7
[ "MIT" ]
12
2018-09-28T07:31:02.000Z
2020-12-18T08:02:59.000Z
class Config(object):
    """Base class for per-environment settings."""
    pass


class Production(Config):
    """Settings for the production deployment (service-name hosts)."""
    ELASTICSEARCH_HOST = 'elasticsearch'
    JANUS_HOST = 'janus'


class Development(Config):
    """Settings for local development (everything on loopback)."""
    ELASTICSEARCH_HOST = '127.0.0.1'
    JANUS_HOST = '127.0.0.1'
16.923077
40
0.681818
28
220
5.214286
0.428571
0.260274
0.315068
0.123288
0.136986
0
0
0
0
0
0
0.067797
0.195455
220
12
41
18.333333
0.757062
0
0
0
0
0
0.163636
0
0
0
0
0
0
1
0
false
0.125
0
0
0.875
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
5
6f531c8094873ea86814f3c33ab1b9d4952760dc
97
py
Python
ClassWorks/Shtepa/Lesson4/untitled/venv/BIBOT.py
BIBOT-BIBOT/FamousTeamDream
273a9b831a80218cd6f904bc156d5d8f0a8f087e
[ "Apache-2.0" ]
null
null
null
ClassWorks/Shtepa/Lesson4/untitled/venv/BIBOT.py
BIBOT-BIBOT/FamousTeamDream
273a9b831a80218cd6f904bc156d5d8f0a8f087e
[ "Apache-2.0" ]
null
null
null
ClassWorks/Shtepa/Lesson4/untitled/venv/BIBOT.py
BIBOT-BIBOT/FamousTeamDream
273a9b831a80218cd6f904bc156d5d8f0a8f087e
[ "Apache-2.0" ]
null
null
null
# Read two integers from stdin and report how they compare.
a = int(input())
b = int(input())
if a == b:
    print('a == b')
if a < b:
    print('a < b')
# NOTE(review): the original file ended with a bare dangling `if`
# (a syntax error); completed with the remaining comparison branch.
if a > b:
    print('a > b')
12.125
19
0.43299
19
97
2.210526
0.315789
0.190476
0.190476
0.428571
0.571429
0.571429
0.571429
0
0
0
0
0
0.298969
97
8
20
12.125
0.617647
0
0
0
0
0
0.112245
0
0
0
0
0
0
0
null
null
0
0
null
null
0.285714
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
5
6f5fd910ce0fc0e2d343cf24247b43889bf5183f
5,877
py
Python
numom2b_preprocessing/unittests/preprocess_tests/test_read_from_csv.py
batflyer/nuMoM2b-preprocessing
e3831d33e06a2620df84655a3c9474794ecc8c44
[ "MIT" ]
2
2021-11-11T22:24:35.000Z
2021-12-17T23:13:49.000Z
numom2b_preprocessing/unittests/preprocess_tests/test_read_from_csv.py
batflyer/nuMoM2b-preprocessing
e3831d33e06a2620df84655a3c9474794ecc8c44
[ "MIT" ]
5
2019-05-16T20:09:37.000Z
2019-10-14T13:41:18.000Z
numom2b_preprocessing/unittests/preprocess_tests/test_read_from_csv.py
hayesall/nuMoM2b_preprocessing
e3831d33e06a2620df84655a3c9474794ecc8c44
[ "MIT" ]
null
null
null
# Copyright 2019 Alexander L. Hayes

"""
Tests for the preprocess module: loading target CSVs and joining tables.
"""

from pandas import DataFrame
# Fix: pandas.util.testing was deprecated and removed in pandas 2.0;
# pandas.testing is the supported public location for assert_frame_equal.
from pandas.testing import assert_frame_equal
from numpy import float64
import unittest

# Tests for:
from ... import preprocess


class ReadTargetTest(unittest.TestCase):
    """
    Tests for the ``preprocess._build_target_table`` module.

    Assert target csv files load with valid parameters.
    """

    @staticmethod
    def test_reading_from_target1_csv_1():
        """
        Test ``target1.csv`` while dropping ``column1`` and ``column2``.
        """
        _parameters = {
            "target": (
                "numom2b_preprocessing/unittests/preprocess_tests/sample_csv_files/target1.csv",
                ["PublicID", "target_variable"],
            )
        }
        _name, _variables = _parameters["target"][0], _parameters["target"][1]
        _target = preprocess._build_target_table(_name, _variables)
        _expected = DataFrame(
            {"PublicID": ["A1", "B2", "B4"], "target_variable": [0, 0, 1]}
        )
        assert_frame_equal(_expected, _target)

    @staticmethod
    def test_reading_from_target1_csv_2():
        """
        Test ``target1.csv`` dropping ``column1``.
        """
        _parameters = {
            "target": (
                "numom2b_preprocessing/unittests/preprocess_tests/sample_csv_files/target1.csv",
                ["PublicID", "column2", "target_variable"],
            )
        }
        _name, _variables = _parameters["target"][0], _parameters["target"][1]
        _target = preprocess._build_target_table(_name, _variables)
        _expected = DataFrame(
            {
                "PublicID": ["A1", "B2", "B4"],
                "column2": [23, 11, 11],
                "target_variable": [0, 0, 1],
            }
        )
        assert_frame_equal(_expected, _target)

    @staticmethod
    def test_reading_from_target1_csv_3():
        """
        Test ``target1.csv`` dropping ``column2``.
        """
        _parameters = {
            "target": (
                "numom2b_preprocessing/unittests/preprocess_tests/sample_csv_files/target1.csv",
                ["PublicID", "column1", "target_variable"],
            )
        }
        _name, _variables = _parameters["target"][0], _parameters["target"][1]
        _target = preprocess._build_target_table(_name, _variables)
        _expected = DataFrame(
            {
                "PublicID": ["A1", "B2", "B4"],
                "column1": [25, 24, 20],
                "target_variable": [0, 0, 1],
            }
        )
        assert_frame_equal(_expected, _target)


class ReadCSVTest(unittest.TestCase):
    """
    Tests for ``preprocess._build_table``.
    """

    @staticmethod
    def test_target1_and_csv1_1():
        """
        Join ``target1.csv`` and ``csv1.csv``
        """
        _parameters = {
            "target": (
                "numom2b_preprocessing/unittests/preprocess_tests/sample_csv_files/target1.csv",
                ["PublicID", "target_variable"],
            ),
            "paths": [
                (
                    "numom2b_preprocessing/unittests/preprocess_tests/sample_csv_files/csv1.csv",
                    ["PublicID", "column5"],
                )
            ],
        }
        _table = preprocess._build_table(_parameters)
        # Rows absent from csv1.csv join as NaN in column5.
        _expected = DataFrame(
            {
                "PublicID": ["A1", "B2", "B4"],
                "target_variable": [0, 0, 1],
                "column5": [float64("nan"), float64("nan"), 5.0],
            }
        )
        assert_frame_equal(_expected, _table)

    @staticmethod
    def test_target1_and_csv1_2():
        """
        Join ``target1.csv`` and ``csv1.csv``
        """
        _parameters = {
            "target": (
                "numom2b_preprocessing/unittests/preprocess_tests/sample_csv_files/target1.csv",
                ["PublicID", "column1", "target_variable"],
            ),
            "paths": [
                (
                    "numom2b_preprocessing/unittests/preprocess_tests/sample_csv_files/csv1.csv",
                    ["PublicID", "column3", "column4", "column5"],
                )
            ],
        }
        _table = preprocess._build_table(_parameters)
        _expected = DataFrame(
            {
                "PublicID": ["A1", "B2", "B4"],
                "column1": [25, 24, 20],
                "target_variable": [0, 0, 1],
                "column3": [1, 2, 3],
                "column4": [float64("nan"), 3.0, 4.0],
                "column5": [float64("nan"), float64("nan"), 5.0],
            }
        )
        assert_frame_equal(_expected, _table)

    @staticmethod
    def test_target1_csv1_and_csv2_1():
        """
        Join ``target1.csv``, ``csv1.csv``, and ``csv2.csv``.

        Maintain one column from each.
        """
        _parameters = {
            "target": (
                "numom2b_preprocessing/unittests/preprocess_tests/sample_csv_files/target1.csv",
                ["PublicID", "column1", "target_variable"],
            ),
            "paths": [
                (
                    "numom2b_preprocessing/unittests/preprocess_tests/sample_csv_files/csv1.csv",
                    ["PublicID", "column2"],
                ),
                (
                    "numom2b_preprocessing/unittests/preprocess_tests/sample_csv_files/csv2.csv",
                    ["PublicID", "column3"],
                ),
            ],
        }
        _table = preprocess._build_table(_parameters)
        _expected = DataFrame(
            {
                "PublicID": ["A1", "B2", "B4"],
                "column1": [25, 24, 20],
                "target_variable": [0, 0, 1],
                "column2": [3, 2, 1],
                "column3": [1, 2, 3],
            }
        )
        assert_frame_equal(_expected, _table)
29.532663
112
0.508253
503
5,877
5.610338
0.159046
0.053154
0.102764
0.1382
0.781715
0.763997
0.763997
0.733877
0.713324
0.713324
0
0.044697
0.356645
5,877
198
113
29.681818
0.701666
0.091373
0
0.6
0
0
0.260314
0.146814
0
0
0
0
0.051852
1
0.044444
false
0
0.037037
0
0.096296
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
48e350aedf53a5716922d8eb05853f809c5ba381
110
py
Python
ex6_threaded.py
buq2/cpp_embedded_python
6b6faf0a5f0ecf9b5ec0e21a89143498b52765cc
[ "BSD-3-Clause" ]
null
null
null
ex6_threaded.py
buq2/cpp_embedded_python
6b6faf0a5f0ecf9b5ec0e21a89143498b52765cc
[ "BSD-3-Clause" ]
null
null
null
ex6_threaded.py
buq2/cpp_embedded_python
6b6faf0a5f0ecf9b5ec0e21a89143498b52765cc
[ "BSD-3-Clause" ]
null
null
null
# Announce that the embedding host has loaded this module.
print('Python module loaded')

import numpy as np


def sum(i, j):
    """Return the total of the element-wise sum of sequences i and j.

    NOTE(review): the name intentionally shadows the builtin ``sum`` —
    presumably the embedding C++ host looks it up by this name; do not rename.
    """
    combined = np.array(i) + np.array(j)
    return combined.sum()
18.333333
44
0.663636
21
110
3.47619
0.666667
0.191781
0
0
0
0
0
0
0
0
0
0
0.172727
110
6
44
18.333333
0.802198
0
0
0
0
0
0.18018
0
0
0
0
0
0
1
0.25
false
0
0.25
0.25
0.75
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
48f124189894818a7bb9ddde4518b8610619442a
142
py
Python
paginate-example.py
xflr6/latexpages
65d2961ecbbf47ab1ab126cab90b63e1ba121e26
[ "MIT" ]
2
2019-05-19T00:08:10.000Z
2021-03-29T14:10:25.000Z
paginate-example.py
xflr6/latexpages
65d2961ecbbf47ab1ab126cab90b63e1ba121e26
[ "MIT" ]
null
null
null
paginate-example.py
xflr6/latexpages
65d2961ecbbf47ab1ab126cab90b63e1ba121e26
[ "MIT" ]
1
2021-03-29T14:14:18.000Z
2021-03-29T14:14:18.000Z
#!/usr/bin/env python3

# paginate-example.py
"""Run latexpages pagination over the bundled example project."""

import latexpages

if __name__ == '__main__':
    latexpages.paginate('example/latexpages.ini')
17.75
49
0.739437
17
142
5.705882
0.764706
0.309278
0
0
0
0
0
0
0
0
0
0.008
0.119718
142
7
50
20.285714
0.768
0.288732
0
0
0
0
0.30303
0.222222
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
48f367e916083ba41358350176af78574fe5e97d
70
py
Python
effector_database/tests.py
LuukHenk/effector_database
d9702f89ffd2a23b4f5233d4ece69c1ceebd6164
[ "MIT" ]
null
null
null
effector_database/tests.py
LuukHenk/effector_database
d9702f89ffd2a23b4f5233d4ece69c1ceebd6164
[ "MIT" ]
null
null
null
effector_database/tests.py
LuukHenk/effector_database
d9702f89ffd2a23b4f5233d4ece69c1ceebd6164
[ "MIT" ]
null
null
null
from django.test import TestCase # Too lazy to add some tests!??!??!
17.5
35
0.7
11
70
4.454545
1
0
0
0
0
0
0
0
0
0
0
0
0.171429
70
3
36
23.333333
0.844828
0.471429
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5