hexsha: string
size: int64
ext: string
lang: string
max_stars_repo_path: string
max_stars_repo_name: string
max_stars_repo_head_hexsha: string
max_stars_repo_licenses: list
max_stars_count: int64
max_stars_repo_stars_event_min_datetime: string
max_stars_repo_stars_event_max_datetime: string
max_issues_repo_path: string
max_issues_repo_name: string
max_issues_repo_head_hexsha: string
max_issues_repo_licenses: list
max_issues_count: int64
max_issues_repo_issues_event_min_datetime: string
max_issues_repo_issues_event_max_datetime: string
max_forks_repo_path: string
max_forks_repo_name: string
max_forks_repo_head_hexsha: string
max_forks_repo_licenses: list
max_forks_count: int64
max_forks_repo_forks_event_min_datetime: string
max_forks_repo_forks_event_max_datetime: string
content: string
avg_line_length: float64
max_line_length: int64
alphanum_fraction: float64
qsc_code_num_words_quality_signal: int64
qsc_code_num_chars_quality_signal: float64
qsc_code_mean_word_length_quality_signal: float64
qsc_code_frac_words_unique_quality_signal: float64
qsc_code_frac_chars_top_2grams_quality_signal: float64
qsc_code_frac_chars_top_3grams_quality_signal: float64
qsc_code_frac_chars_top_4grams_quality_signal: float64
qsc_code_frac_chars_dupe_5grams_quality_signal: float64
qsc_code_frac_chars_dupe_6grams_quality_signal: float64
qsc_code_frac_chars_dupe_7grams_quality_signal: float64
qsc_code_frac_chars_dupe_8grams_quality_signal: float64
qsc_code_frac_chars_dupe_9grams_quality_signal: float64
qsc_code_frac_chars_dupe_10grams_quality_signal: float64
qsc_code_frac_chars_replacement_symbols_quality_signal: float64
qsc_code_frac_chars_digital_quality_signal: float64
qsc_code_frac_chars_whitespace_quality_signal: float64
qsc_code_size_file_byte_quality_signal: float64
qsc_code_num_lines_quality_signal: float64
qsc_code_num_chars_line_max_quality_signal: float64
qsc_code_num_chars_line_mean_quality_signal: float64
qsc_code_frac_chars_alphabet_quality_signal: float64
qsc_code_frac_chars_comments_quality_signal: float64
qsc_code_cate_xml_start_quality_signal: float64
qsc_code_frac_lines_dupe_lines_quality_signal: float64
qsc_code_cate_autogen_quality_signal: float64
qsc_code_frac_lines_long_string_quality_signal: float64
qsc_code_frac_chars_string_length_quality_signal: float64
qsc_code_frac_chars_long_word_length_quality_signal: float64
qsc_code_frac_lines_string_concat_quality_signal: float64
qsc_code_cate_encoded_data_quality_signal: float64
qsc_code_frac_chars_hex_words_quality_signal: float64
qsc_code_frac_lines_prompt_comments_quality_signal: float64
qsc_code_frac_lines_assert_quality_signal: float64
qsc_codepython_cate_ast_quality_signal: float64
qsc_codepython_frac_lines_func_ratio_quality_signal: float64
qsc_codepython_cate_var_zero_quality_signal: bool
qsc_codepython_frac_lines_pass_quality_signal: float64
qsc_codepython_frac_lines_import_quality_signal: float64
qsc_codepython_frac_lines_simplefunc_quality_signal: float64
qsc_codepython_score_lines_no_logic_quality_signal: float64
qsc_codepython_frac_lines_print_quality_signal: float64
qsc_code_num_words: int64
qsc_code_num_chars: int64
qsc_code_mean_word_length: int64
qsc_code_frac_words_unique: null
qsc_code_frac_chars_top_2grams: int64
qsc_code_frac_chars_top_3grams: int64
qsc_code_frac_chars_top_4grams: int64
qsc_code_frac_chars_dupe_5grams: int64
qsc_code_frac_chars_dupe_6grams: int64
qsc_code_frac_chars_dupe_7grams: int64
qsc_code_frac_chars_dupe_8grams: int64
qsc_code_frac_chars_dupe_9grams: int64
qsc_code_frac_chars_dupe_10grams: int64
qsc_code_frac_chars_replacement_symbols: int64
qsc_code_frac_chars_digital: int64
qsc_code_frac_chars_whitespace: int64
qsc_code_size_file_byte: int64
qsc_code_num_lines: int64
qsc_code_num_chars_line_max: int64
qsc_code_num_chars_line_mean: int64
qsc_code_frac_chars_alphabet: int64
qsc_code_frac_chars_comments: int64
qsc_code_cate_xml_start: int64
qsc_code_frac_lines_dupe_lines: int64
qsc_code_cate_autogen: int64
qsc_code_frac_lines_long_string: int64
qsc_code_frac_chars_string_length: int64
qsc_code_frac_chars_long_word_length: int64
qsc_code_frac_lines_string_concat: null
qsc_code_cate_encoded_data: int64
qsc_code_frac_chars_hex_words: int64
qsc_code_frac_lines_prompt_comments: int64
qsc_code_frac_lines_assert: int64
qsc_codepython_cate_ast: int64
qsc_codepython_frac_lines_func_ratio: int64
qsc_codepython_cate_var_zero: int64
qsc_codepython_frac_lines_pass: int64
qsc_codepython_frac_lines_import: int64
qsc_codepython_frac_lines_simplefunc: int64
qsc_codepython_score_lines_no_logic: int64
qsc_codepython_frac_lines_print: int64
effective: string
hits: int64
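The rows below follow this schema in field order. As a minimal sketch of how such a table might be loaded and filtered on the quality signals (assuming the dump is stored as a Parquet file; the file name and the thresholds are illustrative, not part of the source):

import pandas as pd

# Hypothetical file name; any Parquet reader that preserves the schema works.
df = pd.read_parquet("code_quality_signals.parquet")

# Illustrative filter: keep Python files that are not trivially small and
# do not consist mostly of duplicated 10-grams.
mask = (
    (df["lang"] == "Python")
    & (df["size"] >= 100)
    & (df["qsc_code_frac_chars_dupe_10grams_quality_signal"] < 0.5)
)
print(df.loc[mask, ["max_stars_repo_name", "max_stars_repo_path", "size"]])
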
8c42bc054164bf102cb3c019bde55c78ebac365a
2,523
py
Python
tests/psychology/typicality/objects/test_objs_typicality_min.py
rvyjidacek/fcapsy
6d531a337b0e65cac10e41b84d232498f3a05b76
[ "MIT" ]
null
null
null
tests/psychology/typicality/objects/test_objs_typicality_min.py
rvyjidacek/fcapsy
6d531a337b0e65cac10e41b84d232498f3a05b76
[ "MIT" ]
null
null
null
tests/psychology/typicality/objects/test_objs_typicality_min.py
rvyjidacek/fcapsy
6d531a337b0e65cac10e41b84d232498f3a05b76
[ "MIT" ]
null
null
null
import pytest
from bitsets import bitset

from fcapsy import Concept, Context
from fcapsy.psychology.typicality import typicality_min
from fcapsy.similarity import jaccard, smc, rosch


@pytest.mark.parametrize("similarity_function", [jaccard, smc])
def test_objs_typicality_min_1(similarity_function):
    context = Context([[1, 0, 0, 0]], range(1), range(4))
    concept = Concept.from_extent([0], context)

    assert typicality_min(0, concept, context, similarity_function) == 1


@pytest.mark.parametrize("similarity_function", [jaccard, smc, rosch])
def test_objs_typicality_min_2(similarity_function):
    context = Context([[1, 0, 0, 0],
                       [0, 1, 0, 0],
                       [0, 0, 1, 0],
                       [0, 0, 0, 1]], range(4), range(4))
    concept = Concept.from_extent(list(range(4)), context)
    objects = tuple(context.filter(concept.extent))
    similarities = (similarity_function(objects[0], objects[0]),
                    similarity_function(objects[0], objects[1]),
                    similarity_function(objects[0], objects[2]),
                    similarity_function(objects[0], objects[3]))
    expected = min(similarities)

    assert typicality_min(0, concept, context, similarity_function) == expected


@pytest.mark.parametrize("similarity_function", [jaccard, smc, rosch])
def test_objs_typicality_min_3(similarity_function):
    context = Context([[1, 1, 1, 1],
                       [1, 0, 1, 0],
                       [0, 0, 0, 1]], range(3), range(4))
    concept = Concept.from_extent(list(range(3)), context)
    objects = tuple(context.filter(concept.extent))
    similarities = (similarity_function(objects[0], objects[0]),
                    similarity_function(objects[0], objects[1]),
                    similarity_function(objects[0], objects[2]))
    expected = min(similarities)

    assert typicality_min(0, concept, context, similarity_function) == expected


@pytest.mark.parametrize("similarity_function", [smc, rosch])
def test_objs_typicality_min_4(similarity_function):
    context = Context([[0, 0, 0, 0],
                       [1, 1, 1, 1]], range(2), range(4))
    concept = Concept.from_extent(list(range(2)), context)
    objects = tuple(context.filter(concept.extent))
    similarities = (similarity_function(objects[0], objects[0]),
                    similarity_function(objects[0], objects[1]))
    expected = min(similarities)

    assert typicality_min(0, concept, context, similarity_function) == expected
35.041667
79
0.647642
301
2,523
5.275748
0.116279
0.238035
0.020781
0.147355
0.858312
0.798489
0.779597
0.728589
0.56864
0.56864
0
0.040142
0.219976
2,523
71
80
35.535211
0.766768
0
0
0.347826
0
0
0.030123
0
0
0
0
0
0.086957
1
0.086957
false
0
0.108696
0
0.195652
0
0
0
0
null
1
0
0
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
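For orientation, the values in each row appear in the schema's field order, so the row above records the file tests/psychology/typicality/objects/test_objs_typicality_min.py (2,523 bytes) together with its signals. As a sanity check, qsc_code_frac_chars_whitespace_quality_signal = 0.219976 is consistent with a whitespace-characters-over-total-characters definition (roughly 555 of 2,523 characters); a small sketch under that assumed definition:

def frac_chars_whitespace(text: str) -> float:
    # Assumed definition: whitespace characters divided by total characters.
    return sum(ch.isspace() for ch in text) / max(len(text), 1)

# 555 / 2523 == 0.219976..., matching the value reported for the row above.
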
4fd6109c925edcbdf1a157f12190d8d3cc422ecb
6,638
py
Python
fluentcheck/tests/tests_check/test_numbers.py
jstoebel/fluentcheck
9258dab4f46776b5df50528f5028ce2d11a443c5
[ "MIT" ]
83
2018-05-31T13:21:06.000Z
2022-03-20T14:27:49.000Z
fluentcheck/tests/tests_check/test_numbers.py
jstoebel/fluentcheck
9258dab4f46776b5df50528f5028ce2d11a443c5
[ "MIT" ]
20
2019-02-10T15:07:44.000Z
2021-04-02T13:18:52.000Z
fluentcheck/tests/tests_check/test_numbers.py
jstoebel/fluentcheck
9258dab4f46776b5df50528f5028ce2d11a443c5
[ "MIT" ]
11
2019-02-16T21:33:11.000Z
2022-03-25T03:39:52.000Z
import unittest

from fluentcheck.classes import Check
from fluentcheck.exceptions import CheckError


class TestNumbersAssertions(unittest.TestCase):

    def test_is_number(self):
        # ints
        val = 123
        res = Check(val).is_number()
        self.assertIsInstance(res, Check)

        # floats
        val = float(123)
        res = Check(val).is_number()
        self.assertIsInstance(res, Check)

        # complexes
        val = complex(33.44, 55.66)
        res = Check(val).is_number()
        self.assertIsInstance(res, Check)

        # test failure
        val = 'not-a-number'
        self.assertTrue(all([not isinstance(val, kls) for kls in Check.NUMERIC_TYPES]))
        try:
            Check(val).is_number()
            self.fail()
        except CheckError:
            pass

    def test_is_not_number(self):
        val = 'not-a-number'
        self.assertTrue(all([not isinstance(val, kls) for kls in Check.NUMERIC_TYPES]))
        res = Check(val).is_not_number()
        self.assertIsInstance(res, Check)

        # test failures
        # ints
        val = 123
        try:
            Check(val).is_not_number()
            self.fail()
        except CheckError:
            pass

        # floats
        val = float(123)
        try:
            Check(val).is_not_number()
            self.fail()
        except CheckError:
            pass

        # complexes
        val = complex(33.44, 55.66)
        try:
            Check(val).is_not_number()
            self.fail()
        except CheckError:
            pass

    def test_is_integer(self):
        res = Check(123).is_integer()
        self.assertIsInstance(res, Check)
        try:
            Check(float(123)).is_integer()
            self.fail()
        except CheckError:
            pass

    def test_is_not_integer(self):
        res = Check('test').is_not_integer()
        self.assertIsInstance(res, Check)
        try:
            Check(123).is_not_integer()
            self.fail()
        except CheckError:
            pass

    def test_is_float(self):
        res = Check(123.9).is_float()
        self.assertIsInstance(res, Check)
        try:
            Check(123).is_float()
            self.fail()
        except CheckError:
            pass

    def test_is_not_float(self):
        res = Check('test').is_not_float()
        self.assertIsInstance(res, Check)
        try:
            Check(123.9).is_not_float()
            self.fail()
        except CheckError:
            pass

    def test_is_real(self):
        res = Check(123.9).is_real()
        self.assertIsInstance(res, Check)
        try:
            val = complex(1.2, 3.4)
            Check(val).is_real()
            Check('test').is_real()
            self.fail()
        except CheckError:
            pass

    def test_is_not_real(self):
        val = complex(1.2, 3.4)
        res = Check(val).is_not_real()
        self.assertIsInstance(res, Check)
        try:
            Check('test').is_not_real()
            self.fail()
        except:
            pass
        try:
            Check(123.9).is_not_real()
            self.fail()
        except CheckError:
            pass

    def test_is_complex(self):
        val = complex(1.2, 3.4)
        res = Check(val).is_complex()
        self.assertIsInstance(res, Check)
        try:
            Check(123).is_complex()
            self.fail()
        except CheckError:
            pass

    def test_is_not_complex(self):
        res = Check('test').is_not_complex()
        self.assertIsInstance(res, Check)
        try:
            val = complex(1.2, 3.4)
            Check(val).is_not_complex()
            self.fail()
        except CheckError:
            pass

    def test_is_positive(self):
        res = Check(2).is_positive()
        self.assertIsInstance(res, Check)
        try:
            Check(0).is_positive()
            self.fail()
        except CheckError:
            pass
        try:
            Check(-1).is_positive()
            self.fail()
        except CheckError:
            pass

    def test_is_not_positive(self):
        res = Check(-1).is_not_positive()
        self.assertIsInstance(res, Check)
        res = Check(0).is_not_positive()
        self.assertIsInstance(res, Check)
        try:
            Check(123).is_not_positive()
            self.fail()
        except CheckError:
            pass

    def test_is_negative(self):
        res = Check(-2).is_negative()
        self.assertIsInstance(res, Check)
        try:
            Check(0).is_negative()
            self.fail()
        except CheckError:
            pass
        try:
            Check(1).is_negative()
            self.fail()
        except CheckError:
            pass

    def test_is_not_negative(self):
        res = Check(1).is_not_negative()
        self.assertIsInstance(res, Check)
        res = Check(0).is_not_negative()
        self.assertIsInstance(res, Check)
        try:
            Check(-1).is_not_negative()
            self.fail()
        except CheckError:
            pass

    def test_is_zero(self):
        res = Check(0).is_zero()
        self.assertIsInstance(res, Check)
        try:
            Check(1).is_zero()
            self.fail()
        except CheckError:
            pass

    def test_is_not_zero(self):
        res = Check(1).is_not_zero()
        self.assertIsInstance(res, Check)
        try:
            Check(0).is_not_zero()
            self.fail()
        except CheckError:
            pass

    def test_is_at_least(self):
        res = Check(7).is_at_least(6.5)
        self.assertIsInstance(res, Check)
        res = Check(7).is_at_least(7)
        self.assertIsInstance(res, Check)
        try:
            Check(7).is_at_least(99)
            self.fail()
        except CheckError:
            pass

    def test_is_at_most(self):
        res = Check(6.5).is_at_most(7)
        self.assertIsInstance(res, Check)
        res = Check(7).is_at_most(7)
        self.assertIsInstance(res, Check)
        try:
            Check(7).is_at_most(3)
            self.fail()
        except CheckError:
            pass

    def test_is_between(self):
        res = Check(5.4).is_between(5, 6)
        self.assertIsInstance(res, Check)
        try:
            Check(5.4).is_between(2, 3)
            self.fail()
        except CheckError:
            pass

    def test_is_not_between(self):
        res = Check(5.4).is_not_between(1, 2)
        self.assertIsInstance(res, Check)
        try:
            Check(5.4).is_not_between(5, 6)
            self.fail()
        except CheckError:
            pass
23.45583
87
0.524706
758
6,638
4.418206
0.084433
0.124216
0.178561
0.217378
0.884443
0.847417
0.743506
0.674231
0.573007
0.197074
0
0.029679
0.370594
6,638
282
88
23.539007
0.7719
0.010545
0
0.671296
0
0
0.006708
0
0
0
0
0
0.134259
1
0.092593
false
0.115741
0.013889
0
0.111111
0
0
0
0
null
0
0
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
5
8b2f97c31f1387f7647ddbdc4f16523c472d3c75
156
py
Python
hebpipe/lib/xrenner/modules/__init__.py
amir-zeldes/HebPipe
a3eb95abd2dd9588806b545eba7b28f57e589973
[ "Apache-2.0" ]
21
2019-02-04T16:27:37.000Z
2022-03-22T11:45:05.000Z
hebpipe/lib/xrenner/modules/__init__.py
amir-zeldes/HebPipe
a3eb95abd2dd9588806b545eba7b28f57e589973
[ "Apache-2.0" ]
12
2020-01-02T15:56:31.000Z
2022-03-31T10:35:16.000Z
hebpipe/lib/xrenner/modules/__init__.py
amir-zeldes/HebPipe
a3eb95abd2dd9588806b545eba7b28f57e589973
[ "Apache-2.0" ]
5
2020-01-01T20:51:35.000Z
2021-12-21T12:56:53.000Z
# __init__.py
"""
xrenner - eXternally configurable REference and Non Named Entity Recognizer
Author: Amir Zeldes
"""

from .xrenner_xrenner import Xrenner
19.5
75
0.788462
19
156
6.210526
0.842105
0
0
0
0
0
0
0
0
0
0
0
0.141026
156
8
76
19.5
0.880597
0.692308
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
8ca0d44fdbb4524918ccdfc3beaf1d26f1e30188
44
py
Python
model/error/invalid_name_error.py
FellowHashbrown/virus.sh
757e50fa402f63ab7161518dcec7e1441aa880dd
[ "MIT" ]
null
null
null
model/error/invalid_name_error.py
FellowHashbrown/virus.sh
757e50fa402f63ab7161518dcec7e1441aa880dd
[ "MIT" ]
null
null
null
model/error/invalid_name_error.py
FellowHashbrown/virus.sh
757e50fa402f63ab7161518dcec7e1441aa880dd
[ "MIT" ]
null
null
null
class InvalidNameError(Exception):
    pass
14.666667
34
0.772727
4
44
8.5
1
0
0
0
0
0
0
0
0
0
0
0
0.159091
44
2
35
22
0.918919
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
50887cb10db54c44f1a8e3849f8b30c9ce7bfe92
282
py
Python
algorithm_learning/learning_repeat_copy_ntm.py
SigmaQuan/NTM-Keras
a6fbc7374e596c46d0fa073e4bdb100fc8cf310a
[ "MIT" ]
33
2016-11-06T05:42:35.000Z
2020-12-18T22:42:40.000Z
algorithm_learning/learning_repeat_copy_ntm.py
imbi7py/NTM-Keras
a6fbc7374e596c46d0fa073e4bdb100fc8cf310a
[ "MIT" ]
2
2017-01-22T09:06:54.000Z
2017-06-29T13:18:05.000Z
algorithm_learning/learning_repeat_copy_ntm.py
imbi7py/NTM-Keras
a6fbc7374e596c46d0fa073e4bdb100fc8cf310a
[ "MIT" ]
17
2016-09-17T22:15:50.000Z
2020-12-19T05:55:59.000Z
# -*- coding: utf-8 -*-
"""An implementation of learning priority sort algorithm_learning with NTM.
Input sequence length: "1 ~ 20: (1*2+1)=3 ~ (20*2+1)=41"
Input dimension: "8"
Output sequence length: equal to input sequence length.
Output dimension: equal to input dimension.
"""
35.25
75
0.716312
43
282
4.674419
0.55814
0.208955
0.189055
0
0
0
0
0
0
0
0
0.062241
0.14539
282
7
76
40.285714
0.771784
0.968085
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
508d4354cdaf231610773aba6d9b867dbb817af4
157
py
Python
custom_objects/__init__.py
RamonWill/portfolio-management-project
ac8ce313f8d62f09810fc1da19d6b252f193871b
[ "MIT" ]
14
2020-01-01T04:59:06.000Z
2022-02-08T06:48:21.000Z
custom_objects/__init__.py
linhvien/portfolio-management-project
ac8ce313f8d62f09810fc1da19d6b252f193871b
[ "MIT" ]
null
null
null
custom_objects/__init__.py
linhvien/portfolio-management-project
ac8ce313f8d62f09810fc1da19d6b252f193871b
[ "MIT" ]
8
2020-10-15T06:52:37.000Z
2021-10-04T06:44:36.000Z
from .callback import Callback
from .datatable import DataTable
from .finance_calculator import FinanceCalculator
from .reconciliation import Reconciliation
31.4
49
0.872611
17
157
8
0.470588
0
0
0
0
0
0
0
0
0
0
0
0.101911
157
4
50
39.25
0.964539
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
50c8e6cb9d1edd54a993fe9940f254cd090df3d2
76
py
Python
atlasnet_v2/__init__.py
RobinBaumann/TF-AtlasNetV2
32bd962407becacc9b9bee6c76c96216818e4c89
[ "MIT" ]
null
null
null
atlasnet_v2/__init__.py
RobinBaumann/TF-AtlasNetV2
32bd962407becacc9b9bee6c76c96216818e4c89
[ "MIT" ]
null
null
null
atlasnet_v2/__init__.py
RobinBaumann/TF-AtlasNetV2
32bd962407becacc9b9bee6c76c96216818e4c89
[ "MIT" ]
null
null
null
""" Created by Robin Baumann <mail@robin-baumann.com> on April 27, 2020. """
25.333333
68
0.697368
12
76
4.416667
0.833333
0.45283
0
0
0
0
0
0
0
0
0
0.090909
0.131579
76
3
69
25.333333
0.712121
0.894737
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
0f97107bea0e9537c55a2ca4308f4d217489ff91
3,062
py
Python
torchdrug/layers/sampler.py
wconnell/torchdrug
a710097cb4ad4c48e0de0d18fbb77ef0e806cdc8
[ "Apache-2.0" ]
772
2021-08-10T05:03:46.000Z
2022-03-31T12:48:31.000Z
torchdrug/layers/sampler.py
wconnell/torchdrug
a710097cb4ad4c48e0de0d18fbb77ef0e806cdc8
[ "Apache-2.0" ]
77
2021-08-12T16:19:15.000Z
2022-03-30T14:32:14.000Z
torchdrug/layers/sampler.py
wconnell/torchdrug
a710097cb4ad4c48e0de0d18fbb77ef0e806cdc8
[ "Apache-2.0" ]
90
2021-08-11T16:27:13.000Z
2022-03-28T11:41:53.000Z
from torch import nn
from torch_scatter import scatter_add

from torchdrug.layers import functional


class NodeSampler(nn.Module):
    """
    Node sampler from `GraphSAINT: Graph Sampling Based Inductive Learning Method`_.

    .. _GraphSAINT\: Graph Sampling Based Inductive Learning Method:
        https://arxiv.org/pdf/1907.04931.pdf

    Parameters:
        budget (int, optional): number of node to keep
        ratio (int, optional): ratio of node to keep
    """

    def __init__(self, budget=None, ratio=None):
        super(NodeSampler, self).__init__()
        if budget is None and ratio is None:
            raise ValueError("At least one of `budget` and `ratio` should be provided")
        self.budget = budget
        self.ratio = ratio

    def forward(self, graph):
        """
        Sample a subgraph from the graph.

        Parameters:
            graph (Graph): graph(s)
        """
        # this is exact for a single graph
        # but approximate for packed graphs
        num_sample = graph.num_node
        if self.budget:
            num_sample = min(num_sample, self.budget)
        if self.ratio:
            num_sample = min(num_sample, int(self.ratio * graph.num_node))

        prob = scatter_add(graph.edge_weight ** 2, graph.edge_list[:, 1], dim_size=graph.num_node)
        prob /= prob.mean()
        index = functional.multinomial(prob, num_sample)
        new_graph = graph.node_mask(index)
        node_out = new_graph.edge_list[:, 1]
        new_graph._edge_weight /= num_sample * prob[node_out] / graph.num_node

        return new_graph


class EdgeSampler(nn.Module):
    """
    Edge sampler from `GraphSAINT: Graph Sampling Based Inductive Learning Method`_.

    .. _GraphSAINT\: Graph Sampling Based Inductive Learning Method:
        https://arxiv.org/pdf/1907.04931.pdf

    Parameters:
        budget (int, optional): number of node to keep
        ratio (int, optional): ratio of node to keep
    """

    def __init__(self, budget=None, ratio=None):
        super(EdgeSampler, self).__init__()
        if budget is None and ratio is None:
            raise ValueError("At least one of `budget` and `ratio` should be provided")
        self.budget = budget
        self.ratio = ratio

    def forward(self, graph):
        """
        Sample a subgraph from the graph.

        Parameters:
            graph (Graph): graph(s)
        """
        # this is exact for a single graph
        # but approximate for packed graphs
        node_in, node_out = graph.edge_list.t()[:2]
        num_sample = graph.num_edge
        if self.budget:
            num_sample = min(num_sample, self.budget)
        if self.ratio:
            num_sample = min(num_sample, int(self.ratio * graph.num_edge))

        prob = 1 / graph.degree_out[node_out] + 1 / graph.degree_in[node_in]
        prob = prob / prob.mean()
        index = functional.multinomial(prob, num_sample)
        new_graph = graph.edge_mask(index)
        new_graph._edge_weight /= num_sample * prob[index] / graph.num_edge

        return new_graph
32.924731
98
0.628674
396
3,062
4.684343
0.219697
0.067925
0.049596
0.060377
0.748248
0.748248
0.748248
0.714825
0.714825
0.714825
0
0.01085
0.277596
3,062
92
99
33.282609
0.827758
0.289353
0
0.511628
0
0
0.054348
0
0
0
0
0
0
1
0.093023
false
0
0.069767
0
0.255814
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
0fdb5693c4799a79206317582712053e3e346f76
145
py
Python
lms.py
daryavasilyeva/lms
39e4b225c6ad446ed31d903b7c9fea4f4443c827
[ "MIT" ]
null
null
null
lms.py
daryavasilyeva/lms
39e4b225c6ad446ed31d903b7c9fea4f4443c827
[ "MIT" ]
null
null
null
lms.py
daryavasilyeva/lms
39e4b225c6ad446ed31d903b7c9fea4f4443c827
[ "MIT" ]
null
null
null
from app import app, db
from app.models import User, Student, Teacher, Group, Course, Materials, Homework, Homework_parcel

app.run(debug = True)
36.25
98
0.77931
22
145
5.090909
0.727273
0.125
0
0
0
0
0
0
0
0
0
0
0.131034
145
4
99
36.25
0.888889
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
0fdf92bf103a83901d4b5707cd0e0eb976348a61
55
py
Python
classcharts/homework/__init__.py
danieljtrowbridge/classcharts.py
64bf3e6cbb46da91db166e7c8a33154aaf297d60
[ "MIT" ]
null
null
null
classcharts/homework/__init__.py
danieljtrowbridge/classcharts.py
64bf3e6cbb46da91db166e7c8a33154aaf297d60
[ "MIT" ]
null
null
null
classcharts/homework/__init__.py
danieljtrowbridge/classcharts.py
64bf3e6cbb46da91db166e7c8a33154aaf297d60
[ "MIT" ]
null
null
null
from .objects import *
from .api import HomeworkClient
18.333333
31
0.8
7
55
6.285714
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.145455
55
2
32
27.5
0.93617
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
0fe6b1820d6b4ab39cab35a339e661ae9008b906
145
py
Python
intvalpy/linear/__init__.py
AndrosovAS/intvalpy
0e809d7de44b037fa6162a5bd5b1e0f984d1fc55
[ "MIT" ]
4
2021-05-31T07:45:19.000Z
2022-01-27T08:36:22.000Z
intvalpy/linear/__init__.py
AndrosovAS/intvalpy
0e809d7de44b037fa6162a5bd5b1e0f984d1fc55
[ "MIT" ]
8
2021-08-18T14:07:42.000Z
2022-03-23T12:12:17.000Z
intvalpy/linear/__init__.py
AndrosovAS/intvalpy
0e809d7de44b037fa6162a5bd5b1e0f984d1fc55
[ "MIT" ]
2
2021-02-12T06:02:05.000Z
2022-01-28T17:40:26.000Z
from .overdetermined import Rohn, PSS, ASh
from .square_system import Gauss, Gauss_Seidel
from .system_properties import Uni, Tol, ive, outliers
36.25
54
0.813793
21
145
5.47619
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.124138
145
3
55
48.333333
0.905512
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
0fe71983d588ab47fa3540f9852e5f3a6451b9f4
60
py
Python
demo1.py
TechRepublics/PyHub
eb4115cf55768bc9ef457516302c2f39e0e61d71
[ "Apache-2.0" ]
null
null
null
demo1.py
TechRepublics/PyHub
eb4115cf55768bc9ef457516302c2f39e0e61d71
[ "Apache-2.0" ]
null
null
null
demo1.py
TechRepublics/PyHub
eb4115cf55768bc9ef457516302c2f39e0e61d71
[ "Apache-2.0" ]
null
null
null
from numpy import numpy

print("sky")
print("city")
8.571429
24
0.616667
8
60
4.625
0.75
0
0
0
0
0
0
0
0
0
0
0
0.25
60
6
25
10
0.822222
0
0
0
0
0
0.12963
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0.666667
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
ba27c1098d7728e0a9a20768a1f7bf30e0c497b2
166
py
Python
HDPython/ast/ast_classes/ast_expr.py
HardwareDesignWithPython/HDPython
aade03aaa092b1684fa12bffd17674cf1c45f5ac
[ "MIT" ]
null
null
null
HDPython/ast/ast_classes/ast_expr.py
HardwareDesignWithPython/HDPython
aade03aaa092b1684fa12bffd17674cf1c45f5ac
[ "MIT" ]
null
null
null
HDPython/ast/ast_classes/ast_expr.py
HardwareDesignWithPython/HDPython
aade03aaa092b1684fa12bffd17674cf1c45f5ac
[ "MIT" ]
1
2021-10-20T20:08:16.000Z
2021-10-20T20:08:16.000Z
from HDPython.ast.ast_classes.ast_base import add_class


def body_expr(astParser, Node):
    return astParser.Unfold_body(Node.value)


add_class("Expr", body_expr)
23.714286
55
0.789157
26
166
4.769231
0.615385
0.129032
0
0
0
0
0
0
0
0
0
0
0.114458
166
7
56
23.714286
0.843537
0
0
0
0
0
0.023952
0
0
0
0
0
0
1
0.25
false
0
0.25
0.25
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
e863640d43c7da7639f3fc8c22963358590ca325
1,172
py
Python
book_center/tests/bc_contact/models/test_contact_form_model.py
geodimitrov/Python-Web-Framework-SoftUni
06b7e11aee0024a564d1b266d5ed6271351ac116
[ "MIT" ]
null
null
null
book_center/tests/bc_contact/models/test_contact_form_model.py
geodimitrov/Python-Web-Framework-SoftUni
06b7e11aee0024a564d1b266d5ed6271351ac116
[ "MIT" ]
null
null
null
book_center/tests/bc_contact/models/test_contact_form_model.py
geodimitrov/Python-Web-Framework-SoftUni
06b7e11aee0024a564d1b266d5ed6271351ac116
[ "MIT" ]
null
null
null
from django.core.exceptions import ValidationError

from tests.core.test_cases import BookCenterTestCase


class ContactFormModelTests(BookCenterTestCase):
    def test_contact_form_model_when_subject_contains_non_english_chars__expect_exception(self):
        self.contact_form.subject = 'Бълхичка'
        with self.assertRaises(ValidationError) as context:
            self.contact_form.full_clean()
            self.contact_form.save()
        self.assertIsNotNone(context.exception)

    def test_user_model_when_email_contains_non_english_chars__expect_exception(self):
        self.contact_form.email = 'gъл@хичка.com'
        with self.assertRaises(ValidationError) as context:
            self.contact_form.full_clean()
            self.contact_form.save()
        self.assertIsNotNone(context.exception)

    def test_user_model_when_message_contains_non_english_chars__expect_exception(self):
        self.contact_form.message = 'Винаги съм искал да се свържа с вас'
        with self.assertRaises(ValidationError) as context:
            self.contact_form.full_clean()
            self.contact_form.save()
        self.assertIsNotNone(context.exception)
35.515152
96
0.743174
137
1,172
6.014599
0.350365
0.133495
0.163835
0.083738
0.700243
0.700243
0.700243
0.700243
0.700243
0.700243
0
0
0.187713
1,172
32
97
36.625
0.865546
0
0
0.571429
0
0
0.047822
0
0
0
0
0
0.285714
1
0.142857
false
0
0.095238
0
0.285714
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
e88de061c0e3e6aab34d95a0d7bbf50d9e892f7c
150
py
Python
utils/makeModel.py
L-Zhe/CoRPG
aa7a14a2b1c5397b327b4f91846795f7956bf2cd
[ "MIT" ]
13
2021-09-15T05:44:05.000Z
2022-03-24T12:24:00.000Z
utils/makeModel.py
L-Zhe/CoRPG
aa7a14a2b1c5397b327b4f91846795f7956bf2cd
[ "MIT" ]
null
null
null
utils/makeModel.py
L-Zhe/CoRPG
aa7a14a2b1c5397b327b4f91846795f7956bf2cd
[ "MIT" ]
4
2021-09-17T20:00:48.000Z
2022-03-10T03:09:57.000Z
from Transformer.Model import transformer
import importlib
from utils import constants


def make_model(config):
    return transformer(config)
21.428571
44
0.786667
18
150
6.5
0.611111
0
0
0
0
0
0
0
0
0
0
0
0.173333
150
6
45
25
0.943548
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.6
0.2
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
5
e893b85996d7b281d9c9cdd445cf12e7243e0b53
183
py
Python
ftc/lib/util/__init__.py
efulet/ann_text_classification
fba05a1789a19aa6d607ee36069dda419bb98e28
[ "MIT" ]
null
null
null
ftc/lib/util/__init__.py
efulet/ann_text_classification
fba05a1789a19aa6d607ee36069dda419bb98e28
[ "MIT" ]
null
null
null
ftc/lib/util/__init__.py
efulet/ann_text_classification
fba05a1789a19aa6d607ee36069dda419bb98e28
[ "MIT" ]
null
null
null
""" @created_at 2015-01-18 @author Exequiel Fuentes Lettura <efulet@gmail.com> """ from options import Options from system_utils import SystemUtils from file_utils import FileUtils
18.3
51
0.797814
26
183
5.5
0.769231
0.153846
0
0
0
0
0
0
0
0
0
0.05
0.125683
183
9
52
20.333333
0.84375
0.404372
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
e8bd7dc685e424188d5ffc8f6cc6b2aecf15b6b7
35
py
Python
addsource/__init__.py
t-brandt/acorns-adi
6645fae7878a1801beeda0c6604b01e61f37ca15
[ "BSD-2-Clause" ]
1
2016-10-30T16:29:51.000Z
2016-10-30T16:29:51.000Z
addsource/__init__.py
t-brandt/acorns-adi
6645fae7878a1801beeda0c6604b01e61f37ca15
[ "BSD-2-Clause" ]
null
null
null
addsource/__init__.py
t-brandt/acorns-adi
6645fae7878a1801beeda0c6604b01e61f37ca15
[ "BSD-2-Clause" ]
null
null
null
from addsource import addsource
7
31
0.8
4
35
7
0.75
0
0
0
0
0
0
0
0
0
0
0
0.2
35
4
32
8.75
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
e8cb15a2165132fb6ce2866f04fe63450ca4a346
1,642
py
Python
pymtl3/passes/sverilog/TranslationImportPass.py
hsqforfun/pymtl3
05e06601cf262a663a95d1235cb99056ece84580
[ "BSD-3-Clause" ]
1
2019-11-12T12:26:01.000Z
2019-11-12T12:26:01.000Z
pymtl3/passes/sverilog/TranslationImportPass.py
hsqforfun/pymtl3
05e06601cf262a663a95d1235cb99056ece84580
[ "BSD-3-Clause" ]
null
null
null
pymtl3/passes/sverilog/TranslationImportPass.py
hsqforfun/pymtl3
05e06601cf262a663a95d1235cb99056ece84580
[ "BSD-3-Clause" ]
null
null
null
#=========================================================================
# TranslationImportPass.py
#=========================================================================
# Translate and import components having the `sverilog_translate_import`
# attribute.
#
# Author : Peitian Pan
# Date : Aug 6, 2019

from pymtl3.passes.BasePass import BasePass
from pymtl3.passes.sverilog.import_.ImportConfigs import ImportConfigs
from pymtl3.passes.sverilog.import_.ImportPass import ImportPass
from pymtl3.passes.sverilog.translation.TranslationPass import TranslationPass


class TranslationImportPass( BasePass ):

  def __call__( s, top ):
    s.top = top
    s.traverse_hierarchy( top )
    top.apply( s.get_translation_pass() )
    return s.get_import_pass()( top )

  def traverse_hierarchy( s, m ):
    if hasattr(m, s.get_flag_name()):
      if not hasattr(m, s.get_translation_flag_name()):
        setattr(m, s.get_translation_flag_name(), s.get_translation_configs())
      if not hasattr(m, s.get_import_flag_name()):
        setattr(m, s.get_import_flag_name(), s.get_import_configs())
    for child in m.get_child_components():
      s.traverse_hierarchy( child )

  def get_translation_pass( s ):
    return TranslationPass()

  def get_import_pass( s ):
    return ImportPass()

  def get_flag_name( s ):
    return "sverilog_translate_import"

  def get_translation_flag_name( s ):
    return "sverilog_translate"

  def get_import_flag_name( s ):
    return "sverilog_import"

  def get_translation_configs( s ):
    return True

  def get_import_configs( s ):
    return ImportConfigs(vl_Wno_list=['UNOPTFLAT', 'UNSIGNED'])
30.981132
78
0.667479
198
1,642
5.247475
0.267677
0.034649
0.024062
0.069297
0.284889
0.180943
0
0
0
0
0
0.006489
0.155298
1,642
52
79
31.576923
0.74261
0.180268
0
0
0
0
0.056054
0.018685
0
0
0
0
0
1
0.28125
false
0.34375
0.46875
0.21875
1.03125
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
1
1
1
0
0
5
2cdaa12d95236390cf439f073cc850995e270822
7,742
py
Python
tests.py
iamtrask/PyAono
4d847d7552dcee2c1e31727f37c4618ee7241896
[ "Apache-2.0" ]
2
2019-04-15T14:00:15.000Z
2020-04-15T04:36:04.000Z
tests.py
iamtrask/PyAono
4d847d7552dcee2c1e31727f37c4618ee7241896
[ "Apache-2.0" ]
null
null
null
tests.py
iamtrask/PyAono
4d847d7552dcee2c1e31727f37c4618ee7241896
[ "Apache-2.0" ]
2
2018-09-18T13:52:29.000Z
2019-09-06T03:49:35.000Z
import unittest
import Aono


class correctness_test(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        super(correctness_test, self).__init__(*args, **kwargs)
        keys = Aono.key_gen()
        self.keys = keys.generate_key(100, 64, 40, 8, 4, 7)

    def test_single_data(self):
        ct = Aono.ciphertext(5, self.keys.pk)
        pt = Aono.pari_GEN(5)
        self.assertEqual(ct.decrypt(self.keys.sk)[0][0], pt)

    def test_multiple_data(self):
        ct = Aono.ciphertext([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], self.keys.pk)
        pt = Aono.pari_GEN([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
        self.assertEqual(ct.decrypt(self.keys.sk).sub_mat_array(0, 10), pt)


class addition_test(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        super(addition_test, self).__init__(*args, **kwargs)
        keys = Aono.key_gen()
        self.keys = keys.generate_key(100, 64, 40, 8, 4, 7)

    def test_single_data(self):
        ct_1 = Aono.ciphertext(10, self.keys.pk)
        ct_2 = Aono.ciphertext(5, self.keys.pk)
        pt = Aono.pari_GEN(15)
        ct = ct_1 + ct_2
        self.assertEqual(ct.decrypt(self.keys.sk)[0][0], pt)

    def test_multiple_data(self):
        ct_1 = Aono.ciphertext([1, 2, 3, 4, 5], self.keys.pk)
        ct_2 = Aono.ciphertext([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], self.keys.pk)
        pt = Aono.pari_GEN([2, 4, 6, 8, 10, 6, 7, 8, 9, 10])
        ct = ct_1 + ct_2
        self.assertEqual(ct.decrypt(self.keys.sk).sub_mat_array(0, 10), pt)


class subtraction_test(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        super(subtraction_test, self).__init__(*args, **kwargs)
        keys = Aono.key_gen()
        self.keys = keys.generate_key(100, 64, 40, 8, 4, 7)

    def test_single_data(self):
        ct_1 = Aono.ciphertext(10, self.keys.pk)
        ct_2 = Aono.ciphertext(5, self.keys.pk)
        pt = Aono.pari_GEN(5)
        ct = ct_1 - ct_2
        self.assertEqual(ct.decrypt(self.keys.sk)[0][0], pt)

    def test_multiple_data(self):
        ct_1 = Aono.ciphertext([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], self.keys.pk)
        ct_2 = Aono.ciphertext([1, 2, 3, 4, 5], self.keys.pk)
        pt = Aono.pari_GEN([0, 0, 0, 0, 0, 6, 7, 8, 9, 10])
        ct = ct_1 - ct_2
        self.assertEqual(ct.decrypt(self.keys.sk).sub_mat_array(0, 10), pt)


class miscellaneous(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        super(miscellaneous, self).__init__(*args, **kwargs)
        keys = Aono.key_gen()
        self.keys = keys.generate_key(100, 64, 40, 8, 4, 7)

    def test_ciphertext_array(self):
        ciphertext_array = []
        for i in range(1, 10):
            ciphertext_array.append(Aono.ciphertext(i, self.keys.pk))
        pt = Aono.pari_GEN(5)
        self.assertEqual(ciphertext_array[4].decrypt(self.keys.sk)[0][0], pt)

    def test_nested_operations(self):
        ct_1 = Aono.ciphertext(3, self.keys.pk)
        ct_2 = Aono.ciphertext(4, self.keys.pk)
        pt = Aono.pari_GEN(38)
        ct = 2 * ((ct_1 * ct_2) + (ct_1 + ct_2))
        self.assertEqual(ct.decrypt(self.keys.sk)[0][0], pt)


class multiplication_test(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        super(multiplication_test, self).__init__(*args, **kwargs)
        keys = Aono.key_gen()
        self.keys = keys.generate_key(100, 64, 40, 8, 4, 7)

    def test_single_data(self):
        ct_1 = Aono.ciphertext(10, self.keys.pk)
        ct_2 = Aono.ciphertext(5, self.keys.pk)
        pt = Aono.pari_GEN(50)
        ct = ct_1 * ct_2
        self.assertEqual(ct.decrypt(self.keys.sk)[0][0], pt)

    def test_multiple_data(self):
        ct_1 = Aono.ciphertext([1, 2, 3, 4, 5], self.keys.pk)
        ct_2 = Aono.ciphertext([1, 2, 3, 4, 5], self.keys.pk)
        pt = Aono.pari_GEN([[1,2,3,4,5],[2,4,6,8,10],[3,6,9,12,15],[4,8,12,16,20],[5,10,15,20,25]])
        ct = ct_1 * ct_2
        self.assertEqual(ct.decrypt(self.keys.sk).sub_mat_array(0, 5, 0, 5), pt)

    def test_single_data_plaintext(self):
        ct = Aono.ciphertext(10, self.keys.pk)
        pt = Aono.pari_GEN(40)
        ct_1 = ct * 4
        ct_2 = 4 * ct
        self.assertEqual(ct_1.decrypt(self.keys.sk)[0][0], pt)
        self.assertEqual(ct_1.decrypt(self.keys.sk)[0][0], pt)

    def test_multiple_data_plaintext(self):
        ct = Aono.ciphertext([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], self.keys.pk)
        pt = Aono.pari_GEN([4, 8, 12, 16, 20, 24, 28, 32, 36, 40])
        ct_1 = ct * 4
        ct_2 = 4 * ct
        self.assertEqual(ct_1.decrypt(self.keys.sk).sub_mat_array(0, 10), pt)
        self.assertEqual(ct_2.decrypt(self.keys.sk).sub_mat_array(0, 10), pt)


class keyswitching_test(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        super(keyswitching_test, self).__init__(*args, **kwargs)
        self.keysgen = Aono.key_gen()
        self.keys = self.keysgen.generate_key(100, 64, 20, 8, 4, 7)
        self.keys2 = self.keysgen.generate_key(100, 64, 10, 16, 4, 7)
        self.keys3 = self.keysgen.generate_key(100, 64, 25, 8, 4, 7)
        self.updatingkey = Aono.updation_key_gen()
        self.ukey1 = self.updatingkey.generate_key(self.keys, self.keys2)
        self.ukey2 = self.updatingkey.generate_key(self.keys, self.keys3)

    def test_single_data_decrease_security(self):
        ct = Aono.ciphertext(5, self.keys.pk)
        ctupdated = self.ukey1.cipher_switch(ct)
        pt = Aono.pari_GEN(5)
        self.assertEqual(ctupdated.decrypt(self.keys2.sk)[0][0], pt)

    def test_single_data_increase_security(self):
        ct = Aono.ciphertext(5, self.keys.pk)
        ctupdated = self.ukey2.cipher_switch(ct)
        pt = Aono.pari_GEN(5)
        self.assertEqual(ctupdated.decrypt(self.keys3.sk)[0][0], pt)

    def test_multiple_data_increase_security(self):
        ct = Aono.ciphertext([5,4,3,2,1], self.keys.pk)
        ctupdated = self.ukey2.cipher_switch(ct)
        pt = Aono.pari_GEN([5,4,3,2,1])
        self.assertEqual(ctupdated.decrypt(self.keys3.sk).sub_mat_array(0, 5), pt)

    def test_single_data_multiplication(self):
        ct1 = Aono.ciphertext(5, self.keys.pk)
        ct2 = Aono.ciphertext(4, self.keys3.pk)
        ctupdated = self.ukey2.cipher_switch(ct1)
        pt = Aono.pari_GEN(20)
        ct = ctupdated * ct2
        self.assertEqual(ct.decrypt(self.keys3.sk)[0][0], pt)
        ct = 5 * ctupdated
        pt = Aono.pari_GEN(25)
        self.assertEqual(ct.decrypt(self.keys3.sk)[0][0], pt)

    def test_single_data_addition(self):
        ct1 = Aono.ciphertext(5, self.keys.pk)
        ct2 = Aono.ciphertext(4, self.keys3.pk)
        ct3 = Aono.ciphertext(6, self.keys3.pk)
        ctupdated = self.ukey2.cipher_switch(ct1)
        pt = Aono.pari_GEN(9)
        ct = ctupdated + ct2
        self.assertEqual(ct.decrypt(self.keys3.sk)[0][0], pt)
        ctmul = ct2 * ct3
        ct = ctmul + ctupdated
        pt = Aono.pari_GEN(29)
        self.assertEqual(ct.decrypt(self.keys3.sk)[0][0], pt)
        ct = (2 * ctupdated) + ct2
        pt = Aono.pari_GEN(14)
        self.assertEqual(ct.decrypt(self.keys3.sk)[0][0], pt)

    def test_multiple_data_addition(self):
        ct1 = Aono.ciphertext([5,4,3,2], self.keys.pk)
        ct2 = Aono.ciphertext([4,3,2,1], self.keys3.pk)
        ct3 = Aono.ciphertext([6,5,4,3], self.keys3.pk)
        ctupdated = self.ukey2.cipher_switch(ct1)
        pt = Aono.pari_GEN([9,7,5,3])
        ct = ctupdated + ct2
        self.assertEqual(ct.decrypt(self.keys3.sk).sub_mat_array(0, 4), pt)
        ct = 2 * ( 2 * ctupdated + ct2 )
        pt = Aono.pari_GEN([28,22,16,10])
        self.assertEqual(ct.decrypt(self.keys3.sk).sub_mat_array(0, 4), pt)


if __name__ == '__main__':
    unittest.main()
40.962963
99
0.604237
1,225
7,742
3.640816
0.075102
0.084305
0.056054
0.064126
0.873991
0.843498
0.811659
0.731839
0.692601
0.634753
0
0.078032
0.238569
7,742
188
100
41.180851
0.678541
0
0
0.5125
0
0
0.001033
0
0
0
0
0
0.15
1
0.15
false
0
0.0125
0
0.2
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
fa0cdf82c5d25a24979b9ef081cad21bde7f4560
323
py
Python
src/mynumbers.py
metaperl/gridtrader-ng
eeba5f0dfbdd70026a49f9648d170b5f517edb7e
[ "MIT" ]
1
2020-10-10T22:50:09.000Z
2020-10-10T22:50:09.000Z
src/mynumbers.py
metaperl/gridtrader-ng
eeba5f0dfbdd70026a49f9648d170b5f517edb7e
[ "MIT" ]
null
null
null
src/mynumbers.py
metaperl/gridtrader-ng
eeba5f0dfbdd70026a49f9648d170b5f517edb7e
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-

from sympy import N
from sympy.core.numbers import Float


def mystr(f):
    return "{:.8f}".format(float(f))


Float.__str__ = Float.__repr__ = mystr


def F(n):
    return N(n, 8)


def CF(config, config_section, config_parm):
    return F(config.getfloat(config_section, config_parm))
19
59
0.656347
48
323
4.166667
0.479167
0.09
0.19
0.23
0
0
0
0
0
0
0
0.011628
0.201238
323
16
60
20.1875
0.763566
0.065015
0
0
0
0
0.021201
0
0
0
0
0
0
1
0.375
false
0
0.25
0.375
0.875
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
fa5addbe54e3a8afcab23d0cd1765c22c4091637
15,384
py
Python
tests/test_smartapp.py
andrewsayre/pysmartapp
5c3be867584d7e82d00b5998295b20bd12eccf94
[ "MIT" ]
10
2019-02-07T20:07:10.000Z
2020-12-30T20:29:32.000Z
tests/test_smartapp.py
andrewsayre/pysmartapp
5c3be867584d7e82d00b5998295b20bd12eccf94
[ "MIT" ]
1
2021-12-05T15:00:13.000Z
2021-12-05T15:00:13.000Z
tests/test_smartapp.py
andrewsayre/pysmartapp
5c3be867584d7e82d00b5998295b20bd12eccf94
[ "MIT" ]
2
2020-10-17T20:20:45.000Z
2021-09-28T12:58:50.000Z
"""Tests for the SmartApp file.""" import asyncio import pytest from pysmartapp.dispatch import Dispatcher from pysmartapp.errors import ( SignatureVerificationError, SmartAppNotRegisteredError) from pysmartapp.smartapp import SmartApp, SmartAppManager from .utilities import get_dispatch_handler, get_fixture INSTALLED_APP_ID = '8a0dcdc9-1ab4-4c60-9de7-cb78f59a1121' APP_ID = 'f6c071aa-6ae7-463f-b0ad-8620ac23140f' class TestSmartApp: """Tests for the SmartApp class.""" @staticmethod def test_initialize(): """Tests the property initialization.""" # Arrange path = '/my/test/path' public_key = 'test' dispatcher = Dispatcher() # Act app = SmartApp(path=path, public_key=public_key, dispatcher=dispatcher) # Assert assert app.path == path assert app.public_key == public_key assert app.dispatcher == dispatcher assert app.permissions == [] assert app.config_app_id == 'app' @staticmethod def test_setters(): """Tests the property setters.""" # Arrange app = SmartApp() # Act app.app_id = "Test" app.config_app_id = "Test Config Id" app.description = "Description" app.name = "Name" # Assert assert app.app_id == "Test" assert app.config_app_id == "Test Config Id" assert app.description == "Description" assert app.name == "Name" @staticmethod @pytest.mark.asyncio async def test_ping(smartapp): """Tests the ping lifecycle event.""" # Arrange request = get_fixture("ping_request") expected_response = get_fixture("ping_response") handler = get_dispatch_handler(smartapp) smartapp.connect_ping(handler) # Act response = await smartapp.handle_request(request) # ensure dispatched tasks complete await asyncio.gather(*smartapp.dispatcher.last_sent) # Assert assert handler.fired assert response == expected_response @staticmethod @pytest.mark.asyncio async def test_config_init(smartapp): """Tests the configuration initialization lifecycle event.""" # Arrange request = get_fixture("config_init_request") expected_response = get_fixture("config_init_response") handler = get_dispatch_handler(smartapp) smartapp.connect_config(handler) # Act response = await smartapp.handle_request(request, None, False) # ensure dispatched tasks complete await asyncio.gather(*smartapp.dispatcher.last_sent) # Assert assert handler.fired assert response == expected_response @staticmethod @pytest.mark.asyncio async def test_config_page(smartapp): """Tests the configuration initialization page event.""" # Arrange request = get_fixture("config_page_request") expected_response = get_fixture("config_page_response") handler = get_dispatch_handler(smartapp) smartapp.connect_config(handler) # Act response = await smartapp.handle_request(request, None, False) # ensure dispatched tasks complete await asyncio.gather(*smartapp.dispatcher.last_sent) # Assert assert handler.fired assert response == expected_response @staticmethod @pytest.mark.asyncio async def test_install(smartapp): """Tests the install lifecycle event.""" # Arrange request = get_fixture("install_request") expected_response = get_fixture("install_response") handler = get_dispatch_handler(smartapp) smartapp.connect_install(handler) # Act response = await smartapp.handle_request(request, None, False) # ensure dispatched tasks complete await asyncio.gather(*smartapp.dispatcher.last_sent) # Assert assert handler.fired assert response == expected_response @staticmethod @pytest.mark.asyncio async def test_update(smartapp): """Tests the update lifecycle event.""" # Arrange request = get_fixture("update_request") expected_response = get_fixture("update_response") handler = 
get_dispatch_handler(smartapp) smartapp.connect_update(handler) # Act response = await smartapp.handle_request(request, None, False) # ensure dispatched tasks complete await asyncio.gather(*smartapp.dispatcher.last_sent) # Assert assert handler.fired assert response == expected_response @staticmethod @pytest.mark.asyncio async def test_event(smartapp): """Tests the event lifecycle event.""" # Arrange request = get_fixture("event_request") expected_response = get_fixture("event_response") handler = get_dispatch_handler(smartapp) smartapp.connect_event(handler) # Act response = await smartapp.handle_request(request, None, False) # ensure dispatched tasks complete await asyncio.gather(*smartapp.dispatcher.last_sent) # Assert assert handler.fired assert response == expected_response @staticmethod @pytest.mark.asyncio async def test_oauth_callback(smartapp): """Tests the oauth_callback lifecycle event.""" # Arrange request = get_fixture("oauth_callback_request") expected_response = get_fixture("oauth_callback_response") handler = get_dispatch_handler(smartapp) smartapp.connect_oauth_callback(handler) # Act response = await smartapp.handle_request(request, None, False) # ensure dispatched tasks complete await asyncio.gather(*smartapp.dispatcher.last_sent) # Assert assert handler.fired assert response == expected_response @staticmethod @pytest.mark.asyncio async def test_uninstall(smartapp): """Tests the uninstall lifecycle event.""" # Arrange request = get_fixture("uninstall_request") expected_response = get_fixture("uninstall_response") handler = get_dispatch_handler(smartapp) smartapp.connect_uninstall(handler) # Act response = await smartapp.handle_request(request, None, False) # ensure dispatched tasks complete await asyncio.gather(*smartapp.dispatcher.last_sent) # Assert assert handler.fired assert response == expected_response @staticmethod @pytest.mark.asyncio async def test_handle_request_sig_verification(): """Tests handle_request with sig verification.""" # Arrange public_key = get_fixture('public_key', 'pem') data = get_fixture('config_init_sig_pass_request') smartapp = SmartApp(public_key=public_key) # Act resp = await smartapp.handle_request( data['body'], data['headers'], True) # Assert assert resp @staticmethod @pytest.mark.asyncio async def test_handle_request_sig_verification_missing_headers(): """Tests handle_request with sig verification.""" # Arrange public_key = get_fixture('public_key', 'pem') data = get_fixture('config_init_sig_pass_request') smartapp = SmartApp(public_key=public_key) # Act/Assert with pytest.raises(SignatureVerificationError): await smartapp.handle_request(data['body'], [], True) @staticmethod @pytest.mark.asyncio async def test_handle_request_sig_verification_fails(): """Tests handle_request with sig verification.""" # Arrange public_key = get_fixture('public_key', 'pem') data = get_fixture('config_init_sig_fail_request') smartapp = SmartApp(public_key=public_key) # Act/Assert with pytest.raises(SignatureVerificationError): await smartapp.handle_request(data['body'], data['headers'], True) class TestSmartAppManager: """Tests for the SmartAppManager class.""" @staticmethod @pytest.mark.asyncio async def test_handle_request_ping_not_registered(manager): """Tests the ping lifecycle event with no registered apps.""" # Arrange request = get_fixture("ping_request") expected_response = get_fixture("ping_response") handler = get_dispatch_handler(manager) manager.connect_ping(handler) # Act response = await manager.handle_request(request) # ensure dispatched 
tasks complete await asyncio.gather(*manager.dispatcher.last_sent) # Assert assert handler.fired assert response == expected_response @staticmethod @pytest.mark.asyncio async def test_handle_request_not_registered(manager: SmartAppManager): """Tests processing a request when no SmartApp has been registered.""" # Arrange request = get_fixture("config_init_request") # Act with pytest.raises(SmartAppNotRegisteredError) as e_info: await manager.handle_request(request, None, False) # Assert assert e_info.value.installed_app_id == INSTALLED_APP_ID @staticmethod @pytest.mark.asyncio async def test_handle_request_not_app_id(manager: SmartAppManager): """Tests processing a request when no SmartApp has been registered.""" # Arrange request = get_fixture("config_init_sig_fail_request")['body'] # Act with pytest.raises(SmartAppNotRegisteredError) as e_info: await manager.handle_request(request, None, False) # Assert assert e_info.value.installed_app_id == INSTALLED_APP_ID @staticmethod def test_register(manager: SmartAppManager): """Test register.""" public_key = '123' # Act app = manager.register(APP_ID, public_key) # Assert assert app.app_id == APP_ID assert app.public_key == public_key assert app.path == manager.path assert APP_ID in manager.smartapps @staticmethod def test_register_no_app_id(manager: SmartAppManager): """Test register with no SmartApp app id.""" # Act with pytest.raises(ValueError) as e_info: manager.register(None, '') # Assert assert str(e_info.value) == 'smartapp must have an app_id.' @staticmethod def test_register_twice(manager: SmartAppManager): """Test register with the same app twice.""" # Arrange public_key = '123' manager.register(APP_ID, public_key) # Act with pytest.raises(ValueError) as e_info: manager.register(APP_ID, public_key) # Assert assert str(e_info.value) == 'smartapp already registered.' @staticmethod def test_unregister(manager: SmartAppManager): """Test unregister.""" # Arrange' manager.register(APP_ID, '123') # Act manager.unregister(APP_ID) # Assert assert APP_ID not in manager.smartapps @staticmethod def test_unregister_no_app_id(manager: SmartAppManager): """Test unregister with no SmartApp app id.""" # Act with pytest.raises(ValueError) as e_info: manager.unregister(None) # Assert assert str(e_info.value) == 'smartapp must have an app_id.' @staticmethod def test_unregister_not_registered(manager: SmartAppManager): """Test register with the same app twice.""" # Act with pytest.raises(ValueError) as e_info: manager.unregister(APP_ID) # Assert assert str(e_info.value) == 'smartapp was not previously registered.' 
@staticmethod @pytest.mark.asyncio async def test_on_config(manager: SmartAppManager): """Tests the config event handler at the manager level.""" # Arrange request = get_fixture("config_init_request") app = manager.register(APP_ID, 'none') handler = get_dispatch_handler(app) manager.connect_config(handler) # Act await manager.handle_request(request, None, False) # ensure dispatched tasks complete await asyncio.gather(*manager.dispatcher.last_sent) # Assert assert handler.fired @staticmethod @pytest.mark.asyncio async def test_on_install(manager: SmartAppManager): """Tests the config event handler at the manager level.""" # Arrange request = get_fixture("install_request") app = manager.register(APP_ID, 'none') handler = get_dispatch_handler(app) manager.connect_install(handler) # Act await manager.handle_request(request, None, False) # ensure dispatched tasks complete await asyncio.gather(*manager.dispatcher.last_sent) # Assert assert handler.fired @staticmethod @pytest.mark.asyncio async def test_on_update(manager: SmartAppManager): """Tests the config event handler at the manager level.""" # Arrange request = get_fixture("update_request") app = manager.register(APP_ID, 'none') handler = get_dispatch_handler(app) manager.connect_update(handler) # Act await manager.handle_request(request, None, False) # ensure dispatched tasks complete await asyncio.gather(*manager.dispatcher.last_sent) # Assert assert handler.fired @staticmethod @pytest.mark.asyncio async def test_on_event(manager: SmartAppManager): """Tests the config event handler at the manager level.""" # Arrange request = get_fixture("event_request") app = manager.register(APP_ID, 'none') handler = get_dispatch_handler(app) manager.connect_event(handler) # Act await manager.handle_request(request, None, False) # ensure dispatched tasks complete await asyncio.gather(*manager.dispatcher.last_sent) # Assert assert handler.fired @staticmethod @pytest.mark.asyncio async def test_on_oauth_callback(manager: SmartAppManager): """Tests the config event handler at the manager level.""" # Arrange request = get_fixture("oauth_callback_request") app = manager.register(APP_ID, 'none') handler = get_dispatch_handler(app) manager.connect_oauth_callback(handler) # Act await manager.handle_request(request, None, False) # ensure dispatched tasks complete await asyncio.gather(*manager.dispatcher.last_sent) # Assert assert handler.fired @staticmethod @pytest.mark.asyncio async def test_on_uninstall(manager: SmartAppManager): """Tests the config event handler at the manager level.""" # Arrange request = get_fixture("uninstall_request") app = manager.register(APP_ID, 'none') handler = get_dispatch_handler(app) manager.connect_uninstall(handler) # Act await manager.handle_request(request, None, False) # ensure dispatched tasks complete await asyncio.gather(*manager.dispatcher.last_sent) # Assert assert handler.fired
35.86014
78
0.657566
1,650
15,384
5.929697
0.078182
0.017375
0.044971
0.05928
0.834832
0.798855
0.732318
0.7037
0.650961
0.628066
0
0.004032
0.25832
15,384
428
79
35.943925
0.853462
0.087429
0
0.632959
0
0
0.070978
0.019556
0
0
0
0
0.168539
1
0.029963
false
0.007491
0.022472
0
0.059925
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
d73a7001aab9a79102dd6b286d1d05150d74c65b
114
py
Python
networking_bgpvpn/tests/functional/test_placeholder.py
cgoncalves/networking-bgpvpn
3ec876c4ead840874e08d6dc876a36814d5f1f81
[ "Apache-2.0" ]
38
2015-06-23T08:06:16.000Z
2022-01-25T16:03:10.000Z
networking_bgpvpn/tests/functional/test_placeholder.py
cgoncalves/networking-bgpvpn
3ec876c4ead840874e08d6dc876a36814d5f1f81
[ "Apache-2.0" ]
null
null
null
networking_bgpvpn/tests/functional/test_placeholder.py
cgoncalves/networking-bgpvpn
3ec876c4ead840874e08d6dc876a36814d5f1f81
[ "Apache-2.0" ]
17
2015-11-28T00:45:22.000Z
2021-07-22T09:22:30.000Z
from neutron.tests import base


class PlaceholderTest(base.BaseTestCase):

    def test_noop(self):
        pass
14.25
41
0.719298
14
114
5.785714
0.928571
0
0
0
0
0
0
0
0
0
0
0
0.210526
114
7
42
16.285714
0.9
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0.25
0.25
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
5
d74a801247cef3fdcc59e23b442d46496591bf5c
593
py
Python
haasomeapi/dataobjects/accountdata/OrderContainer.py
iamcos/haasomeapi
eac1640cc13e1e7649b8a8d6ed88184722c907c8
[ "MIT" ]
9
2018-07-08T22:40:53.000Z
2022-03-21T20:32:43.000Z
haasomeapi/dataobjects/accountdata/OrderContainer.py
iamcos/haasomeapi
eac1640cc13e1e7649b8a8d6ed88184722c907c8
[ "MIT" ]
5
2018-08-25T11:48:05.000Z
2019-12-12T19:57:20.000Z
haasomeapi/dataobjects/accountdata/OrderContainer.py
iamcos/haasomeapi
eac1640cc13e1e7649b8a8d6ed88184722c907c8
[ "MIT" ]
6
2018-08-31T23:49:36.000Z
2022-01-08T04:51:21.000Z
from typing import Dict

from haasomeapi.dataobjects.accountdata.BaseOrder import BaseOrder


class OrderContainer:
    """
    Data Object containing the Base Order

    :ivar exchangeOrderList: Dict[str, :class:`~haasomeapi.dataobjects.accountdata.BaseOrder`]:
    :ivar marginOrderList: Dict[str, :class:`~haasomeapi.dataobjects.accountdata.BaseOrder`]:
    :ivar leverageOrderList: Dict[str, :class:`~haasomeapi.dataobjects.accountdata.BaseOrder`]:
    """

    exchangeOrderList: Dict[str, BaseOrder]
    marginOrderList: Dict[str, BaseOrder]
    leverageOrderList: Dict[str, BaseOrder]
34.882353
96
0.755481
58
593
7.724138
0.362069
0.09375
0.285714
0.366071
0.372768
0.372768
0.372768
0.254464
0
0
0
0
0.136594
593
16
97
37.0625
0.875
0.526138
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
d78b04e8dc2d6bb30b4ac5e86e2e6ad33ec10ee8
50
py
Python
examples/_tests_scripts/dl_utils.py
rhololkeolke/catalyst-rl
ec18ff4a58b6d00652f772231db8de86debb4b3d
[ "Apache-2.0" ]
46
2020-03-27T20:12:32.000Z
2021-11-21T19:08:51.000Z
examples/_tests_scripts/dl_utils.py
rhololkeolke/catalyst-rl
ec18ff4a58b6d00652f772231db8de86debb4b3d
[ "Apache-2.0" ]
2
2020-04-06T10:43:04.000Z
2020-07-01T18:26:10.000Z
examples/_tests_scripts/dl_utils.py
rhololkeolke/catalyst-rl
ec18ff4a58b6d00652f772231db8de86debb4b3d
[ "Apache-2.0" ]
5
2020-04-17T14:09:53.000Z
2021-05-10T08:58:29.000Z
# flake8: noqa
from catalyst_rl.dl.utils import *
16.666667
34
0.76
8
50
4.625
1
0
0
0
0
0
0
0
0
0
0
0.023256
0.14
50
2
35
25
0.837209
0.24
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
ad07ae8a0091e1caafe7216f8c5098e96d81dccb
136
py
Python
animals_app/admin.py
wagnergbriel/IloveAnimals-ProjectComplete
67fa321427bd8e9db91e3b0d6acaa0d5a32967e0
[ "MIT" ]
1
2021-12-10T23:05:00.000Z
2021-12-10T23:05:00.000Z
animals_app/admin.py
wagnergbriel/IloveAnimals-ProjectComplete
67fa321427bd8e9db91e3b0d6acaa0d5a32967e0
[ "MIT" ]
null
null
null
animals_app/admin.py
wagnergbriel/IloveAnimals-ProjectComplete
67fa321427bd8e9db91e3b0d6acaa0d5a32967e0
[ "MIT" ]
null
null
null
from django.contrib import admin

from .models import Colaborador, Animais

admin.site.register(Colaborador)
admin.site.register(Animais)
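For reference, the same registration can be written with Django's @admin.register decorator; the AnimaisAdmin class and its list_display columns are illustrative, not from the repo.

from django.contrib import admin

from .models import Animais


@admin.register(Animais)
class AnimaisAdmin(admin.ModelAdmin):
    list_display = ("id",)  # hypothetical columns; match them to the model's fields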
27.2
40
0.838235
18
136
6.333333
0.555556
0.157895
0.298246
0
0
0
0
0
0
0
0
0
0.080882
136
5
41
27.2
0.912
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
ad6c367e3d29fe52f7a9d1fa81bf12b30a345fb5
95
py
Python
post3/src/main.py
abelsiqueira/call-julia-from-python-experiments
c9f5e8dd6d2d50f0b5ad331df75e028e0ab3b5b5
[ "MIT" ]
8
2021-11-17T09:42:24.000Z
2022-01-04T10:44:11.000Z
post3/src/main.py
abelsiqueira/faster-python-using-julia-blogposts
c9f5e8dd6d2d50f0b5ad331df75e028e0ab3b5b5
[ "MIT" ]
20
2021-12-02T14:59:20.000Z
2022-01-05T12:24:06.000Z
post3/src/main.py
abelsiqueira/faster-python-using-julia-blogposts
c9f5e8dd6d2d50f0b5ad331df75e028e0ab3b5b5
[ "MIT" ]
3
2022-02-15T17:32:05.000Z
2022-03-16T18:54:56.000Z
import run_experiments
import run_analysis
import gain_over_effort
import split_pandas_analysis
23.75
28
0.926316
14
95
5.857143
0.642857
0.219512
0
0
0
0
0
0
0
0
0
0
0.073684
95
4
28
23.75
0.931818
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
d110c22c4d68c906e3ff729409033d3a0379cafc
62
py
Python
nntransfer/models/wrappers/__init__.py
sinzlab/nntransfer
78622feb568b1cc50ac0f73c8297f9785876a659
[ "MIT" ]
null
null
null
nntransfer/models/wrappers/__init__.py
sinzlab/nntransfer
78622feb568b1cc50ac0f73c8297f9785876a659
[ "MIT" ]
null
null
null
nntransfer/models/wrappers/__init__.py
sinzlab/nntransfer
78622feb568b1cc50ac0f73c8297f9785876a659
[ "MIT" ]
2
2021-07-30T08:39:04.000Z
2022-03-01T16:26:11.000Z
from .intermediate_layer_getter import IntermediateLayerGetter
62
62
0.935484
6
62
9.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.048387
62
1
62
62
0.949153
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
d1172978f57e43d2f8399889e0e801ccc83d0059
27911
py
Python
tests/unit/storage/cassandra/test_services.py
satroutr/poppy
27417f86854d9e0a04726acc263ef0a2ce9f8f6e
[ "Apache-2.0" ]
3
2017-07-05T20:09:59.000Z
2018-11-27T22:02:57.000Z
tests/unit/storage/cassandra/test_services.py
satroutr/poppy
27417f86854d9e0a04726acc263ef0a2ce9f8f6e
[ "Apache-2.0" ]
24
2017-04-18T15:14:04.000Z
2019-03-20T19:09:07.000Z
tests/unit/storage/cassandra/test_services.py
satroutr/poppy
27417f86854d9e0a04726acc263ef0a2ce9f8f6e
[ "Apache-2.0" ]
8
2017-04-03T13:24:27.000Z
2021-11-08T20:28:10.000Z
# Copyright (c) 2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import json import uuid try: import ordereddict as collections except ImportError: # pragma: no cover import collections # pragma: no cover import ddt import mock from oslo_config import cfg import testtools from poppy.model.helpers import provider_details from poppy.storage.cassandra import driver from poppy.storage.cassandra import services from poppy.transport.pecan.models.request import service as req_service from tests.unit import base @ddt.ddt class CassandraStorageServiceTests(base.TestCase): def setUp(self): super(CassandraStorageServiceTests, self).setUp() # mock arguments to use self.project_id = '123456' self.service_id = uuid.uuid4() self.service_name = 'mocksite' # create mocked config and driver conf = cfg.ConfigOpts() conf.register_opt( cfg.StrOpt( 'datacenter', default='', help='datacenter where the C* cluster hosted')) conf.register_opts(driver.CASSANDRA_OPTIONS, group=driver.CASSANDRA_GROUP) cassandra_driver = driver.CassandraStorageDriver(conf) migrations_patcher = mock.patch( 'cdeploy.migrator.Migrator' ) migrations_patcher.start() self.addCleanup(migrations_patcher.stop) cluster_patcher = mock.patch('cassandra.cluster.Cluster') self.mock_cluster = cluster_patcher.start() self.mock_session = self.mock_cluster().connect() self.addCleanup(cluster_patcher.stop) # stubbed cassandra driver self.sc = services.ServicesController(cassandra_driver) @ddt.file_data('data_get_service.json') def test_get_service(self, value): # mock the response from cassandra value[0]['service_id'] = self.service_id self.mock_session.execute.return_value = value actual_response = self.sc.get_service(self.project_id, self.service_id) # TODO(amitgandhinz): assert the response # matches the expectation (using jsonschema) self.assertEqual(str(actual_response.service_id), str(self.service_id)) @ddt.file_data('data_get_service.json') def test_update_state(self, value): details = value[0]['provider_details'] new_details = {} for provider, detail in list(details.items()): detail = json.loads(detail) detail['status'] = 'deployed' detail['access_urls'] = [ { 'provider_url': "{0}.com".format(provider.lower()), 'domain': detail['access_urls'][0] } ] new_details[provider] = json.dumps(detail) value[0]['provider_details'] = new_details # mock the response from cassandra value[0]['service_id'] = self.service_id self.mock_session.execute.return_value = [value[0]] expected_obj = self.sc.get_service(self.project_id, self.service_id) actual_obj = self.sc.update_state(self.project_id, self.service_id, 'deployed') self.assertEqual(expected_obj.service_id, actual_obj.service_id) def test_get_service_with_exception(self): # mock the response from cassandra self.mock_session.execute.return_value = [] self.assertRaises( ValueError, self.sc.get_service, self.project_id, self.service_id ) @ddt.file_data('../data/data_create_service.json') @mock.patch.object(services.ServicesController, 'domain_exists_elsewhere', return_value=False) def 
test_create_service(self, value, mock_check): service_obj = req_service.load_from_json(value) responses = self.sc.create_service(self.project_id, service_obj) # Expect the response to be None as there are no providers passed # into the driver to respond to this call self.assertEqual(responses, None) # TODO(amitgandhinz): need to validate the create to cassandra worked. @ddt.file_data('../data/data_create_service.json') @mock.patch.object(services.ServicesController, 'domain_exists_elsewhere', return_value=True) def test_create_service_exist(self, value, mock_check): service_obj = req_service.load_from_json(value) self.sc.get = mock.Mock(return_value=service_obj) self.assertRaises( ValueError, self.sc.create_service, self.project_id, service_obj ) @ddt.file_data('data_list_services.json') def test_list_services(self, value): # mock the response from cassandra value[0]['project_id'] = self.project_id self.mock_session.prepare.return_value = mock.Mock() self.mock_session.execute.return_value = value actual_response = self.sc.get_services(self.project_id, None, None) # TODO(amitgandhinz): assert the response # matches the expectation (using jsonschema) self.assertEqual(actual_response[0].name, "mocksite") self.assertEqual(actual_response[0].project_id, self.project_id) @ddt.file_data('data_get_service.json') def test_delete_service(self, value): details = value[0]['provider_details'] new_details = {} for provider, detail in list(details.items()): detail = json.loads(detail) detail['status'] = 'deployed' detail['access_urls'] = [ { 'provider_url': "{0}.com".format(provider.lower()), 'domain': detail['access_urls'][0] } ] new_details[provider] = json.dumps(detail) value[0]['provider_details'] = new_details # mock the response from cassandra value[0]['service_id'] = self.service_id # self.mock_session.execute.return_value = value def mock_execute_side_effect(*args): if args[0].query_string == services.CQL_GET_SERVICE: return [value[0]] else: return None self.mock_session.execute.side_effect = mock_execute_side_effect self.sc.delete_service( self.project_id, self.service_id ) # TODO(isaacm): Add assertions on queries called def test_delete_service_no_result(self): # mock the response from cassandra self.mock_session.execute.return_value = iter([{}]) actual_response = self.sc.delete_service( self.project_id, self.service_id ) # Expect the response to be None as there are no providers passed # into the driver to respond to this call self.assertEqual(actual_response, None) @ddt.file_data('../data/data_update_service.json') @mock.patch.object(services.ServicesController, 'domain_exists_elsewhere', return_value=False) @mock.patch.object(services.ServicesController, 'set_service_provider_details') def test_update_service(self, service_json, mock_set_service_provider_details, mock_check): with mock.patch.object( services.ServicesController, 'get_provider_details') as mock_provider_det: mock_provider_det.return_value = { "MaxCDN": "{\"id\": 11942, \"access_urls\": " "[{\"provider_url\": \"maxcdn.provider.com\", " "\"domain\": \"xk.cd\"}], " "\"domains_certificate_status\":" "{\"mypullzone.com\": " "\"failed\"} }", } self.mock_session.execute.return_value = iter([{}]) service_obj = req_service.load_from_json(service_json) actual_response = self.sc.update_service( self.project_id, self.service_id, service_obj ) # Expect the response to be None as there are no # providers passed into the driver to respond to this call self.assertEqual(actual_response, None) @ddt.file_data('data_provider_details.json') def 
test_get_provider_details(self, provider_details_json): # mock the response from cassandra self.mock_session.execute.return_value = [ {'provider_details': provider_details_json} ] actual_response = self.sc.get_provider_details( self.project_id, self.service_id ) self.assertTrue("MaxCDN" in actual_response) self.assertTrue("Mock" in actual_response) self.assertTrue("CloudFront" in actual_response) self.assertTrue("Fastly" in actual_response) @ddt.file_data('data_provider_details.json') def test_get_provider_details_value_error(self, provider_details_json): # mock the response from cassandra self.mock_session.execute.return_value = [] with testtools.ExpectedException(ValueError): self.sc.get_provider_details( self.project_id, self.service_id ) @ddt.file_data('data_provider_details.json') def test_update_provider_details(self, provider_details_json): provider_details_dict = {} for k, v in provider_details_json.items(): provider_detail_dict = json.loads(v) provider_details_dict[k] = provider_details.ProviderDetail( provider_service_id=( provider_detail_dict["id"]), access_urls=provider_detail_dict["access_urls"], domains_certificate_status=provider_detail_dict.get( "domains_certificate_status", {})) # mock the response from cassandra self.mock_session.execute.return_value = None # this is for update_provider_details unittest code coverage arg_provider_details_dict = {} status = None for provider_name in provider_details_dict: the_provider_detail_dict = collections.OrderedDict() the_provider_detail_dict["id"] = ( provider_details_dict[provider_name].provider_service_id) the_provider_detail_dict["access_urls"] = ( provider_details_dict[provider_name].access_urls) the_provider_detail_dict["status"] = ( provider_details_dict[provider_name].status) status = the_provider_detail_dict["status"] the_provider_detail_dict["name"] = ( provider_details_dict[provider_name].name) the_provider_detail_dict["domains_certificate_status"] = ( provider_details_dict[provider_name]. 
domains_certificate_status.to_dict()) the_provider_detail_dict["error_info"] = ( provider_details_dict[provider_name].error_info) the_provider_detail_dict["error_message"] = ( provider_details_dict[provider_name].error_message) arg_provider_details_dict[provider_name] = json.dumps( the_provider_detail_dict) provider_details_args = { 'project_id': self.project_id, 'service_id': self.service_id, 'provider_details': arg_provider_details_dict } status_args = { 'status': status, 'project_id': self.project_id, 'service_id': self.service_id } # This is to verify mock has been called with the correct arguments def assert_mock_execute_args(*args): if args[0].query_string == services.CQL_UPDATE_PROVIDER_DETAILS: self.assertEqual(args[1], provider_details_args) elif args[0].query_string == services.CQL_SET_SERVICE_STATUS: self.assertEqual(args[1], status_args) self.mock_session.execute.side_effect = assert_mock_execute_args with mock.patch.object( services.ServicesController, 'get_provider_details') as mock_provider_det: mock_provider_det.return_value = { "MaxCDN": # "{\"id\": 11942, \"access_urls\": " # "[{\"provider_url\": \"maxcdn.provider.com\", " # "\"domain\": \"xk.cd\"}], " # "\"domains_certificate_status\":" # "{\"mypullzone.com\": " # "\"failed\"} }", provider_details.ProviderDetail( provider_service_id='{}', access_urls=[] ) } self.sc.update_provider_details( self.project_id, self.service_id, provider_details_dict ) @ddt.file_data('data_provider_details.json') def test_update_provider_details_domain_deleted( self, provider_details_json, ): provider_details_dict = {} for k, v in provider_details_json.items(): provider_detail_dict = json.loads(v) provider_details_dict[k] = provider_details.ProviderDetail( provider_service_id=( provider_detail_dict["id"]), access_urls=provider_detail_dict["access_urls"], domains_certificate_status=provider_detail_dict.get( "domains_certificate_status", {})) # mock the response from cassandra self.mock_session.execute.return_value = None # this is for update_provider_details unittest code coverage arg_provider_details_dict = {} status = None for provider_name in provider_details_dict: the_provider_detail_dict = collections.OrderedDict() the_provider_detail_dict["id"] = ( provider_details_dict[provider_name].provider_service_id) the_provider_detail_dict["access_urls"] = ( provider_details_dict[provider_name].access_urls) the_provider_detail_dict["status"] = ( provider_details_dict[provider_name].status) status = the_provider_detail_dict["status"] the_provider_detail_dict["name"] = ( provider_details_dict[provider_name].name) the_provider_detail_dict["domains_certificate_status"] = ( provider_details_dict[provider_name]. 
domains_certificate_status.to_dict()) the_provider_detail_dict["error_info"] = ( provider_details_dict[provider_name].error_info) the_provider_detail_dict["error_message"] = ( provider_details_dict[provider_name].error_message) arg_provider_details_dict[provider_name] = json.dumps( the_provider_detail_dict) provider_details_args = { 'project_id': self.project_id, 'service_id': self.service_id, 'provider_details': arg_provider_details_dict } status_args = { 'status': status, 'project_id': self.project_id, 'service_id': self.service_id } # This is to verify mock has been called with the correct arguments def assert_mock_execute_args(*args): if args[0].query_string == services.CQL_UPDATE_PROVIDER_DETAILS: self.assertEqual(args[1], provider_details_args) elif args[0].query_string == services.CQL_SET_SERVICE_STATUS: self.assertEqual(args[1], status_args) self.mock_session.execute.side_effect = assert_mock_execute_args with mock.patch.object( services.ServicesController, 'get_provider_details') as mock_provider_det: mock_provider_det.return_value = { "MaxCDN": provider_details.ProviderDetail( provider_service_id=( "{\"id\": 11942, \"access_urls\": " "[{\"provider_url\": \"maxcdn.provider.com\", " "\"domain\": \"xk2.cd\"}], " "\"domains_certificate_status\":" "{\"mypullzone.com\": " "\"failed\"} }" ), access_urls=[ { "provider_url": "fastly.provider.com", "domain": "xk2.cd" } ] ) } self.sc.update_provider_details( self.project_id, self.service_id, provider_details_dict ) delete_queries = [] deleted_domains = [] for query_mock_call in self.sc.session.execute.mock_calls: name, args, kwargs = query_mock_call for arg in args: if hasattr(arg, 'query_string'): if ( arg.query_string == services.CQL_DELETE_PROVIDER_URL ): delete_queries.append(query_mock_call) _, delete_query_args = args deleted_domains.append( delete_query_args["domain_name"]) self.assertEqual(1, len(delete_queries)) self.assertEqual(['xk2.cd'], deleted_domains) self.assertTrue(self.sc.session.execute.called) def test_update_provider_details_new_provider_details_empty(self): provider_details_dict = {} # mock the response from cassandra self.mock_session.execute.return_value = None # this is for update_provider_details unittest code coverage arg_provider_details_dict = {} status = None provider_details_args = { 'project_id': self.project_id, 'service_id': self.service_id, 'provider_details': arg_provider_details_dict } status_args = { 'status': status, 'project_id': self.project_id, 'service_id': self.service_id } # This is to verify mock has been called with the correct arguments def assert_mock_execute_args(*args): if args[0].query_string == services.CQL_UPDATE_PROVIDER_DETAILS: self.assertEqual(args[1], provider_details_args) elif args[0].query_string == services.CQL_SET_SERVICE_STATUS: self.assertEqual(args[1], status_args) self.mock_session.execute.side_effect = assert_mock_execute_args with mock.patch.object( services.ServicesController, 'get_provider_details') as mock_provider_det: mock_provider_det.return_value = { "MaxCDN": provider_details.ProviderDetail( provider_service_id=( "{\"id\": 11942, \"access_urls\": " "[{\"provider_url\": \"maxcdn.provider.com\", " "\"domain\": \"xk2.cd\"}], " "\"domains_certificate_status\":" "{\"mypullzone.com\": " "\"failed\"} }" ), access_urls=[ { "provider_url": "fastly.provider.com", "domain": "xk2.cd" } ] ) } self.sc.update_provider_details( self.project_id, self.service_id, provider_details_dict ) delete_queries = [] deleted_domains = [] for query_mock_call in 
self.sc.session.execute.mock_calls: name, args, kwargs = query_mock_call for arg in args: if hasattr(arg, 'query_string'): if ( arg.query_string == services.CQL_DELETE_PROVIDER_URL ): delete_queries.append(query_mock_call) _, delete_query_args = args deleted_domains.append( delete_query_args["domain_name"]) self.assertEqual(1, len(delete_queries)) self.assertEqual(['xk2.cd'], deleted_domains) self.assertTrue(self.sc.session.execute.called) def test_session(self): session = self.sc.session self.assertNotEqual(session, None) def test_domain_exists_elsewhere_true(self): self.mock_session.execute.return_value = [ { 'service_id': 'service_id', 'project_id': 'project_id', 'domain_name': 'domain_name' } ] self.assertTrue( self.sc.domain_exists_elsewhere('domain_name', 'new_service_id')) def test_domain_exists_elsewhere_false(self): self.mock_session.execute.return_value = [ { 'service_id': 'service_id', 'project_id': 'project_id', 'domain_name': 'domain_name' } ] self.assertFalse( self.sc.domain_exists_elsewhere('domain_name', 'service_id')) def test_domain_exists_elsewhere_no_results(self): self.mock_session.execute.return_value = [] self.assertFalse( self.sc.domain_exists_elsewhere('domain_name', 'new_service_id')) def test_domain_exists_elsewhere_value_error(self): self.mock_session.execute.side_effect = ValueError( 'Mock -- Something went wrong!' ) self.assertFalse( self.sc.domain_exists_elsewhere('domain_name', 'new_service_id')) def test_get_service_count_positive(self): self.mock_session.execute.return_value = [ { 'count': 1 } ] self.assertEqual(1, self.sc.get_service_count('project_id')) @ddt.file_data('data_list_services.json') def test_get_services_marker_not_none(self, data): self.mock_session.execute.return_value = data results = self.sc.get_services('project_id', uuid.uuid4(), 1) self.assertEqual(data[0]["project_id"], results[0].project_id) def test_get_services_by_status_positive(self): self.mock_session.execute.return_value = [ {'service_id': 1}, {'service_id': 2}, {'service_id': 3} ] self.assertEqual( [ {'service_id': '1'}, {'service_id': '2'}, {'service_id': '3'} ], self.sc.get_services_by_status('project_id') ) def test_delete_services_by_status_positive(self): try: self.sc.delete_services_by_status( 'project_id', uuid.uuid4(), 'status' ) except Exception as e: self.fail(e) def test_get_domains_by_provider_url_positive(self): self.mock_session.execute.return_value = [ {'domain_name': 'www.xyz.com'}, ] self.assertEqual([{'domain_name': 'www.xyz.com'}], self.sc.get_domains_by_provider_url('provider_url')) def test_delete_provider_url_positive(self): try: self.sc.delete_provider_url('provider_url', 'domain_name') except Exception as e: self.fail(e) def test_get_service_limit_positive(self): self.mock_session.execute.return_value = [ {'project_limit': 999} ] self.assertEqual(999, self.sc.get_service_limit('project_id')) def test_get_service_limit_empty_result(self): self.mock_session.execute.return_value = [] self.assertEqual( self.sc._driver.max_services_conf.max_services_per_project, self.sc.get_service_limit('project_id')) def test_get_service_limit_value_error(self): self.mock_session.execute.side_effect = ValueError( 'Mock -- Something went wrong!' 
) self.assertEqual( self.sc._driver.max_services_conf.max_services_per_project, self.sc.get_service_limit('project_id') ) def test_set_service_limit_positive(self): try: self.sc.set_service_limit('project_id', 'project_limit') except Exception as e: self.fail(e) @ddt.file_data('data_list_services.json') def test_get_service_details_by_domain_name(self, data): service_id = uuid.uuid4() self.mock_session.execute.side_effect = [ [{ 'project_id': 'project_id', 'service_id': service_id, 'domain_name': 'domain_name' }], [data[0]] ] results = self.sc.get_service_details_by_domain_name('domain_name') self.assertEqual(data[0]["project_id"], results.project_id) @ddt.file_data('data_list_services.json') def test_get_service_details_by_domain_name_domain_not_present( self, data): self.mock_session.execute.side_effect = [ [{ 'project_id': 'proj_id', # differs from arg to func 'service_id': uuid.uuid4(), 'domain_name': 'domain_name' }], [data[0]] ] with testtools.ExpectedException(ValueError): self.sc.get_service_details_by_domain_name( 'domain_name', project_id='project_id' ) @ddt.file_data('data_provider_details.json') def test_set_service_provider_details(self, data): service_id = uuid.uuid4() def mock_execute_side_effect(*args): if args[0].query_string == services.CQL_GET_PROVIDER_DETAILS: return [{'provider_details': data}] else: return None self.mock_session.execute.side_effect = mock_execute_side_effect self.sc.set_service_provider_details( 'project_id', service_id, 'deployed' ) [ update_service_status, get_provider_details, _, update_provider_details, _, _, _, _, _, ] = self.mock_session.execute.mock_calls self.assertEqual(services.CQL_SET_SERVICE_STATUS, update_service_status[1][0].query_string) self.assertEqual(services.CQL_GET_PROVIDER_DETAILS, get_provider_details[1][0].query_string) self.assertEqual(services.CQL_UPDATE_PROVIDER_DETAILS, update_provider_details[1][0].query_string)
37.565276
79
0.585361
2928
27911
5.241462
0.09597
0.094807
0.032254
0.044439
0.806477
0.759692
0.73024
0.708021
0.683195
0.664755
0
0.005505
0.32317
27911
742
80
37.615903
0.806892
0.082727
0
0.599297
0
0
0.093482
0.024741
0
0
0
0.001348
0.082601
1
0.066784
false
0
0.024605
0
0.100176
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
d133474cc6f160b8e0fcb521d4ea0851d072aadc
297
py
Python
passbook/sources/saml/apps.py
fossabot/passbook
cba17f6659404445ac3025f11657d89368cc8b4f
[ "MIT" ]
null
null
null
passbook/sources/saml/apps.py
fossabot/passbook
cba17f6659404445ac3025f11657d89368cc8b4f
[ "MIT" ]
null
null
null
passbook/sources/saml/apps.py
fossabot/passbook
cba17f6659404445ac3025f11657d89368cc8b4f
[ "MIT" ]
null
null
null
"""Passbook SAML app config""" from django.apps import AppConfig class PassbookSourceSAMLConfig(AppConfig): """passbook saml_idp app config""" name = "passbook.sources.saml" label = "passbook_sources_saml" verbose_name = "passbook Sources.SAML" mountpoint = "source/saml/"
22.846154
42
0.717172
33
297
6.333333
0.545455
0.215311
0.272727
0.220096
0
0
0
0
0
0
0
0
0.171717
297
12
43
24.75
0.849594
0.178451
0
0
0
0
0.321888
0.180258
0
0
0
0
0
1
0
false
0.666667
0.166667
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
5
d15db9ba571837dceb934100ca10abecac1623c4
123
py
Python
cardapio/urls.py
SquadUninove/API
825c791b9494f4b0116ddf6ddc74f7d89e07faf3
[ "MIT" ]
null
null
null
cardapio/urls.py
SquadUninove/API
825c791b9494f4b0116ddf6ddc74f7d89e07faf3
[ "MIT" ]
null
null
null
cardapio/urls.py
SquadUninove/API
825c791b9494f4b0116ddf6ddc74f7d89e07faf3
[ "MIT" ]
1
2021-09-21T23:51:53.000Z
2021-09-21T23:51:53.000Z
from django.urls import include, path

from .views import CardapioDetail, CardapioList, CardapioUpdate, CardapioDelete
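The file stops at the imports; a typical urlpatterns block wiring these class-based views might look like the sketch below. The route shapes and names are assumptions, not repo code.

urlpatterns = [
    path("cardapios/", CardapioList.as_view(), name="cardapio-list"),
    path("cardapios/<int:pk>/", CardapioDetail.as_view(), name="cardapio-detail"),
    path("cardapios/<int:pk>/editar/", CardapioUpdate.as_view(), name="cardapio-update"),
    path("cardapios/<int:pk>/apagar/", CardapioDelete.as_view(), name="cardapio-delete"),
]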
17.571429
80
0.813008
13
123
7.692308
0.846154
0
0
0
0
0
0
0
0
0
0
0
0.138211
123
6
81
20.5
0.943396
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
0f4eb63b6f0f55ef2385538911f88b4bd21b68eb
21
py
Python
python/testData/completion/moduleWithNoImportablePath/-/m7.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/completion/moduleWithNoImportablePath/-/m7.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/completion/moduleWithNoImportablePath/-/m7.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
class MyFoo:
    pass
10.5
12
0.666667
3
21
4.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.285714
21
2
13
10.5
0.933333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
7e344ae675629c2f04b16aab0526dba27d6c8623
4470
py
Python
plasma/filters/functional/gradient.py
jambobjambo/Deep-Edit
a5e3a474a497b9788fab8f96555763bcf9622e66
[ "Apache-2.0" ]
null
null
null
plasma/filters/functional/gradient.py
jambobjambo/Deep-Edit
a5e3a474a497b9788fab8f96555763bcf9622e66
[ "Apache-2.0" ]
null
null
null
plasma/filters/functional/gradient.py
jambobjambo/Deep-Edit
a5e3a474a497b9788fab8f96555763bcf9622e66
[ "Apache-2.0" ]
null
null
null
#
#   Plasma
#   Copyright (c) 2020 Homedeck, LLC.
#

from torch import cat, linspace, meshgrid, Tensor
from torch.nn.functional import interpolate

def radial_gradient (input: Tensor, radius: Tensor) -> Tensor:
    """
    Create a radial gradient which starts from the center of the given image.

    We use the equation: f(x) = 2|cx|^3 - 3|cx|^2 + 1 where c = 1 / radius.
    This operation is differentiable w.r.t the radius.

    Parameters:
        input (Tensor): Input image with shape (N,C,H,W) in range [-1., 1.].
        radius (Tensor | float): Normalized radius with shape (N,1) in range [0., 1.].

    Returns:
        Tensor: Gradient mask with shape (N,1,H,W) in range [0., 1.].
    """
    samples, _, height, width = input.shape
    extent = min(width, height)
    hg, wg = meshgrid(linspace(-1., 1., extent), linspace(-1., 1., extent))
    hg = hg.repeat(samples, 1, 1, 1).to(input.device)
    wg = wg.repeat(samples, 1, 1, 1).to(input.device)
    field = cat([hg, wg], dim=1)
    field = field.norm(dim=1, p=2, keepdim=True)
    field = field.flatten(start_dim=1) / radius
    field = field.view(-1, 1, extent, extent).clamp(max=1.)
    mask = 2 * field.abs().pow(3) - 3 * field.abs().pow(2) + 1
    mask = interpolate(mask, size=(height, width), mode="bilinear", align_corners=False)
    return mask

def top_bottom_gradient (input: Tensor, length: Tensor):
    """
    Create a vertical gradient which starts from the top of the given image.

    This operation is differentiable w.r.t the length.

    Parameters:
        input (Tensor): Input image with shape (N,C,H,W) in range [-1., 1.].
        length (Tensor | float): Normalized length with shape (N,1) in range [0., 1.].

    Returns:
        Tensor: Gradient mask with shape (N,1,H,W) in range [0., 1.].
    """
    samples, _, height, width = input.shape
    field = linspace(0., 1., height).to(input.device)
    field = field.repeat(samples, 1, width, 1).permute(0, 1, 3, 2).contiguous()
    field = field.flatten(start_dim=1) / length
    field = field.view(-1, 1, height, width).clamp(max=1.)
    field = 1. - field
    return field

def bottom_top_gradient (input: Tensor, length: Tensor) -> Tensor:
    """
    Create a vertical gradient which starts from the bottom of the given image.

    This operation is differentiable w.r.t the length.

    Parameters:
        input (Tensor): Input image with shape (N,C,H,W) in range [-1., 1.].
        length (Tensor | float): Normalized length with shape (N,1) in range [0., 1.].

    Returns:
        Tensor: Gradient mask with shape (N,1,H,W) in range [0., 1.].
    """
    samples, _, height, width = input.shape
    field = linspace(1., 0., height).to(input.device)
    field = field.repeat(samples, 1, width, 1).permute(0, 1, 3, 2).contiguous()
    field = field.flatten(start_dim=1) / length
    field = field.view(-1, 1, height, width).clamp(max=1.)
    field = 1. - field
    return field

def left_right_gradient (input: Tensor, length: Tensor) -> Tensor:
    """
    Create a horizontal gradient which starts from the left of the given image.

    This operation is differentiable w.r.t the length.

    Parameters:
        input (Tensor): Input image with shape (N,C,H,W) in range [-1., 1.].
        length (Tensor | float): Normalized length with shape (N,1) in range [0., 1.].

    Returns:
        Tensor: Gradient mask with shape (N,1,H,W) in range [0., 1.].
    """
    samples, _, height, width = input.shape
    field = linspace(0., 1., width).to(input.device)
    field = field.repeat(samples, 1, height, 1)
    field = field.flatten(start_dim=1) / length
    field = field.view(-1, 1, height, width).clamp(max=1.)
    field = 1. - field
    return field

def right_left_gradient (input: Tensor, length: Tensor) -> Tensor:
    """
    Create a horizontal gradient which starts from the right of the given image.

    This operation is differentiable w.r.t the length.

    Parameters:
        input (Tensor): Input image with shape (N,C,H,W) in range [-1., 1.].
        length (Tensor | float): Normalized length in range [0., 1.].

    Returns:
        Tensor: Gradient mask with shape (N,1,H,W) in range [0., 1.].
    """
    samples, _, height, width = input.shape
    field = linspace(1., 0., width).to(input.device)
    field = field.repeat(samples, 1, height, 1)
    field = field.flatten(start_dim=1) / length
    field = field.view(-1, 1, height, width).clamp(max=1.)
    field = 1. - field
    return field
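A quick usage sketch for the masks above; the shapes follow the docstrings.

import torch

image = torch.rand(1, 3, 256, 384) * 2. - 1.   # (N,C,H,W) in [-1., 1.]
radius = torch.tensor([[0.5]])                 # (N,1) in [0., 1.]
mask = radial_gradient(image, radius)          # -> (1, 1, 256, 384) in [0., 1.]
vignette = image * mask                        # e.g. fade the image toward its edges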
37.881356
88
0.630425
672
4470
4.16369
0.145833
0.011437
0.050036
0.032166
0.794496
0.77055
0.761258
0.761258
0.716941
0.683345
0
0.033275
0.226846
4470
118
89
37.881356
0.776331
0.438926
0
0.531915
0
0
0.003475
0
0
0
0
0
0
1
0.106383
false
0
0.042553
0
0.255319
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
7e997135a4e09d03fad952b043a2b08252042ceb
129
py
Python
defter/feature/new/backend.py
BarryWangQwQ/Defter
e5e9c54525e77510268052e40a31cda4ec1ee7b4
[ "MIT" ]
5
2021-08-12T11:28:43.000Z
2021-11-22T10:01:18.000Z
defter/feature/new/backend.py
BarryWangQwQ/Defter
e5e9c54525e77510268052e40a31cda4ec1ee7b4
[ "MIT" ]
null
null
null
defter/feature/new/backend.py
BarryWangQwQ/Defter
e5e9c54525e77510268052e40a31cda4ec1ee7b4
[ "MIT" ]
null
null
null
from defter import backend

backend.init(path='src', extensions=['xxx.js'])

# TODO Coding here :-)

backend.start('xxx.html')
12.9
47
0.689922
18
129
4.944444
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.131783
129
9
48
14.333333
0.794643
0.155039
0
0
0
0
0.158879
0
0
0
0
0.111111
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
1
0
1
0
0
0
0
5
7ea21c377bb1d5b6a5996b225843a97c27bed064
285
py
Python
mapper.py
marcos-tulio/python-nes-emulator
6ef18d702d6c173926da035700ebd9c59209dd0a
[ "Apache-2.0" ]
1
2021-04-29T10:45:04.000Z
2021-04-29T10:45:04.000Z
mapper.py
marcos-tulio/python-nes-emulator
6ef18d702d6c173926da035700ebd9c59209dd0a
[ "Apache-2.0" ]
null
null
null
mapper.py
marcos-tulio/python-nes-emulator
6ef18d702d6c173926da035700ebd9c59209dd0a
[ "Apache-2.0" ]
null
null
null
class Mapper():
    prg_banks = 0
    chr_banks = 0

    def __init__(self, prg_banks, chr_banks):
        pass

    def cpuMapRead(self, addr):
        pass

    def cpuMapWrite(self, addr):
        pass

    def ppuMapRead(self, addr):
        pass

    def ppuMapWrite(self, addr):
        pass

    def reset(self):
        pass
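Every method above is a stub, so here is an illustrative subclass (not from this repo): NROM / mapper 000, the simplest NES mapper, which only mirrors the PRG window. The return-the-mapped-offset convention is an assumption.

class Mapper000(Mapper):
    def __init__(self, prg_banks, chr_banks):
        self.prg_banks = prg_banks
        self.chr_banks = chr_banks

    def cpuMapRead(self, addr):
        # 16 KiB PRG ROM is mirrored across the 32 KiB CPU window at $8000-$FFFF
        if 0x8000 <= addr <= 0xFFFF:
            return addr & (0x7FFF if self.prg_banks > 1 else 0x3FFF)
        return None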
17.8125
50
0.652632
40
285
4.45
0.4
0.196629
0.269663
0.337079
0
0
0
0
0
0
0
0.009302
0.245614
285
15
51
19
0.818605
0
0
0
0
0
0
0
0
0
0
0
0
1
0.666667
false
0.666667
0
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
5
0e59452f22c63100aed896e28a5620041d1bd3af
28
py
Python
neurolib/models/aln/__init__.py
leonidas228/neurolib
a7aa6f487db73c3b64471007ac5a965da4a65f9a
[ "MIT" ]
258
2020-01-26T14:38:09.000Z
2022-03-31T14:54:04.000Z
neurolib/models/aln/__init__.py
leonidas228/neurolib
a7aa6f487db73c3b64471007ac5a965da4a65f9a
[ "MIT" ]
172
2020-01-27T11:02:28.000Z
2022-03-22T22:25:38.000Z
neurolib/models/aln/__init__.py
leonidas228/neurolib
a7aa6f487db73c3b64471007ac5a965da4a65f9a
[ "MIT" ]
49
2020-02-04T08:34:44.000Z
2022-03-28T09:29:12.000Z
from .model import ALNModel
14
27
0.821429
4
28
5.75
1
0
0
0
0
0
0
0
0
0
0
0
0.142857
28
1
28
28
0.958333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
0e5bb58bd43198097f117ebea2f317a7603fc102
53
py
Python
testproj/goodbye_world.py
shonteag/librapy
2f647a64204c39e6ab1ecd65696bec6fa8c23e9f
[ "MIT" ]
null
null
null
testproj/goodbye_world.py
shonteag/librapy
2f647a64204c39e6ab1ecd65696bec6fa8c23e9f
[ "MIT" ]
1
2018-09-28T02:29:34.000Z
2018-09-28T02:29:34.000Z
testproj/goodbye_world.py
shonteag/librapy
2f647a64204c39e6ab1ecd65696bec6fa8c23e9f
[ "MIT" ]
null
null
null
print "Goodbye, bitter world. I shall sys.exit() now"
53
53
0.735849
9
53
4.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.132075
53
1
53
53
0.847826
0
0
0
0
0
0.833333
0
0
0
0
0
0
0
null
null
0
0
null
null
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
5
0e8aa461f61864d0ce948804527138b272fd946b
23
py
Python
molsysmt/native/old/former_topology/elements/__init__.py
dprada/molsysmt
83f150bfe3cfa7603566a0ed4aed79d9b0c97f5d
[ "MIT" ]
3
2020-06-02T03:55:52.000Z
2022-03-21T04:43:52.000Z
molsysmt/native/old/former_topology/elements/__init__.py
dprada/molsysmt
83f150bfe3cfa7603566a0ed4aed79d9b0c97f5d
[ "MIT" ]
28
2020-06-24T00:55:53.000Z
2021-07-16T22:09:19.000Z
molsysmt/native/old/former_topology/elements/__init__.py
dprada/molsysmt
83f150bfe3cfa7603566a0ed4aed79d9b0c97f5d
[ "MIT" ]
1
2021-06-17T18:55:25.000Z
2021-06-17T18:55:25.000Z
from .makers import *
7.666667
21
0.695652
3
23
5.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.217391
23
2
22
11.5
0.888889
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
7ec117210d655992543a5f28ba6e8e8aa1da2845
113
py
Python
controllers/home.py
Zhanelya/sprks
917256da5c994f2b9f2352c126eb8377d5728d85
[ "MIT" ]
1
2021-11-09T00:53:31.000Z
2021-11-09T00:53:31.000Z
controllers/home.py
Zhanelya/sprks
917256da5c994f2b9f2352c126eb8377d5728d85
[ "MIT" ]
null
null
null
controllers/home.py
Zhanelya/sprks
917256da5c994f2b9f2352c126eb8377d5728d85
[ "MIT" ]
null
null
null
from localsys.environment import render

import web


class home:
    def GET(self):
        return render.home()
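A hedged sketch of how web.py usually mounts such a controller; the URL map and entry point below are assumptions, not part of this file.

urls = ('/', 'home')                     # route '/' to the home class above
app = web.application(urls, globals())

if __name__ == '__main__':
    app.run()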
14.125
39
0.699115
15
113
5.266667
0.8
0
0
0
0
0
0
0
0
0
0
0
0.230089
113
8
40
14.125
0.908046
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.4
0.2
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
5
7efaed9668dcac878a5d6aa1db327daf6e4d0649
161
py
Python
CodeWars/8 Kyu/repeatIt.py
anubhab-code/Competitive-Programming
de28cb7d44044b9e7d8bdb475da61e37c018ac35
[ "MIT" ]
null
null
null
CodeWars/8 Kyu/repeatIt.py
anubhab-code/Competitive-Programming
de28cb7d44044b9e7d8bdb475da61e37c018ac35
[ "MIT" ]
null
null
null
CodeWars/8 Kyu/repeatIt.py
anubhab-code/Competitive-Programming
de28cb7d44044b9e7d8bdb475da61e37c018ac35
[ "MIT" ]
null
null
null
def repeat_it(string, n):
    # Return the string repeated n times; anything else is rejected.
    if isinstance(string, str):
        return string * n
    else:
        return 'Not a string'
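Quick usage check:

print(repeat_it("ab", 3))   # ababab
print(repeat_it(1234, 2))   # Not a string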
23
29
0.559006
22
161
4.045455
0.590909
0.235955
0.269663
0.404494
0
0
0
0
0
0
0
0
0.329193
161
7
29
23
0.824074
0
0
0
0
0
0.074074
0
0
0
0
0
0
1
0.142857
false
0
0
0
0.428571
0.285714
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
7d2b2dea7276bbeb977be8e8148dbd31dcff807c
76
py
Python
openslides_backend/services/database/__init__.py
ThomasJunk/openslides-backend
798ed65d1490bf93ed3bd870cfc6f2a8c6f47986
[ "MIT" ]
null
null
null
openslides_backend/services/database/__init__.py
ThomasJunk/openslides-backend
798ed65d1490bf93ed3bd870cfc6f2a8c6f47986
[ "MIT" ]
null
null
null
openslides_backend/services/database/__init__.py
ThomasJunk/openslides-backend
798ed65d1490bf93ed3bd870cfc6f2a8c6f47986
[ "MIT" ]
null
null
null
from .adapter import Adapter  # noqa
from .engine import HTTPEngine  # noqa
25.333333
38
0.763158
10
76
5.8
0.6
0
0
0
0
0
0
0
0
0
0
0
0.184211
76
2
39
38
0.935484
0.118421
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
7d2fc62ef1b9b6a7ea32ee8a8093f1a9b2991a5e
61
py
Python
src/pdfui/error/error.py
ichiro-kazusa/PDFCon
529c22145bfd20919b015b5ba70e8bab33feed01
[ "MIT" ]
null
null
null
src/pdfui/error/error.py
ichiro-kazusa/PDFCon
529c22145bfd20919b015b5ba70e8bab33feed01
[ "MIT" ]
null
null
null
src/pdfui/error/error.py
ichiro-kazusa/PDFCon
529c22145bfd20919b015b5ba70e8bab33feed01
[ "MIT" ]
null
null
null
class SelectionNotContinuousException(Exception):
    pass
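An illustrative way the exception might be raised when validating a PDF page selection; the helper below is an assumption, not repo code.

def check_selection(pages):
    # a selection like [2, 3, 5] has a gap and is rejected
    if sorted(pages) != list(range(min(pages), max(pages) + 1)):
        raise SelectionNotContinuousException(f"non-contiguous selection: {pages}")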
12.2
49
0.803279
4
61
12.25
1
0
0
0
0
0
0
0
0
0
0
0
0.147541
61
4
50
15.25
0.942308
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
7d312a7fc2915c47511e151dcf74c1c9a5e6c5ff
43
py
Python
tests/test_entry.py
regginold/drosophila-courtship
a40249ee538a73f66b25d55c47e06f8505ba10f2
[ "MIT" ]
4
2019-05-01T19:14:45.000Z
2020-04-23T04:31:21.000Z
tests/test_entry.py
benshahary/drosophila-courtship
a40249ee538a73f66b25d55c47e06f8505ba10f2
[ "MIT" ]
null
null
null
tests/test_entry.py
benshahary/drosophila-courtship
a40249ee538a73f66b25d55c47e06f8505ba10f2
[ "MIT" ]
1
2020-02-26T17:14:03.000Z
2020-02-26T17:14:03.000Z
from context import entry

entry.main()
10.75
26
0.72093
6
43
5.166667
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.209302
43
4
27
10.75
0.911765
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
adb512ad7ed9506241105faf4baaa31865c22fe1
6261
py
Python
acouchbase/tests/cases/mutation_tokens_t.py
dfresh613/couchbase-python-client
c77af56490ed4c6d364fcf8fc1a374570de0239b
[ "Apache-2.0" ]
189
2015-01-07T18:34:31.000Z
2022-03-21T17:41:56.000Z
acouchbase/tests/cases/mutation_tokens_t.py
dfresh613/couchbase-python-client
c77af56490ed4c6d364fcf8fc1a374570de0239b
[ "Apache-2.0" ]
24
2015-05-19T14:00:16.000Z
2022-03-16T22:01:30.000Z
acouchbase/tests/cases/mutation_tokens_t.py
dfresh613/couchbase-python-client
c77af56490ed4c6d364fcf8fc1a374570de0239b
[ "Apache-2.0" ]
60
2015-03-10T22:12:50.000Z
2022-03-07T21:57:40.000Z
from unittest import SkipTest
from functools import wraps

from nose.tools import nottest

from acouchbase.cluster import (Cluster, get_event_loop, close_event_loop)
from couchbase_tests.async_base import AsyncioTestCase
from couchbase.exceptions import DocumentNotFoundException
import couchbase.subdocument as SD


@nottest
def async_test(func):
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        return self.loop.run_until_complete(func(self, *args, **kwargs))

    return wrapper


class AcouchbaseMutationTokensEnabledTests(AsyncioTestCase):
    CONTENT = {"some": "content"}
    KEY = "imakey"
    NOKEY = "somerandomkey"

    @classmethod
    def setUpClass(cls) -> None:
        super(AcouchbaseMutationTokensEnabledTests, cls).setUpClass(
            get_event_loop(), cluster_class=Cluster)

    @classmethod
    def tearDownClass(cls) -> None:
        super(AcouchbaseMutationTokensEnabledTests, cls).tearDownClass()
        close_event_loop()

    def setUp(self):
        super(AcouchbaseMutationTokensEnabledTests, self).setUp()
        self.loop.run_until_complete(self.initialize())

    async def initialize(self):
        # retry just in case doc is locked from previous test
        await self.try_n_times_async(1, 3, self.collection.upsert,
                                     self.KEY, self.CONTENT)
        # be sure NOKEY isn't in there
        try:
            await self.collection.remove(self.NOKEY)
        except DocumentNotFoundException:
            pass
        # make sure NOKEY is gone
        await self.try_n_times_till_exception_async(
            1, 1, self.collection.get, self.NOKEY)

    def verify_mutation_tokens(self, result):
        mutation_token = result.mutation_token()
        self.assertTrue(mutation_token)
        vb, uuid, seq, bktname = mutation_token.as_tuple()
        self.assertIsInstance(vb, int)
        self.assertIsInstance(uuid, int)
        self.assertIsInstance(seq, int)
        self.assertEqual(self.bucket_name, bktname)

    @async_test
    async def test_mutation_tokens_upsert(self):
        result = await self.collection.upsert(self.NOKEY, {"some": "thing"})
        self.verify_mutation_tokens(result)

    @async_test
    async def test_mutation_tokens_insert(self):
        result = await self.collection.insert(self.NOKEY, {"some": "thing"})
        self.verify_mutation_tokens(result)

    @async_test
    async def test_mutation_tokens_replace(self):
        result = await self.collection.replace(self.KEY, {"some": "other content"})
        self.verify_mutation_tokens(result)

    @async_test
    async def test_mutation_tokens_remove(self):
        result = await self.collection.remove(self.KEY)
        self.verify_mutation_tokens(result)

    def test_mutation_tokens_touch(self):
        raise SkipTest('Pending mutation token implementation for touch')
        # result = await self.collection.touch(self.KEY, timedelta(seconds=3))
        # self.verify_mutation_tokens(result)

    @async_test
    async def test_mutation_tokens_mutate_in(self):
        async def cas_matches(key, cas):
            result = await self.collection.get(key)
            if result.cas == cas:
                return result
            raise Exception("nope")

        res = await self.collection.upsert(
            self.KEY, {"a": "aaa", "b": {"c": {"d": "yo!"}}})
        await self.try_n_times_async(10, 3, cas_matches, self.KEY, res.cas)

        result = await self.collection.mutate_in(
            self.KEY, (SD.upsert("c", "ccc"), SD.replace("b", "XXX"),))
        self.verify_mutation_tokens(result)


class AcouchbaseMutationTokensDisabledTests(AsyncioTestCase):
    CONTENT = {"some": "content"}
    KEY = "imakey"
    NOKEY = "somerandomkey"

    @classmethod
    def setUpClass(cls) -> None:
        super(AcouchbaseMutationTokensDisabledTests, cls).setUpClass(
            get_event_loop(), cluster_class=Cluster, enable_mutation_tokens=False)

    @classmethod
    def tearDownClass(cls) -> None:
        super(AcouchbaseMutationTokensDisabledTests, cls).tearDownClass()
        close_event_loop()

    def setUp(self):
        super(AcouchbaseMutationTokensDisabledTests, self).setUp()
        self.loop.run_until_complete(self.initialize())

    async def initialize(self):
        # retry just in case doc is locked from previous test
        await self.try_n_times_async(1, 3, self.collection.upsert,
                                     self.KEY, self.CONTENT)
        # be sure NOKEY isn't in there
        try:
            await self.collection.remove(self.NOKEY)
        except DocumentNotFoundException:
            pass
        # make sure NOKEY is gone
        await self.try_n_times_till_exception_async(
            1, 1, self.collection.get, self.NOKEY)

    @async_test
    async def test_mutinfo_upsert(self):
        result = await self.collection.upsert(self.NOKEY, {"some": "thing"})
        self.assertIsNone(result.mutation_token())

    @async_test
    async def test_mutinfo_insert(self):
        result = await self.collection.insert(self.NOKEY, {"some": "thing"})
        self.assertIsNone(result.mutation_token())

    @async_test
    async def test_mutinfo_replace(self):
        result = await self.collection.replace(self.KEY, {"some": "other content"})
        self.assertIsNone(result.mutation_token())

    @async_test
    async def test_mutinfo_remove(self):
        result = await self.collection.remove(self.KEY)
        self.assertIsNone(result.mutation_token())

    def test_mutation_tokens_touch(self):
        raise SkipTest('Pending mutation token implementation for touch')
        # result = await self.collection.touch(self.KEY, timedelta(seconds=3))
        # self.assertIsNone(result.mutation_token())

    @async_test
    async def test_mutation_tokens_mutate_in(self):
        async def cas_matches(key, cas):
            result = await self.collection.get(key)
            if result.cas == cas:
                return result
            raise Exception("nope")

        res = await self.collection.upsert(
            self.KEY, {"a": "aaa", "b": {"c": {"d": "yo!"}}})
        await self.try_n_times_async(10, 3, cas_matches, self.KEY, res.cas)

        result = await self.collection.mutate_in(
            self.KEY, (SD.upsert("c", "ccc"), SD.replace("b", "XXX"),))
        self.assertIsNone(result.mutation_token())
36.614035
108
0.674493
738
6261
5.555556
0.172087
0.052683
0.083415
0.085366
0.80878
0.739512
0.713659
0.705122
0.683659
0.660732
0
0.003275
0.219773
6261
170
109
36.829412
0.836029
0.06804
0
0.656
0
0
0.046016
0
0
0
0
0
0.08
1
0.088
false
0.016
0.056
0.008
0.24
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
adf883127657dd0135e3b583897f92264df82a26
31
py
Python
jarbas_hive_mind/discovery/__init__.py
emphasize/HiveMind-core
e476e9341f817c726b9f68ca094cdd837fd38bd5
[ "Apache-2.0" ]
43
2020-11-23T17:53:47.000Z
2022-02-07T13:30:57.000Z
jarbas_hive_mind/discovery/__init__.py
emphasize/HiveMind-core
e476e9341f817c726b9f68ca094cdd837fd38bd5
[ "Apache-2.0" ]
24
2020-11-10T07:53:09.000Z
2021-12-13T22:58:50.000Z
jarbas_hive_mind/discovery/__init__.py
emphasize/HiveMind-core
e476e9341f817c726b9f68ca094cdd837fd38bd5
[ "Apache-2.0" ]
5
2020-12-26T00:44:29.000Z
2021-09-14T16:38:51.000Z
from HiveMind_presence import *
31
31
0.870968
4
31
6.5
1
0
0
0
0
0
0
0
0
0
0
0
0.096774
31
1
31
31
0.928571
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
bc108c4d00651c3767583925e6647a1ae92e3e72
177
py
Python
fasttext_python/fasttext_python/FastTextModel.py
Limmen/data_science
337b105cda0c58f2cb4200b553a47e0311fe32c1
[ "MIT" ]
null
null
null
fasttext_python/fasttext_python/FastTextModel.py
Limmen/data_science
337b105cda0c58f2cb4200b553a47e0311fe32c1
[ "MIT" ]
null
null
null
fasttext_python/fasttext_python/FastTextModel.py
Limmen/data_science
337b105cda0c58f2cb4200b553a47e0311fe32c1
[ "MIT" ]
null
null
null
class FastTextModel(object):

    def __init__(self, word_index, raw_word_vectors):
        pass

    def save_to_word2vec_format(self, word_vectors, output_path):
        pass
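Both methods are empty stubs. As a hedged sketch, the word2vec text format they presumably target is a "<vocab_size> <dim>" header followed by "word v1 v2 ..." rows; this assumes word_vectors maps words to float sequences, which the repo does not confirm.

def save_to_word2vec_format(word_vectors, output_path):
    dim = len(next(iter(word_vectors.values())))
    with open(output_path, "w") as f:
        f.write(f"{len(word_vectors)} {dim}\n")   # header: vocab size and dimension
        for word, vec in word_vectors.items():
            f.write(word + " " + " ".join(str(x) for x in vec) + "\n")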
22.125
65
0.711864
23
177
4.956522
0.73913
0.140351
0
0
0
0
0
0
0
0
0
0.007194
0.214689
177
7
66
25.285714
0.81295
0
0
0.4
0
0
0
0
0
0
0
0
0
1
0.4
false
0.4
0
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
5
bc10ae8d84d6db8bac705dc2264a1268e5f9b334
290
py
Python
ursina/shaders/__init__.py
clayboone/ursina
9aebd9403b924af260fbefbfd7cef5ad82feeff7
[ "MIT" ]
1
2020-09-04T14:32:33.000Z
2020-09-04T14:32:33.000Z
ursina/shaders/__init__.py
clayboone/ursina
9aebd9403b924af260fbefbfd7cef5ad82feeff7
[ "MIT" ]
null
null
null
ursina/shaders/__init__.py
clayboone/ursina
9aebd9403b924af260fbefbfd7cef5ad82feeff7
[ "MIT" ]
1
2020-09-04T14:32:41.000Z
2020-09-04T14:32:41.000Z
from ursina.shaders.normals import normals_shader

# post processing effects
from ursina.shaders.camera_vertical_blur import camera_vertical_blur_shader
from ursina.shaders.camera_contrast import camera_contrast_shader
from ursina.shaders.camera_grayscale import camera_grayscale_shader
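Illustrative use in an ursina scene; the entity and camera wiring shown here is an assumption about the engine's API, not part of this __init__.

from ursina import Ursina, Entity, camera
from ursina.shaders import normals_shader, camera_grayscale_shader

app = Ursina()
Entity(model='cube', shader=normals_shader)   # per-entity shader (assumed kwarg)
camera.shader = camera_grayscale_shader       # full-screen post-processing (assumed)
app.run()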
29
75
0.886207
39
290
6.282051
0.358974
0.163265
0.277551
0.281633
0.236735
0
0
0
0
0
0
0
0.082759
290
9
76
32.222222
0.921053
0.07931
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
bc39f953581d7288eff2833151be41cc9b2412e8
166
py
Python
programming-language/cases/python/examples/py.test/conventions/mymodule_test.py
wdv4758h/notes
60fa483961245ec5bb264d3f28a885fb82a1c25e
[ "Unlicense" ]
136
2015-06-15T13:26:40.000Z
2022-03-03T07:47:31.000Z
programming-language/cases/python/examples/py.test/conventions/test_mymodule.py
wdv4758h/notes
60fa483961245ec5bb264d3f28a885fb82a1c25e
[ "Unlicense" ]
82
2017-01-06T06:32:55.000Z
2020-09-03T03:34:24.000Z
programming-language/cases/python/examples/py.test/conventions/mymodule_test.py
wdv4758h/notes
60fa483961245ec5bb264d3f28a885fb82a1c25e
[ "Unlicense" ]
18
2015-12-04T04:02:44.000Z
2022-02-24T03:48:57.000Z
#!/usr/bin/env python

class TestMyModule:
    '''
    ``Test`` prefix
    '''

    def test_f(self):
        '''
        ``test_`` prefix
        '''
        pass
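For context, this *_test.py file is collected because pytest's stock discovery rules match files named test_*.py or *_test.py, classes named Test* (without an __init__), and functions or methods named test_*. The same defaults can be pinned explicitly:

# pytest.ini (explicit restatement of the defaults this example relies on)
[pytest]
python_files = test_*.py *_test.py
python_classes = Test*
python_functions = test_*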
11.066667
24
0.421687
15
166
4.533333
0.8
0.294118
0
0
0
0
0
0
0
0
0
0
0.403614
166
14
25
11.857143
0.686869
0.319277
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0.333333
0
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
5
70c5d17adc5066cff6466b9a2922fd757dee6148
11311
py
Python
tests/test_skunky.py
gvauter/skunky
d625d800d533162289361da0d27bbd3feb837b8b
[ "Apache-2.0" ]
44
2018-03-11T22:12:32.000Z
2021-02-16T22:30:21.000Z
tests/test_skunky.py
gvauter/skunky
d625d800d533162289361da0d27bbd3feb837b8b
[ "Apache-2.0" ]
2
2018-03-11T22:56:42.000Z
2018-12-12T13:22:20.000Z
tests/test_skunky.py
gvauter/skunky
d625d800d533162289361da0d27bbd3feb837b8b
[ "Apache-2.0" ]
6
2018-08-09T23:38:17.000Z
2020-08-10T03:59:39.000Z
#!/usr/bin/env python import boto3 from botocore import exceptions import json import logging import time import unittest from mock import patch, Mock, PropertyMock, MagicMock from skunky.aws.lambdakickass import LambdaKickass from skunky.skunky import Skunky class SkunkyTestCase(unittest.TestCase): @patch('boto3.Session', autospec=True) def test_skunky__init(self, session_mock): testSkunky = Skunky(session_mock) assert testSkunky.dynamodb_table_name == 'Skunky' assert testSkunky.queue_name == 'skunky' assert testSkunky.to_be_skunked == {} @patch('boto3.Session', autospec=True) def test_get_queue_url(self, session_mock): sqs_client_mock = Mock() sqs_client_mock.get_queue_url.return_value = { 'QueueUrl': 'https://queue.amazonaws.com/123456789101112/MyQueue', 'ResponseMetadata': { }, } testSkunky = Skunky(session_mock) testSkunky.sqs_client = sqs_client_mock assert testSkunky.get_queue_url() == 'https://queue.amazonaws.com/123456789101112/MyQueue' @patch('boto3.Session', autospec=True) def test_add_to_skunk_single_region(self, session_mock): identity = TEST_IDENTITY_1_A_US_WEST_2 testSkunky = Skunky(session_mock) testSkunky.add_to_skunk(identity) assert len(testSkunky.to_be_skunked['000000000001']['us-west-2']) == 1 @patch('boto3.Session', autospec=True) def test_add_to_skunk_multiple_regions(self, session_mock): testSkunky = Skunky(session_mock) testSkunky.add_to_skunk(TEST_IDENTITY_1_A_US_WEST_2) testSkunky.add_to_skunk(TEST_IDENTITY_1_A_US_EAST_1) assert len(testSkunky.to_be_skunked['000000000001']['us-west-2']) == 1 assert len(testSkunky.to_be_skunked['000000000001']['us-east-1']) == 1 @patch('boto3.Session', autospec=True) def test_add_to_skunk_single_region_multiple_instances(self, session_mock): testSkunky = Skunky(session_mock) testSkunky.add_to_skunk(TEST_IDENTITY_1_A_US_WEST_2) testSkunky.add_to_skunk(TEST_IDENTITY_2_A_US_WEST_2) assert len(testSkunky.to_be_skunked['000000000001']['us-west-2']) == 2 @patch('boto3.Session', autospec=True) def test_receive_identities_from_queue_single(self, session_mock): sqs_client_mock = Mock() sqs_client_mock.receive_message.return_value = { 'Messages': [ { 'MessageId': '123', 'ReceiptHandle': 'handle123', 'MD5OfBody': 'md5123', 'Body': json.dumps(TEST_IDENTITY_1_A_US_WEST_2['identity']), 'MD5OfMessageAttributes': 'string', 'Attributes': { 'SentTimestamp': '123123123' } }, ] } testSkunky = Skunky(session_mock) testSkunky.sqs_client = sqs_client_mock testSkunky.receive_identities_from_queue() assert len(testSkunky.to_be_skunked['000000000001']['us-west-2']) == 1 @patch('boto3.Session', autospec=True) def test_receive_identities_from_queue_multiple(self, session_mock): sqs_client_mock = Mock() sqs_client_mock.receive_message.return_value = { 'Messages': [ { 'MessageId': '123', 'ReceiptHandle': 'handle123', 'MD5OfBody': 'md5123', 'Body': json.dumps(TEST_IDENTITY_1_A_US_WEST_2['identity']), 'MD5OfMessageAttributes': 'string', 'Attributes': { 'SentTimestamp': '123123123' } }, { 'MessageId': '124', 'ReceiptHandle': 'handle124', 'MD5OfBody': 'md5124', 'Body': json.dumps(TEST_IDENTITY_1_B_US_EAST_1['identity']), 'MD5OfMessageAttributes': 'string', 'Attributes': { 'SentTimestamp': '123123124' } }, ] } testSkunky = Skunky(session_mock) testSkunky.sqs_client = sqs_client_mock testSkunky.receive_identities_from_queue() assert len(testSkunky.to_be_skunked['000000000001']['us-west-2']) == 1 assert len(testSkunky.to_be_skunked['000000000002']['us-east-1']) == 1 @patch('boto3.Session', autospec=True) def test_identity_hash(self, session_mock): testSkunky = 
Skunky(session_mock) assert testSkunky.hash(TEST_IDENTITY_1_A_US_WEST_2) == TEST_IDENTITY_1_A_US_WEST_2_HASH mock_time = Mock() mock_time.return_value = 0 @patch('time.time', mock_time) @patch.object(Skunky, '_start') @patch('boto3.Session', autospec=True) def test_run(self, session_mock, skunky_start_mock): testSkunky = Skunky(session_mock) testSkunky.run() assert testSkunky.expire == TIME_RUN @patch('time.time', mock_time) @patch('boto3.Session', autospec=True) def test_ttl(self, session_mock): testSkunky = Skunky(session_mock) assert testSkunky.ttl() == TTL_TIME @patch.object(Skunky,'tag') @patch.object(Skunky,'put') @patch('boto3.Session', autospec=True) def test_skunk_1_account_1_region_single_instance(self, session_mock, skunky_put_mock, skunky_tag_mock): testSkunky = Skunky(session_mock) testSkunky.add_to_skunk(TEST_IDENTITY_1_A_US_WEST_2) testSkunky.skunk_instances() assert skunky_put_mock.call_count == 1 assert skunky_tag_mock.call_count == 1 assert len(testSkunky.to_be_skunked['000000000001']['us-west-2']) == 0 @patch.object(Skunky,'tag') @patch.object(Skunky,'put') @patch('boto3.Session', autospec=True) def test_skunk_1_account_1_region_multiple_instances(self, session_mock, skunky_put_mock, skunky_tag_mock): testSkunky = Skunky(session_mock) testSkunky.add_to_skunk(TEST_IDENTITY_1_A_US_WEST_2) testSkunky.add_to_skunk(TEST_IDENTITY_2_A_US_WEST_2) testSkunky.skunk_instances() assert skunky_put_mock.call_count == 2 assert skunky_tag_mock.call_count == 1 assert len(testSkunky.to_be_skunked['000000000001']['us-west-2']) == 0 @patch.object(Skunky,'tag') @patch.object(Skunky,'put') @patch('boto3.Session', autospec=True) def test_skunk_2_accounts_2_regions_multiple_instances(self, session_mock, skunky_put_mock, skunky_tag_mock): testSkunky = Skunky(session_mock) testSkunky.add_to_skunk(TEST_IDENTITY_1_A_US_WEST_2) testSkunky.add_to_skunk(TEST_IDENTITY_2_A_US_WEST_2) testSkunky.add_to_skunk(TEST_IDENTITY_1_A_US_EAST_1) testSkunky.add_to_skunk(TEST_IDENTITY_1_B_US_EAST_1) testSkunky.skunk_instances() assert skunky_put_mock.call_count == 4 assert skunky_tag_mock.call_count == 3 assert len(testSkunky.to_be_skunked['000000000001']['us-west-2']) == 0 assert len(testSkunky.to_be_skunked['000000000001']['us-east-1']) == 0 assert len(testSkunky.to_be_skunked['000000000002']['us-east-1']) == 0 @patch('boto3.Session', autospec=True) def test_skunky_put_identity_dynamodb_true(self, session_mock): testSkunky = Skunky(session_mock) self.assertTrue(testSkunky.put(TEST_IDENTITY_1_A_US_WEST_2)) @patch('boto3.Session', autospec=True) def test_skunky_put_identity_dynamodb_false(self, session_mock): response = { 'Error': { 'Code': "ConditionalCheckFailedException" } } dynamodb_table_mock = Mock() dynamodb_table_mock.put_item.side_effect = exceptions.ClientError(response, 'put_item') testSkunky = Skunky(session_mock) testSkunky.dynamodb_table = dynamodb_table_mock self.assertFalse(testSkunky.put(TEST_IDENTITY_1_A_US_WEST_2)) @patch('boto3.Session', autospec=True) def test_skunky_put_identity_dynamodb_raise_error(self, session_mock): dynamodb_table_mock = Mock() dynamodb_table_mock.put_item.side_effect = Exception('Some unknown exception') testSkunky = Skunky(session_mock) testSkunky.dynamodb_table = dynamodb_table_mock self.assertRaises(Exception, lambda: testSkunky.put(TEST_IDENTITY_1_A_US_WEST_2)) TTL_TIME = 2592000 TIME_RUN = (5 * 60) - 15 TEST_IDENTITY_1_A_US_WEST_2_HASH = '000000000001::us-west-2::i-xxxxxxxxxxxxxxxx1::123123123' TEST_IDENTITY_1_A_US_WEST_2 = { "identity": { "devpayProductCodes": None, 
"availabilityZone": "us-nflx-1a", "privateIp": '10.0.0.1', "version": "2010-08-31", "instanceId": "i-xxxxxxxxxxxxxxxx1", "billingProducts": None, "instanceType": "m3.medium", "accountId": "000000000001", "architecture": "x86_64", "kernelId": "aki-fc8f11cc", "ramdiskId": None, "imageId": "ami-12345678", "pendingTime": "2017-04-11T18:12:03Z", "region": "us-west-2" }, "dirty_timestamp": 123123123, "receipt_handle": 'receipthandle1' } TEST_IDENTITY_2_A_US_WEST_2 = { "identity": { "devpayProductCodes": None, "availabilityZone": "us-nflx-1a", "privateIp": '10.0.0.2', "version": "2010-08-31", "instanceId": "i-xxxxxxxxxxxxxxxx2", "billingProducts": None, "instanceType": "m3.medium", "accountId": "000000000001", "architecture": "x86_64", "kernelId": "aki-fc8f11cc", "ramdiskId": None, "imageId": "ami-12345678", "pendingTime": "2017-04-11T18:12:03Z", "region": "us-west-2" }, "dirty_timestamp": 123123124, "receipt_handle": 'receipthandle2' } TEST_IDENTITY_1_A_US_EAST_1 = { "identity": { "devpayProductCodes": None, "availabilityZone": "us-nflx-1a", "privateIp": '10.1.0.1', "version": "2010-08-31", "instanceId": "i-xxxxxxxxxxxxxxxx3", "billingProducts": None, "instanceType": "m3.medium", "accountId": "000000000001", "architecture": "x86_64", "kernelId": "aki-fc8f11cc", "ramdiskId": None, "imageId": "ami-12345678", "pendingTime": "2017-04-11T18:12:03Z", "region": "us-east-1" }, "dirty_timestamp": 123123125, "receipt_handle": 'receipthandle3' } TEST_IDENTITY_1_B_US_EAST_1 = { "identity": { "devpayProductCodes": None, "availabilityZone": "us-nflx-1a", "privateIp": '10.0.0.1', "version": "2010-08-31", "instanceId": "i-xxxxxxxxxxxxxxxx4", "billingProducts": None, "instanceType": "m3.medium", "accountId": "000000000002", "architecture": "x86_64", "kernelId": "aki-fc8f11cc", "ramdiskId": None, "imageId": "ami-12345678", "pendingTime": "2017-04-11T18:12:03Z", "region": "us-east-1" }, "dirty_timestamp": 123123126, "receipt_handle": 'receipthandle4' } if __name__ == '__main__': logging.disable(logging.INFO) logging.disable(logging.DEBUG) unittest.main()
33.764179
113
0.635664
1,274
11,311
5.305338
0.142072
0.052079
0.03107
0.022489
0.807812
0.800118
0.754549
0.719041
0.682793
0.644326
0
0.07571
0.243303
11,311
335
114
33.764179
0.713985
0.001768
0
0.558935
0
0
0.217873
0.013462
0
0
0
0
0.106464
1
0.060837
false
0
0.034221
0
0.102662
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
70daa8ba6258909a2e4aad81185623a116dacb20
97
py
Python
earthquake/admin.py
thaiseerp/Live-Events-Earthquake-map-using-in-Google-Maps-using-Django
9359b36275cfb60619f2dbcfdd1a83cdbac6ef1f
[ "MIT" ]
null
null
null
earthquake/admin.py
thaiseerp/Live-Events-Earthquake-map-using-in-Google-Maps-using-Django
9359b36275cfb60619f2dbcfdd1a83cdbac6ef1f
[ "MIT" ]
null
null
null
earthquake/admin.py
thaiseerp/Live-Events-Earthquake-map-using-in-Google-Maps-using-Django
9359b36275cfb60619f2dbcfdd1a83cdbac6ef1f
[ "MIT" ]
null
null
null
""" Author: Thaiseer Parammal """ from django.contrib import admin # Register your models here.
13.857143
32
0.742268
12
97
6
1
0
0
0
0
0
0
0
0
0
0
0
0.154639
97
6
33
16.166667
0.878049
0.546392
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
70fc99637de072e9ce23f5b2617639b522b5ada1
165
py
Python
rrError.py
andrew54068/rentalRadar
8d4d4a62d9d8ecc8c29d4648f6a325b8e3e82cbd
[ "MIT" ]
2
2020-07-25T03:40:15.000Z
2020-08-30T15:17:25.000Z
rrError.py
andrew54068/rentalRadar
8d4d4a62d9d8ecc8c29d4648f6a325b8e3e82cbd
[ "MIT" ]
null
null
null
rrError.py
andrew54068/rentalRadar
8d4d4a62d9d8ecc8c29d4648f6a325b8e3e82cbd
[ "MIT" ]
null
null
null
class SqlError(Exception):

    def __init__(self, message="Salary != in (5000, 15000) range"):
        self.message = message
        super().__init__(self.message)
33
67
0.654545
19
165
5.263158
0.684211
0.33
0.3
0
0
0
0
0
0
0
0
0.068702
0.206061
165
5
68
33
0.694656
0
0
0
0
0
0.192771
0
0
0
0
0
0
1
0.25
false
0
0
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
5
70fddcc1fcfc0f986859504f58154eec0eb5086d
280
py
Python
recipes/models.py
evemon/ohsiha
91d635bf5159b63cbd276b9dccabbf3ec8e4aa92
[ "MIT" ]
null
null
null
recipes/models.py
evemon/ohsiha
91d635bf5159b63cbd276b9dccabbf3ec8e4aa92
[ "MIT" ]
null
null
null
recipes/models.py
evemon/ohsiha
91d635bf5159b63cbd276b9dccabbf3ec8e4aa92
[ "MIT" ]
null
null
null
from django.db import models
from django.urls import reverse


class Recipe(models.Model):
    title = models.CharField(max_length=200)
    href = models.CharField(max_length=100)
    ingredients = models.CharField(max_length=200)
    thumbnail = models.CharField(max_length=200)
28
50
0.764286
38
280
5.526316
0.5
0.285714
0.342857
0.457143
0.385714
0
0
0
0
0
0
0.05
0.142857
280
9
51
31.111111
0.825
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.285714
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
5
cb080f625f95b0f2191799dc47575aa3567ff3a7
172
py
Python
codechain/sdk/key/__init__.py
CodeChain-io/codechain-sdk-python
e21420fe8e1105f23f04fc6ca3f18a444c2bf9a3
[ "0BSD" ]
11
2018-08-22T09:42:54.000Z
2019-11-30T07:19:42.000Z
codechain/sdk/key/__init__.py
CodeChain-io/codechain-sdk-python
e21420fe8e1105f23f04fc6ca3f18a444c2bf9a3
[ "0BSD" ]
38
2019-07-22T06:13:39.000Z
2021-06-02T00:43:21.000Z
codechain/sdk/key/__init__.py
CodeChain-io/codechain-sdk-python
e21420fe8e1105f23f04fc6ca3f18a444c2bf9a3
[ "0BSD" ]
5
2019-07-24T19:13:00.000Z
2020-03-18T12:13:27.000Z
import sys

from .key import Key
from .key import KeyStoreType

# -------
# Pythons
# -------
if sys.version_info < (3, 6):
    raise ValueError("Please use >=python3.6")
14.333333
46
0.627907
23
172
4.652174
0.695652
0.130841
0.242991
0
0
0
0
0
0
0
0
0.028571
0.186047
172
11
47
15.636364
0.735714
0.133721
0
0
0
0
0.151724
0
0
0
0
0
0
1
0
true
0
0.6
0
0.6
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
cb4058b26a49665945127397e17e8d8aa0f0ccc6
322
py
Python
wagtail_sb_material/wagtail_hooks.py
softbutterfly/softbutterfly-wagtail-materialize
8619166b5688d4a51b1a6e39a03e22661bc1a7ea
[ "BSD-3-Clause" ]
null
null
null
wagtail_sb_material/wagtail_hooks.py
softbutterfly/softbutterfly-wagtail-materialize
8619166b5688d4a51b1a6e39a03e22661bc1a7ea
[ "BSD-3-Clause" ]
null
null
null
wagtail_sb_material/wagtail_hooks.py
softbutterfly/softbutterfly-wagtail-materialize
8619166b5688d4a51b1a6e39a03e22661bc1a7ea
[ "BSD-3-Clause" ]
null
null
null
from django.templatetags.static import static
from django.utils.html import format_html

from wagtail.core import hooks


@hooks.register('insert_editor_css')
def editor_css():
    return format_html(
        '<link rel="stylesheet" href="{}">',
        static('softbutterfly/wagtail/css/softbutterfly-wagtail.css')
    )
24.769231
69
0.729814
40
322
5.75
0.55
0.086957
0.2
0
0
0
0
0
0
0
0
0
0.152174
322
12
70
26.833333
0.842491
0
0
0
0
0
0.313665
0.158385
0
0
0
0
0
1
0.111111
true
0
0.333333
0.111111
0.555556
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
1
1
0
0
5
cb43a57dc95d0358bc28192f38754bacaf60e68e
78
py
Python
pythonteste/aula11.py
kaue-pessoa/cursoemvideo-python
d0f651a85d43c1800fcbc14cad0d8c20c86dbacf
[ "MIT" ]
null
null
null
pythonteste/aula11.py
kaue-pessoa/cursoemvideo-python
d0f651a85d43c1800fcbc14cad0d8c20c86dbacf
[ "MIT" ]
null
null
null
pythonteste/aula11.py
kaue-pessoa/cursoemvideo-python
d0f651a85d43c1800fcbc14cad0d8c20c86dbacf
[ "MIT" ]
null
null
null
print('\033[4;33;45mOlá Mundo!\033[m')  # to add color in Python (PyCharm)
26
38
0.717949
14
78
4
0.928571
0
0
0
0
0
0
0
0
0
0
0.161765
0.128205
78
2
39
39
0.661765
0.448718
0
0
0
0
0.725
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
cb4ca588754de90204c71911427283ccdc95ce1a
286
py
Python
backend/app/literature/crud/resource_descriptor_crud.py
alliance-genome/agr_literature_service_demo
48cd3a3797f96ef94e6d40d2c94e379bfc48914f
[ "MIT" ]
null
null
null
backend/app/literature/crud/resource_descriptor_crud.py
alliance-genome/agr_literature_service_demo
48cd3a3797f96ef94e6d40d2c94e379bfc48914f
[ "MIT" ]
39
2021-10-18T17:02:49.000Z
2022-03-28T20:56:24.000Z
backend/app/literature/crud/resource_descriptor_crud.py
alliance-genome/agr_literature_service_demo
48cd3a3797f96ef94e6d40d2c94e379bfc48914f
[ "MIT" ]
1
2021-10-21T00:11:18.000Z
2021-10-21T00:11:18.000Z
from sqlalchemy.orm import Session

from literature.models import ResourceDescriptorModel
from initialize import update_resource_descriptor


def update(db: Session):
    return update_resource_descriptor(db)


def show(db: Session):
    return db.query(ResourceDescriptorModel).all()
20.428571
53
0.811189
34
286
6.705882
0.529412
0.122807
0.210526
0
0
0
0
0
0
0
0
0
0.125874
286
13
54
22
0.912
0
0
0
0
0
0
0
0
0
0
0
0
1
0.285714
false
0
0.428571
0.285714
1
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
5
cbdca94a99ef0120b85aecf2caffedcc8f73bf9d
115
py
Python
examples/imported.py
tardyp/pyserde
2bef77d9888ffcc650f031f0e883cb2ff08cbf60
[ "MIT" ]
113
2019-06-15T08:04:41.000Z
2022-03-31T22:48:54.000Z
examples/imported.py
tardyp/pyserde
2bef77d9888ffcc650f031f0e883cb2ff08cbf60
[ "MIT" ]
168
2019-06-25T23:44:13.000Z
2022-03-31T17:39:30.000Z
examples/imported.py
tardyp/pyserde
2bef77d9888ffcc650f031f0e883cb2ff08cbf60
[ "MIT" ]
26
2020-05-25T15:17:09.000Z
2022-03-24T05:31:45.000Z
import enum


class ImportedEnum(enum.IntEnum):
    V0 = enum.auto()
    V1 = enum.auto()
    V2 = 10
    V3 = 100
12.777778
33
0.582609
16
115
4.1875
0.75
0.238806
0
0
0
0
0
0
0
0
0
0.111111
0.295652
115
8
34
14.375
0.716049
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.333333
0
1.166667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
1dbde6ca7778ad6eb356d4eefff96a1f6b3f2379
56
py
Python
app/utilities/__init__.py
actini/storage-manager
0f6995fe65e2ed8dd862ffd03b52c3808c0ff873
[ "MIT" ]
null
null
null
app/utilities/__init__.py
actini/storage-manager
0f6995fe65e2ed8dd862ffd03b52c3808c0ff873
[ "MIT" ]
null
null
null
app/utilities/__init__.py
actini/storage-manager
0f6995fe65e2ed8dd862ffd03b52c3808c0ff873
[ "MIT" ]
null
null
null
from .configloader import ConfigLoader, ConfigException
28
55
0.875
5
56
9.8
0.8
0
0
0
0
0
0
0
0
0
0
0
0.089286
56
1
56
56
0.960784
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
1df9eedc3047cdf63be37f3aa9def5de2653adee
276
py
Python
condenser/database_helper.py
Spantree/condenser
430c950c4a871ced2eaa2b48445957a4fe0562b1
[ "MIT" ]
null
null
null
condenser/database_helper.py
Spantree/condenser
430c950c4a871ced2eaa2b48445957a4fe0562b1
[ "MIT" ]
null
null
null
condenser/database_helper.py
Spantree/condenser
430c950c4a871ced2eaa2b48445957a4fe0562b1
[ "MIT" ]
null
null
null
from . import config_reader


def get_specific_helper():
    if config_reader.get_db_type() == 'postgres':
        from . import psql_database_helper
        return psql_database_helper
    else:
        from . import mysql_database_helper
        return mysql_database_helper
27.6
49
0.713768
34
276
5.382353
0.5
0.306011
0.196721
0
0
0
0
0
0
0
0
0
0.231884
276
9
50
30.666667
0.863208
0
0
0
0
0
0.028986
0
0
0
0
0
0
1
0.125
true
0
0.375
0
0.75
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
381b8e485ad8fda54be642dadcdbca6da6565166
63
py
Python
python/phonetisaurus/__init__.py
aplawson/Phonetisaurus
e73a6042d5f99d5feeb9963e3c3696190a9e0e5e
[ "BSD-3-Clause" ]
349
2015-04-07T10:19:21.000Z
2022-03-28T22:43:43.000Z
python/phonetisaurus/__init__.py
aplawson/Phonetisaurus
e73a6042d5f99d5feeb9963e3c3696190a9e0e5e
[ "BSD-3-Clause" ]
61
2015-05-19T15:48:46.000Z
2022-01-12T06:49:58.000Z
python/phonetisaurus/__init__.py
aplawson/Phonetisaurus
e73a6042d5f99d5feeb9963e3c3696190a9e0e5e
[ "BSD-3-Clause" ]
117
2015-06-07T00:55:37.000Z
2022-03-23T00:59:30.000Z
from Phonetisaurus import PhonetisaurusScript as Phonetisaurus
31.5
62
0.904762
6
63
9.5
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.095238
63
1
63
63
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
381fb64327f16bec6fd1bef8a4b81fa86440c1b0
4,133
py
Python
clients/python/client/relevanced_client/test/test_centroid_document_operations.py
scivey/relevanced
8f0fe67f48ea1da7468a70eef026ed23b4298a27
[ "MIT" ]
10
2015-12-02T01:41:52.000Z
2020-05-11T04:05:56.000Z
clients/python/client/relevanced_client/test/test_centroid_document_operations.py
scivey/relevanced
8f0fe67f48ea1da7468a70eef026ed23b4298a27
[ "MIT" ]
null
null
null
clients/python/client/relevanced_client/test/test_centroid_document_operations.py
scivey/relevanced
8f0fe67f48ea1da7468a70eef026ed23b4298a27
[ "MIT" ]
3
2017-05-20T19:31:44.000Z
2019-04-14T03:47:07.000Z
from __future__ import print_function
from .common import IsolatedTestCase
from .. import (
    EDocumentDoesNotExist,
    ECentroidDoesNotExist,
    EDocumentAlreadyInCentroid,
    EDocumentNotInCentroid
)


class TestCentroidDocumentOperations(IsolatedTestCase):
    def test_add_document_1(self):
        self.client.create_centroid('centroid-1')
        self.client.create_document_with_id('doc-1', 'doc 1 text')
        self.client.add_document_to_centroid('centroid-1', 'doc-1')
        documents = self.client.list_all_documents_for_centroid('centroid-1').documents
        self.assertEqual(['doc-1'], documents)

    def test_add_remove_documents(self):
        self.client.create_centroid('centroid-1')
        self.client.create_document_with_id('doc-1', 'doc 1 text')
        self.client.add_document_to_centroid('centroid-1', 'doc-1')
        documents = self.client.list_all_documents_for_centroid('centroid-1').documents
        self.assertEqual(['doc-1'], documents)
        self.client.create_document_with_id('doc-2', 'doc 2 text')
        self.client.create_document_with_id('doc-3', 'doc 3 text')
        self.client.add_documents_to_centroid('centroid-1', ['doc-2', 'doc-3'])
        documents = self.client.list_all_documents_for_centroid('centroid-1').documents
        self.assertEqual(set(['doc-1', 'doc-2', 'doc-3']), set(documents))
        self.client.remove_document_from_centroid('centroid-1', 'doc-2')
        documents = self.client.list_all_documents_for_centroid('centroid-1').documents
        self.assertEqual(set(['doc-1', 'doc-3']), set(documents))

    def test_add_document_missing_centroid(self):
        self.client.create_centroid('good-centroid')
        self.client.create_document_with_id('doc-1', 'some text')
        with self.assertRaises(ECentroidDoesNotExist):
            self.client.add_document_to_centroid('bad-centroid', 'doc-1')

    def test_add_document_missing_document(self):
        self.client.create_centroid('good-centroid')
        with self.assertRaises(EDocumentDoesNotExist):
            self.client.add_document_to_centroid('good-centroid', 'doc-1')

    def test_add_document_already_in_centroid(self):
        self.client.create_centroid('centroid-1')
        self.client.create_document_with_id('doc-1', 'some text')
        self.client.add_document_to_centroid('centroid-1', 'doc-1')
        with self.assertRaises(EDocumentAlreadyInCentroid):
            self.client.add_document_to_centroid('centroid-1', 'doc-1')

    def test_add_document_already_in_centroid_ignore(self):
        self.client.create_centroid('centroid-1')
        self.client.create_document_with_id('doc-1', 'some text')
        self.client.add_document_to_centroid('centroid-1', 'doc-1')
        self.client.add_document_to_centroid('centroid-1', 'doc-1',
            ignore_already_in_centroid=True
        )

    def test_remove_document_not_in_centroid(self):
        self.client.create_centroid('centroid-1')
        self.client.create_document_with_id('doc-1', 'some text')
        self.client.create_document_with_id('doc-2', 'more text')
        self.client.add_document_to_centroid('centroid-1', 'doc-1')
        with self.assertRaises(EDocumentNotInCentroid):
            self.client.remove_document_from_centroid('centroid-1', 'doc-2')

    def test_remove_document_not_in_centroid_ignore(self):
        self.client.create_centroid('centroid-1')
        self.client.create_document_with_id('doc-1', 'some text')
        self.client.create_document_with_id('doc-2', 'more text')
        self.client.add_document_to_centroid('centroid-1', 'doc-1')
        self.client.remove_document_from_centroid('centroid-1', 'doc-2',
            ignore_not_in_centroid=True
        )

    def test_add_document_missing_document_and_centroid(self):
        with self.assertRaises(ECentroidDoesNotExist):
            self.client.add_document_to_centroid('some-centroid', 'some-doc')

    def test_remove_document_missing_document_and_centroid(self):
        with self.assertRaises(ECentroidDoesNotExist):
            self.client.remove_document_from_centroid('some-centroid', 'some-doc')
45.922222
87
0.711106
528
4,133
5.255682
0.090909
0.140541
0.134775
0.086486
0.838919
0.794595
0.762162
0.702342
0.676757
0.657297
0
0.017401
0.165739
4,133
89
88
46.438202
0.787413
0
0
0.486111
0
0
0.14061
0
0
0
0
0
0.138889
1
0.138889
false
0
0.041667
0
0.194444
0.013889
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
383f8a303e9e612596921aafdd8826c71e44c6ab
13,835
py
Python
testing/objects/test_arrayobject.py
jweinraub/hippyvm
09c7643aaa1c4ade566e8681abd2543f12bf874c
[ "MIT" ]
289
2015-01-01T15:36:55.000Z
2022-03-27T00:22:27.000Z
testing/objects/test_arrayobject.py
jweinraub/hippyvm
09c7643aaa1c4ade566e8681abd2543f12bf874c
[ "MIT" ]
26
2015-01-21T16:34:41.000Z
2020-08-26T15:12:54.000Z
testing/objects/test_arrayobject.py
jweinraub/hippyvm
09c7643aaa1c4ade566e8681abd2543f12bf874c
[ "MIT" ]
35
2015-01-05T12:09:41.000Z
2022-03-16T09:30:16.000Z
import py, sys
from rpython.rlib.rfloat import INFINITY, NAN, isnan
from testing.test_interpreter import BaseTestInterpreter


def doset(space, w_array, w_index, w_newvalue):
    w_res, w_new2 = w_array.setitem2_maybe_inplace(space, w_index, w_newvalue)
    assert w_new2 is w_newvalue
    assert w_res is w_array    # worked in-place

def doset_not_inplace(space, w_array, w_index, w_newvalue):
    w_res, w_new2 = w_array.setitem2_maybe_inplace(space, w_index, w_newvalue)
    assert w_new2 is w_newvalue
    assert w_res is not w_array    # did not work in-place
    return w_res

def dounset(space, w_array, w_index):
    w_result = w_array._unsetitem(space, w_index)
    assert w_result is w_array    # worked in-place

def dounset_not_inplace(space, w_array, w_index):
    w_result = w_array._unsetitem(space, w_index)
    assert w_result is not w_array    # did not work in-place
    return w_result

def doappend(space, w_array, w_newvalue):
    w_new2 = w_array.appenditem_inplace(space, w_newvalue)
    assert w_new2 is w_newvalue


class TestArrayObject(BaseTestInterpreter):

    def test_is_true(self):
        space = self.space
        w_array = space.new_array_from_list([])
        assert space.is_true(w_array) is False
        w_array = space.new_array_from_list([space.newint(0)])
        assert space.is_true(w_array) is True

    def test_getitem(self):
        space = self.space
        w_array = space.new_array_from_list([space.newint(42)])
        w_item = space.getitem(w_array, space.newint(0))
        assert space.is_w(w_item, space.newint(42))
        assert w_array.as_dict() == {"0": w_item}

    def test_getitem_hash(self):
        space = self.space
        w_array = space.new_array_from_dict({"foo": space.newint(42),
                                             "-84": space.newint(43)})
        w_item = space.getitem(w_array, space.newstr("foo"))
        assert space.is_w(w_item, space.newint(42))
        w_item = space.getitem(w_array, space.newint(-84))
        assert space.is_w(w_item, space.newint(43))

    def test_setitem(self):
        space = self.space
        w_array = space.new_array_from_list([])
        w_item = space.newstr("bok")
        doset(space, w_array, space.newint(0), w_item)
        assert w_array.as_dict() == {"0": w_item}
        w_item2 = space.newstr("bok2")
        doset(space, w_array, space.newint(0), w_item2)
        assert w_array.as_dict() == {"0": w_item2}
        w_item3 = space.newstr("bok3")
        doset(space, w_array, space.newint(1), w_item3)
        assert w_array.as_dict() == {"0": w_item2, "1": w_item3}

    def test_setitem_hash(self):
        space = self.space
        w_array = space.new_array_from_dict({})
        w_item = space.newstr("bok")
        doset(space, w_array, space.newint(0), w_item)
        assert w_array.as_dict() == {"0": w_item}
        w_item2 = space.newstr("bok2")
        doset(space, w_array, space.newstr("0"), w_item2)
        assert w_array.as_dict() == {"0": w_item2}
        w_item3 = space.newstr("bok3")
        doset(space, w_array, space.newstr("aAa"), w_item3)
        assert w_array.as_dict() == {"0": w_item2, "aAa": w_item3}

    def test_getitem_str(self):
        space = self.space
        w_array = space.new_array_from_list([space.newint(42)])
        w_item = space.getitem(w_array, space.newstr("0"))
        assert space.is_w(w_item, space.newint(42))
        w_item = space.getitem(w_array, space.newstr(""))
        assert w_item is space.w_Null
        w_item = space.getitem(w_array, space.newstr("00"))
        assert w_item is space.w_Null
        w_item = space.getitem(w_array, space.newstr("foo"))
        assert w_item is space.w_Null
        w_item = space.getitem(w_array, space.newstr(str(1 << 128)))
        assert w_item is space.w_Null

    def test_list2hash_out_of_bound(self):
        space = self.space
        w_x = space.newstr("x")
        w_y = space.newstr("y")
        w_array = space.new_array_from_list([w_x])
        w_array = doset_not_inplace(space, w_array, space.newint(100), w_y)
        assert w_array.as_dict() == {"0": w_x, "100": w_y}

    def test_list2hash_str(self):
        space = self.space
        w_x = space.newstr("x")
        w_y = space.newstr("y")
        w_array = space.new_array_from_list([w_x])
        w_array = doset_not_inplace(space, w_array, space.newstr("z"), w_y)
        assert w_array.as_dict() == {"0": w_x, "z": w_y}
        assert w_array._has_string_keys

    def test_setitem_numeric_str(self):
        space = self.space
        w_x = space.newstr("x")
        w_y = space.newstr("y")
        w_array = space.new_array_from_list([w_x])
        doset(space, w_array, space.newstr("0"), w_y)
        assert w_array.as_dict() == {"0": w_y}
        assert not w_array._has_string_keys

    def test_unsetitem(self):
        space = self.space
        for w_0, w_2 in [(space.newint(0), space.newint(2)),
                         (space.newstr("0"), space.newstr("2"))]:
            w_x = space.newstr("x")
            w_y = space.newstr("y")
            w_z = space.newstr("z")
            w_array = space.new_array_from_list([w_x, w_y, w_z])
            dounset(space, w_array, w_2)
            assert w_array.as_dict() == {"0": w_x, "1": w_y}
            assert not w_array._has_string_keys
            dounset(space, w_array, w_2)
            assert w_array.as_dict() == {"0": w_x, "1": w_y}
            assert not w_array._has_string_keys
            w_array = dounset_not_inplace(space, w_array, w_0)
            assert w_array.as_dict() == {"1": w_y}
            assert w_array._has_string_keys    # for now

    def test_unsetitem_hash(self):
        space = self.space
        w_x = space.newstr("x")
        w_y = space.newstr("y")
        w_array = space.new_array_from_dict({"foo": w_x, "42": w_y})
        dounset(space, w_array, space.newint(42))
        assert w_array.as_dict() == {"foo": w_x}
        dounset(space, w_array, space.newstr("bar"))
        assert w_array.as_dict() == {"foo": w_x}
        dounset(space, w_array, space.newstr("foo"))
        assert w_array.as_dict() == {}

    def test_append_item(self):
        space = self.space
        w_x = space.newstr("x")
        w_y = space.newstr("y")
        w_int = space.newint(330)
        w_array = space.new_array_from_list([w_x])
        doappend(space, w_array, w_x)
        doappend(space, w_array, w_x)
        w_array = doset_not_inplace(space, w_array, space.newint(99), w_y)
        doappend(space, w_array, w_y)
        assert w_array.as_dict() == {"0": w_x, "1": w_x, "2": w_x,
                                     "99": w_y, "100": w_y}
        doappend(space, w_array, w_y)
        assert w_array.as_dict() == {"0": w_x, "1": w_x, "2": w_x,
                                     "99": w_y, "100": w_y, "101": w_y}
        dounset(space, w_array, space.newint(101))
        assert w_array.as_dict() == {"0": w_x, "1": w_x, "2": w_x,
                                     "99": w_y, "100": w_y}
        doappend(space, w_array, w_y)
        assert w_array.as_dict() == {"0": w_x, "1": w_x, "2": w_x,
                                     "99": w_y, "100": w_y, "102": w_y}
        doset(space, w_array, space.newstr("255"), w_y)
        dounset(space, w_array, space.newint(255))
        assert w_array.as_dict() == {"0": w_x, "1": w_x, "2": w_x,
                                     "99": w_y, "100": w_y, "102": w_y}
        doappend(space, w_array, w_y)
        assert w_array.as_dict() == {"0": w_x, "1": w_x, "2": w_x,
                                     "99": w_y, "100": w_y, "102": w_y, '256': w_y}
        assert w_array.as_dict() == {"0": w_x, "1": w_x, "2": w_x,
                                     "99": w_y, "100": w_y, "102": w_y, '256': w_y}
        doset(space, w_array, space.newstr("monday"), w_y)
        doappend(space, w_array, w_y)
        assert w_array.as_dict() == {"0": w_x, "1": w_x, "2": w_x,
                                     "99": w_y, "100": w_y, "102": w_y, '256': w_y,
                                     "monday": w_y, '257': w_y}
        w_array = space.new_array_from_dict({"one": w_x, "-84": w_int})
        assert w_array.as_dict() == {'-84': w_int, 'one': w_x}
        doappend(space, w_array, w_x)
        assert w_array.as_dict() == {'-84': w_int, '0': w_x, 'one': w_x}
        doappend(space, w_array, w_x)
        assert w_array.as_dict() == {'-84': w_int, '0': w_x, 'one': w_x, '1': w_x}
        doset(space, w_array, space.newint(100), w_y)
        assert w_array.as_dict() == {'-84': w_int, '0': w_x, 'one': w_x,
                                     '1': w_x, '100': w_y}
        dounset(space, w_array, space.newint(100))
        doappend(space, w_array, w_x)
        assert w_array.as_dict() == {'-84': w_int, '0': w_x,
                                     'one': w_x, '1': w_x, '101': w_x}
        doset(space, w_array, space.newstr("255"), w_y)
        dounset(space, w_array, space.newint(255))
        doappend(space, w_array, w_x)
        assert w_array.as_dict() == {'-84': w_int, '0': w_x, 'one': w_x, '1': w_x,
                                     '101': w_x, '256': w_x}
        doset(space, w_array, space.newstr("monday"), w_y)
        doappend(space, w_array, w_y)
        assert w_array.as_dict() == {'-84': w_int, '0': w_x, 'one': w_x, '1': w_x,
                                     '101': w_x, '256': w_x, "monday": w_y, '257': w_y}

    @py.test.mark.xfail(
        "not config.option.runappdirect and sys.maxint > 2**32",
        reason="parsing of floats doesn't get a 1-1 exact result")
    def test_index_overflow(self):
        def check(inputfloat, outputint):
            if isnan(inputfloat):
                inputfloat = 'NAN'
            elif inputfloat == INFINITY:
                inputfloat = 'INF'
            elif inputfloat == -INFINITY:
                inputfloat = '-INF'
            else:
                inputfloat = repr(inputfloat)
            output = self.run("""
                $arr1 = array(%d=>4);
                echo $arr1[%s];
            """ % (outputint, inputfloat))
            assert self.space.is_w(output[0], self.space.newint(4))
        check(123.95, 123)
        check(-123.95, -123)
        check(2147483647.1, 2147483647)
        check(-1234567898765432123456789.0, 0)
        check(1234567898765432123456789.0, 0)
        check(INFINITY, 0)
        check(-INFINITY, 0)
        check(NAN, -sys.maxint-1)
        check(-9.223372036855e+18, 0)
        check(9.223372036855e+18, 0)
        check(9.223372036854767e+18, -9216)
        check(9.223372036854766e+18, -10240)
        check(9.223372036854786e+18, 0)
        check(9.214148664817921e+18, 1511828480)

    def test_reference_update_does_not_change_array(self):
        space = self.space
        w_array = space.new_array_from_list([])
        w_itemref = space.empty_ref()
        w_new = w_array._setitem_ref(space, space.newint(0), w_itemref)
        assert w_new is w_array    # worked in-place
        doset(space, w_array, space.newint(0), space.newint(42))
        assert space.int_w(w_itemref.deref_temp()) == 42
        #
        w_new = w_array._setitem_ref(space, space.newstr('XY'), w_itemref)
        assert w_new is not w_array    # did not work in-place
        w_array = w_new
        doset(space, w_array, space.newstr('XY'), space.newint(42))
        assert space.int_w(w_itemref.deref_temp()) == 42

    def test_setitem_appenditem(self):
        space = self.space
        w_array = space.new_array_from_list([])
        w_item = space.newstr("bok")
        doset(space, w_array, space.newint(0), w_item)
        assert w_array.as_dict() == {"0": w_item}
        w_item2 = space.newstr("bok2")
        doappend(space, w_array, w_item2)
        assert w_array.as_dict() == {"0": w_item, "1": w_item2}

    def test_setitem_negative_integer_append(self):
        space = self.space
        w_array = space.new_array_from_list([])
        w_item = space.newstr("bok")
        w_array = doset_not_inplace(space, w_array, space.newint(-5), w_item)
        assert w_array.as_dict() == {"-5": w_item}
        w_item2 = space.newstr("bok2")
        doappend(space, w_array, w_item2)
        assert w_array.as_dict() == {"-5": w_item, "0": w_item2}
42.700617
78
0.507987
1,857
13,835
3.485191
0.084545
0.124227
0.096879
0.086527
0.815822
0.770859
0.749382
0.708436
0.633498
0.61063
0
0.059219
0.365305
13,835
323
79
42.832817
0.677827
0.008746
0
0.557823
0
0
0.038088
0.001897
0
0
0
0
0.210884
1
0.07483
false
0
0.010204
0
0.095238
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
69957c45f7d53be6bde147eb378accc40900434e
100
py
Python
Chakravala/__init__.py
Rachiiit/Chakravala
feee7cf8b847ace583c5b102648659f67ae17372
[ "CC0-1.0" ]
null
null
null
Chakravala/__init__.py
Rachiiit/Chakravala
feee7cf8b847ace583c5b102648659f67ae17372
[ "CC0-1.0" ]
null
null
null
Chakravala/__init__.py
Rachiiit/Chakravala
feee7cf8b847ace583c5b102648659f67ae17372
[ "CC0-1.0" ]
null
null
null
from .chakravala import chakravala, chakravala_terno
from .sqrt_approx import sqrt_approx, sqrt_app
50
53
0.86
14
100
5.857143
0.5
0.243902
0
0
0
0
0
0
0
0
0
0
0.1
100
2
54
50
0.911111
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
6998713b5a740bc6859d84a270bc28e065be9ac0
59
py
Python
mymldev/utils/__init__.py
Suneel123/mymldev
d80826432f97c9004986cd5a625f74757cf362bb
[ "MIT" ]
null
null
null
mymldev/utils/__init__.py
Suneel123/mymldev
d80826432f97c9004986cd5a625f74757cf362bb
[ "MIT" ]
null
null
null
mymldev/utils/__init__.py
Suneel123/mymldev
d80826432f97c9004986cd5a625f74757cf362bb
[ "MIT" ]
null
null
null
# Utility snippets
from .misc import ConfigHolder, DotDict
19.666667
39
0.813559
7
59
6.857143
1
0
0
0
0
0
0
0
0
0
0
0
0.135593
59
2
40
29.5
0.941176
0.271186
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
699c8fd5329616005a0a8e0f48a54d4ab6336cab
124
py
Python
class_room_backend/chat/admin.py
mdShakilHossainNsu2018/classroom_flutter
226be17a0bce9522377714b380764881e914040a
[ "MIT" ]
null
null
null
class_room_backend/chat/admin.py
mdShakilHossainNsu2018/classroom_flutter
226be17a0bce9522377714b380764881e914040a
[ "MIT" ]
null
null
null
class_room_backend/chat/admin.py
mdShakilHossainNsu2018/classroom_flutter
226be17a0bce9522377714b380764881e914040a
[ "MIT" ]
null
null
null
from django.contrib import admin

from .models import TrainData

# Register your models here.
admin.site.register(TrainData)
20.666667
32
0.814516
17
124
5.941176
0.647059
0
0
0
0
0
0
0
0
0
0
0
0.120968
124
5
33
24.8
0.926606
0.209677
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
69a71bf3bd2fc019376062296b1aa19a42800846
104
py
Python
colorterm/__init__.py
jalonsors/colorterm
af861ead1da772f6f461733caee867b63a88837d
[ "MIT" ]
1
2015-12-30T17:13:09.000Z
2015-12-30T17:13:09.000Z
colorterm/__init__.py
jalonsors/colorterm
af861ead1da772f6f461733caee867b63a88837d
[ "MIT" ]
null
null
null
colorterm/__init__.py
jalonsors/colorterm
af861ead1da772f6f461733caee867b63a88837d
[ "MIT" ]
1
2020-12-23T21:05:17.000Z
2020-12-23T21:05:17.000Z
from .colorterm import ColorTerm, formatter
from .table import Table

# NB: the name `colorterm` below resolves to the `.colorterm` submodule, which
# the import above binds into the package namespace; the assignment then
# rebinds the package-level name to a ColorTerm instance.
colorterm = colorterm.ColorTerm()
20.8
43
0.807692
12
104
7
0.416667
0.428571
0
0
0
0
0
0
0
0
0
0
0.125
104
4
44
26
0.923077
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
3884bd31e83ca0c616e30b0cda78c9cd644ba9b7
129
py
Python
Chapter 06/ch6_1r.py
bpbpublications/TEST-YOUR-SKILLS-IN-PYTHON-LANGUAGE
f6a4194684515495d00aa38347a725dd08f39a0c
[ "MIT" ]
null
null
null
Chapter 06/ch6_1r.py
bpbpublications/TEST-YOUR-SKILLS-IN-PYTHON-LANGUAGE
f6a4194684515495d00aa38347a725dd08f39a0c
[ "MIT" ]
null
null
null
Chapter 06/ch6_1r.py
bpbpublications/TEST-YOUR-SKILLS-IN-PYTHON-LANGUAGE
f6a4194684515495d00aa38347a725dd08f39a0c
[ "MIT" ]
null
null
null
z = 10
w = -10
while (z < 50):
    if (z > 0 and w < 0):
        print(z**2, w**3)
    z = z + 10
    w = w + 10
# expected output: 100 -1000
14.333333
27
0.364341
26
129
1.807692
0.5
0.12766
0.170213
0
0
0
0
0
0
0
0
0.291667
0.44186
129
9
28
14.333333
0.361111
0.069767
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.142857
1
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
38a716d342d37dabc2be6d9237dd3d8ab2a7939e
97
py
Python
newdust/__init__.py
eblur/newdust
7e843ae2604a844826606ea04c459694fdd5c178
[ "BSD-2-Clause" ]
4
2018-02-04T19:04:01.000Z
2022-02-09T04:11:18.000Z
newdust/__init__.py
eblur/newdust
7e843ae2604a844826606ea04c459694fdd5c178
[ "BSD-2-Clause" ]
21
2017-08-15T21:13:42.000Z
2021-12-23T20:07:24.000Z
newdust/__init__.py
eblur/newdust
7e843ae2604a844826606ea04c459694fdd5c178
[ "BSD-2-Clause" ]
1
2021-01-28T18:29:12.000Z
2021-01-28T18:29:12.000Z
from . import constants
from . import graindist
from . import scatmodels
from .grainpop import *
19.4
24
0.783505
12
97
6.333333
0.5
0.394737
0
0
0
0
0
0
0
0
0
0
0.164948
97
4
25
24.25
0.938272
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
38ac7830eed8a95e2299fbce24c1cbdd6598a315
589
py
Python
tests/lanczos/run.py
weikengchen/Virgo
73b924f33e18c017bafa3ed37da84a7b0d80ec5d
[ "Apache-2.0" ]
9
2020-06-02T04:54:08.000Z
2021-12-07T12:54:09.000Z
tests/lanczos/run.py
weikengchen/Virgo
73b924f33e18c017bafa3ed37da84a7b0d80ec5d
[ "Apache-2.0" ]
2
2021-01-07T18:34:10.000Z
2021-03-22T20:29:43.000Z
tests/lanczos/run.py
weikengchen/Virgo
73b924f33e18c017bafa3ed37da84a7b0d80ec5d
[ "Apache-2.0" ]
5
2020-06-08T09:11:37.000Z
2021-07-29T12:15:47.000Z
import os

os.system('mkdir -p LOG')
os.system('./zk_proof lanczos2_112_N=16_circuit.txt lanczos2_112_N=16_meta.txt LOG/lanczos2_112_N=16.txt')
os.system('./zk_proof lanczos2_176_N=64_circuit.txt lanczos2_176_N=64_meta.txt LOG/lanczos2_176_N=64.txt')
os.system('./zk_proof lanczos2_304_N=256_circuit.txt lanczos2_304_N=256_meta.txt LOG/lanczos2_304_N=256.txt')
os.system('./zk_proof lanczos2_560_N=1024_circuit.txt lanczos2_560_N=1024_meta.txt LOG/lanczos2_560_N=1024.txt')
os.system('./zk_proof lanczos2_1072_N=4096_circuit.txt lanczos2_1072_N=4096_meta.txt LOG/lanczos2_1072_N=4096.txt')
73.625
115
0.828523
117
589
3.786325
0.196581
0.108352
0.112867
0.1693
0.286682
0.234763
0
0
0
0
0
0.191489
0.042445
589
7
116
84.142857
0.593972
0
0
0
0
0
0.840407
0.709677
0
0
0
0
0
1
0
true
0
0.142857
0
0.142857
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
2a1e102925d13f2518d375ccaecffd9244889f3d
4,460
py
Python
models/base.py
OrangeBai/C3DLab
24c3a92706aad43fe7a4d559bba1f956f8f0bef7
[ "MIT" ]
null
null
null
models/base.py
OrangeBai/C3DLab
24c3a92706aad43fe7a4d559bba1f956f8f0bef7
[ "MIT" ]
null
null
null
models/base.py
OrangeBai/C3DLab
24c3a92706aad43fe7a4d559bba1f956f8f0bef7
[ "MIT" ]
null
null
null
from tensorflow.python.keras.engine.input_layer import Input
from tensorflow.python.keras.layers import Conv2D, TimeDistributed, Dense, Flatten, Dropout
from nets.RoiPoolingConv import RoiPoolingConv
from tensorflow.python.keras.engine.training import Model
from tensorflow.keras.optimizers import Adam, SGD
from helper import losses

#
# class C3D(ModelBase):
#     def __init__(self, config):
#         super().__init__()
#         self.num_class = config.num_cls
#         self.num_anchor = config.num_anchors
#         self.pooling_region = config.pooling_region
#         self.num_rois = config.num_roi
#         self.feature_shape = config.feature_shape
#
#         self.img_input = Input(config.img_shape)
#         self.roi_input = Input(shape=(None, 4))
#         self.feature_input = Input(self.feature_shape)
#
#         self.base_fun = config.base_net
#
#         self.num_anchors = config.num_anchors
#         self.num_cls = config.num_cls
#
#     def rpn(self, base_layers):
#         x = Conv2D(512, (3, 3), padding='same', activation='relu',
#                    kernel_initializer='normal', name='rpn_conv1')(base_layers)
#
#         x_class = Conv2D(self.num_anchors, (1, 1), activation='sigmoid', kernel_initializer='uniform',
#                          name='rpn_out_class')(x)
#         x_regr = Conv2D(self.num_anchors * 4, (1, 1), activation='linear', kernel_initializer='zero',
#                         name='rpn_out_regress')(x)
#
#         return [x_class, x_regr, base_layers]
#
#     def classifier(self, base_layers, trainable=True):
#         out_roi_pool = RoiPoolingConv(self.pooling_region, self.num_rois)([base_layers, self.roi_input])
#
#         out = TimeDistributed(Flatten(name='flatten'))(out_roi_pool)
#         out = TimeDistributed(Dense(4096, activation='relu', name='fc1', trainable=trainable))(out)
#         out = TimeDistributed(Dropout(0.5))(out)
#         out = TimeDistributed(Dense(4096, activation='relu', name='fc2', trainable=trainable))(out)
#         out = TimeDistributed(Dropout(0.5))(out)
#
#         out_class = TimeDistributed(
#             Dense(self.num_cls, activation='softmax', kernel_initializer='zero', trainable=trainable),
#             name='dense_class_{}'.format(self.num_cls))(out)
#         # note: no regression target for bg class
#         out_regr = TimeDistributed(Dense(4 * (self.num_cls - 1), activation='linear', kernel_initializer='zero',
#                                          trainable=trainable), name='dense_regress_{}'.format(self.num_cls))(out)
#
#         return [out_class, out_regr]
#
#     def train_model(self, model_weight=None, optimizers=None, lr=None):
#
#         base = self.base_fun(self.img_input)
#         rpn = self.rpn(base)
#         classifier = self.classifier(base)
#
#         if lr is None:
#             lr = 1e-4
#         if optimizers == 'SGD':
#             optimizer_rpn = SGD(lr=lr)
#             optimizer_classifier = SGD(lr=lr)
#         else:
#             optimizer_rpn = Adam(lr=lr)
#             optimizer_classifier = Adam(lr=lr)
#         model_rpn = Model(self.img_input, rpn[:2])
#         model_classifier = Model([self.img_input, self.roi_input], classifier)
#         model_all = Model([self.img_input, self.roi_input], rpn[:2] + classifier)
#
#         if model_weight is not None:
#             model_rpn.load_weights(model_weight, by_name=True)
#             model_classifier.load_weights(model_weight, by_name=True)
#             model_all.load_weights(model_weight, by_name=True)
#
#         model_rpn.compile(optimizer=optimizer_rpn,
#                           loss=[losses.rpn_loss_cls(self.num_anchors), losses.rpn_loss_reg(self.num_anchors)])
#         model_classifier.compile(optimizer=optimizer_classifier,
#                                  loss=[losses.class_loss_cls, losses.class_loss_reg(self.num_cls - 1)],
#                                  metrics={'dense_class_{}'.format(self.num_cls): 'accuracy'})
#         model_all.compile(optimizer='sgd', loss='mae')
#
#         return model_rpn, model_classifier, model_all
#
#     def test_model(self):
#         base = self.base_fun(self.img_input)
#         rpn = self.rpn(base)
#         classifier = self.classifier(base)
#
#         model_rpn = Model(self.img_input, rpn)
#         model_classifier_only = Model([self.feature_input, self.roi_input], classifier)
#         return model_rpn, model_classifier_only


if __name__ == '__main__':
    pass
44.158416
115
0.631166
539
4,460
4.961039
0.204082
0.041885
0.031414
0.022438
0.367614
0.282723
0.244577
0.133508
0.091997
0.091997
0
0.011226
0.241031
4,460
100
116
44.6
0.77873
0.876682
0
0
0
0
0.017391
0
0
0
0
0
0
1
0
true
0.125
0.75
0
0.75
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
5
2a46cc9d10a0388dbd22373d199fe3fc112c697a
38
py
Python
code/brain/story/trigger.py
OpenGenus/lama
d8c0c38549c0fed402cf66b311e74374a6ba62af
[ "MIT" ]
null
null
null
code/brain/story/trigger.py
OpenGenus/lama
d8c0c38549c0fed402cf66b311e74374a6ba62af
[ "MIT" ]
null
null
null
code/brain/story/trigger.py
OpenGenus/lama
d8c0c38549c0fed402cf66b311e74374a6ba62af
[ "MIT" ]
null
null
null
# Find the appropriate story for user.
38
38
0.789474
6
38
5
1
0
0
0
0
0
0
0
0
0
0
0
0.157895
38
1
38
38
0.9375
0.947368
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
2a520b3c66202912712eecf2fde70f1115edd6ed
37
py
Python
utils.py
corenel/pytorch-starter-kit
f09e3193d4e03e0d018f66158e3d426cb8766e22
[ "MIT" ]
2
2019-06-07T05:22:49.000Z
2020-07-22T10:11:26.000Z
utils.py
corenel/pytorch-starter-kit
f09e3193d4e03e0d018f66158e3d426cb8766e22
[ "MIT" ]
null
null
null
utils.py
corenel/pytorch-starter-kit
f09e3193d4e03e0d018f66158e3d426cb8766e22
[ "MIT" ]
null
null
null
"""Helpful functions for project."""
18.5
36
0.702703
4
37
6.5
1
0
0
0
0
0
0
0
0
0
0
0
0.108108
37
1
37
37
0.787879
0.810811
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
2a83bdf29bf73da17ac76e1333665bb7b11f230e
210
py
Python
odm2admin/apps.py
ocefpaf/ODM2-Admin
2a0d17daf4d3a50386f4f0c1e37d18da1f287c16
[ "MIT" ]
5
2016-08-10T17:20:36.000Z
2017-04-26T17:46:06.000Z
odm2admin/apps.py
ocefpaf/ODM2-Admin
2a0d17daf4d3a50386f4f0c1e37d18da1f287c16
[ "MIT" ]
85
2016-07-01T16:25:15.000Z
2017-10-03T18:06:38.000Z
odm2admin/apps.py
ocefpaf/ODM2-Admin
2a0d17daf4d3a50386f4f0c1e37d18da1f287c16
[ "MIT" ]
8
2016-07-26T16:40:24.000Z
2018-11-15T15:45:43.000Z
from django.apps import AppConfig
from django.core.management import settings


class ODM2AdminConfig(AppConfig):
    name = '{}'.format(settings.APP_NAME)
    verbose_name = '{}'.format(settings.VERBOSE_NAME)
26.25
53
0.761905
25
210
6.28
0.56
0.127389
0.229299
0
0
0
0
0
0
0
0
0.005435
0.12381
210
7
54
30
0.847826
0
0
0
0
0
0.019048
0
0
0
0
0
0
1
0
false
0
0.4
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
aa5e5120bd82c96328a67334e512440d4af5c8bc
806
py
Python
fileSafe/src/LoadDlls.py
wangzhongtian/vobTool.srcGit
255a33ce109b63d335084007b3d1f3635a2992bf
[ "Unlicense" ]
null
null
null
fileSafe/src/LoadDlls.py
wangzhongtian/vobTool.srcGit
255a33ce109b63d335084007b3d1f3635a2992bf
[ "Unlicense" ]
null
null
null
fileSafe/src/LoadDlls.py
wangzhongtian/vobTool.srcGit
255a33ce109b63d335084007b3d1f3635a2992bf
[ "Unlicense" ]
null
null
null
import clr
import sys
import System
import System.IO


def getFullNames1(dllname):
    filename = None
    filename = "./" + dllname
    if (System.IO.File.Exists(filename)):
        print("Load Dlls:", filename)
        return dllname
    for p in System.Environment.GetEnvironmentVariable("libpath").split(";"):
        filename = p + "/" + dllname
        if (System.IO.File.Exists(filename)):
            print("Load Dlls:", filename)
            return filename


def getFullNames(dllname):
    filename = None
    filename = "./" + dllname
    if (System.IO.File.Exists(filename)):
        print("Load Dlls:", filename)
        return dllname
    for p in sys.path:
        print(p)
        filename = p + "/" + dllname
        if (System.IO.File.Exists(filename)):
            print("Load Dlls:", filename)
            return filename
26.866667
75
0.621588
92
806
5.445652
0.282609
0.07984
0.11976
0.135729
0.722555
0.722555
0.722555
0.722555
0.722555
0.722555
0
0.001647
0.246898
806
30
76
26.866667
0.823723
0
0
0.666667
0
0
0.066998
0
0
0
0
0
0
1
0.074074
false
0
0.148148
0
0.37037
0.185185
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
aa791ffaf483d76317e380054bd5be53b3fd5fab
4,726
py
Python
content/views.py
farhadmpr/PyEditorial
ae81bf2cd35ff354b58a114e1c49732358b5750e
[ "CC0-1.0" ]
2
2020-08-24T09:34:35.000Z
2021-01-06T10:39:14.000Z
content/views.py
farhadmpr/PyEditorial
ae81bf2cd35ff354b58a114e1c49732358b5750e
[ "CC0-1.0" ]
null
null
null
content/views.py
farhadmpr/PyEditorial
ae81bf2cd35ff354b58a114e1c49732358b5750e
[ "CC0-1.0" ]
null
null
null
from django.views import generic
from django.db.models import Q
from content import models as ContentModels
from constance import config
from .forms import SearchForm


class Base(generic.ListView):
    template_name = 'base.html'

    def get_context_data(self, *, object_list=None, **kwargs):
        context = super().get_context_data(**kwargs)
        context['blogs_categories'] = ContentModels.BlogCategory.objects.all()
        context['videocasts_categories'] = ContentModels.VideocastCategory.objects.all()
        context['podcast_categories'] = ContentModels.PodcastCategory.objects.all()
        context['podcasts'] = ContentModels.Podcast.objects.order_by('-pk').filter(publish=True)[:2]
        context['config'] = config
        return context

    def get_queryset(self):
        pass


class Index(Base):
    template_name = 'index.html'

    def get_context_data(self, *, object_list=None, **kwargs):
        context = super().get_context_data(**kwargs)
        context['last_blog'] = ContentModels.Blog.objects.order_by('-pk').filter(publish=True)[:1]
        context['skills'] = ContentModels.Skill.objects.all()
        context['blogs'] = ContentModels.Blog.objects.order_by('-pk').filter(publish=True)[1:5]
        context['videocasts'] = ContentModels.Videocast.objects.order_by('-pk').filter(publish=True)[:4]
        return context


class Search(Base):
    template_name = 'search.html'

    def get_context_data(self, *, object_list=None, **kwargs):
        context = super().get_context_data(**kwargs)
        form = SearchForm(self.request.GET)
        if form.is_valid():
            query = form.cleaned_data['query']
            context['blogs'] = ContentModels.Blog.objects.order_by('-pk').filter(Q(title__icontains=query) | Q(content__icontains=query))
            context['videocasts'] = ContentModels.Videocast.objects.order_by('-pk').filter(Q(title__icontains=query) | Q(content__icontains=query))
            context['podcasts'] = ContentModels.Podcast.objects.order_by('-pk').filter(Q(title__icontains=query) | Q(content__icontains=query))
        return context


class Blog(Base):
    template_name = 'archive.html'

    def get_context_data(self, *, object_list=None, **kwargs):
        context = super().get_context_data(**kwargs)
        context['archives'] = ContentModels.Blog.objects.all()
        return context


class BlogArchiveByCategoryPK(Base):
    template_name = 'archive.html'

    def get_context_data(self, *, object_list=None, **kwargs):
        context = super().get_context_data(**kwargs)
        context['archives'] = ContentModels.Blog.objects.filter(category=self.kwargs['pk'])
        return context


class BlogSingle(Base):
    template_name = 'single.html'

    def get_context_data(self, *, object_list=None, **kwargs):
        context = super().get_context_data(**kwargs)
        context['single_content'] = ContentModels.Blog.objects.filter(slug=self.kwargs['slug'])
        return context


class Videocast(Base):
    template_name = 'archive.html'

    def get_context_data(self, *, object_list=None, **kwargs):
        context = super().get_context_data(**kwargs)
        context['archives'] = ContentModels.Videocast.objects.all()
        return context


class VideocastArchiveByCategoryPK(Base):
    template_name = 'archive.html'

    def get_context_data(self, *, object_list=None, **kwargs):
        context = super().get_context_data(**kwargs)
        context['archives'] = ContentModels.Videocast.objects.filter(category=self.kwargs['pk'])
        return context


class VideocastSingle(Base):
    template_name = 'single.html'

    def get_context_data(self, *, object_list=None, **kwargs):
        context = super().get_context_data(**kwargs)
        context['single_content'] = ContentModels.Videocast.objects.filter(slug=self.kwargs['slug'])
        return context


class Podcast(Base):
    template_name = 'archive.html'

    def get_context_data(self, *, object_list=None, **kwargs):
        context = super().get_context_data(**kwargs)
        context['archives'] = ContentModels.Podcast.objects.all()
        return context


class PodArchiveByCategoryPK(Base):
    template_name = 'archive.html'

    def get_context_data(self, *, object_list=None, **kwargs):
        context = super().get_context_data(**kwargs)
        context['archives'] = ContentModels.Podcast.objects.filter(category=self.kwargs['pk'])
        return context


class PodSingle(Base):
    template_name = 'single.html'

    def get_context_data(self, *, object_list=None, **kwargs):
        context = super().get_context_data(**kwargs)
        context['single_content'] = ContentModels.Podcast.objects.filter(slug=self.kwargs['slug'])
        return context
35.533835
147
0.688108
545
4,726
5.781651
0.148624
0.076166
0.106633
0.064741
0.754364
0.73437
0.73437
0.727388
0.713424
0.57093
0
0.001284
0.175836
4,726
132
148
35.80303
0.807702
0
0
0.494505
0
0
0.082734
0.004444
0
0
0
0
0
1
0.142857
false
0.010989
0.054945
0
0.593407
0
0
0
0
null
0
0
0
0
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
5
aab8ce71d41c1382037da88b49fac08b459be554
9,644
py
Python
cotidia/admin/templatetags/admin_search_dashboard_tags.py
hayden5-mwac/cotidia-admin
cfdd9d2677dd1098019fafbec8a6d07e1a42f9eb
[ "BSD-3-Clause" ]
2
2019-07-20T14:43:21.000Z
2021-04-30T15:43:49.000Z
cotidia/admin/templatetags/admin_search_dashboard_tags.py
hayden5-mwac/cotidia-admin
cfdd9d2677dd1098019fafbec8a6d07e1a42f9eb
[ "BSD-3-Clause" ]
16
2020-07-17T04:26:20.000Z
2022-03-23T14:47:31.000Z
cotidia/admin/templatetags/admin_search_dashboard_tags.py
hayden5-mwac/cotidia-admin
cfdd9d2677dd1098019fafbec8a6d07e1a42f9eb
[ "BSD-3-Clause" ]
1
2020-05-18T20:56:45.000Z
2020-05-18T20:56:45.000Z
import json

from django import template
from django.core.serializers.json import DjangoJSONEncoder
from django.contrib.contenttypes.models import ContentType

from cotidia.admin.utils import get_model_serializer_class

register = template.Library()


# @register.inclusion_tag("admin/partials/dynamic_list_config.html", takes_context=True)
# def render_search_dashboard_config(
#     context,
#     app_label,
#     model_name,
#     auth_token,
#     serializer=None,
#     endpoint=None,
#     default_columns=None,
#     default_order_by=None,
#     default_filters=None,
#     batch_actions=None,
# ):
#     context.update({"app_label": app_label, "model_name": model_name})
#     # Get model class and its corresponding serializer class.
#     model = ContentType.objects.get(app_label=app_label, model=model_name).model_class()
#     if not serializer:
#         # Double call here because `serializer()` gets the serializer class,
#         # not an instance of it. So, we call `serializer()` to get the class
#         # and then call *that* to instantiate it.
#         # TODO: Refactor this so that the method is `get_serializer_class` or
#         # something like that.
#         serializer = model.SearchProvider.dynamic_list_serializer()()
#     # Calculate API endpoint
#     if not endpoint:
#         endpoint = serializer.get_endpoint()
#     # Calculate `default_columns`
#     if not default_columns:
#         default_columns = serializer.get_default_columns()
#     # Calculate default_oder_by
#     if not default_order_by:
#         default_order_by = serializer.get_option("default_order_by")
#     field_representation = serializer.get_field_representation()
#     column_representation = {}
#     filter_representation = {}
#     for name, field in field_representation.items():
#         column_representation[name] = field
#         if field.get("filter") is not None:
#             filter_representation[name] = {}
#             filter_representation[name]["filter"] = field["filter"]
#             column_representation[name]["filter"] = name
#             filter_representation[name]["label"] = field["label"]
#             if field.get("configuration"):
#                 filter_representation[name]["configuration"] = field["configuration"]
#             query_param = filter_representation.get("queryParameter") or name
#             filter_representation[name]["queryParameter"] = query_param
#     extra_filters = serializer.get_option("extra_filters")
#     if extra_filters:
#         for name, field in serializer.get_option("extra_filters").items():
#             filter_representation[name] = field
#     context["verbose_name"] = model._meta.verbose_name
#     context["verbose_name_plural"] = model._meta.verbose_name_plural
#     context["columns"] = serializer.get_columns()
#     context["default_columns"] = default_columns
#     context["default_order_by"] = default_order_by
#     context["default_filters"] = default_filters
#     context["endpoint"] = endpoint
#     context["auth_token"] = auth_token
#     context["title"] = serializer.get_option(
#         "title", default=model._meta.verbose_name_plural.title()
#     )
#     context["field_representation"] = column_representation
#     context["filters"] = filter_representation
#     # Get some config values
#     context["primary_color"] = serializer.get_option("primary_color")
#     context["date_format"] = serializer.get_option("date_format")
#     context["datetime_format"] = serializer.get_option("datetime_format")
#     context["week_day_start"] = serializer.get_option("week_day_start")
#     context["table_striped"] = serializer.get_option("table_striped")
#     context["columns_configurable"] = serializer.get_option("columns_configurable")
#     context["search_visible"] = True
#     # Stuff passed straight from the serializer.
#     context["list_handling"] = serializer.get_option("list_handling")
#     context["extra_filters"] = serializer.get_option("extra_filters")
#     context["toolbar_filters"] = serializer.get_option("toolbar_filters")
#     context["sidebar_filters"] = serializer.get_option("sidebar_filters")
#     context["global_actions"] = serializer.get_option("global_actions")
#     context["categorise_by"] = serializer.get_option("categorise_by")
#     context["list_fields"] = serializer.get_option("list_fields")
#     context["sidebar_starts_shown"] = serializer.get_option("sidebar_starts_shown")
#     context["ignore_stored_config"] = serializer.get_option("ignore_stored_config")
#     context["detail_mode"] = serializer.get_detail_mode()
#     context["detail_url_field"] = serializer.get_detail_url_field()
#     context["detail_component"] = serializer.get_detail_url_field()
#     context[
#         "detail_component_props_template"
#     ] = serializer.get_component_props_template()
#     # Batch actions can be overridden by the caller, so allow for that.
#     if batch_actions:
#         context["batch_actions"] = batch_actions
#     else:
#         context["batch_actions"] = serializer.get_option("batch_actions")
#     return context


@register.inclusion_tag("admin/partials/dynamic_list_config.html", takes_context=True)
def render_dynamic_list_config(
    context,
    app_label,
    model_name,
    auth_token,
    serializer=None,
    endpoint=None,
    default_columns=None,
    default_order_by=None,
    default_filters=None,
    batch_actions=None,
):
    context.update({"app_label": app_label, "model_name": model_name})

    # Get model class and its corresponding serializer class.
    model = ContentType.objects.get(app_label=app_label, model=model_name).model_class()

    if not serializer:
        # Double call here because `serializer()` gets the serializer class,
        # not an instance of it. So, we call `serializer()` to get the class
        # and then call *that* to instantiate it.
        # TODO: Refactor this so that the method is `get_serializer_class` or
        # something like that.
        serializer = get_model_serializer_class(model)()

    # Calculate API endpoint
    if not endpoint:
        endpoint = serializer.get_endpoint()

    # Calculate `default_columns`
    if not default_columns:
        default_columns = serializer.get_default_columns()

    # Calculate default_oder_by
    if not default_order_by:
        default_order_by = serializer.get_option("default_order_by")

    context["verbose_name"] = model._meta.verbose_name
    context["verbose_name_plural"] = model._meta.verbose_name_plural
    context["columns"] = serializer.get_columns()
    context["filters"] = serializer.get_filter_representation()
    context["default_columns"] = default_columns
    context["default_order_by"] = default_order_by
    context["default_filters"] = default_filters
    context["endpoint"] = endpoint
    context["auth_token"] = auth_token
    context["title"] = serializer.get_option(
        "title", default=model._meta.verbose_name_plural.title()
    )
    context["field_representation"] = serializer.get_field_representation()

    # Get some config values
    context["primary_color"] = serializer.get_option("primary_color")
    context["date_format"] = serializer.get_option("date_format")
    context["datetime_format"] = serializer.get_option("datetime_format")
    context["week_day_start"] = serializer.get_option("week_day_start")
    context["table_striped"] = serializer.get_option("table_striped")
    context["columns_configurable"] = serializer.get_option("columns_configurable")
    context["search_visible"] = serializer.get_option("search_visible", True)
    context["filter_tag_bar_visible"] = serializer.get_option(
        "filter_tag_bar_visible", False
    )

    # Stuff passed straight from the serializer.
    context["list_handling"] = serializer.get_option("list_handling")
    context["extra_filters"] = serializer.get_option("extra_filters")
    context["toolbar_filters"] = serializer.get_option("toolbar_filters")
    context["sidebar_filters"] = serializer.get_option("sidebar_filters")
    context["global_actions"] = serializer.get_option("global_actions")
    context["categorise_by"] = serializer.get_option("categorise_by")
    context["list_fields"] = serializer.get_option("list_fields")
    context["sidebar_starts_shown"] = serializer.get_option("sidebar_starts_shown")
    context["ignore_stored_config"] = serializer.get_option("ignore_stored_config")
    context["filter_suggest_configuration"] = serializer.get_option(
        "filter_suggest_configuration"
    )
    context["allowed_results_modes"] = serializer.get_option("allowed_results_modes")
    context["default_results_mode"] = serializer.get_option("default_results_mode")
    context["map_configuration"] = serializer.get_option("map_configuration")
    context["default_per_page"] = serializer.get_option("default_per_page")
    context["detail_mode"] = serializer.get_detail_mode()
    context["detail_url_field"] = serializer.get_detail_url_field()
    context["detail_modal_component"] = serializer.get_detail_modal_component()
    context[
        "detail_component_props_template"
    ] = serializer.get_component_props_template()
    context["detail_modal_configuration"] = serializer.get_detail_modal_configuration()

    # Batch actions can be overridden by the caller, so allow for that.
    if batch_actions:
        context["batch_actions"] = batch_actions
    else:
        context["batch_actions"] = serializer.get_option("batch_actions")

    return context


@register.filter(name="json")
def json_dumps(data):
    return json.dumps(data, cls=DjangoJSONEncoder)
42.672566
90
0.716197
1,100
9,644
5.955455
0.139091
0.127004
0.130514
0.027782
0.771333
0.766601
0.756068
0.749504
0.744009
0.744009
0
0
0.170676
9,644
225
91
42.862222
0.81908
0.538988
0
0
0
0
0.249076
0.060074
0
0
0
0.004444
0
1
0.024096
false
0
0.060241
0.012048
0.108434
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
aac04d1acc383386dcf33fc340658c1121bc7745
696
py
Python
tests/work_with_gdscript/pysubnode.py
blueflamegames/godot-python
b4ae8ecda7f698e1d64eb0a9acc133094fb40843
[ "CC-BY-3.0" ]
1,323
2016-11-17T21:28:18.000Z
2022-03-31T17:42:37.000Z
tests/work_with_gdscript/pysubnode.py
blueflamegames/godot-python
b4ae8ecda7f698e1d64eb0a9acc133094fb40843
[ "CC-BY-3.0" ]
301
2017-01-02T17:49:13.000Z
2022-03-14T13:17:42.000Z
tests/work_with_gdscript/pysubnode.py
blueflamegames/godot-python
b4ae8ecda7f698e1d64eb0a9acc133094fb40843
[ "CC-BY-3.0" ]
131
2017-02-09T08:05:03.000Z
2022-03-15T06:44:34.000Z
from godot import exposed, export
from pynode import PyNode


@exposed
class PySubNode(PyNode):

    _sub_ready_called = False
    _overloaded_by_child_prop_value = None

    def _ready(self):
        super()._ready()
        self._sub_ready_called = True

    def is_sub_ready_called(self):
        return self._sub_ready_called

    def overloaded_by_child_meth(self, attr):
        return f"sub:{attr}"

    @export(str, default="default")
    @property
    def overloaded_by_child_prop(self):
        return self._overloaded_by_child_prop_value

    @overloaded_by_child_prop.setter
    def overloaded_by_child_prop(self, value):
        self._overloaded_by_child_prop_value = f"sub:{value}"
24
61
0.715517
94
696
4.861702
0.329787
0.183807
0.260394
0.275711
0.310722
0.253829
0
0
0
0
0
0
0.20546
696
28
62
24.857143
0.826401
0
0
0
0
0
0.04023
0
0
0
0
0
0
1
0.25
false
0
0.1
0.15
0.65
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
aae3ebb7805c174284fc1df8a0f6d3cf5cc23671
390
py
Python
continuous_integration/display_versions.py
jeremiedbb/threadpoolctl
e8c9dc6ebd5565a75284aed31beec2e439d4f80e
[ "BSD-3-Clause" ]
null
null
null
continuous_integration/display_versions.py
jeremiedbb/threadpoolctl
e8c9dc6ebd5565a75284aed31beec2e439d4f80e
[ "BSD-3-Clause" ]
null
null
null
continuous_integration/display_versions.py
jeremiedbb/threadpoolctl
e8c9dc6ebd5565a75284aed31beec2e439d4f80e
[ "BSD-3-Clause" ]
null
null
null
from threadpoolctl import threadpool_info
from pprint import pprint

try:
    import numpy as np
    print("numpy", np.__version__)
except ImportError:
    pass

try:
    import scipy
    import scipy.linalg
    print("scipy", scipy.__version__)
except ImportError:
    pass

try:
    from tests._openmp_test_helper import *  # noqa
except ImportError:
    pass

pprint(threadpool_info())
16.25
51
0.725641
48
390
5.625
0.458333
0.188889
0.233333
0.207407
0.22963
0
0
0
0
0
0
0
0.207692
390
23
52
16.956522
0.873786
0.010256
0
0.5
0
0
0.026042
0
0
0
0
0
0
1
0
true
0.166667
0.5
0
0.5
0.222222
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
0
0
0
5
2a9fb03ffb5b363a77d9500e616f057178b18dcc
302
py
Python
python/datagraph/neo/svc/__init__.py
jiportilla/ontology
8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40
[ "MIT" ]
null
null
null
python/datagraph/neo/svc/__init__.py
jiportilla/ontology
8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40
[ "MIT" ]
null
null
null
python/datagraph/neo/svc/__init__.py
jiportilla/ontology
8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40
[ "MIT" ]
null
null
null
from .find_common_relationships import FindCommonRelationships
from .generate_similarity_metric import GenerateSimilarityMetric
from .generate_similarity_triples import GenerateSimilarityTriples
from .initialize_neo_graph import InitializeNeoGraph
from .transform_owl_to_json import TransformOwltoJson
50.333333
66
0.917219
31
302
8.580645
0.677419
0.090226
0.165414
0
0
0
0
0
0
0
0
0
0.066225
302
5
67
60.4
0.943262
0
0
0
1
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
2ac7b97776139d8b20449a0811832ef7927d0efc
263
gyp
Python
test_FrozenNode/fileTypes/binding.gyp
sefcom/FrozenNode-nodeRequireResolver
f307505aed7ff340f570e28c1694969dfca9eed5
[ "Apache-2.0" ]
null
null
null
test_FrozenNode/fileTypes/binding.gyp
sefcom/FrozenNode-nodeRequireResolver
f307505aed7ff340f570e28c1694969dfca9eed5
[ "Apache-2.0" ]
null
null
null
test_FrozenNode/fileTypes/binding.gyp
sefcom/FrozenNode-nodeRequireResolver
f307505aed7ff340f570e28c1694969dfca9eed5
[ "Apache-2.0" ]
null
null
null
{ "targets": [ { "target_name": "simpleTest", "sources": [ "simpleTest.cc" ] }, { "target_name": "simpleTest2", "sources": [ "simpleTest.cc" ] }, { "target_name": "simpleTest3", "sources": [ "simpleTest.cc" ] } ] }
16.4375
36
0.490494
19
263
6.631579
0.421053
0.238095
0.452381
0.396825
0.460317
0
0
0
0
0
0
0.010753
0.292776
263
16
37
16.4375
0.666667
0
0
0.1875
0
0
0.5
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
2acc7cd9ee1f35977a43399a913af635efdc5146
9,840
py
Python
src/waldur_mastermind/marketplace_azure/tests/test_order_item.py
opennode/nodeconductor-assembly-waldur
cad9966389dc9b52b13d2301940c99cf4b243900
[ "MIT" ]
2
2017-01-20T15:26:25.000Z
2017-08-03T04:38:08.000Z
src/waldur_mastermind/marketplace_azure/tests/test_order_item.py
opennode/nodeconductor-assembly-waldur
cad9966389dc9b52b13d2301940c99cf4b243900
[ "MIT" ]
null
null
null
src/waldur_mastermind/marketplace_azure/tests/test_order_item.py
opennode/nodeconductor-assembly-waldur
cad9966389dc9b52b13d2301940c99cf4b243900
[ "MIT" ]
null
null
null
from django.core.exceptions import ObjectDoesNotExist
from rest_framework import test

from waldur_azure import models as azure_models
from waldur_azure.tests import factories as azure_factories
from waldur_azure.tests import fixtures as azure_fixtures
from waldur_core.core import utils as core_utils
from waldur_mastermind.marketplace import models as marketplace_models
from waldur_mastermind.marketplace import tasks as marketplace_tasks
from waldur_mastermind.marketplace.tests import factories as marketplace_factories
from waldur_mastermind.marketplace_azure import SQL_SERVER_TYPE, VIRTUAL_MACHINE_TYPE


class VirtualMachineCreateTest(test.APITransactionTestCase):
    def test_virtual_machine_is_created_when_order_item_is_processed(self):
        order_item = self.trigger_virtual_machine_creation()
        self.assertEqual(
            order_item.state, marketplace_models.OrderItem.States.EXECUTING
        )
        self.assertTrue(azure_models.VirtualMachine.objects.exists())

    def test_request_payload_is_validated(self):
        order_item = self.trigger_virtual_machine_creation(
            name='Name should not contain spaces'
        )
        self.assertEqual(order_item.state, marketplace_models.OrderItem.States.ERRED)

    def test_virtual_machine_state_is_synchronized(self):
        order_item = self.trigger_virtual_machine_creation()

        virtual_machine = order_item.resource.scope
        virtual_machine.begin_creating()
        virtual_machine.save()

        virtual_machine.set_ok()
        virtual_machine.save()

        order_item.refresh_from_db()
        self.assertEqual(order_item.state, order_item.States.DONE)

        order_item.resource.refresh_from_db()
        self.assertEqual(
            order_item.resource.state, marketplace_models.Resource.States.OK
        )

        order_item.order.refresh_from_db()
        self.assertEqual(order_item.order.state, marketplace_models.Order.States.DONE)

    def trigger_virtual_machine_creation(self, **kwargs):
        fixture = azure_fixtures.AzureFixture()
        service_settings = fixture.settings

        azure_models.SizeAvailabilityZone.objects.create(
            size=fixture.size, location=fixture.location, zone=1
        )

        attributes = {
            'size': azure_factories.SizeFactory.get_url(fixture.size),
            'image': azure_factories.ImageFactory.get_url(fixture.image),
            'name': 'virtual-machine',
            'location': azure_factories.LocationFactory.get_url(fixture.location),
        }
        attributes.update(kwargs)

        offering = marketplace_factories.OfferingFactory(
            type=VIRTUAL_MACHINE_TYPE, scope=service_settings
        )

        order = marketplace_factories.OrderFactory(
            project=fixture.project,
            state=marketplace_models.Order.States.EXECUTING,
        )

        order_item = marketplace_factories.OrderItemFactory(
            offering=offering,
            attributes=attributes,
            order=order,
        )

        serialized_order = core_utils.serialize_instance(order_item.order)
        serialized_user = core_utils.serialize_instance(fixture.staff)
        marketplace_tasks.process_order(serialized_order, serialized_user)

        order_item.refresh_from_db()
        return order_item


class VirtualMachineDeleteTest(test.APITransactionTestCase):
    def setUp(self):
        self.fixture = azure_fixtures.AzureFixture()
        self.virtual_machine = self.fixture.virtual_machine
        self.offering = marketplace_factories.OfferingFactory(type=VIRTUAL_MACHINE_TYPE)
        self.resource = marketplace_factories.ResourceFactory(
            scope=self.virtual_machine, offering=self.offering
        )
        self.order = marketplace_factories.OrderFactory(
            project=self.fixture.project,
            state=marketplace_models.Order.States.EXECUTING,
        )
        self.order_item = marketplace_factories.OrderItemFactory(
            resource=self.resource,
            type=marketplace_models.RequestTypeMixin.Types.TERMINATE,
        )

    def test_deletion_is_scheduled(self):
        self.trigger_deletion()
        self.assertEqual(
            self.order_item.state, marketplace_models.OrderItem.States.EXECUTING
        )
        self.assertEqual(
            self.resource.state, marketplace_models.Resource.States.TERMINATING
        )
        self.assertEqual(
            self.virtual_machine.state,
            azure_models.VirtualMachine.States.DELETION_SCHEDULED,
        )

    def test_deletion_is_completed(self):
        self.trigger_deletion()
        self.virtual_machine.delete()

        self.order_item.refresh_from_db()
        self.resource.refresh_from_db()

        self.assertEqual(
            self.order_item.state, marketplace_models.OrderItem.States.DONE
        )
        self.assertEqual(
            self.resource.state, marketplace_models.Resource.States.TERMINATED
        )
        self.assertRaises(ObjectDoesNotExist, self.virtual_machine.refresh_from_db)

    def trigger_deletion(self):
        serialized_order = core_utils.serialize_instance(self.order_item.order)
        serialized_user = core_utils.serialize_instance(self.fixture.staff)
        marketplace_tasks.process_order(serialized_order, serialized_user)

        self.order_item.refresh_from_db()
        self.resource.refresh_from_db()
        self.virtual_machine.refresh_from_db()


class SQLServerCreateTest(test.APITransactionTestCase):
    def test_sql_server_is_created_when_order_item_is_processed(self):
        order_item = self.trigger_resource_creation()
        self.assertEqual(
            order_item.state, marketplace_models.OrderItem.States.EXECUTING
        )
        self.assertTrue(azure_models.SQLServer.objects.exists())

    def test_request_payload_is_validated(self):
        order_item = self.trigger_resource_creation(
            name='Name should not contain spaces'
        )
        self.assertEqual(order_item.state, marketplace_models.OrderItem.States.ERRED)

    def test_sql_server_state_is_synchronized(self):
        order_item = self.trigger_resource_creation()

        sql_server = order_item.resource.scope
        sql_server.begin_creating()
        sql_server.save()

        sql_server.set_ok()
        sql_server.save()

        order_item.refresh_from_db()
        self.assertEqual(order_item.state, order_item.States.DONE)

        order_item.resource.refresh_from_db()
        self.assertEqual(
            order_item.resource.state, marketplace_models.Resource.States.OK
        )

        order_item.order.refresh_from_db()
        self.assertEqual(order_item.order.state, marketplace_models.Order.States.DONE)

    def trigger_resource_creation(self, **kwargs):
        fixture = azure_fixtures.AzureFixture()
        service_settings = fixture.settings

        attributes = {
            'name': 'database-server',
            'location': azure_factories.LocationFactory.get_url(),
        }
        attributes.update(kwargs)

        offering = marketplace_factories.OfferingFactory(
            type=SQL_SERVER_TYPE, scope=service_settings
        )

        order = marketplace_factories.OrderFactory(
            project=fixture.project,
            state=marketplace_models.Order.States.EXECUTING,
        )

        order_item = marketplace_factories.OrderItemFactory(
            offering=offering,
            attributes=attributes,
            order=order,
        )

        serialized_order = core_utils.serialize_instance(order_item.order)
        serialized_user = core_utils.serialize_instance(fixture.staff)
        marketplace_tasks.process_order(serialized_order, serialized_user)

        order_item.refresh_from_db()
        return order_item


class SQLServerDeleteTest(test.APITransactionTestCase):
    def setUp(self):
        self.fixture = azure_fixtures.AzureFixture()
        self.sql_server = self.fixture.sql_server
        self.offering = marketplace_factories.OfferingFactory(type=SQL_SERVER_TYPE)
        self.resource = marketplace_factories.ResourceFactory(
            scope=self.sql_server, offering=self.offering
        )
        self.order = marketplace_factories.OrderFactory(
            project=self.fixture.project,
            state=marketplace_models.Order.States.EXECUTING,
        )
        self.order_item = marketplace_factories.OrderItemFactory(
            resource=self.resource,
            type=marketplace_models.RequestTypeMixin.Types.TERMINATE,
        )

    def test_deletion_is_scheduled(self):
        self.trigger_deletion()
        self.assertEqual(
            self.order_item.state, marketplace_models.OrderItem.States.EXECUTING
        )
        self.assertEqual(
            self.resource.state, marketplace_models.Resource.States.TERMINATING
        )
        self.assertEqual(
            self.sql_server.state, azure_models.VirtualMachine.States.DELETION_SCHEDULED
        )

    def test_deletion_is_completed(self):
        self.trigger_deletion()
        self.sql_server.delete()

        self.order_item.refresh_from_db()
        self.resource.refresh_from_db()

        self.assertEqual(
            self.order_item.state, marketplace_models.OrderItem.States.DONE
        )
        self.assertEqual(
            self.resource.state, marketplace_models.Resource.States.TERMINATED
        )
        self.assertRaises(ObjectDoesNotExist, self.sql_server.refresh_from_db)

    def trigger_deletion(self):
        serialized_order = core_utils.serialize_instance(self.order_item.order)
        serialized_user = core_utils.serialize_instance(self.fixture.staff)
        marketplace_tasks.process_order(serialized_order, serialized_user)

        self.order_item.refresh_from_db()
        self.resource.refresh_from_db()
        self.sql_server.refresh_from_db()
37.992278
88
0.705996
1,039
9,840
6.382098
0.110683
0.065149
0.066355
0.035892
0.814658
0.793545
0.768512
0.763686
0.683155
0.683155
0
0.00013
0.219106
9,840
258
89
38.139535
0.862832
0
0
0.59434
0
0
0.0125
0
0
0
0
0
0.113208
1
0.075472
false
0
0.04717
0
0.150943
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
2acfae2eb073ce66613d3772c688b271c376e112
513
py
Python
sdk/kamonohashi/op/rest/api/__init__.py
sinpcw/kamonohashi
fb779c69a7746c3a100394f82001d8a914355861
[ "Apache-2.0" ]
null
null
null
sdk/kamonohashi/op/rest/api/__init__.py
sinpcw/kamonohashi
fb779c69a7746c3a100394f82001d8a914355861
[ "Apache-2.0" ]
null
null
null
sdk/kamonohashi/op/rest/api/__init__.py
sinpcw/kamonohashi
fb779c69a7746c3a100394f82001d8a914355861
[ "Apache-2.0" ]
null
null
null
from __future__ import absolute_import

# flake8: noqa

# import apis into api package
from kamonohashi.op.rest.api.account_api import AccountApi
from kamonohashi.op.rest.api.data_api import DataApi
from kamonohashi.op.rest.api.data_set_api import DataSetApi
from kamonohashi.op.rest.api.inference_api import InferenceApi
from kamonohashi.op.rest.api.preprocessing_api import PreprocessingApi
from kamonohashi.op.rest.api.storage_api import StorageApi
from kamonohashi.op.rest.api.training_api import TrainingApi
39.461538
70
0.85575
76
513
5.605263
0.355263
0.246479
0.279343
0.34507
0.413146
0.131455
0
0
0
0
0
0.002128
0.083821
513
12
71
42.75
0.904255
0.079922
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
2ae30030df8644188012aac72115881801f9b08a
927
py
Python
tests/test_splitter.py
gaqzi/radish
42cbface3bc181d3fa62afbb15f7a6f5433d4509
[ "Beerware" ]
null
null
null
tests/test_splitter.py
gaqzi/radish
42cbface3bc181d3fa62afbb15f7a6f5433d4509
[ "Beerware" ]
1
2016-09-18T16:00:10.000Z
2016-09-18T16:00:10.000Z
tests/test_splitter.py
gaqzi/radish
42cbface3bc181d3fa62afbb15f7a6f5433d4509
[ "Beerware" ]
null
null
null
from radish import splitter


class TestSplit(object):
    def test_splits_one_into_one_piece(self):
        assert splitter.split(['1', '2', '3']) == ['1', '2', '3']

    def test_splits_into_jobs_pieces_consistently(self):
        assert splitter.split(['1', '2', '3'], splits=2) == [['1', '3'], ['2']]

    def test_returns_the_index_specified_by_job_for_a_split(self):
        assert splitter.split(['1', '2', '3'], splits=2, index=1) == ['2']

    def test_splits_empty_splittables(self):
        assert splitter.split([], splits=2) == [[], []]
        assert splitter.split([], splits=2, index=1) == []


class TestArgumentConversion(object):
    def test_jobs_is_converted_to_integer(self):
        assert splitter.split(['1', '2', '3'], splits='2') == [['1', '3'], ['2']]

    def test_index_is_converted_to_integer(self):
        assert splitter.split(['1', '2', '3'], splits=2, index='1') == ['2']
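The assertions above pin down a round-robin contract. A hypothetical reimplementation that satisfies every test in this file (not the package's actual source) could look like:

def split(splittables, splits=1, index=None):
    # Accept string arguments, as TestArgumentConversion requires.
    splits = int(splits)
    if splits == 1:
        return list(splittables)  # one split: return the items themselves
    # Round-robin: bucket i gets items i, i+splits, i+2*splits, ...
    buckets = [list(splittables[i::splits]) for i in range(splits)]
    if index is not None:
        return buckets[int(index)]
    return buckets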
38.625
85
0.599784
125
927
4.192
0.296
0.030534
0.253817
0.263359
0.541985
0.442748
0.442748
0.39313
0.39313
0.39313
0
0.046917
0.195254
927
23
86
40.304348
0.655496
0
0
0
0
0
0.030205
0
0
0
0
0
0.4375
1
0.375
false
0
0.0625
0
0.5625
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
0
0
0
0
1
0
0
5
2ae927a5f0e7c9bd7e61141d8dbd16e4a31dcd9b
122
py
Python
src/env/exmaple.py
jarbus/hackRPI2019
cf154ca1844926ab45241f7d3315b198bdfa6d76
[ "MIT" ]
null
null
null
src/env/exmaple.py
jarbus/hackRPI2019
cf154ca1844926ab45241f7d3315b198bdfa6d76
[ "MIT" ]
null
null
null
src/env/exmaple.py
jarbus/hackRPI2019
cf154ca1844926ab45241f7d3315b198bdfa6d76
[ "MIT" ]
null
null
null
""" Example on how to set up the recovery environment. """ import gym import env.recovery dir(gym.make("revcovery-v0"))
17.428571
58
0.721311
19
122
4.631579
0.842105
0
0
0
0
0
0
0
0
0
0
0.009615
0.147541
122
6
59
20.333333
0.836538
0.409836
0
0
0
0
0.1875
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
2af74339063f2b5258e4257e434ec26b295f5977
23
py
Python
python3.7/app/prin.py
brdhunga/uvicorn-gunicorn-fastapi-docker
a7ff8f5fd3e5b9a8a76ce74a7b1c03071a7dc8b1
[ "MIT" ]
null
null
null
python3.7/app/prin.py
brdhunga/uvicorn-gunicorn-fastapi-docker
a7ff8f5fd3e5b9a8a76ce74a7b1c03071a7dc8b1
[ "MIT" ]
null
null
null
python3.7/app/prin.py
brdhunga/uvicorn-gunicorn-fastapi-docker
a7ff8f5fd3e5b9a8a76ce74a7b1c03071a7dc8b1
[ "MIT" ]
null
null
null
print(' i the file yo')
23
23
0.652174
5
23
3
1
0
0
0
0
0
0
0
0
0
0
0
0.173913
23
1
23
23
0.789474
0
0
0
0
0
0.583333
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
6316da447557b2425a0c9444ccb7a26e0baaa970
60
py
Python
Arknights/__init__.py
boatliuz/ArknightsAutoHelper
2dd260b59b4734301dabb1b6793bba05a716517c
[ "MIT" ]
null
null
null
Arknights/__init__.py
boatliuz/ArknightsAutoHelper
2dd260b59b4734301dabb1b6793bba05a716517c
[ "MIT" ]
null
null
null
Arknights/__init__.py
boatliuz/ArknightsAutoHelper
2dd260b59b4734301dabb1b6793bba05a716517c
[ "MIT" ]
null
null
null
# coding: utf-8
from Arknights.base import ArknightsHelper
15
42
0.8
8
60
6
1
0
0
0
0
0
0
0
0
0
0
0.019231
0.133333
60
3
43
20
0.903846
0.216667
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
63291e262b4d8c1167fc37640dc2925ce15508db
6,460
py
Python
addons/point_of_sale/__manifest__.py
SHIVJITH/Odoo_Machine_Test
310497a9872db7844b521e6dab5f7a9f61d365a4
[ "Apache-2.0" ]
null
null
null
addons/point_of_sale/__manifest__.py
SHIVJITH/Odoo_Machine_Test
310497a9872db7844b521e6dab5f7a9f61d365a4
[ "Apache-2.0" ]
null
null
null
addons/point_of_sale/__manifest__.py
SHIVJITH/Odoo_Machine_Test
310497a9872db7844b521e6dab5f7a9f61d365a4
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
    'name': 'Point of Sale',
    'version': '1.0.1',
    'category': 'Sales/Point of Sale',
    'sequence': 40,
    'summary': 'User-friendly PoS interface for shops and restaurants',
    'description': "",
    'depends': ['stock_account', 'barcodes', 'web_editor', 'digest'],
    'data': [
        'security/point_of_sale_security.xml',
        'security/ir.model.access.csv',
        'data/default_barcode_patterns.xml',
        'data/digest_data.xml',
        'wizard/pos_box.xml',
        'wizard/pos_details.xml',
        'wizard/pos_payment.xml',
        'views/pos_assets_common.xml',
        'views/pos_assets_index.xml',
        'views/pos_assets_qunit.xml',
        'views/point_of_sale_report.xml',
        'views/point_of_sale_view.xml',
        'views/pos_order_view.xml',
        'views/pos_category_view.xml',
        'views/product_view.xml',
        'views/account_journal_view.xml',
        'views/pos_payment_method_views.xml',
        'views/pos_payment_views.xml',
        'views/pos_config_view.xml',
        'views/pos_session_view.xml',
        'views/point_of_sale_sequence.xml',
        'views/customer_facing_display.xml',
        'data/point_of_sale_data.xml',
        'views/pos_order_report_view.xml',
        'views/account_statement_view.xml',
        'views/res_config_settings_views.xml',
        'views/digest_views.xml',
        'views/res_partner_view.xml',
        'views/report_userlabel.xml',
        'views/report_saledetails.xml',
        'views/point_of_sale_dashboard.xml',
    ],
    'demo': [
        'data/point_of_sale_demo.xml',
    ],
    'installable': True,
    'application': True,
    'qweb': [
        'static/src/xml/Chrome.xml',
        'static/src/xml/debug_manager.xml',
        'static/src/xml/Screens/ProductScreen/ProductScreen.xml',
        'static/src/xml/Screens/ClientListScreen/ClientLine.xml',
        'static/src/xml/Screens/ClientListScreen/ClientDetailsEdit.xml',
        'static/src/xml/Screens/ClientListScreen/ClientListScreen.xml',
        'static/src/xml/Screens/OrderManagementScreen/ControlButtons/InvoiceButton.xml',
        'static/src/xml/Screens/OrderManagementScreen/ControlButtons/ReprintReceiptButton.xml',
        'static/src/xml/Screens/OrderManagementScreen/OrderManagementScreen.xml',
        'static/src/xml/Screens/OrderManagementScreen/MobileOrderManagementScreen.xml',
        'static/src/xml/Screens/OrderManagementScreen/OrderManagementControlPanel.xml',
        'static/src/xml/Screens/OrderManagementScreen/OrderList.xml',
        'static/src/xml/Screens/OrderManagementScreen/OrderRow.xml',
        'static/src/xml/Screens/OrderManagementScreen/OrderDetails.xml',
        'static/src/xml/Screens/OrderManagementScreen/OrderlineDetails.xml',
        'static/src/xml/Screens/OrderManagementScreen/ReprintReceiptScreen.xml',
        'static/src/xml/Screens/TicketScreen/TicketScreen.xml',
        'static/src/xml/Screens/PaymentScreen/PSNumpadInputButton.xml',
        'static/src/xml/Screens/PaymentScreen/PaymentScreenNumpad.xml',
        'static/src/xml/Screens/PaymentScreen/PaymentScreenElectronicPayment.xml',
        'static/src/xml/Screens/PaymentScreen/PaymentScreenPaymentLines.xml',
        'static/src/xml/Screens/PaymentScreen/PaymentScreenStatus.xml',
        'static/src/xml/Screens/PaymentScreen/PaymentMethodButton.xml',
        'static/src/xml/Screens/PaymentScreen/PaymentScreen.xml',
        'static/src/xml/Screens/ProductScreen/Orderline.xml',
        'static/src/xml/Screens/ProductScreen/OrderSummary.xml',
        'static/src/xml/Screens/ProductScreen/OrderWidget.xml',
        'static/src/xml/Screens/ProductScreen/NumpadWidget.xml',
        'static/src/xml/Screens/ProductScreen/ActionpadWidget.xml',
        'static/src/xml/Screens/ProductScreen/CategoryBreadcrumb.xml',
        'static/src/xml/Screens/ProductScreen/CashBoxOpening.xml',
        'static/src/xml/Screens/ProductScreen/CategoryButton.xml',
        'static/src/xml/Screens/ProductScreen/CategorySimpleButton.xml',
        'static/src/xml/Screens/ProductScreen/HomeCategoryBreadcrumb.xml',
        'static/src/xml/Screens/ProductScreen/ProductsWidgetControlPanel.xml',
        'static/src/xml/Screens/ProductScreen/ProductItem.xml',
        'static/src/xml/Screens/ProductScreen/ProductList.xml',
        'static/src/xml/Screens/ProductScreen/ProductsWidget.xml',
        'static/src/xml/Screens/ReceiptScreen/WrappedProductNameLines.xml',
        'static/src/xml/Screens/ReceiptScreen/OrderReceipt.xml',
        'static/src/xml/Screens/ReceiptScreen/ReceiptScreen.xml',
        'static/src/xml/Screens/ScaleScreen/ScaleScreen.xml',
        'static/src/xml/ChromeWidgets/CashierName.xml',
        'static/src/xml/ChromeWidgets/ProxyStatus.xml',
        'static/src/xml/ChromeWidgets/SyncNotification.xml',
        'static/src/xml/ChromeWidgets/OrderManagementButton.xml',
        'static/src/xml/ChromeWidgets/HeaderButton.xml',
        'static/src/xml/ChromeWidgets/SaleDetailsButton.xml',
        'static/src/xml/ChromeWidgets/TicketButton.xml',
        'static/src/xml/SaleDetailsReport.xml',
        'static/src/xml/Misc/Draggable.xml',
        'static/src/xml/Misc/NotificationSound.xml',
        'static/src/xml/Misc/SearchBar.xml',
        'static/src/xml/ChromeWidgets/DebugWidget.xml',
        'static/src/xml/Popups/ErrorPopup.xml',
        'static/src/xml/Popups/ErrorBarcodePopup.xml',
        'static/src/xml/Popups/ConfirmPopup.xml',
        'static/src/xml/Popups/TextInputPopup.xml',
        'static/src/xml/Popups/TextAreaPopup.xml',
        'static/src/xml/Popups/ErrorTracebackPopup.xml',
        'static/src/xml/Popups/SelectionPopup.xml',
        'static/src/xml/Popups/EditListInput.xml',
        'static/src/xml/Popups/EditListPopup.xml',
        'static/src/xml/Popups/NumberPopup.xml',
        'static/src/xml/Popups/OfflineErrorPopup.xml',
        'static/src/xml/Popups/OrderImportPopup.xml',
        'static/src/xml/Popups/ProductConfiguratorPopup.xml',
        'static/src/xml/Screens/ProductScreen/ControlButtons/SetPricelistButton.xml',
        'static/src/xml/Screens/ProductScreen/ControlButtons/SetFiscalPositionButton.xml',
        'static/src/xml/ChromeWidgets/ClientScreenButton.xml',
        'static/src/xml/Misc/MobileOrderWidget.xml',
    ],
    'website': 'https://www.odoo.com/page/point-of-sale-shop',
    'license': 'LGPL-3',
}
51.269841
95
0.697988
699
6,460
6.354793
0.240343
0.143854
0.191805
0.23638
0.510131
0.347816
0.047726
0
0
0
0
0.001296
0.164087
6,460
125
96
51.68
0.821296
0.014551
0
0.02459
0
0
0.773063
0.723558
0
0
0
0
0
1
0
true
0
0.008197
0
0.008197
0.016393
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
2d5fa2b96d88000223f30b61574a48e61e2168f3
42
py
Python
src/processing/alternative_actor_suggestion/__init__.py
1997alireza/Movie-Casting-Problems
df555e57401ec1b120d8e9d3c2d51b1d3a070f21
[ "MIT" ]
3
2021-04-20T06:02:34.000Z
2021-04-24T04:16:45.000Z
src/processing/alternative_actor_suggestion/__init__.py
1997alireza/Movie-Casting-Problems
df555e57401ec1b120d8e9d3c2d51b1d3a070f21
[ "MIT" ]
null
null
null
src/processing/alternative_actor_suggestion/__init__.py
1997alireza/Movie-Casting-Problems
df555e57401ec1b120d8e9d3c2d51b1d3a070f21
[ "MIT" ]
null
null
null
""" AAS = Alternative Actor Suggestion """
14
34
0.690476
4
42
7.25
1
0
0
0
0
0
0
0
0
0
0
0
0.142857
42
3
35
14
0.805556
0.809524
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
2d6c44ce62999b9c39718c333ce8f8ac837841df
6,570
py
Python
python/akg/ops/math/gpu/tensorcore_batch_matmul.py
tianjiashuo/akg
a9cbf642063fb1086a93e8bc6be6feb145689817
[ "Apache-2.0" ]
286
2020-06-23T06:40:44.000Z
2022-03-30T01:27:49.000Z
python/akg/ops/math/gpu/tensorcore_batch_matmul.py
tianjiashuo/akg
a9cbf642063fb1086a93e8bc6be6feb145689817
[ "Apache-2.0" ]
10
2020-07-31T03:26:59.000Z
2021-12-27T15:00:54.000Z
python/akg/ops/math/gpu/tensorcore_batch_matmul.py
tianjiashuo/akg
a9cbf642063fb1086a93e8bc6be6feb145689817
[ "Apache-2.0" ]
30
2020-07-17T01:04:14.000Z
2021-12-27T14:05:19.000Z
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""operator dsl function: batch_matmul"""
import numpy as np
import akg.topi as topi
import akg.tvm as tvm
import akg.utils as utils


def batch_matmul(data1, data2, bias=None, out_dtype="float32", layout1="NHDT",
                 layout2="NHDT", layout_out="NHDT"):
    if len(data1.shape) == 4:
        res = batch_matmul_4D(data1, data2, bias, out_dtype, layout1,
                              layout2, layout_out)
    elif len(data1.shape) == 2:
        res = batch_matmul_2D(data1, data2, bias, out_dtype, layout1,
                              layout2, layout_out)
    else:
        res = batch_matmul_3D(data1, data2, bias, out_dtype, layout1,
                              layout2, layout_out)
    return res


def auto_out_transpose(expect, layout_out="NHDT"):
    if len(expect.shape) == 3:
        layout_out = layout_out[1:]
    if len(expect.shape) == 2:
        layout_out = layout_out[2:]
    layout_out_int = layout_out.replace('N', '0').replace('H', '1').replace(
        'D', '2').replace('T', '3')
    layout_out_list = list(layout_out_int)
    layout_out_axis = np.argsort(layout_out_list)
    expect = topi.transpose(expect, axes=tuple(layout_out_axis))
    return expect


def batch_matmul_3D(data1, data2, bias=None, out_dtype="float32",
                    layout1="NHDT", layout2="NHDT", layout_out="NHDT"):
    layout1_dict = {}
    layout2_dict = {}
    layout1 = layout1[1:]
    layout2 = layout2[1:]
    layout1_str = layout1.replace('N', 'b').replace('H', 'b').replace(
        'D', 'm').replace('T', 'k')
    layout2_str = layout2.replace('N', 'b').replace('H', 'b').replace(
        'D', 'n').replace('T', 'k')
    layout1_list = list(layout1_str)
    layout2_list = list(layout2_str)
    for i in range(len(layout1)):
        layout1_dict[layout1_list[i]] = data1.shape[i]
        layout2_dict[layout2_list[i]] = data2.shape[i]
    reduce_axis = tvm.reduce_axis((0, layout1_dict['k']), name='reduce_axis')
    if out_dtype == "float32":
        res = tvm.compute(
            (layout1_dict['b'], layout1_dict['m'], layout2_dict['n']),
            lambda b, i, j: tvm.sum(
                data1[b, i if layout1_list[1] == 'm' else reduce_axis,
                      reduce_axis if layout1_list[2] == 'k' else i].astype("float") *
                data2[b, j if layout2_list[1] == 'n' else reduce_axis,
                      reduce_axis if layout2_list[2] == 'k' else j].astype("float"),
                axis=reduce_axis))
    else:
        res = tvm.compute(
            (layout1_dict['b'], layout1_dict['m'], layout2_dict['n']),
            lambda b, i, j: tvm.sum(
                data1[b, i if layout1_list[1] == 'm' else reduce_axis,
                      reduce_axis if layout1_list[2] == 'k' else i] *
                data2[b, j if layout2_list[1] == 'n' else reduce_axis,
                      reduce_axis if layout2_list[2] == 'k' else j],
                axis=reduce_axis))
    if bias is not None:
        res = topi.add(res, bias)
    if layout_out != "NHDT":
        res = auto_out_transpose(res, layout_out)
    return res


def batch_matmul_4D(data1, data2, bias=None, out_dtype="float32",
                    layout1="NHDT", layout2="NHDT", layout_out="NHDT"):
    layout1_dict = {}
    layout2_dict = {}
    layout1_str = layout1.replace('N', 'B').replace('H', 'b').replace(
        'D', 'm').replace('T', 'k')
    layout2_str = layout2.replace('N', 'B').replace('H', 'b').replace(
        'D', 'n').replace('T', 'k')
    layout1_list = list(layout1_str)
    layout2_list = list(layout2_str)
    for i in range(len(layout1)):
        layout1_dict[layout1_list[i]] = data1.shape[i]
        layout2_dict[layout2_list[i]] = data2.shape[i]
    reduce_axis = tvm.reduce_axis((0, layout1_dict['k']), name='reduce_axis')
    if out_dtype == "float32":
        res = tvm.compute(
            (layout1_dict['B'], layout1_dict['b'], layout1_dict['m'],
             layout2_dict['n']),
            lambda B, b, i, j: tvm.sum(
                data1[B, b, i if layout1_list[2] == 'm' else reduce_axis,
                      reduce_axis if layout1_list[3] == 'k' else i].astype("float") *
                data2[B, b, j if layout2_list[2] == 'n' else reduce_axis,
                      reduce_axis if layout2_list[3] == 'k' else j].astype("float"),
                axis=reduce_axis))
    else:
        res = tvm.compute(
            (layout1_dict['B'], layout1_dict['b'], layout1_dict['m'],
             layout2_dict['n']),
            lambda B, b, i, j: tvm.sum(
                data1[B, b, i if layout1_list[2] == 'm' else reduce_axis,
                      reduce_axis if layout1_list[3] == 'k' else i] *
                data2[B, b, j if layout2_list[2] == 'n' else reduce_axis,
                      reduce_axis if layout2_list[3] == 'k' else j],
                axis=reduce_axis))
    if bias is not None:
        res = topi.add(res, bias)
    if layout_out != "NHDT":
        res = auto_out_transpose(res, layout_out)
    return res


def batch_matmul_2D(data1, data2, bias=None, out_dtype="float32",
                    layout1="NHDT", layout2="NHDT", layout_out="NHDT"):
    layout1_dict = {}
    layout2_dict = {}
    layout1 = layout1[2:]
    layout2 = layout2[2:]
    layout1_str = layout1.replace('D', 'm').replace('T', 'k')
    layout2_str = layout2.replace('D', 'n').replace('T', 'k')
    layout1_list = list(layout1_str)
    layout2_list = list(layout2_str)
    for i in range(len(layout1)):
        layout1_dict[layout1_list[i]] = data1.shape[i]
        layout2_dict[layout2_list[i]] = data2.shape[i]
    reduce_axis = tvm.reduce_axis((0, layout1_dict['k']), name='reduce_axis')
    if out_dtype == "float32":
        res = tvm.compute(
            (layout1_dict['m'], layout2_dict['n']),
            lambda i, j: tvm.sum(
                data1[i if layout1_list[0] == 'm' else reduce_axis,
                      reduce_axis if layout1_list[1] == 'k' else i].astype("float") *
                data2[j if layout2_list[0] == 'n' else reduce_axis,
                      reduce_axis if layout2_list[1] == 'k' else j].astype("float"),
                axis=reduce_axis))
    else:
        res = tvm.compute(
            (layout1_dict['m'], layout2_dict['n']),
            lambda i, j: tvm.sum(
                data1[i if layout1_list[0] == 'm' else reduce_axis,
                      reduce_axis if layout1_list[1] == 'k' else i] *
                data2[j if layout2_list[0] == 'n' else reduce_axis,
                      reduce_axis if layout2_list[1] == 'k' else j],
                axis=reduce_axis))
    if bias is not None:
        res = topi.add(res, bias)
    if layout_out != "NHDT":
        res = auto_out_transpose(res, layout_out)
    return res
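The layout handling in auto_out_transpose above hinges on one trick: map layout letters to digit strings, then argsort them to obtain transpose axes. A small numpy-only illustration (shapes chosen arbitrarily):

import numpy as np

layout_out = "NDHT"  # hypothetical target: swap H and D of an NHDT tensor
digits = layout_out.replace('N', '0').replace('H', '1').replace('D', '2').replace('T', '3')
axes = np.argsort(list(digits))        # argsort('0213') -> [0, 2, 1, 3]
x = np.zeros((2, 3, 4, 5))             # canonical N, H, D, T
print(x.transpose(tuple(axes)).shape)  # (2, 4, 3, 5): N, D, H, T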
49.029851
155
0.641705
1,006
6,570
4.008946
0.131213
0.096702
0.053558
0.059509
0.779569
0.760476
0.743863
0.738408
0.738408
0.704934
0
0.040882
0.199543
6,570
133
156
49.398496
0.725994
0.09102
0
0.540816
0
0
0.045187
0
0
0
0
0
0
1
0.05102
false
0
0.040816
0
0.142857
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
2d7e6ccb07dd2e10be11b1e8f97566223a0935c2
104
py
Python
backend/app/app/schemas/msg.py
tgogos/NEF_emulator
be126a1967e11148f02f526be4ab192b6ce6df68
[ "Apache-2.0" ]
10
2022-01-27T12:13:45.000Z
2022-03-22T10:44:37.000Z
backend/app/app/schemas/msg.py
EVOLVED-5G/NEF_emulator
bcbce43bf1226a4ddd81db8662c982460cff2583
[ "Apache-2.0" ]
6
2022-01-28T14:22:17.000Z
2022-02-09T13:31:45.000Z
backend/app/app/schemas/msg.py
EVOLVED-5G/NEF_emulator
bcbce43bf1226a4ddd81db8662c982460cff2583
[ "Apache-2.0" ]
2
2022-01-27T12:04:18.000Z
2022-01-27T12:18:59.000Z
from pydantic import BaseModel, constr


class Msg(BaseModel):
    supi: constr(regex=r'^[0-9]{15,16}$')
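A minimal usage sketch (assuming pydantic v1, where constr takes regex=; v2 renamed it to pattern=), with made-up digit strings:

from pydantic import ValidationError

valid = Msg(supi="202010000000001")  # 15 digits: accepted by the regex
print(valid.supi)

try:
    Msg(supi="12345")                # too short: rejected
except ValidationError as err:
    print(err.errors()[0]["msg"])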
20.8
41
0.692308
16
104
4.5
0.875
0
0
0
0
0
0
0
0
0
0
0.066667
0.134615
104
5
41
20.8
0.733333
0
0
0
0
0
0.133333
0
0
0
0
0
0
1
0
true
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
2d806c38714802b82a20a5320d422c9a8fa69c27
73
py
Python
wiggin/actions/__init__.py
golobor/plugychrom
df5c46895a9e419cda7793272625e289b1095137
[ "MIT" ]
null
null
null
wiggin/actions/__init__.py
golobor/plugychrom
df5c46895a9e419cda7793272625e289b1095137
[ "MIT" ]
1
2021-12-16T10:52:24.000Z
2021-12-16T10:52:24.000Z
wiggin/actions/__init__.py
golobor/plugychrom
df5c46895a9e419cda7793272625e289b1095137
[ "MIT" ]
2
2021-03-05T17:06:06.000Z
2021-12-16T10:51:49.000Z
from . import sim, conformations, constraints, interactions # noqa: F401
73
73
0.780822
8
73
7.125
1
0
0
0
0
0
0
0
0
0
0
0.047619
0.136986
73
1
73
73
0.857143
0.136986
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
2d86983a60366010ac0b7f912a1dd4d70bb1e89e
71
py
Python
build/lib/requestmap/Protocols/__init__.py
yyjlincoln/RequestMap
3fff1117e4aef59afe66cce29cd29e7670ae46f7
[ "Apache-2.0" ]
2
2021-12-12T06:25:51.000Z
2021-12-26T11:08:27.000Z
build/lib/requestmap/Protocols/__init__.py
yyjlincoln/RequestMap
3fff1117e4aef59afe66cce29cd29e7670ae46f7
[ "Apache-2.0" ]
null
null
null
build/lib/requestmap/Protocols/__init__.py
yyjlincoln/RequestMap
3fff1117e4aef59afe66cce29cd29e7670ae46f7
[ "Apache-2.0" ]
null
null
null
from . import Flask as Flask
from . import ProtocolBase as ProtocolBase
35.5
42
0.816901
10
71
5.8
0.5
0.344828
0
0
0
0
0
0
0
0
0
0
0.15493
71
2
42
35.5
0.966667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
2dad59f66a070611ef17b0b700de51a14b485383
68,173
py
Python
tacker/vnflcm/vnflcm_driver.py
lkpdn/tacker
578b12e989876ab99effbb372fec9345de85bcf5
[ "Apache-2.0" ]
null
null
null
tacker/vnflcm/vnflcm_driver.py
lkpdn/tacker
578b12e989876ab99effbb372fec9345de85bcf5
[ "Apache-2.0" ]
null
null
null
tacker/vnflcm/vnflcm_driver.py
lkpdn/tacker
578b12e989876ab99effbb372fec9345de85bcf5
[ "Apache-2.0" ]
1
2020-11-16T02:14:35.000Z
2020-11-16T02:14:35.000Z
# Copyright (C) 2020 NTT DATA
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

import copy
from datetime import datetime
import functools
import inspect
import re
import time
import traceback

import yaml

from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import encodeutils
from oslo_utils import excutils

from tacker.common import log
from tacker.common import driver_manager
from tacker.common import exceptions
from tacker.common import safe_utils
from tacker.common import utils
from tacker.conductor.conductorrpc import vnf_lcm_rpc
from tacker import manager
from tacker import objects
from tacker.objects import fields
from tacker.vnflcm import abstract_driver
from tacker.vnflcm import utils as vnflcm_utils
from tacker.vnfm.mgmt_drivers import constants as mgmt_constants

LOG = logging.getLogger(__name__)

CONF = cfg.CONF


@utils.expects_func_args('vnf_info', 'vnf_instance', 'scale_vnf_request')
def revert_to_error_scale(function):
    """Decorator to revert task_state to error on failure."""

    @functools.wraps(function)
    def decorated_function(self, context, *args, **kwargs):
        try:
            return function(self, context, *args, **kwargs)
        except Exception as ex:
            with excutils.save_and_reraise_exception():
                wrapped_func = safe_utils.get_wrapped_function(function)
                keyed_args = inspect.getcallargs(wrapped_func, self, context,
                                                 *args, **kwargs)
                try:
                    vnf_info = keyed_args['vnf_info']
                    vnf_instance = keyed_args['vnf_instance']
                    scale_vnf_request = keyed_args['scale_vnf_request']
                    vim_info = vnflcm_utils._get_vim(
                        context, vnf_instance.vim_connection_info)
                    vim_connection_info = \
                        objects.VimConnectionInfo.obj_from_primitive(
                            vim_info, context)
                    if vnf_info.get('resource_changes'):
                        resource_changes = vnf_info.get('resource_changes')
                    else:
                        resource_changes = self._scale_resource_update(
                            context, vnf_info, vnf_instance,
                            scale_vnf_request, vim_connection_info,
                            error=True)
                except Exception as e:
                    LOG.warning(traceback.format_exc())
                    LOG.warning("Failed to scale resource update "
                                "instance %(id)s. Error: %(error)s",
                                {"id": vnf_instance.id, "error": e})
                try:
                    self._vnfm_plugin._update_vnf_scaling_status_err(
                        context, vnf_info)
                except Exception as e:
                    LOG.warning("Failed to revert scale info for event "
                                "instance %(id)s. Error: %(error)s",
                                {"id": vnf_instance.id, "error": e})
                try:
                    self._vnf_instance_update(context, vnf_instance)
                except Exception as e:
                    LOG.warning("Failed to revert instantiation info for vnf "
                                "instance %(id)s. Error: %(error)s",
                                {"id": vnf_instance.id, "error": e})
                problem = objects.ProblemDetails(status=500, detail=str(ex))
                try:
                    timestamp = datetime.utcnow()
                    vnf_lcm_op_occ = vnf_info['vnf_lcm_op_occ']
                    vnf_lcm_op_occ.operation_state = 'FAILED_TEMP'
                    vnf_lcm_op_occ.state_entered_time = timestamp
                    vnf_lcm_op_occ.resource_changes = resource_changes
                    vnf_lcm_op_occ.error = problem
                    vnf_lcm_op_occ.save()
                except Exception as e:
                    LOG.warning("Failed to update vnf_lcm_op_occ for vnf "
                                "instance %(id)s. Error: %(error)s",
                                {"id": vnf_instance.id, "error": e})
                try:
                    notification = vnf_info['notification']
                    notification['notificationStatus'] = 'RESULT'
                    notification['operationState'] = 'FAILED_TEMP'
                    notification['error'] = problem.to_dict()
                    resource_dict = resource_changes.to_dict()
                    if resource_dict.get('affected_vnfcs'):
                        notification['affectedVnfcs'] = \
                            jsonutils.dump_as_bytes(
                                resource_dict.get('affected_vnfcs'))
                    if resource_dict.get('affected_virtual_links'):
                        notification['affectedVirtualLinks'] = \
                            jsonutils.dump_as_bytes(
                                resource_dict.get('affected_virtual_links'))
                    if resource_dict.get('affected_virtual_storages'):
                        notification['affectedVirtualStorages'] = \
                            jsonutils.dump_as_bytes(
                                resource_dict.get('affected_virtual_storages'))
                    self.rpc_api.send_notification(context, notification)
                except Exception as e:
                    LOG.warning("Failed to revert scale info for vnf "
                                "instance %(id)s. Error: %(error)s",
                                {"id": vnf_instance.id, "error": e})

    return decorated_function


@utils.expects_func_args('vnf_instance')
def revert_to_error_task_state(function):
    """Decorator to revert task_state to error on failure."""

    @functools.wraps(function)
    def decorated_function(self, context, *args, **kwargs):
        try:
            return function(self, context, *args, **kwargs)
        except Exception:
            with excutils.save_and_reraise_exception():
                wrapped_func = safe_utils.get_wrapped_function(function)
                keyed_args = inspect.getcallargs(wrapped_func, self, context,
                                                 *args, **kwargs)
                vnf_instance = keyed_args['vnf_instance']
                previous_task_state = vnf_instance.task_state
                try:
                    self._vnf_instance_update(
                        context, vnf_instance,
                        task_state=fields.VnfInstanceTaskState.ERROR)
                    LOG.info("Successfully reverted task state from "
                             "%(state)s to %(error)s on failure for vnf "
                             "instance %(id)s.",
                             {"state": previous_task_state,
                              "id": vnf_instance.id,
                              "error": fields.VnfInstanceTaskState.ERROR})
                except Exception as e:
                    LOG.warning("Failed to revert task state for vnf "
                                "instance %(id)s. Error: %(error)s",
                                {"id": vnf_instance.id, "error": e})

    return decorated_function


@utils.expects_func_args('vnf_info', 'vnf_instance', 'operation_params')
def revert_to_error_rollback(function):
    """Decorator to revert task_state to error on failure."""

    @functools.wraps(function)
    def decorated_function(self, context, *args, **kwargs):
        try:
            return function(self, context, *args, **kwargs)
        except Exception as ex:
            with excutils.save_and_reraise_exception():
                wrapped_func = safe_utils.get_wrapped_function(function)
                keyed_args = inspect.getcallargs(wrapped_func, self, context,
                                                 *args, **kwargs)
                resource_changes = None
                try:
                    vnf_info = keyed_args['vnf_info']
                    vnf_instance = keyed_args['vnf_instance']
                    operation_params = keyed_args['operation_params']
                    vim_info = vnflcm_utils._get_vim(
                        context, vnf_instance.vim_connection_info)
                    vim_connection_info = \
                        objects.VimConnectionInfo.obj_from_primitive(
                            vim_info, context)
                    vnf_lcm_op_occs = vnf_info['vnf_lcm_op_occ']
                    if vnf_info.get('resource_changes'):
                        resource_changes = vnf_info.get('resource_changes')
                    else:
                        if vnf_lcm_op_occs.operation == 'SCALE':
                            scale_vnf_request = \
                                objects.ScaleVnfRequest.obj_from_primitive(
                                    operation_params, context=context)
                            scale_vnf_request_copy = \
                                copy.deepcopy(scale_vnf_request)
                            scale_vnf_request_copy.type = 'SCALE_IN'
                            resource_changes = self._scale_resource_update(
                                context, vnf_info, vnf_instance,
                                scale_vnf_request_copy,
                                vim_connection_info,
                                error=True)
                        else:
                            resource_changes = self._term_resource_update(
                                context, vnf_info, vnf_instance)
                except Exception as e:
                    LOG.warning(traceback.format_exc())
                    LOG.warning("Failed to scale resource update "
                                "instance %(id)s. Error: %(error)s",
                                {"id": vnf_instance.id, "error": e})
                try:
                    self._update_vnf_rollback_status_err(context, vnf_info)
                except Exception as e:
                    LOG.warning("Failed to revert scale info for event "
                                "instance %(id)s. Error: %(error)s",
                                {"id": vnf_instance.id, "error": e})
                try:
                    self._vnf_instance_update(context, vnf_instance)
                except Exception as e:
                    LOG.warning("Failed to revert instantiation info for vnf "
                                "instance %(id)s. Error: %(error)s",
                                {"id": vnf_instance.id, "error": e})
                problem = objects.ProblemDetails(status=500, detail=str(ex))
                try:
                    timestamp = datetime.utcnow()
                    vnf_lcm_op_occ = vnf_info['vnf_lcm_op_occ']
                    vnf_lcm_op_occ.operation_state = 'FAILED_TEMP'
                    vnf_lcm_op_occ.state_entered_time = timestamp
                    if resource_changes:
                        vnf_lcm_op_occ.resource_changes = resource_changes
                    vnf_lcm_op_occ.error = problem
                    vnf_lcm_op_occ.save()
                except Exception as e:
                    LOG.warning("Failed to update vnf_lcm_op_occ for vnf "
                                "instance %(id)s. Error: %(error)s",
                                {"id": vnf_instance.id, "error": e})
                try:
                    notification = vnf_info['notification']
                    notification['notificationStatus'] = 'RESULT'
                    notification['operationState'] = 'FAILED_TEMP'
                    notification['error'] = problem.to_dict()
                    if resource_changes:
                        resource_dict = resource_changes.to_dict()
                        if resource_dict.get('affected_vnfcs'):
                            notification['affectedVnfcs'] = \
                                jsonutils.dump_as_bytes(
                                    resource_dict.get('affected_vnfcs'))
                        if resource_dict.get('affected_virtual_links'):
                            notification['affectedVirtualLinks'] = \
                                jsonutils.dump_as_bytes(
                                    resource_dict.get(
                                        'affected_virtual_links'))
                        if resource_dict.get('affected_virtual_storages'):
                            notification['affectedVirtualStorages'] = \
                                jsonutils.dump_as_bytes(
                                    resource_dict.get(
                                        'affected_virtual_storages'))
                    self.rpc_api.sendNotification(context, notification)
                except Exception as e:
                    LOG.warning("Failed to revert scale info for vnf "
                                "instance %(id)s. Error: %(error)s",
                                {"id": vnf_instance.id, "error": e})

    return decorated_function


class VnfLcmDriver(abstract_driver.VnfInstanceAbstractDriver):

    def __init__(self):
        super(VnfLcmDriver, self).__init__()
        self.rpc_api = vnf_lcm_rpc.VNFLcmRPCAPI()
        self._vnfm_plugin = manager.TackerManager.get_service_plugins()['VNFM']
        self._vnf_manager = driver_manager.DriverManager(
            'tacker.tacker.vnfm.drivers',
            cfg.CONF.tacker.infra_driver)

    def _vnf_instance_update(self, context, vnf_instance, **kwargs):
        """Update vnf instance in the database using kwargs as value."""

        for k, v in kwargs.items():
            setattr(vnf_instance, k, v)
        vnf_instance.save()

    def _instantiate_vnf(self, context, vnf_instance, vnf_dict,
                         vim_connection_info, instantiate_vnf_req):
        vnfd_dict = vnflcm_utils._get_vnfd_dict(
            context, vnf_instance.vnfd_id, instantiate_vnf_req.flavour_id)
        base_hot_dict, nested_hot_dict = \
            vnflcm_utils.get_base_nest_hot_dict(
                context,
                instantiate_vnf_req.flavour_id,
                vnf_instance.vnfd_id)
        vnf_package_path = None
        if base_hot_dict is not None:
            vnf_package_path = vnflcm_utils._get_vnf_package_path(
                context, vnf_instance.vnfd_id)

        param_for_subs_map = vnflcm_utils._get_param_data(
            vnfd_dict, instantiate_vnf_req)

        package_uuid = vnflcm_utils._get_vnf_package_id(
            context, vnf_instance.vnfd_id)
        vnf_software_images = vnflcm_utils._create_grant_request(
            vnfd_dict, package_uuid)
        vnf_resources = self._vnf_manager.invoke(
            vim_connection_info.vim_type, 'pre_instantiation_vnf',
            context=context, vnf_instance=vnf_instance,
            vim_connection_info=vim_connection_info,
            vnf_software_images=vnf_software_images,
            instantiate_vnf_req=instantiate_vnf_req,
            vnf_package_path=vnf_package_path)

        # save the vnf resources in the db
        for _, resources in vnf_resources.items():
            for vnf_resource in resources:
                vnf_resource.create()

        vnfd_dict_to_create_final_dict = copy.deepcopy(vnfd_dict)
        final_vnf_dict = vnflcm_utils._make_final_vnf_dict(
            vnfd_dict_to_create_final_dict, vnf_instance.id,
            vnf_instance.vnf_instance_name, param_for_subs_map, vnf_dict)

        try:
            instance_id = self._vnf_manager.invoke(
                vim_connection_info.vim_type, 'instantiate_vnf',
                context=context, plugin=self._vnfm_plugin,
                vnf_instance=vnf_instance,
                vnfd_dict=final_vnf_dict, grant_response=vnf_resources,
                vim_connection_info=vim_connection_info,
                base_hot_dict=base_hot_dict,
                vnf_package_path=vnf_package_path,
                instantiate_vnf_req=instantiate_vnf_req)
        except Exception as exp:
            with excutils.save_and_reraise_exception():
                exp.reraise = False
                LOG.error("Unable to instantiate vnf instance "
                          "%(id)s due to error : %(error)s",
                          {"id": vnf_instance.id, "error":
                           encodeutils.exception_to_unicode(exp)})
                raise exceptions.VnfInstantiationFailed(
                    id=vnf_instance.id,
                    error=encodeutils.exception_to_unicode(exp))

        if vnf_instance.instantiated_vnf_info and \
                not vnf_instance.instantiated_vnf_info.instance_id:
            vnf_instance.instantiated_vnf_info.instance_id = instance_id

        if vnf_dict['attributes'].get('scaling_group_names'):
            vnf_instance.instantiated_vnf_info.scale_status = \
                vnf_dict['scale_status']

        try:
            self._vnf_manager.invoke(
                vim_connection_info.vim_type, 'create_wait',
                plugin=self._vnfm_plugin, context=context,
                vnf_dict=final_vnf_dict,
                vnf_id=final_vnf_dict['instance_id'],
                auth_attr=vim_connection_info.access_info)
        except Exception as exp:
            with excutils.save_and_reraise_exception():
                exp.reraise = False
                LOG.error("Vnf creation wait failed for vnf instance "
                          "%(id)s due to error : %(error)s",
                          {"id": vnf_instance.id, "error":
                           encodeutils.exception_to_unicode(exp)})
                raise exceptions.VnfInstantiationWaitFailed(
                    id=vnf_instance.id,
                    error=encodeutils.exception_to_unicode(exp))

    @log.log
    def instantiate_vnf(self, context, vnf_instance, vnf_dict,
                        instantiate_vnf_req):
        vim_connection_info_list = vnflcm_utils.\
            _get_vim_connection_info_from_vnf_req(vnf_instance,
                                                  instantiate_vnf_req)

        self._vnf_instance_update(
            context, vnf_instance,
            vim_connection_info=vim_connection_info_list)

        vim_info = vnflcm_utils._get_vim(
            context, instantiate_vnf_req.vim_connection_info)

        vim_connection_info = objects.VimConnectionInfo.obj_from_primitive(
            vim_info, context)

        self._instantiate_vnf(context, vnf_instance, vnf_dict,
                              vim_connection_info, instantiate_vnf_req)

    @log.log
    @revert_to_error_task_state
    def terminate_vnf(self, context, vnf_instance, terminate_vnf_req):

        vim_info = vnflcm_utils._get_vim(
            context, vnf_instance.vim_connection_info)

        vim_connection_info = objects.VimConnectionInfo.obj_from_primitive(
            vim_info, context)

        LOG.info("Terminating vnf %s", vnf_instance.id)
        try:
            self._delete_vnf_instance_resources(
                context, vnf_instance, vim_connection_info,
                terminate_vnf_req=terminate_vnf_req)

            vnf_instance.instantiated_vnf_info.reinitialize()
            self._vnf_instance_update(context, vnf_instance,
                                      vim_connection_info=[],
                                      task_state=None)

            LOG.info("Vnf terminated %s successfully", vnf_instance.id)
        except Exception as exp:
            with excutils.save_and_reraise_exception():
                LOG.error("Unable to terminate vnf '%s' instance. "
                          "Error: %s", vnf_instance.id,
                          encodeutils.exception_to_unicode(exp))

    def _delete_vnf_instance_resources(self, context, vnf_instance,
                                       vim_connection_info,
                                       terminate_vnf_req=None,
                                       update_instantiated_state=True):

        if (vnf_instance.instantiated_vnf_info and
                vnf_instance.instantiated_vnf_info.instance_id) or \
                vim_connection_info.vim_type == 'kubernetes':

            instance_id = vnf_instance.instantiated_vnf_info.instance_id \
                if vnf_instance.instantiated_vnf_info else None
            access_info = vim_connection_info.access_info

            LOG.info("Deleting stack %(instance)s for vnf %(id)s ",
                     {"instance": instance_id, "id": vnf_instance.id})

            self._vnf_manager.invoke(vim_connection_info.vim_type,
                                     'delete', plugin=self, context=context,
                                     vnf_id=instance_id,
                                     auth_attr=access_info,
                                     vnf_instance=vnf_instance,
                                     terminate_vnf_req=terminate_vnf_req)

            if update_instantiated_state:
                vnf_instance.instantiation_state = \
                    fields.VnfInstanceState.NOT_INSTANTIATED
                vnf_instance.save()

            self._vnf_manager.invoke(vim_connection_info.vim_type,
                                     'delete_wait', plugin=self,
                                     context=context, vnf_id=instance_id,
                                     auth_attr=access_info,
                                     vnf_instance=vnf_instance)

        vnf_resources = objects.VnfResourceList.get_by_vnf_instance_id(
            context, vnf_instance.id)

        for vnf_resource in vnf_resources:
            self._vnf_manager.invoke(
                vim_connection_info.vim_type,
                'delete_vnf_instance_resource',
                context=context, vnf_instance=vnf_instance,
                vim_connection_info=vim_connection_info,
                vnf_resource=vnf_resource)

            vnf_resource.destroy(context)

    def _heal_vnf(self, context, vnf_instance, vim_connection_info,
                  heal_vnf_request):
        inst_vnf_info = vnf_instance.instantiated_vnf_info
        try:
            self._vnf_manager.invoke(
                vim_connection_info.vim_type, 'heal_vnf',
                context=context, vnf_instance=vnf_instance,
                vim_connection_info=vim_connection_info,
                heal_vnf_request=heal_vnf_request)
        except Exception as exp:
            with excutils.save_and_reraise_exception() as exc_ctxt:
                exc_ctxt.reraise = False
                LOG.error("Failed to heal vnf %(id)s in infra driver. "
                          "Error: %(error)s",
                          {"id": vnf_instance.id, "error":
                           encodeutils.exception_to_unicode(exp)})
                raise exceptions.VnfHealFailed(
                    id=vnf_instance.id,
                    error=encodeutils.exception_to_unicode(exp))

        try:
            self._vnf_manager.invoke(
                vim_connection_info.vim_type, 'heal_vnf_wait',
                context=context, vnf_instance=vnf_instance,
                vim_connection_info=vim_connection_info)
        except Exception as exp:
            LOG.error("Failed to update vnf %(id)s resources for instance "
                      "%(instance)s. Error: %(error)s",
                      {'id': vnf_instance.id,
                       'instance': inst_vnf_info.instance_id,
                       'error': encodeutils.exception_to_unicode(exp)})

        try:
            self._vnf_manager.invoke(
                vim_connection_info.vim_type, 'post_heal_vnf',
                context=context, vnf_instance=vnf_instance,
                vim_connection_info=vim_connection_info,
                heal_vnf_request=heal_vnf_request)
            self._vnf_instance_update(context, vnf_instance, task_state=None)
        except Exception as exp:
            with excutils.save_and_reraise_exception() as exc_ctxt:
                exc_ctxt.reraise = False
                LOG.error("Failed to store updated resources information for "
                          "instance %(instance)s for vnf %(id)s. "
                          "Error: %(error)s",
                          {'id': vnf_instance.id,
                           'instance': inst_vnf_info.instance_id,
                           'error': encodeutils.exception_to_unicode(exp)})
                raise exceptions.VnfHealFailed(
                    id=vnf_instance.id,
                    error=encodeutils.exception_to_unicode(exp))

    def _respawn_vnf(self, context, vnf_instance, vnf_dict,
                     vim_connection_info, heal_vnf_request):
        try:
            self._delete_vnf_instance_resources(
                context, vnf_instance, vim_connection_info,
                update_instantiated_state=False)
        except Exception as exc:
            with excutils.save_and_reraise_exception() as exc_ctxt:
                exc_ctxt.reraise = False
                err_msg = ("Failed to delete vnf resources for vnf instance "
                           "%(id)s before respawning. The vnf is in "
                           "inconsistent state. Error: %(error)s")
                LOG.error(err_msg % {"id": vnf_instance.id,
                                     "error": str(exc)})
                raise exceptions.VnfHealFailed(
                    id=vnf_instance.id,
                    error=encodeutils.exception_to_unicode(exc))

        # InstantiateVnfRequest is not stored in the db as it's mapped
        # to InstantiatedVnfInfo version object. Convert InstantiatedVnfInfo
        # version object to InstantiateVnfRequest so that vnf can be
        # instantiated.
        instantiate_vnf_request = objects.InstantiateVnfRequest.\
            from_vnf_instance(vnf_instance)

        vnf_instance.instantiated_vnf_info.reinitialize()
        vnf_instance.task_state = fields.VnfInstanceTaskState.INSTANTIATING
        vnfd_dict = vnflcm_utils._get_vnfd_dict(
            context, vnf_instance.vnfd_id,
            instantiate_vnf_request.flavour_id)
        vnflcm_utils._build_instantiated_vnf_info(
            vnfd_dict, instantiate_vnf_request, vnf_instance,
            vim_connection_info.vim_id)

        try:
            self._instantiate_vnf(context, vnf_instance, vnf_dict,
                                  vim_connection_info,
                                  instantiate_vnf_request)
            self._vnf_manager.invoke(
                vim_connection_info.vim_type, 'post_vnf_instantiation',
                context=context, vnf_instance=vnf_instance,
                vim_connection_info=vim_connection_info)
        except Exception as exc:
            with excutils.save_and_reraise_exception() as exc_ctxt:
                exc_ctxt.reraise = False
                err_msg = ("Failed to instantiate vnf instance "
                           "%(id)s after termination. The vnf is in "
                           "inconsistent state. Error: %(error)s")
                LOG.error(err_msg % {"id": vnf_instance.id,
                                     "error": str(exc)})
                raise exceptions.VnfHealFailed(
                    id=vnf_instance.id,
                    error=encodeutils.exception_to_unicode(exc))

        self._vnf_instance_update(
            context, vnf_instance,
            instantiation_state=fields.VnfInstanceState.INSTANTIATED,
            task_state=None)

    @log.log
    @revert_to_error_task_state
    def heal_vnf(self, context, vnf_instance, vnf_dict, heal_vnf_request):
        LOG.info("Request received for healing vnf '%s'", vnf_instance.id)
        vim_info = vnflcm_utils._get_vim(
            context, vnf_instance.vim_connection_info)

        vim_connection_info = objects.VimConnectionInfo.obj_from_primitive(
            vim_info, context)

        if not heal_vnf_request.vnfc_instance_id:
            self._respawn_vnf(context, vnf_instance, vnf_dict,
                              vim_connection_info, heal_vnf_request)
        else:
            self._heal_vnf(context, vnf_instance, vim_connection_info,
                           heal_vnf_request)

        LOG.info("Request received for healing vnf '%s' is completed "
                 "successfully", vnf_instance.id)

    def _scale_vnf_pre(self, context, vnf_info, vnf_instance,
                       scale_vnf_request, vim_connection_info):
        self._vnfm_plugin._update_vnf_scaling(
            context, vnf_info, 'ACTIVE', 'PENDING_' + scale_vnf_request.type)

        vnf_lcm_op_occ = vnf_info['vnf_lcm_op_occ']
        vnf_lcm_op_occ.error_point = 2

        scale_id_list = []
        scale_name_list = []
        grp_id = None
        vnf_info['policy_name'] = scale_vnf_request.aspect_id
        if scale_vnf_request.type == 'SCALE_IN':
            vnfd_yaml = vnf_info['vnfd']['attributes'].get(
                'vnfd_' + vnf_instance.instantiated_vnf_info.flavour_id, '')
            vnfd_dict = yaml.safe_load(vnfd_yaml)
            # mgmt_driver from vnfd
            vnf_node = self._get_node_template_for_vnf(vnfd_dict)
            if vnf_node and vnf_node.get('interfaces'):
                if vnf_node['interfaces']['Vnflcm']['scale_start']:
                    vnf_info['vnfd']['mgmt_driver'] = \
                        vnf_node['interfaces']['Vnflcm']['scale_start']
            vnf_info['action'] = 'in'
            scale_id_list, scale_name_list, grp_id, res_num = \
                self._vnf_manager.invoke(
                    vim_connection_info.vim_type,
                    'get_scale_in_ids',
                    plugin=self,
                    context=context,
                    vnf_dict=vnf_info,
                    is_reverse=scale_vnf_request.additional_params.get(
                        'is_reverse'),
                    auth_attr=vim_connection_info.access_info,
                    region_name=vim_connection_info.access_info.get(
                        'region_name'),
                    number_of_steps=scale_vnf_request.number_of_steps
                )
            vnf_info['res_num'] = res_num

            # mgmt_driver pre
            if len(scale_id_list) != 0 and vnf_info['vnfd'].get(
                    'mgmt_driver'):
                if len(scale_id_list) > 1:
                    stack_value = []
                    stack_value = scale_id_list
                else:
                    stack_value = scale_id_list[0]
                kwargs = {
                    mgmt_constants.KEY_ACTION:
                        mgmt_constants.ACTION_SCALE_IN_VNF,
                    mgmt_constants.KEY_KWARGS:
                        {'vnf': vnf_info},
                    mgmt_constants.KEY_SCALE:
                        stack_value,
                }
                self._vnfm_plugin.mgmt_call(context, vnf_info, kwargs)
        else:
            vnf_info['action'] = 'out'
            scale_id_list = self._vnf_manager.invoke(
                vim_connection_info.vim_type,
                'get_scale_ids',
                plugin=self,
                context=context,
                vnf_dict=vnf_info,
                auth_attr=vim_connection_info.access_info,
                region_name=vim_connection_info.access_info.get('region_name')
            )
        vnf_lcm_op_occ.error_point = 3
        return scale_id_list, scale_name_list, grp_id

    def _get_node_template_for_vnf(self, vnfd_dict):
        node_tmp = vnfd_dict['topology_template']['node_templates']
        for node_template in node_tmp.values():
            LOG.debug("node_template %s", node_template)
            if not re.match('^tosca', node_template['type']):
                LOG.debug("VNF node_template %s", node_template)
                return node_template
        return {}

    def _scale_vnf_post(self, context, vnf_info, vnf_instance,
                        scale_vnf_request, vim_connection_info,
                        scale_id_list, resource_changes):
        vnf_lcm_op_occ = vnf_info['vnf_lcm_op_occ']
        vnf_lcm_op_occ.error_point = 6
        if scale_vnf_request.type == 'SCALE_OUT':
            vnfd_yaml = \
                vnf_info['vnfd']['attributes'].get(
                    'vnfd_' + vnf_instance.instantiated_vnf_info.flavour_id,
                    '')
            vnf_info['policy_name'] = scale_vnf_request.aspect_id
            vnfd_dict = yaml.safe_load(vnfd_yaml)
            # mgmt_driver from vnfd
            vnf_node = self._get_node_template_for_vnf(vnfd_dict)
            if vnf_node and vnf_node.get('interfaces'):
                if vnf_node['interfaces']['Vnflcm']['scale_end']:
                    vnf_info['vnfd']['mgmt_driver'] = \
                        vnf_node['interfaces']['Vnflcm']['scale_end']
            scale_id_after = self._vnf_manager.invoke(
                vim_connection_info.vim_type,
                'get_scale_ids',
                plugin=self,
                context=context,
                vnf_dict=vnf_info,
                auth_attr=vim_connection_info.access_info,
                region_name=vim_connection_info.access_info.get('region_name')
            )
            id_list = []
            id_list = list(set(scale_id_after) - set(scale_id_list))
            vnf_info['res_num'] = len(scale_id_after)
            if len(id_list) != 0 and vnf_info['vnfd'].get('mgmt_driver'):
                if len(id_list) > 1:
                    stack_value = []
                    stack_value = id_list
                else:
                    stack_value = id_list[0]
                kwargs = {
                    mgmt_constants.KEY_ACTION:
                        mgmt_constants.ACTION_SCALE_OUT_VNF,
                    mgmt_constants.KEY_KWARGS:
                        {'vnf': vnf_info},
                    mgmt_constants.KEY_SCALE:
                        stack_value,
                }
                self._vnfm_plugin.mgmt_call(context, vnf_info, kwargs)
        vnf_lcm_op_occ.error_point = 7
        vnf_instance.instantiated_vnf_info.scale_level = \
            vnf_info['after_scale_level']
        scaleGroupDict = \
            jsonutils.loads(vnf_info['attributes']['scale_group'])
        (scaleGroupDict
         ['scaleGroupDict'][scale_vnf_request.aspect_id]['default']) = \
            vnf_info['res_num']
        vnf_info['attributes']['scale_group'] = \
            jsonutils.dump_as_bytes(scaleGroupDict)
        vnf_lcm_op_occ = vnf_info['vnf_lcm_op_occ']
        vnf_lcm_op_occ.operation_state = 'COMPLETED'
        vnf_lcm_op_occ.resource_changes = resource_changes
        self._vnfm_plugin._update_vnf_scaling(
            context, vnf_info,
            'PENDING_' + scale_vnf_request.type,
            'ACTIVE',
            vnf_instance=vnf_instance,
            vnf_lcm_op_occ=vnf_lcm_op_occ)
        notification = vnf_info['notification']
        notification['notificationStatus'] = 'RESULT'
        notification['operationState'] = 'COMPLETED'
        resource_dict = resource_changes.to_dict()
        if resource_dict.get('affected_vnfcs'):
            notification['affectedVnfcs'] = resource_dict.get('affected_vnfcs')
        if resource_dict.get('affected_virtual_links'):
            notification['affectedVirtualLinks'] = \
                resource_dict.get('affected_virtual_links')
        if resource_dict.get('affected_virtual_storages'):
            notification['affectedVirtualStorages'] = \
                resource_dict.get('affected_virtual_storages')
        self.rpc_api.send_notification(context, notification)

    def _scale_resource_update(self, context, vnf_info, vnf_instance,
                               scale_vnf_request,
                               vim_connection_info,
                               error=False):
        vnf_lcm_op_occs = vnf_info['vnf_lcm_op_occ']
        instantiated_vnf_before = \
            copy.deepcopy(vnf_instance.instantiated_vnf_info)
        self._vnf_manager.invoke(
            vim_connection_info.vim_type,
            'scale_resource_update',
            context=context,
            vnf_instance=vnf_instance,
            scale_vnf_request=scale_vnf_request,
            vim_connection_info=vim_connection_info
        )
        for scale in vnf_instance.instantiated_vnf_info.scale_status:
            if scale_vnf_request.aspect_id == scale.aspect_id:
                if not error:
                    scale.scale_level = vnf_info['after_scale_level']
                    break
                else:
                    scale.scale_level = vnf_info['scale_level']
                    break
        LOG.debug("vnf_instance.instantiated_vnf_info %s",
                  vnf_instance.instantiated_vnf_info)
        affected_vnfcs = []
        affected_virtual_storages = []
        affected_virtual_links = []
        if scale_vnf_request.type == 'SCALE_IN':
            for vnfc in instantiated_vnf_before.vnfc_resource_info:
                vnfc_delete = True
                for rsc in vnf_instance.instantiated_vnf_info.\
                        vnfc_resource_info:
                    if vnfc.compute_resource.resource_id == \
                            rsc.compute_resource.resource_id:
                        vnfc_delete = False
                        break
                if vnfc_delete:
                    affected_vnfc = objects.AffectedVnfc(
                        id=vnfc.id,
                        vdu_id=vnfc.vdu_id,
                        change_type='REMOVED',
                        compute_resource=vnfc.compute_resource)
                    affected_vnfcs.append(affected_vnfc)
            for st in instantiated_vnf_before.virtual_storage_resource_info:
                st_delete = True
                for rsc in vnf_instance.instantiated_vnf_info.\
                        virtual_storage_resource_info:
                    if st.storage_resource.resource_id == \
                            rsc.storage_resource.resource_id:
                        st_delete = False
                        break
                if st_delete:
                    affected_st = objects.AffectedVirtualStorage(
                        id=st.id,
                        virtual_storage_desc_id=st.virtual_storage_desc_id,
                        change_type='REMOVED',
                        storage_resource=st.storage_resource)
                    affected_virtual_storages.append(affected_st)
            for vl in instantiated_vnf_before.vnf_virtual_link_resource_info:
                port_delete = False
                for rsc in vnf_instance.\
                        instantiated_vnf_info.vnf_virtual_link_resource_info:
                    if vl.network_resource.resource_id == \
                            rsc.network_resource.resource_id:
                        if len(vl.vnf_link_ports) != len(rsc.vnf_link_ports):
                            port_delete = True
                            break
                if port_delete:
                    affected_vl = objects.AffectedVirtualLink(
                        id=vl.id,
                        vnf_virtual_link_desc_id=vl.vnf_virtual_link_desc_id,
                        change_type='LINK_PORT_REMOVED',
                        network_resource=vl.network_resource)
                    affected_virtual_links.append(affected_vl)
        else:
            for rsc in vnf_instance.instantiated_vnf_info.vnfc_resource_info:
                vnfc_add = True
                for vnfc in instantiated_vnf_before.vnfc_resource_info:
                    if vnfc.compute_resource.resource_id == \
                            rsc.compute_resource.resource_id:
                        vnfc_add = False
                        break
                if vnfc_add:
                    affected_vnfc = objects.AffectedVnfc(
                        id=rsc.id,
                        vdu_id=rsc.vdu_id,
                        change_type='ADDED',
                        compute_resource=rsc.compute_resource)
                    affected_vnfcs.append(affected_vnfc)
            for rsc in vnf_instance.instantiated_vnf_info.\
                    virtual_storage_resource_info:
                st_add = True
                for st in instantiated_vnf_before.\
                        virtual_storage_resource_info:
                    if st.storage_resource.resource_id == \
                            rsc.storage_resource.resource_id:
                        st_add = False
                        break
                if st_add:
                    affected_st = objects.AffectedVirtualStorage(
                        id=rsc.id,
                        virtual_storage_desc_id=rsc.virtual_storage_desc_id,
                        change_type='ADDED',
                        storage_resource=rsc.storage_resource)
                    affected_virtual_storages.append(affected_st)
            for vl in instantiated_vnf_before.vnf_virtual_link_resource_info:
                port_add = False
                for rsc in vnf_instance.instantiated_vnf_info.\
                        vnf_virtual_link_resource_info:
                    if vl.network_resource.resource_id == \
                            rsc.network_resource.resource_id:
                        if len(vl.vnf_link_ports) != len(rsc.vnf_link_ports):
                            port_add = True
                            break
                if port_add:
                    affected_vl = objects.AffectedVirtualLink(
                        id=vl.id,
                        vnf_virtual_link_desc_id=vl.vnf_virtual_link_desc_id,
                        change_type='LINK_PORT_ADDED',
                        network_resource=vl.network_resource)
                    affected_virtual_links.append(affected_vl)
        resource_changes = objects.ResourceChanges()
        resource_changes.affected_vnfcs = []
        resource_changes.affected_virtual_links = []
        resource_changes.affected_virtual_storages = []
        if 'resource_changes' in \
                vnf_lcm_op_occs and vnf_lcm_op_occs.resource_changes:
            res_chg = vnf_lcm_op_occs.resource_changes
            if 'affected_vnfcs' in res_chg:
                if res_chg.affected_vnfcs and \
                        len(res_chg.affected_vnfcs) > 0:
                    resource_changes.affected_vnfcs.\
                        extend(res_chg.affected_vnfcs)
            if 'affected_virtual_storages' in res_chg:
                if res_chg.affected_virtual_storages and \
                        len(res_chg.affected_virtual_storages) > 0:
                    resource_changes.affected_virtual_storages.extend(
                        res_chg.affected_virtual_storages)
            if 'affected_virtual_links' in res_chg:
                if res_chg.affected_virtual_links and \
                        len(res_chg.affected_virtual_links) > 0:
                    resource_changes.affected_virtual_links.\
                        extend(res_chg.affected_virtual_links)
        resource_changes.affected_vnfcs.extend(affected_vnfcs)
        resource_changes.affected_virtual_storages.extend(
            affected_virtual_storages)
        resource_changes.affected_virtual_links = []
        resource_changes.affected_virtual_links.extend(affected_virtual_links)
        vnf_info['resource_changes'] = resource_changes
        return resource_changes

    def _scale_vnf(self, context, vnf_info, vnf_instance,
                   scale_vnf_request, vim_connection_info,
                   scale_name_list, grp_id):
        # action_driver
        LOG.debug("vnf_info['vnfd']['attributes'] %s",
                  vnf_info['vnfd']['attributes'])
        vnf_lcm_op_occ = vnf_info['vnf_lcm_op_occ']
        vnf_lcm_op_occ.error_point = 4
        self.scale(context, vnf_info, scale_vnf_request,
                   vim_connection_info, scale_name_list, grp_id)
        vnf_lcm_op_occ.error_point = 5

    @log.log
    @revert_to_error_scale
    def scale_vnf(self, context, vnf_info, vnf_instance, scale_vnf_request):
        LOG.info("Request received for scale vnf '%s'", vnf_instance.id)
        timestamp = datetime.utcnow()
        vnf_lcm_op_occ = vnf_info['vnf_lcm_op_occ']
        vnf_lcm_op_occ.operation_state = 'PROCESSING'
        vnf_lcm_op_occ.state_entered_time = timestamp
        LOG.debug("vnf_lcm_op_occ %s", vnf_lcm_op_occ)
        vnf_lcm_op_occ.save()

        notification = vnf_info['notification']
        notification['operationState'] = 'PROCESSING'
        self.rpc_api.send_notification(context, notification)

        vim_info = vnflcm_utils._get_vim(
            context, vnf_instance.vim_connection_info)
        vim_connection_info = objects.VimConnectionInfo.obj_from_primitive(
            vim_info, context)

        scale_id_list, scale_name_list, grp_id = self._scale_vnf_pre(
            context, vnf_info, vnf_instance, scale_vnf_request,
            vim_connection_info)

        self._scale_vnf(context, vnf_info, vnf_instance, scale_vnf_request,
                        vim_connection_info, scale_name_list, grp_id)

        resource_changes = self._scale_resource_update(
            context, vnf_info, vnf_instance, scale_vnf_request,
            vim_connection_info)

        self._scale_vnf_post(context, vnf_info, vnf_instance,
                             scale_vnf_request, vim_connection_info,
                             scale_id_list, resource_changes)

        LOG.info("Request received for scale vnf '%s' is completed "
                 "successfully", vnf_instance.id)

    def scale(self, context, vnf_info, scale_vnf_request,
              vim_connection_info, scale_name_list, grp_id):
        self._vnf_manager = driver_manager.DriverManager(
            'tacker.tacker.vnfm.drivers',
            cfg.CONF.tacker.infra_driver)
        policy = {}
        policy['instance_id'] = vnf_info['instance_id']
        policy['name'] = scale_vnf_request.aspect_id
        policy['vnf'] = vnf_info
        if scale_vnf_request.type == 'SCALE_IN':
            policy['action'] = 'in'
        else:
            policy['action'] = 'out'
        LOG.debug(
            "is_reverse: %s",
            scale_vnf_request.additional_params.get('is_reverse'))
        scale_json = vnf_info['attributes']['scale_group']
        scaleGroupDict = jsonutils.loads(scale_json)
        key_aspect = scale_vnf_request.aspect_id
        default = scaleGroupDict['scaleGroupDict'][key_aspect]['default']
        if (scale_vnf_request.type == 'SCALE_IN' and
                scale_vnf_request.additional_params['is_reverse'] == 'True'):
            self._vnf_manager.invoke(
                vim_connection_info.vim_type,
                'scale_in_reverse',
                plugin=self,
                context=context,
                auth_attr=vim_connection_info.access_info,
                vnf_info=vnf_info,
                scale_vnf_request=scale_vnf_request,
                region_name=vim_connection_info.access_info.get('region_name'),
                scale_name_list=scale_name_list,
                grp_id=grp_id
            )
            self._vnf_manager.invoke(
                vim_connection_info.vim_type,
                'scale_update_wait',
                plugin=self,
                context=context,
                auth_attr=vim_connection_info.access_info,
                vnf_info=vnf_info,
                region_name=vim_connection_info.access_info.get('region_name')
            )
        elif
scale_vnf_request.type == 'SCALE_OUT' and default == 0: self._vnf_manager.invoke( vim_connection_info.vim_type, 'scale_out_initial', plugin=self, context=context, auth_attr=vim_connection_info.access_info, vnf_info=vnf_info, scale_vnf_request=scale_vnf_request, region_name=vim_connection_info.access_info.get('region_name') ) self._vnf_manager.invoke( vim_connection_info.vim_type, 'scale_update_wait', plugin=self, context=context, auth_attr=vim_connection_info.access_info, vnf_info=vnf_info, region_name=vim_connection_info.access_info.get('region_name') ) else: heat_template = vnf_info['attributes']['heat_template'] policy_in_name = scale_vnf_request.aspect_id + '_scale_in' policy_out_name = scale_vnf_request.aspect_id + '_scale_out' heat_resource = yaml.safe_load(heat_template) if scale_vnf_request.type == 'SCALE_IN': policy['action'] = 'in' policy_temp = heat_resource['resources'][policy_in_name] policy_prop = policy_temp['properties'] cooldown = policy_prop.get('cooldown') policy_name = policy_in_name else: policy['action'] = 'out' policy_temp = heat_resource['resources'][policy_out_name] policy_prop = policy_temp['properties'] cooldown = policy_prop.get('cooldown') policy_name = policy_out_name policy_temp = heat_resource['resources'][policy_name] policy_prop = policy_temp['properties'] for i in range(scale_vnf_request.number_of_steps): last_event_id = self._vnf_manager.invoke( vim_connection_info.vim_type, 'scale', plugin=self, context=context, auth_attr=vim_connection_info.access_info, policy=policy, region_name=vim_connection_info.access_info.get('\ region_name') ) self._vnf_manager.invoke( vim_connection_info.vim_type, 'scale_wait', plugin=self, context=context, auth_attr=vim_connection_info.access_info, policy=policy, region_name=vim_connection_info.access_info.get('\ region_name'), last_event_id=last_event_id) if i != scale_vnf_request.number_of_steps - 1: if cooldown: time.sleep(cooldown) def _term_resource_update(self, context, vnf_info, vnf_instance, error=False): if not vnf_instance.instantiated_vnf_info: resource_changes = objects.ResourceChanges() resource_changes.affected_vnfcs = [] resource_changes.affected_virtual_links = [] resource_changes.affected_virtual_storages = [] vnf_info['resource_changes'] = resource_changes return resource_changes instantiated_vnf_before = copy.deepcopy( vnf_instance.instantiated_vnf_info) vnf_instance.instantiated_vnf_info.reinitialize() if not error: vnf_instance.vim_connection_info = [] vnf_instance.task_state = None LOG.debug( "vnf_instance.instantiated_vnf_info %s", vnf_instance.instantiated_vnf_info) affected_vnfcs = [] affected_virtual_storages = [] affected_virtual_links = [] for vnfc in instantiated_vnf_before.vnfc_resource_info: vnfc_delete = True for rsc in vnf_instance.instantiated_vnf_info.vnfc_resource_info: if vnfc.compute_resource.resource_id == \ rsc.compute_resource.resource_id: vnfc_delete = False break if vnfc_delete: affected_vnfc = objects.AffectedVnfc( id=vnfc.id, vdu_id=vnfc.vdu_id, change_type='REMOVED', compute_resource=vnfc.compute_resource) affected_vnfcs.append(affected_vnfc) for st in instantiated_vnf_before.virtual_storage_resource_info: st_delete = True for rsc in \ vnf_instance.instantiated_vnf_info.\ virtual_storage_resource_info: if st.storage_resource.resource_id == \ rsc.storage_resource.resource_id: st_delete = False break if st_delete: affected_st = objects.AffectedVirtualStorage( id=st.id, virtual_storage_desc_id=st.virtual_storage_desc_id, change_type='REMOVED', storage_resource=st.storage_resource) 
affected_virtual_storages.append(affected_st) for vl in instantiated_vnf_before.vnf_virtual_link_resource_info: vm_delete = False for rsc in \ vnf_instance.instantiated_vnf_info.\ vnf_virtual_link_resource_info: if st.network_resource.resource_id == \ rsc.network_resource.resource_id: vm_delete = False break if vm_delete: affected_vl = objects.AffectedVirtualLink( id=vl.id, vnf_virtual_link_desc_id=vl.vnf_virtual_link_desc_id, change_type='REMOVED', network_resource=vl.network_resource) affected_virtual_links.append(affected_vl) vnf_lcm_op_occs = vnf_info['vnf_lcm_op_occ'] resource_changes = objects.ResourceChanges() resource_changes.affected_vnfcs = [] resource_changes.affected_virtual_links = [] resource_changes.affected_virtual_storages = [] if 'resource_changes' in vnf_lcm_op_occs \ and vnf_lcm_op_occs.resource_changes: if 'affected_vnfcs' in vnf_lcm_op_occs.resource_changes: if len(vnf_lcm_op_occs.resource_changes.affected_vnfcs) > 0: resource_changes.affected_vnfcs.extend( vnf_lcm_op_occs.resource_changes.affected_vnfcs) if 'affected_virtual_storages' in vnf_lcm_op_occs.resource_changes: if len(vnf_lcm_op_occs.resource_changes. affected_virtual_storages) > 0: resource_changes.affected_virtual_storages.extend( vnf_lcm_op_occs.resource_changes. affected_virtual_storages) if 'affected_virtual_links' in vnf_lcm_op_occs.resource_changes: if len(vnf_lcm_op_occs.resource_changes. affected_virtual_links) > 0: resource_changes.affected_virtual_links.extend( vnf_lcm_op_occs.resource_changes. affected_virtual_links) resource_changes.affected_vnfcs.extend(affected_vnfcs) resource_changes.affected_virtual_storages.extend( affected_virtual_storages) resource_changes.affected_virtual_links.extend(affected_virtual_links) vnf_info['resource_changes'] = resource_changes return resource_changes def _rollback_vnf_pre( self, context, vnf_info, vnf_instance, operation_params, vim_connection_info): vnf_lcm_op_occs = vnf_info['vnf_lcm_op_occ'] scale_id_list = [] scale_name_list = [] grp_id = None self._update_vnf_rollback_pre(context, vnf_info) if vnf_lcm_op_occs.operation == 'SCALE': scaleGroupDict = jsonutils.loads( vnf_info['attributes']['scale_group']) cap_size = scaleGroupDict['scaleGroupDict'][operation_params ['aspect_id']]['default'] vnf_info['res_num'] = cap_size scale_vnf_request = objects.ScaleVnfRequest.obj_from_primitive( operation_params, context=context) for scale in vnf_instance.instantiated_vnf_info.scale_status: if scale_vnf_request.aspect_id == scale.aspect_id: vnf_info['after_scale_level'] = scale.scale_level break if vnf_lcm_op_occs.operation == 'SCALE' \ and vnf_lcm_op_occs.error_point >= 4: scale_id_list, scale_name_list, grp_id = self._vnf_manager.invoke( vim_connection_info.vim_type, 'get_rollback_ids', plugin=self, context=context, vnf_dict=vnf_info, aspect_id=operation_params['aspect_id'], auth_attr=vim_connection_info.access_info, region_name=vim_connection_info.access_info.get('region_name') ) if vnf_lcm_op_occs.error_point == 7: if vnf_lcm_op_occs.operation == 'SCALE': vnfd_yaml = vnf_info['vnfd']['attributes'].\ get('vnfd_' + vnf_instance.instantiated_vnf_info.flavour_id, '') vnfd_dict = yaml.safe_load(vnfd_yaml) # mgmt_driver from vnfd vnf_node = self._get_node_template_for_vnf(vnfd_dict) if vnf_node and vnf_node.get('interfaces'): if vnf_node['interfaces'].get('Vnflcm'): if vnf_node['interfaces']['Vnflcm'].get('scale_start'): vnf_info['vnfd']['mgmt_driver'] = \ vnf_node['interfaces']['Vnflcm']['scale_start'] vnf_info['action'] = 'in' if len(scale_id_list) != 0 and 
vnf_info['vnfd'].get( 'mgmt_driver'): if len(scale_id_list) > 1: stack_value = [] stack_value = scale_id_list else: stack_value = scale_id_list[0] kwargs = { mgmt_constants.KEY_ACTION: mgmt_constants.ACTION_SCALE_IN_VNF, mgmt_constants.KEY_KWARGS: {'vnf': vnf_info}, mgmt_constants.KEY_SCALE: stack_value, } self._rollback_mgmt_call(context, vnf_info, kwargs) else: vnfd_yaml = vnf_info['vnfd']['attributes'].\ get('vnfd_' + vnf_instance.instantiated_vnf_info.flavour_id, '') vnfd_dict = yaml.safe_load(vnfd_yaml) # mgmt_driver from vnfd vnf_node = self._get_node_template_for_vnf(vnfd_dict) if vnf_node and vnf_node.get('interfaces'): if vnf_node['interfaces'].get('Vnflcm'): if vnf_node['interfaces']['Vnflcm'].get( 'termination_start'): vnf_info['vnfd']['mgmt_driver'] = vnf_node[ 'interfaces']['Vnflcm']['termination_start'] if len(scale_id_list) != 0 and vnf_info['vnfd'].get( 'mgmt_driver'): kwargs = { mgmt_constants.KEY_ACTION: mgmt_constants.ACTION_DELETE_VNF, mgmt_constants.KEY_KWARGS: {'vnf': vnf_info} } self._rollback_mgmt_call(context, vnf_info, kwargs) vnf_lcm_op_occs.error_point = 6 return scale_name_list, grp_id def _rollback_vnf( self, context, vnf_info, vnf_instance, operation_params, vim_connection_info, scale_name_list, grp_id): vnf_lcm_op_occs = vnf_info['vnf_lcm_op_occ'] if vnf_lcm_op_occs.error_point >= 4: if vnf_lcm_op_occs.operation == 'SCALE': scale_vnf_request = objects.ScaleVnfRequest.obj_from_primitive( operation_params, context=context) self._vnf_manager.invoke( vim_connection_info.vim_type, 'scale_in_reverse', plugin=self, context=context, auth_attr=vim_connection_info.access_info, vnf_info=vnf_info, scale_vnf_request=scale_vnf_request, region_name=vim_connection_info.access_info.get( 'region_name'), scale_name_list=scale_name_list, grp_id=grp_id) self._vnf_manager.invoke( vim_connection_info.vim_type, 'scale_update_wait', plugin=self, context=context, auth_attr=vim_connection_info.access_info, vnf_info=vnf_info, region_name=vim_connection_info.access_info.get( 'region_name')) else: instance_id = vnf_instance.instantiated_vnf_info.instance_id access_info = vim_connection_info.access_info self._vnf_manager.invoke(vim_connection_info.vim_type, 'delete', plugin=self, context=context, vnf_id=instance_id, auth_attr=access_info) self._vnf_manager.invoke(vim_connection_info.vim_type, 'delete_wait', plugin=self, context=context, vnf_id=instance_id, auth_attr=access_info) vnf_lcm_op_occs.error_point = 3 def _update_vnf_rollback_pre(self, context, vnf_info): self._vnfm_plugin._update_vnf_rollback_pre(context, vnf_info) def _update_vnf_rollback(self, context, vnf_info, vnf_instance, vnf_lcm_op_occs): self._vnfm_plugin._update_vnf_rollback(context, vnf_info, 'ERROR', 'ACTIVE', vnf_instance=vnf_instance, vnf_lcm_op_occ=vnf_lcm_op_occs) def _update_vnf_rollback_status_err(self, context, vnf_info): self._vnfm_plugin._update_vnf_rollback_status_err(context, vnf_info) def _rollback_mgmt_call(self, context, vnf_info, kwargs): self._vnfm_plugin.mgmt_call(context, vnf_info, kwargs) def _rollback_vnf_post( self, context, vnf_info, vnf_instance, operation_params, vim_connection_info): vnf_lcm_op_occs = vnf_info['vnf_lcm_op_occ'] if vnf_lcm_op_occs.operation == 'SCALE': scale_vnf_request = objects.ScaleVnfRequest.obj_from_primitive( operation_params, context=context) scale_vnf_request_copy = copy.deepcopy(scale_vnf_request) scale_vnf_request_copy.type = 'SCALE_IN' resource_changes = self._scale_resource_update(context, vnf_info, vnf_instance, scale_vnf_request_copy, vim_connection_info) else: 
resource_changes = self._term_resource_update( context, vnf_info, vnf_instance) vnf_lcm_op_occs.error_point = 2 timestamp = datetime.utcnow() vnf_lcm_op_occs.operation_state = 'ROLLED_BACK' vnf_lcm_op_occs.state_entered_time = timestamp vnf_lcm_op_occs.resource_changes = resource_changes self._update_vnf_rollback(context, vnf_info, vnf_instance, vnf_lcm_op_occs) notification = vnf_info['notification'] notification['notificationStatus'] = 'RESULT' notification['operationState'] = 'ROLLED_BACK' resource_dict = resource_changes.to_dict() if resource_dict.get('affected_vnfcs'): notification['affectedVnfcs'] = resource_dict.get('affected_vnfcs') if resource_dict.get('affected_virtual_links'): notification['affectedVirtualLinks'] = \ resource_dict.get('affected_virtual_links') if resource_dict.get('affected_virtual_storages'): notification['affectedVirtualStorages'] = \ resource_dict.get('affected_virtual_storages') self.rpc_api.send_notification(context, notification) @log.log @revert_to_error_rollback def rollback_vnf(self, context, vnf_info, vnf_instance, operation_params): LOG.info("Request received for rollback vnf '%s'", vnf_instance.id) vnf_lcm_op_occs = vnf_info['vnf_lcm_op_occ'] if vnf_lcm_op_occs.operation == 'SCALE': scale_vnf_request = objects.ScaleVnfRequest.obj_from_primitive( operation_params, context=context) for scale in vnf_instance.instantiated_vnf_info.scale_status: if scale_vnf_request.aspect_id == scale.aspect_id: vnf_info['after_scale_level'] = scale.scale_level break timestamp = datetime.utcnow() vnf_lcm_op_occs.operation_state = 'ROLLING_BACK' vnf_lcm_op_occs.state_entered_time = timestamp LOG.debug("vnf_lcm_op_occs %s", vnf_lcm_op_occs) insta_url = CONF.vnf_lcm.endpoint_url + \ "/vnflcm/v1/vnf_instances/" + \ vnf_instance.id vnflcm_url = CONF.vnf_lcm.endpoint_url + \ "/vnflcm/v1/vnf_lcm_op_occs/" + \ vnf_lcm_op_occs.id notification = {} notification['notificationType'] = \ 'VnfLcmOperationOccurrenceNotification' notification['vnfInstanceId'] = vnf_instance.id notification['notificationStatus'] = 'START' notification['operation'] = vnf_lcm_op_occs.operation notification['operationState'] = 'ROLLING_BACK' if vnf_lcm_op_occs.operation == 'SCALE': notification['isAutomaticInvocation'] = \ vnf_lcm_op_occs.is_automatic_invocation else: notification['isAutomaticInvocation'] = False notification['vnfLcmOpOccId'] = vnf_lcm_op_occs.id notification['_links'] = {} notification['_links']['vnfInstance'] = {} notification['_links']['vnfInstance']['href'] = insta_url notification['_links']['vnfLcmOpOcc'] = {} notification['_links']['vnfLcmOpOcc']['href'] = vnflcm_url vnf_info['notification'] = notification vnf_lcm_op_occs.save() self.rpc_api.send_notification(context, notification) vim_info = vnflcm_utils._get_vim(context, vnf_instance.vim_connection_info) vim_connection_info = objects.VimConnectionInfo.obj_from_primitive( vim_info, context) scale_name_list, grp_id = self._rollback_vnf_pre( context, vnf_info, vnf_instance, operation_params, vim_connection_info) self._rollback_vnf( context, vnf_info, vnf_instance, operation_params, vim_connection_info, scale_name_list, grp_id) self._rollback_vnf_post( context, vnf_info, vnf_instance, operation_params, vim_connection_info)
46.629959
79
0.567189
7,055
68,173
5.065627
0.057548
0.06279
0.056606
0.017125
0.831076
0.801304
0.754183
0.7229
0.690805
0.641334
0
0.001054
0.360099
68,173
1,461
80
46.661875
0.818173
0.017148
0
0.634277
0
0
0.091534
0.013964
0
0
0
0
0
1
0.023718
false
0
0.019128
0
0.053558
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
2dc2f16f3693de4d82e91dffc7fe342189ef8011
1,291
py
Python
tests/test_sanity.py
gilbrookie/cmdr
ee31e5b75a01f00e45f8181bf78017f232f0287e
[ "ISC" ]
null
null
null
tests/test_sanity.py
gilbrookie/cmdr
ee31e5b75a01f00e45f8181bf78017f232f0287e
[ "ISC" ]
null
null
null
tests/test_sanity.py
gilbrookie/cmdr
ee31e5b75a01f00e45f8181bf78017f232f0287e
[ "ISC" ]
null
null
null
import unittest


class SanityTest(unittest.TestCase):

    def test_module_import(self):
        try:
            import cmdr
        except ImportError as ex:
            self.fail("ImportError: %s" % ex)
        try:
            import cmdr.application
        except ImportError as ex:
            self.fail("ImportError: %s" % ex)
        try:
            import cmdr.command
        except ImportError as ex:
            self.fail("ImportError: %s" % ex)

        self.assertTrue(cmdr.__version__)

    def test_symbol_import(self):
        try:
            from cmdr import Cmdr
        except ImportError as ex:
            self.fail("ImportError: %s" % ex)
        try:
            from cmdr import Command
        except ImportError as ex:
            self.fail("ImportError: %s" % ex)
        try:
            from cmdr import subcmd
        except ImportError as ex:
            self.fail("ImportError: %s" % ex)

        self.assertTrue(Cmdr)
        self.assertTrue(Command)
        self.assertTrue(subcmd)

    def test_Cmdr_create(self):
        import cmdr
        c = cmdr.Cmdr(__name__)
        self.assertIsInstance(c, cmdr.Cmdr)

    def test_Command_create(self):
        import cmdr
        c = cmdr.Command()
        self.assertIsInstance(c, cmdr.Command)


if __name__ == '__main__':
    unittest.main()
22.649123
46
0.570875
133
1,291
5.360902
0.210526
0.218794
0.190743
0.193548
0.58906
0.58906
0.518934
0.518934
0.499299
0.499299
0
0
0.343145
1,291
56
47
23.053571
0.840802
0
0
0.5
0
0
0.05732
0
0
0
0
0
0.142857
0
null
null
0
0.547619
null
null
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
5
930b420e9675769f0e4ee22cabcb0c4c19a4720f
70
py
Python
References/hello_package/diff.py
pywaker/pybasics
0e1eed95049cad9c989b6e6c5e60a8ca3bd2ae89
[ "CC-BY-4.0" ]
6
2020-07-14T12:05:08.000Z
2022-01-06T05:28:51.000Z
References/hello_package/diff.py
pywaker/pybasics
0e1eed95049cad9c989b6e6c5e60a8ca3bd2ae89
[ "CC-BY-4.0" ]
null
null
null
References/hello_package/diff.py
pywaker/pybasics
0e1eed95049cad9c989b6e6c5e60a8ca3bd2ae89
[ "CC-BY-4.0" ]
13
2020-06-12T16:04:57.000Z
2020-10-13T04:49:05.000Z
def diff():
    print("Diff Diff")


def patch():
    print("Patch")
8.75
22
0.542857
9
70
4.222222
0.444444
0
0
0
0
0
0
0
0
0
0
0
0.257143
70
7
23
10
0.730769
0
0
0
0
0
0.202899
0
0
0
0
0
0
1
0.5
true
0
0
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
1
0
5
932743858a860b6da1d32a706d8697c0158b5fe1
94
py
Python
Tests/test_M2.py
Abhi-1U/pyrix
a6ea9577196d23dbebf6709c17d7c60eca187473
[ "MIT" ]
2
2020-10-02T17:59:05.000Z
2020-11-29T09:25:39.000Z
Tests/test_M2.py
Abhi-1U/pyrix
a6ea9577196d23dbebf6709c17d7c60eca187473
[ "MIT" ]
null
null
null
Tests/test_M2.py
Abhi-1U/pyrix
a6ea9577196d23dbebf6709c17d7c60eca187473
[ "MIT" ]
null
null
null
import pytest
from pyrix import Matrix
from pyrix.exception import incompaitableTypeException
23.5
54
0.882979
11
94
7.545455
0.636364
0.216867
0
0
0
0
0
0
0
0
0
0
0.106383
94
3
55
31.333333
0.988095
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
934a87be5728ff8097f343827a87dc0c30b1a0b1
152
py
Python
tests/travel/test_travel_01.py
hackebrot/earth
2787eeb37692f1c82bc12cb24a4c1574826204a7
[ "MIT" ]
12
2019-08-03T00:55:46.000Z
2021-01-17T15:02:05.000Z
tests/events/test_events_04.py
hackebrot/earth
2787eeb37692f1c82bc12cb24a4c1574826204a7
[ "MIT" ]
5
2019-03-12T13:14:34.000Z
2019-10-29T15:01:53.000Z
tests/year/test_year_01.py
hackebrot/earth
2787eeb37692f1c82bc12cb24a4c1574826204a7
[ "MIT" ]
3
2019-12-21T04:35:13.000Z
2020-12-11T14:44:39.000Z
def test_numbers():
    assert 1234 == 1234


def test_hello_world():
    assert "hello" + "world" == "helloworld"


def test_foobar():
    assert True
13.818182
44
0.644737
19
152
4.947368
0.526316
0.223404
0
0
0
0
0
0
0
0
0
0.067797
0.223684
152
10
45
15.2
0.728814
0
0
0
0
0
0.131579
0
0
0
0
0
0.5
1
0.5
true
0
0
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
1
0
0
0
0
0
0
5
936355052461378ad0f693cdd154ae5488b0c3f9
105
py
Python
freqtrade/plugins/protections/__init__.py
Fractate/freqbot
47b35d2320dc97977411454c1466c762d339fdee
[ "MIT" ]
1
2022-03-06T22:44:30.000Z
2022-03-06T22:44:30.000Z
freqtrade/plugins/protections/__init__.py
Fractate/freqbot
47b35d2320dc97977411454c1466c762d339fdee
[ "MIT" ]
null
null
null
freqtrade/plugins/protections/__init__.py
Fractate/freqbot
47b35d2320dc97977411454c1466c762d339fdee
[ "MIT" ]
1
2021-09-22T23:28:21.000Z
2021-09-22T23:28:21.000Z
# flake8: noqa: F401
from freqtrade.plugins.protections.iprotection import IProtection, ProtectionReturn
35
83
0.847619
11
105
8.090909
0.909091
0
0
0
0
0
0
0
0
0
0
0.041667
0.085714
105
2
84
52.5
0.885417
0.171429
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
9366b14ac438e976d06aa5b03b8736dd965f2b51
12,723
py
Python
MLStudio/supervised/visual/base.py
j2slab/MLStudio
7d7c4b1073617968c28f0e496020e4720b552451
[ "BSD-3-Clause" ]
1
2019-05-13T01:07:23.000Z
2019-05-13T01:07:23.000Z
MLStudio/supervised/visual/base.py
DecisionScients/MLStudio
7d7c4b1073617968c28f0e496020e4720b552451
[ "BSD-3-Clause" ]
1
2020-04-11T22:14:42.000Z
2020-04-11T22:14:42.000Z
MLStudio/supervised/visual/base.py
decisionscients/MLStudio
7d7c4b1073617968c28f0e496020e4720b552451
[ "BSD-3-Clause" ]
null
null
null
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# =========================================================================== #
# Project : MLStudio                                                          #
# Version : 0.1.0                                                             #
# File    : base.py                                                           #
# Python  : 3.8.2                                                             #
# --------------------------------------------------------------------------- #
# Author  : John James                                                        #
# Company : DecisionScients                                                   #
# Email   : jjames@decisionscients.com                                        #
# URL     : https://github.com/decisionscients/MLStudio                       #
# --------------------------------------------------------------------------- #
# Created       : Tuesday, March 17th 2020, 7:15:23 pm                        #
# Last Modified : Tuesday, March 17th 2020, 7:15:23 pm                        #
# Modified By   : John James (jjames@decisionscients.com)                     #
# --------------------------------------------------------------------------- #
# License : BSD                                                               #
# Copyright (c) 2020 DecisionScients                                          #
# =========================================================================== #
"""Base classes for visualizations."""
import os
from abc import ABC, abstractmethod

import plotly.express as px
import plotly.graph_objects as go
import plotly.io as pio
from sklearn.base import BaseEstimator

from mlstudio.utils.file_manager import save_plotly_figure, \
    save_plotly_animation
# --------------------------------------------------------------------------- #
#                              BASE VISUALIZER                                #
# --------------------------------------------------------------------------- #
class BaseVisualizer(ABC, BaseEstimator):
    """Abstract base class for static visualizations of a single model.

    Parameters
    ----------
    estimator : An unfitted MLStudio estimator object
        The estimator object being visualized

    title : str (default=None)
        The title of the visualization. Each subclass will have a default
        title.

    height : int (default=450)
        The height of the visualization in pixels

    width : int (default=900)
        The width of the visualization in pixels

    template : str (default='plotly_white')
        The plotly template to be used. Valid values include:
        'ggplot2', 'seaborn', 'simple_white', 'plotly', 'plotly_white',
        'plotly_dark', 'presentation', 'xgridoff', 'ygridoff', 'gridon',
        'none'

    kwargs : dict
        Additional keyword arguments to be passed to the underlying
        plotly object.

    """

    def __init__(self, estimator, title=None, height=450, width=900,
                 template='plotly_white', **kwargs):
        self.estimator = estimator
        self.title = title
        self.height = height
        self.width = width
        self.template = template
        for k, v in kwargs.items():
            setattr(self, k, v)

    @abstractmethod
    def fit(self, X, y):
        """Fits the visualizer to the data.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            Training vector, where n_samples is the number of samples and
            n_features is the number of features.

        y : array-like, shape (n_samples) or (n_samples, n_features)
            Target relative to X for classification or regression

        """
        self.estimator.fit(X, y)

    def show(self, **kwargs):
        """Renders the visualization"""
        self.fig.show()

    def save(self, filepath):
        """Saves image to filepath

        Parameters
        ----------
        filepath : str
            Relative filepath including file name and extension

        """
        directory = os.path.dirname(filepath)
        filename = os.path.basename(filepath)
        save_plotly_figure(fig=self.fig, directory=directory,
                           filename=filename)

# --------------------------------------------------------------------------- #
#                      BASE MODEL SELECTION VISUALIZER                        #
# --------------------------------------------------------------------------- #
class BaseModelSelectioniVisualizer(ABC, BaseEstimator):
    """Abstract base class for static visualizations of multiple models.

    Parameters
    ----------
    estimators : list of MLStudio estimator objects
        The estimators to be visualized

    title : str (default=None)
        The title of the visualization. Each subclass will have a default
        title.

    height : int (default=450)
        The height of the visualization in pixels

    width : int (default=900)
        The width of the visualization in pixels

    template : str (default='plotly_white')
        The plotly template to be used. Valid values include:
        'ggplot2', 'seaborn', 'simple_white', 'plotly', 'plotly_white',
        'plotly_dark', 'presentation', 'xgridoff', 'ygridoff', 'gridon',
        'none'

    kwargs : dict
        Additional keyword arguments to be passed to the underlying
        plotly object.

    """

    def __init__(self, estimators, title=None, height=450, width=900,
                 template='plotly_white', **kwargs):
        self.estimators = estimators
        self.title = title
        self.height = height
        self.width = width
        self.template = template
        for k, v in kwargs.items():
            setattr(self, k, v)

    @abstractmethod
    def fit(self, X, y):
        """Fits the visualizer to the data."""
        self.models_ = []
        for estimator in self.estimators:
            self.models_.append(estimator.fit(X, y))

    def show(self, **kwargs):
        """Renders the visualization"""
        self.fig.show()

    def save(self, filepath):
        """Saves image to filepath

        Parameters
        ----------
        filepath : str
            Relative filepath including file name and extension

        """
        directory = os.path.dirname(filepath)
        filename = os.path.basename(filepath)
        save_plotly_figure(fig=self.fig, directory=directory,
                           filename=filename)

# --------------------------------------------------------------------------- #
#                               BASE ANIMATOR                                 #
# --------------------------------------------------------------------------- #
class BaseAnimator(ABC, BaseEstimator):
    """Abstract base class for animations for a single model.

    Parameters
    ----------
    estimator : MLStudio estimator object
        The estimator being visualized

    title : str (default=None)
        The title of the visualization. Each subclass will have a default
        title.

    height : int (default=450)
        The height of the visualization in pixels

    width : int (default=900)
        The width of the visualization in pixels

    template : str (default='plotly_white')
        The plotly template to be used. Valid values include:
        'ggplot2', 'seaborn', 'simple_white', 'plotly', 'plotly_white',
        'plotly_dark', 'presentation', 'xgridoff', 'ygridoff', 'gridon',
        'none'

    kwargs : dict
        Additional keyword arguments to be passed to the underlying
        plotly object.

    """

    def __init__(self, estimator, title=None, height=450, width=900,
                 template='plotly_white', **kwargs):
        self.estimator = estimator
        self.title = title
        self.height = height
        self.width = width
        self.template = template
        for k, v in kwargs.items():
            setattr(self, k, v)

    @abstractmethod
    def fit(self, X, y):
        """Fits the visualizer to the data."""
        self.estimator.fit(X, y)

    def show(self, **kwargs):
        """Renders the visualization"""
        pio.renderers.default = "browser"
        self.fig.show()

    def save(self, filepath):
        """Saves image to filepath

        Parameters
        ----------
        filepath : str
            Relative filepath including file name and extension

        """
        directory = os.path.dirname(filepath)
        filename = os.path.basename(filepath)
        save_plotly_animation(fig=self.fig, directory=directory,
                              filename=filename)

# --------------------------------------------------------------------------- #
#                       BASE MODEL SELECTION ANIMATOR                         #
# --------------------------------------------------------------------------- #
class BaseModelSelectioniAnimator(ABC, BaseEstimator):
    """Abstract base class for animations of multiple models.

    Parameters
    ----------
    estimators : List
        A list of MLStudio estimator objects

    title : str (default=None)
        The title of the visualization. Each subclass will have a default
        title.

    height : int (default=450)
        The height of the visualization in pixels

    width : int (default=900)
        The width of the visualization in pixels

    template : str (default='plotly_white')
        The plotly template to be used. Valid values include:
        'ggplot2', 'seaborn', 'simple_white', 'plotly', 'plotly_white',
        'plotly_dark', 'presentation', 'xgridoff', 'ygridoff', 'gridon',
        'none'

    kwargs : dict
        Additional keyword arguments to be passed to the underlying
        plotly object.

    """

    def __init__(self, estimators, title=None, height=450, width=900,
                 template='plotly_white', **kwargs):
        self.estimators = estimators
        self.title = title
        self.height = height
        self.width = width
        self.template = template
        for k, v in kwargs.items():
            setattr(self, k, v)

    @abstractmethod
    def fit(self, X, y):
        """Fits the visualizer to the data."""
        self.models_ = []
        for estimator in self.estimators:
            self.models_.append(estimator.fit(X, y))

    def show(self, **kwargs):
        """Renders the visualization"""
        pio.renderers.default = "browser"
        self.fig.show()

    def save(self, filepath):
        """Saves image to filepath

        Parameters
        ----------
        filepath : str
            Relative filepath including file name and extension

        """
        directory = os.path.dirname(filepath)
        filename = os.path.basename(filepath)
        save_plotly_animation(fig=self.fig, directory=directory,
                              filename=filename)

# --------------------------------------------------------------------------- #
#                                BASE SUBPLOT                                 #
# --------------------------------------------------------------------------- #
class BaseSubplot(ABC, BaseEstimator):
    """Abstract base class for all subplot classes.

    Subplot classes add traces and updates to a figure object passed as a
    parameter to the class. These objects do not render plots or animations
    in and of themselves. The purpose is to create modularization in the
    way that plots having 2 or more subplots are built.

    Parameters
    ----------
    fig : plotly Figure object
        The object upon which the subplot is built

    row : int
        The row in which the subplot is to be rendered

    col : int
        The column in which the subplot is to be rendered

    xaxis_label : str or None (default=None)
        The label for the xaxis of the subplot

    yaxis_label : str or None (default=None)
        The label for the yaxis of the subplot

    kwargs : dict
        Additional keyword arguments required by subclasses.

    """

    @abstractmethod
    def __init__(self, fig, row, col, xaxis_label=None, yaxis_label=None,
                 **kwargs):
        self.fig = fig
        self.row = row
        self.col = col
        self.xaxis_label = xaxis_label
        self.yaxis_label = yaxis_label
        for k, v in kwargs.items():
            setattr(self, k, v)

    @abstractmethod
    def fit(self, X, y=None):
        """Adds the traces and updates to the fig object.

        Parameters
        ----------
        X : array-like in 1 dimension
            The x-axis data

        y : array-like of 1 dimension (optional)
            The y-axis data

        """
        pass
35.243767
94
0.512379
1,239
12,723
5.202583
0.17837
0.039715
0.033509
0.024822
0.75287
0.714086
0.708501
0.708501
0.690506
0.67282
0
0.010245
0.317221
12,723
360
95
35.341667
0.731783
0.601981
0
0.796117
0
0
0.014811
0
0
0
0
0
0
1
0.174757
false
0.009709
0.067961
0
0.291262
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
9371be72319dc9f573c765edaafa3ffebfd864d5
75
py
Python
src/scanlib/__init__.py
heavyrage/scanserver
cee3d43f0d1c3af56da8e616c6f2251d8a4b3c19
[ "MIT" ]
null
null
null
src/scanlib/__init__.py
heavyrage/scanserver
cee3d43f0d1c3af56da8e616c6f2251d8a4b3c19
[ "MIT" ]
null
null
null
src/scanlib/__init__.py
heavyrage/scanserver
cee3d43f0d1c3af56da8e616c6f2251d8a4b3c19
[ "MIT" ]
null
null
null
from flask_restful import fields, marshal
from constants import Constants
37.5
42
0.853333
10
75
6.3
0.7
0
0
0
0
0
0
0
0
0
0
0
0.133333
75
2
43
37.5
0.969231
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
937b49031d762a296966718d10bb19119acb2122
18,383
py
Python
message_evarilos_engine_type2_pb2.py
flemic/ECE-EVARILOS
d4492dd218dbe2e4f19357af2668766ea13fcd17
[ "BSD-3-Clause" ]
1
2017-02-02T09:51:37.000Z
2017-02-02T09:51:37.000Z
message_evarilos_engine_type2_pb2.py
flemic/ECE-EVARILOS
d4492dd218dbe2e4f19357af2668766ea13fcd17
[ "BSD-3-Clause" ]
null
null
null
message_evarilos_engine_type2_pb2.py
flemic/ECE-EVARILOS
d4492dd218dbe2e4f19357af2668766ea13fcd17
[ "BSD-3-Clause" ]
null
null
null
# Generated by the protocol buffer compiler.  DO NOT EDIT!
# source: message_evarilos_engine_type2.proto

from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)


DESCRIPTOR = _descriptor.FileDescriptor(
  name='message_evarilos_engine_type2.proto',
  package='evarilos',
  serialized_pb='\n#message_evarilos_engine_type2.proto\x12\x08\x65varilos\"\xfd\x08\n\tece_type2\x12\x15\n\rtimestamp_utc\x18\x01 \x02(\x03\x12\x18\n\x10\x65xperiment_label\x18\x02 \x02(\t\x12:\n\x08scenario\x18\x03 \x02(\x0b\x32(.evarilos.ece_type2.Scenario_description\x12\x1f\n\x10request_raw_data\x18\x04 \x02(\x08:\x05\x66\x61lse\x12 \n\x11request_estimates\x18\x05 \x02(\x08:\x05\x66\x61lse\x12\x1c\n\rstore_metrics\x18\x06 \x02(\x08:\x05\x66\x61lse\x12(\n\x19request_power_consumption\x18\x07 \x02(\x08:\x05\x66\x61lse\x12\x36\n\x0cground_truth\x18\x08 \x02(\x0b\x32 .evarilos.ece_type2.Ground_truth\x12\x38\n\x08\x65stimate\x18\t \x01(\x0b\x32&.evarilos.ece_type2.Estimated_location\x12!\n\x19sut_location_estimate_URI\x18\n \x01(\t\x12\x18\n\x10sut_raw_data_URI\x18\x0b \x01(\t\x12\x1e\n\x16sut_power_estimate_URI\x18\x0c \x01(\t\x12\x1b\n\x13metrics_storage_URI\x18\r \x01(\t\x12 \n\x18metrics_storage_database\x18\x0e \x01(\t\x12\"\n\x1ametrics_storage_collection\x18\x0f \x01(\t\x1a\xba\x01\n\x0cGround_truth\x12\x10\n\x08point_id\x18\x01 \x02(\x05\x12\x19\n\x11localized_node_id\x18\x02 \x02(\x05\x12\x13\n\x0bpoint_label\x18\x03 \x01(\t\x12\x19\n\x11true_coordinate_x\x18\x04 \x01(\x01\x12\x19\n\x11true_coordinate_y\x18\x05 \x01(\x01\x12\x19\n\x11true_coordinate_z\x18\x06 \x01(\x01\x12\x17\n\x0ftrue_room_label\x18\x07 \x01(\t\x1a\xa6\x01\n\x12\x45stimated_location\x12\x18\n\x10\x65st_coordinate_x\x18\x01 \x01(\x01\x12\x18\n\x10\x65st_coordinate_y\x18\x02 \x01(\x01\x12\x18\n\x10\x65st_coordinate_z\x18\x03 \x01(\x01\x12\x16\n\x0e\x65st_room_label\x18\x04 \x01(\t\x12\x0f\n\x07latency\x18\x05 \x01(\x01\x12\x19\n\x11power_consumption\x18\x06 \x01(\x01\x1a\xdf\x01\n\x14Scenario_description\x12\x15\n\rtestbed_label\x18\x01 \x02(\t\x12\x1b\n\x13testbed_description\x18\x02 \x02(\t\x12\x1e\n\x16\x65xperiment_description\x18\x03 \x02(\t\x12\x17\n\x0fsut_description\x18\x04 \x02(\t\x12\x1c\n\x14receiver_description\x18\x05 \x02(\t\x12\x1a\n\x12sender_description\x18\x06 \x02(\t\x12 \n\x18interference_description\x18\x07 \x02(\t')


_ECE_TYPE2_GROUND_TRUTH = _descriptor.Descriptor(
  name='Ground_truth',
  full_name='evarilos.ece_type2.Ground_truth',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='point_id', full_name='evarilos.ece_type2.Ground_truth.point_id',
      index=0, number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='localized_node_id', full_name='evarilos.ece_type2.Ground_truth.localized_node_id',
      index=1, number=2, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='point_label', full_name='evarilos.ece_type2.Ground_truth.point_label',
      index=2, number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='true_coordinate_x', full_name='evarilos.ece_type2.Ground_truth.true_coordinate_x',
      index=3, number=4, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='true_coordinate_y', full_name='evarilos.ece_type2.Ground_truth.true_coordinate_y',
      index=4, number=5, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='true_coordinate_z', full_name='evarilos.ece_type2.Ground_truth.true_coordinate_z',
      index=5, number=6, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='true_room_label', full_name='evarilos.ece_type2.Ground_truth.true_room_label',
      index=6, number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=618,
  serialized_end=804,
)

_ECE_TYPE2_ESTIMATED_LOCATION = _descriptor.Descriptor(
  name='Estimated_location',
  full_name='evarilos.ece_type2.Estimated_location',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='est_coordinate_x', full_name='evarilos.ece_type2.Estimated_location.est_coordinate_x',
      index=0, number=1, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='est_coordinate_y', full_name='evarilos.ece_type2.Estimated_location.est_coordinate_y',
      index=1, number=2, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='est_coordinate_z', full_name='evarilos.ece_type2.Estimated_location.est_coordinate_z',
      index=2, number=3, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='est_room_label', full_name='evarilos.ece_type2.Estimated_location.est_room_label',
      index=3, number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='latency', full_name='evarilos.ece_type2.Estimated_location.latency',
      index=4, number=5, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='power_consumption', full_name='evarilos.ece_type2.Estimated_location.power_consumption',
      index=5, number=6, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=807,
  serialized_end=973,
)

_ECE_TYPE2_SCENARIO_DESCRIPTION = _descriptor.Descriptor(
  name='Scenario_description',
  full_name='evarilos.ece_type2.Scenario_description',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='testbed_label', full_name='evarilos.ece_type2.Scenario_description.testbed_label',
      index=0, number=1, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='testbed_description', full_name='evarilos.ece_type2.Scenario_description.testbed_description',
      index=1, number=2, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='experiment_description', full_name='evarilos.ece_type2.Scenario_description.experiment_description',
      index=2, number=3, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='sut_description', full_name='evarilos.ece_type2.Scenario_description.sut_description',
      index=3, number=4, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='receiver_description', full_name='evarilos.ece_type2.Scenario_description.receiver_description',
      index=4, number=5, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='sender_description', full_name='evarilos.ece_type2.Scenario_description.sender_description',
      index=5, number=6, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='interference_description', full_name='evarilos.ece_type2.Scenario_description.interference_description',
      index=6, number=7, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=976,
  serialized_end=1199,
)

_ECE_TYPE2 = _descriptor.Descriptor(
  name='ece_type2',
  full_name='evarilos.ece_type2',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='timestamp_utc', full_name='evarilos.ece_type2.timestamp_utc',
      index=0, number=1, type=3, cpp_type=2, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='experiment_label', full_name='evarilos.ece_type2.experiment_label',
      index=1, number=2, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='scenario', full_name='evarilos.ece_type2.scenario',
      index=2, number=3, type=11, cpp_type=10, label=2,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='request_raw_data', full_name='evarilos.ece_type2.request_raw_data',
      index=3, number=4, type=8, cpp_type=7, label=2,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='request_estimates', full_name='evarilos.ece_type2.request_estimates',
      index=4, number=5, type=8, cpp_type=7, label=2,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='store_metrics', full_name='evarilos.ece_type2.store_metrics',
      index=5, number=6, type=8, cpp_type=7, label=2,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='request_power_consumption', full_name='evarilos.ece_type2.request_power_consumption',
      index=6, number=7, type=8, cpp_type=7, label=2,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='ground_truth', full_name='evarilos.ece_type2.ground_truth',
      index=7, number=8, type=11, cpp_type=10, label=2,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='estimate', full_name='evarilos.ece_type2.estimate',
      index=8, number=9, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='sut_location_estimate_URI', full_name='evarilos.ece_type2.sut_location_estimate_URI',
      index=9, number=10, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='sut_raw_data_URI', full_name='evarilos.ece_type2.sut_raw_data_URI',
      index=10, number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='sut_power_estimate_URI', full_name='evarilos.ece_type2.sut_power_estimate_URI',
      index=11, number=12, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='metrics_storage_URI', full_name='evarilos.ece_type2.metrics_storage_URI',
      index=12, number=13, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='metrics_storage_database', full_name='evarilos.ece_type2.metrics_storage_database',
      index=13, number=14, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
    _descriptor.FieldDescriptor(
      name='metrics_storage_collection', full_name='evarilos.ece_type2.metrics_storage_collection',
      index=14, number=15, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None, options=None),
  ],
  extensions=[
  ],
  nested_types=[_ECE_TYPE2_GROUND_TRUTH, _ECE_TYPE2_ESTIMATED_LOCATION,
                _ECE_TYPE2_SCENARIO_DESCRIPTION, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=50,
  serialized_end=1199,
)

_ECE_TYPE2_GROUND_TRUTH.containing_type = _ECE_TYPE2
_ECE_TYPE2_ESTIMATED_LOCATION.containing_type = _ECE_TYPE2
_ECE_TYPE2_SCENARIO_DESCRIPTION.containing_type = _ECE_TYPE2
_ECE_TYPE2.fields_by_name['scenario'].message_type = _ECE_TYPE2_SCENARIO_DESCRIPTION
_ECE_TYPE2.fields_by_name['ground_truth'].message_type = _ECE_TYPE2_GROUND_TRUTH
_ECE_TYPE2.fields_by_name['estimate'].message_type = _ECE_TYPE2_ESTIMATED_LOCATION
DESCRIPTOR.message_types_by_name['ece_type2'] = _ECE_TYPE2


class ece_type2(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType

  class Ground_truth(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _ECE_TYPE2_GROUND_TRUTH
    # @@protoc_insertion_point(class_scope:evarilos.ece_type2.Ground_truth)

  class Estimated_location(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _ECE_TYPE2_ESTIMATED_LOCATION
    # @@protoc_insertion_point(class_scope:evarilos.ece_type2.Estimated_location)

  class Scenario_description(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _ECE_TYPE2_SCENARIO_DESCRIPTION
    # @@protoc_insertion_point(class_scope:evarilos.ece_type2.Scenario_description)

  DESCRIPTOR = _ECE_TYPE2
  # @@protoc_insertion_point(class_scope:evarilos.ece_type2)

# @@protoc_insertion_point(module_scope)
48.376316
2,049
0.747212
2,546
18,383
5.07502
0.077769
0.067487
0.056962
0.057349
0.809767
0.738178
0.706137
0.668911
0.57511
0.551196
0
0.050125
0.131807
18,383
379
2,050
48.503958
0.759461
0.024697
0
0.683908
1
0.002874
0.254129
0.215123
0
0
0
0
0
1
0
false
0
0.011494
0
0.028736
0
0
0
0
null
0
0
0
1
1
1
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5