hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4a882340e389d58567246eb4bd9f99b34280b6ba | 7,827 | py | Python | venv/Lib/site-packages/caffe2/python/operator_test/adagrad_test.py | Westlanderz/AI-Plat1 | 1187c22819e5135e8e8189c99b86a93a0d66b8d8 | [
"MIT"
] | 1 | 2022-01-08T12:30:44.000Z | 2022-01-08T12:30:44.000Z | venv/Lib/site-packages/caffe2/python/operator_test/adagrad_test.py | Westlanderz/AI-Plat1 | 1187c22819e5135e8e8189c99b86a93a0d66b8d8 | [
"MIT"
] | null | null | null | venv/Lib/site-packages/caffe2/python/operator_test/adagrad_test.py | Westlanderz/AI-Plat1 | 1187c22819e5135e8e8189c99b86a93a0d66b8d8 | [
"MIT"
] | null | null | null | import functools
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core
from caffe2.python.operator_test.adagrad_test_helper import (
adagrad_sparse_test_helper,
ref_adagrad,
)
from hypothesis import HealthCheck, given, settings
class TestAdagrad(serial.SerializedTestCase):
@given(
inputs=hu.tensors(n=3),
lr=st.floats(
min_value=0.01, max_value=0.99, allow_nan=False, allow_infinity=False
),
epsilon=st.floats(
min_value=0.01, max_value=0.99, allow_nan=False, allow_infinity=False
),
weight_decay=st.sampled_from([0.0, 0.1]),
**hu.gcs
)
@settings(deadline=10000)
def test_adagrad(self, inputs, lr, epsilon, weight_decay, gc, dc):
param, momentum, grad = inputs
momentum = np.abs(momentum)
lr = np.array([lr], dtype=np.float32)
op = core.CreateOperator(
"Adagrad",
["param", "momentum", "grad", "lr"],
["param", "momentum"],
epsilon=epsilon,
weight_decay=weight_decay,
device_option=gc,
)
self.assertReferenceChecks(
gc,
op,
[param, momentum, grad, lr],
functools.partial(ref_adagrad, epsilon=epsilon, weight_decay=weight_decay),
)
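    # For context, a minimal NumPy sketch of the dense Adagrad update that
    # ref_adagrad is expected to implement for the test above (an assumption
    # drawn from the operator's inputs/outputs, not the actual helper; caffe2
    # conventionally folds the descent sign into lr/grad, hence the addition):
    #
    #     def adagrad_step(param, momentum, grad, lr, epsilon, weight_decay=0.0):
    #         grad = grad + weight_decay * param          # optional L2 term
    #         momentum = momentum + grad * grad           # accumulate squared grads
    #         param = param + lr * grad / (np.sqrt(momentum) + epsilon)
    #         return param, momentum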
@given(
inputs=hu.tensors(n=3),
lr=st.floats(
min_value=0.01, max_value=0.99, allow_nan=False, allow_infinity=False
),
epsilon=st.floats(
min_value=0.01, max_value=0.99, allow_nan=False, allow_infinity=False
),
weight_decay=st.sampled_from([0.0, 0.1]),
**hu.gcs_cpu_only
)
@settings(deadline=10000)
def test_adagrad_output_effective_lr(
self, inputs, lr, epsilon, weight_decay, gc, dc
):
param, momentum, grad = inputs
momentum = np.abs(momentum)
lr = np.array([lr], dtype=np.float32)
op = core.CreateOperator(
"Adagrad",
["param", "momentum", "grad", "lr"],
["param", "momentum", "effective_lr"],
epsilon=epsilon,
weight_decay=weight_decay,
device_option=gc,
)
self.assertReferenceChecks(
gc,
op,
[param, momentum, grad, lr],
functools.partial(
ref_adagrad,
epsilon=epsilon,
output_effective_lr=True,
weight_decay=weight_decay,
),
)
@given(
inputs=hu.tensors(n=3),
lr=st.floats(
min_value=0.01, max_value=0.99, allow_nan=False, allow_infinity=False
),
epsilon=st.floats(
min_value=0.01, max_value=0.99, allow_nan=False, allow_infinity=False
),
**hu.gcs_cpu_only
)
@settings(deadline=10000)
def test_adagrad_output_effective_lr_and_update(self, inputs, lr, epsilon, gc, dc):
param, momentum, grad = inputs
momentum = np.abs(momentum)
lr = np.array([lr], dtype=np.float32)
op = core.CreateOperator(
"Adagrad",
["param", "momentum", "grad", "lr"],
["param", "momentum", "effective_lr", "update"],
epsilon=epsilon,
device_option=gc,
)
self.assertReferenceChecks(
gc,
op,
[param, momentum, grad, lr],
functools.partial(
ref_adagrad, epsilon=epsilon, output_effective_lr_and_update=True
),
)
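    # The optional outputs requested above are presumably
    # effective_lr = lr / (np.sqrt(momentum) + epsilon) and
    # update = effective_lr * grad, i.e. the per-element step actually taken
    # (an assumption from the output names, not verified against the op).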
    # Suppress the filter_too_much health check, which is most likely
    # triggered by the `assume` call rejecting too many generated examples.
@settings(suppress_health_check=[HealthCheck.filter_too_much], deadline=10000)
@given(
inputs=hu.tensors(n=3),
lr=st.floats(
min_value=0.01, max_value=0.99, allow_nan=False, allow_infinity=False
),
epsilon=st.floats(
min_value=0.01, max_value=0.99, allow_nan=False, allow_infinity=False
),
weight_decay=st.sampled_from([0.0, 0.1]),
**hu.gcs
)
def test_sparse_adagrad(self, inputs, lr, epsilon, weight_decay, gc, dc):
adagrad_sparse_test_helper(
self,
inputs,
lr,
epsilon,
None,
ref_adagrad,
gc,
dc,
weight_decay=weight_decay,
)
@given(
inputs=hu.tensors(n=2),
lr=st.floats(
min_value=0.01, max_value=0.99, allow_nan=False, allow_infinity=False
),
epsilon=st.floats(
min_value=0.01, max_value=0.99, allow_nan=False, allow_infinity=False
),
**hu.gcs
)
@settings(deadline=10000)
def test_sparse_adagrad_empty(self, inputs, lr, epsilon, gc, dc):
param, momentum = inputs
grad = np.empty(shape=(0,) + param.shape[1:], dtype=np.float32)
ref_using_fp16_values = [False]
if gc == hu.gpu_do:
ref_using_fp16_values.append(True)
for ref_using_fp16 in ref_using_fp16_values:
if ref_using_fp16:
print("test_sparse_adagrad_empty with half precision embedding")
momentum_i = momentum.astype(np.float16)
param_i = param.astype(np.float16)
else:
print("test_sparse_adagrad_empty with full precision embedding")
momentum_i = momentum.astype(np.float32)
param_i = param.astype(np.float32)
adagrad_sparse_test_helper(
self,
[param_i, momentum_i, grad],
lr,
epsilon,
None,
ref_adagrad,
gc,
dc,
)
    # Suppress the filter_too_much health check, which is most likely
    # triggered by the `assume` call rejecting too many generated examples.
@settings(suppress_health_check=[HealthCheck.filter_too_much], deadline=10000)
@given(
inputs=hu.tensors(n=3),
lr=st.sampled_from([0.01, 0.99]),
epsilon=st.sampled_from([0.01, 0.99]),
weight_decay=st.sampled_from([0.0, 0.1]),
counter_halflife=st.sampled_from([-1, 5]),
**hu.gcs
)
def test_row_wise_sparse_adagrad(
self, inputs, lr, epsilon, weight_decay, counter_halflife, gc, dc
):
adagrad_sparse_test_helper(
self,
inputs,
lr,
epsilon,
None,
functools.partial(ref_adagrad, row_wise=True),
gc,
dc,
row_wise=True,
weight_decay=weight_decay,
counter_halflife=counter_halflife,
)
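    # Note: row-wise sparse Adagrad (the row_wise=True flags above) keeps a
    # single accumulated momentum value per embedding row rather than one per
    # element, trading accuracy for memory; counter_halflife presumably
    # controls the decay of per-row update counters, with -1 disabling them.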
@given(
inputs=hu.tensors(n=2),
lr=st.floats(
min_value=0.01, max_value=0.99, allow_nan=False, allow_infinity=False
),
epsilon=st.floats(
min_value=0.01, max_value=0.99, allow_nan=False, allow_infinity=False
),
**hu.gcs
)
@settings(deadline=None)
def test_row_wise_sparse_adagrad_empty(self, inputs, lr, epsilon, gc, dc):
param, momentum = inputs
grad = np.empty(shape=(0,) + param.shape[1:], dtype=np.float32)
adagrad_sparse_test_helper(
self,
[param, momentum, grad],
lr,
epsilon,
None,
ref_adagrad,
gc,
dc,
row_wise=True,
)
| 32.342975 | 88 | 0.544653 | 882 | 7,827 | 4.61678 | 0.133787 | 0.035363 | 0.032417 | 0.047151 | 0.834725 | 0.808694 | 0.786591 | 0.743369 | 0.693517 | 0.670432 | 0 | 0.034483 | 0.351603 | 7,827 | 241 | 89 | 32.477178 | 0.767882 | 0.024658 | 0 | 0.712329 | 0 | 0 | 0.034786 | 0.006768 | 0 | 0 | 0 | 0 | 0.013699 | 1 | 0.031963 | false | 0 | 0.03653 | 0 | 0.073059 | 0.009132 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
4a967d979646d23be4ccd7b75fc09adda76b80a5 | 9,308 | py | Python | python/paddle/fluid/tests/unittests/test_math_op_patch_var_base.py | slf12/Paddle | fa43d74a3a16ac696db5dc893c9a7b1c6913dc85 | [
"Apache-2.0"
] | 2 | 2020-02-11T08:53:05.000Z | 2020-02-20T08:06:25.000Z | python/paddle/fluid/tests/unittests/test_math_op_patch_var_base.py | slf12/Paddle | fa43d74a3a16ac696db5dc893c9a7b1c6913dc85 | [
"Apache-2.0"
] | null | null | null | python/paddle/fluid/tests/unittests/test_math_op_patch_var_base.py | slf12/Paddle | fa43d74a3a16ac696db5dc893c9a7b1c6913dc85 | [
"Apache-2.0"
] | 2 | 2019-08-16T12:03:28.000Z | 2019-09-03T13:02:57.000Z | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
from decorator_helper import prog_scope
import paddle.fluid as fluid
import numpy as np
import six
class TestMathOpPatchesVarBase(unittest.TestCase):
def setUp(self):
self.shape = [10, 10]
self.dtype = np.float32
def test_add(self):
a_np = np.random.random(self.shape).astype(self.dtype)
b_np = np.random.random(self.shape).astype(self.dtype)
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
b = fluid.dygraph.to_variable(b_np)
res = a + b
self.assertTrue(np.array_equal(res.numpy(), a_np + b_np))
def test_sub(self):
a_np = np.random.random(self.shape).astype(self.dtype)
b_np = np.random.random(self.shape).astype(self.dtype)
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
b = fluid.dygraph.to_variable(b_np)
res = a - b
self.assertTrue(np.array_equal(res.numpy(), a_np - b_np))
def test_mul(self):
a_np = np.random.random(self.shape).astype(self.dtype)
b_np = np.random.random(self.shape).astype(self.dtype)
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
b = fluid.dygraph.to_variable(b_np)
res = a * b
self.assertTrue(np.array_equal(res.numpy(), a_np * b_np))
def test_div(self):
a_np = np.random.random(self.shape).astype(self.dtype)
b_np = np.random.random(self.shape).astype(self.dtype)
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
b = fluid.dygraph.to_variable(b_np)
res = a / b
self.assertTrue(np.array_equal(res.numpy(), a_np / b_np))
def test_add_scalar(self):
a_np = np.random.random(self.shape).astype(self.dtype)
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
b = 0.1
res = a + b
self.assertTrue(np.array_equal(res.numpy(), a_np + b))
def test_add_scalar_reverse(self):
a_np = np.random.random(self.shape).astype(self.dtype)
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
b = 0.1
res = b + a
self.assertTrue(np.array_equal(res.numpy(), b + a_np))
def test_sub_scalar(self):
a_np = np.random.random(self.shape).astype(self.dtype)
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
b = 0.1
res = a - b
self.assertTrue(np.array_equal(res.numpy(), a_np - b))
def test_sub_scalar_reverse(self):
a_np = np.random.random(self.shape).astype(self.dtype)
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
b = 0.1
res = b - a
self.assertTrue(np.array_equal(res.numpy(), b - a_np))
def test_mul_scalar(self):
a_np = np.random.random(self.shape).astype(self.dtype)
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
b = 0.1
res = a * b
self.assertTrue(np.array_equal(res.numpy(), a_np * b))
    # Scalar division is not bit-exact in float32, so compare with allclose
def test_div_scalar(self):
a_np = np.random.random(self.shape).astype(self.dtype)
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
b = 0.1
res = a / b
self.assertTrue(np.allclose(res.numpy(), a_np / b))
    # pow on float tensors is not bit-exact, so compare with allclose
def test_pow(self):
a_np = np.random.random(self.shape).astype(self.dtype)
b_np = np.random.random(self.shape).astype(self.dtype)
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
b = fluid.dygraph.to_variable(b_np)
res = a**b
self.assertTrue(np.allclose(res.numpy(), a_np**b_np))
def test_floor_div(self):
a_np = np.random.randint(1, 100, size=self.shape)
b_np = np.random.randint(1, 100, size=self.shape)
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
b = fluid.dygraph.to_variable(b_np)
res = a // b
self.assertTrue(np.array_equal(res.numpy(), a_np // b_np))
def test_mod(self):
a_np = np.random.randint(1, 100, size=self.shape)
b_np = np.random.randint(1, 100, size=self.shape)
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
b = fluid.dygraph.to_variable(b_np)
res = a % b
self.assertTrue(np.array_equal(res.numpy(), a_np % b_np))
    # Element-wise logical comparisons
def test_equal(self):
a_np = np.asarray([1, 2, 3, 4, 5])
b_np = np.asarray([1, 2, 3, 4, 5])
c_np = np.asarray([1, 2, 2, 4, 5])
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
b = fluid.dygraph.to_variable(b_np)
c = fluid.dygraph.to_variable(c_np)
res1 = (a == b)
res2 = (a == c)
self.assertTrue(np.array_equal(res1.numpy(), a_np == b_np))
self.assertTrue(np.array_equal(res2.numpy(), a_np == c_np))
def test_not_equal(self):
a_np = np.asarray([1, 2, 3, 4, 5])
b_np = np.asarray([1, 2, 3, 4, 5])
c_np = np.asarray([1, 2, 2, 4, 5])
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
b = fluid.dygraph.to_variable(b_np)
c = fluid.dygraph.to_variable(c_np)
res1 = (a != b)
res2 = (a != c)
self.assertTrue(np.array_equal(res1.numpy(), a_np != b_np))
self.assertTrue(np.array_equal(res2.numpy(), a_np != c_np))
def test_less_than(self):
a_np = np.random.random(self.shape).astype(self.dtype)
b_np = np.random.random(self.shape).astype(self.dtype)
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
b = fluid.dygraph.to_variable(b_np)
res = (a < b)
self.assertTrue(np.array_equal(res.numpy(), a_np < b_np))
def test_less_equal(self):
a_np = np.random.random(self.shape).astype(self.dtype)
b_np = np.random.random(self.shape).astype(self.dtype)
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
b = fluid.dygraph.to_variable(b_np)
res = (a <= b)
self.assertTrue(np.array_equal(res.numpy(), a_np <= b_np))
def test_greater_than(self):
a_np = np.random.random(self.shape).astype(self.dtype)
b_np = np.random.random(self.shape).astype(self.dtype)
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
b = fluid.dygraph.to_variable(b_np)
res = (a > b)
self.assertTrue(np.array_equal(res.numpy(), a_np > b_np))
def test_greater_equal(self):
a_np = np.random.random(self.shape).astype(self.dtype)
b_np = np.random.random(self.shape).astype(self.dtype)
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
b = fluid.dygraph.to_variable(b_np)
res = (a >= b)
self.assertTrue(np.array_equal(res.numpy(), a_np >= b_np))
def test_neg(self):
a_np = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
res = -a
self.assertTrue(np.array_equal(res.numpy(), -a_np))
def test_float_int_long(self):
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(np.array([100.1]))
self.assertTrue(float(a) == 100.1)
self.assertTrue(int(a) == 100)
if six.PY2:
self.assertTrue(long(a) == 100)
else:
self.assertTrue(int(a) == 100)
def test_len(self):
a_np = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
with fluid.dygraph.guard():
a = fluid.dygraph.to_variable(a_np)
self.assertTrue(len(a) == 10)
def test_index(self):
with fluid.dygraph.guard():
var1 = fluid.dygraph.to_variable(np.array([2]))
i_tmp = 0
for i in range(var1):
self.assertTrue(i == i_tmp)
i_tmp = i_tmp + 1
list1 = [1, 2, 3, 4, 5]
self.assertTrue(list1[var1] == 3)
str1 = "just test"
self.assertTrue(str1[var1] == 's')
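            # The three uses above (range(), list indexing, string indexing)
            # all require converting the Variable to a plain Python int,
            # presumably via the __index__/__int__ hooks installed by the
            # math-op patch under test.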
if __name__ == '__main__':
unittest.main()
| 38.304527 | 74 | 0.584443 | 1,360 | 9,308 | 3.833824 | 0.109559 | 0.036824 | 0.102033 | 0.160338 | 0.792865 | 0.778289 | 0.770234 | 0.770234 | 0.768316 | 0.752781 | 0 | 0.01816 | 0.284164 | 9,308 | 242 | 75 | 38.46281 | 0.76437 | 0.070262 | 0 | 0.563452 | 0 | 0 | 0.002085 | 0 | 0 | 0 | 0 | 0 | 0.152284 | 1 | 0.121827 | false | 0 | 0.030457 | 0 | 0.15736 | 0.005076 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
4a985fd14c32371ab38c9347bbb136fa33b9a30c | 64,974 | py | Python | tests/test_database.py | santosderek/Vitality | cc90d3b561c3b75f000288345d7a1442fb2b3fec | [
"MIT"
] | 1 | 2020-09-18T17:08:53.000Z | 2020-09-18T17:08:53.000Z | tests/test_database.py | santosderek/Vitality | cc90d3b561c3b75f000288345d7a1442fb2b3fec | [
"MIT"
] | 91 | 2020-09-25T23:12:58.000Z | 2020-12-19T04:57:50.000Z | tests/test_database.py | santosderek/4155-Team | cc90d3b561c3b75f000288345d7a1442fb2b3fec | [
"MIT"
] | 3 | 2020-09-26T22:35:42.000Z | 2020-10-13T18:22:22.000Z | from bson.objectid import ObjectId
from copy import deepcopy
from datetime import datetime
from vitality.database import *
from vitality.trainee import Trainee
from vitality.trainer import Trainer
from vitality.workout import Workout
from vitality.settings import MONGO_URI
import unittest
class TestDatabase(unittest.TestCase):
# Creating database object
database = Database(MONGO_URI)
# Creating new Trainee object
test_trainee = Trainee(
_id=None,
username="testtrainee",
password="password",
name="first last",
phone=1234567890,
trainers=[])
# Creating new Trainer object
test_trainer = Trainer(
_id=None,
username="testtrainer",
password="password",
name="first last",
phone=1234567890,
trainees=[])
# Creating new Workout Object
test_workout = Workout(
_id=None,
creator_id=None,
name="testing",
difficulty="easy",
about="workout",
is_complete=False,
total_time="20 minutes",
reps="10",
miles="2",
category="cardio")
def setUp(self):
self.tearDown()
self.assertTrue(self.test_trainee.password == 'password')
self.database.add_trainee(self.test_trainee)
self.database.add_trainer(self.test_trainer)
# Add workout
self.test_workout.creator_id = self.database.get_trainee_by_username(
self.test_trainee.username)._id
self.database.add_workout(self.test_workout)
self.assertTrue(self.test_trainee.password == 'password')
def tearDown(self):
# Remove test Workout if found
self.database.mongo.workout.delete_many(
{'name': self.test_workout.name})
# Removing a test workout
self.database.mongo.workout.delete_many({'name': 'goingtoremove'})
# Remove test Trainee if found
self.database.mongo.trainee.delete_many({
'username': self.test_trainee.username
})
# Remove test Trainer if found
self.database.mongo.trainer.delete_many({
'username': self.test_trainer.username
})
def test_password_sha256(self):
password = 'asupersecretpassword'
hashed_password = '009e3e71eed006baa4441cdc417e58f72a635e52f814400e6301881620628d8b'
self.assertTrue(password_sha256(password) == hashed_password)
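    # For reference, a minimal sketch of what password_sha256 presumably is
    # (an assumption: a plain, unsalted SHA-256 hex digest of the UTF-8
    # encoded password, which is consistent with the vector asserted above):
    #
    #     import hashlib
    #
    #     def password_sha256(password):
    #         return hashlib.sha256(password.encode('utf-8')).hexdigest()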
"""Trainee tests"""
def test_trainee_add_trainer(self):
trainee = self.database.get_trainee_by_username('testtrainee')
trainer = self.database.get_trainer_by_username('testtrainer')
with self.assertRaises(UserNotFoundError):
self.database.trainee_add_trainer("123456789012345678901234",
trainer._id)
with self.assertRaises(UserNotFoundError):
self.database.trainee_add_trainer(trainee._id,
"123456789012345678901234")
self.database.trainee_add_trainer(trainee._id, trainer._id)
assert ObjectId(trainer._id) in self.database.mongo.trainee.find_one({
'_id': ObjectId(trainee._id)
})['trainers']
def test_trainer_add_trainee(self):
trainee = self.database.get_trainee_by_username('testtrainee')
trainer = self.database.get_trainer_by_username('testtrainer')
with self.assertRaises(UserNotFoundError):
self.database.trainer_add_trainee("123456789012345678901234",
trainee._id)
with self.assertRaises(UserNotFoundError):
self.database.trainer_add_trainee(trainer._id,
"123456789012345678901234")
self.database.trainer_add_trainee(trainer._id, trainee._id)
assert ObjectId(trainee._id) in self.database.mongo.trainer.find_one({
'_id': ObjectId(trainer._id)
})['trainees']
def test_add_trainee(self):
# Raise exception if 'testTrainee' username found
with self.assertRaises(UsernameTakenError):
new_trainer = deepcopy(self.test_trainer)
new_trainer.username = "testtrainee"
self.database.add_trainee(new_trainer)
# Raise exception if 'testTrainer' username found
with self.assertRaises(UsernameTakenError):
new_trainer = deepcopy(self.test_trainer)
new_trainer.username = "testtrainer"
self.database.add_trainee(new_trainer)
        # Copy test_trainee and change to an unused username
new_trainee = deepcopy(self.test_trainee)
new_trainee.username = "testusername"
# Remove testUsername
while self.database.get_trainee_by_username(new_trainee.username) is not None:
db_user = self.database.get_trainee_by_username(
new_trainee.username)
self.database.remove_trainee(db_user._id)
        # Get the trainee with the new username from the database
database_trainee = self.database.get_trainee_by_username(
new_trainee.username)
self.assertTrue(database_trainee is None)
        # Add the new trainee
self.database.add_trainee(new_trainee)
        # Get the trainee with the new username from the database
database_trainee = self.database.get_trainee_by_username(
new_trainee.username)
self.assertTrue(database_trainee is not None)
        # Remove the newly added trainee
self.database.remove_trainee(database_trainee._id)
database_trainee = self.database.get_trainee_by_username(
new_trainee.username)
self.assertTrue(database_trainee is None)
def test_set_trainee_username(self):
new_trainee = deepcopy(self.test_trainee)
        # Getting the new user by their username
db_user_1 = self.database.get_trainee_by_username(
new_trainee.username)
# Setting our current user object's id as mongodb id
new_trainee._id = db_user_1._id
# Need to hash new_trainee's password
new_trainee.password = password_sha256(new_trainee.password)
# Checking if user objects are the same through their dicts
self.assertTrue(db_user_1.as_dict() == new_trainee.as_dict())
        # Changing the new trainee's username to 'elijah'
new_trainee.username = "elijah"
self.database.set_trainee_username(
new_trainee._id, new_trainee.username)
# Checking if database updated
db_user_2 = self.database.get_trainee_by_id(new_trainee._id)
self.assertTrue(db_user_2.as_dict() == new_trainee.as_dict())
# Removing temp user from database
self.database.remove_trainee(db_user_2._id)
self.assertTrue(self.database.get_trainee_by_id(db_user_2._id) is None)
def test_set_trainee_password(self):
new_trainee = deepcopy(self.test_trainee)
# Updating user object to database user
new_trainee = self.database.get_trainee_by_username(
new_trainee.username)
# Changing password
new_trainee.password = "newPassword"
self.database.set_trainee_password(
new_trainee._id, new_trainee.password)
# Checking password
db_user = self.database.get_trainee_by_username(
new_trainee.username)
new_trainee.password = password_sha256(new_trainee.password)
self.assertTrue(db_user.password == new_trainee.password)
self.database.remove_trainee(db_user._id)
self.assertTrue(
self.database.get_trainee_by_id(db_user._id) is None)
def test_set_trainee_phone(self):
new_trainee = deepcopy(self.test_trainee)
# Updating user object to database user
new_trainee = self.database.get_trainee_by_username(
new_trainee.username)
# Changing phone
new_trainee.phone = "newPhone"
self.database.set_trainee_phone(new_trainee._id, new_trainee.phone)
# Checking phone
db_user = self.database.get_trainee_by_username(
new_trainee.username)
self.assertTrue(db_user.phone == new_trainee.phone)
self.database.remove_trainee(db_user._id)
self.assertTrue(
self.database.get_trainee_by_id(db_user._id) is None)
def test_add_trainee_experience(self):
trainee = self.database.mongo.trainee.find_one({
'username': self.test_trainee.username
})
assert trainee is not None
assert trainee['exp'] == 0
self.database.add_trainee_experience(str(trainee['_id']), 10)
trainee = self.database.mongo.trainee.find_one({
'username': self.test_trainee.username
})
assert trainee is not None
assert trainee['exp'] == 10
self.database.add_trainee_experience(str(trainee['_id']), 20)
trainee = self.database.mongo.trainee.find_one({
'username': self.test_trainee.username
})
assert trainee is not None
assert trainee['exp'] == 30
self.database.add_trainee_experience(str(trainee['_id']), 30)
trainee = self.database.mongo.trainee.find_one({
'username': self.test_trainee.username
})
assert trainee is not None
assert trainee['exp'] == 60
def test_set_trainee_name(self):
new_trainee = deepcopy(self.test_trainee)
# Updating user object to database user
new_trainee = self.database.get_trainee_by_username(
new_trainee.username)
# Changing name
new_trainee.name = "newname"
self.database.set_trainee_name(
new_trainee._id, new_trainee.name)
# Checking name
db_user = self.database.get_trainee_by_username(
new_trainee.username)
self.assertTrue(db_user.name == new_trainee.name)
self.database.remove_trainee(db_user._id)
self.assertTrue(
self.database.get_trainee_by_username(db_user._id) is None)
def test_list_trainers_by_search(self):
# Searching for testTrainer with input "testTrain"
found_trainers = self.database.list_trainers_by_search("testtrain")
self.assertEqual(len(found_trainers), 1)
trainer = self.database.get_trainer_by_username("testtrainer")
self.assertEqual(found_trainers[0].as_dict(), trainer.as_dict())
def test_list_trainees_by_search(self):
# Searching for testTrainee with input "testTrain"
found_trainees = self.database.list_trainees_by_search("testtrain")
self.assertEqual(len(found_trainees), 1)
trainee = self.database.get_trainee_by_username("testtrainee")
self.assertEqual(found_trainees[0].as_dict(), trainee.as_dict())
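    # Both search tests above match "testtrain" against "testtrainer" /
    # "testtrainee", so list_*_by_search presumably performs a prefix or
    # substring match on the username field.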
""" Test trainer """
def test_add_trainer_experience(self):
trainer = self.database.mongo.trainer.find_one({
'username': self.test_trainer.username
})
assert trainer is not None
assert trainer['exp'] == 0
self.database.add_trainer_experience(str(trainer['_id']), 10)
trainer = self.database.mongo.trainer.find_one({
'username': self.test_trainer.username
})
assert trainer is not None
assert trainer['exp'] == 10
self.database.add_trainer_experience(str(trainer['_id']), 20)
trainer = self.database.mongo.trainer.find_one({
'username': self.test_trainer.username
})
assert trainer is not None
assert trainer['exp'] == 30
self.database.add_trainer_experience(str(trainer['_id']), 30)
trainer = self.database.mongo.trainer.find_one({
'username': self.test_trainer.username
})
assert trainer is not None
assert trainer['exp'] == 60
def test_add_trainer(self):
# Raise exception if 'testTrainee' username found
with self.assertRaises(UsernameTakenError):
new_trainer = deepcopy(self.test_trainer)
new_trainer.username = "testtrainee"
self.database.add_trainer(new_trainer)
# Raise exception if 'testTrainer' username found
with self.assertRaises(UsernameTakenError):
new_trainer = deepcopy(self.test_trainer)
new_trainer.username = "testtrainer"
self.database.add_trainer(new_trainer)
# Copy test_trainer and change to unused trainer name
new_trainer = deepcopy(self.test_trainer)
new_trainer.username = "testUsername"
# Remove testUsername
while self.database.get_trainer_by_username(new_trainer.username) is not None:
db_user = self.database.get_trainer_by_username(
new_trainer.username)
self.database.remove_trainer(db_user._id)
# Get database testUsername trainer
database_trainer = self.database.get_trainer_by_username(
new_trainer.username)
self.assertTrue(database_trainer is None)
# Add a new trainer
self.database.add_trainer(new_trainer)
# Get database testUsername trainer
database_trainer = self.database.get_trainer_by_username(
new_trainer.username)
self.assertTrue(database_trainer is not None)
# Remove newly added trainer
self.database.remove_trainer(database_trainer._id)
database_trainer = self.database.get_trainer_by_username(
new_trainer.username)
self.assertTrue(database_trainer is None)
def test_set_trainer_username(self):
new_trainer = deepcopy(self.test_trainer)
        # Getting the new user by their username
db_user_1 = self.database.get_trainer_by_username(
new_trainer.username)
# Setting our current user object's id as mongodb id
new_trainer._id = db_user_1._id
# Need to hash new_trainer's password
new_trainer.password = password_sha256(new_trainer.password)
# Checking if user objects are the same through their dicts
self.assertTrue(db_user_1.as_dict() == new_trainer.as_dict())
        # Changing the new trainer's username to 'elijah'
new_trainer.username = "elijah"
self.database.set_trainer_username(
new_trainer._id, new_trainer.username)
# Checking if database updated
db_user_2 = self.database.get_trainer_by_id(new_trainer._id)
self.assertTrue(db_user_2.as_dict() == new_trainer.as_dict())
# Removing temp user from database
self.database.remove_trainer(db_user_2._id)
self.assertTrue(self.database.get_trainer_by_id(db_user_2._id) is None)
def test_set_trainer_password(self):
new_trainer = deepcopy(self.test_trainer)
# Updating user object to database user
new_trainer = self.database.get_trainer_by_username(
new_trainer.username)
# Changing password
new_trainer.password = "newPassword"
self.database.set_trainer_password(
new_trainer._id, new_trainer.password)
# Checking password
db_user = self.database.get_trainer_by_username(
new_trainer.username)
self.assertTrue(db_user.password ==
password_sha256(new_trainer.password))
self.database.remove_trainer(db_user._id)
self.assertTrue(
self.database.get_trainer_by_id(db_user._id) is None)
def test_set_coords(self):
# tests the set_coords method for both trainer and trainee
new_trainer = deepcopy(self.test_trainer)
# Updating user object to database user
new_trainer = self.database.get_trainer_by_username(
new_trainer.username)
# Changing coordinates
new_trainer.lng = 5
new_trainer.lat = 5
self.database.set_coords(
new_trainer._id, new_trainer.lng, new_trainer.lat)
# Checking coordinates
db_user = self.database.get_trainer_by_username(
new_trainer.username)
self.assertTrue(db_user.lng == new_trainer.lng)
self.assertTrue(db_user.lat == new_trainer.lat)
self.database.remove_trainer(db_user._id)
self.assertTrue(
self.database.get_trainer_by_id(db_user._id) is None)
new_trainee = deepcopy(self.test_trainee)
# Updating user object to database user
new_trainee = self.database.get_trainee_by_username(
new_trainee.username)
# Changing coordinates
new_trainee.lng = 5
new_trainee.lat = 5
self.database.set_coords(
new_trainee._id, new_trainee.lng, new_trainee.lat)
# Checking coordinates
db_user = self.database.get_trainee_by_username(
new_trainee.username)
self.assertTrue(db_user.lng == new_trainee.lng)
self.assertTrue(db_user.lat == new_trainee.lat)
self.database.remove_trainee(db_user._id)
self.assertTrue(
self.database.get_trainee_by_id(db_user._id) is None)
def test_set_trainer_phone(self):
new_trainer = deepcopy(self.test_trainer)
# Updating user object to database user
new_trainer = self.database.get_trainer_by_username(
new_trainer.username)
# Changing phone
new_trainer.phone = "newPhone"
self.database.set_trainer_phone(new_trainer._id, new_trainer.phone)
# Checking phone
db_user = self.database.get_trainer_by_username(
new_trainer.username)
self.assertTrue(db_user.phone == new_trainer.phone)
self.database.remove_trainer(db_user._id)
self.assertTrue(
self.database.get_trainer_by_id(db_user._id) is None)
def test_set_trainer_name(self):
new_trainer = deepcopy(self.test_trainer)
# Updating user object to database user
new_trainer = self.database.get_trainer_by_username(
new_trainer.username)
# Changing name
new_trainer.name = "newname"
self.database.set_trainer_name(
new_trainer._id, new_trainer.name)
# Checking name
db_user = self.database.get_trainer_by_username(
new_trainer.username)
self.assertTrue(db_user.name == new_trainer.name)
self.database.remove_trainer(db_user._id)
self.assertTrue(
self.database.get_trainer_by_username(db_user._id) is None)
"""Workout tests"""
def test_workout_dict_to_class(self):
new_workout = deepcopy(self.test_workout)
        # Convert the workout dict back into a Workout object
database_workout = self.database.workout_dict_to_class(
new_workout.as_dict())
# Need to pass in the mongo id
new_workout._id = database_workout._id
# Check if equal
self.assertTrue(new_workout.as_dict() == database_workout.as_dict())
def test_get_workout_by_attributes(self):
trainee = self.database.mongo.trainee.find_one({
'username': self.test_trainee.username
})
assert trainee is not None
workout = self.database.get_workout_by_attributes(creator_id=trainee['_id'],
about='workout',
name='testing')
assert workout is not None
assert workout.creator_id == str(trainee['_id'])
assert workout.about == 'workout'
assert workout.name == 'testing'
with self.assertRaises(WorkoutNotFound):
self.database.get_workout_by_attributes(about='not a workout at all',
name='nope not a name')
workout = self.database.get_workout_by_attributes(_id=str(workout._id))
assert workout is not None
def test_get_all_workout_by_attributes(self):
trainee = self.database.mongo.trainee.find_one({
'username': self.test_trainee.username
})
assert trainee is not None
workout = self.database.get_all_workout_by_attributes(creator_id=trainee['_id'],
about='workout',
name='testing')
assert workout is not None
with self.assertRaises(WorkoutNotFound):
self.database.get_workout_by_attributes(about='not a workout at all',
name='nope not a name')
def test_get_workout_class_by_id(self):
new_workout = deepcopy(self.test_workout)
        # Get the trainee from the database
trainee = self.database.get_trainee_by_username(
self.test_trainee.username)
# Get workout from database
database_workout = self.database.get_workout_by_attributes(name=new_workout.name,
creator_id=trainee._id)
# Need to pass in the mongo id
new_workout._id = database_workout._id
# Check if workouts are the same
self.assertTrue(new_workout.as_dict() == database_workout.as_dict())
# Get workout from database by id this time
database_workout = self.database.get_workout_by_id(new_workout._id)
# Check if workouts are the same
self.assertTrue(new_workout.as_dict() == database_workout.as_dict())
def test_get_workout_class_by_name(self):
new_workout = deepcopy(self.test_workout)
        # Get the trainee from the database
trainee = self.database.get_trainee_by_username(
self.test_trainee.username)
# Get workout from database
database_workout = self.database.get_workout_by_attributes(name=new_workout.name,
creator_id=trainee._id)
# Need to pass in the mongo id
new_workout._id = database_workout._id
# Check if workouts are the same
self.assertTrue(new_workout.as_dict() == database_workout.as_dict())
self.database.remove_workout(database_workout._id)
def test_set_workout_creator_id(self):
try:
new_workout = deepcopy(self.test_workout)
# Get trainee from database
trainee = self.database.get_trainee_by_username(
self.test_trainee.username)
# Get trainer from database
trainer = self.database.get_trainer_by_username(
self.test_trainer.username)
database_workout = self.database.get_workout_by_attributes(name=new_workout.name,
creator_id=trainee._id)
assert database_workout is not None
# Set to trainer id
self.database.set_workout_creator_id(database_workout._id,
trainer._id)
# Get back the new workout
database_workout = self.database.get_workout_by_attributes(name=new_workout.name,
creator_id=trainer._id)
assert database_workout is not None
# Check that the creator_id is now changed
assert database_workout.creator_id == trainer._id
finally:
trainer = self.database.get_trainer_by_username(
self.test_trainer.username)
self.database.mongo.workout.delete_many({
'creator_id': ObjectId(trainer._id)
})
def test_set_workout_name(self):
try:
new_workout = deepcopy(self.test_workout)
trainee = self.database.get_trainee_by_username(
self.test_trainee.username)
self.database.mongo.workout.delete_many(
{
'name': "newname",
'creator_id': trainee._id
}
)
# Get workout from database
database_workout = self.database.get_workout_by_attributes(name=new_workout.name,
creator_id=trainee._id)
# Get id and change name
new_workout._id = database_workout._id
new_workout.name = "newname"
new_workout.creator_id = database_workout.creator_id
# Set it in database
self.database.set_workout_name(new_workout._id, new_workout.name)
# Get workout from database
database_workout = self.database.get_workout_by_attributes(name=new_workout.name,
creator_id=trainee._id)
self.assertTrue(database_workout.as_dict()
== new_workout.as_dict())
# Removing workout since we changed name. Teardown wont do it
self.database.remove_workout(new_workout._id)
finally:
self.database.mongo.workout.delete_many(
{
'creator_id': trainee._id
}
)
def test_set_workout_difficulty(self):
new_workout = deepcopy(self.test_workout)
        # Get the trainee from the database
trainee = self.database.get_trainee_by_username(
self.test_trainee.username)
# Get workout from database
database_workout = self.database.get_workout_by_attributes(name=new_workout.name,
creator_id=trainee._id)
        # Get id and change difficulty
new_workout._id = database_workout._id
new_workout.difficulty = "newdifficulty"
# Set it in database
self.database.set_workout_difficulty(
new_workout._id, new_workout.difficulty)
# Get workout from database
database_workout = self.database.get_workout_by_attributes(name=new_workout.name,
creator_id=trainee._id)
self.assertTrue(database_workout.as_dict() == new_workout.as_dict())
def test_set_workout_about(self):
new_workout = deepcopy(self.test_workout)
        # Get the trainee from the database
trainee = self.database.get_trainee_by_username(
self.test_trainee.username)
# Get workout from database
database_workout = self.database.get_workout_by_attributes(name=new_workout.name,
creator_id=trainee._id)
        # Get id and change about
new_workout._id = database_workout._id
new_workout.about = "newabout"
# Set it in database
self.database.set_workout_about(new_workout._id, new_workout.about)
# Get workout from database
database_workout = self.database.get_workout_by_attributes(name=new_workout.name,
creator_id=trainee._id)
self.assertTrue(database_workout.as_dict() == new_workout.as_dict())
def test_remove_workout(self):
new_workout = deepcopy(self.test_workout)
new_workout.name = "goingtoremove"
# Adding workout to database
self.database.add_workout(new_workout)
        # Get the trainee from the database
trainee = self.database.get_trainee_by_username(
self.test_trainee.username)
# Get workout from database
database_workout = self.database.get_workout_by_attributes(name=new_workout.name,
creator_id=trainee._id)
        # Copy over the database ids
new_workout._id = database_workout._id
new_workout.creator_id = database_workout.creator_id
self.assertTrue(database_workout.as_dict() == new_workout.as_dict())
self.database.remove_workout(new_workout._id)
with self.assertRaises(WorkoutNotFound):
self.database.get_workout_by_attributes(name=new_workout.name,
creator_id=trainee._id)
def test_add_workout(self):
new_trainee = self.database.get_trainee_by_username(
self.test_trainee.username)
new_workout = deepcopy(self.test_workout)
        # Get the trainee from the database
trainee = self.database.get_trainee_by_username(
self.test_trainee.username)
# Get workout from database
database_workout = self.database.get_workout_by_attributes(name=new_workout.name,
creator_id=trainee._id)
# Set ids
new_workout._id = database_workout._id
new_workout.creator_id = new_trainee._id
database_workout.creator_id = new_workout.creator_id
self.assertTrue(new_trainee._id == new_workout.creator_id)
self.assertTrue(database_workout.as_dict() == new_workout.as_dict())
# Removing temp workout from database
self.database.remove_workout(new_workout._id)
self.assertTrue(
self.database.get_workout_by_id(database_workout._id) is None)
# Removing temp user from database
self.database.remove_trainee(new_trainee._id)
self.assertTrue(self.database.get_trainee_by_id(
new_trainee._id) is None)
# Testing to see if an error occurs if adding a workout with no creator id
new_workout = deepcopy(self.test_workout)
new_workout.creator_id = None
with self.assertRaises(WorkoutCreatorIdNotFoundError):
self.database.add_workout(new_workout)
def test_remove_trainee(self):
try:
self.database.add_trainee(Trainee(_id=None,
username="testtrainee1",
password="pass",
name="testTrainee1",
phone=1234567890))
self.database.add_trainee(Trainee(_id=None,
username="testtrainee2",
password="pass",
name="testTrainee2",
phone=1234567890))
self.database.add_trainee(Trainee(_id=None,
username="testtrainee3",
password="pass",
name="testTrainee3",
phone=1234567890))
self.database.add_trainer(Trainer(_id=None,
username="testtrainer1",
password="pass",
name="testTrainer3",
phone=1234567890))
trainee1_id = str(self.database.mongo.trainee.find_one(
{'username': 'testtrainee1'})['_id'])
trainee2_id = str(self.database.mongo.trainee.find_one(
{'username': 'testtrainee2'})['_id'])
trainee3_id = str(self.database.mongo.trainee.find_one(
{'username': 'testtrainee3'})['_id'])
trainer_id = str(self.database.mongo.trainer.find_one(
{'username': 'testtrainer1'})['_id'])
assert self.database.get_trainer_by_username(
"testtrainer1") is not None
assert self.database.get_trainee_by_username(
"testtrainee1") is not None
assert self.database.get_trainee_by_username(
"testtrainee2") is not None
assert self.database.get_trainee_by_username(
"testtrainee3") is not None
self.database.trainer_add_trainee(trainer_id, trainee1_id)
self.database.trainer_add_trainee(trainer_id, trainee2_id)
self.database.trainer_add_trainee(trainer_id, trainee3_id)
assert len(self.database.get_trainer_by_id(
trainer_id).trainees) == 3
self.database.remove_trainee(trainee1_id)
assert len(self.database.get_trainer_by_id(
trainer_id).trainees) == 2
self.database.remove_trainee(trainee2_id)
assert len(self.database.get_trainer_by_id(
trainer_id).trainees) == 1
self.database.remove_trainee(trainee3_id)
assert len(self.database.get_trainer_by_id(
trainer_id).trainees) == 0
finally:
self.database.mongo.trainee.delete_many(
{"username": "testtrainee1"})
self.database.mongo.trainee.delete_many(
{"username": "testtrainee2"})
self.database.mongo.trainee.delete_many(
{"username": "testtrainee3"})
self.database.mongo.trainer.delete_many(
{"username": "testTrainer1"})
    def test_remove_trainer(self):
try:
self.database.add_trainer(Trainer(_id=None,
username="testtrainer1",
password="pass",
name="testTrainer1",
phone=1234567890))
self.database.add_trainer(Trainer(_id=None,
username="testtrainer2",
password="pass",
name="testTrainer2",
phone=1234567890))
self.database.add_trainer(Trainer(_id=None,
username="testtrainer3",
password="pass",
name="testTrainer3",
phone=1234567890))
self.database.add_trainee(Trainee(_id=None,
username="testtrainee1",
password="pass",
name="testTrainer3",
phone=1234567890))
trainer1_id = str(self.database.mongo.trainer.find_one(
{'username': 'testtrainer1'})['_id'])
trainer2_id = str(self.database.mongo.trainer.find_one(
{'username': 'testtrainer2'})['_id'])
trainer3_id = str(self.database.mongo.trainer.find_one(
{'username': 'testtrainer3'})['_id'])
trainee_id = str(self.database.mongo.trainee.find_one(
{'username': 'testtrainee1'})['_id'])
assert self.database.get_trainee_by_username(
"testtrainee1") is not None
assert self.database.get_trainer_by_username(
"testtrainer1") is not None
assert self.database.get_trainer_by_username(
"testtrainer2") is not None
assert self.database.get_trainer_by_username(
"testtrainer3") is not None
self.database.trainee_add_trainer(trainee_id, trainer1_id)
self.database.trainee_add_trainer(trainee_id, trainer2_id)
self.database.trainee_add_trainer(trainee_id, trainer3_id)
assert len(self.database.get_trainee_by_id(
trainee_id).trainers) == 3
self.database.remove_trainer(trainer1_id)
assert len(self.database.get_trainee_by_id(
trainee_id).trainers) == 2
self.database.remove_trainer(trainer2_id)
assert len(self.database.get_trainee_by_id(
trainee_id).trainers) == 1
self.database.remove_trainer(trainer3_id)
assert len(self.database.get_trainee_by_id(
trainee_id).trainers) == 0
finally:
self.database.mongo.trainer.delete_many(
{"username": "testtrainer1"})
self.database.mongo.trainer.delete_many(
{"username": "testtrainer2"})
self.database.mongo.trainer.delete_many(
{"username": "testtrainer3"})
self.database.mongo.trainee.delete_many(
{"username": "testtrainee1"})
def test_get_all_workouts_by_creatorid(self):
# Checking if workout total is equal to 1
trainee = self.database.get_trainee_by_username(
self.test_trainee.username)
workouts = self.database.get_all_workouts_by_creatorid(trainee._id)
assert len(workouts) == 1
new_workout = Workout(
_id=None,
creator_id=trainee._id,
name="goingtoremove", # tearDown removes all of these
difficulty="novice",
about="something something else"
)
self.database.add_workout(new_workout)
workouts = self.database.get_all_workouts_by_creatorid(trainee._id)
assert len(workouts) == 2
def test_set_workout_status(self):
trainee = self.database.get_trainee_by_username(
self.test_trainee.username)
workout = self.database.mongo.workout.find_one({
'name': "testing",
'creator_id': ObjectId(trainee._id)
})
assert workout is not None
assert workout['is_complete'] is False
self.database.set_workout_status(trainee._id, workout['name'], True)
workout = self.database.mongo.workout.find_one({
'name': "testing",
'creator_id': ObjectId(trainee._id)
})
assert workout is not None
assert workout['is_complete'] is True
def test_set_workout_total_time(self):
trainee = self.database.get_trainee_by_username(
self.test_trainee.username)
workout = self.database.mongo.workout.find_one({
'name': "testing",
'creator_id': ObjectId(trainee._id)
})
assert workout is not None
assert workout['total_time'] == "20 minutes"
self.database.set_workout_total_time(trainee._id, workout['name'], "10")
workout = self.database.mongo.workout.find_one({
'name': "testing",
'creator_id': ObjectId(trainee._id)
})
assert workout is not None
assert workout['total_time'] =="10"
def test_set_workout_reps(self):
trainee = self.database.get_trainee_by_username(
self.test_trainee.username)
workout = self.database.mongo.workout.find_one({
'name': "testing",
'creator_id': ObjectId(trainee._id)
})
assert workout is not None
assert workout['reps'] == "10"
self.database.set_workout_reps(trainee._id, workout['name'], "5")
workout = self.database.mongo.workout.find_one({
'name': "testing",
'creator_id': ObjectId(trainee._id)
})
assert workout is not None
assert workout['reps'] == "5"
def test_set_workout_miles(self):
trainee = self.database.get_trainee_by_username(
self.test_trainee.username)
workout = self.database.mongo.workout.find_one({
'name': "testing",
'creator_id': ObjectId(trainee._id)
})
assert workout is not None
assert workout['miles'] == "2"
self.database.set_workout_miles(trainee._id, workout['name'], "5")
workout = self.database.mongo.workout.find_one({
'name': "testing",
'creator_id': ObjectId(trainee._id)
})
assert workout is not None
assert workout['miles'] == "5"
def test_set_workout_category(self):
trainee = self.database.get_trainee_by_username(
self.test_trainee.username)
workout = self.database.mongo.workout.find_one({
'name': "testing",
'creator_id': ObjectId(trainee._id)
})
assert workout is not None
assert workout['category'] == "cardio"
self.database.set_workout_category(trainee._id, workout['name'], "Abs")
workout = self.database.mongo.workout.find_one({
'name': "testing",
'creator_id': ObjectId(trainee._id)
})
assert workout is not None
assert workout['category'] == "Abs"
"""Invitation tests"""
def test_create_invitation(self):
"""Testing invitation creation"""
def clean_up(user_one, user_two):
# Clean up
self.database.mongo.invitation.delete_many({
'sender': ObjectId(user_one._id)
})
self.database.mongo.invitation.delete_many({
'recipient': ObjectId(user_one._id)
})
self.database.mongo.invitation.delete_many({
'sender': ObjectId(user_two._id)
})
self.database.mongo.invitation.delete_many({
'recipient': ObjectId(user_two._id)
})
try:
trainee = self.database.get_trainee_by_username('testtrainee')
trainer = self.database.get_trainer_by_username('testtrainer')
clean_up(trainee, trainer)
invitation_id = self.database.create_invitation(trainee._id,
trainer._id)
database_invitation = self.database.mongo.invitation.find_one({
'sender': ObjectId(trainee._id),
'recipient': ObjectId(trainer._id)
})
assert invitation_id is not None
assert database_invitation is not None
assert str(database_invitation['_id']) == str(invitation_id)
assert str(database_invitation['sender']) == trainee._id
assert str(database_invitation['recipient']) == trainer._id
# Check if non-existent user throws error
with self.assertRaises(UserNotFoundError):
self.database.create_invitation('000000000000000000000000',
trainer._id)
with self.assertRaises(UserNotFoundError):
self.database.create_invitation(trainee._id,
'000000000000000000000000')
finally:
clean_up(trainee, trainer)
def test_delete_invitation(self):
"""Testing invitation deletion"""
def clean_up(user_one, user_two):
# Clean up
self.database.mongo.invitation.delete_many({
'sender': ObjectId(user_one._id)
})
self.database.mongo.invitation.delete_many({
'recipient': ObjectId(user_one._id)
})
self.database.mongo.invitation.delete_many({
'sender': ObjectId(user_two._id)
})
self.database.mongo.invitation.delete_many({
'recipient': ObjectId(user_two._id)
})
try:
trainee = self.database.get_trainee_by_username('testtrainee')
trainer = self.database.get_trainer_by_username('testtrainer')
clean_up(trainee, trainer)
invitation = self.database.mongo.invitation.insert_one({
'sender': ObjectId(trainee._id),
'recipient': ObjectId(trainer._id)
})
self.database.delete_invitation(invitation.inserted_id)
database_invitation = self.database.mongo.invitation.find_one({
'_id': invitation.inserted_id
})
assert database_invitation is None
database_invitation = self.database.mongo.invitation.find_one({
'sender': trainee._id,
'recipient': trainer._id
})
assert database_invitation is None
finally:
clean_up(trainee, trainer)
def test_search_invitation(self):
"""Testing invitation search"""
def clean_up(user_one, user_two):
# Clean up
self.database.mongo.invitation.delete_many({
'sender': ObjectId(user_one._id)
})
self.database.mongo.invitation.delete_many({
'recipient': ObjectId(user_one._id)
})
self.database.mongo.invitation.delete_many({
'sender': ObjectId(user_two._id)
})
self.database.mongo.invitation.delete_many({
'recipient': ObjectId(user_two._id)
})
try:
trainee = self.database.get_trainee_by_username('testtrainee')
trainer = self.database.get_trainer_by_username('testtrainer')
clean_up(trainee, trainer)
with self.assertRaises(InvitationNotFound):
self.database.search_invitation("000000000000000000000000")
invitation = self.database.mongo.invitation.insert_one({
'sender': ObjectId(trainee._id),
'recipient': ObjectId(trainer._id)
})
searched_invitation = self.database.search_invitation(
invitation.inserted_id)
assert searched_invitation._id == str(invitation.inserted_id)
assert searched_invitation.sender == str(trainee._id)
assert searched_invitation.recipient == str(trainer._id)
finally:
clean_up(trainee, trainer)
def test_search_all_user_invitations(self):
"""Testing the search feature to get all sent and recieved invitations by a user."""
def clean_up(user_one, user_two):
# Clean up
self.database.mongo.invitation.delete_many({
'sender': ObjectId(user_one._id)
})
self.database.mongo.invitation.delete_many({
'recipient': ObjectId(user_one._id)
})
self.database.mongo.invitation.delete_many({
'sender': ObjectId(user_two._id)
})
self.database.mongo.invitation.delete_many({
'recipient': ObjectId(user_two._id)
})
try:
trainee = self.database.get_trainee_by_username('testtrainee')
trainer = self.database.get_trainer_by_username('testtrainer')
clean_up(trainee, trainer)
invitation = self.database.mongo.invitation.insert_one({
'sender': ObjectId(trainee._id),
'recipient': ObjectId(trainer._id)
})
            all_sent, all_received = self.database.search_all_user_invitations(
                trainee._id)
            assert len(all_sent) > 0
            assert len(all_received) == 0
            all_sent, all_received = self.database.search_all_user_invitations(
                trainer._id)
            assert len(all_received) > 0
            assert len(all_sent) == 0
finally:
clean_up(trainee, trainer)
def test_accept_invitation(self):
"""Checking to see that a user can accept a recieved invitation."""
def clean_up(user_one, user_two):
# Clean up
self.database.mongo.invitation.delete_many({
'sender': ObjectId(user_one._id)
})
self.database.mongo.invitation.delete_many({
'recipient': ObjectId(user_one._id)
})
self.database.mongo.invitation.delete_many({
'sender': ObjectId(user_two._id)
})
self.database.mongo.invitation.delete_many({
'recipient': ObjectId(user_two._id)
})
trainee = self.database.get_trainee_by_username('testtrainee')
trainer = self.database.get_trainer_by_username('testtrainer')
try:
clean_up(trainee, trainer)
invitation = self.database.mongo.invitation.insert_one({
'sender': ObjectId(trainee._id),
'recipient': ObjectId(trainer._id)
})
with self.assertRaises(InvitationNotFound):
self.database.accept_invitation('000000000000000000000000',
str(trainee._id))
assert self.database.mongo.invitation.find_one({
'_id': ObjectId(invitation.inserted_id)
}) is not None
assert self.database.mongo.invitation.find_one({
'sender': ObjectId(trainee._id)
}) is not None
assert self.database.mongo.invitation.find_one({
'recipient': ObjectId(trainer._id)
}) is not None
self.database.accept_invitation(str(invitation.inserted_id),
str(trainer._id))
assert self.database.mongo.invitation.find_one({
'_id': invitation.inserted_id
}) is None
assert ObjectId(trainee._id) in self.database.mongo.trainer.find_one({
'_id': ObjectId(trainer._id)
})['trainees']
assert ObjectId(trainer._id) in self.database.mongo.trainee.find_one({
'_id': ObjectId(trainee._id)
})['trainers']
clean_up(trainee, trainer)
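# Scenario 2: the trainer invites the trainee; the same bookkeeping must
# happen in the reverse direction.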
invitation = self.database.mongo.invitation.insert_one({
'sender': ObjectId(trainer._id),
'recipient': ObjectId(trainee._id)
})
with self.assertRaises(InvitationNotFound):
self.database.accept_invitation('000000000000000000000000',
str(trainer._id))
assert self.database.mongo.invitation.find_one({
'_id': ObjectId(invitation.inserted_id)
}) is not None
assert self.database.mongo.invitation.find_one({
'sender': ObjectId(trainer._id)
}) is not None
assert self.database.mongo.invitation.find_one({
'recipient': ObjectId(trainee._id)
}) is not None
self.database.accept_invitation(str(invitation.inserted_id),
str(trainee._id))
assert self.database.mongo.invitation.find_one({
'_id': invitation.inserted_id
}) is None
assert ObjectId(trainee._id) in self.database.mongo.trainer.find_one({
'_id': ObjectId(trainer._id)
})['trainees']
assert ObjectId(trainer._id) in self.database.mongo.trainee.find_one({
'_id': ObjectId(trainee._id)
})['trainers']
finally:
clean_up(trainee, trainer)
def test_trainee_remove_trainer(self):
"""Tests to see if a trainee gets removed from a trainers list"""
trainee = self.database.get_trainee_by_username('testtrainee')
trainer = self.database.get_trainer_by_username('testtrainer')
with self.assertRaises(UserNotFoundError):
self.database.trainee_remove_trainer("123456789012345678901234",
trainer._id)
with self.assertRaises(UserNotFoundError):
self.database.trainee_remove_trainer(trainee._id,
"123456789012345678901234")
self.database.mongo.trainee.update_one(
{"_id": ObjectId(trainee._id)},
{
"$addToSet": {
"trainers": ObjectId(trainer._id)
}
})
assert ObjectId(trainer._id) in self.database.mongo.trainee.find_one({
'_id': ObjectId(trainee._id)
})['trainers']
self.database.trainee_remove_trainer(trainee._id, trainer._id)
assert ObjectId(trainer._id) not in self.database.mongo.trainee.find_one({
'_id': ObjectId(trainee._id)
})['trainers']
def test_trainer_remove_trainee(self):
"""Tests to see if a trainee gets removed from a trainers list"""
trainee = self.database.get_trainee_by_username('testtrainee')
trainer = self.database.get_trainer_by_username('testtrainer')
with self.assertRaises(UserNotFoundError):
self.database.trainer_remove_trainee("123456789012345678901234",
trainee._id)
with self.assertRaises(UserNotFoundError):
self.database.trainer_remove_trainee(trainer._id,
"123456789012345678901234")
self.database.mongo.trainer.update_one(
{"_id": ObjectId(trainer._id)},
{
"$addToSet": {
"trainees": ObjectId(trainee._id)
}
})
assert ObjectId(trainee._id) in self.database.mongo.trainer.find_one({
'_id': ObjectId(trainer._id)
})['trainees']
self.database.trainer_remove_trainee(trainer._id, trainee._id)
assert ObjectId(trainee._id) not in self.database.mongo.trainer.find_one({
'_id': ObjectId(trainer._id)
})['trainees']
def test_find_trainers_near_user(self):
"""Tests the find nearby trainers function to see if it returns a populated list"""
new_trainee = deepcopy(self.test_trainee)
# Replace the local copy with the trainee record stored in the database
new_trainee = self.database.get_trainee_by_username(
new_trainee.username)
new_trainer = deepcopy(self.test_trainer)
# Replace the local copy with the trainer record stored in the database
new_trainer = self.database.get_trainer_by_username(
new_trainer.username)
# Set trainee and trainer coordinates close enough to count as nearby
self.database.set_coords(new_trainee._id, new_trainee.lng, new_trainee.lat)
self.database.set_coords(new_trainer._id, new_trainer.lng, new_trainer.lat)
# running test
returned_list = self.database.find_trainers_near_user(new_trainee.lng, new_trainee.lat)
# the nearby search should return at least one trainer
assert returned_list
def test_create_event(self):
"""Tests the creation of an event within the database"""
def clean_up(trainee, trainer):
self.database.mongo.event.delete_many({
'title': 'testEvent',
'creator_id': ObjectId(trainee._id)
})
self.database.mongo.event.delete_many({
'title': 'testEvent',
'creator_id': ObjectId(trainer._id)
})
trainee = self.database.get_trainee_by_username('testtrainee')
trainer = self.database.get_trainer_by_username('testtrainer')
try:
clean_up(trainee, trainer)
event = Event(
_id=None,
creator_id=trainee._id,
title='testEvent',
date=datetime(2020, 12, 2),
description='a simple desc',
participant_id=trainer._id
)
self.database.create_event(event)
database_event = self.database.mongo.event.find_one({
'title': event.title,
'creator_id': ObjectId(trainee._id)
})
assert database_event['title'] == event.title
assert str(database_event['creator_id']) == str(event.creator_id)
assert database_event['date'] == str(event.date)
assert database_event['description'] == event.description
assert str(database_event['participant_id']
) == event.participant_id
clean_up(trainee, trainer)
event = Event(
_id=None,
creator_id=trainer._id,
title='testEvent',
date=datetime(2020, 12, 2),
description='a simple desc',
participant_id=trainer._id
)
self.database.create_event(event)
database_event = self.database.mongo.event.find_one({
'title': event.title,
'creator_id': ObjectId(trainer._id)
})
assert database_event['title'] == event.title
assert str(database_event['creator_id']) == str(event.creator_id)
assert database_event['date'] == str(event.date)
assert database_event['description'] == event.description
assert str(database_event['participant_id']
) == event.participant_id
finally:
clean_up(trainee, trainer)
def test_remove_event(self):
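"""Tests that deleting an event removes it from the database for both creator roles"""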
def clean_up(trainee, trainer):
self.database.mongo.event.delete_many({
'title': 'testEvent',
'creator_id': ObjectId(trainee._id)
})
self.database.mongo.event.delete_many({
'title': 'testEvent',
'creator_id': ObjectId(trainer._id)
})
trainee = self.database.get_trainee_by_username('testtrainee')
trainer = self.database.get_trainer_by_username('testtrainer')
try:
clean_up(trainee, trainer)
event = Event(
_id=None,
creator_id=trainee._id,
title='testEvent',
date=datetime(2020, 12, 2),
description='a simple desc',
participant_id=trainer._id
)
self.database.create_event(event)
database_event = self.database.mongo.event.find_one({
'title': event.title,
'creator_id': ObjectId(trainee._id)
})
assert database_event['title'] == event.title
assert str(database_event['creator_id']) == str(event.creator_id)
assert database_event['date'] == str(event.date)
assert database_event['description'] == event.description
assert str(database_event['participant_id']
) == event.participant_id
self.database.delete_event(database_event['_id'], trainee._id)
database_event = self.database.mongo.event.find_one({
'title': event.title,
'creator_id': ObjectId(trainee._id)
})
assert database_event is None
event = Event(
_id=None,
creator_id=trainer._id,
title='testEvent',
date=datetime(2020, 12, 2),
description='a simple desc',
participant_id=trainee._id
)
self.database.create_event(event)
database_event = self.database.mongo.event.find_one({
'title': event.title,
'creator_id': ObjectId(trainer._id)
})
assert database_event['title'] == event.title
assert str(database_event['creator_id']) == str(event.creator_id)
assert database_event['date'] == str(event.date)
assert database_event['description'] == event.description
assert str(database_event['participant_id']
) == event.participant_id
self.database.delete_event(database_event['_id'], trainer._id)
database_event = self.database.mongo.event.find_one({
'title': event.title,
'creator_id': ObjectId(trainer._id)
})
assert database_event is None
finally:
clean_up(trainee, trainer)
def test_get_event_by_attributes(self):
"""Test to get an event class from the database using specific attributes"""
def clean_up(trainee, trainer):
self.database.mongo.event.delete_many({
'title': 'testEvent',
'creator_id': ObjectId(trainee._id)
})
self.database.mongo.event.delete_many({
'title': 'testEvent',
'creator_id': ObjectId(trainer._id)
})
trainee = self.database.get_trainee_by_username('testtrainee')
trainer = self.database.get_trainer_by_username('testtrainer')
try:
clean_up(trainee, trainer)
event = Event(
_id=None,
creator_id=trainee._id,
title='testEvent',
date=datetime(2020, 12, 2),
description='a simple desc',
participant_id=trainer._id
)
self.database.create_event(event)
database_event = self.database.mongo.event.find_one({
'title': event.title,
'creator_id': ObjectId(trainee._id)
})
assert database_event is not None
database_event = self.database.get_event_by_attributes(creator_id=event.creator_id,
title=event.title)
assert database_event is not None
assert database_event.title == event.title
database_event = self.database.get_event_by_attributes(creator_id=event.creator_id,
date=str(event.date))
assert database_event is not None
assert database_event.date == event.date
database_event = self.database.get_event_by_attributes(creator_id=event.creator_id,
date=event.date)
assert database_event is not None
assert database_event.date == event.date
database_event = self.database.get_event_by_attributes(creator_id=event.creator_id,
description=event.description)
assert database_event is not None
assert database_event.description == event.description
database_event = self.database.get_event_by_attributes(creator_id=event.creator_id,
participant_id=event.participant_id)
assert database_event is not None
assert database_event.participant_id == event.participant_id
finally:
clean_up(trainee, trainer)
def test_list_events(self):
"""Checks to see if a list of recieved and created events are stored within the database"""
def clean_up(trainee, trainer):
self.database.mongo.event.delete_many({
'creator_id': ObjectId(trainee._id)
})
self.database.mongo.event.delete_many({
'creator_id': ObjectId(trainer._id)
})
trainee = self.database.get_trainee_by_username('testtrainee')
trainer = self.database.get_trainer_by_username('testtrainer')
try:
clean_up(trainee, trainer)
event = Event(
_id=None,
creator_id=trainee._id,
title='testEvent',
date=datetime(2020, 12, 2),
description='a simple desc',
participant_id=trainer._id
)
self.database.create_event(event)
database_event = self.database.mongo.event.find_one({
'title': event.title,
'creator_id': ObjectId(trainee._id)
})
assert database_event is not None
assert str(database_event['creator_id']) == event.creator_id
assert str(database_event['participant_id']
) == event.participant_id
event = Event(
_id=None,
creator_id=trainer._id,
title='testEvent',
date=datetime(2020, 12, 2),
description='a simple desc',
participant_id=trainee._id
)
self.database.create_event(event)
database_event = self.database.mongo.event.find_one({
'title': event.title,
'creator_id': ObjectId(trainer._id)
})
assert database_event is not None
assert str(database_event['creator_id']) == event.creator_id
assert str(database_event['participant_id']
) == event.participant_id
finally:
clean_up(trainee, trainer)
| 38.468917 | 103 | 0.591206 | 6,787 | 64,974 | 5.384264 | 0.037867 | 0.116575 | 0.054183 | 0.036122 | 0.857374 | 0.820294 | 0.790083 | 0.745669 | 0.698574 | 0.66393 | 0 | 0.015048 | 0.321898 | 64,974 | 1,688 | 104 | 38.491706 | 0.814363 | 0.065811 | 0 | 0.700748 | 0 | 0 | 0.058636 | 0.006221 | 0 | 0 | 0 | 0 | 0.175395 | 1 | 0.049875 | false | 0.024938 | 0.007481 | 0 | 0.061513 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
4aba4343e27429a07aa1f018a1ce3dbd2c6e891c | 7,249 | py | Python | tests/test_integration.py | caseynbrock/opal2 | 518291955c9a2bc2c958988bf405afd57546e1f2 | [
"MIT"
] | null | null | null | tests/test_integration.py | caseynbrock/opal2 | 518291955c9a2bc2c958988bf405afd57546e1f2 | [
"MIT"
] | null | null | null | tests/test_integration.py | caseynbrock/opal2 | 518291955c9a2bc2c958988bf405afd57546e1f2 | [
"MIT"
] | 1 | 2019-05-14T22:01:35.000Z | 2019-05-14T22:01:35.000Z | #!/usr/bin/env python
import sys
sys.path.append('.')
import os
import shutil
import tools_for_tests
import numpy as np
import pytest
import eval_pp
import analysis_driver
# directory of test input files
main_test_inputs_dir = os.path.join(os.getcwd(), 'tests', 'test_inputs_integration')
def test_eval_pp_main_no_converge():
"""
raises NoCutoffConvergence if there is no gcut convergence
"""
test_inputs_dir = os.path.join(main_test_inputs_dir, 'eval_pp_main_test')
with pytest.raises(eval_pp.NoCutoffConvergence):
with tools_for_tests.TemporaryDirectory() as tmp_dir:
# set up a mock work directory:
shutil.copy(os.path.join('..', 'calc_nflops'), os.getcwd())
shutil.copy(os.path.join(test_inputs_dir, 'configurations.in.example'), 'configurations.in')
shutil.copy(os.path.join(test_inputs_dir, 'allelectron_forces.dat.example'), 'allelectron_forces.dat')
os.mkdir('workdir.example')
os.chdir('workdir.example')
shutil.copy(os.path.join(test_inputs_dir, 'argvf.template'), 'argvf.template')
shutil.copy(os.path.join(test_inputs_dir, 'crystal.template'), 'crystal.template')
shutil.copy(os.path.join(test_inputs_dir, 'PAW.Si'), 'PAW.Si')
shutil.copy(os.path.join(test_inputs_dir, 'PAW.Ge'), 'PAW.Ge')
# run eval_pp
gcuts = [20., 30., 40.]
energy_tol = 1.e-100 # set impossible tolerance so it doesn't converge
objectives = eval_pp.main(['Si', 'Ge'], gcuts, energy_tol)
def test_eval_pp_main():
"""
This should converge at gcut=40 and then return objectives:
accu = 0.12408939054384546
work = 0.009064640532217023
the "correct" accuracy objectives could depend on the socorro build,
and the work objective may depend on some other things such as parallelization.
"""
test_inputs_dir = os.path.join(main_test_inputs_dir, 'eval_pp_main_test')
with tools_for_tests.TemporaryDirectory() as tmp_dir:
# set up a mock work directory:
shutil.copy(os.path.join('..', 'calc_nflops'), os.getcwd())
shutil.copy(os.path.join(test_inputs_dir, 'configurations.in.example'), 'configurations.in')
shutil.copy(os.path.join(test_inputs_dir, 'allelectron_forces.dat.example'), 'allelectron_forces.dat')
os.mkdir('workdir.example')
os.chdir('workdir.example')
shutil.copy(os.path.join(test_inputs_dir, 'argvf.template'), 'argvf.template')
shutil.copy(os.path.join(test_inputs_dir, 'crystal.template'), 'crystal.template')
shutil.copy(os.path.join(test_inputs_dir, 'PAW.Si'), 'PAW.Si')
shutil.copy(os.path.join(test_inputs_dir, 'PAW.Ge'), 'PAW.Ge')
# run eval_pp
gcuts = [20., 30., 40., 50.]
energy_tol = 3.e-3
objectives = eval_pp.main(['Si', 'Ge'], gcuts, energy_tol)
assert np.isclose(objectives['accu'], 0.12408939054384546, rtol=0., atol=0.0002)
assert np.isclose(objectives['work'], 0.009064640532217023, rtol=0., atol=0.000001)
def test_analysis_driver_main_Si_noconverge():
"""
For this test, the silicon inputs are bad so atompaw does not converge,
and the analysis driver returns 100s for both objectives
"""
test_inputs_dir = os.path.join(main_test_inputs_dir, 'analysis_driver_main_Si_noconverge')
with tools_for_tests.TemporaryDirectory() as tmp_dir:
# set up a mock work directory:
shutil.copy(os.path.join('..', 'calc_nflops'), os.getcwd())
shutil.copy(os.path.join(test_inputs_dir, 'opal.in'), 'opal.in')
shutil.copy(os.path.join(test_inputs_dir, 'configurations.in.example'), 'configurations.in')
shutil.copy(os.path.join(test_inputs_dir, 'allelectron_forces.dat.example'), 'allelectron_forces.dat')
os.mkdir('workdir.example')
os.chdir('workdir.example')
shutil.copy(os.path.join(test_inputs_dir, 'argvf.template'), 'argvf.template')
shutil.copy(os.path.join(test_inputs_dir, 'crystal.template'), 'crystal.template')
shutil.copy(os.path.join(test_inputs_dir, 'Si.in.template'), os.getcwd())
shutil.copy(os.path.join(test_inputs_dir, 'Ge.in.template'), os.getcwd())
shutil.copy(os.path.join(test_inputs_dir, 'params'), os.getcwd())
# run analysis driver
analysis_driver.main()
with open('results') as fin:
assert fin.readlines()==[' 1.0000000000000000E+02 accu\n', ' 1.0000000000000000E+02 work\n']
def test_analysis_driver_main_success():
"""
"""
test_inputs_dir = os.path.join(main_test_inputs_dir, 'analysis_driver_main_success')
with tools_for_tests.TemporaryDirectory() as tmp_dir:
# set up a mock work directory:
shutil.copy(os.path.join('..', 'calc_nflops'), os.getcwd())
shutil.copy(os.path.join(test_inputs_dir, 'opal.in'), 'opal.in')
shutil.copy(os.path.join(test_inputs_dir, 'configurations.in.example'), 'configurations.in')
shutil.copy(os.path.join(test_inputs_dir, 'allelectron_forces.dat.example'), 'allelectron_forces.dat')
os.mkdir('workdir.example')
os.chdir('workdir.example')
shutil.copy(os.path.join(test_inputs_dir, 'argvf.template'), 'argvf.template')
shutil.copy(os.path.join(test_inputs_dir, 'crystal.template'), 'crystal.template')
shutil.copy(os.path.join(test_inputs_dir, 'Si.in.template'), os.getcwd())
shutil.copy(os.path.join(test_inputs_dir, 'Ge.in.template'), os.getcwd())
shutil.copy(os.path.join(test_inputs_dir, 'params'), os.getcwd())
# run analysis driver
analysis_driver.main()
with open('results') as fin:
assert fin.readlines()==[' 7.6992177462473416E-02 accu\n', ' 8.7573645819723784E-03 work\n']
def test_analysis_driver_main_nogcut_converge():
"""
Returns sentinel objectives of 95 for both accuracy and work when there is no gcut convergence.
An impossible energy tolerance is set in opal.in to force this case.
"""
test_inputs_dir = os.path.join(main_test_inputs_dir, 'analysis_driver_main_nogcut_converge')
with tools_for_tests.TemporaryDirectory() as tmp_dir:
# set up a mock work directory:
shutil.copy(os.path.join('..', 'calc_nflops'), os.getcwd())
shutil.copy(os.path.join(test_inputs_dir, 'opal.in'), 'opal.in')
shutil.copy(os.path.join(test_inputs_dir, 'configurations.in.example'), 'configurations.in')
shutil.copy(os.path.join(test_inputs_dir, 'allelectron_forces.dat.example'), 'allelectron_forces.dat')
os.mkdir('workdir.example')
os.chdir('workdir.example')
shutil.copy(os.path.join(test_inputs_dir, 'argvf.template'), 'argvf.template')
shutil.copy(os.path.join(test_inputs_dir, 'crystal.template'), 'crystal.template')
shutil.copy(os.path.join(test_inputs_dir, 'Si.in.template'), os.getcwd())
shutil.copy(os.path.join(test_inputs_dir, 'Ge.in.template'), os.getcwd())
shutil.copy(os.path.join(test_inputs_dir, 'params'), os.getcwd())
# run analysis driver
analysis_driver.main()
with open('results') as fin:
assert fin.readlines()==[' 9.5000000000000000E+01 accu\n', ' 9.5000000000000000E+01 work\n']
| 48.97973 | 114 | 0.678163 | 999 | 7,249 | 4.733734 | 0.152152 | 0.101501 | 0.129203 | 0.138719 | 0.778389 | 0.754916 | 0.750053 | 0.737365 | 0.737365 | 0.721294 | 0 | 0.038526 | 0.180025 | 7,249 | 147 | 115 | 49.312925 | 0.757066 | 0.122776 | 0 | 0.702128 | 0 | 0 | 0.24972 | 0.102064 | 0 | 0 | 0 | 0 | 0.053191 | 1 | 0.053191 | false | 0 | 0.085106 | 0 | 0.138298 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
4359ddba3b02f31eb08c9143b767f943ec3e26e0 | 320 | py | Python | practices/practice_2.2/src/database/manage/__init__.py | JoelHernandez343/networking-administration | 65de814cde6fc08c47e989027a95a328ba9950df | [
"MIT"
] | null | null | null | practices/practice_2.2/src/database/manage/__init__.py | JoelHernandez343/networking-administration | 65de814cde6fc08c47e989027a95a328ba9950df | [
"MIT"
] | null | null | null | practices/practice_2.2/src/database/manage/__init__.py | JoelHernandez343/networking-administration | 65de814cde6fc08c47e989027a95a328ba9950df | [
"MIT"
] | 1 | 2022-03-02T16:19:34.000Z | 2022-03-02T16:19:34.000Z | from database import models
from database.database import engine
from database.manage import interface
from database.manage import vlan
def drop_db():
models.Base.metadata.drop_all(bind=engine)
def create_db():
models.Base.metadata.create_all(bind=engine)
def recreate_db():
drop_db()
create_db()
| 16.842105 | 48 | 0.759375 | 46 | 320 | 5.130435 | 0.369565 | 0.20339 | 0.152542 | 0.20339 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.153125 | 320 | 18 | 49 | 17.777778 | 0.870849 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.272727 | true | 0 | 0.363636 | 0 | 0.636364 | 0 | 0 | 0 | 0 | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
435a6237d6348784a8a2945a0a9ce59ab99f4a11 | 7,846 | py | Python | hypha/apply/projects/models/payment.py | killapop/hypha | a2880e384029dae77012abfc753f2af9cef1a5e1 | [
"BSD-3-Clause"
] | 16 | 2020-01-24T11:52:46.000Z | 2021-02-02T22:21:04.000Z | hypha/apply/projects/models/payment.py | killapop/hypha | a2880e384029dae77012abfc753f2af9cef1a5e1 | [
"BSD-3-Clause"
] | 538 | 2020-01-24T08:27:13.000Z | 2021-04-05T07:15:01.000Z | hypha/apply/projects/models/payment.py | killapop/hypha | a2880e384029dae77012abfc753f2af9cef1a5e1 | [
"BSD-3-Clause"
] | 17 | 2020-02-07T14:55:54.000Z | 2021-04-04T19:32:38.000Z | import decimal
import os
from django.conf import settings
from django.core.validators import MinValueValidator
from django.db import models
from django.db.models import Sum, Value
from django.db.models.functions import Coalesce
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from hypha.apply.utils.storage import PrivateStorage
SUBMITTED = 'submitted'
CHANGES_REQUESTED = 'changes_requested'
UNDER_REVIEW = 'under_review'
PAID = 'paid'
DECLINED = 'declined'
REQUEST_STATUS_CHOICES = [
(SUBMITTED, _('Submitted')),
(CHANGES_REQUESTED, _('Changes Requested')),
(UNDER_REVIEW, _('Under Review')),
(PAID, _('Paid')),
(DECLINED, _('Declined')),
]
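# Upload-path callables passed to FileField(upload_to=...): uploaded files
# are namespaced under the owning project's id.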
def receipt_path(instance, filename):
return f'projects/{instance.payment_request.project_id}/payment_receipts/{filename}'
def invoice_path(instance, filename):
return f'projects/{instance.project_id}/payment_invoices/{filename}'
class PaymentApproval(models.Model):
request = models.ForeignKey('PaymentRequest', on_delete=models.CASCADE, related_name="approvals")
by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="payment_approvals")
created_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
return _('Approval for {request} by {user}').format(request=self.request, user=self.by)
class PaymentReceipt(models.Model):
payment_request = models.ForeignKey("PaymentRequest", on_delete=models.CASCADE, related_name="receipts")
file = models.FileField(upload_to=receipt_path, storage=PrivateStorage())
def __str__(self):
return os.path.basename(self.file.name)
class PaymentRequestQueryset(models.QuerySet):
def in_progress(self):
return self.exclude(status__in=[DECLINED, PAID])
def rejected(self):
return self.filter(status=DECLINED)
def not_rejected(self):
return self.exclude(status=DECLINED)
def total_value(self, field):
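# Coalesce maps the NULL that Sum() yields on an empty queryset to 0.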
return self.aggregate(total=Coalesce(Sum(field), Value(0)))['total']
def paid_value(self):
return self.filter(status=PAID).total_value('paid_value')
def unpaid_value(self):
return self.filter(status__in=[SUBMITTED, UNDER_REVIEW]).total_value('requested_value')
class InvoiceQueryset(models.QuerySet):
def in_progress(self):
return self.exclude(status__in=[DECLINED, PAID])
def rejected(self):
return self.filter(status=DECLINED)
def not_rejected(self):
return self.exclude(status=DECLINED)
def total_value(self, field):
return self.aggregate(total=Coalesce(Sum(field), Value(0)))['total']
def paid_value(self):
return self.filter(status=PAID).total_value('paid_value')
def unpaid_value(self):
return self.filter(status__in=[SUBMITTED, UNDER_REVIEW]).total_value('requested_value')
class Invoice(models.Model):
project = models.ForeignKey("Project", on_delete=models.CASCADE, related_name="invoices")
by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="invoices")
date_from = models.DateTimeField()
date_to = models.DateTimeField()
amount = models.DecimalField(
default=0,
max_digits=10,
decimal_places=2,
validators=[MinValueValidator(decimal.Decimal('0.01'))],
)
paid_value = models.DecimalField(
max_digits=10,
decimal_places=2,
validators=[MinValueValidator(decimal.Decimal('0.01'))],
null=True
)
document = models.FileField(upload_to=invoice_path, storage=PrivateStorage())
requested_at = models.DateTimeField(auto_now_add=True)
message_for_pm = models.TextField(blank=True, verbose_name=_('Message'))
comment = models.TextField(blank=True)
status = models.TextField(choices=REQUEST_STATUS_CHOICES, default=SUBMITTED)
objects = InvoiceQueryset.as_manager()
def __str__(self):
return _('Invoice requested for {project}').format(project=self.project)
@property
def has_changes_requested(self):
return self.status == CHANGES_REQUESTED
@property
def status_display(self):
return self.get_status_display()
def can_user_delete(self, user):
if user.is_applicant:
if self.status in (SUBMITTED, CHANGES_REQUESTED):
return True
if user.is_apply_staff:
if self.status in {SUBMITTED}:
return True
return False
def can_user_edit(self, user):
if user.is_applicant:
if self.status in {SUBMITTED, CHANGES_REQUESTED}:
return True
if user.is_apply_staff:
if self.status in {SUBMITTED}:
return True
return False
def can_user_change_status(self, user):
if not user.is_apply_staff:
return False # Users can't change status
if self.status in {PAID, DECLINED}:
return False
return True
@property
def value(self):
return self.paid_value or self.amount
def get_absolute_url(self):
return reverse('apply:projects:invoices:detail', args=[self.pk])
class SupportingDocument(models.Model):
document = models.FileField(
upload_to="supporting_documents", storage=PrivateStorage()
)
invoice = models.ForeignKey(
Invoice,
on_delete=models.CASCADE,
related_name='supporting_documents',
)
def __str__(self):
# Invoice defines no ``name`` field; fall back to its string representation.
return str(self.invoice) + ' -> ' + self.document.name
class PaymentRequest(models.Model):
project = models.ForeignKey("Project", on_delete=models.CASCADE, related_name="payment_requests")
by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="payment_requests")
requested_value = models.DecimalField(
default=0,
max_digits=10,
decimal_places=2,
validators=[MinValueValidator(decimal.Decimal('0.01'))],
)
paid_value = models.DecimalField(
max_digits=10,
decimal_places=2,
validators=[MinValueValidator(decimal.Decimal('0.01'))],
null=True
)
invoice = models.FileField(upload_to=invoice_path, storage=PrivateStorage())
requested_at = models.DateTimeField(auto_now_add=True)
date_from = models.DateTimeField()
date_to = models.DateTimeField()
comment = models.TextField(blank=True)
status = models.TextField(choices=REQUEST_STATUS_CHOICES, default=SUBMITTED)
objects = PaymentRequestQueryset.as_manager()
def __str__(self):
return _('Payment requested for {project}').format(project=self.project)
@property
def has_changes_requested(self):
return self.status == CHANGES_REQUESTED
@property
def status_display(self):
return self.get_status_display()
def can_user_delete(self, user):
if user.is_applicant:
if self.status in (SUBMITTED, CHANGES_REQUESTED):
return True
if user.is_apply_staff:
if self.status in {SUBMITTED}:
return True
return False
def can_user_edit(self, user):
if user.is_applicant:
if self.status in {SUBMITTED, CHANGES_REQUESTED}:
return True
if user.is_apply_staff:
if self.status in {SUBMITTED}:
return True
return False
def can_user_change_status(self, user):
if not user.is_apply_staff:
return False # Users can't change status
if self.status in {PAID, DECLINED}:
return False
return True
@property
def value(self):
return self.paid_value or self.requested_value
def get_absolute_url(self):
return reverse('apply:projects:payments:detail', args=[self.pk])
| 30.410853 | 112 | 0.684935 | 927 | 7,846 | 5.586839 | 0.153182 | 0.04441 | 0.045955 | 0.027032 | 0.779301 | 0.771771 | 0.755937 | 0.729484 | 0.704962 | 0.686812 | 0 | 0.004534 | 0.212847 | 7,846 | 257 | 113 | 30.529183 | 0.834035 | 0.0065 | 0 | 0.62963 | 0 | 0 | 0.081751 | 0.024641 | 0 | 0 | 0 | 0 | 0 | 1 | 0.174603 | false | 0 | 0.05291 | 0.142857 | 0.661376 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 6 |
436321348fe8fda261946859f8cfaea74aa02537 | 222 | py | Python | gdc/gdc/doctype/kurseinladungsteilnehmerin/kurseinladungsteilnehmerin.py | motzmose/gdcvw | 356cb094b70219ccda060c4c0ba9fcca842162ff | [
"MIT"
] | null | null | null | gdc/gdc/doctype/kurseinladungsteilnehmerin/kurseinladungsteilnehmerin.py | motzmose/gdcvw | 356cb094b70219ccda060c4c0ba9fcca842162ff | [
"MIT"
] | null | null | null | gdc/gdc/doctype/kurseinladungsteilnehmerin/kurseinladungsteilnehmerin.py | motzmose/gdcvw | 356cb094b70219ccda060c4c0ba9fcca842162ff | [
"MIT"
] | null | null | null | # Copyright (c) 2022, didaktik-aktuell e.V. and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class Kurseinladungsteilnehmerin(Document):
pass
| 24.666667 | 60 | 0.801802 | 28 | 222 | 6.357143 | 0.821429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.020619 | 0.126126 | 222 | 8 | 61 | 27.75 | 0.896907 | 0.540541 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 6 |
436ba9c698a1431a95278733e5d431fe030d9795 | 406 | py | Python | vertica_python/vertica/messages/frontend_messages/copy_fail.py | jbfavre/vertica-python | c53ffc49a971e9a806679f95e8680847120f49e4 | [
"MIT"
] | 1 | 2016-10-01T20:28:31.000Z | 2016-10-01T20:28:31.000Z | vertica_python/vertica/messages/frontend_messages/copy_fail.py | jbfavre/vertica-python | c53ffc49a971e9a806679f95e8680847120f49e4 | [
"MIT"
] | null | null | null | vertica_python/vertica/messages/frontend_messages/copy_fail.py | jbfavre/vertica-python | c53ffc49a971e9a806679f95e8680847120f49e4 | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from struct import pack
from vertica_python.vertica.messages.message import FrontendMessage
class CopyFail(FrontendMessage):
def __init__(self, error_message):
self.error_message = error_message
def to_bytes(self):
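# struct format '<n>sx' packs the error-message bytes followed by one
# zero pad byte, i.e. a null-terminated string.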
return self.message_string(pack('{0}sx'.format(len(self.error_message)), self.error_message))
CopyFail._message_id('f')
| 22.555556 | 101 | 0.76601 | 53 | 406 | 5.509434 | 0.509434 | 0.205479 | 0.219178 | 0.136986 | 0.219178 | 0.219178 | 0 | 0 | 0 | 0 | 0 | 0.002874 | 0.142857 | 406 | 17 | 102 | 23.882353 | 0.836207 | 0 | 0 | 0 | 0 | 0 | 0.014778 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.333333 | 0.111111 | 0.777778 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 6 |
4375453c54665a3e3f4b980173d08daf1903eb0a | 33 | py | Python | demo/test3/test3.py | phpython/phpython | 1dd7a4f36461eca1fbe04364fd05f2e08209a499 | [
"MIT"
] | 13 | 2017-09-03T17:33:14.000Z | 2022-03-16T00:38:32.000Z | demo/test3/test3.py | phpython/phpython | 1dd7a4f36461eca1fbe04364fd05f2e08209a499 | [
"MIT"
] | 2 | 2017-10-09T11:33:11.000Z | 2019-01-08T17:45:28.000Z | demo/test3/test3.py | phpython/phpython | 1dd7a4f36461eca1fbe04364fd05f2e08209a499 | [
"MIT"
] | 4 | 2019-01-08T15:33:33.000Z | 2020-09-28T15:17:08.000Z | print "A"
import lupa
print "C" | 6.6 | 11 | 0.666667 | 6 | 33 | 3.666667 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.212121 | 33 | 5 | 12 | 6.6 | 0.846154 | 0 | 0 | 0 | 0 | 0 | 0.058824 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.333333 | null | null | 0.666667 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 6 |
43845483b6501fe50b53f265595351a82afaf910 | 26,265 | py | Python | tests/test_broker.py | mikenerone/distmqtt | aa33c401bcc9728516b2cae123ee78d7b22bbbe9 | [
"MIT"
] | null | null | null | tests/test_broker.py | mikenerone/distmqtt | aa33c401bcc9728516b2cae123ee78d7b22bbbe9 | [
"MIT"
] | null | null | null | tests/test_broker.py | mikenerone/distmqtt | aa33c401bcc9728516b2cae123ee78d7b22bbbe9 | [
"MIT"
] | null | null | null | # Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
import anyio
import os
import logging
import unittest
from unittest.mock import patch, call, MagicMock
from distmqtt.adapters import StreamAdapter
from distmqtt.broker import (
EVENT_BROKER_PRE_START,
EVENT_BROKER_POST_START,
EVENT_BROKER_PRE_SHUTDOWN,
EVENT_BROKER_POST_SHUTDOWN,
EVENT_BROKER_CLIENT_CONNECTED,
EVENT_BROKER_CLIENT_DISCONNECTED,
EVENT_BROKER_CLIENT_SUBSCRIBED,
EVENT_BROKER_CLIENT_UNSUBSCRIBED,
EVENT_BROKER_MESSAGE_RECEIVED,
create_broker,
)
from distmqtt.client import open_mqttclient, ConnectException
from distmqtt.mqtt import (
ConnectPacket,
ConnackPacket,
PublishPacket,
PubrecPacket,
PubrelPacket,
DisconnectPacket,
)
from distmqtt.mqtt.connect import ConnectVariableHeader, ConnectPayload
from distmqtt.mqtt.constants import QOS_0, QOS_1, QOS_2
formatter = "%(asctime)s %(name)s:%(lineno)d %(levelname)s - %(message)s"
logging.basicConfig(level=logging.INFO, format=formatter)
log = logging.getLogger(__name__)
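# Derive a per-process port from the pid so concurrent test runs are
# unlikely to contend for the same listener socket.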
PORT = 40000 + (os.getpid() + 3) % 10000
URL = "mqtt://127.0.0.1:%d/" % PORT
test_config = {
"listeners": {
"default": {"type": "tcp", "bind": "127.0.0.1:%d" % PORT, "max_connections": 10}
},
"sys_interval": 0,
"auth": {"allow-anonymous": True},
}
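# MagicMock variant that can be awaited: ``await mock`` resolves immediately
# to the mock itself, so the patched PluginManager's async methods can be
# awaited without a real implementation.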
class AsyncMock(MagicMock):
def __await__(self):
async def foo():
return self
return foo().__await__()
class BrokerTest(unittest.TestCase):
@patch("distmqtt.broker.PluginManager", new_callable=AsyncMock)
def test_start_stop(self, MockPluginManager): # pylint: disable=unused-argument
async def test_coro():
async with create_broker(
test_config, plugin_namespace="distmqtt.test.plugins"
) as broker:
broker.plugins_manager._tg = broker._tg
self.assertTrue(broker.transitions.is_started())
self.assertDictEqual(broker._sessions, {})
self.assertIn("default", broker._servers)
MockPluginManager.assert_has_calls(
[
call().fire_event(EVENT_BROKER_PRE_START),
call().fire_event(EVENT_BROKER_POST_START),
],
any_order=True,
)
MockPluginManager.reset_mock()
MockPluginManager.assert_has_calls(
[
call().fire_event(EVENT_BROKER_PRE_SHUTDOWN),
call().fire_event(EVENT_BROKER_POST_SHUTDOWN),
],
any_order=True,
)
self.assertTrue(broker.transitions.is_stopped())
anyio.run(test_coro, backend="trio")
@patch("distmqtt.broker.PluginManager", new_callable=AsyncMock)
def test_client_connect(self, MockPluginManager):
async def test_coro():
async with create_broker(
test_config, plugin_namespace="distmqtt.test.plugins"
) as broker:
broker.plugins_manager._tg = broker._tg
self.assertTrue(broker.transitions.is_started())
async with open_mqttclient() as client:
ret = await client.connect(URL)
self.assertEqual(ret, 0)
self.assertEqual(len(broker._sessions), 1)
self.assertIn(client.session.client_id, broker._sessions)
await anyio.sleep(0.1) # let the broker task process the packet
self.assertTrue(broker.transitions.is_stopped())
self.assertDictEqual(broker._sessions, {})
MockPluginManager.assert_has_calls(
[
call().fire_event(
EVENT_BROKER_CLIENT_CONNECTED, client_id=client.session.client_id,
),
call().fire_event(
EVENT_BROKER_CLIENT_DISCONNECTED, client_id=client.session.client_id,
),
],
any_order=True,
)
anyio.run(test_coro)
@patch("distmqtt.broker.PluginManager", new_callable=AsyncMock)
def test_client_connect_will_flag(self, MockPluginManager): # pylint: disable=unused-argument
async def test_coro():
async with create_broker(
test_config, plugin_namespace="distmqtt.test.plugins"
) as broker:
broker.plugins_manager._tg = broker._tg
self.assertTrue(broker.transitions.is_started())
async with await anyio.connect_tcp("127.0.0.1", PORT) as conn:
stream = StreamAdapter(conn)
vh = ConnectVariableHeader()
payload = ConnectPayload()
vh.keep_alive = 10
vh.clean_session_flag = False
vh.will_retain_flag = False
vh.will_flag = True
vh.will_qos = QOS_0
payload.client_id = "test_id"
payload.will_message = b"test"
payload.will_topic = "/topic"
connect = ConnectPacket(vh=vh, payload=payload)
await connect.to_stream(stream)
await ConnackPacket.from_stream(stream)
disconnect = DisconnectPacket()
await disconnect.to_stream(stream)
self.assertTrue(broker.transitions.is_stopped())
self.assertDictEqual(broker._sessions, {})
anyio.run(test_coro, backend="trio")
@patch("distmqtt.broker.PluginManager", new_callable=AsyncMock)
def test_client_connect_clean_session_false(
self, MockPluginManager
): # pylint: disable=unused-argument
async def test_coro():
async with create_broker(
test_config, plugin_namespace="distmqtt.test.plugins"
) as broker:
broker.plugins_manager._tg = broker._tg
self.assertTrue(broker.transitions.is_started())
async with open_mqttclient(
client_id="", config={"auto_reconnect": False}
) as client:
return_code = None
try:
await client.connect(URL, cleansession=False)
except ConnectException as ce:
return_code = ce.return_code
self.assertEqual(return_code, 0x02)
self.assertNotIn(client.session.client_id, broker._sessions)
anyio.run(test_coro)
@patch("distmqtt.broker.PluginManager", new_callable=AsyncMock)
def test_client_subscribe(self, MockPluginManager):
async def test_coro():
async with create_broker(
test_config, plugin_namespace="distmqtt.test.plugins"
) as broker:
broker.plugins_manager._tg = broker._tg
self.assertTrue(broker.transitions.is_started())
async with open_mqttclient() as client:
ret = await client.connect(URL)
self.assertEqual(ret, 0)
await client.subscribe([("/topic", QOS_0)])
# Check that the test client's subscription is registered with the broker
subs = broker._subscriptions[("", "topic")]
self.assertEqual(len(subs), 1)
(s, qos) = subs[0]
self.assertEqual(s, client.session)
self.assertEqual(qos, QOS_0)
self.assertTrue(broker.transitions.is_stopped())
MockPluginManager.assert_has_calls(
[
call().fire_event(
EVENT_BROKER_CLIENT_SUBSCRIBED,
client_id=client.session.client_id,
topic="/topic",
qos=QOS_0,
)
],
any_order=True,
)
anyio.run(test_coro, backend="trio")
@patch("distmqtt.broker.PluginManager", new_callable=AsyncMock)
def test_client_subscribe_twice(self, MockPluginManager):
async def test_coro():
async with create_broker(
test_config, plugin_namespace="distmqtt.test.plugins"
) as broker:
broker.plugins_manager._tg = broker._tg
self.assertTrue(broker.transitions.is_started())
async with open_mqttclient() as client:
ret = await client.connect(URL)
self.assertEqual(ret, 0)
await client.subscribe([("/topic", QOS_0)])
# Check that the test client's subscription is registered with the broker
subs = broker._subscriptions[("", "topic")]
self.assertEqual(len(subs), 1)
(s, qos) = subs[0]
self.assertEqual(s, client.session)
self.assertEqual(qos, QOS_0)
await client.subscribe([("/topic", QOS_0)])
self.assertEqual(len(subs), 1)
(s, qos) = subs[0]
self.assertEqual(s, client.session)
self.assertEqual(qos, QOS_0)
self.assertTrue(broker.transitions.is_stopped())
MockPluginManager.assert_has_calls(
[
call().fire_event(
EVENT_BROKER_CLIENT_SUBSCRIBED,
client_id=client.session.client_id,
topic="/topic",
qos=QOS_0,
)
],
any_order=True,
)
anyio.run(test_coro, backend="trio")
@patch("distmqtt.broker.PluginManager", new_callable=AsyncMock)
def test_client_unsubscribe(self, MockPluginManager):
async def test_coro():
async with create_broker(
test_config, plugin_namespace="distmqtt.test.plugins"
) as broker:
broker.plugins_manager._tg = broker._tg
self.assertTrue(broker.transitions.is_started())
async with open_mqttclient() as client:
ret = await client.connect(URL)
self.assertEqual(ret, 0)
await client.subscribe([("/topic", QOS_0)])
# Check that the test client's subscription is registered with the broker
subs = broker._subscriptions[("", "topic")]
self.assertEqual(len(subs), 1)
(s, qos) = subs[0]
self.assertEqual(s, client.session)
self.assertEqual(qos, QOS_0)
await client.unsubscribe(["/topic"])
self.assertEqual(broker._subscriptions[("", "topic")], [])
self.assertTrue(broker.transitions.is_stopped())
MockPluginManager.assert_has_calls(
[
call().fire_event(
EVENT_BROKER_CLIENT_SUBSCRIBED,
client_id=client.session.client_id,
topic="/topic",
qos=QOS_0,
),
call().fire_event(
EVENT_BROKER_CLIENT_UNSUBSCRIBED,
client_id=client.session.client_id,
topic="/topic",
),
],
any_order=True,
)
anyio.run(test_coro, backend="trio")
@patch("distmqtt.broker.PluginManager", new_callable=AsyncMock)
def test_client_publish(self, MockPluginManager):
async def test_coro():
async with create_broker(
test_config, plugin_namespace="distmqtt.test.plugins"
) as broker:
broker.plugins_manager._tg = broker._tg
self.assertTrue(broker.transitions.is_started())
async with open_mqttclient() as pub_client:
ret = await pub_client.connect(URL)
self.assertEqual(ret, 0)
ret_message = await pub_client.publish("/topic", b"data", QOS_0)
await anyio.sleep(0.1) # let the broker task process the packet
self.assertEqual(broker._retained_messages, {})
self.assertTrue(broker.transitions.is_stopped())
MockPluginManager.assert_has_calls(
[
call().fire_event(
EVENT_BROKER_MESSAGE_RECEIVED,
client_id=pub_client.session.client_id,
message=ret_message,
)
],
any_order=True,
)
anyio.run(test_coro)
# @patch('distmqtt.broker.PluginManager', new_callable=AsyncMock)
def test_client_publish_dup(self):
async def test_coro():
async with create_broker(
test_config, plugin_namespace="distmqtt.test.plugins"
) as broker:
broker.plugins_manager._tg = broker._tg
self.assertTrue(broker.transitions.is_started())
async with await anyio.connect_tcp("127.0.0.1", PORT) as conn:
stream = StreamAdapter(conn)
vh = ConnectVariableHeader()
payload = ConnectPayload()
vh.keep_alive = 10
vh.clean_session_flag = False
vh.will_retain_flag = False
payload.client_id = "test_id"
connect = ConnectPacket(vh=vh, payload=payload)
await connect.to_stream(stream)
await ConnackPacket.from_stream(stream)
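# QoS 2 duplicate-delivery handshake: publish packet id 1, then resend it
# with the DUP flag set; the broker must answer each with PUBREC before the
# exchange is completed with PUBREL.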
publish_1 = PublishPacket.build("/test", b"data", 1, False, QOS_2, False)
await publish_1.to_stream(stream)
await PubrecPacket.from_stream(stream)
publish_dup = PublishPacket.build("/test", b"data", 1, True, QOS_2, False)
await publish_dup.to_stream(stream)
await PubrecPacket.from_stream(stream)
pubrel = PubrelPacket.build(1)
await pubrel.to_stream(stream)
# await PubcompPacket.from_stream(stream)
disconnect = DisconnectPacket()
await disconnect.to_stream(stream)
anyio.run(test_coro, backend="trio")
@patch("distmqtt.broker.PluginManager", new_callable=AsyncMock)
def test_client_publish_invalid_topic(
self, MockPluginManager
): # pylint: disable=unused-argument
async def test_coro():
async with create_broker(
test_config, plugin_namespace="distmqtt.test.plugins"
) as broker:
broker.plugins_manager._tg = broker._tg
self.assertTrue(broker.transitions.is_started())
async with open_mqttclient() as pub_client:
ret = await pub_client.connect(URL)
self.assertEqual(ret, 0)
await pub_client.publish("/+", b"data", QOS_0)
self.assertTrue(broker.transitions.is_stopped())
anyio.run(test_coro)
@patch("distmqtt.broker.PluginManager", new_callable=AsyncMock)
def test_client_publish_big(self, MockPluginManager):
async def test_coro():
async with create_broker(
test_config, plugin_namespace="distmqtt.test.plugins"
) as broker:
broker.plugins_manager._tg = broker._tg
self.assertTrue(broker.transitions.is_started())
async with open_mqttclient() as pub_client:
ret = await pub_client.connect(URL)
self.assertEqual(ret, 0)
ret_message = await pub_client.publish(
"/topic", bytearray(b"\x99" * 256 * 1024), QOS_2
)
self.assertEqual(broker._retained_messages, {})
self.assertTrue(broker.transitions.is_stopped())
MockPluginManager.assert_has_calls(
[
call().fire_event(
EVENT_BROKER_MESSAGE_RECEIVED,
client_id=pub_client.session.client_id,
message=ret_message,
)
],
any_order=True,
)
anyio.run(test_coro, backend="trio")
@patch("distmqtt.broker.PluginManager", new_callable=AsyncMock)
def test_client_publish_retain(self, MockPluginManager): # pylint: disable=unused-argument
async def test_coro():
async with create_broker(
test_config, plugin_namespace="distmqtt.test.plugins"
) as broker:
broker.plugins_manager._tg = broker._tg
self.assertTrue(broker.transitions.is_started())
async with open_mqttclient() as pub_client:
ret = await pub_client.connect(URL)
self.assertEqual(ret, 0)
await pub_client.publish("/topic", b"data", QOS_0, retain=True)
await anyio.sleep(0.1) # let the broker task process the packet
self.assertIn("/topic", broker._retained_messages)
retained_message = broker._retained_messages["/topic"]
self.assertEqual(retained_message.source_session, pub_client.session)
self.assertEqual(retained_message.topic, "/topic")
self.assertEqual(retained_message.data, b"data")
self.assertEqual(retained_message.qos, QOS_0)
self.assertTrue(broker.transitions.is_stopped())
anyio.run(test_coro)
@patch("distmqtt.broker.PluginManager", new_callable=AsyncMock)
def test_client_publish_retain_delete(
self, MockPluginManager
): # pylint: disable=unused-argument
async def test_coro():
async with create_broker(
test_config, plugin_namespace="distmqtt.test.plugins"
) as broker:
broker.plugins_manager._tg = broker._tg
self.assertTrue(broker.transitions.is_started())
async with open_mqttclient() as pub_client:
ret = await pub_client.connect(URL)
self.assertEqual(ret, 0)
await pub_client.publish("/topic", b"", QOS_0, retain=True)
await anyio.sleep(0.1) # let the broker task process the packet
self.assertNotIn("/topic", broker._retained_messages)
self.assertTrue(broker.transitions.is_stopped())
anyio.run(test_coro)
@patch("distmqtt.broker.PluginManager", new_callable=AsyncMock)
def test_client_subscribe_publish(self, MockPluginManager): # pylint: disable=unused-argument
async def test_coro():
async with create_broker(
test_config, plugin_namespace="distmqtt.test.plugins"
) as broker:
broker.plugins_manager._tg = broker._tg
self.assertTrue(broker.transitions.is_started())
async with open_mqttclient() as sub_client:
await sub_client.connect(URL)
ret = await sub_client.subscribe(
[("/qos0", QOS_0), ("/qos1", QOS_1), ("/qos2", QOS_2)]
)
self.assertEqual(ret, [QOS_0, QOS_1, QOS_2])
await self._client_publish("/qos0", b"data", QOS_0)
await self._client_publish("/qos1", b"data", QOS_1)
await self._client_publish("/qos2", b"data", QOS_2)
for qos in [QOS_0, QOS_1, QOS_2]:
message = await sub_client.deliver_message()
self.assertIsNotNone(message)
self.assertEqual(message.topic, "/qos%s" % qos)
self.assertEqual(message.data, b"data")
self.assertEqual(message.qos, qos)
self.assertTrue(broker.transitions.is_stopped())
anyio.run(test_coro)
@patch("distmqtt.broker.PluginManager", new_callable=AsyncMock)
def test_client_subscribe_invalid(self, MockPluginManager): # pylint: disable=unused-argument
async def test_coro():
async with create_broker(
test_config, plugin_namespace="distmqtt.test.plugins"
) as broker:
broker.plugins_manager._tg = broker._tg
self.assertTrue(broker.transitions.is_started())
async with open_mqttclient() as sub_client:
await sub_client.connect(URL)
ret = await sub_client.subscribe(
[
("+", QOS_0),
("+/tennis/#", QOS_0),
("sport+", QOS_0),
("sport/+/player1", QOS_0),
]
)
self.assertEqual(ret, [QOS_0, QOS_0, 0x80, QOS_0])
self.assertTrue(broker.transitions.is_stopped())
anyio.run(test_coro, backend="trio")
@patch("distmqtt.broker.PluginManager", new_callable=AsyncMock)
def test_client_subscribe_publish_dollar_topic_1(
self, MockPluginManager
): # pylint: disable=unused-argument
async def test_coro():
async with create_broker(
test_config, plugin_namespace="distmqtt.test.plugins"
) as broker:
broker.plugins_manager._tg = broker._tg
self.assertTrue(broker.transitions.is_started())
async with open_mqttclient() as sub_client:
await sub_client.connect(URL)
ret = await sub_client.subscribe([("#", QOS_0)])
self.assertEqual(ret, [QOS_0])
await self._client_publish("/topic", b"data", QOS_0)
message = await sub_client.deliver_message()
self.assertIsNotNone(message)
await self._client_publish("$topic", b"data", QOS_0)
message = None
with self.assertRaises(TimeoutError):
async with anyio.fail_after(1):
message = await sub_client.deliver_message()
self.assertIsNone(message)
self.assertTrue(broker.transitions.is_stopped())
anyio.run(test_coro)
@patch("distmqtt.broker.PluginManager", new_callable=AsyncMock)
def test_client_subscribe_publish_dollar_topic_2(
self, MockPluginManager
): # pylint: disable=unused-argument
async def test_coro():
async with create_broker(
test_config, plugin_namespace="distmqtt.test.plugins"
) as broker:
broker.plugins_manager._tg = broker._tg
self.assertTrue(broker.transitions.is_started())
async with open_mqttclient() as sub_client:
await sub_client.connect(URL)
ret = await sub_client.subscribe([("+/monitor/Clients", QOS_0)])
self.assertEqual(ret, [QOS_0])
await self._client_publish("test/monitor/Clients", b"data", QOS_0)
message = await sub_client.deliver_message()
self.assertIsNotNone(message)
await self._client_publish("$SYS/monitor/Clients", b"data", QOS_0)
message = None
with self.assertRaises(TimeoutError):
async with anyio.fail_after(1):
message = await sub_client.deliver_message()
self.assertIsNone(message)
self.assertTrue(broker.transitions.is_stopped())
anyio.run(test_coro)
@patch("distmqtt.broker.PluginManager", new_callable=AsyncMock)
def test_client_publish_retain_subscribe(
self, MockPluginManager
): # pylint: disable=unused-argument
async def test_coro():
async with create_broker(
test_config, plugin_namespace="distmqtt.test.plugins"
) as broker:
broker.plugins_manager._tg = broker._tg
self.assertTrue(broker.transitions.is_started())
async with open_mqttclient() as sub_client:
await sub_client.connect(URL, cleansession=False)
ret = await sub_client.subscribe(
[("/qos0", QOS_0), ("/qos1", QOS_1), ("/qos2", QOS_2)]
)
self.assertEqual(ret, [QOS_0, QOS_1, QOS_2])
await sub_client.disconnect()
await self._client_publish("/qos0", b"data", QOS_0, retain=True)
await self._client_publish("/qos1", b"data", QOS_1, retain=True)
await self._client_publish("/qos2", b"data", QOS_2, retain=True)
await sub_client.reconnect()
for qos in [QOS_0, QOS_1, QOS_2]:
log.debug("TEST QOS: %d", qos)
message = await sub_client.deliver_message()
log.debug("Message: %r", message.publish_packet)
self.assertIsNotNone(message)
self.assertEqual(message.topic, "/qos%s" % qos)
self.assertEqual(message.data, b"data")
self.assertEqual(message.qos, qos)
self.assertTrue(broker.transitions.is_stopped())
anyio.run(test_coro)
async def _client_publish(self, topic, data, qos, retain=False):
async with open_mqttclient() as pub_client:
ret = await pub_client.connect(URL)
self.assertEqual(ret, 0)
ret = await pub_client.publish(topic, data, qos, retain)
return ret
| 42.986907 | 98 | 0.5585 | 2,592 | 26,265 | 5.42091 | 0.085262 | 0.044837 | 0.048395 | 0.075012 | 0.846274 | 0.830404 | 0.801011 | 0.795175 | 0.785709 | 0.758878 | 0 | 0.010088 | 0.350847 | 26,265 | 610 | 99 | 43.057377 | 0.814018 | 0.033086 | 0 | 0.697897 | 0 | 0.001912 | 0.060579 | 0.034329 | 0 | 0 | 0.000315 | 0 | 0.191205 | 1 | 0.036329 | false | 0 | 0.021033 | 0 | 0.066922 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
439cdaafc8510e05f9b5b80fad907e91cd97f3da | 193 | py | Python | src/graph_transpiler/webdnn/backend/webassembly/kernels/rsqrt.py | steerapi/webdnn | 1df51cc094e5a528cfd3452c264905708eadb491 | [
"MIT"
] | 1 | 2021-04-09T15:55:35.000Z | 2021-04-09T15:55:35.000Z | src/graph_transpiler/webdnn/backend/webassembly/kernels/rsqrt.py | steerapi/webdnn | 1df51cc094e5a528cfd3452c264905708eadb491 | [
"MIT"
] | null | null | null | src/graph_transpiler/webdnn/backend/webassembly/kernels/rsqrt.py | steerapi/webdnn | 1df51cc094e5a528cfd3452c264905708eadb491 | [
"MIT"
] | null | null | null | from webdnn.backend.webassembly.kernels.elementwise import register_elementwise_kernel
from webdnn.graph.operators.rsqrt import Rsqrt
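# Register the reciprocal-square-root operator as a one-line elementwise
# kernel snippet: x0 is the input element, y the output.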
register_elementwise_kernel(Rsqrt, "y = 1.0 / sqrt(x0);")
| 38.6 | 86 | 0.829016 | 26 | 193 | 6 | 0.653846 | 0.128205 | 0.320513 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.016854 | 0.07772 | 193 | 4 | 87 | 48.25 | 0.859551 | 0 | 0 | 0 | 0 | 0 | 0.098446 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
439ff71e647df8c6d48ba7b3f0d655999507fc62 | 49 | py | Python | app/subrunner.py | AI-Wars-Soc/web-api | d5a2048f94e92c95fed40af84abccca9c75a4eca | [
"MIT"
] | null | null | null | app/subrunner.py | AI-Wars-Soc/web-api | d5a2048f94e92c95fed40af84abccca9c75a4eca | [
"MIT"
] | null | null | null | app/subrunner.py | AI-Wars-Soc/web-api | d5a2048f94e92c95fed40af84abccca9c75a4eca | [
"MIT"
] | null | null | null | def get_next_board(board, move):
return None
| 16.333333 | 32 | 0.734694 | 8 | 49 | 4.25 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.183673 | 49 | 2 | 33 | 24.5 | 0.85 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 6 |
43d559f3dc40af4a000629b2cbf7ad9bddadd74f | 14,629 | py | Python | ambari-server/src/test/python/stacks/test_ambari_configuration.py | thaibui/ambari | e8bf4ec5f0e8de15048b4c81027277de4faa94d3 | [
"Apache-2.0"
] | null | null | null | ambari-server/src/test/python/stacks/test_ambari_configuration.py | thaibui/ambari | e8bf4ec5f0e8de15048b4c81027277de4faa94d3 | [
"Apache-2.0"
] | null | null | null | ambari-server/src/test/python/stacks/test_ambari_configuration.py | thaibui/ambari | e8bf4ec5f0e8de15048b4c81027277de4faa94d3 | [
"Apache-2.0"
] | null | null | null | """
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from mock.mock import MagicMock, patch
from unittest import TestCase
# Mock classes for reading from a file
class MagicFile(object):
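"""Minimal file stand-in: exposes read() plus the context-manager protocol so it can replace the object returned by open()."""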
def __init__(self, data):
self.data = data
def read(self):
return self.data
def __exit__(self, exc_type, exc_val, exc_tb):
pass
def __enter__(self):
return self
pass
class TestAmbariConfiguration(TestCase):
def setUp(self):
import imp
self.test_directory = os.path.dirname(os.path.abspath(__file__))
relative_path = '../../../main/resources/stacks/ambari_configuration.py'
ambari_configuration_path = os.path.abspath(os.path.join(self.test_directory, relative_path))
class_name = 'AmbariConfiguration'
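# Load ambari_configuration.py straight from the source tree via imp,
# since the stacks directory is not on the test's import path.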
with open(ambari_configuration_path, 'rb') as fp:
ambari_configuration_impl = imp.load_module('ambari_configuration', fp,
ambari_configuration_path,
('.py', 'rb', imp.PY_SOURCE))
self.ambari_configuration_class = getattr(ambari_configuration_impl, class_name)
def testMissingData(self):
ambari_configuration = self.ambari_configuration_class('{}')
self.assertIsNone(ambari_configuration.get_ambari_server_configuration())
self.assertIsNone(ambari_configuration.get_ambari_server_properties())
def testMissingSSOConfiguration(self):
services_json = {
"ambari-server-configuration": {
}
}
ambari_configuration = self.ambari_configuration_class(services_json)
self.assertIsNone(ambari_configuration.get_ambari_sso_configuration())
self.assertIsNone(ambari_configuration.get_ambari_sso_configuration_value("ambari.sso.property"))
self.assertFalse(ambari_configuration.should_enable_sso("AMBARI"))
def testMissingAmbariProperties(self):
services_json = {
"ambari-server-configuration": {
}
}
ambari_configuration = self.ambari_configuration_class(services_json)
ambari_sso_details = ambari_configuration.get_ambari_sso_details()
self.assertFalse(ambari_sso_details.is_jwt_enabled())
self.assertIsNone(ambari_sso_details.get_jwt_audiences())
self.assertIsNone(ambari_sso_details.get_jwt_cookie_name())
self.assertIsNone(ambari_sso_details.get_jwt_provider_url())
self.assertIsNone(ambari_sso_details.get_jwt_public_key_file())
self.assertIsNone(ambari_sso_details.get_jwt_public_key())
def testAmbariSSOConfigurationNotManagingServices(self):
services_json = {
"ambari-server-configuration": {
"sso-configuration": {
"ambari.sso.enabled_services": "AMBARI"
}
}
}
ambari_configuration = self.ambari_configuration_class(services_json)
self.assertIsNotNone(ambari_configuration.get_ambari_sso_configuration())
self.assertEquals("AMBARI", ambari_configuration.get_ambari_sso_configuration_value("ambari.sso.enabled_services"))
self.assertFalse(ambari_configuration.is_managing_services())
self.assertFalse(ambari_configuration.should_enable_sso("AMBARI"))
self.assertFalse(ambari_configuration.should_disable_sso("AMBARI"))
services_json = {
"ambari-server-configuration": {
"sso-configuration": {
"ambari.sso.manage_services" : "false",
"ambari.sso.enabled_services" : "AMBARI, RANGER"
}
}
}
ambari_configuration = self.ambari_configuration_class(services_json)
self.assertIsNotNone(ambari_configuration.get_ambari_sso_configuration())
self.assertEquals("AMBARI, RANGER", ambari_configuration.get_ambari_sso_configuration_value("ambari.sso.enabled_services"))
self.assertFalse(ambari_configuration.is_managing_services())
self.assertFalse(ambari_configuration.should_enable_sso("AMBARI"))
self.assertFalse(ambari_configuration.should_disable_sso("AMBARI"))
self.assertFalse(ambari_configuration.should_enable_sso("RANGER"))
self.assertFalse(ambari_configuration.should_disable_sso("RANGER"))
services_json = {
"ambari-server-configuration": {
"sso-configuration": {
"ambari.sso.manage_services" : "false",
"ambari.sso.enabled_services" : "*"
}
}
}
ambari_configuration = self.ambari_configuration_class(services_json)
self.assertIsNotNone(ambari_configuration.get_ambari_sso_configuration())
self.assertEquals("*", ambari_configuration.get_ambari_sso_configuration_value("ambari.sso.enabled_services"))
self.assertFalse(ambari_configuration.is_managing_services())
self.assertFalse(ambari_configuration.should_enable_sso("AMBARI"))
self.assertFalse(ambari_configuration.should_disable_sso("AMBARI"))
self.assertFalse(ambari_configuration.should_enable_sso("RANGER"))
self.assertFalse(ambari_configuration.should_disable_sso("RANGER"))
def testAmbariSSOConfigurationManagingServices(self):
services_json = {
"ambari-server-configuration": {
"sso-configuration": {
"ambari.sso.manage_services" : "true",
"ambari.sso.enabled_services": "AMBARI"
}
}
}
ambari_configuration = self.ambari_configuration_class(services_json)
self.assertIsNotNone(ambari_configuration.get_ambari_sso_configuration())
self.assertEquals("AMBARI", ambari_configuration.get_ambari_sso_configuration_value("ambari.sso.enabled_services"))
self.assertTrue(ambari_configuration.is_managing_services())
self.assertTrue(ambari_configuration.should_enable_sso("AMBARI"))
self.assertFalse(ambari_configuration.should_disable_sso("AMBARI"))
self.assertFalse(ambari_configuration.should_enable_sso("RANGER"))
self.assertTrue(ambari_configuration.should_disable_sso("RANGER"))
services_json = {
"ambari-server-configuration": {
"sso-configuration": {
"ambari.sso.manage_services" : "true",
"ambari.sso.enabled_services" : "AMBARI, RANGER"
}
}
}
ambari_configuration = self.ambari_configuration_class(services_json)
self.assertIsNotNone(ambari_configuration.get_ambari_sso_configuration())
self.assertEquals("AMBARI, RANGER", ambari_configuration.get_ambari_sso_configuration_value("ambari.sso.enabled_services"))
self.assertTrue(ambari_configuration.is_managing_services())
self.assertTrue(ambari_configuration.should_enable_sso("AMBARI"))
self.assertFalse(ambari_configuration.should_disable_sso("AMBARI"))
self.assertTrue(ambari_configuration.should_enable_sso("RANGER"))
self.assertFalse(ambari_configuration.should_disable_sso("RANGER"))
services_json = {
"ambari-server-configuration": {
"sso-configuration": {
"ambari.sso.manage_services" : "true",
"ambari.sso.enabled_services" : "*"
}
}
}
ambari_configuration = self.ambari_configuration_class(services_json)
self.assertIsNotNone(ambari_configuration.get_ambari_sso_configuration())
self.assertEquals("*", ambari_configuration.get_ambari_sso_configuration_value("ambari.sso.enabled_services"))
self.assertTrue(ambari_configuration.is_managing_services())
self.assertTrue(ambari_configuration.should_enable_sso("AMBARI"))
self.assertFalse(ambari_configuration.should_disable_sso("AMBARI"))
self.assertTrue(ambari_configuration.should_enable_sso("RANGER"))
self.assertFalse(ambari_configuration.should_disable_sso("RANGER"))
def testAmbariJWTProperties(self):
services_json = {
"ambari-server-properties": {
"authentication.jwt.publicKey": "/etc/ambari-server/conf/jwt-cert.pem",
"authentication.jwt.enabled": "true",
"authentication.jwt.providerUrl": "https://knox.ambari.apache.org",
"authentication.jwt.cookieName": "hadoop-jwt",
"authentication.jwt.audiences": ""
},
"ambari-server-configuration": {
}
}
ambari_configuration = self.ambari_configuration_class(services_json)
ambari_sso_details = ambari_configuration.get_ambari_sso_details()
self.assertTrue(ambari_sso_details.is_jwt_enabled())
self.assertEquals('', ambari_sso_details.get_jwt_audiences())
self.assertEquals('hadoop-jwt', ambari_sso_details.get_jwt_cookie_name())
self.assertEquals('https://knox.ambari.apache.org', ambari_sso_details.get_jwt_provider_url())
self.assertEquals('/etc/ambari-server/conf/jwt-cert.pem', ambari_sso_details.get_jwt_public_key_file())
self.assertIsNone(ambari_sso_details.get_jwt_public_key()) # This is none since the file does not exist for unit tests.
@patch("os.path.isfile", new=MagicMock(return_value=True))
@patch('__builtin__.open')
def testReadCertFileWithHeaderAndFooter(self, open_mock):
mock_file = MagicFile(
'-----BEGIN CERTIFICATE-----\n'
'MIIE3DCCA8SgAwIBAgIJAKfbOMmFyOlNMA0GCSqGSIb3DQEBBQUAMIGkMQswCQYD\n'
'................................................................\n'
'dXRpbmcxFzAVBgNVBAMTDmNsb3VkYnJlYWstcmdsMSUwIwYJKoZIhvcNAQkBFhZy\n'
'-----END CERTIFICATE-----\n')
open_mock.side_effect = [mock_file, mock_file, mock_file, mock_file]
services_json = {
"ambari-server-properties": {
"authentication.jwt.publicKey": "/etc/ambari-server/conf/jwt-cert.pem",
"authentication.jwt.enabled": "true",
"authentication.jwt.providerUrl": "https://knox.ambari.apache.org",
"authentication.jwt.cookieName": "hadoop-jwt",
"authentication.jwt.audiences": ""
},
"ambari-server-configuration": {
}
}
ambari_configuration = self.ambari_configuration_class(services_json)
ambari_sso_details = ambari_configuration.get_ambari_sso_details()
self.assertEquals('-----BEGIN CERTIFICATE-----\n'
'MIIE3DCCA8SgAwIBAgIJAKfbOMmFyOlNMA0GCSqGSIb3DQEBBQUAMIGkMQswCQYD\n'
'................................................................\n'
'dXRpbmcxFzAVBgNVBAMTDmNsb3VkYnJlYWstcmdsMSUwIwYJKoZIhvcNAQkBFhZy\n'
'-----END CERTIFICATE-----',
ambari_sso_details.get_jwt_public_key(True, False))
self.assertEquals('-----BEGIN CERTIFICATE-----'
'MIIE3DCCA8SgAwIBAgIJAKfbOMmFyOlNMA0GCSqGSIb3DQEBBQUAMIGkMQswCQYD'
'................................................................'
'dXRpbmcxFzAVBgNVBAMTDmNsb3VkYnJlYWstcmdsMSUwIwYJKoZIhvcNAQkBFhZy'
'-----END CERTIFICATE-----',
ambari_sso_details.get_jwt_public_key(True, True))
self.assertEquals('MIIE3DCCA8SgAwIBAgIJAKfbOMmFyOlNMA0GCSqGSIb3DQEBBQUAMIGkMQswCQYD\n'
'................................................................\n'
'dXRpbmcxFzAVBgNVBAMTDmNsb3VkYnJlYWstcmdsMSUwIwYJKoZIhvcNAQkBFhZy',
ambari_sso_details.get_jwt_public_key(False, False))
self.assertEquals('MIIE3DCCA8SgAwIBAgIJAKfbOMmFyOlNMA0GCSqGSIb3DQEBBQUAMIGkMQswCQYD'
'................................................................'
'dXRpbmcxFzAVBgNVBAMTDmNsb3VkYnJlYWstcmdsMSUwIwYJKoZIhvcNAQkBFhZy',
ambari_sso_details.get_jwt_public_key(False, True))
@patch("os.path.isfile", new=MagicMock(return_value=True))
@patch('__builtin__.open')
def testReadCertFileWithoutHeaderAndFooter(self, open_mock):
mock_file = MagicFile(
'MIIE3DCCA8SgAwIBAgIJAKfbOMmFyOlNMA0GCSqGSIb3DQEBBQUAMIGkMQswCQYD\n'
'................................................................\n'
'dXRpbmcxFzAVBgNVBAMTDmNsb3VkYnJlYWstcmdsMSUwIwYJKoZIhvcNAQkBFhZy\n')
open_mock.side_effect = [mock_file, mock_file, mock_file, mock_file]
services_json = {
"ambari-server-properties": {
"authentication.jwt.publicKey": "/etc/ambari-server/conf/jwt-cert.pem",
"authentication.jwt.enabled": "true",
"authentication.jwt.providerUrl": "https://knox.ambari.apache.org",
"authentication.jwt.cookieName": "hadoop-jwt",
"authentication.jwt.audiences": ""
},
"ambari-server-configuration": {
}
}
ambari_configuration = self.ambari_configuration_class(services_json)
ambari_sso_details = ambari_configuration.get_ambari_sso_details()
self.assertEquals('-----BEGIN CERTIFICATE-----\n'
'MIIE3DCCA8SgAwIBAgIJAKfbOMmFyOlNMA0GCSqGSIb3DQEBBQUAMIGkMQswCQYD\n'
'................................................................\n'
'dXRpbmcxFzAVBgNVBAMTDmNsb3VkYnJlYWstcmdsMSUwIwYJKoZIhvcNAQkBFhZy\n'
'-----END CERTIFICATE-----',
ambari_sso_details.get_jwt_public_key(True, False))
self.assertEquals('-----BEGIN CERTIFICATE-----'
'MIIE3DCCA8SgAwIBAgIJAKfbOMmFyOlNMA0GCSqGSIb3DQEBBQUAMIGkMQswCQYD'
'................................................................'
'dXRpbmcxFzAVBgNVBAMTDmNsb3VkYnJlYWstcmdsMSUwIwYJKoZIhvcNAQkBFhZy'
'-----END CERTIFICATE-----',
ambari_sso_details.get_jwt_public_key(True, True))
self.assertEquals('MIIE3DCCA8SgAwIBAgIJAKfbOMmFyOlNMA0GCSqGSIb3DQEBBQUAMIGkMQswCQYD\n'
'................................................................\n'
'dXRpbmcxFzAVBgNVBAMTDmNsb3VkYnJlYWstcmdsMSUwIwYJKoZIhvcNAQkBFhZy',
ambari_sso_details.get_jwt_public_key(False, False))
self.assertEquals('MIIE3DCCA8SgAwIBAgIJAKfbOMmFyOlNMA0GCSqGSIb3DQEBBQUAMIGkMQswCQYD'
'................................................................'
'dXRpbmcxFzAVBgNVBAMTDmNsb3VkYnJlYWstcmdsMSUwIwYJKoZIhvcNAQkBFhZy',
ambari_sso_details.get_jwt_public_key(False, True))
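# Editor's note: a minimal, hypothetical sketch (not part of the original suite) showing
# how the MagicFile mock above emulates the context-manager protocol of the built-in
# open() that the tests patch.
def _demo_magic_file():
    mock_file = MagicFile('dummy-cert-data')
    with mock_file as fp:  # __enter__ returns the mock itself; __exit__ is a no-op
        assert fp.read() == 'dummy-cert-data'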
| 46.44127 | 127 | 0.686855 | 1,356 | 14,629 | 7.09882 | 0.140118 | 0.159879 | 0.046541 | 0.058176 | 0.823187 | 0.805734 | 0.794827 | 0.762414 | 0.734469 | 0.726366 | 0 | 0.004456 | 0.171645 | 14,629 | 314 | 128 | 46.589172 | 0.789899 | 0.05824 | 0 | 0.676 | 0 | 0 | 0.312995 | 0.246677 | 0 | 0 | 0 | 0 | 0.26 | 1 | 0.052 | false | 0.008 | 0.016 | 0.008 | 0.084 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
60284e69f92a3beef43109e0a416d36bb81978c6 | 1,632 | py | Python | pi1/GPD Test.py | Rose-Hulman-Rover-Team/Rover-2019-2020 | d75a9086fa733f8a8b5240005bee058737ad82c7 | [
"MIT"
] | null | null | null | pi1/GPD Test.py | Rose-Hulman-Rover-Team/Rover-2019-2020 | d75a9086fa733f8a8b5240005bee058737ad82c7 | [
"MIT"
] | null | null | null | pi1/GPD Test.py | Rose-Hulman-Rover-Team/Rover-2019-2020 | d75a9086fa733f8a8b5240005bee058737ad82c7 | [
"MIT"
] | null | null | null |
import serial, string
import time
output = " "
lon=0
lat=0
stringVal=""
ser = serial.Serial('/dev/ttyUSB0',115200, 8, 'N',1, timeout = 1)
#file = open("Save.csv", "w")
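# Editor's note: a hedged sketch of the NMEA $GPGGA parsing that is commented out in the
# loop below. Field layout is assumed from the NMEA 0183 convention (latitude ddmm.mmmm
# in field 2, longitude dddmm.mmmm in field 4); hemisphere letters in fields 3 and 5 are
# ignored here, and the example sentence is made up.
def parse_gpgga(sentence):
    fields = sentence.split(",")
    lat = float(fields[2][:2]) + float(fields[2][2:]) / 60.0
    lon = float(fields[4][:3]) + float(fields[4][3:]) / 60.0
    return lat, lon
# e.g. parse_gpgga("$GPGGA,123519,4807.038,N,01131.000,E,1,08,0.9,545.4,M,46.9,M,,*47")
# -> (approximately 48.1173, 11.5167)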
while True:
print("----")
while output != "":
output = (ser.readline().decode())
if output[:6] == "$GPGGA":
## output=output.split(",")[2:6];
## stringVal=output[0]
## lat=float(stringVal[:2])+float(stringVal[2:])/60
## stringVal=output[2]
## lon=float(stringVal[:3])+float(stringVal[3:])/60
## print("Latitude:\t" + str(lat) + "\tLongitude:\t" + str(lon))
print(output)
#file.write(output.decode("utf-8"))
output =" "
#file.close()  # 'file' is never opened above (the open() call is commented out), so close() must stay commented too
##Previous one that worked on old pi
##import serial, string
##import time
##output = " "
##lon=0
##lat=0
##stringVal=""
##ser = serial.Serial('/dev/ttyUSB0',115200, 8, 'N', 1, timeout = 1)
###file = open("Save.csv", "w")
##while True:
## print("----")
## while output != "":
## output = (ser.readline())
## print(output)
#### if output[:6] == "$GPGGA":
#### output=output.split(",")[2:6];
#### stringVal=output[0]
#### lat=float(stringVal[:2])+float(stringVal[2:])/60
#### stringVal=output[2]
#### lon=float(stringVal[:3])+float(stringVal[3:])/60
#### print("Latitude:\t" + str(lat) + "\tLongitude:\t" + str(lon))
## #file.write(output.decode("utf-8"))
##
## output =" "
##
##
##file.close()
##
##
| 24.727273 | 78 | 0.479167 | 180 | 1,632 | 4.344444 | 0.272222 | 0.143223 | 0.076726 | 0.061381 | 0.928389 | 0.928389 | 0.928389 | 0.928389 | 0.928389 | 0.826087 | 0 | 0.044983 | 0.291667 | 1,632 | 65 | 79 | 25.107692 | 0.631488 | 0.647059 | 0 | 0.133333 | 0 | 0 | 0.059242 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.133333 | 0 | 0.133333 | 0.133333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
603adb37ff8d59149e85a4c88d5e533fe4b6bb2e | 41 | py | Python | python/testData/resolve/multiFile/keywordArgument/KeywordArgument.py | jnthn/intellij-community | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | [
"Apache-2.0"
] | 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z | python/testData/resolve/multiFile/keywordArgument/KeywordArgument.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | [
"Apache-2.0"
] | 173 | 2018-07-05T13:59:39.000Z | 2018-08-09T01:12:03.000Z | python/testData/resolve/multiFile/keywordArgument/KeywordArgument.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | [
"Apache-2.0"
] | 2 | 2020-03-15T08:57:37.000Z | 2020-04-07T04:48:14.000Z | from a import A
print(A())
# <ref>
| 6.833333 | 15 | 0.512195 | 7 | 41 | 3 | 0.714286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.317073 | 41 | 5 | 16 | 8.2 | 0.75 | 0.121951 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0.5 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 6 |
60403e0b8a33ef536edc4249522d2055c17c2538 | 23 | py | Python | build/lib/roro/__init__.py | samyabdellatif/roro | 80c90e1b87a46d5d9dff39316ec0f5f35bc1337d | [
"MIT"
] | null | null | null | build/lib/roro/__init__.py | samyabdellatif/roro | 80c90e1b87a46d5d9dff39316ec0f5f35bc1337d | [
"MIT"
] | null | null | null | build/lib/roro/__init__.py | samyabdellatif/roro | 80c90e1b87a46d5d9dff39316ec0f5f35bc1337d | [
"MIT"
] | null | null | null | from . import roroclass | 23 | 23 | 0.826087 | 3 | 23 | 6.333333 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.130435 | 23 | 1 | 23 | 23 | 0.95 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
60525d1554e6f9eb0fd83404636cdfc57b7b98c7 | 37,558 | py | Python | graph4nlp/pytorch/test/kg_completion/models_graph4nlp.py | stjordanis/graph4nlp | c6ebde32bc77d3a7b78f86a93f19b1c057963ffa | [
"Apache-2.0"
] | 1 | 2021-06-06T15:23:11.000Z | 2021-06-06T15:23:11.000Z | graph4nlp/pytorch/test/kg_completion/models_graph4nlp.py | stjordanis/graph4nlp | c6ebde32bc77d3a7b78f86a93f19b1c057963ffa | [
"Apache-2.0"
] | null | null | null | graph4nlp/pytorch/test/kg_completion/models_graph4nlp.py | stjordanis/graph4nlp | c6ebde32bc77d3a7b78f86a93f19b1c057963ffa | [
"Apache-2.0"
] | 1 | 2021-11-01T08:41:26.000Z | 2021-11-01T08:41:26.000Z | import torch
from torch.nn import functional as F, Parameter
from torch.autograd import Variable
from src.spodernet.spodernet.utils.global_config import Config
from src.spodernet.spodernet.utils.cuda_utils import CUDATimer
from torch.nn.init import xavier_normal_, xavier_uniform_
from src.spodernet.spodernet.utils.cuda_utils import CUDATimer
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
import math
import torch
from torch.nn.parameter import Parameter
from torch.nn.modules.module import Module
import torch.nn as nn
import torch.nn.init as init
import os, sys
import random
import numpy as np
from models import GraphConvolution
from models import MarginLoss
path_dir = os.getcwd()
random.seed(123)
# timer = CUDATimer()
use_cuda = torch.cuda.is_available()
FloatTensor = torch.cuda.FloatTensor if use_cuda else torch.FloatTensor
class Complex(torch.nn.Module):
def __init__(self, num_entities, num_relations, loss_name='BCELoss'):
super(Complex, self).__init__()
self.num_entities = num_entities
self.emb_e_real = torch.nn.Embedding(num_entities, Config.embedding_dim, padding_idx=0)
self.emb_e_img = torch.nn.Embedding(num_entities, Config.embedding_dim, padding_idx=0)
self.emb_rel_real = torch.nn.Embedding(num_relations, Config.embedding_dim, padding_idx=0)
self.emb_rel_img = torch.nn.Embedding(num_relations, Config.embedding_dim, padding_idx=0)
self.inp_drop = torch.nn.Dropout(Config.input_dropout)
self.loss_name = loss_name
if loss_name == 'BCELoss':
self.loss = torch.nn.BCELoss()
elif loss_name == "SoftplusLoss":
self.loss = SoftplusLoss()
elif loss_name == "SigmoidLoss":
self.loss = SigmoidLoss()
elif loss_name == "SoftMarginLoss":
self.loss = nn.SoftMarginLoss()
elif loss_name == "MSELoss":
self.loss = nn.MSELoss()
else:
raise NotImplementedError()
self.init()
def init(self):
xavier_normal_(self.emb_e_real.weight.data)
xavier_normal_(self.emb_e_img.weight.data)
xavier_normal_(self.emb_rel_real.weight.data)
xavier_normal_(self.emb_rel_img.weight.data)
def forward(self, e1, rel, X, A, e2_multi=None): # X and A haven't been used here.
e1_embedded_real = self.emb_e_real(e1).squeeze()
rel_embedded_real = self.emb_rel_real(rel).squeeze()
e1_embedded_img = self.emb_e_img(e1).squeeze()
rel_embedded_img = self.emb_rel_img(rel).squeeze()
e1_embedded_real = self.inp_drop(e1_embedded_real)
rel_embedded_real = self.inp_drop(rel_embedded_real)
e1_embedded_img = self.inp_drop(e1_embedded_img)
rel_embedded_img = self.inp_drop(rel_embedded_img)
# complex space bilinear product (equivalent to HolE)
realrealreal = torch.mm(e1_embedded_real*rel_embedded_real, self.emb_e_real.weight.transpose(1,0))
realimgimg = torch.mm(e1_embedded_real*rel_embedded_img, self.emb_e_img.weight.transpose(1,0))
imgrealimg = torch.mm(e1_embedded_img*rel_embedded_real, self.emb_e_img.weight.transpose(1,0))
imgimgreal = torch.mm(e1_embedded_img*rel_embedded_img, self.emb_e_real.weight.transpose(1,0))
pred = realrealreal + realimgimg + imgrealimg - imgimgreal
if self.loss_name in ["SoftMarginLoss"]:
pred = torch.tanh(pred)
else:
pred = torch.sigmoid(pred)
        if e2_multi is not None:
idxs_pos = torch.nonzero(e2_multi == 1.)
pred_pos = pred[idxs_pos[:, 0], idxs_pos[:, 1]]
idxs_neg = torch.nonzero(e2_multi == 0.)
pred_neg = pred[idxs_neg[:, 0], idxs_neg[:, 1]]
return pred, pred_pos, pred_neg
else:
return pred
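# Editor's note: a standalone sketch (illustration only) of the "complex space bilinear
# product" computed in Complex.forward above, written for a single (head, rel, tail)
# triple with placeholder tensors rather than the model's learned embeddings.
def complex_score(h_re, h_im, r_re, r_im, t_re, t_im):
    # Re(<h, r, conj(t)>) expanded into four real-valued products, matching the
    # realrealreal + realimgimg + imgrealimg - imgimgreal terms in forward().
    return ((h_re * r_re * t_re).sum()
            + (h_re * r_im * t_im).sum()
            + (h_im * r_re * t_im).sum()
            - (h_im * r_im * t_re).sum())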
class KGCompletionLayerBase(nn.Module):
def __init__(self):
super(KGCompletionLayerBase, self).__init__()
def forward(self, node_emb, rel_emb, list_e_r_pair_idx=None, list_e_e_pair_idx=None):
raise NotImplementedError()
class ComplexLayer(KGCompletionLayerBase):
def __init__(self,
input_dropout=0.0,
rel_emb_from_gnn=True,
num_relations=None,
embedding_dim=None,
loss_name='BCELoss'):
super(ComplexLayer, self).__init__()
self.rel_emb_from_gnn = rel_emb_from_gnn
self.inp_drop = nn.Dropout(input_dropout)
if self.rel_emb_from_gnn == False:
assert num_relations != None
assert embedding_dim != None
self.emb_rel_real = torch.nn.Embedding(num_relations, Config.embedding_dim)
self.emb_rel_img = torch.nn.Embedding(num_relations, Config.embedding_dim)
self.loss_name = loss_name
self.reset_parameters()
def reset_parameters(self):
if self.rel_emb_from_gnn == False:
nn.init.xavier_normal_(self.emb_rel_real.weight.data)
nn.init.xavier_normal_(self.emb_rel_img.weight.data)
def forward(self,
node_emb_real,
node_emb_img,
rel_emb_real=None,
rel_emb_img=None,
list_e_r_pair_idx=None,
list_e_e_pair_idx=None,
multi_label=None):
if self.rel_emb_from_gnn == False:
            assert rel_emb_real is None
            assert rel_emb_img is None
rel_emb_real = self.emb_rel_real.weight
rel_emb_img = self.emb_rel_img.weight
if list_e_r_pair_idx == None and list_e_e_pair_idx == None:
raise RuntimeError("Only one of `list_e_r_pair_idx` and `list_e_e_pair_idx` can be `None`.")
assert node_emb_real.size()[1]==rel_emb_real.size()[1]
        assert node_emb_img.size()[1]==rel_emb_img.size()[1]
if list_e_r_pair_idx != None:
ent_idxs = torch.LongTensor([x[0] for x in list_e_r_pair_idx])
rel_idxs = torch.LongTensor([x[1] for x in list_e_r_pair_idx])
selected_ent_embs_real = node_emb_real[ent_idxs].squeeze() # [L, H]. L is the length of list_e_r_pair_idx
selected_ent_embs_img = node_emb_img[ent_idxs].squeeze() # [L, H]. L is the length of list_e_r_pair_idx
selected_rel_embs_real = rel_emb_real[rel_idxs].squeeze() # [L, H]. L is the length of list_e_r_pair_idx
selected_rel_embs_img = rel_emb_img[rel_idxs].squeeze() # [L, H]. L is the length of list_e_r_pair_idx
# dropout
selected_ent_embs_real = self.inp_drop(selected_ent_embs_real)
selected_ent_embs_img = self.inp_drop(selected_ent_embs_img)
selected_rel_embs_real = self.inp_drop(selected_rel_embs_real)
selected_rel_embs_img = self.inp_drop(selected_rel_embs_img)
# complex space bilinear product (equivalent to HolE)
realrealreal = torch.mm(selected_ent_embs_real * selected_rel_embs_real,
node_emb_real.transpose(1, 0))
realimgimg = torch.mm(selected_ent_embs_real * selected_rel_embs_img,
node_emb_img.transpose(1, 0))
imgrealimg = torch.mm(selected_ent_embs_img * selected_rel_embs_real,
node_emb_img.transpose(1, 0))
imgimgreal = torch.mm(selected_ent_embs_img * selected_rel_embs_img,
node_emb_real.transpose(1, 0))
pred = realrealreal + realimgimg + imgrealimg - imgimgreal
elif list_e_e_pair_idx != None:
# ent_head_idxs = torch.LongTensor([x[0] for x in list_e_e_pair_idx])
# ent_tail_idxs = torch.LongTensor([x[1] for x in list_e_e_pair_idx])
#
# selected_ent_head_embs = node_emb[ent_head_idxs].squeeze() # [L, H]. L is the length of list_e_e_pair_idx
# selected_ent_tail_embs = rel_emb[ent_tail_idxs].squeeze() # [L, H]. L is the length of list_e_e_pair_idx
#
# # dropout
# selected_ent_head_embs = self.inp_drop(selected_ent_head_embs)
# selected_ent_tail_embs = self.inp_drop(selected_ent_tail_embs)
#
# logits = torch.mm(selected_ent_head_embs*selected_ent_tail_embs,
# rel_emb.transpose(1, 0))
raise NotImplementedError
if self.loss_name in ["SoftMarginLoss"]:
pred = torch.tanh(pred)
else:
pred = torch.sigmoid(pred)
        if multi_label is not None:
idxs_pos = torch.nonzero(multi_label == 1.)
pred_pos = pred[idxs_pos[:, 0], idxs_pos[:, 1]]
idxs_neg = torch.nonzero(multi_label == 0.)
pred_neg = pred[idxs_neg[:, 0], idxs_neg[:, 1]]
return pred, pred_pos, pred_neg
else:
return pred
class ComplexGNN(torch.nn.Module):
def __init__(self, num_entities, num_relations, loss_name='BCELoss'):
super(ComplexGNN, self).__init__()
self.num_entities = num_entities
self.emb_e_real = torch.nn.Embedding(num_entities, Config.embedding_dim)
self.emb_e_img = torch.nn.Embedding(num_entities, Config.embedding_dim)
self.gc1 = GraphConvolution(Config.embedding_dim, Config.gc1_emb_size, num_relations)
self.gc2 = GraphConvolution(Config.gc1_emb_size, Config.embedding_dim, num_relations)
self.bn1 = torch.nn.BatchNorm1d(Config.gc1_emb_size)
self.bn2 = torch.nn.BatchNorm1d(Config.embedding_dim)
self.gc3 = GraphConvolution(Config.embedding_dim, Config.gc1_emb_size, num_relations)
self.gc4 = GraphConvolution(Config.gc1_emb_size, Config.embedding_dim, num_relations)
self.bn3 = torch.nn.BatchNorm1d(Config.gc1_emb_size)
self.bn4 = torch.nn.BatchNorm1d(Config.embedding_dim)
# self.emb_rel_real = torch.nn.Embedding(num_relations, Config.embedding_dim)
# self.emb_rel_img = torch.nn.Embedding(num_relations, Config.embedding_dim)
# self.inp_drop = torch.nn.Dropout(Config.input_dropout)
self.complex_layer = ComplexLayer(rel_emb_from_gnn=False,
num_relations=num_relations,
embedding_dim=Config.embedding_dim,
loss_name=loss_name)
self.loss_name = loss_name
if loss_name == 'BCELoss':
self.loss = torch.nn.BCELoss()
elif loss_name == "SoftplusLoss":
self.loss = SoftplusLoss()
elif loss_name == "SigmoidLoss":
self.loss = SigmoidLoss()
elif loss_name == "SoftMarginLoss":
self.loss = nn.SoftMarginLoss()
elif loss_name == "MSELoss":
self.loss = nn.MSELoss()
else:
raise NotImplementedError()
self.init()
# def reset_parameters(self):
def init(self):
xavier_normal_(self.emb_e_real.weight.data)
xavier_normal_(self.emb_e_img.weight.data)
# xavier_normal_(self.emb_rel_real.weight.data)
# xavier_normal_(self.emb_rel_img.weight.data)
    def forward(self, e1, rel, X, A, e2_multi=None):  # X (node ids) and A (adjacency) feed the GCN encoders here
e1_embedded_real = self.emb_e_real(X)
e1_embedded_img = self.emb_e_img(X)
x_real = self.gc1(e1_embedded_real, A)
x_real = self.bn1(x_real)
x_real = torch.tanh(x_real)
x_real = torch.dropout(x_real, Config.dropout_rate, train=self.training)
x_real = self.bn2(self.gc2(x_real, A))
x_real = torch.tanh(x_real)
e1_embedded_all_real = torch.dropout(x_real, Config.dropout_rate, train=self.training)
x_img = self.gc3(e1_embedded_img, A)
x_img = self.bn3(x_img)
x_img = torch.tanh(x_img)
x_img = torch.dropout(x_img, Config.dropout_rate, train=self.training)
x_img = self.bn4(self.gc4(x_img, A))
x_img = torch.tanh(x_img)
e1_embedded_all_img = torch.dropout(x_img, Config.dropout_rate, train=self.training)
list_e_r_pair_idx = list(zip(e1.squeeze().tolist(), rel.squeeze().tolist()))
pred = self.complex_layer(e1_embedded_all_real,
e1_embedded_all_img,
list_e_r_pair_idx = list_e_r_pair_idx,
multi_label=e2_multi)
return pred
class TransE(nn.Module):
def __init__(self,
num_entities=None,
num_relations=None,
p_norm=1,
loss_name='BCELoss'
):
super(TransE, self).__init__()
self.p_norm = p_norm
self.ent_emb = nn.Embedding(num_entities, Config.embedding_dim)
self.rel_emb = nn.Embedding(num_relations, Config.embedding_dim)
self.loss_name = loss_name
if loss_name == 'BCELoss':
self.loss = torch.nn.BCELoss()
elif loss_name == "SoftplusLoss":
self.loss = SoftplusLoss()
elif loss_name == "SigmoidLoss":
self.loss = SigmoidLoss()
elif loss_name == "SoftMarginLoss":
self.loss = nn.SoftMarginLoss()
elif loss_name == "MSELoss":
self.loss = nn.MSELoss()
else:
raise NotImplementedError()
self.init()
def init(self):
nn.init.xavier_normal_(self.ent_emb.weight.data)
nn.init.xavier_normal_(self.rel_emb.weight.data)
def forward(self, e1, rel, X, A, e2_multi=None):
e1_embedded = self.ent_emb(e1)
rel_embedded = self.rel_emb(rel)
e1_embedded = e1_embedded.squeeze()
rel_embedded = rel_embedded.squeeze()
node_emb = self.ent_emb.weight
e1_embedded = F.normalize(e1_embedded, 2, -1)
rel_embedded = F.normalize(rel_embedded, 2, -1)
node_emb = F.normalize(node_emb, 2, -1)
head_add_rel = e1_embedded + rel_embedded # [L, H]
head_add_rel = head_add_rel.view(head_add_rel.size()[0], 1, head_add_rel.size()[1]) # [L, 1, H]
head_add_rel = head_add_rel.repeat(1, node_emb.size()[0], 1)
node_emb = node_emb.view(1, node_emb.size()[0], node_emb.size()[1]) # [1, N, H]
node_emb = node_emb.repeat(head_add_rel.size()[0], 1, 1)
result = head_add_rel - node_emb # head+rel-tail [L, N, H]
# logits = torch.softmax(torch.norm(result, self.p_norm, dim=2),dim=-1) # [L, N]
if self.loss_name in ["SoftMarginLoss"]:
pred = torch.norm(result, self.p_norm, dim=2)
else:
pred = torch.softmax(torch.norm(result, self.p_norm, dim=2),dim=-1) # [L, N]
        if e2_multi is not None:
idxs_pos = torch.nonzero(e2_multi == 1.)
pred_pos = pred[idxs_pos[:, 0], idxs_pos[:, 1]]
idxs_neg = torch.nonzero(e2_multi == 0.)
pred_neg = pred[idxs_neg[:, 0], idxs_neg[:, 1]]
# return pred, pred_pos, pred_neg
pred_pos = pred_pos.repeat(pred_neg.size()[0] // pred_pos.size()[0])
return pred, pred_pos, pred_neg[:pred_pos.size()[0]]
else:
return pred
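# Editor's note: a minimal sketch of the TransE scoring rule ||h + r - t||_p used by
# TransE.forward above, shown for a single triple; the tensors here are placeholders,
# not learned embeddings from the model.
def transe_distance(h, r, t, p_norm=1):
    h, r, t = (F.normalize(x, 2, -1) for x in (h, r, t))  # same L2 normalisation as forward()
    return torch.norm(h + r - t, p_norm, dim=-1)  # smaller distance = more plausible triple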
class DistMult(torch.nn.Module):
def __init__(self, num_entities, num_relations, loss_name='BCELoss'):
super(DistMult, self).__init__()
self.emb_e = torch.nn.Embedding(num_entities, Config.embedding_dim)
self.emb_rel = torch.nn.Embedding(num_relations, Config.embedding_dim)
self.inp_drop = torch.nn.Dropout(Config.input_dropout)
self.loss_name = loss_name
if loss_name == 'BCELoss':
self.loss = torch.nn.BCELoss()
elif loss_name == "SoftplusLoss":
self.loss = SoftplusLoss()
elif loss_name == "SigmoidLoss":
self.loss = SigmoidLoss()
elif loss_name == "SoftMarginLoss":
self.loss = nn.SoftMarginLoss()
elif loss_name == "MSELoss":
self.loss = nn.MSELoss()
else:
raise NotImplementedError()
self.init()
def init(self):
xavier_normal_(self.emb_e.weight.data)
xavier_normal_(self.emb_rel.weight.data)
def forward(self, e1, rel, X, A, e2_multi=None): # X and A haven't been used here.
e1_embedded= self.emb_e(e1)
rel_embedded= self.emb_rel(rel)
e1_embedded = e1_embedded.squeeze()
rel_embedded = rel_embedded.squeeze()
e1_embedded = self.inp_drop(e1_embedded)
rel_embedded = self.inp_drop(rel_embedded)
pred = torch.mm(e1_embedded*rel_embedded, self.emb_e.weight.transpose(1,0))
if self.loss_name in ["SoftMarginLoss"]:
pred = torch.tanh(pred)
else:
pred = torch.sigmoid(pred)
        if e2_multi is not None:
idxs_pos = torch.nonzero(e2_multi == 1.)
pred_pos = pred[idxs_pos[:, 0], idxs_pos[:, 1]]
idxs_neg = torch.nonzero(e2_multi == 0.)
pred_neg = pred[idxs_neg[:, 0], idxs_neg[:, 1]]
return pred, pred_pos, pred_neg
else:
return pred
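# Editor's note: illustration only -- the trilinear DistMult score <h, r, t> computed in
# DistMult.forward above, shown for a single triple with placeholder tensors.
def distmult_score(h, r, t):
    return (h * r * t).sum(-1)  # equivalent to one entry of mm(h * r, E.T) in forward()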
class SoftplusLoss(nn.Module):
def __init__(self, adv_temperature=None):
super(SoftplusLoss, self).__init__()
self.criterion = nn.Softplus()
if adv_temperature != None:
self.adv_temperature = nn.Parameter(torch.Tensor([adv_temperature]))
self.adv_temperature.requires_grad = False
self.adv_flag = True
else:
self.adv_flag = False
def get_weights(self, n_score):
return F.softmax(n_score * self.adv_temperature, dim=-1).detach()
def forward(self, p_score, n_score):
if self.adv_flag:
return (self.criterion(-p_score).mean() + (self.get_weights(n_score) * self.criterion(n_score)).sum(
dim=-1).mean()) / 2
else:
return (self.criterion(-p_score).mean() + self.criterion(n_score).mean()) / 2
def predict(self, p_score, n_score):
score = self.forward(p_score, n_score)
return score.cpu().data.numpy()
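# Editor's note: a hedged usage sketch for SoftplusLoss; the scores below are arbitrary
# toy numbers, not model outputs.
def _demo_softplus_loss():
    p_score = torch.tensor([2.0, 1.5])   # positives: the loss pushes these up
    n_score = torch.tensor([-1.0, 0.3])  # negatives: the loss pushes these down
    return SoftplusLoss()(p_score, n_score)  # (mean softplus(-p) + mean softplus(n)) / 2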
class SigmoidLoss(nn.Module):
def __init__(self, adv_temperature = None):
super(SigmoidLoss, self).__init__()
self.criterion = nn.LogSigmoid()
if adv_temperature != None:
self.adv_temperature = nn.Parameter(torch.Tensor([adv_temperature]))
self.adv_temperature.requires_grad = False
self.adv_flag = True
else:
self.adv_flag = False
def get_weights(self, n_score):
return F.softmax(n_score * self.adv_temperature, dim = -1).detach()
def forward(self, p_score, n_score):
if self.adv_flag:
return -(self.criterion(p_score).mean() + (self.get_weights(n_score) * self.criterion(-n_score)).sum(dim = -1).mean()) / 2
else:
return -(self.criterion(p_score).mean() + self.criterion(-n_score).mean()) / 2
def predict(self, p_score, n_score):
score = self.forward(p_score, n_score)
return score.cpu().data.numpy()
class DistMultLayer(KGCompletionLayerBase):
r"""Specific class for knowledge graph completion task.
DistMult from paper `Embedding entities and relations for learning and
inference in knowledge bases <https://arxiv.org/pdf/1412.6575.pdf>`__.
.. math::
f(s, r, o) & = e_s^T R_r e_o
Parameters
----------
input_dropout: float
Dropout for node_emb and rel_emb. Default: 0.0
rel_emb_from_gnn: bool
If `rel_emb` is computed from GNN, rel_emb_from_gnn is set to `True`.
Else, rel_emb is initialized as nn.Embedding randomly. Default: `True`.
num_relations: int
Number of relations. `num_relations` is needed if rel_emb_from_gnn==True.
Default: `None`.
embedding_dim: int
Dimension of the rel_emb. `embedding_dim` is needed if rel_emb_from_gnn==True.
Default: `0`.
loss_name: str
        The loss type selected for the KG completion task.
"""
def __init__(self,
input_dropout=0.0,
rel_emb_from_gnn=True,
num_relations=None,
embedding_dim=None,
loss_name='BCELoss'):
super(DistMultLayer, self).__init__()
self.rel_emb_from_gnn = rel_emb_from_gnn
self.inp_drop = nn.Dropout(input_dropout)
if self.rel_emb_from_gnn == False:
assert num_relations != None
assert embedding_dim != None
self.rel_emb = nn.Embedding(num_relations, embedding_dim)
self.loss_name = loss_name
self.reset_parameters()
def reset_parameters(self):
if self.rel_emb_from_gnn == False:
nn.init.xavier_normal_(self.rel_emb.weight.data)
def forward(self,
node_emb,
rel_emb=None,
list_e_r_pair_idx=None,
list_e_e_pair_idx=None,
multi_label=None):
r"""
Parameters
----------
node_emb: tensor [N,H]
N: number of nodes in the whole KG graph
H: length of the node embeddings (entity embeddings)
rel_emb: tensor [N_r,H]
N_r: number of relations in the whole KG graph
H: length of the relation embeddings
list_e_r_pair_idx: list of tuple
a list of index of head entities and relations that needs
predicting the tail entities between them. Default: `None`
list_e_e_pair_idx: list of tuple
a list of index of head entities and tail entities that needs
predicting the relations between them. Default: `None`.
Only one of `list_e_r_pair_idx` and `list_e_e_pair_idx` can be `None`.
multi_label: tensor [L, N]
multi_label is a binary matrix. Each element can be equal to 1 for true label
and 0 for false label (or 1 for true label, -1 for false label).
multi_label[i] represents a multi-label of a given head-rel pair or head-tail pair.
L is the length of list_e_r_pair_idx, list_e_e_pair_idx or batch size.
N: number of nodes in the whole KG graph.
Returns
-------
        logit tensor: [L, N] The score logits over all N candidate nodes for each of the L queried pairs.
"""
if self.rel_emb_from_gnn == False:
            assert rel_emb is None
rel_emb = self.rel_emb.weight
if list_e_r_pair_idx == None and list_e_e_pair_idx == None:
raise RuntimeError("Only one of `list_e_r_pair_idx` and `list_e_e_pair_idx` can be `None`.")
assert node_emb.size()[1]==rel_emb.size()[1]
if list_e_r_pair_idx != None:
ent_idxs = torch.LongTensor([x[0] for x in list_e_r_pair_idx])
rel_idxs = torch.LongTensor([x[1] for x in list_e_r_pair_idx])
selected_ent_embs = node_emb[ent_idxs].squeeze() # [L, H]. L is the length of list_e_r_pair_idx
selected_rel_embs = rel_emb[rel_idxs].squeeze() # [L, H]. L is the length of list_e_r_pair_idx
# dropout
selected_ent_embs = self.inp_drop(selected_ent_embs)
selected_rel_embs = self.inp_drop(selected_rel_embs)
logits = torch.mm(selected_ent_embs * selected_rel_embs,
node_emb.transpose(1, 0))
elif list_e_e_pair_idx != None:
ent_head_idxs = torch.LongTensor([x[0] for x in list_e_e_pair_idx])
ent_tail_idxs = torch.LongTensor([x[1] for x in list_e_e_pair_idx])
selected_ent_head_embs = node_emb[ent_head_idxs].squeeze() # [L, H]. L is the length of list_e_e_pair_idx
selected_ent_tail_embs = rel_emb[ent_tail_idxs].squeeze() # [L, H]. L is the length of list_e_e_pair_idx
# dropout
selected_ent_head_embs = self.inp_drop(selected_ent_head_embs)
selected_ent_tail_embs = self.inp_drop(selected_ent_tail_embs)
logits = torch.mm(selected_ent_head_embs*selected_ent_tail_embs,
rel_emb.transpose(1, 0))
if self.loss_name in ['SoftMarginLoss']:
# target labels are numbers selecting from -1 and 1.
pred = torch.tanh(logits)
else:
# target labels are numbers selecting from 0 and 1.
pred = torch.sigmoid(logits)
        if multi_label is not None:
idxs_pos = torch.nonzero(multi_label == 1.)
pred_pos = pred[idxs_pos[:, 0], idxs_pos[:, 1]]
idxs_neg = torch.nonzero(multi_label == 0.)
pred_neg = pred[idxs_neg[:, 0], idxs_neg[:, 1]]
return pred, pred_pos, pred_neg
else:
return pred
class DistMultGNN(torch.nn.Module):
def __init__(self, num_entities, num_relations, loss_name='BCELoss'):
super(DistMultGNN, self).__init__()
self.emb_e = torch.nn.Embedding(num_entities, Config.init_emb_size)
self.gc1 = GraphConvolution(Config.init_emb_size, Config.gc1_emb_size, num_relations)
self.gc2 = GraphConvolution(Config.gc1_emb_size, Config.embedding_dim, num_relations)
# self.emb_rel = torch.nn.Embedding(num_relations, Config.embedding_dim)
self.loss_name = loss_name
if loss_name == 'BCELoss': # Multi-Class Loss (Binary Cross Entropy Loss)
self.loss = torch.nn.BCELoss()
elif loss_name == "SoftplusLoss":
self.loss = SoftplusLoss()
elif loss_name == "SigmoidLoss":
self.loss = SigmoidLoss()
elif loss_name == "SoftMarginLoss":
self.loss = nn.SoftMarginLoss()
elif loss_name == "MSELoss":
self.loss = nn.MSELoss()
else:
raise NotImplementedError()
self.register_parameter('b', Parameter(torch.zeros(num_entities)))
self.fc = torch.nn.Linear(Config.embedding_dim*Config.channels,Config.embedding_dim)
self.bn3 = torch.nn.BatchNorm1d(Config.gc1_emb_size)
self.bn4 = torch.nn.BatchNorm1d(Config.embedding_dim)
self.dismult_layer = DistMultLayer(rel_emb_from_gnn=False,
num_relations=num_relations,
embedding_dim=Config.embedding_dim,
loss_name=self.loss_name)
print(num_entities, num_relations)
self.init()
def init(self):
xavier_normal_(self.emb_e.weight.data)
# xavier_normal_(self.emb_rel.weight.data)
xavier_normal_(self.gc1.weight.data)
xavier_normal_(self.gc2.weight.data)
def forward(self, e1, rel, X, A, e2_multi=None):
emb_initial = self.emb_e(X)
x = self.gc1(emb_initial, A)
x = self.bn3(x)
x = torch.tanh(x)
x = torch.dropout(x, Config.dropout_rate, train=self.training)
x = self.bn4(self.gc2(x, A))
e1_embedded_all = torch.tanh(x)
e1_embedded_all = torch.dropout(e1_embedded_all, Config.dropout_rate, train=self.training)
# e1_embedded = e1_embedded_all[e1]
# rel_embedded = self.emb_rel(rel)
list_e_r_pair_idx = list(zip(e1.squeeze().tolist(), rel.squeeze().tolist()))
# TODO: emb_rel from gnn
pred = self.dismult_layer(e1_embedded_all, list_e_r_pair_idx = list_e_r_pair_idx, multi_label=e2_multi)
# pred = self.dismult_layer(e1_embedded_all, self.emb_rel.weight, list_e_r_pair_idx, multi_label=e2_multi)
return pred
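# Editor's note: illustration only -- how the (entity, relation) index pairs consumed by
# DistMultLayer are built from batched id tensors in the forward() above, using toy ids
# rather than real data.
def _demo_pair_idx():
    e1 = torch.tensor([[0], [3]])   # assumed batch of head-entity ids
    rel = torch.tensor([[1], [2]])  # assumed batch of relation ids
    return list(zip(e1.squeeze().tolist(), rel.squeeze().tolist()))  # [(0, 1), (3, 2)]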
class TransELayer(KGCompletionLayerBase):
r"""Specific class for knowledge graph completion task.
TransE from paper `Translating Embeddings for Modeling
Multi-relational Data <https://papers.nips.cc/paper/5071
-translating-embeddings-for-modeling-multi-relational-data.pdf>`__.
.. math::
f(s, r, o) & = ||e_s + w_r - e_o||_p
Parameters
----------
p_norm: int
Default: 1
rel_emb_from_gnn: bool
If `rel_emb` is computed from GNN, rel_emb_from_gnn is set to `True`.
Else, rel_emb is initialized as nn.Embedding randomly. Default: `True`.
num_relations: int
Number of relations. `num_relations` is needed if rel_emb_from_gnn==True.
Default: `None`.
embedding_dim: int
Dimension of the rel_emb. `embedding_dim` is needed if rel_emb_from_gnn==True.
Default: `0`.
loss_name: str
        The loss type selected for the KG completion task.
"""
def __init__(self,
p_norm=1,
rel_emb_from_gnn=True,
num_relations=None,
embedding_dim=None,
loss_name='BCELoss'):
super(TransELayer, self).__init__()
self.p_norm = p_norm
self.rel_emb_from_gnn = rel_emb_from_gnn
if self.rel_emb_from_gnn == False:
assert num_relations != None
assert embedding_dim != None
self.rel_emb = nn.Embedding(num_relations, embedding_dim)
self.reset_parameters()
self.loss_name = loss_name
def reset_parameters(self):
if self.rel_emb_from_gnn == False:
nn.init.xavier_normal_(self.rel_emb.weight.data)
def forward(self,
node_emb,
rel_emb=None,
list_e_r_pair_idx=None,
list_e_e_pair_idx=None,
multi_label=None):
r"""
Parameters
----------
node_emb: tensor [N,H]
N: number of nodes in the whole KG graph
H: length of the node embeddings (entity embeddings)
rel_emb: tensor [N_r,H]
            N_r: number of relations in the whole KG graph
H: length of the relation embeddings
list_e_r_pair_idx: list of tuple
a list of index of head entities and relations that needs
predicting the tail entities between them. Default: `None`
list_e_e_pair_idx: list of tuple
a list of index of head entities and tail entities that needs
predicting the relations between them. Default: `None`.
Only one of `list_e_r_pair_idx` and `list_e_e_pair_idx` can be `None`.
multi_label: tensor [L, N]
multi_label is a binary matrix. Each element can be equal to 1 for true label
and 0 for false label (or 1 for true label, -1 for false label).
multi_label[i] represents a multi-label of a given head-rel pair or head-tail pair.
L is the length of list_e_r_pair_idx, list_e_e_pair_idx or batch size.
N: number of nodes in the whole KG graph.
Returns
-------
        logit tensor: [L, N] The score logits over all N candidate nodes for each of the L queried pairs.
"""
if self.rel_emb_from_gnn == False:
            assert rel_emb is None
rel_emb = self.rel_emb.weight
if list_e_r_pair_idx == None and list_e_e_pair_idx == None:
raise RuntimeError("Only one of `list_e_r_pair_idx` and `list_e_e_pair_idx` can be `None`.")
assert node_emb.size()[1] == rel_emb.size()[1]
if list_e_r_pair_idx != None:
ent_idxs = torch.LongTensor([x[0] for x in list_e_r_pair_idx])
rel_idxs = torch.LongTensor([x[1] for x in list_e_r_pair_idx])
selected_ent_embs = node_emb[ent_idxs].squeeze() # [L, H]. L is the length of list_e_r_pair_idx
selected_rel_embs = rel_emb[rel_idxs].squeeze() # [L, H]. L is the length of list_e_r_pair_idx
selected_ent_embs = F.normalize(selected_ent_embs, 2, -1)
selected_rel_embs = F.normalize(selected_rel_embs, 2, -1)
node_emb = F.normalize(node_emb, 2, -1)
head_add_rel = selected_ent_embs + selected_rel_embs # [L, H]
head_add_rel = head_add_rel.view(head_add_rel.size()[0], 1, head_add_rel.size()[1]) # [L, 1, H]
head_add_rel = head_add_rel.repeat(1, node_emb.size()[0], 1)
node_emb = node_emb.view(1, node_emb.size()[0], node_emb.size()[1]) # [1, N, H]
node_emb = node_emb.repeat(head_add_rel.size()[0], 1, 1)
result = head_add_rel - node_emb # head+rel-tail [L, N, H]
elif list_e_e_pair_idx != None:
ent_head_idxs = torch.LongTensor([x[0] for x in list_e_e_pair_idx])
ent_tail_idxs = torch.LongTensor([x[1] for x in list_e_e_pair_idx])
selected_ent_head_embs = node_emb[ent_head_idxs].squeeze() # [L, H]. L is the length of list_e_e_pair_idx
selected_ent_tail_embs = rel_emb[ent_tail_idxs].squeeze() # [L, H]. L is the length of list_e_e_pair_idx
selected_ent_head_embs = F.normalize(selected_ent_head_embs, 2, -1)
selected_ent_tail_embs = F.normalize(selected_ent_tail_embs, 2, -1)
rel_emb = F.normalize(rel_emb, 2, -1)
head_sub_tail = selected_ent_head_embs - selected_ent_tail_embs # [L, H]
head_sub_tail = head_sub_tail.view(head_sub_tail.size()[0], 1, head_sub_tail.size()[1]) # [L, 1, H]
head_sub_tail = head_sub_tail.repeat(1, rel_emb.size()[0], 1) # [L, N, H]
rel_emb = rel_emb.view(1, rel_emb.size()[0], rel_emb.size()[1]) # [1, N, H]
rel_emb = rel_emb.repeat(head_sub_tail.size()[0], 1, 1) # [L, N, H]
result = head_sub_tail + rel_emb # head-tail+rel [L, N, H]
if self.loss_name in ['SoftMarginLoss', 'MarginLoss']:
# target labels are numbers selecting from -1 and 1.
pred = torch.norm(result, self.p_norm, dim=2) # TODO
else:
pred = torch.softmax(torch.norm(result, self.p_norm, dim=2), dim=-1) # logits [L, N]
        if multi_label is not None:
idxs_pos = torch.nonzero(multi_label == 1.)
pred_pos = pred[idxs_pos[:, 0], idxs_pos[:, 1]]
idxs_neg = torch.nonzero(multi_label == 0.)
pred_neg = pred[idxs_neg[:, 0], idxs_neg[:, 1]]
return pred, pred_pos, pred_neg
else:
return pred
class TransEGNN(torch.nn.Module):
def __init__(self, num_entities, num_relations, loss_name = 'BCELoss'):
super(TransEGNN, self).__init__()
self.emb_e = torch.nn.Embedding(num_entities, Config.init_emb_size)
self.gc1 = GraphConvolution(Config.init_emb_size, Config.gc1_emb_size, num_relations)
self.gc2 = GraphConvolution(Config.gc1_emb_size, Config.embedding_dim, num_relations)
# self.emb_rel = torch.nn.Embedding(num_relations, Config.embedding_dim)
# self.loss = torch.nn.BCELoss()
self.register_parameter('b', Parameter(torch.zeros(num_entities)))
self.fc = torch.nn.Linear(Config.embedding_dim*Config.channels,Config.embedding_dim)
self.bn3 = torch.nn.BatchNorm1d(Config.gc1_emb_size)
self.bn4 = torch.nn.BatchNorm1d(Config.embedding_dim)
# self.loss_name = "SoftplusLoss" # similar to Pairwise Hinge Loss
# self.loss = SoftplusLoss()
# self.loss_name = "SigmoidLoss" # -> Pointwise Logistic Loss
# self.loss = SigmoidLoss()
# self.loss_name = "MSELoss" # -> Pointwise Square Error Loss
# self.loss = torch.nn.MSELoss()
# self.loss_name = "BCELoss" # -> Multi-Class Loss (Binary Cross Entropy Loss)
# self.loss = torch.nn.BCELoss()
# self.loss_name = "MarginLoss" # TODO
# self.loss = MarginLoss()
self.loss_name = loss_name
if loss_name == 'BCELoss':
# self.loss_name = "BCELoss" # -> Multi-Class Loss (Binary Cross Entropy Loss)
self.loss = torch.nn.BCELoss()
elif loss_name == "SoftplusLoss":
self.loss = SoftplusLoss()
elif loss_name == "SigmoidLoss":
self.loss = SigmoidLoss()
elif loss_name == "SoftMarginLoss":
self.loss = nn.SoftMarginLoss()
elif loss_name == "MSELoss":
self.loss = nn.MSELoss()
else:
raise NotImplementedError()
self.transe_layer = TransELayer(rel_emb_from_gnn=False,
num_relations=num_relations,
embedding_dim=Config.embedding_dim,
loss_name=self.loss_name)
print(num_entities, num_relations)
self.init()
def init(self):
xavier_normal_(self.emb_e.weight.data)
# xavier_normal_(self.emb_rel.weight.data)
xavier_normal_(self.gc1.weight.data)
xavier_normal_(self.gc2.weight.data)
def forward(self, e1, rel, X, A, e2_multi=None):
emb_initial = self.emb_e(X)
x = self.gc1(emb_initial, A)
x = self.bn3(x)
x = torch.tanh(x)
x = torch.dropout(x, Config.dropout_rate, train=self.training)
x = self.bn4(self.gc2(x, A))
e1_embedded_all = torch.tanh(x)
e1_embedded_all = torch.dropout(e1_embedded_all, Config.dropout_rate, train=self.training)
list_e_r_pair_idx = list(zip(e1.squeeze().tolist(), rel.squeeze().tolist()))
pred = self.transe_layer(e1_embedded_all, list_e_r_pair_idx = list_e_r_pair_idx, multi_label=e2_multi)
# pred = self.transe_layer(e1_embedded_all, self.emb_rel.weight, list_e_r_pair_idx, multi_label=e2_multi)
return pred | 41.363436 | 134 | 0.625406 | 5,224 | 37,558 | 4.18549 | 0.056087 | 0.023325 | 0.012074 | 0.020123 | 0.889001 | 0.861056 | 0.831191 | 0.80375 | 0.777956 | 0.750972 | 0 | 0.012339 | 0.272831 | 37,558 | 908 | 135 | 41.363436 | 0.788254 | 0.194552 | 0 | 0.693632 | 0 | 0 | 0.022834 | 0 | 0 | 0 | 0 | 0.002203 | 0.024096 | 1 | 0.063683 | false | 0 | 0.032702 | 0.003442 | 0.156627 | 0.003442 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
6058840844b73128e58dd27ec5845ab72b1c2e4e | 5,171 | py | Python | tests/unit/recommenders/models/test_newsrec_model.py | enowy/Recommenders | 60033231b9167438032843c23158c0c776856e0e | [
"MIT"
] | 10,147 | 2019-05-07T07:24:36.000Z | 2022-03-31T21:16:41.000Z | tests/unit/recommenders/models/test_newsrec_model.py | enowy/Recommenders | 60033231b9167438032843c23158c0c776856e0e | [
"MIT"
] | 750 | 2019-05-07T07:34:33.000Z | 2022-03-31T10:11:55.000Z | tests/unit/recommenders/models/test_newsrec_model.py | enowy/Recommenders | 60033231b9167438032843c23158c0c776856e0e | [
"MIT"
] | 1,983 | 2019-05-07T08:56:48.000Z | 2022-03-31T16:43:00.000Z | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import os
import pytest
try:
from recommenders.models.deeprec.deeprec_utils import download_deeprec_resources
from recommenders.models.newsrec.io.mind_all_iterator import MINDAllIterator
from recommenders.models.newsrec.io.mind_iterator import MINDIterator
from recommenders.models.newsrec.newsrec_utils import prepare_hparams
from recommenders.models.newsrec.models.lstur import LSTURModel
from recommenders.models.newsrec.models.naml import NAMLModel
from recommenders.models.newsrec.models.npa import NPAModel
from recommenders.models.newsrec.models.nrms import NRMSModel
except ImportError:
pass # skip this import if we are in cpu environment
@pytest.mark.gpu
def test_nrms_component_definition(mind_resource_path):
wordEmb_file = os.path.join(mind_resource_path, "utils", "embedding.npy")
userDict_file = os.path.join(mind_resource_path, "utils", "uid2index.pkl")
wordDict_file = os.path.join(mind_resource_path, "utils", "word_dict.pkl")
yaml_file = os.path.join(mind_resource_path, "utils", r"nrms.yaml")
if not os.path.exists(yaml_file):
download_deeprec_resources(
r"https://recodatasets.z20.web.core.windows.net/newsrec/",
os.path.join(mind_resource_path, "utils"),
"MINDdemo_utils.zip",
)
hparams = prepare_hparams(
yaml_file,
wordEmb_file=wordEmb_file,
wordDict_file=wordDict_file,
userDict_file=userDict_file,
epochs=1,
)
iterator = MINDIterator
model = NRMSModel(hparams, iterator)
assert model.model is not None
assert model.scorer is not None
assert model.loss is not None
assert model.train_optimizer is not None
@pytest.mark.gpu
def test_naml_component_definition(mind_resource_path):
wordEmb_file = os.path.join(mind_resource_path, "utils", "embedding_all.npy")
userDict_file = os.path.join(mind_resource_path, "utils", "uid2index.pkl")
wordDict_file = os.path.join(mind_resource_path, "utils", "word_dict_all.pkl")
vertDict_file = os.path.join(mind_resource_path, "utils", "vert_dict.pkl")
subvertDict_file = os.path.join(mind_resource_path, "utils", "subvert_dict.pkl")
yaml_file = os.path.join(mind_resource_path, "utils", r"naml.yaml")
if not os.path.exists(yaml_file):
download_deeprec_resources(
r"https://recodatasets.z20.web.core.windows.net/newsrec/",
os.path.join(mind_resource_path, "utils"),
"MINDdemo_utils.zip",
)
hparams = prepare_hparams(
yaml_file,
wordEmb_file=wordEmb_file,
wordDict_file=wordDict_file,
userDict_file=userDict_file,
vertDict_file=vertDict_file,
subvertDict_file=subvertDict_file,
epochs=1,
)
iterator = MINDAllIterator
model = NAMLModel(hparams, iterator)
assert model.model is not None
assert model.scorer is not None
assert model.loss is not None
assert model.train_optimizer is not None
@pytest.mark.gpu
def test_npa_component_definition(mind_resource_path):
wordEmb_file = os.path.join(mind_resource_path, "utils", "embedding.npy")
userDict_file = os.path.join(mind_resource_path, "utils", "uid2index.pkl")
wordDict_file = os.path.join(mind_resource_path, "utils", "word_dict.pkl")
yaml_file = os.path.join(mind_resource_path, "utils", r"npa.yaml")
if not os.path.exists(yaml_file):
download_deeprec_resources(
r"https://recodatasets.z20.web.core.windows.net/newsrec/",
os.path.join(mind_resource_path, "utils"),
"MINDdemo_utils.zip",
)
hparams = prepare_hparams(
yaml_file,
wordEmb_file=wordEmb_file,
wordDict_file=wordDict_file,
userDict_file=userDict_file,
epochs=1,
)
iterator = MINDIterator
model = NPAModel(hparams, iterator)
assert model.model is not None
assert model.scorer is not None
assert model.loss is not None
assert model.train_optimizer is not None
@pytest.mark.gpu
def test_lstur_component_definition(mind_resource_path):
wordEmb_file = os.path.join(mind_resource_path, "utils", "embedding.npy")
userDict_file = os.path.join(mind_resource_path, "utils", "uid2index.pkl")
wordDict_file = os.path.join(mind_resource_path, "utils", "word_dict.pkl")
yaml_file = os.path.join(mind_resource_path, "utils", r"lstur.yaml")
if not os.path.exists(yaml_file):
download_deeprec_resources(
r"https://recodatasets.z20.web.core.windows.net/newsrec/",
            os.path.join(mind_resource_path, "utils"),
"MINDdemo_utils.zip",
)
hparams = prepare_hparams(
yaml_file,
wordEmb_file=wordEmb_file,
wordDict_file=wordDict_file,
userDict_file=userDict_file,
epochs=1,
)
iterator = MINDIterator
model = LSTURModel(hparams, iterator)
assert model.model is not None
assert model.scorer is not None
assert model.loss is not None
assert model.train_optimizer is not None
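# Editor's note: a hedged refactoring sketch, not part of the original suite. The three
# MINDIterator-based tests above share one skeleton and could be collapsed with
# pytest.mark.parametrize as below (NAML is left out because it also needs the
# vert/subvert dictionaries). It is kept commented out because the model classes are
# only bound when the optional GPU dependencies import successfully at the top of this
# module, so referencing them at collection time would break CPU-only environments.
# @pytest.mark.gpu
# @pytest.mark.parametrize(
#     "model_cls, yaml_name",
#     [(NRMSModel, "nrms.yaml"), (NPAModel, "npa.yaml"), (LSTURModel, "lstur.yaml")],
# )
# def test_component_definition(mind_resource_path, model_cls, yaml_name):
#     hparams = prepare_hparams(
#         os.path.join(mind_resource_path, "utils", yaml_name),
#         wordEmb_file=os.path.join(mind_resource_path, "utils", "embedding.npy"),
#         wordDict_file=os.path.join(mind_resource_path, "utils", "word_dict.pkl"),
#         userDict_file=os.path.join(mind_resource_path, "utils", "uid2index.pkl"),
#         epochs=1,
#     )
#     model = model_cls(hparams, MINDIterator)
#     assert model.train_optimizer is not None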
| 36.673759 | 84 | 0.70876 | 678 | 5,171 | 5.178466 | 0.141593 | 0.088864 | 0.118485 | 0.087724 | 0.821418 | 0.775847 | 0.75591 | 0.75591 | 0.735973 | 0.735973 | 0 | 0.003842 | 0.19474 | 5,171 | 140 | 85 | 36.935714 | 0.839337 | 0.026107 | 0 | 0.649573 | 0 | 0 | 0.125397 | 0 | 0 | 0 | 0 | 0 | 0.136752 | 1 | 0.034188 | false | 0.008547 | 0.094017 | 0 | 0.128205 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
6064b305300ad3a1c3a5521f27fbb72d07efc384 | 160 | py | Python | multiner/utils/__init__.py | ugurcanozalp/multilingual-ner | c8bd9085be3e6377a56e167419464911e5cd0834 | [
"Apache-2.0"
] | 2 | 2021-12-24T12:40:36.000Z | 2022-03-04T03:14:50.000Z | multiner/utils/__init__.py | ugurcanozalp/multilingual-ner | c8bd9085be3e6377a56e167419464911e5cd0834 | [
"Apache-2.0"
] | 2 | 2021-04-26T08:56:25.000Z | 2021-07-23T11:48:05.000Z | multiner/utils/__init__.py | ugurcanozalp/multilingual-ner | c8bd9085be3e6377a56e167419464911e5cd0834 | [
"Apache-2.0"
] | null | null | null | from .custom_tokenizer import CustomTokenizer
from .custom_tokenizer_np import CustomTokenizerNP
from .dataset import NerDataset
from .crf_numpy import CRFNumpy | 40 | 50 | 0.88125 | 20 | 160 | 6.85 | 0.6 | 0.145985 | 0.277372 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09375 | 160 | 4 | 51 | 40 | 0.944828 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
6065e6fee42322b585a97acebd96a267765b8867 | 40,991 | py | Python | symphony/bdk/gen/pod_api/security_api.py | symphony-elias/symphony-bdk-python | 0d1cd94a9982e3687ea52c49acdb5f942ecd9bec | [
"Apache-2.0"
] | 17 | 2018-09-06T09:58:18.000Z | 2021-07-13T12:54:20.000Z | symphony/bdk/gen/pod_api/security_api.py | symphony-elias/symphony-bdk-python | 0d1cd94a9982e3687ea52c49acdb5f942ecd9bec | [
"Apache-2.0"
] | 59 | 2018-11-21T15:17:57.000Z | 2021-08-03T10:00:43.000Z | symphony/bdk/gen/pod_api/security_api.py | symphony-elias/symphony-bdk-python | 0d1cd94a9982e3687ea52c49acdb5f942ecd9bec | [
"Apache-2.0"
] | 37 | 2018-09-01T03:07:48.000Z | 2021-07-06T10:21:50.000Z | """
Pod API
    This document refers to Symphony API calls that do not need encryption or decryption of content. - sessionToken can be obtained by calling the authenticationAPI on the symphony back end and the key manager respectively. Refer to the methods described in authenticatorAPI.yaml. - Actions are defined to be atomic, i.e. they will succeed in their entirety or fail and have changed nothing. - If it returns a 40X status then it will have made no change to the system even if some subset of the request would have succeeded. - If this contract cannot be met for any reason then this is an error and the response code will be 50X.  # noqa: E501
The version of the OpenAPI document: 20.13.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from symphony.bdk.gen.api_client import ApiClient, Endpoint as _Endpoint
from symphony.bdk.gen.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from symphony.bdk.gen.pod_model.company_cert import CompanyCert
from symphony.bdk.gen.pod_model.company_cert_attributes import CompanyCertAttributes
from symphony.bdk.gen.pod_model.company_cert_detail import CompanyCertDetail
from symphony.bdk.gen.pod_model.company_cert_info_list import CompanyCertInfoList
from symphony.bdk.gen.pod_model.company_cert_type_list import CompanyCertTypeList
from symphony.bdk.gen.pod_model.error import Error
from symphony.bdk.gen.pod_model.string_id import StringId
from symphony.bdk.gen.pod_model.success_response import SuccessResponse
class SecurityApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __v1_companycert_delete_post(
self,
session_token,
finger_print,
**kwargs
):
"""Delete a company certificate # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = pod_api.v1_companycert_delete_post(session_token, finger_print, async_req=True)
>>> result = thread.get()
Args:
session_token (str): Session authentication token.
finger_print (StringId):
Keyword Args:
            _return_http_data_only (bool): return the response data only,
                without the status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (float/tuple): timeout setting for this request. If one
                number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
SuccessResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['session_token'] = \
session_token
kwargs['finger_print'] = \
finger_print
return self.call_with_http_info(**kwargs)
self.v1_companycert_delete_post = _Endpoint(
settings={
'response_type': (SuccessResponse,),
'auth': [],
'endpoint_path': '/v1/companycert/delete',
'operation_id': 'v1_companycert_delete_post',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'session_token',
'finger_print',
],
'required': [
'session_token',
'finger_print',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'session_token':
(str,),
'finger_print':
(StringId,),
},
'attribute_map': {
'session_token': 'sessionToken',
},
'location_map': {
'session_token': 'header',
'finger_print': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__v1_companycert_delete_post
)
def __v1_companycert_finger_print_get_get(
self,
finger_print,
session_token,
**kwargs
):
"""Get the details of a company certificate # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = pod_api.v1_companycert_finger_print_get_get(finger_print, session_token, async_req=True)
>>> result = thread.get()
Args:
finger_print (str): Certificate fingerPrint (ID)
session_token (str): Session authentication token.
Keyword Args:
            _return_http_data_only (bool): return the response data only,
                without the status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (float/tuple): timeout setting for this request. If one
                number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CompanyCertDetail
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['finger_print'] = \
finger_print
kwargs['session_token'] = \
session_token
return self.call_with_http_info(**kwargs)
self.v1_companycert_finger_print_get_get = _Endpoint(
settings={
'response_type': (CompanyCertDetail,),
'auth': [],
'endpoint_path': '/v1/companycert/{fingerPrint}/get',
'operation_id': 'v1_companycert_finger_print_get_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'finger_print',
'session_token',
],
'required': [
'finger_print',
'session_token',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'finger_print':
(str,),
'session_token':
(str,),
},
'attribute_map': {
'finger_print': 'fingerPrint',
'session_token': 'sessionToken',
},
'location_map': {
'finger_print': 'path',
'session_token': 'header',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__v1_companycert_finger_print_get_get
)
def __v1_companycert_finger_print_issued_by_get(
self,
finger_print,
session_token,
**kwargs
):
"""Return a list of all certificates which were verified to the cert whose fingerprint is passed. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = pod_api.v1_companycert_finger_print_issued_by_get(finger_print, session_token, async_req=True)
>>> result = thread.get()
Args:
finger_print (str): Certificate fingerPrint (ID)
session_token (str): Session authentication token.
Keyword Args:
            _return_http_data_only (bool): return the response data only,
                without the status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (float/tuple): timeout setting for this request. If one
                number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CompanyCertInfoList
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['finger_print'] = \
finger_print
kwargs['session_token'] = \
session_token
return self.call_with_http_info(**kwargs)
self.v1_companycert_finger_print_issued_by_get = _Endpoint(
settings={
'response_type': (CompanyCertInfoList,),
'auth': [],
'endpoint_path': '/v1/companycert/{fingerPrint}/issuedBy',
'operation_id': 'v1_companycert_finger_print_issued_by_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'finger_print',
'session_token',
],
'required': [
'finger_print',
'session_token',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'finger_print':
(str,),
'session_token':
(str,),
},
'attribute_map': {
'finger_print': 'fingerPrint',
'session_token': 'sessionToken',
},
'location_map': {
'finger_print': 'path',
'session_token': 'header',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__v1_companycert_finger_print_issued_by_get
)
def __v1_companycert_finger_print_update_post(
self,
finger_print,
session_token,
cert_attributes,
**kwargs
):
"""Update a company certificate # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = pod_api.v1_companycert_finger_print_update_post(finger_print, session_token, cert_attributes, async_req=True)
>>> result = thread.get()
Args:
finger_print (str): Certificate fingerPrint (ID)
session_token (str): Session authentication token.
cert_attributes (CompanyCertAttributes):
Keyword Args:
            _return_http_data_only (bool): return the response data only,
                without the status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (float/tuple): timeout setting for this request. If one
                number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
SuccessResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['finger_print'] = \
finger_print
kwargs['session_token'] = \
session_token
kwargs['cert_attributes'] = \
cert_attributes
return self.call_with_http_info(**kwargs)
self.v1_companycert_finger_print_update_post = _Endpoint(
settings={
'response_type': (SuccessResponse,),
'auth': [],
'endpoint_path': '/v1/companycert/{fingerPrint}/update',
'operation_id': 'v1_companycert_finger_print_update_post',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'finger_print',
'session_token',
'cert_attributes',
],
'required': [
'finger_print',
'session_token',
'cert_attributes',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'finger_print':
(str,),
'session_token':
(str,),
'cert_attributes':
(CompanyCertAttributes,),
},
'attribute_map': {
'finger_print': 'fingerPrint',
'session_token': 'sessionToken',
},
'location_map': {
'finger_print': 'path',
'session_token': 'header',
'cert_attributes': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__v1_companycert_finger_print_update_post
)
def __v1_companycert_list_get(
self,
session_token,
**kwargs
):
"""List all trusted certs # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = pod_api.v1_companycert_list_get(session_token, async_req=True)
>>> result = thread.get()
Args:
session_token (str): Session authentication token.
Keyword Args:
skip (int): Pagination start. [optional]
limit (int): Row limit. [optional]
            _return_http_data_only (bool): return the response data only,
                without the status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (float/tuple): timeout setting for this request. If one
                number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CompanyCertInfoList
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['session_token'] = \
session_token
return self.call_with_http_info(**kwargs)
self.v1_companycert_list_get = _Endpoint(
settings={
'response_type': (CompanyCertInfoList,),
'auth': [],
'endpoint_path': '/v1/companycert/list',
'operation_id': 'v1_companycert_list_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'session_token',
'skip',
'limit',
],
'required': [
'session_token',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'session_token':
(str,),
'skip':
(int,),
'limit':
(int,),
},
'attribute_map': {
'session_token': 'sessionToken',
'skip': 'skip',
'limit': 'limit',
},
'location_map': {
'session_token': 'header',
'skip': 'query',
'limit': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__v1_companycert_list_get
)
def __v1_companycert_podmanaged_list_get(
self,
session_token,
**kwargs
):
"""List all trusted certs # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = pod_api.v1_companycert_podmanaged_list_get(session_token, async_req=True)
>>> result = thread.get()
Args:
session_token (str): Session authentication token.
Keyword Args:
skip (int): Pagination start. [optional]
limit (int): Row limit. [optional]
            _return_http_data_only (bool): return the response data only,
                without the status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (float/tuple): timeout setting for this request. If one
                number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CompanyCertInfoList
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['session_token'] = \
session_token
return self.call_with_http_info(**kwargs)
self.v1_companycert_podmanaged_list_get = _Endpoint(
settings={
'response_type': (CompanyCertInfoList,),
'auth': [],
'endpoint_path': '/v1/companycert/podmanaged/list',
'operation_id': 'v1_companycert_podmanaged_list_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'session_token',
'skip',
'limit',
],
'required': [
'session_token',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'session_token':
(str,),
'skip':
(int,),
'limit':
(int,),
},
'attribute_map': {
'session_token': 'sessionToken',
'skip': 'skip',
'limit': 'limit',
},
'location_map': {
'session_token': 'header',
'skip': 'query',
'limit': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__v1_companycert_podmanaged_list_get
)
def __v1_companycert_type_list_post(
self,
session_token,
type_id_list,
**kwargs
):
"""List all certs of the given types # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = pod_api.v1_companycert_type_list_post(session_token, type_id_list, async_req=True)
>>> result = thread.get()
Args:
session_token (str): Session authentication token.
type_id_list (CompanyCertTypeList): Certificate type list
Keyword Args:
skip (int): Pagination start. [optional]
limit (int): Row limit. [optional]
            _return_http_data_only (bool): return the response data only,
                without the status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (float/tuple): timeout setting for this request. If one
                number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CompanyCertInfoList
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['session_token'] = \
session_token
kwargs['type_id_list'] = \
type_id_list
return self.call_with_http_info(**kwargs)
self.v1_companycert_type_list_post = _Endpoint(
settings={
'response_type': (CompanyCertInfoList,),
'auth': [],
'endpoint_path': '/v1/companycert/type/list',
'operation_id': 'v1_companycert_type_list_post',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'session_token',
'type_id_list',
'skip',
'limit',
],
'required': [
'session_token',
'type_id_list',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'session_token':
(str,),
'type_id_list':
(CompanyCertTypeList,),
'skip':
(int,),
'limit':
(int,),
},
'attribute_map': {
'session_token': 'sessionToken',
'skip': 'skip',
'limit': 'limit',
},
'location_map': {
'session_token': 'header',
'type_id_list': 'body',
'skip': 'query',
'limit': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__v1_companycert_type_list_post
)
def __v2_companycert_create_post(
self,
session_token,
cert,
**kwargs
):
"""Create a company trusted or untrusted certificate. Different from V1 in that we reject expired certificates. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = pod_api.v2_companycert_create_post(session_token, cert, async_req=True)
>>> result = thread.get()
Args:
session_token (str): Session authentication token.
cert (CompanyCert):
Keyword Args:
            _return_http_data_only (bool): return the response data only,
                without the status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (float/tuple): timeout setting for this request. If one
                number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CompanyCertDetail
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['session_token'] = \
session_token
kwargs['cert'] = \
cert
return self.call_with_http_info(**kwargs)
self.v2_companycert_create_post = _Endpoint(
settings={
'response_type': (CompanyCertDetail,),
'auth': [],
'endpoint_path': '/v2/companycert/create',
'operation_id': 'v2_companycert_create_post',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'session_token',
'cert',
],
'required': [
'session_token',
'cert',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'session_token':
(str,),
'cert':
(CompanyCert,),
},
'attribute_map': {
'session_token': 'sessionToken',
},
'location_map': {
'session_token': 'header',
'cert': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__v2_companycert_create_post
)
| 37.849492 | 637 | 0.474031 | 3,519 | 40,991 | 5.244388 | 0.076442 | 0.052018 | 0.022541 | 0.023408 | 0.880466 | 0.860417 | 0.836196 | 0.826172 | 0.807586 | 0.803522 | 0 | 0.004495 | 0.446391 | 40,991 | 1,082 | 638 | 37.884473 | 0.808752 | 0.324681 | 0 | 0.684282 | 0 | 0 | 0.221972 | 0.040069 | 0 | 0 | 0 | 0 | 0 | 1 | 0.012195 | false | 0 | 0.01626 | 0 | 0.04065 | 0.058266 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
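A minimal usage sketch for the generated client above. The token value and the bare default-client construction are illustrative assumptions (real code would configure host and auth on the ApiClient); the call pattern follows the method docstrings.

# Hypothetical driver for the generated SecurityApi; token/host setup is illustrative.
from symphony.bdk.gen.api_client import ApiClient
from symphony.bdk.gen.pod_api.security_api import SecurityApi

pod_api = SecurityApi(ApiClient())

# Synchronous call (async_req defaults to False); returns a CompanyCertInfoList.
certs = pod_api.v1_companycert_list_get('my-session-token', skip=0, limit=10)

# Asynchronous variant, as in the docstring examples: returns a thread-like handle.
thread = pod_api.v1_companycert_list_get('my-session-token', async_req=True)
certs = thread.get()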
6071adebf5dbd6d42f1d6d60ac48c086fd7751de | 42 | py | Python | rockit/core/resolvers/__init__.py | acreations/rockit-server | 4d1e87b563d9339e73bf0e5c698a59e8e124cc01 | [
"MIT"
] | null | null | null | rockit/core/resolvers/__init__.py | acreations/rockit-server | 4d1e87b563d9339e73bf0e5c698a59e8e124cc01 | [
"MIT"
] | null | null | null | rockit/core/resolvers/__init__.py | acreations/rockit-server | 4d1e87b563d9339e73bf0e5c698a59e8e124cc01 | [
"MIT"
] | null | null | null | from .commands import *  # explicit relative imports (the implicit form fails on Python 3)
from .mixes import * | 21 | 22 | 0.785714 | 6 | 42 | 5.5 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.166667 | 42 | 2 | 23 | 21 | 0.942857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
60acc5b3e7740b90b3f30649b84bfca11acad7f8 | 3,546 | py | Python | app/views/search_ip.py | aluisq/flask_mysql_jinja | 6fc9bd23ac31647744be0a1016bca42e2bc32961 | [
"MIT"
] | null | null | null | app/views/search_ip.py | aluisq/flask_mysql_jinja | 6fc9bd23ac31647744be0a1016bca42e2bc32961 | [
"MIT"
] | null | null | null | app/views/search_ip.py | aluisq/flask_mysql_jinja | 6fc9bd23ac31647744be0a1016bca42e2bc32961 | [
"MIT"
] | null | null | null | from app import cursor, app
from flask import render_template, request, redirect, url_for, jsonify, flash
from mysql.connector import Error
# Show the results
# print(dados)
@app.route("/search-ip/hgmi")
def ip_hgmi():
    # QUERY ALTERED FOR TESTING
sql = ("SELECT id, ur, ip, hostname, unidade, local, setor, ramal FROM equipments WHERE unidade = 'HGMI' AND raspberry LIKE 'N%' ")
cursor.execute(sql)
dados = []
unidade = "HGMI"
for (id, ur, ip, hostname, unidade, local, setor, ramal) in cursor:
dados.append({"id": id, "ur": ur, "ip": ip, "hostname": hostname, "unidade" : unidade, "local": local, "setor": setor, "ramal": ramal })
# print(dados)
return render_template('public/maquinas.html', dados = dados, unidade = unidade)
@app.route("/search-ip/hur1")
def ip_hur1():
sql = ("SELECT id, ur, ip, hostname, unidade, local, setor, ramal FROM equipments WHERE unidade = 'HUR 1' AND raspberry LIKE 'N%' ")
cursor.execute(sql)
dados = []
unidade = "HUR 1"
for (id, ur, ip, hostname, unidade, local, setor, ramal) in cursor:
dados.append({"id":id, "ur": ur, "ip": ip, "hostname": hostname, "unidade" : unidade, "local": local, "setor": setor, "ramal": ramal })
# print(dados)
return render_template('public/maquinas.html', dados = dados, unidade = unidade)
@app.route("/search-ip/anexo-hur1")
def ip_anexo_hur1():
sql = ("SELECT id, ur, ip, hostname, unidade, local, setor, ramal FROM equipments WHERE unidade = 'ANEXO' AND raspberry LIKE 'N%' ")
cursor.execute(sql)
dados = []
unidade = "ANEXO"
for (id, ur, ip, hostname, unidade, local, setor, ramal) in cursor:
dados.append({ "id": id, "ur": ur, "ip": ip, "hostname": hostname, "unidade" : unidade, "local": local, "setor": setor, "ramal": ramal })
# print(dados)
return render_template('public/maquinas.html', dados = dados, unidade = unidade)
@app.route("/search-ip/raspberry-hur1")
def ip_rasp_hur1():
sql = ("SELECT id, ip, hostname, unidade, local, setor FROM equipments WHERE unidade = 'HUR 1' AND raspberry LIKE 'S%'")
cursor.execute(sql)
dados = []
unidade = "HUR 1"
for (id, ip, hostname, unidade, local, setor) in cursor:
dados.append({"id": id, "ip":ip, "hostname": hostname, "unidade": unidade, "local": local, "setor": setor})
return render_template("/public/raspberry.html", dados = dados, unidade = unidade)
@app.route("/search-ip/raspberry-hgmi")
def ip_rasp_hgmi():
sql = ("SELECT id, ip, hostname, unidade, local, setor FROM equipments WHERE unidade = 'HGMI' AND raspberry LIKE 'S%'")
cursor.execute(sql)
dados = []
unidade = "HGMI"
for (id, ip, hostname, unidade, local, setor) in cursor:
dados.append({"id": id, "ip":ip, "hostname": hostname, "unidade": unidade, "local": local, "setor": setor})
return render_template("/public/raspberry.html", dados = dados, unidade = unidade)
@app.route("/search-ip/raspberry-anexo")
def ip_rasp_anexo():
sql = ("SELECT id, ip, hostname, unidade, local, setor FROM equipments WHERE unidade = 'ANEXO' AND raspberry LIKE 'S%'")
cursor.execute(sql)
dados = []
unidade = "ANEXO"
for (id, ip, hostname, unidade, local, setor) in cursor:
dados.append({"id":id, "ip":ip, "hostname": hostname, "unidade": unidade, "local": local, "setor": setor})
return render_template("/public/raspberry.html", dados = dados, unidade = unidade) | 41.232558 | 148 | 0.635928 | 456 | 3,546 | 4.905702 | 0.127193 | 0.080465 | 0.091194 | 0.118015 | 0.877962 | 0.877962 | 0.877962 | 0.877962 | 0.861869 | 0.734019 | 0 | 0.00355 | 0.205584 | 3,546 | 86 | 149 | 41.232558 | 0.790557 | 0.027355 | 0 | 0.631579 | 0 | 0.105263 | 0.33982 | 0.047342 | 0 | 0 | 0 | 0 | 0 | 1 | 0.105263 | false | 0 | 0.052632 | 0 | 0.263158 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
8812dd73eecd7df3d97f88c385ddb34298d35f1d | 97 | py | Python | groklog/ui/scenes/__init__.py | apockill/groklog | 3d7de51086851fc68dba6ae77aafd3e5274549c7 | [
"MIT"
] | 2 | 2021-07-15T02:18:56.000Z | 2022-02-01T19:33:34.000Z | groklog/ui/scenes/__init__.py | apockill/groklog | 3d7de51086851fc68dba6ae77aafd3e5274549c7 | [
"MIT"
] | 1 | 2022-03-08T23:24:00.000Z | 2022-03-08T23:24:00.000Z | groklog/ui/scenes/__init__.py | apockill/groklog | 3d7de51086851fc68dba6ae77aafd3e5274549c7 | [
"MIT"
] | null | null | null | from .app import GrokLog
from .base_app import BaseApp
from .filter_creator import FilterCreator
| 24.25 | 41 | 0.845361 | 14 | 97 | 5.714286 | 0.642857 | 0.225 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.123711 | 97 | 3 | 42 | 32.333333 | 0.941176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
714a5064a62ea88f8206620d2f5854559fba5677 | 162 | py | Python | mla/__init__.py | thejevans/mla | 0c583741cfc7626b0653bf58f4efaa1e7681424c | [
"Apache-2.0"
] | 1 | 2020-11-20T15:47:00.000Z | 2020-11-20T15:47:00.000Z | mla/__init__.py | thejevans/mla | 0c583741cfc7626b0653bf58f4efaa1e7681424c | [
"Apache-2.0"
] | 57 | 2020-11-27T02:23:08.000Z | 2022-02-12T20:14:24.000Z | mla/__init__.py | thejevans/mla | 0c583741cfc7626b0653bf58f4efaa1e7681424c | [
"Apache-2.0"
] | null | null | null | """__init__.py"""
# flake8: noqa
from .analysis import *
from .models import *
from .sources import *
from .test_statistics import *
from .time_profiles import *
| 20.25 | 30 | 0.734568 | 21 | 162 | 5.380952 | 0.619048 | 0.353982 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007246 | 0.148148 | 162 | 7 | 31 | 23.142857 | 0.811594 | 0.154321 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
7179edb352a299f8ba51b98500cb188948fe5b12 | 121 | py | Python | 2395.py | bigMARAC/uri-python-solves | 3dd6dadace8a7b6b8e3274e3fa9e22ef1ec94aa4 | [
"MIT"
] | null | null | null | 2395.py | bigMARAC/uri-python-solves | 3dd6dadace8a7b6b8e3274e3fa9e22ef1ec94aa4 | [
"MIT"
] | null | null | null | 2395.py | bigMARAC/uri-python-solves | 3dd6dadace8a7b6b8e3274e3fa9e22ef1ec94aa4 | [
"MIT"
] | null | null | null | # Read two space-separated integer triples from stdin
a, b, c = map(int, input().split(' '))
x, y, z = map(int, input().split(' '))
print(int(x / a) * int(y / b) * int(z / c)) | 40.333333 | 43 | 0.479339 | 24 | 121 | 2.416667 | 0.458333 | 0.206897 | 0.37931 | 0.551724 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.198347 | 121 | 3 | 43 | 40.333333 | 0.597938 | 0 | 0 | 0 | 0 | 0 | 0.016393 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
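A quick worked check of the computation above. For positive integers, x // a equals int(x / a) and sidesteps float rounding for very large values:

# Worked example: 2x3x4 pieces along the axes of a 10x10x10 volume -> 5 * 3 * 2 = 30.
a, b, c = 2, 3, 4
x, y, z = 10, 10, 10
assert (x // a) * (y // b) * (z // c) == int(x / a) * int(y / b) * int(z / c) == 30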
71864bb93fe01bcd901a91f5f6db1edcb2d12674 | 146 | py | Python | main_model_hpOpt.py | zhangruochi/Mol-HGT | 81c1662cdfcf9796651c761c4c64715cf7be64ce | [
"MIT"
] | 3 | 2022-01-25T08:36:20.000Z | 2022-02-23T09:16:49.000Z | main_model_hpOpt.py | zhangruochi/Mol-HGT | 81c1662cdfcf9796651c761c4c64715cf7be64ce | [
"MIT"
] | 1 | 2022-02-15T10:19:26.000Z | 2022-02-24T14:25:37.000Z | main_model_hpOpt.py | zhangruochi/Mol-HGT | 81c1662cdfcf9796651c761c4c64715cf7be64ce | [
"MIT"
] | null | null | null | import core.molPred.init
from models import models
from core.molPred.molPred_datas_hpOpt import main
if __name__ == '__main__':
main(models)
| 20.857143 | 49 | 0.787671 | 21 | 146 | 5 | 0.52381 | 0.209524 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.136986 | 146 | 6 | 50 | 24.333333 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0.054795 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.6 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
71a3b41cad2fc928a5fdc69e163ef946ed092cc5 | 287 | py | Python | ontology-tools/CMCLABoxManagement/chemaboxwriters/chemaboxwriters/kgoperations/queryendpoints.py | mdhillmancmcl/TheWorldAvatar-CMCL-Fork | 011aee78c016b76762eaf511c78fabe3f98189f4 | [
"MIT"
] | 21 | 2021-03-08T01:58:25.000Z | 2022-03-09T15:46:16.000Z | ontology-tools/CMCLABoxManagement/chemaboxwriters/chemaboxwriters/kgoperations/queryendpoints.py | mdhillmancmcl/TheWorldAvatar-CMCL-Fork | 011aee78c016b76762eaf511c78fabe3f98189f4 | [
"MIT"
] | 63 | 2021-05-04T15:05:30.000Z | 2022-03-23T14:32:29.000Z | ontology-tools/CMCLABoxManagement/chemaboxwriters/chemaboxwriters/kgoperations/queryendpoints.py | mdhillmancmcl/TheWorldAvatar-CMCL-Fork | 011aee78c016b76762eaf511c78fabe3f98189f4 | [
"MIT"
] | 15 | 2021-03-08T07:52:03.000Z | 2022-03-29T04:46:20.000Z | SPARQL_ENDPOINTS = {
'ontocompchem': 'http://theworldavatar.com/blazegraph/namespace/ontocompchem/sparql',
'ontospecies': 'http://theworldavatar.com/blazegraph/namespace/ontospecies/sparql',
'ontopesscan': 'http://theworldavatar.com/blazegraph/namespace/ontopesscan/sparql'
} | 57.4 | 89 | 0.773519 | 26 | 287 | 8.5 | 0.384615 | 0.244344 | 0.285068 | 0.420814 | 0.542986 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.073171 | 287 | 5 | 90 | 57.4 | 0.830827 | 0 | 0 | 0 | 0 | 0 | 0.798611 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
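A sketch of how these endpoints might be queried with SPARQLWrapper (an assumed dependency here; the query itself is an illustrative placeholder):

# Illustrative query against one of the endpoints above using SPARQLWrapper.
from SPARQLWrapper import SPARQLWrapper, JSON

client = SPARQLWrapper(SPARQL_ENDPOINTS['ontospecies'])
client.setQuery("SELECT ?s WHERE { ?s ?p ?o } LIMIT 5")  # placeholder query
client.setReturnFormat(JSON)
results = client.query().convert()
for binding in results['results']['bindings']:
    print(binding['s']['value'])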
71c1a0291726e85561ce8f504f1f744e8a42d885 | 90 | py | Python | eggmorning/models/__init__.py | beautifultools/eggmorning-django | 2f805088bcf8f2473bd742e76267364c6985074e | [
"MIT",
"Unlicense"
] | null | null | null | eggmorning/models/__init__.py | beautifultools/eggmorning-django | 2f805088bcf8f2473bd742e76267364c6985074e | [
"MIT",
"Unlicense"
] | 4 | 2021-06-04T23:52:54.000Z | 2021-09-22T19:33:34.000Z | eggmorning/models/__init__.py | beautifultools/eggmorning-django | 2f805088bcf8f2473bd742e76267364c6985074e | [
"MIT",
"Unlicense"
] | 1 | 2021-06-24T12:22:14.000Z | 2021-06-24T12:22:14.000Z | from .common import *
from .user import *
from .hotel import *
from .component import *
| 12.857143 | 24 | 0.711111 | 12 | 90 | 5.333333 | 0.5 | 0.46875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.2 | 90 | 6 | 25 | 15 | 0.888889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
71c9367725b36d3daebdea3e7e2e8b8bab59119a | 310 | py | Python | test/testMat.py | rayline/WebMat | c8ed120d693670dd14c90105df6b9ab78a1deac0 | [
"MIT"
] | null | null | null | test/testMat.py | rayline/WebMat | c8ed120d693670dd14c90105df6b9ab78a1deac0 | [
"MIT"
] | null | null | null | test/testMat.py | rayline/WebMat | c8ed120d693670dd14c90105df6b9ab78a1deac0 | [
"MIT"
] | null | null | null | import numpy
a = [[5,2,5,2,0,7,1,7,9,4,7],[1,3,1,6,4,5,1,2,1,3,2],[6,7,2,6,9,2,5,6,6,5,3],[7,3,4,2,7,8,4,7,5,4,4],[2,1,1,3,2,2,2,1,9,2,8],[7,9,0,0,3,9,9,8,6,5,2],[4,5,0,8,8,0,5,1,2,3,3],[6,7,1,6,2,9,3,9,4,6,4],[7,6,2,9,0,4,3,3,0,5,0],[3,7,0,6,1,7,7,1,9,5,0],[6,5,0,1,7,2,6,7,5,0,4]]
print(numpy.linalg.det(a))
| 77.5 | 269 | 0.493548 | 129 | 310 | 1.186047 | 0.124031 | 0.065359 | 0.039216 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.398026 | 0.019355 | 310 | 3 | 270 | 103.333333 | 0.105263 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0.333333 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
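For a matrix of this size the float determinant can lose precision; a hedged alternative in the same numpy API is slogdet, which returns the sign and log-magnitude separately:

# Numerically safer variant for the 11x11 matrix `a` defined above.
sign, logdet = numpy.linalg.slogdet(a)
print(sign * numpy.exp(logdet))  # equals numpy.linalg.det(a) up to rounding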
e09d25c6923f60035d1a6ccd34679d8bb3bd93f3 | 2,324 | py | Python | Algorithms/register.py | kasimte/RLs | 0eba84bd7cc571269f874b65923bec2188828ef6 | [
"Apache-2.0"
] | null | null | null | Algorithms/register.py | kasimte/RLs | 0eba84bd7cc571269f874b65923bec2188828ef6 | [
"Apache-2.0"
] | null | null | null | Algorithms/register.py | kasimte/RLs | 0eba84bd7cc571269f874b65923bec2188828ef6 | [
"Apache-2.0"
] | null | null | null | import tensorflow as tf
try:
tf_version = tf.version.VERSION[0]
except AttributeError:  # older TF 1.x builds expose tf.VERSION only
tf_version = tf.VERSION[0]
finally:
if tf_version == '1':
from .tf1algos import *
# algorithms based on TF 1.x
version = 'tf1algos'
algos = {
'pg': [PG, 'on-policy', 'perEpisode'],
'ppo': [PPO, 'on-policy', 'perEpisode'],
'ac': [AC, 'off-policy', 'perStep'], # could be on-policy, but also doesn't work well.
'a2c': [A2C, 'on-policy', 'perEpisode'],
'dpg': [DPG, 'off-policy', 'perStep'],
'ddpg': [DDPG, 'off-policy', 'perStep'],
'td3': [TD3, 'off-policy', 'perStep'],
'sac': [SAC, 'off-policy', 'perStep'],
'sac_no_v': [SAC_NO_V, 'off-policy', 'perStep'],
'dqn': [DQN, 'off-policy', 'perStep'],
'ddqn': [DDQN, 'off-policy', 'perStep'],
'dddqn': [DDDQN, 'off-policy', 'perStep'],
'ma_dpg': [MADPG, 'off-policy', 'perStep'],
'ma_ddpg': [MADDPG, 'off-policy', 'perStep'],
'ma_td3': [MATD3, 'off-policy', 'perStep'],
}
elif tf_version == '2':
from .tf2algos import *
# algorithms based on TF 2.0
version = 'tf2algos'
algos = {
'pg': [PG, 'on-policy', 'perEpisode'],
'ppo': [PPO, 'on-policy', 'perEpisode'],
'ac': [AC, 'off-policy', 'perStep'], # could be on-policy, but also doesn't work well.
'a2c': [A2C, 'on-policy', 'perEpisode'],
'dpg': [DPG, 'off-policy', 'perStep'],
'ddpg': [DDPG, 'off-policy', 'perStep'],
'td3': [TD3, 'off-policy', 'perStep'],
'sac': [SAC, 'off-policy', 'perStep'],
'sac_no_v': [SAC_NO_V, 'off-policy', 'perStep'],
'dqn': [DQN, 'off-policy', 'perStep'],
'ddqn': [DDQN, 'off-policy', 'perStep'],
'dddqn': [DDDQN, 'off-policy', 'perStep'],
'maxsqn': [MAXSQN, 'off-policy', 'perStep'],
'ma_dpg': [MADPG, 'off-policy', 'perStep'],
'ma_ddpg': [MADDPG, 'off-policy', 'perStep'],
'ma_td3': [MATD3, 'off-policy', 'perStep'],
}
def get_model_info(name):
    if name not in algos:
        raise NotImplementedError(f'algorithm {name!r} is not registered')
else:
return version, algos[name] | 42.254545 | 99 | 0.496558 | 257 | 2,324 | 4.412451 | 0.245136 | 0.198413 | 0.352734 | 0.095238 | 0.765432 | 0.72134 | 0.72134 | 0.72134 | 0.72134 | 0.72134 | 0 | 0.014198 | 0.302926 | 2,324 | 55 | 100 | 42.254545 | 0.685802 | 0.064114 | 0 | 0.615385 | 0 | 0 | 0.316444 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.019231 | false | 0 | 0.057692 | 0 | 0.096154 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
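A usage sketch of the registry above; the 'ppo' key comes from the table, and the unpacking mirrors the three-element entries:

# Look up an algorithm entry; each value is [class, policy mode, training mode].
version, (model_cls, policy_mode, train_mode) = get_model_info('ppo')
print(version)      # 'tf1algos' or 'tf2algos', depending on the TF major version
print(policy_mode)  # 'on-policy'
print(train_mode)   # 'perEpisode'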
e09f0fb22f42f8c4bd91a55212f80b77d8c9b80d | 96 | py | Python | venv/lib/python3.8/site-packages/numpy/core/tests/test__exceptions.py | GiulianaPola/select_repeats | 17a0d053d4f874e42cf654dd142168c2ec8fbd11 | [
"MIT"
] | 2 | 2022-03-13T01:58:52.000Z | 2022-03-31T06:07:54.000Z | venv/lib/python3.8/site-packages/numpy/core/tests/test__exceptions.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | 19 | 2021-11-20T04:09:18.000Z | 2022-03-23T15:05:55.000Z | venv/lib/python3.8/site-packages/numpy/core/tests/test__exceptions.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | null | null | null | /home/runner/.cache/pip/pool/42/ac/50/48b5dba0f5c45701f3f93c84d8978897f4c2fab3ee8337e8db99db727b | 96 | 96 | 0.895833 | 9 | 96 | 9.555556 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.40625 | 0 | 96 | 1 | 96 | 96 | 0.489583 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
1cf0b8f6e8ccab2ff2b41746c5dd57927d83045f | 8,990 | py | Python | py/g1/asyncs/bases/tests/test_streams.py | clchiou/garage | 446ff34f86cdbd114b09b643da44988cf5d027a3 | [
"MIT"
] | 3 | 2016-01-04T06:28:52.000Z | 2020-09-20T13:18:40.000Z | py/g1/asyncs/bases/tests/test_streams.py | clchiou/garage | 446ff34f86cdbd114b09b643da44988cf5d027a3 | [
"MIT"
] | null | null | null | py/g1/asyncs/bases/tests/test_streams.py | clchiou/garage | 446ff34f86cdbd114b09b643da44988cf5d027a3 | [
"MIT"
] | null | null | null | import unittest
from g1.asyncs.bases import streams
from g1.asyncs.kernels import contexts
from g1.asyncs.kernels import errors
from g1.asyncs.kernels import kernels
class BytesStreamTest(unittest.TestCase):
def setUp(self):
self.s = streams.BytesStream()
self.k = kernels.Kernel()
self.token = contexts.set_kernel(self.k)
def tearDown(self):
contexts.KERNEL.reset(self.token)
self.k.close()
def test_wrong_data_type(self):
with self.assertRaises(TypeError):
self.s.nonblocking.write('')
def test_write_after_close(self):
stream = self.s.nonblocking
stream.write(b'hello')
stream.close()
with self.assertRaises(AssertionError):
stream.write(b'')
self.assertEqual(stream.read(), b'hello')
self.assertEqual(stream.read(), b'')
def test_read(self):
stream = self.s.nonblocking
self.assert_stream(b'')
self.assertIsNone(stream.read())
# Test ``read(size=-1)``.
self.assertEqual(stream.write(b'hello'), 5)
self.assert_stream(b'hello')
self.assertEqual(stream.read(), b'hello')
self.assert_stream(b'')
self.assertIsNone(stream.read())
self.assert_stream(b'')
# Test ``read(size=0)``.
self.assertIsNone(stream.read(0))
self.assertEqual(stream.write(b'world'), 5)
self.assert_stream(b'world')
self.assertEqual(stream.read(0), b'')
self.assert_stream(b'world')
self.assertEqual(stream.read(0), b'')
self.assert_stream(b'world')
# Test size greater than 0.
self.assertEqual(stream.read(1), b'w')
self.assert_stream(b'orld')
self.assertEqual(stream.read(2), b'or')
self.assert_stream(b'ld')
self.assertEqual(stream.read(3), b'ld')
self.assert_stream(b'')
self.assertIsNone(stream.read(4))
self.assert_stream(b'')
self.assertIsNone(stream.read(5))
self.assert_stream(b'')
self.assertEqual(stream.write(b'foo'), 3)
self.assert_stream(b'foo')
stream.close()
self.assert_stream(b'foo')
self.assertEqual(stream.read(1), b'f')
self.assert_stream(b'oo')
self.assertEqual(stream.read(0), b'')
self.assert_stream(b'oo')
self.assertEqual(stream.read(), b'oo')
self.assert_stream(b'')
self.assertEqual(stream.read(), b'')
self.assert_stream(b'')
self.assertEqual(stream.read(0), b'')
self.assert_stream(b'')
self.assertEqual(stream.read(1), b'')
self.assert_stream(b'')
def test_readline_with_size(self):
stream = self.s.nonblocking
self.assertEqual(stream.write(b'hello'), 5)
self.assertEqual(stream.readline(3), b'hel')
self.assert_stream(b'lo')
self.assertEqual(stream.write(b'\n'), 1)
self.assert_stream(b'lo\n')
self.assertEqual(stream.readline(2), b'lo')
self.assert_stream(b'\n')
self.assertEqual(stream.readline(2), b'\n')
self.assert_stream(b'')
def test_readline_without_size(self):
stream = self.s.nonblocking
self.assert_stream(b'')
self.assertIsNone(stream.readline())
self.assertEqual(stream.write(b'hello'), 5)
self.assert_stream(b'hello')
self.assertIsNone(stream.readline())
self.assert_stream(b'hello')
self.assertEqual(stream.write(b'\n'), 1)
self.assert_stream(b'hello\n')
self.assertEqual(stream.readline(), b'hello\n')
self.assert_stream(b'')
self.assertIsNone(stream.readline())
self.assert_stream(b'')
self.assertEqual(stream.write(b'world'), 5)
self.assert_stream(b'world')
self.assertIsNone(stream.readline())
self.assert_stream(b'world')
self.assertEqual(stream.write(b'\n'), 1)
self.assert_stream(b'world\n')
self.assertEqual(stream.readline(), b'world\n')
self.assert_stream(b'')
self.assertIsNone(stream.readline())
self.assert_stream(b'')
self.assertEqual(stream.write(b'foo'), 3)
self.assert_stream(b'foo')
self.assertIsNone(stream.readline())
self.assert_stream(b'foo')
stream.close()
self.assert_stream(b'foo')
self.assertEqual(stream.readline(), b'foo')
self.assert_stream(b'')
self.assertEqual(stream.readline(), b'')
self.assert_stream(b'')
def test_async(self):
self.assert_stream(b'')
self.assertEqual(self.k.run(self.s.write(b'hello\n')), 6)
self.assert_stream(b'hello\n')
self.assertEqual(self.k.run(self.s.read(3)), b'hel')
self.assert_stream(b'lo\n')
self.assertEqual(self.k.run(self.s.readline()), b'lo\n')
self.assert_stream(b'')
t = self.k.spawn(self.s.read(0))
with self.assertRaises(errors.KernelTimeout):
self.k.run(timeout=0)
self.assert_stream(b'')
for _ in range(3):
self.k.run(self.s.write(b''))
with self.assertRaises(errors.KernelTimeout):
self.k.run(timeout=0)
self.assert_stream(b'')
self.k.run(self.s.write(b'world'))
self.k.run(self.s.write(b''))
self.k.run(self.s.write(b''))
self.assert_stream(b'world')
self.k.run()
self.assert_stream(b'world')
self.assertEqual(t.get_result_nonblocking(), b'')
self.k.run(self.s.close())
self.assert_stream(b'world')
self.assertEqual(self.k.run(self.s.read()), b'world')
self.assert_stream(b'')
self.assertEqual(self.k.run(self.s.read()), b'')
self.assertEqual(self.k.run(self.s.readline()), b'')
self.assert_stream(b'')
def test_async_iterator(self):
lines = []
async def do_iter():
async for line in self.s:
lines.append(line)
t = self.k.spawn(do_iter)
self.assertFalse(t.is_completed())
self.assertEqual(lines, [])
self.assert_stream(b'')
with self.assertRaises(errors.KernelTimeout):
self.k.run(timeout=0)
self.assertFalse(t.is_completed())
self.assertEqual(lines, [])
self.assert_stream(b'')
self.k.run(self.s.write(b'hello'))
self.assertFalse(t.is_completed())
self.assertEqual(lines, [])
self.assert_stream(b'hello')
self.k.run(self.s.write(b'\n'))
self.assertFalse(t.is_completed())
with self.assertRaises(errors.KernelTimeout):
self.k.run(timeout=0)
self.assertFalse(t.is_completed())
self.assertEqual(lines, [b'hello\n'])
self.assert_stream(b'')
self.k.run(self.s.write(b'world\n'))
self.k.run(self.s.write(b'foo'))
with self.assertRaises(errors.KernelTimeout):
self.k.run(timeout=0)
self.s.close()
self.k.run(timeout=1)
self.assertTrue(t.is_completed())
self.assertEqual(lines, [b'hello\n', b'world\n', b'foo'])
self.assert_stream(b'')
def test_async_readlines_with_hint(self):
t = self.k.spawn(self.s.readlines(12))
self.assertFalse(t.is_completed())
for piece in (b'hello', b'\n', b'world', b'\n', b'foo\n', b'bar\n'):
self.assertEqual(self.k.run(self.s.write(piece)), len(piece))
self.assert_stream(b'foo\nbar\n')
self.assertEqual(t.get_result_nonblocking(), [b'hello\n', b'world\n'])
def test_async_readlines_without_hint(self):
t = self.k.spawn(self.s.readlines())
self.assertFalse(t.is_completed())
for piece in (b'hello', b'\n', b'world\n', b'foo'):
self.assertEqual(self.k.run(self.s.write(piece)), len(piece))
self.assertFalse(t.is_completed())
with self.assertRaises(errors.KernelTimeout):
self.k.run(timeout=0)
self.assertFalse(t.is_completed())
self.assert_stream(b'foo')
with self.assertRaises(errors.KernelTimeout):
self.k.run(timeout=0)
self.s.close()
self.k.run(timeout=1)
self.assert_stream(b'')
self.assertEqual(
t.get_result_nonblocking(),
[b'hello\n', b'world\n', b'foo'],
)
def assert_stream(self, expect):
self.assertEqual(self.s._buffer.getvalue(), expect)
self.assertEqual(self.s._buffer.tell(), len(expect))
class StringStreamTest(unittest.TestCase):
def setUp(self):
self.s = streams.StringStream()
self.k = kernels.Kernel()
self.token = contexts.set_kernel(self.k)
def tearDown(self):
contexts.KERNEL.reset(self.token)
self.k.close()
def test_wrong_data_type(self):
with self.assertRaises(TypeError):
self.s.nonblocking.write(b'')
if __name__ == '__main__':
unittest.main()
| 30.893471 | 78 | 0.601001 | 1,185 | 8,990 | 4.459916 | 0.08692 | 0.136235 | 0.178619 | 0.189782 | 0.861306 | 0.829518 | 0.791864 | 0.707852 | 0.583349 | 0.509934 | 0 | 0.006907 | 0.243048 | 8,990 | 290 | 79 | 31 | 0.769728 | 0.008009 | 0 | 0.625571 | 0 | 0 | 0.040386 | 0 | 0 | 0 | 0 | 0 | 0.639269 | 1 | 0.068493 | false | 0 | 0.022831 | 0 | 0.100457 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
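The pattern these tests exercise reduces to a short standalone sketch (same modules as above; the kernel set/reset mirrors setUp/tearDown):

# Minimal BytesStream round trip under a kernel, following the test fixtures above.
from g1.asyncs.bases import streams
from g1.asyncs.kernels import contexts, kernels

k = kernels.Kernel()
token = contexts.set_kernel(k)
try:
    s = streams.BytesStream()
    k.run(s.write(b'hello\n'))
    print(k.run(s.readline()))  # b'hello\n'
finally:
    contexts.KERNEL.reset(token)
    k.close()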
1cf35f9253c38501eaa0f4a43b356c7d84a2927f | 23,391 | py | Python | elstruct/elstruct/writer/_writer.py | sjklipp/autoio | e2b471e9c9dec933319c98a30d4d519ca5d47645 | [
"Apache-2.0"
] | null | null | null | elstruct/elstruct/writer/_writer.py | sjklipp/autoio | e2b471e9c9dec933319c98a30d4d519ca5d47645 | [
"Apache-2.0"
] | null | null | null | elstruct/elstruct/writer/_writer.py | sjklipp/autoio | e2b471e9c9dec933319c98a30d4d519ca5d47645 | [
"Apache-2.0"
] | null | null | null | """ Electronic structure program input writing module.
"""
from elstruct import par
from elstruct.writer import program_modules as pm
# energy input writers
def programs():
""" Constructs a list of available electronic structure programs.
At minimum, each program must have an energy reader to be enumerated.
"""
return pm.program_modules_with_function(pm.Job.ENERGY)
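# Usage note (hedged sketch): callers can probe backend support before writing
# input. The 'psi4' name below is illustrative; availability depends on which
# program modules are installed.
#
#     available = programs()
#     if 'psi4' in available:
#         inp_str = energy('psi4', geo, charge, mult, method, basis)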
def energy(prog, geo, charge, mult, method, basis,
# molecule options
mol_options=(),
# machine options
memory=1, comment='', machine_options=(),
# theory options
orb_type='RU',
scf_options=(), casscf_options=(), corr_options=(),
# generic options
gen_lines=None):
""" Writes an input file string for an electronic energy calculation
for a specified electronic structure program.
:param prog: electronic structure program to use as a backend
:type prog: str
:param geo: cartesian or z-matrix geometry
:type geo: tuple
:param charge: molecular charge
:type charge: int
:param mult: spin multiplicity
:type mult: int
:param method: electronic structure method
:type method: str
:param basis: basis set
:type basis: str
:param mol_options: options for the molecule block
:type mol_options: tuple[str]
        :param memory: memory in GB
:type memory: int
:param comment: a comment string to be placed at the top of the file
:type comment: str
:param machine_options: machine directives
(num procs, num threads, etc.)
:type machine_options: tuple[str]
:param orb_type: 'R' indicates restricted orbitals, 'U' indicates
unrestricted orbitals; can also be 'RR', 'RU', or 'UU'.
Where first (second) character sets R/U for singlets (multiplets)
:type orb_type: str
:param scf_options: scf method directives
:type scf_options: tuple[str]
:param casscf_options: casscf method directives
:type casscf_options: tuple[str]
:param corr_options: correlation method directives
:type corr_options: tuple[str]
:param gen_lines: generic lines for the input file
:type gen_lines: dict[idx:str]
"""
prog, method, basis, orb_restricted = _process_theory_specifications(
prog, method, basis, mult, orb_type)
return pm.call_module_function(
prog, pm.Job.ENERGY,
# *args
geo, charge, mult, method, basis, orb_restricted,
# **kwargs
# molecule options
mol_options=mol_options,
# machine options
memory=memory, comment=comment, machine_options=machine_options,
# theory options
scf_options=scf_options, casscf_options=casscf_options,
corr_options=corr_options,
# generic options
gen_lines=gen_lines,
# job options
job_options=(), frozen_coordinates=(),
saddle=False)
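# Example (hedged sketch): the geometry tuple, method, basis, and the 'psi4'
# backend below are illustrative placeholders, not a tested recipe; geometry
# units follow the caller's convention.
#
#     geo = (('O', (0.0, 0.0, 0.0)),
#            ('H', (0.0, 0.0, 1.8)),
#            ('H', (1.7, 0.0, -0.5)))
#     inp_str = energy('psi4', geo, charge=0, mult=1,
#                      method='b3lyp', basis='6-31g*',
#                      memory=2, comment='water single point', orb_type='RU')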
# gradient input writers
def gradient_programs():
""" Constructs a list of program modules implementing
gradient input writers.
"""
return pm.program_modules_with_function(pm.Job.GRADIENT)
def gradient(prog, geo, charge, mult, method, basis,
# molecule options
mol_options=(),
# machine options
memory=1, comment='', machine_options=(),
# theory options
orb_type='RU',
scf_options=(), casscf_options=(), corr_options=(),
# generic options
gen_lines=None,
# job options
job_options=()):
""" Writes an input file string for a gradient calculation
for a specified electronic structure program.
:param prog: electronic structure program to use as a backend
:type prog: str
:param geo: cartesian or z-matrix geometry
:type geo: tuple
:param charge: molecular charge
:type charge: int
:param mult: spin multiplicity
:type mult: int
:param method: electronic structure method
:type method: str
:param basis: basis set
:type basis: str
:param mol_options: options for the molecule block
:type mol_options: tuple[str]
        :param memory: memory in GB
:type memory: int
:param comment: a comment string to be placed at the top of the file
:type comment: str
:param machine_options: machine directives
(num procs, num threads, etc.)
:type machine_options: tuple[str]
:param orb_type: 'R' indicates restricted orbitals, 'U' indicates
unrestricted orbitals; can also be 'RR', 'RU', or 'UU'.
Where first (second) character sets R/U for singlets (multiplets)
:type orb_type: str
:param scf_options: scf method directives
:type scf_options: tuple[str]
:param casscf_options: casscf method directives
:type casscf_options: tuple[str]
:param corr_options: correlation method directives
:type corr_options: tuple[str]
:param gen_lines: generic lines for the input file
:type gen_lines: dict[idx:str]
"""
prog, method, basis, orb_restricted = _process_theory_specifications(
prog, method, basis, mult, orb_type)
return pm.call_module_function(
prog, pm.Job.GRADIENT,
# *args
geo, charge, mult, method, basis, orb_restricted,
# **kwargs
# molecule options
mol_options=mol_options,
# machine options
memory=memory, comment=comment, machine_options=machine_options,
# theory options
scf_options=scf_options, casscf_options=casscf_options,
corr_options=corr_options,
# generic options
gen_lines=gen_lines,
# job options
job_options=job_options, frozen_coordinates=(),
saddle=False)
# hessian input writers
def hessian_programs():
""" Constructs a list of program modules implementing
Hessian input writers.
"""
return pm.program_modules_with_function(pm.Job.HESSIAN)
def hessian(prog, geo, charge, mult, method, basis,
# molecule options
mol_options=(),
# machine options
memory=1, comment='', machine_options=(),
# theory options
orb_type='RU',
scf_options=(), casscf_options=(), corr_options=(),
# generic options
gen_lines=None,
# job options
job_options=()):
""" Writes an input file string for a Hessian calculation
for a specified electronic structure program.
:param prog: electronic structure program to use as a backend
:type prog: str
:param geo: cartesian or z-matrix geometry
:type geo: tuple
:param charge: molecular charge
:type charge: int
:param mult: spin multiplicity
:type mult: int
:param method: electronic structure method
:type method: str
:param basis: basis set
:type basis: str
:param mol_options: options for the molecule block
:type mol_options: tuple[str]
        :param memory: memory in GB
:type memory: int
:param comment: a comment string to be placed at the top of the file
:type comment: str
:param machine_options: machine directives
(num procs, num threads, etc.)
:type machine_options: tuple[str]
:param orb_type: 'R' indicates restricted orbitals, 'U' indicates
unrestricted orbitals; can also be 'RR', 'RU', or 'UU'.
Where first (second) character sets R/U for singlets (multiplets)
:type orb_type: str
:param scf_options: scf method directives
:type scf_options: tuple[str]
:param casscf_options: casscf method directives
:type casscf_options: tuple[str]
:param corr_options: correlation method directives
:type corr_options: tuple[str]
:param gen_lines: generic lines for the input file
:type gen_lines: dict[idx:str]
"""
prog, method, basis, orb_restricted = _process_theory_specifications(
prog, method, basis, mult, orb_type)
return pm.call_module_function(
prog, pm.Job.HESSIAN,
# *args
geo, charge, mult, method, basis, orb_restricted,
# **kwargs
# molecule options
mol_options=mol_options,
# machine options
memory=memory, comment=comment, machine_options=machine_options,
# theory options
scf_options=scf_options, casscf_options=casscf_options,
corr_options=corr_options,
# generic options
gen_lines=gen_lines,
# job options
job_options=job_options, frozen_coordinates=(),
saddle=False)
# vpt2 input writers
def vpt2_programs():
""" Constructs a list of program modules implementing
2nd-order vibrational perturbation theory (VPT2) input writers.
"""
return pm.program_modules_with_function(pm.Job.VPT2)
def vpt2(prog, geo, charge, mult, method, basis,
# molecule options
mol_options=(),
# machine options
memory=1, comment='', machine_options=(),
# theory options
orb_type='RU',
scf_options=(), casscf_options=(), corr_options=(),
# generic options
gen_lines=None,
# job options
job_options=()):
""" Writes an input file string for a
2nd-order vibrational perturbation theory calculation
for a specified electronic structure program.
:param prog: electronic structure program to use as a backend
:type prog: str
:param geo: cartesian or z-matrix geometry
:type geo: tuple
:param charge: molecular charge
:type charge: int
:param mult: spin multiplicity
:type mult: int
:param method: electronic structure method
:type method: str
:param basis: basis set
:type basis: str
:param mol_options: options for the molecule block
:type mol_options: tuple[str]
    :param memory: memory in GB
:type memory: int
:param comment: a comment string to be placed at the top of the file
:type comment: str
:param machine_options: machine directives
(num procs, num threads, etc.)
:type machine_options: tuple[str]
    :param orb_type: 'R' indicates restricted orbitals, 'U' indicates
        unrestricted orbitals; can also be 'RR', 'RU', or 'UU', where the
        first (second) character sets R/U for singlets (multiplets)
:type orb_type: str
:param scf_options: scf method directives
:type scf_options: tuple[str]
:param casscf_options: casscf method directives
:type casscf_options: tuple[str]
:param corr_options: correlation method directives
:type corr_options: tuple[str]
    :param gen_lines: generic lines for the input file
    :type gen_lines: dict[idx:str]
    :param job_options: additional job directives
    :type job_options: tuple[str]
"""
prog, method, basis, orb_restricted = _process_theory_specifications(
prog, method, basis, mult, orb_type)
return pm.call_module_function(
prog, pm.Job.VPT2,
# *args
geo, charge, mult, method, basis, orb_restricted,
# **kwargs
# molecule options
mol_options=mol_options,
# machine options
memory=memory, comment=comment, machine_options=machine_options,
# theory options
scf_options=scf_options, casscf_options=casscf_options,
corr_options=corr_options,
# generic options
gen_lines=gen_lines,
# job options
job_options=job_options, frozen_coordinates=(),
saddle=False)
# molec_properties input writers
def molecular_properties_programs():
""" Constructs a list of program modules implementing
molecular properties, including the
dipole moment and polarizability, input writers.
"""
return pm.program_modules_with_function(pm.Job.MOLPROP)
def molecular_properties(prog, geo, charge, mult, method, basis,
# molecule options
mol_options=(),
# machine options
memory=1, comment='', machine_options=(),
# theory options
orb_type='RU',
scf_options=(), casscf_options=(), corr_options=(),
# generic options
gen_lines=None,
# job options
job_options=()):
""" Writes an input file string for molecular properties calculations,
including the dipole moment and polarizability,
for a specified electronic structure program.
:param prog: electronic structure program to use as a backend
:type prog: str
:param geo: cartesian or z-matrix geometry
:type geo: tuple
:param charge: molecular charge
:type charge: int
:param mult: spin multiplicity
:type mult: int
:param method: electronic structure method
:type method: str
:param basis: basis set
:type basis: str
:param mol_options: options for the molecule block
:type mol_options: tuple[str]
    :param memory: memory in GB
:type memory: int
:param comment: a comment string to be placed at the top of the file
:type comment: str
:param machine_options: machine directives
(num procs, num threads, etc.)
:type machine_options: tuple[str]
    :param orb_type: 'R' indicates restricted orbitals, 'U' indicates
        unrestricted orbitals; can also be 'RR', 'RU', or 'UU', where the
        first (second) character sets R/U for singlets (multiplets)
:type orb_type: str
:param scf_options: scf method directives
:type scf_options: tuple[str]
:param casscf_options: casscf method directives
:type casscf_options: tuple[str]
:param corr_options: correlation method directives
:type corr_options: tuple[str]
    :param gen_lines: generic lines for the input file
    :type gen_lines: dict[idx:str]
    :param job_options: additional job directives
    :type job_options: tuple[str]
"""
prog, method, basis, orb_restricted = _process_theory_specifications(
prog, method, basis, mult, orb_type)
return pm.call_module_function(
prog, pm.Job.MOLPROP,
# *args
geo, charge, mult, method, basis, orb_restricted,
# **kwargs
# molecule options
mol_options=mol_options,
# machine options
memory=memory, comment=comment, machine_options=machine_options,
# theory options
scf_options=scf_options, casscf_options=casscf_options,
corr_options=corr_options,
# generic options
gen_lines=gen_lines,
# job options
job_options=job_options, frozen_coordinates=(),
saddle=False)
# irc input writers
def irc_programs():
""" Constructs a list of program modules implementing
Intrinsic Reaction Coordinate input writers.
"""
return pm.program_modules_with_function(pm.Job.IRC)
def irc(prog, geo, charge, mult, method, basis,
# molecule options
mol_options=(),
# machine options
memory=1, comment='', machine_options=(),
# theory options
orb_type='RU',
scf_options=(), casscf_options=(), corr_options=(),
# generic options
gen_lines=None,
# job options
job_options=(), frozen_coordinates=()):
""" Writes an input file string for an Intrinsic Reaction Coordinate
calculation for a specified electronic structure program.
:param prog: electronic structure program to use as a backend
:type prog: str
:param geo: cartesian or z-matrix geometry
:type geo: tuple
:param charge: molecular charge
:type charge: int
:param mult: spin multiplicity
:type mult: int
:param method: electronic structure method
:type method: str
:param basis: basis set
:type basis: str
:param mol_options: options for the molecule block
:type mol_options: tuple[str]
    :param memory: memory in GB
:type memory: int
:param comment: a comment string to be placed at the top of the file
:type comment: str
:param machine_options: machine directives
(num procs, num threads, etc.)
:type machine_options: tuple[str]
    :param orb_type: 'R' indicates restricted orbitals, 'U' indicates
        unrestricted orbitals; can also be 'RR', 'RU', or 'UU', where the
        first (second) character sets R/U for singlets (multiplets)
:type orb_type: str
:param scf_options: scf method directives
:type scf_options: tuple[str]
:param casscf_options: casscf method directives
:type casscf_options: tuple[str]
:param corr_options: correlation method directives
:type corr_options: tuple[str]
:param job_options: geometry optimization routine directives
:type job_options: tuple[str]
:param frozen_coordinates: only with z-matrix geometries; list of
coordinate names to freeze
    :type frozen_coordinates: tuple[str]
:param gen_lines: generic lines for the input file
:type gen_lines: dict[idx:str]
"""
prog, method, basis, orb_restricted = _process_theory_specifications(
prog, method, basis, mult, orb_type)
return pm.call_module_function(
prog, pm.Job.IRC,
# *args
geo, charge, mult, method, basis, orb_restricted,
# **kwargs
# molecule options
mol_options=mol_options,
# machine options
memory=memory, comment=comment, machine_options=machine_options,
# theory options
scf_options=scf_options, casscf_options=casscf_options,
corr_options=corr_options,
# generic options
gen_lines=gen_lines,
# job options
job_options=job_options, frozen_coordinates=frozen_coordinates,
saddle=True)
# optimization input writers
def optimization_programs():
""" Constructs a list of program modules implementing
geometry optimization input writers.
"""
return pm.program_modules_with_function(pm.Job.OPTIMIZATION)
def optimization(prog, geo, charge, mult, method, basis,
# molecule options
mol_options=(),
# machine options
memory=1, comment='', machine_options=(),
# theory options
orb_type='RU',
scf_options=(), casscf_options=(), corr_options=(),
# generic options
gen_lines=None,
# job options
job_options=(), frozen_coordinates=(), saddle=False):
""" Writes an input file string for a geometry optimization
calculation for a specified electronic structure program.
:param prog: electronic structure program to use as a backend
:type prog: str
:param geo: cartesian or z-matrix geometry
:type geo: tuple
:param charge: molecular charge
:type charge: int
:param mult: spin multiplicity
:type mult: int
:param method: electronic structure method
:type method: str
:param basis: basis set
:type basis: str
:param mol_options: options for the molecule block
:type mol_options: tuple[str]
    :param memory: memory in GB
:type memory: int
:param comment: a comment string to be placed at the top of the file
:type comment: str
:param machine_options: machine directives
(num procs, num threads, etc.)
:type machine_options: tuple[str]
    :param orb_type: 'R' indicates restricted orbitals, 'U' indicates
        unrestricted orbitals; can also be 'RR', 'RU', or 'UU', where the
        first (second) character sets R/U for singlets (multiplets)
:type orb_type: str
:param scf_options: scf method directives
:type scf_options: tuple[str]
:param casscf_options: casscf method directives
:type casscf_options: tuple[str]
:param corr_options: correlation method directives
:type corr_options: tuple[str]
:param job_options: geometry optimization routine directives
:type job_options: tuple[str]
:param frozen_coordinates: only with z-matrix geometries; list of
coordinate names to freeze
    :type frozen_coordinates: tuple[str]
:param saddle: optimize a saddle point?
:type saddle: bool
:param gen_lines: generic lines for the input file
:type gen_lines: dict[idx:str]
"""
prog, method, basis, orb_restricted = _process_theory_specifications(
prog, method, basis, mult, orb_type)
return pm.call_module_function(
prog, pm.Job.OPTIMIZATION,
# *args
geo, charge, mult, method, basis, orb_restricted,
# **kwargs
# molecule options
mol_options=mol_options,
# machine options
memory=memory, comment=comment, machine_options=machine_options,
# theory options
scf_options=scf_options, casscf_options=casscf_options,
corr_options=corr_options,
# generic options
gen_lines=gen_lines,
# job options
job_options=job_options, frozen_coordinates=frozen_coordinates,
saddle=saddle)
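# Usage sketch (illustrative only; the program/method/basis strings and the
# ``geo`` object below are placeholders, not guaranteed to be supported by
# every backend module):
#
#     inp_str = optimization(
#         prog='psi4', geo=geo, charge=0, mult=1,
#         method='ccsd(t)', basis='cc-pvdz',
#         orb_type='RU', memory=8, saddle=False)
#
# The returned ``inp_str`` is the complete input-file string assembled by
# pm.call_module_function for the chosen backend.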
def _process_theory_specifications(prog, method, basis, mult, orb_type):
""" Process the theory method including the orbital type conversion.
:param prog: electronic structure program to use as a backend
:type prog: str
:param method: electronic structure method
:type method: str
:param basis: basis set
:type basis: str
:param mult: spin multiplicity
:type mult: int
    :param orb_type: 'R' indicates restricted orbitals, 'U' indicates
        unrestricted orbitals; can also be 'RR', 'RU', or 'UU', where the
        first (second) character sets R/U for singlets (multiplets)
    :type orb_type: str
    :rtype: (str, str, str, bool)
"""
assert par.is_program(prog)
# determine the orbital restriction
singlet = (mult == 1)
if len(orb_type) == 2:
orb_type = orb_type[0] if singlet else orb_type[1]
assert orb_type in ('R', 'U')
orb_restricted = (orb_type == 'R')
# for non-standard DFT/Basis, the user can input whatever they want
if not par.Method.is_nonstandard_dft(method):
assert par.is_program_method(prog, method)
assert par.is_program_method_orbital_type(
prog, method, singlet, orb_type)
prog = par.standard_case(prog)
method = par.standard_case(method)
if not par.Basis.is_nonstandard_basis(basis):
assert par.is_program_basis(prog, basis)
basis = par.standard_case(basis)
return prog, method, basis, orb_restricted
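# Worked example of the orbital-type resolution above (follows directly from
# the code): with orb_type='RU' and mult=1 (a singlet), the first character
# is used, giving orb_type='R' and orb_restricted=True; with mult=3 (a
# multiplet), the second character is used, giving orb_type='U' and
# orb_restricted=False.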
| 37.910859 | 77 | 0.630114 | 2,733 | 23,391 | 5.251372 | 0.064764 | 0.042921 | 0.035326 | 0.051561 | 0.907539 | 0.89869 | 0.888796 | 0.883013 | 0.854097 | 0.850404 | 0 | 0.00115 | 0.293788 | 23,391 | 616 | 78 | 37.972403 | 0.867668 | 0.532427 | 0 | 0.678788 | 0 | 0 | 0.002003 | 0 | 0 | 0 | 0 | 0 | 0.030303 | 1 | 0.090909 | false | 0 | 0.012121 | 0 | 0.193939 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
e8173eed17c5a231e40693e845ff24899cb5d249 | 4,202 | py | Python | drop-linux/check_keys.py | ddesmond/clarisse-drop | 5b83f693ff1c950c473e6bdd5c0d6c0cf798e630 | [
"MIT"
] | 5 | 2018-11-28T19:23:50.000Z | 2022-03-07T03:59:43.000Z | drop-linux/check_keys.py | ddesmond/clarisse-drop | 5b83f693ff1c950c473e6bdd5c0d6c0cf798e630 | [
"MIT"
] | 1 | 2018-11-29T08:58:25.000Z | 2018-11-29T08:58:25.000Z | drop-linux/check_keys.py | ddesmond/clarisse-drop | 5b83f693ff1c950c473e6bdd5c0d6c0cf798e630 | [
"MIT"
] | 1 | 2020-07-04T18:34:23.000Z | 2020-07-04T18:34:23.000Z | import os
from cryptography.fernet import Fernet
path_to_keyfile = r'keyfile.key'
datasample = '''#Isotropix_Clarisse_Clipboard_Serialization 0.94
Context "context" {
copy_from "project://scene/context"
}
'''
key_store = None
if os.path.isfile(path_to_keyfile):
    print("-------------------------------------------------------")
    with open(path_to_keyfile, 'r') as f:
        # strip a possible trailing newline so the key decodes cleanly
        key_store = f.readline().strip()
    print("-------------------------------------------------------")
else:
    print("-------------------------------------------------------")
    print("no keys")
data = 'gAAAAABb_meb6KXRjpX51DVfCWi8rWnImT9itp6GI11fmD6ZIS1xfgmBHeMmX0Jq87PqN0eAbhd3ycxmrMQEenL09E-gZFOetThG6_U16hDhN7KLtX2wux2R3d3tMi7H8BHhuzzximRlCTwzH8OuCVF8xAijt2W7GBRhzHXloiyVoWJqUfainYHdoscij5ajKbFib75nLF0N6_VIxb98lsxUYC5WaVVH4u-nN9UjmHIUMt9AdEBQW5Q='
data2 = 'gAAAAABb_mhZsLF3Ws6ZWWA_-PS2g7n66A2l-UpWtFc4neHLb834X1zW084HpTH7GvMU53nF40LK5cZ29EdIHFrs5KItxBAA0DbMCbYi9dj4Tn2H-E6xDGNxQyHtt7Ipolg9lnasOZxuzYdXCFyt_JoaZp_b8h_6MmRtkDSHsHQ0dbPYz5I1TUghIYMxpie9Omhtjk0zHMgK3YAIyKCpbUHsR3NEcPZ4vDl1iJ-qTynwbR_LCffRM6sf2eIXExulyzwSvIri-6mw1RYItI4rH7_8D0VUirGZ0Uvu5EIk7tn0SuH_KEww1N-Q0GRee6Pw9VG9XUCKnpw1XuZTpWpQ4u-liMniZaJO8FrExwz7UQTrYMxeqew1-CdutitoCDGaQjjeedLYsz4z9kJricS1Vco8NsVeTlOeYbtia5ECm-Njwcx8mNPWW1r51thteEYa5SWQLsDJDe7qTicghih-tHyLZpjb28iMChSlV4uHKOHhy3Egf8NxL1cxZUR41v46AkWF0UTRf3ssAEJ8Lcn0j6yhvR9kYSuGdziv-XsaEllcauRSIrt3w4hERFBv4N6raH4e_QVx-ymryNp2yMG21HWmpWXkhNeyv8KvVt5lqs79ZdNb698vJivsEJz1pAeRLCOoUAcu6s7eFv1iTtzFrkE8ZT9YPHLyqv-3myw8oadAoOtWl7mRPmf7BY26mvmDw1NB8waLvNHK-ViR3SN8uXM_z5Tv1YMb9EL4aL1J5QcUfM2sM9Hj4FVmMv1V4zuuzfA9WXh3IhwMfApW7kFsyfCcdgrna7SgvcWncGMe8z6F0KojxX455MMsjLIoG1r3zeoPaF4ShevFoHrM00anqH0ZL21Z_0aLc-bve1MA2x4wCo4CNqF0o_ttE_xO3Wo0A-7afQdNtAHCIqsUrNugOY7YhsEtrMrHlJz7T4UugwQ57KRJv_ThD7zUC9CCGp1aFVSI1yV76Jzcg0BKosxyIglHnfKbubYP8m3fOUUbDDluQSDA7HDKxq1MoMyoKpggLuUgd8jldmW6pVBuP5V4NSY8FDQPMgT0bu9FBq4XL9e25Bqrx8tHwHUa6p9u5SoM9esNeDhuPVBcsW7-97Sz481AhEovej_ZeShocOcpIohF8Eta9ZNzAFQzh5r5yRuVLR_pQzFs7PCBLizWPcZbq-Ys7mYDkfReh6aCcc3EW5GFUe5styoTIL0qR0W_7zvw9vwLwx9sZzz3jDUH1dPTA82d8gnu1NqAsv4ckCf3jMFv2vpofHuxrT17LMGJYkUO0YeF5GAfoxAx6z2Zf7IgOqA4zQLdyvQvtNSRWqtXAQkdh6u26Zgv6dKADKU3laPmEYNQkRXcyVlmHWHUTpy0AaugY3krxIf5AMVOTUQkCeC3qwTcfBsYV3zrVWhdVRLyfKXlFGo-hLljQGtLNwJv_CnmemytmdPAkKjYF4w-wHDcdb7SLcqYwKMoT6LTO57-71JAf0GBPMUk'
d3 ='gAAAAABb_ml66IPEjqaTLtZP58i8b39GNFKUNqq3Vd4BUTWBEiDkUro7EBtf68F9lHGEujp0bCN5UBkOnz46qrcYJGw0KP5skBZa0ck-kA2FWB5YFzuc0Do='
d4=b'gAAAAABb_mm5oL-IEItGjigiD-LG3xLpef7f4VUXne0msKGZSl7lNI6OM7MUgUt8oefk4L3O81qYxJDU6Cd7hASYBSAXYl3BePtcUXChGYlcA9kuoc3vu0yohGcLd6eJV3thk2DfVGDFzpPeuj6mxWWDQLJFTQeAqdFntUJtI1VP9AGknje79atLbYWLtfJe5TNOiiDR-w38DAE6SnqAPsGPcJOX1yoQ2Nq6-stIu9WqAjcSa7OPH7ezI-5kWjvOBhSalGJUtyXOqm4_CHf9RoTtxTevvEk7n8d5I0P_NV5h1VUvuMBcJ2czSkgeTNgtAoa6pmn2YnkZvGQfA1KeQg6Utuq6o7P_KAuSAJ3QgCl52aqzAiUsy1cSIPs5J9tGb31FSuFciUuzlik_aJqVUqQ_wv5zc8UYWUCfVKIKxq18tHzL0yQ6e4VWjzDSISsdIARnfNHXvCtSaxTUR2sXfU6Vfa7HDr3GjB35ta-lHb3G5X3Y1r7weqw382lPwBsZvNSKeip2QZjiKQJq4rUsTinDpUbwxyOpeWhDXV4pSweS_SDwp_zdsX8W2cxFu15YrxlTF6B2OMdU0uRu2_qXlLjq7ykr3bNkoKc8EgQGSWxYdiZHg8kqfJMj_Th479Uv35GtVNi2sGw5UBNcWvACSQiBkbh6slv0HIZP4YNyEhdFqokYNdluMj-Tz3MWdeWy_jwUzx5rXqYvMRSaG--P0E9lOwd8--ww5uVfUX1cO-uUXUbgPWxBBxNmL4aUGu6URmoDbqBma4GoJ24Db87u4u1HmVivFt_auO0c2_jDCnSeUYn56j30DLxR9aP_ZDl-YENzYrd1sVQ2srvQAFT-8jDraR3yVadTSg6z-ZqleQE_KVyiMjFII8jJ_0sVOUYCgNUs5dFgidYoTSGwn9IUKQxV66IQeNsHH_TASsRyjAXQPCe5KuD9W78ZrlbYZq-f2a85S1LVudN0yNaib-NBhlHDnttYVnAYCi6HlrEWoReiqO4tJPXydxN0rqmKF9QsuGd5z591jsV17oZHjRAPuvfsHbjzUXpGCjQawKDkcy8zQyvIRO9lf4WBdnwtMvdQc57SE98iVsvBw9799WedFnmM2ccR98ouFTr4f3hxVCaFZantvdHyqWwQmnT1eptrc0JmQqXKu9CXrJk3zqPZ9UzmmEeelbrm9SKcxQuW0iIt7hlLbWfdD2lZ0ZyB1jD_7iCaKkQvIokK6aMKdZHrtlpKa7aWaMTlj8XHI1ZGhO6xFZWrpY_f3CtK6YYr5kIes4w09kyc273EwQlRdJS6kGCpstVmFVUqbisLLfy9AV5y4yxpphYtiefOM6LSzFZ5Wy0ijp1bYdMPEfuEB6R4B6i3GQom51DYaD_ftBDr6NgQxMWb14BQ6ONOFalebJghsUR0khb8bMS9exS1PVCmeji6d5rFgRbe9dxGQiFzb88KrGr8i0iSvcSSG-ABnOhyBAT8VRApkiKJhfAanAeSSXCgjMj10BOhMZtfriHlukd-ttS3bUiJjco='
try:
    cipher_suite = Fernet(key_store)
    # print(datasample)
    print("keystore is:", key_store)
    # cipher_text = cipher_suite.encrypt(data)
    plain_text = cipher_suite.decrypt(d4)
    print(plain_text)
except Exception:
    print("!! NO KEY. Generate keys first to access DB.")
| 107.74359 | 1,534 | 0.905045 | 180 | 4,202 | 20.788889 | 0.738889 | 0.00481 | 0.010422 | 0.007483 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.135643 | 0.024512 | 4,202 | 38 | 1,535 | 110.578947 | 0.777263 | 0.013327 | 0 | 0.111111 | 0 | 0.074074 | 0.901232 | 0.872495 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.074074 | null | null | 0.259259 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
1c756a4993961cd443de95710343b4f19217a317 | 213 | py | Python | librespot/audio/decrypt/NoopAudioDecrypt.py | JeffmeisterJ/librespot-python | 0e0e1db65aa40262bd13479b97f81ae8c29ae049 | [
"Apache-2.0"
] | 1 | 2021-12-15T22:44:46.000Z | 2021-12-15T22:44:46.000Z | librespot/audio/decrypt/NoopAudioDecrypt.py | JeffmeisterJ/librespot-python | 0e0e1db65aa40262bd13479b97f81ae8c29ae049 | [
"Apache-2.0"
] | 12 | 2021-10-06T02:18:44.000Z | 2022-02-07T02:16:47.000Z | librespot/audio/decrypt/NoopAudioDecrypt.py | JeffmeisterJ/librespot-python | 0e0e1db65aa40262bd13479b97f81ae8c29ae049 | [
"Apache-2.0"
] | null | null | null | from librespot.audio.decrypt import AudioDecrypt
class NoopAudioDecrypt(AudioDecrypt):
    """ A no-op decrypter: chunks are left untouched and the time spent
    decrypting is reported as zero. """
    def decrypt_chunk(self, chunk_index: int, buffer: bytes):
        pass
    def decrypt_time_ms(self):
        return 0
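# Usage sketch (illustrative): because this decrypter is a no-op,
# decrypt_chunk() discards its arguments and decrypt_time_ms() always
# reports zero time spent decrypting.
#
#     NoopAudioDecrypt().decrypt_chunk(0, b'\x00\x01')  # returns None
#     NoopAudioDecrypt().decrypt_time_ms()              # returns 0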
| 21.3 | 61 | 0.723005 | 26 | 213 | 5.769231 | 0.769231 | 0.133333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005917 | 0.206573 | 213 | 9 | 62 | 23.666667 | 0.881657 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0.166667 | 0.166667 | 0.166667 | 0.833333 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 6 |
c714e0022037c798838c5bbe39eab40994360617 | 197 | py | Python | first-homework.py | spaceghst007/astro-119 | bb9aa0c27781774ffa9dfbeefcd5267934eaaece | [
"MIT"
] | null | null | null | first-homework.py | spaceghst007/astro-119 | bb9aa0c27781774ffa9dfbeefcd5267934eaaece | [
"MIT"
] | 9 | 2021-09-23T18:54:54.000Z | 2021-12-09T19:56:08.000Z | first-homework.py | spaceghst007/astro-119 | bb9aa0c27781774ffa9dfbeefcd5267934eaaece | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# this program will write
# my name and preferred pronouns
print("My name is Jesse Runkle") #print full name
print("Preferred pronouns are he/him") #print preferred pronouns
| 24.625 | 64 | 0.766497 | 31 | 197 | 4.870968 | 0.677419 | 0.337748 | 0.291391 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005917 | 0.142132 | 197 | 7 | 65 | 28.142857 | 0.887574 | 0.573604 | 0 | 0 | 0 | 0 | 0.658228 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 6 |
c78ce57daed2424afafb81863d8932099fe27dd8 | 2,283 | py | Python | src/Algs/factories/args_generators/args_generators.py | EDA-Asp/Algebras_of_Multioperations | fe41831d06dd80c4191dd25c0c5d6901a8b860ee | [
"MIT"
] | null | null | null | src/Algs/factories/args_generators/args_generators.py | EDA-Asp/Algebras_of_Multioperations | fe41831d06dd80c4191dd25c0c5d6901a8b860ee | [
"MIT"
] | null | null | null | src/Algs/factories/args_generators/args_generators.py | EDA-Asp/Algebras_of_Multioperations | fe41831d06dd80c4191dd25c0c5d6901a8b860ee | [
"MIT"
] | null | null | null | from itertools import product, chain, combinations
def gen_intersection_and_union_binary_new(substitution_new):
return combinations(substitution_new, 2)
def gen_intersection_and_union_binary_cross_1(substitution_new, substitution_old):
for x in substitution_new:
for y in substitution_old:
yield (x, y)
def binary_intersection_and_union_gen_args(substitution_new, substitution_old):
    """ Yields each unordered argument pair involving at least one element of
    substitution_new (order is irrelevant for the commutative intersection
    and union operations), then merges the new elements into the old set. """
    it = chain(gen_intersection_and_union_binary_new(substitution_new.copy()),
               gen_intersection_and_union_binary_cross_1(substitution_new.copy(), substitution_old.copy()))
    substitution_old.update(substitution_new)
    substitution_new.clear()
    return it
def gen_substitution_new(substitution_new, rpt):
return product(substitution_new, repeat=rpt)
def gen_substitution_binary_cross_1(substitution_new, substitution_old):
for x in substitution_new:
for y in substitution_old:
for z in substitution_old:
yield (x, y, z)
yield (y, x, z)
yield (y, z, x)
def gen_substitution_binary_cross_2(substitution_new, substitution_old):
for x in substitution_new:
for y in substitution_new:
for z in substitution_old:
yield (z, x, y)
yield (x, z, y)
yield (x, y, z)
def binary_superposition_gen_args(substitution_new, substitution_old):
    """ Yields every ordered argument triple over the combined sets that
    involves at least one element of substitution_new, then merges the new
    elements into the old set. """
    it = chain(gen_substitution_new(substitution_new.copy(), 3),
               gen_substitution_binary_cross_1(substitution_new.copy(), substitution_old.copy()),
               gen_substitution_binary_cross_2(substitution_new.copy(), substitution_old.copy()))
    substitution_old.update(substitution_new)
    substitution_new.clear()
    return it
def gen_substitution_unary_cross_1(substitution_new, substitution_old):
for x in substitution_new:
for y in substitution_old:
yield (x, y)
yield (y, x)
def unary_superposition_gen_args(substitution_new, substitution_old):
    """ Yields every ordered argument pair that involves at least one element
    of substitution_new, then merges the new elements into the old set. """
    it = chain(gen_substitution_new(substitution_new.copy(), 2),
               gen_substitution_unary_cross_1(substitution_new.copy(), substitution_old.copy()))
    substitution_old.update(substitution_new)
    substitution_new.clear()
    return it
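# Example of the new/old bookkeeping shared by the generators above
# (illustrative; the single-letter elements are placeholders):
#
#     new, old = {'a'}, {'b'}
#     args = list(unary_superposition_gen_args(new, old))
#     # args == [('a', 'a'), ('a', 'b'), ('b', 'a')]
#     # side effect: old == {'a', 'b'} and new == set()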
| 33.573529 | 107 | 0.713973 | 292 | 2,283 | 5.202055 | 0.119863 | 0.315997 | 0.231073 | 0.138249 | 0.85846 | 0.84266 | 0.80316 | 0.731402 | 0.671494 | 0.573404 | 0 | 0.006108 | 0.211126 | 2,283 | 67 | 108 | 34.074627 | 0.837313 | 0 | 0 | 0.468085 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.191489 | false | 0 | 0.021277 | 0.042553 | 0.319149 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
c79c21887337236ed2692b6d19ab1ee7df1eb9d2 | 18,805 | py | Python | model/cut.py | jingkunchen/MS-CMR_miccai_2019 | ce4b67e017c0891533efadbdce4947b1c4821d6c | [
"MIT"
] | 14 | 2019-08-29T07:34:29.000Z | 2021-06-07T13:16:39.000Z | model/cut.py | jingkunchen/MS-CMR_miccai_2019 | ce4b67e017c0891533efadbdce4947b1c4821d6c | [
"MIT"
] | 2 | 2020-11-03T05:07:43.000Z | 2021-05-07T12:03:24.000Z | model/cut.py | jingkunchen/MS-CMR_miccai_2019 | ce4b67e017c0891533efadbdce4947b1c4821d6c | [
"MIT"
] | 3 | 2019-09-12T07:04:08.000Z | 2021-10-29T18:50:42.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import numpy as np
import SimpleITK as sitk
# NOTE: scipy.misc.imsave is deprecated and removed in newer SciPy
# releases, so this script requires an older SciPy version.
import scipy.misc
from skimage.transform import resize
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import scipy.ndimage
import cv2
import time
from decimal import Decimal
import skimage.io as io
data_dir = '/Users/chenjingkun/Documents/data/C0LET2_nii45_for_challenge19/c0t2lge/'
thresh = 1
rows = 224
cols = 224
xmin = 1
xmax = 1
ymin = 1
ymax = 1
xlenmin = 1
ylenmin = 1
img_count = 0
def show_img(data):
for i in range(data.shape[0]):
io.imshow(data[i, :, :], cmap='gray')
# io.imshow(data[:,:], cmap = 'gray')
io.show()
# label transform, 500-->1, 200-->2, 600-->3
###### LGE
LGE_data_1ch = []
LGE_gt_1ch = []
img_dir = '/Users/chenjingkun/Documents/data/C0LET2_nii45_for_challenge19/lge_images/'
if not os.path.exists(img_dir):
os.makedirs(img_dir)
gt_dir_1 = '/Users/chenjingkun/Documents/data/C0LET2_nii45_for_challenge19/lgegt/'
lge_list = []
for pp in range(1, 4):
data_name = data_dir + 'patient' + str(pp) + '_LGE.nii.gz'
gt_name = gt_dir_1 + 'patient' + str(pp) + '_LGE_manual.nii.gz'
img = sitk.ReadImage(os.path.join(gt_name))
data_array = sitk.GetArrayFromImage(sitk.ReadImage(
os.path.join(data_name)))
gt_array = sitk.GetArrayFromImage(sitk.ReadImage(os.path.join(gt_name)))
    img_count += gt_array.shape[0]
print(np.shape(data_array))
# new_data_array = 0
# count = 0
# for image in data_array:
# new_image = resize(image, (480,480), anti_aliasing=False)
# print()
# if count == 0:
# new_data_array = new_image[np.newaxis,:,:]
# else:
# new_data_array = np.concatenate((new_data_array, new_image[np.newaxis,:,:]), axis=0)
# count += 1
# data_array = new_data_array
# new_gt_array = 0
# count = 0
# for gt in gt_array:
# new_gt = resize(gt, (480,480), anti_aliasing=False)
# for i in range(480):
# for j in range(480):
# if new_gt[i][j] > 0.4:
# print(new_gt[i][j])
# if count == 0:
# new_gt_array = new_gt[np.newaxis,:,:]
# else:
# new_gt_array = np.concatenate((new_gt_array, new_gt[np.newaxis,:,:]), axis=0)
# count += 1
# gt_array = new_gt_array
x = []
y = []
print("idx:", pp)
for image in gt_array:
for i in range(np.shape(gt_array)[1]):
for j in range(np.shape(gt_array)[2]):
if image[i][j] != 0:
                    if i < 30 or j < 30:
                        print("label_error:", pp, i, j, image[i][j])
else:
x.append(i)
y.append(j)
    print(min(x), max(x),
          max(x) - min(x), round(min(x) / np.shape(gt_array)[1], 2),
          round(max(x) / np.shape(gt_array)[1], 2))
    print(min(y), max(y),
          max(y) - min(y), round(min(y) / np.shape(gt_array)[1], 2),
          round(max(y) / np.shape(gt_array)[1], 2))
# if xmin > round(min(x)/np.shape(gt_array)[1],2):
# xmin = round(min(x)/np.shape(gt_array)[1],2)
# if xmax > round(max(x)/np.shape(gt_array)[1],2):
# xmax = round(max(x)/np.shape(gt_array)[1],2)
# if ymin > round(min(y)/np.shape(gt_array)[1],2):
# ymin = round(min(y)/np.shape(gt_array)[1],2)
# if ymax > round(max(y)/np.shape(gt_array)[1],2):
# ymax = round(max(y)/np.shape(gt_array)[1],2)
# if xlenmin > round(max(x)/np.shape(gt_array)[1],2)-round(min(x)/np.shape(gt_array)[1],2):
# xlenmin = round(max(x)/np.shape(gt_array)[1],2)-round(min(x)/np.shape(gt_array)[1],2)
# if ylenmin > round(max(y)/np.shape(gt_array)[1],2)-round(min(y)/np.shape(gt_array)[1],2):
# ylenmin = round(max(y)/np.shape(gt_array)[1],2)-round(min(y)/np.shape(gt_array)[1],2)
    if gt_array.shape[1] == 480 or gt_array.shape[1] == 512:
        # [136:360] is the centred 224-crop for 480-wide images; the same
        # window is also applied to 512-wide images (slightly off-centre)
        data_array = data_array[:, 136:360, 136:360]
        gt_array = gt_array[:, 136:360, 136:360]
else:
print("error:",gt_array.shape)
# show_img(gt_array)
mask = np.zeros(np.shape(data_array), dtype='float32')
mask[data_array >= thresh] = 1
mask[data_array < thresh] = 0
for iii in range(np.shape(data_array)[0]):
mask[iii, :, :] = scipy.ndimage.morphology.binary_fill_holes(
            mask[iii, :, :])  # fill the holes inside the foreground mask
data_array = data_array - np.mean(data_array[mask == 1])
data_array /= np.std(data_array[mask == 1])
rows_o = np.shape(data_array)[1]
cols_o = np.shape(data_array)[2]
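    # center-crop arithmetic, worked example (illustrative): for a 240x240
    # input and rows = cols = 224, the slice is int((240 - 224) / 2) = 8 up
    # to 8 + 224 = 232, i.e. [8:232] in both dimensions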
data_array_ = data_array[:,
int((rows_o - rows) /
2):int((rows_o - rows) / 2) + rows,
int((cols_o - cols) /
2):int((cols_o - cols) / 2) + cols]
gt_array_ = gt_array[:,
int((rows_o - rows) /
2):int((rows_o - rows) / 2) + rows,
int((cols_o - cols) / 2):int((cols_o - cols) / 2) +
cols]
mask = mask[:,
int((rows_o - rows) / 2):int((rows_o - rows) / 2) + rows,
int((cols_o - cols) / 2):int((cols_o - cols) / 2) + cols]
LGE_data_1ch.extend(np.float32(data_array_))
LGE_gt_1ch.extend(np.float32(gt_array_))
# for iii in range(np.shape(data_array)[0]):
# scipy.misc.imsave(img_dir+'mask_pat_'+str(pp)+'_'+str(iii)+'.png', mask[iii, ...])
# scipy.misc.imsave(img_dir+'img_pat_'+str(pp)+'_'+str(iii)+'.png', data_array_[iii, ...])
# scipy.misc.imsave(img_dir+'gt_pat_'+str(pp)+'_'+str(iii)+'.png', gt_array_[iii, ...])
#LGE_data_1ch = np.array(LGE_data_1ch)
#LGE_gt_1ch = np.array(LGE_gt_1ch)
LGE_data_1ch = np.asarray(LGE_data_1ch)
LGE_gt_1ch = np.asarray(LGE_gt_1ch)
LGE_gt_1ch[LGE_gt_1ch == 500] = 1
LGE_gt_1ch[LGE_gt_1ch == 200] = 2
LGE_gt_1ch[LGE_gt_1ch == 600] = 3
np.save('LGE_data_1ch.npy', LGE_data_1ch)
np.save('LGE_gt_1ch.npy', LGE_gt_1ch)
# print(xmin,xmax,ymin,ymax, xlenmin, ylenmin)
# xmin = 1
# xmax = 1
# ymin = 1
# ymax = 1
# xlenmin = 1
# ylenmin = 1
##### T2
T2_data_1ch = []
T2_gt_1ch = []
img_dir = '/Users/chenjingkun/Documents/data/C0LET2_nii45_for_challenge19/t2_images/'
if not os.path.exists(img_dir):
os.makedirs(img_dir)
gt_dir_1 = '/Users/chenjingkun/Documents/data/C0LET2_nii45_for_challenge19/t2gt/'
for pp in range(1, 31):
data_name = data_dir + 'patient' + str(pp) + '_T2.nii.gz'
gt_name = gt_dir_1 + 'patient' + str(pp) + '_T2_manual.nii.gz'
data_array = sitk.GetArrayFromImage(sitk.ReadImage(
os.path.join(data_name)))
gt_array = sitk.GetArrayFromImage(sitk.ReadImage(os.path.join(gt_name)))
data_array = np.nan_to_num(data_array, copy=True)
gt_array = np.nan_to_num(gt_array, copy=True)
print(gt_array.shape)
    img_count += gt_array.shape[0]
# count = 0
# for image in data_array:
# new_image = resize(image, (480,480), anti_aliasing=True)
# if count == 0:
# new_data_array = new_image[np.newaxis,:,:]
# else:
# new_data_array = np.concatenate((new_data_array, new_image[np.newaxis,:,:]), axis=0)
# count += 1
# data_array = new_data_array
# new_gt_array = 0
# count = 0
# for gt in gt_array:
# new_gt = resize(gt, (480,480), anti_aliasing=True)
# if count == 0:
# new_gt_array = new_gt[np.newaxis,:,:]
# else:
# new_gt_array = np.concatenate((new_gt_array, new_gt[np.newaxis,:,:]), axis=0)
# count += 1
# gt_array = new_gt_array.astype(int)
x = []
y = []
count = 0
print("idx:", pp)
for image in gt_array:
for i in range(np.shape(gt_array)[1]):
for j in range(np.shape(gt_array)[2]):
if image[i][j] != 0:
if j < 30 or i < 30:
# show_img(image.shape)
gt_array[count, 0:75, 0:50] = 0
else:
x.append(i)
y.append(j)
count += 1
print(min(x), max(x),
max(x) - min(x), round(min(x) / np.shape(gt_array)[1], 2),
round(max(x) / np.shape(gt_array)[1], 2))
print(min(y), max(y),
max(y) - min(y), round(min(y) / np.shape(gt_array)[1], 2),
round(max(y) / np.shape(gt_array)[1], 2))
    if (round(min(x) / np.shape(gt_array)[1], 2) < 0.2
            or round(min(y) / np.shape(gt_array)[1], 2) < 0.2):
        print("error: label region unexpectedly close to the image border")
        show_img(gt_array)
if int(gt_array.shape[1]) == 256:
data_array = data_array[:,16:240,16:240]
gt_array = gt_array[:,16:240,16:240]
elif gt_array.shape[1] == 288:
data_array = data_array[:,32:256,32:256]
gt_array = gt_array[:,32:256,32:256]
elif gt_array.shape[1] == 240:
data_array = data_array[:,8:232,8:232]
gt_array = gt_array[:,8:232,8:232]
elif gt_array.shape[1] == 224:
pass
else:
print("error:",gt_array.shape)
# if xmin > round(min(x)/np.shape(gt_array)[1],2):
# xmin = round(min(x)/np.shape(gt_array)[1],2)
# if xmax > round(max(x)/np.shape(gt_array)[1],2):
# xmax = round(max(x)/np.shape(gt_array)[1],2)
# if ymin > round(min(y)/np.shape(gt_array)[1],2):
# ymin = round(min(y)/np.shape(gt_array)[1],2)
# if ymax > round(max(y)/np.shape(gt_array)[1],2):
# ymax = round(max(y)/np.shape(gt_array)[1],2)
# if xlenmin > round(max(x)/np.shape(gt_array)[1],2)-round(min(x)/np.shape(gt_array)[1],2):
# xlenmin = round(max(x)/np.shape(gt_array)[1],2)-round(min(x)/np.shape(gt_array)[1],2)
# if ylenmin > round(max(y)/np.shape(gt_array)[1],2)-round(min(y)/np.shape(gt_array)[1],2):
# ylenmin = round(max(y)/np.shape(gt_array)[1],2)-round(min(y)/np.shape(gt_array)[1],2)
mask = np.zeros(np.shape(data_array), dtype='float32')
mask[data_array >= thresh] = 1
mask[data_array < thresh] = 0
# print("------------------")
# print("mask1:",data_array >= thresh)
# print("mask2:",data_array < thresh)
# print("------------------")
# time.sleep()
for iii in range(np.shape(data_array)[0]):
mask[iii, :, :] = scipy.ndimage.morphology.binary_fill_holes(
            mask[iii, :, :])  # fill the holes inside the foreground mask
data_array = data_array - np.mean(data_array[mask == 1])
data_array /= np.std(data_array[mask == 1])
rows_o = np.shape(data_array)[1]
cols_o = np.shape(data_array)[2]
data_array_ = data_array[:,
int((rows_o - rows) /
2):int((rows_o - rows) / 2) + rows,
int((cols_o - cols) /
2):int((cols_o - cols) / 2) + cols]
gt_array_ = gt_array[:,
int((rows_o - rows) /
2):int((rows_o - rows) / 2) + rows,
int((cols_o - cols) / 2):int((cols_o - cols) / 2) +
cols]
mask = mask[:,
int((rows_o - rows) / 2):int((rows_o - rows) / 2) + rows,
int((cols_o - cols) / 2):int((cols_o - cols) / 2) + cols]
print("np.max(data_array_):",np.max(data_array_))
T2_data_1ch.extend(np.float32(data_array_))
T2_gt_1ch.extend(np.float32(gt_array_))
for iii in range(np.shape(data_array)[0]):
scipy.misc.imsave(
img_dir + 'mask_pat_' + str(pp) + '_' + str(iii) + '.png',
mask[iii, ...])
scipy.misc.imsave(
img_dir + 'img_pat_' + str(pp) + '_' + str(iii) + '.png',
data_array_[iii, ...])
scipy.misc.imsave(
img_dir + 'gt_pat_' + str(pp) + '_' + str(iii) + '.png',
gt_array_[iii, ...])
#T2_data_1ch_ = np.zeros([np.shape(T2_data_1ch)[0], rows, cols])
#T2_gt_1ch_ = np.zeros([np.shape(T2_data_1ch)[0], rows, cols])
#for iii in range(0, np.shape(T2_data_1ch)[0]):
# T2_data_1ch_[iii, ...] = T2_data_1ch[iii]
# T2_gt_1ch_[iii, ...] = T2_gt_1ch[iii]
T2_data_1ch = np.asarray(T2_data_1ch)
T2_gt_1ch = np.asarray(T2_gt_1ch)
T2_gt_1ch[T2_gt_1ch == 500] = 1
T2_gt_1ch[T2_gt_1ch == 200] = 2
T2_gt_1ch[T2_gt_1ch == 600] = 3
np.save('T2_data_1ch.npy', T2_data_1ch)
np.save('T2_gt_1ch.npy', T2_gt_1ch)
# print(xmin,xmax,ymin,ymax, xlenmin, ylenmin)
# xmin = 1
# xmax = 1
# ymin = 1
# ymax = 1
# xlenmin = 1
# ylenmin = 1
#######C0
#
C0_data_1ch = []
C0_gt_1ch = []
img_dir = '/Users/chenjingkun/Documents/data/C0LET2_nii45_for_challenge19/c0_images/'
if not os.path.exists(img_dir):
os.makedirs(img_dir)
gt_dir_1 = '/Users/chenjingkun/Documents/data/C0LET2_nii45_for_challenge19/c0gt/'
for pp in range(1, 31):
data_name = data_dir + 'patient' + str(pp) + '_C0.nii.gz'
gt_name = gt_dir_1 + 'patient' + str(pp) + '_C0_manual.nii.gz'
data_array = sitk.GetArrayFromImage(sitk.ReadImage(
os.path.join(data_name)))
gt_array = sitk.GetArrayFromImage(sitk.ReadImage(os.path.join(gt_name)))
print(np.shape(data_array))
    img_count += gt_array.shape[0]
# new_data_array = 0
# count = 0
# for image in data_array:
# new_image = resize(image, (480,480), anti_aliasing=True)
# if count == 0:
# new_data_array = new_image[np.newaxis,:,:]
# else:
# new_data_array = np.concatenate((new_data_array, new_image[np.newaxis,:,:]), axis=0)
# count += 1
# data_array = new_data_array
# # show_img(new_data_array)
# new_gt_array = 0
# count = 0
# for gt in gt_array:
# new_gt = resize(gt, (480,480), anti_aliasing=True)
# if count == 0:
# new_gt_array = new_gt[np.newaxis,:,:]
# else:
# new_gt_array = np.concatenate((new_gt_array, new_gt[np.newaxis,:,:]), axis=0)
# count += 1
# gt_array = new_gt_array.astype(int)
x = []
y = []
for image in gt_array:
for i in range(np.shape(gt_array)[1]):
for j in range(np.shape(gt_array)[2]):
if image[i][j] != 0:
                    if i < 30 or j < 30:
                        print("label_error:", pp, image.shape)
else:
x.append(i)
y.append(j)
print("idx:", pp)
print(min(x), max(x),
max(x) - min(x), round(min(x) / np.shape(gt_array)[1], 2),
round(max(x) / np.shape(gt_array)[1], 2))
print(min(y), max(y),
max(y) - min(y), round(min(y) / np.shape(gt_array)[1], 2),
round(max(y) / np.shape(gt_array)[1], 2))
if gt_array.shape[1] == 320:
data_array = data_array[:,64:288,64:288]
gt_array = gt_array[:,64:288,64:288]
elif gt_array.shape[1] == 288:
data_array = data_array[:,32:256,32:256]
gt_array = gt_array[:,32:256,32:256]
elif gt_array.shape[1] == 240:
data_array = data_array[:,8:232,8:232]
gt_array = gt_array[:,8:232,8:232]
elif gt_array.shape[1] == 256:
data_array = data_array[:,16:240,16:240]
gt_array = gt_array[:,16:240,16:240]
elif gt_array.shape[1] == 224:
pass
else:
print("error:",gt_array.shape)
# if(round(min(x)/np.shape(gt_array)[1],2) < 0.2 or round(min(y)/np.shape(gt_array)[1],2)<0.2):
# show_img(gt_array)
# if xmin > round(min(x)/np.shape(gt_array)[1],2):
# xmin = round(min(x)/np.shape(gt_array)[1],2)
# if xmax > round(max(x)/np.shape(gt_array)[1],2):
# xmax = round(max(x)/np.shape(gt_array)[1],2)
# if ymin > round(min(y)/np.shape(gt_array)[1],2):
# ymin = round(min(y)/np.shape(gt_array)[1],2)
# if ymax > round(max(y)/np.shape(gt_array)[1],2):
# ymax = round(max(y)/np.shape(gt_array)[1],2)
# if xlenmin > round(max(x)/np.shape(gt_array)[1],2)-round(min(x)/np.shape(gt_array)[1],2):
# xlenmin = round(max(x)/np.shape(gt_array)[1],2)-round(min(x)/np.shape(gt_array)[1],2)
# if ylenmin > round(max(y)/np.shape(gt_array)[1],2)-round(min(y)/np.shape(gt_array)[1],2):
# ylenmin = round(max(y)/np.shape(gt_array)[1],2)-round(min(y)/np.shape(gt_array)[1],2)
mask = np.zeros(np.shape(data_array), dtype='float32')
mask[data_array >= thresh] = 1
mask[data_array < thresh] = 0
# print(data_array >= thresh)
for iii in range(np.shape(data_array)[0]):
mask[iii, :, :] = scipy.ndimage.morphology.binary_fill_holes(
            mask[iii, :, :])  # fill the holes inside the foreground mask
data_array = data_array - np.mean(data_array[mask == 1])
data_array /= np.std(data_array[mask == 1])
rows_o = np.shape(data_array)[1]
cols_o = np.shape(data_array)[2]
data_array_ = data_array[:,
int((rows_o - rows) /
2):int((rows_o - rows) / 2) + rows,
int((cols_o - cols) /
2):int((cols_o - cols) / 2) + cols]
gt_array_ = gt_array[:,
int((rows_o - rows) /
2):int((rows_o - rows) / 2) + rows,
int((cols_o - cols) / 2):int((cols_o - cols) / 2) +
cols]
mask = mask[:,
int((rows_o - rows) / 2):int((rows_o - rows) / 2) + rows,
int((cols_o - cols) / 2):int((cols_o - cols) / 2) + cols]
C0_data_1ch.extend(np.float32(data_array_))
C0_gt_1ch.extend(np.float32(gt_array_))
for iii in range(np.shape(data_array)[0]):
scipy.misc.imsave(
img_dir + 'mask_pat_' + str(pp) + '_' + str(iii) + '.png',
mask[iii, ...])
scipy.misc.imsave(
img_dir + 'img_pat_' + str(pp) + '_' + str(iii) + '.png',
data_array_[iii, ...])
scipy.misc.imsave(
img_dir + 'gt_pat_' + str(pp) + '_' + str(iii) + '.png',
gt_array_[iii, ...])
C0_data_1ch = np.asarray(C0_data_1ch)
C0_gt_1ch = np.asarray(C0_gt_1ch)
C0_gt_1ch[C0_gt_1ch == 500] = 1
C0_gt_1ch[C0_gt_1ch == 200] = 2
C0_gt_1ch[C0_gt_1ch == 600] = 3
# np.save('C0_data_1ch.npy', C0_data_1ch)
# np.save('C0_gt_1ch.npy', C0_gt_1ch)
new_data_array = np.concatenate((LGE_data_1ch, C0_data_1ch), axis=0)
new_data_array = np.concatenate((new_data_array, T2_data_1ch), axis=0)
new_gt_array = np.concatenate((LGE_gt_1ch, C0_gt_1ch), axis=0)
new_gt_array = np.concatenate((new_gt_array, T2_gt_1ch), axis=0)
np.save('train_data.npy', new_data_array[:, :, :, np.newaxis])
np.save('train_gt.npy', new_gt_array[:, :, :, np.newaxis])
print("img_count:",img_count)
print("new_gt_array:",new_gt_array.shape)
# print(xmin,xmax,ymin,ymax, xlenmin, ylenmin)
| 39.672996 | 115 | 0.564212 | 2,952 | 18,805 | 3.366192 | 0.057249 | 0.109188 | 0.08574 | 0.098621 | 0.866861 | 0.836671 | 0.800845 | 0.776391 | 0.760894 | 0.751333 | 0 | 0.057295 | 0.259346 | 18,805 | 473 | 116 | 39.756871 | 0.65616 | 0.319277 | 0 | 0.632867 | 0 | 0 | 0.074001 | 0.04163 | 0 | 0 | 0 | 0 | 0 | 1 | 0.003497 | false | 0.006993 | 0.045455 | 0 | 0.048951 | 0.076923 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
c7a967c0aa72f3fa083f0a104fa0f0048c81136b | 3,757 | py | Python | vimms/Controller/model.py | hechth/vimms | ce5922578cf225d46cb285da8e7af97b5321f5aa | [
"MIT"
] | 11 | 2019-07-11T09:19:18.000Z | 2021-03-07T08:44:36.000Z | vimms/Controller/model.py | hechth/vimms | ce5922578cf225d46cb285da8e7af97b5321f5aa | [
"MIT"
] | 159 | 2019-12-11T14:41:40.000Z | 2021-03-31T19:47:08.000Z | vimms/Controller/model.py | hechth/vimms | ce5922578cf225d46cb285da8e7af97b5321f5aa | [
"MIT"
] | 4 | 2019-10-09T18:42:49.000Z | 2020-07-10T14:21:59.000Z | import numpy as np
from vimms.Controller import RoiController
class ModelRoiController(RoiController):
def __init__(self, ionisation_mode, isolation_width, mz_tol, min_ms1_intensity, min_roi_intensity,
min_roi_length, boxes, p_values, N=None, rt_tol=10,
min_roi_length_for_fragmentation=1, length_units="scans", ms1_shift=0, params=None,
box_min_rt_width=0.01, box_min_mz_width=0.01):
self.boxes = boxes
self.p_values = np.array(p_values)
self.box_min_rt_width = box_min_rt_width
self.box_min_mz_width = box_min_mz_width
super().__init__(ionisation_mode, isolation_width, mz_tol, min_ms1_intensity, min_roi_intensity,
min_roi_length, N, rt_tol=rt_tol,
min_roi_length_for_fragmentation=min_roi_length_for_fragmentation,
length_units=length_units, ms1_shift=ms1_shift, params=params)
class FullPrioritisationModelRoiController(ModelRoiController):
def __init__(self, ionisation_mode, isolation_width, mz_tol, min_ms1_intensity, min_roi_intensity,
min_roi_length, boxes, p_values, N=None, rt_tol=10,
min_roi_length_for_fragmentation=1, length_units="scans", ms1_shift=0, params=None,
box_min_rt_width=0.01, box_min_mz_width=0.01):
super().__init__(ionisation_mode, isolation_width, mz_tol, min_ms1_intensity, min_roi_intensity,
min_roi_length, boxes, p_values, N, rt_tol,
min_roi_length_for_fragmentation, length_units, ms1_shift, params,
box_min_rt_width, box_min_mz_width)
self.p_values_order = np.argsort(-np.array(self.p_values)) # this is highest to lowest
def _get_scores(self):
dda_scores = self._get_dda_scores()
overlap_scores = []
for i in range(len(dda_scores)):
overlaps = np.array(self.live_roi[i].get_boxes_overlap(self.boxes, self.box_min_rt_width,
self.box_min_mz_width))
overlap_scores.append(overlaps * self.p_values_order)
initial_scores = dda_scores * overlap_scores
scores = self._get_top_N_scores(initial_scores)
return scores
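# Note on the ranking used in _get_scores above: np.argsort(-p) returns the
# indices of p ordered from highest to lowest p-value, e.g.
#
#     np.argsort(-np.array([0.1, 0.9, 0.5]))  # -> array([1, 2, 0])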
# class TopNBoxModelRoiController(ModelRoiController):
# def __init__(self, ionisation_mode, isolation_width, mz_tol, min_ms1_intensity, min_roi_intensity,
# min_roi_length, boxes, p_values, N=None, rt_tol=10,
# min_roi_length_for_fragmentation=1, length_units="scans", ms1_shift=0, params=None,
# box_min_rt_width=0.01, box_min_mz_width=0.01):
# super().__init__(ionisation_mode, isolation_width, mz_tol, min_ms1_intensity, min_roi_intensity,
# min_roi_length, boxes, p_values, N, rt_tol,
# min_roi_length_for_fragmentation, length_units, ms1_shift, params,
# box_min_rt_width, box_min_mz_width)
#
# self.p_values_order = np.argsort(-np.array(self.p_values)) # this is highest to lowest
#
# def _get_scores(self):
# dda_scores = self._get_dda_scores()
# overlap_scores = []
# for i in range(len(dda_scores)):
# overlaps = np.array(self.live_roi[i].get_boxes_overlap(self.boxes, self.box_min_rt_width,
# self.box_min_mz_width))
# max_pvalue = self.p_values[np.where(overlaps > 0.0)]
# overlap_scores.append(1 + 1 - max_pvalue)
# initial_scores = dda_scores * overlap_scores
# scores = self._get_top_N_scores(initial_scores)
# return scores
| 55.25 | 106 | 0.650253 | 497 | 3,757 | 4.434608 | 0.138833 | 0.051724 | 0.07078 | 0.053085 | 0.839383 | 0.83167 | 0.825771 | 0.793103 | 0.793103 | 0.779492 | 0 | 0.017066 | 0.266968 | 3,757 | 67 | 107 | 56.074627 | 0.783224 | 0.377961 | 0 | 0.285714 | 0 | 0 | 0.004327 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.085714 | false | 0 | 0.057143 | 0 | 0.228571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
c7cfa484ce82a9eb52a5a953579b557f76d37d07 | 8,389 | py | Python | tests/test_payload_check.py | unfoldingWord-dev/door43-enqueue-job | f153c92660ad2f59cacd04ecc96fad89cfa8f9da | [
"Unlicense"
] | null | null | null | tests/test_payload_check.py | unfoldingWord-dev/door43-enqueue-job | f153c92660ad2f59cacd04ecc96fad89cfa8f9da | [
"Unlicense"
] | 37 | 2018-10-11T03:30:55.000Z | 2021-01-08T13:52:30.000Z | tests/test_payload_check.py | unfoldingWord-dev/door43-enqueue-job | f153c92660ad2f59cacd04ecc96fad89cfa8f9da | [
"Unlicense"
] | null | null | null | from unittest import TestCase
from unittest.mock import Mock
import json
import logging
from enqueue.check_posted_payload import check_posted_payload
class TestPayloadCheck(TestCase):
def test_blank(self):
payload_json = ''
mock_request = Mock()
mock_request.data = payload_json
output = check_posted_payload(mock_request, logging)
expected = False, {
'error': "No payload found. You must submit a POST request via a DCS webhook notification."
}
self.assertEqual(output, expected)
def test_missing_header(self):
headers = ''
payload_json = 'whatever'
mock_request = Mock()
mock_request.headers = headers
mock_request.data = payload_json
output = check_posted_payload(mock_request, logging)
expected = False, {
'error': "This does not appear to be from DCS."
}
self.assertEqual(output, expected)
def test_wrong_header(self):
headers = {'nonEvent':'whatever'}
payload_json = 'whatever'
mock_request = Mock()
mock_request.headers = headers
mock_request.data = payload_json
output = check_posted_payload(mock_request, logging)
expected = False, {
'error': "This does not appear to be from DCS."
}
self.assertEqual(output, expected)
def test_bad_header(self):
headers = {'X-Gitea-Event':'whatever'}
payload_json = 'whatever'
mock_request = Mock()
mock_request.headers = headers
mock_request.data = payload_json
output = check_posted_payload(mock_request, logging)
expected = False, {
'error': "This does not appear to be a push, release, fork, or delete."
}
self.assertEqual(output, expected)
def test_missing_repo(self):
headers = {'X-Gitea-Event':'push'}
payload_json = {'something':'whatever'}
mock_request = Mock(**{'get_json.return_value':payload_json})
mock_request.headers = headers
mock_request.data = payload_json
output = check_posted_payload(mock_request, logging)
expected = False, {
'error': "No repo URL specified for push."
}
self.assertEqual(output, expected)
def test_bad_repo(self):
headers = {'X-Gitea-Event':'push'}
payload_json = {
'repository':{
'html_url':'whatever'
}
}
mock_request = Mock(**{'get_json.return_value':payload_json})
mock_request.headers = headers
mock_request.data = payload_json
output = check_posted_payload(mock_request, logging)
expected = False, {
'error': "The repo for push does not belong to https://git.door43.org."
}
self.assertEqual(output, expected)
def test_missing_commit_branch(self):
headers = {'X-Gitea-Event':'push'}
payload_json = {
'repository':{
'html_url':'https://git.door43.org/whatever'
}
}
mock_request = Mock(**{'get_json.return_value':payload_json})
mock_request.headers = headers
mock_request.data = payload_json
output = check_posted_payload(mock_request, logging)
expected = False, {
'error': "No commits specified for push."
}
self.assertEqual(output, expected)
def test_bad_commit_branch(self):
headers = {'X-Gitea-Event':'push'}
payload_json = {
'ref':None,
'repository':{
'html_url':'https://git.door43.org/whatever',
},
}
mock_request = Mock(**{'get_json.return_value':payload_json})
mock_request.headers = headers
mock_request.data = payload_json
output = check_posted_payload(mock_request, logging)
expected = False, {
'error': "No commits specified for push."
}
self.assertEqual(output, expected)
def test_missing_default_branch(self):
headers = {'X-Gitea-Event':'push'}
payload_json = {
'ref':'refs/heads/master',
'repository':{
'html_url':'https://git.door43.org/whatever',
},
}
mock_request = Mock(**{'get_json.return_value':payload_json})
mock_request.headers = headers
mock_request.data = payload_json
output = check_posted_payload(mock_request, logging)
expected = False, {
'error': "No commits specified for push."
}
self.assertEqual(output, expected)
def test_different_commit_branch(self):
headers = {'X-Gitea-Event':'push'}
payload_json = {
'ref':'refs/heads/notMaster',
'repository':{
'html_url':'https://git.door43.org/whatever',
'default_branch':'master',
},
}
mock_request = Mock(**{'get_json.return_value':payload_json})
mock_request.headers = headers
mock_request.data = payload_json
output = check_posted_payload(mock_request, logging)
expected = False, {
'error': "No commits specified for push."
}
self.assertEqual(output, expected)
def test_missing_commits_entry(self):
headers = {'X-Gitea-Event':'push'}
payload_json = {
'ref':'refs/heads/master',
'repository':{
'html_url':'https://git.door43.org/whatever',
'default_branch':'master',
},
}
mock_request = Mock(**{'get_json.return_value':payload_json})
mock_request.headers = headers
mock_request.data = payload_json
output = check_posted_payload(mock_request, logging)
expected = False, {
'error': "No commits specified for push."
}
self.assertEqual(output, expected)
def test_empty_commits_entry(self):
headers = {'X-Gitea-Event':'push'}
payload_json = {
'ref':'refs/heads/master',
'repository':{
'html_url':'https://git.door43.org/whatever',
'default_branch':'master',
},
'commits': [],
}
mock_request = Mock(**{'get_json.return_value':payload_json})
mock_request.headers = headers
mock_request.data = payload_json
output = check_posted_payload(mock_request, logging)
expected = False, {
'error': "No commits found for push."
}
self.assertEqual(output, expected)
def test_empty_release(self):
headers = {'X-Gitea-Event':'release'}
payload_json = {
'action': 'published',
'repository':{
'html_url':'https://git.door43.org/whatever',
},
}
mock_request = Mock(**{'get_json.return_value':payload_json})
mock_request.headers = headers
mock_request.data = payload_json
output = check_posted_payload(mock_request, logging)
expected = True, payload_json
self.assertEqual(output, expected)
def test_basic_json_success(self):
headers = {'X-Gitea-Event':'push'}
payload_json = {
'ref':'refs/heads/master',
'repository':{
'html_url':'https://git.door43.org/whatever',
'default_branch':'master',
},
'commits': ['some commit info'],
}
mock_request = Mock(**{'get_json.return_value':payload_json})
mock_request.headers = headers
mock_request.data = payload_json
output = check_posted_payload(mock_request, logging)
expected = True, payload_json
self.assertEqual(output, expected)
def test_typical_full_json_success(self):
headers = {'X-Gitea-Event':'push'}
with open( 'tests/Resources/webhook_post.json', 'rt' ) as json_file:
payload_json = json.load(json_file)
mock_request = Mock(**{'get_json.return_value':payload_json})
mock_request.headers = headers
mock_request.data = payload_json
output = check_posted_payload(mock_request, logging)
expected = True, payload_json
self.assertEqual(output, expected)
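# Summary of the minimal payload accepted by check_posted_payload, as
# exercised by the tests above (the URL and commit entry are illustrative):
#
#     headers = {'X-Gitea-Event': 'push'}
#     payload = {'ref': 'refs/heads/master',
#                'repository': {'html_url': 'https://git.door43.org/<repo>',
#                               'default_branch': 'master'},
#                'commits': ['<commit info>']}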
| 36.159483 | 103 | 0.586602 | 876 | 8,389 | 5.374429 | 0.116438 | 0.13785 | 0.064996 | 0.070093 | 0.879779 | 0.867247 | 0.867247 | 0.83921 | 0.829014 | 0.808199 | 0 | 0.003077 | 0.302658 | 8,389 | 231 | 104 | 36.316017 | 0.801709 | 0 | 0 | 0.666667 | 0 | 0 | 0.205269 | 0.03147 | 0 | 0 | 0 | 0 | 0.070423 | 1 | 0.070423 | false | 0 | 0.023474 | 0 | 0.098592 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
c7d648f5cfc6a78ddb9c677722d02575657c7b42 | 45 | py | Python | tfds_juliet/__init__.py | alexpotter1/vulndetect-ml | 338fbf919b24520f9107a1604d1c8af48aadff76 | [
"MIT"
] | 1 | 2020-02-25T01:53:23.000Z | 2020-02-25T01:53:23.000Z | tfds_juliet/__init__.py | alexpotter1/vulndetect-ml | 338fbf919b24520f9107a1604d1c8af48aadff76 | [
"MIT"
] | null | null | null | tfds_juliet/__init__.py | alexpotter1/vulndetect-ml | 338fbf919b24520f9107a1604d1c8af48aadff76 | [
"MIT"
] | 1 | 2020-10-24T15:30:38.000Z | 2020-10-24T15:30:38.000Z | from tfds_juliet import * # noqa: F403,F401
| 22.5 | 44 | 0.733333 | 7 | 45 | 4.571429 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.162162 | 0.177778 | 45 | 1 | 45 | 45 | 0.702703 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
1be2480423c00fbfc5a30417ede469b1a1ea668b | 105 | py | Python | slackerbehave/__init__.py | raghavendranekkanti/slacker-behave | 7638ef9dac8a377ddfe425a5bcd10fd57f2354cd | [
"MIT"
] | 1 | 2021-03-08T14:39:57.000Z | 2021-03-08T14:39:57.000Z | slackerbehave/__init__.py | raghavendranekkanti/slacker-behave | 7638ef9dac8a377ddfe425a5bcd10fd57f2354cd | [
"MIT"
] | null | null | null | slackerbehave/__init__.py | raghavendranekkanti/slacker-behave | 7638ef9dac8a377ddfe425a5bcd10fd57f2354cd | [
"MIT"
] | null | null | null | from slackerbehave.slacker import Slacker
from slackerbehave.scenario import SFeature, SScenario, SStatus | 52.5 | 63 | 0.87619 | 12 | 105 | 7.666667 | 0.666667 | 0.369565 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.085714 | 105 | 2 | 63 | 52.5 | 0.958333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
400a0a1fa17dc9c685a9268667e8b8923812769f | 7,084 | py | Python | modnet/tests/test_preprocessing.py | Matgenix/modnet | e0ae0c9e24d6f48b8f0602a3422e8613870a31c2 | [
"MIT"
] | null | null | null | modnet/tests/test_preprocessing.py | Matgenix/modnet | e0ae0c9e24d6f48b8f0602a3422e8613870a31c2 | [
"MIT"
] | null | null | null | modnet/tests/test_preprocessing.py | Matgenix/modnet | e0ae0c9e24d6f48b8f0602a3422e8613870a31c2 | [
"MIT"
] | 1 | 2020-06-19T12:05:26.000Z | 2020-06-19T12:05:26.000Z | #!/usr/bin/env python
import numpy as np
import pandas as pd
import pytest
from modnet.preprocessing import get_cross_nmi
from modnet.preprocessing import nmi_target
def test_nmi_target():
# Test with linear data (should get 1.0 mutual information, or very close due to algorithm used
# in mutual_info_regression)
npoints = 31
x = np.linspace(0.5, 3.5, npoints)
y = 2*x - 2
z = 4*x + 2
df_feat = pd.DataFrame({'x': x, 'y': y})
df_target = pd.DataFrame({'z': z})
# Here we fix the number of neighbors for the call to sklearn.feature_selection's mutual_info_regression to 2 so
# that we get exactly 1 for the mutual information.
df_nmi_target = nmi_target(df_feat=df_feat, df_target=df_target, n_neighbors=2)
assert df_nmi_target.shape == (2, 1)
assert df_nmi_target.loc['x']['z'] == pytest.approx(1.0)
assert df_nmi_target.loc['y']['z'] == pytest.approx(1.0)
# Same data shuffled
# Shuffle the x, y and z
indices = np.arange(npoints)
np.random.seed(42)
np.random.shuffle(indices)
xs = x.take(indices)
ys = y.take(indices)
zs = z.take(indices)
df_feat = pd.DataFrame({'x': xs, 'y': ys})
df_target = pd.DataFrame({'z': zs})
df_nmi_target = nmi_target(df_feat=df_feat, df_target=df_target, n_neighbors=2)
assert df_nmi_target.shape == (2, 1)
assert df_nmi_target.loc['x']['z'] == pytest.approx(1.0)
assert df_nmi_target.loc['y']['z'] == pytest.approx(1.0)
# Test with one constant feature
c = np.ones(npoints) * 1.4
df_feat = pd.DataFrame({'x': x, 'y': y, 'c': c})
df_target = pd.DataFrame({'z': z})
df_nmi_target = nmi_target(df_feat=df_feat, df_target=df_target, n_neighbors=2)
assert df_nmi_target.shape == (2, 1)
assert df_nmi_target.loc['x']['z'] == pytest.approx(1.0)
assert df_nmi_target.loc['y']['z'] == pytest.approx(1.0)
df_nmi_target = nmi_target(df_feat=df_feat, df_target=df_target, drop_constant_features=False, n_neighbors=2)
assert df_nmi_target.shape == (3, 1)
assert df_nmi_target.loc['x']['z'] == pytest.approx(1.0)
assert df_nmi_target.loc['y']['z'] == pytest.approx(1.0)
assert df_nmi_target.loc['c']['z'] == pytest.approx(0.0)
# Test with unrelated data (grid)
x = np.linspace(start=2, stop=5, num=4)
z = np.linspace(start=3, stop=7, num=5)
x, z = np.meshgrid(x, z)
x = x.flatten()
z = z.flatten()
df_feat = pd.DataFrame({'x': x})
df_target = pd.DataFrame({'z': z})
df_nmi_target = nmi_target(df_feat=df_feat, df_target=df_target)
assert df_nmi_target.shape == (1, 1)
assert df_nmi_target.loc['x']['z'] == pytest.approx(0.0)
# Test initial checks
# Incompatible shapes
x = np.linspace(start=2, stop=3, num=5)
z = np.linspace(start=2, stop=3, num=8)
df_feat = pd.DataFrame({'x': x})
df_target = pd.DataFrame({'z': z})
with pytest.raises(ValueError, match=r'The input features DataFrame and the target variable DataFrame '
r'should contain the same number of data points.'):
nmi_target(df_feat=df_feat, df_target=df_target)
# Target DataFrame does not have exactly one column
x = np.linspace(start=2, stop=3, num=5)
z = np.linspace(start=2, stop=3, num=5)
df_feat = pd.DataFrame({'x': x})
df_target = pd.DataFrame({'z2': z, 'z': z})
with pytest.raises(ValueError, match=r'The target DataFrame should have exactly one column.'):
nmi_target(df_feat=df_feat, df_target=df_target)
# Test with some more real data (for which NMI is not just 0.0 or 1.0)
npoints = 200
np.random.seed(42)
x = np.random.rand(npoints)
z = 4 * x + 1.0 * np.random.rand(npoints)
df_feat = pd.DataFrame({'x': x})
df_target = pd.DataFrame({'z': z})
# Here we fix the random_state for the call to sklearn.feature_selection's mutual_info_regression so
# that we always get the same value.
df_nmi_target = nmi_target(df_feat=df_feat, df_target=df_target, random_state=42)
assert df_nmi_target.shape == (1, 1)
assert df_nmi_target.loc['x']['z'] == pytest.approx(0.3417665092162398)
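# Illustrative sketch (not part of the test suite): nmi_target returns a
# DataFrame of normalized mutual information values indexed by feature name,
# with the single target name as its column. The n_neighbors and random_state
# keyword arguments are forwarded to sklearn's mutual_info_regression, which
# is why the tests pin them for reproducibility, e.g.:
#   nmi = nmi_target(df_feat=df_feat, df_target=df_target, n_neighbors=2)
#   nmi.loc['x']['z']  # ~1.0 for a noiseless linear relation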
def test_get_cross_nmi():
# Test with linear data (should get 1.0 mutual information, or very close due to algorithm used
# in mutual_info_regression)
npoints = 31
x = np.linspace(0.5, 3.5, npoints)
y = 2*x - 2
z = 4*x + 2
df_feat = pd.DataFrame({'x': x, 'y': y, 'z': z})
# Here we fix the number of neighbors for the call to sklearn.feature_selection's mutual_info_regression to 2 so
# that we get exactly 1 for the mutual information.
df_cross_nmi = get_cross_nmi(df_feat=df_feat, n_neighbors=2)
assert df_cross_nmi.shape == (3, 3)
for idx in df_cross_nmi.index:
for col in df_cross_nmi.columns:
assert df_cross_nmi.loc[idx][col] == pytest.approx(1.0)
# Same data shuffled
# Shuffle the x, y and z
indices = np.arange(npoints)
np.random.seed(42)
np.random.shuffle(indices)
xs = x.take(indices)
ys = y.take(indices)
zs = z.take(indices)
df_feat = pd.DataFrame({'x': xs, 'y': ys, 'z': zs})
df_cross_nmi = get_cross_nmi(df_feat=df_feat, n_neighbors=2)
assert df_cross_nmi.shape == (3, 3)
for idx in df_cross_nmi.index:
for col in df_cross_nmi.columns:
assert df_cross_nmi.loc[idx][col] == pytest.approx(1.0)
# Test with one constant feature
c = np.ones(npoints) * 1.4
df_feat = pd.DataFrame({'x': x, 'y': y, 'z': z, 'c': c})
df_cross_nmi = get_cross_nmi(df_feat=df_feat, n_neighbors=2)
assert df_cross_nmi.shape == (4, 4)
for idx in df_cross_nmi.index:
for col in df_cross_nmi.columns:
expected = 0.0 if idx == 'c' or col == 'c' else 1.0
assert df_cross_nmi.loc[idx][col] == pytest.approx(expected)
# Test with unrelated data (grid)
x = np.linspace(start=2, stop=5, num=4)
y = np.linspace(start=3, stop=7, num=5)
x, y = np.meshgrid(x, y)
x = x.flatten()
y = y.flatten()
df_feat = pd.DataFrame({'x': x, 'y': y})
df_cross_nmi = get_cross_nmi(df_feat=df_feat, n_neighbors=2)
assert df_cross_nmi.shape == (2, 2)
assert df_cross_nmi.loc['x']['y'] == pytest.approx(0.0)
assert df_cross_nmi.loc['y']['x'] == pytest.approx(0.0)
# Test with some more real data (for which NMI is not just 0.0 or 1.0)
npoints = 200
np.random.seed(42)
x = np.random.rand(npoints)
y = 4 * x + 1.0 * np.random.rand(npoints)
df_feat = pd.DataFrame({'x': x, 'y': y})
# Here we fix the random_state for the call to sklearn.feature_selection's mutual_info_regression so
# that we always get the same value.
df_cross_nmi = get_cross_nmi(df_feat=df_feat, random_state=42)
assert df_cross_nmi.shape == (2, 2)
assert df_cross_nmi.loc['x']['x'] == pytest.approx(1.0)
assert df_cross_nmi.loc['y']['y'] == pytest.approx(1.0)
assert df_cross_nmi.loc['x']['y'] == pytest.approx(0.3417665092162398)
assert df_cross_nmi.loc['y']['x'] == pytest.approx(0.3417665092162398)
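# Note on get_cross_nmi (as exercised above): the result is a square DataFrame
# with the features on both axes; diagonal entries are 1.0 (a feature shares
# full information with itself) and off-diagonal entries are symmetric, e.g.
# df_cross_nmi.loc['x']['y'] == df_cross_nmi.loc['y']['x'].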
| 37.481481 | 116 | 0.648786 | 1,217 | 7,084 | 3.601479 | 0.112572 | 0.052019 | 0.057039 | 0.065937 | 0.874059 | 0.863564 | 0.858773 | 0.854666 | 0.846452 | 0.794205 | 0 | 0.03674 | 0.208498 | 7,084 | 188 | 117 | 37.680851 | 0.744962 | 0.182101 | 0 | 0.645161 | 0 | 0 | 0.040049 | 0 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0.016129 | false | 0 | 0.040323 | 0 | 0.056452 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
40469d2b21f82f655d8cd2164a0300b5c9dbdc4d | 162 | py | Python | HDPy/puppy/__init__.py | igsor/HDPy | c02ec62e90d0a2b6f6d29569becac45f017490b1 | [
"BSD-3-Clause"
] | 6 | 2017-06-09T11:32:29.000Z | 2021-07-08T07:24:44.000Z | HDPy/puppy/__init__.py | igsor/HDPy | c02ec62e90d0a2b6f6d29569becac45f017490b1 | [
"BSD-3-Clause"
] | null | null | null | HDPy/puppy/__init__.py | igsor/HDPy | c02ec62e90d0a2b6f6d29569becac45f017490b1 | [
"BSD-3-Clause"
] | 1 | 2015-07-11T00:41:22.000Z | 2015-07-11T00:41:22.000Z | """
.. automodule:: HDPy.puppy.puppy
.. automodule:: HDPy.puppy.analysis_puppy
"""
# Explicit relative imports (the original used Python 2 implicit relative
# imports, which fail on Python 3).
from .puppy import *
from .analysis_puppy import *
from . import policy
from . import plant
| 13.5 | 41 | 0.734568 | 20 | 162 | 5.85 | 0.4 | 0.239316 | 0.324786 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.141975 | 162 | 11 | 42 | 14.727273 | 0.841727 | 0.462963 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
4057cfaeae671d885148c1ffdc387518e229fa02 | 10,074 | py | Python | src/eddington/fitting_functions_list.py | EddLabs/eddington_core | 0923fc7fdf1240181554b2612a97d5708d6244bf | [
"Apache-2.0"
] | 3 | 2020-09-09T20:01:24.000Z | 2020-10-14T00:29:44.000Z | src/eddington/fitting_functions_list.py | EddLabs/eddington_core | 0923fc7fdf1240181554b2612a97d5708d6244bf | [
"Apache-2.0"
] | 43 | 2020-08-07T11:29:02.000Z | 2021-12-19T23:28:29.000Z | src/eddington/fitting_functions_list.py | EddLabs/eddington_core | 0923fc7fdf1240181554b2612a97d5708d6244bf | [
"Apache-2.0"
] | 5 | 2020-08-08T17:56:13.000Z | 2020-10-01T12:24:51.000Z | """List of common fitting functions."""
from typing import Union
import numpy as np
import scipy.special
from eddington.exceptions import FittingFunctionLoadError
from eddington.fitting_function_class import FittingFunction, fitting_function
@fitting_function(
n=2,
syntax="a[0] + a[1] * x",
x_derivative=lambda a, x: np.full(shape=np.shape(x), fill_value=a[1]),
a_derivative=lambda a, x: np.stack([np.ones(shape=np.shape(x)), x]),
) # pylint: disable=C0103
def linear(a: np.ndarray, x: Union[np.ndarray, float]) -> Union[np.ndarray, float]:
"""
Simple linear fitting function.
:param a: Parameters to be fitted
:type a: np.ndarray
:param x: Value to be evaluated by the function
:type x: float or np.ndarray
:return: evaluation value or values
:rtype: float or np.ndarray
"""
return a[0] + a[1] * x
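# Worked example (sketch): the decorated object is called as f(a, x), the same
# convention polynomial() below relies on, so with a = [1, 2]:
#   linear(np.array([1.0, 2.0]), 3.0)  # -> 1 + 2*3 = 7.0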
@fitting_function(
n=1,
syntax="a[0]",
x_derivative=lambda a, x: np.zeros(shape=np.shape(x)),
a_derivative=lambda a, x: np.stack([np.ones(shape=np.shape(x))]),
) # pylint: disable=C0103
def constant(a: np.ndarray, x: Union[np.ndarray, float]) -> Union[np.ndarray, float]:
"""
Constant fitting function.
:param a: Parameters to be fitted
:type a: np.ndarray
:param x: Value to be evaluated by the function
:type x: float or np.ndarray
:return: evaluation value or values
:rtype: float or np.ndarray
"""
return np.full(fill_value=a[0], shape=np.shape(x))
@fitting_function(
n=3,
syntax="a[0] + a[1] * x + a[2] * x ^ 2",
x_derivative=lambda a, x: a[1] + 2 * a[2] * x,
a_derivative=lambda a, x: np.stack([np.ones(shape=np.shape(x)), x, x ** 2]),
) # pylint: disable=C0103
def parabolic(a: np.ndarray, x: Union[np.ndarray, float]) -> Union[np.ndarray, float]:
"""
Parabolic fitting function.
:param a: Parameters to be fitted
:type a: np.ndarray
:param x: Value to be evaluated by the function
:type x: float or np.ndarray
:return: evaluation value or values
:rtype: float or np.ndarray
"""
return a[0] + a[1] * x + a[2] * x ** 2
@fitting_function(
n=4,
syntax="a[0] * (x + a[1]) ^ a[2] + a[3]",
x_derivative=lambda a, x: a[2] * a[0] * (x + a[1]) ** (a[2] - 1),
a_derivative=lambda a, x: np.stack(
[
np.power(x + a[1], a[2]),
a[2] * a[0] * np.power(x + a[1], a[2] - 1),
a[0] * np.log(x + a[1]) * np.power(x + a[1], a[2]),
np.ones(shape=np.shape(x)),
]
),
) # pylint: disable=C0103
def straight_power(
a: np.ndarray, x: Union[np.ndarray, float]
) -> Union[np.ndarray, float]: # pylint: disable=C0103
"""
Represent fitting of y ~ x^n.
:param a: Parameters to be fitted
:type a: np.ndarray
:param x: Value to be evaluated by the function
:type x: float or np.ndarray
:return: evaluation value or values
:rtype: float or np.ndarray
"""
return a[0] * np.power(x + a[1], a[2]) + a[3]
@fitting_function(
n=4,
syntax="a[0] / (x + a[1]) ^ a[2] + a[3]",
x_derivative=lambda a, x: -a[2] * a[0] / np.power(x + a[1], a[2] + 1),
a_derivative=lambda a, x: np.stack(
[
1 / np.power(x + a[1], a[2]),
-a[2] * a[0] / np.power(x + a[1], a[2] + 1),
-a[0] * np.log(x + a[1]) / np.power(x + a[1], a[2]),
np.ones(shape=np.shape(x)),
]
),
) # pylint: disable=C0103
def inverse_power(
a: np.ndarray, x: Union[np.ndarray, float]
) -> Union[np.ndarray, float]: # pylint: disable=C0103
"""
Represent fitting of y ~ x^(-n).
:param a: Parameters to be fitted
:type a: np.ndarray
:param x: Value to be evaluated by the function
:type x: float or np.ndarray
:return: evaluation value or values
:rtype: float or np.ndarray
"""
return a[0] / np.power(x + a[1], a[2]) + a[3]
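# Derivative check (sketch): writing f = a[0] * (x + a[1]) ** (-a[2]) + a[3],
#   df/da[2] = -a[0] * log(x + a[1]) * (x + a[1]) ** (-a[2])
# which is why a_derivative divides (rather than multiplies) by
# np.power(x + a[1], a[2]).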
@fitting_function(
n=3,
syntax="a[0] / (x + a[1]) + a[2]",
x_derivative=lambda a, x: -a[0] / ((x + a[1]) ** 2),
a_derivative=lambda a, x: np.stack(
[1 / (x + a[1]), -a[0] / ((x + a[1]) ** 2), np.ones(shape=np.shape(x))]
),
) # pylint: disable=C0103
def hyperbolic(a: np.ndarray, x: Union[np.ndarray, float]) -> Union[np.ndarray, float]:
"""
Hyperbolic fitting function.
:param a: Parameters to be fitted
:type a: np.ndarray
:param x: Value to be evaluated by the function
:type x: float or np.ndarray
:return: evaluation value or values
:rtype: float or np.ndarray
"""
return a[0] / (x + a[1]) + a[2]
@fitting_function(
n=3,
syntax="a[0] * exp(a[1] * x) + a[2]",
x_derivative=lambda a, x: a[0] * a[1] * np.exp(a[1] * x),
a_derivative=lambda a, x: np.stack(
[np.exp(a[1] * x), a[0] * x * np.exp(a[1] * x), np.ones(np.shape(x))]
),
) # pylint: disable=C0103
def exponential(a: np.ndarray, x: Union[np.ndarray, float]) -> Union[np.ndarray, float]:
"""
Exponential fitting function.
:param a: Parameters to be fitted
:type a: np.ndarray
:param x: Value to be evaluated by the function
:type x: float or np.ndarray
:return: evaluation value or values
:rtype: float or np.ndarray
"""
return a[0] * np.exp(a[1] * x) + a[2]
@fitting_function(
n=4,
syntax="a[0] * cos(a[1] * x + a[2]) + a[3]",
x_derivative=lambda a, x: -a[0] * a[1] * np.sin(a[1] * x + a[2]),
a_derivative=lambda a, x: np.stack(
[
np.cos(a[1] * x + a[2]),
-a[0] * x * np.sin(a[1] * x + a[2]),
-a[0] * np.sin(a[1] * x + a[2]),
np.ones(shape=np.shape(x)),
]
),
) # pylint: disable=C0103
def cos(a: np.ndarray, x: Union[np.ndarray, float]) -> Union[np.ndarray, float]:
"""
Cosines fitting function.
:param a: Parameters to be fitted
:type a: np.ndarray
:param x: Value to be evaluated by the function
:type x: float or np.ndarray
:return: evaluation value or values
:rtype: float or np.ndarray
"""
return a[0] * np.cos(a[1] * x + a[2]) + a[3]
@fitting_function(
n=4,
syntax="a[0] * sin(a[1] * x + a[2]) + a[3]",
x_derivative=lambda a, x: a[0] * a[1] * np.cos(a[1] * x + a[2]),
a_derivative=lambda a, x: np.stack(
[
np.sin(a[1] * x + a[2]),
a[0] * x * np.cos(a[1] * x + a[2]),
a[0] * np.cos(a[1] * x + a[2]),
np.ones(shape=np.shape(x)),
]
),
) # pylint: disable=C0103
def sin(a: np.ndarray, x: Union[np.ndarray, float]) -> Union[np.ndarray, float]:
"""
Sine fitting function.
:param a: Parameters to be fitted
:type a: np.ndarray
:param x: Value to be evaluated by the function
:type x: float or np.ndarray
:return: evaluation value or values
:rtype: float or np.ndarray
"""
return a[0] * np.sin(a[1] * x + a[2]) + a[3]
@fitting_function(
n=4,
syntax="a[0] * exp( - ((x - a[1]) / a[2]) ^ 2) + a[3]",
x_derivative=lambda a, x: a[0]
* np.exp(-(((x - a[1]) / a[2]) ** 2)) # noqa: W503
* (-2 * (x - a[1]) / a[2] ** 2),  # noqa: W503
a_derivative=lambda a, x: np.stack(
[
np.exp(-(((x - a[1]) / a[2]) ** 2)),
a[0] * np.exp(-(((x - a[1]) / a[2]) ** 2)) * (2 * (x - a[1]) / a[2] ** 2),
a[0] * np.exp(-(((x - a[1]) / a[2]) ** 2)) * (2 * (x - a[1]) ** 2 / a[2] ** 3),
np.ones(shape=np.shape(x)),
]
),
) # pylint: disable=C0103
def normal(a: np.ndarray, x: Union[np.ndarray, float]) -> Union[np.ndarray, float]:
"""
Normal distribution fitting function.
:param a: Parameters to be fitted
:type a: np.ndarray
:param x: Value to be evaluated by the function
:type x: float or np.ndarray
:return: evaluation value or values
:rtype: float or np.ndarray
"""
return a[0] * np.exp(-(((x - a[1]) / a[2]) ** 2)) + a[3]
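# Derivative check (sketch): with u = (x - a[1]) / a[2] and
# f = a[0] * exp(-u ** 2) + a[3], the chain rule gives
#   df/dx    = a[0] * exp(-u ** 2) * (-2 * (x - a[1]) / a[2] ** 2)
#   df/da[1] = a[0] * exp(-u ** 2) * ( 2 * (x - a[1]) / a[2] ** 2)
#   df/da[2] = a[0] * exp(-u ** 2) * ( 2 * (x - a[1]) ** 2 / a[2] ** 3)
# matching the expressions used in the decorator above.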
@fitting_function(
n=3,
syntax="a[0] * (a[1] ^ x) * exp(-a[1]) / gamma(x+1) + a[2]",
x_derivative=lambda a, x: (
a[0] * np.power(a[1], x) * np.exp(-a[1]) / scipy.special.gamma(x + 1)
) # noqa: W503
* (np.log(a[1]) - scipy.special.digamma(x + 1)), # noqa: W503
a_derivative=lambda a, x: np.stack(
[
np.power(a[1], x) * np.exp(-a[1]) / scipy.special.gamma(x + 1),
(a[0] * np.exp(-a[1]) / scipy.special.gamma(x + 1))
* (x * np.power(a[1], x - 1) - np.power(a[1], x)), # noqa: W503
np.ones(shape=np.shape(x)),
]
),
) # pylint: disable=C0103
def poisson(a: np.ndarray, x: Union[np.ndarray, float]) -> Union[np.ndarray, float]:
"""
Poisson fitting function.
:param a: Parameters to be fitted
:type a: np.ndarray
:param x: Value to be evaluated by the function
:type x: float or np.ndarray
:return: evaluation value or values
:rtype: float or np.ndarray
"""
return a[0] * np.power(a[1], x) * np.exp(-a[1]) / scipy.special.gamma(x + 1) + a[2]
def polynomial(n: int) -> FittingFunction: # pylint: disable=C0103
"""
Create a polynomial fitting function with parameters as coefficients.
:param n: Degree of the polynomial.
:type n: int
:return: a polynomial fitting function
:rtype: FittingFunction
:raises FittingFunctionLoadError: Raised when trying to load a polynomial with
a non-positive degree.
"""
n = int(n)
if n <= 0:
raise FittingFunctionLoadError(f"n must be positive, got {n}")
if n == 1:
return linear
arange = np.arange(1, n + 1)
syntax = "a[0] + a[1] * x + " + " + ".join(
[f"a[{i}] * x ^ {i}" for i in arange[1:]]
)
@fitting_function(
n=n + 1,
name=f"polynomial_{n}",
syntax=syntax,
x_derivative=lambda a, x: polynomial(n - 1)(arange * a[1:], x),
a_derivative=lambda a, x: np.stack([x ** i for i in range(n + 1)]),
save=False,
) # pylint: disable=C0103
def func(a: np.ndarray, x: Union[np.ndarray, float]) -> Union[np.ndarray, float]:
return sum([a[i] * x ** i for i in range(n + 1)])
return func
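# Minimal demo (sketch, not part of the library; runs only when this module is
# executed directly). It relies on the f(a, x) calling convention used above.
if __name__ == "__main__":
demo_coeffs = np.array([1.0, 2.0, 3.0])
print(parabolic(demo_coeffs, 2.0))  # 1 + 2*2 + 3*2**2 = 17.0
print(polynomial(4)(np.ones(5), 2.0))  # 1 + 2 + 4 + 8 + 16 = 31.0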
| 31.285714 | 88 | 0.554 | 1,636 | 10,074 | 3.384474 | 0.067848 | 0.112155 | 0.015712 | 0.078021 | 0.807838 | 0.791403 | 0.774065 | 0.75763 | 0.739751 | 0.718078 | 0 | 0.039406 | 0.264443 | 10,074 | 321 | 89 | 31.383178 | 0.707827 | 0.315763 | 0 | 0.278107 | 0 | 0.035503 | 0.062969 | 0 | 0.005917 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0 | 0.029586 | 0.005917 | 0.189349 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
40ffbade3d07139a98e571390da36319b4ed0934 | 47,637 | py | Python | lasagne/tests/layers/test_recurrent.py | BenjaminBossan/Lasagne | 8772ebbbaf15951f2deb1bc2d76940ebd8ad2076 | [
"MIT"
] | null | null | null | lasagne/tests/layers/test_recurrent.py | BenjaminBossan/Lasagne | 8772ebbbaf15951f2deb1bc2d76940ebd8ad2076 | [
"MIT"
] | 1 | 2021-03-20T04:42:05.000Z | 2021-03-20T04:42:05.000Z | lasagne/tests/layers/test_recurrent.py | BenjaminBossan/Lasagne | 8772ebbbaf15951f2deb1bc2d76940ebd8ad2076 | [
"MIT"
] | null | null | null | import pytest
from lasagne.layers import RecurrentLayer, LSTMLayer, CustomRecurrentLayer
from lasagne.layers import InputLayer, DenseLayer, GRULayer, Gate, Layer
from lasagne.layers import helper
import theano
import theano.tensor as T
import numpy as np
import lasagne
from mock import Mock
def test_recurrent_return_shape():
num_batch, seq_len, n_features1, n_features2 = 5, 3, 10, 11
num_units = 6
x = T.tensor4()
in_shp = (num_batch, seq_len, n_features1, n_features2)
l_inp = InputLayer(in_shp)
l_rec = RecurrentLayer(l_inp, num_units=num_units)
x_in = np.random.random(in_shp).astype('float32')
output = helper.get_output(l_rec, x)
output_val = output.eval({x: x_in})
assert helper.get_output_shape(l_rec, x_in.shape) == output_val.shape
assert output_val.shape == (num_batch, seq_len, num_units)
def test_recurrent_grad():
num_batch, seq_len, n_features = 5, 3, 10
num_units = 6
l_inp = InputLayer((num_batch, seq_len, n_features))
l_rec = RecurrentLayer(l_inp,
num_units=num_units)
output = helper.get_output(l_rec)
g = T.grad(T.mean(output), lasagne.layers.get_all_params(l_rec))
assert isinstance(g, (list, tuple))
def test_recurrent_nparams():
l_inp = InputLayer((2, 2, 3))
l_rec = RecurrentLayer(l_inp, 5, learn_init=False, nonlinearity=None)
# b, W_hid_to_hid and W_in_to_hid
assert len(lasagne.layers.get_all_params(l_rec, trainable=True)) == 3
# b + hid_init
assert len(lasagne.layers.get_all_params(l_rec, regularizable=False)) == 2
def test_recurrent_nparams_learn_init():
l_inp = InputLayer((2, 2, 3))
l_rec = RecurrentLayer(l_inp, 5, learn_init=True)
# b, W_hid_to_hid and W_in_to_hid + hid_init
assert len(lasagne.layers.get_all_params(l_rec, trainable=True)) == 4
# b + hid_init
assert len(lasagne.layers.get_all_params(l_rec, regularizable=False)) == 2
def test_recurrent_hid_init_layer():
# test that you can set hid_init to be a layer
l_inp = InputLayer((2, 2, 3))
l_inp_h = InputLayer((2, 5))
l_rec = RecurrentLayer(l_inp, 5, hid_init=l_inp_h)
x = T.tensor3()
h = T.matrix()
output = lasagne.layers.get_output(l_rec, {l_inp: x, l_inp_h: h})
def test_recurrent_nparams_hid_init_layer():
# test that you can see layers through hid_init
l_inp = InputLayer((2, 2, 3))
l_inp_h = InputLayer((2, 5))
l_inp_h_de = DenseLayer(l_inp_h, 7)
l_rec = RecurrentLayer(l_inp, 7, hid_init=l_inp_h_de)
# directly check the layers can be seen through hid_init
assert lasagne.layers.get_all_layers(l_rec) == [l_inp, l_inp_h, l_inp_h_de,
l_rec]
# b, W_hid_to_hid and W_in_to_hid + W + b
assert len(lasagne.layers.get_all_params(l_rec, trainable=True)) == 5
# b (recurrent) + b (dense)
assert len(lasagne.layers.get_all_params(l_rec, regularizable=False)) == 2
def test_recurrent_hid_init_mask():
# test that you can set hid_init to be a layer when a mask is provided
l_inp = InputLayer((2, 2, 3))
l_inp_h = InputLayer((2, 5))
l_inp_msk = InputLayer((2, 2))
l_rec = RecurrentLayer(l_inp, 5, hid_init=l_inp_h, mask_input=l_inp_msk)
x = T.tensor3()
h = T.matrix()
msk = T.matrix()
inputs = {l_inp: x, l_inp_h: h, l_inp_msk: msk}
output = lasagne.layers.get_output(l_rec, inputs)
def test_recurrent_hid_init_layer_eval():
# Test `hid_init` as a `Layer` with some dummy input. Compare the output of
# a network with a `Layer` as input to `hid_init` to a network with a
# `np.array` as input to `hid_init`
n_units = 7
n_test_cases = 2
in_shp = (n_test_cases, 2, 3)
in_h_shp = (1, n_units)
# dummy inputs
X_test = np.ones(in_shp, dtype=theano.config.floatX)
Xh_test = np.ones(in_h_shp, dtype=theano.config.floatX)
Xh_test_batch = np.tile(Xh_test, (n_test_cases, 1))
# network with `Layer` initializer for hid_init
l_inp = InputLayer(in_shp)
l_inp_h = InputLayer(in_h_shp)
l_rec_inp_layer = RecurrentLayer(l_inp, n_units, hid_init=l_inp_h,
nonlinearity=None)
# network with `np.array` initializer for hid_init
l_rec_nparray = RecurrentLayer(l_inp, n_units, hid_init=Xh_test,
nonlinearity=None)
# copy network parameters from l_rec_inp_layer to l_rec_nparray
l_il_param = dict([(p.name, p) for p in l_rec_inp_layer.get_params()])
l_rn_param = dict([(p.name, p) for p in l_rec_nparray.get_params()])
for k, v in l_rn_param.items():
if k in l_il_param:
v.set_value(l_il_param[k].get_value())
# build the theano functions
X = T.tensor3()
Xh = T.matrix()
output_inp_layer = lasagne.layers.get_output(l_rec_inp_layer,
{l_inp: X, l_inp_h: Xh})
output_nparray = lasagne.layers.get_output(l_rec_nparray, {l_inp: X})
# test both nets with dummy input
output_val_inp_layer = output_inp_layer.eval({X: X_test,
Xh: Xh_test_batch})
output_val_nparray = output_nparray.eval({X: X_test})
# check output given `Layer` is the same as with `np.array`
assert np.allclose(output_val_inp_layer, output_val_nparray)
def test_recurrent_incoming_tuple():
input_shape = (2, 3, 4)
l_rec = lasagne.layers.RecurrentLayer(input_shape, 5)
assert l_rec.input_shapes[0] == input_shape
def test_recurrent_name():
l_in = lasagne.layers.InputLayer((2, 3, 4))
layer_name = 'l_rec'
l_rec = lasagne.layers.RecurrentLayer(l_in, 4, name=layer_name)
assert l_rec.b.name == layer_name + '.input_to_hidden.b'
assert l_rec.W_in_to_hid.name == layer_name + '.input_to_hidden.W'
assert l_rec.W_hid_to_hid.name == layer_name + '.hidden_to_hidden.W'
def test_custom_recurrent_arbitrary_shape():
# Check that the custom recurrent layer can handle more than 1 feature dim
n_batch, n_steps, n_channels, width, height = (2, 3, 4, 5, 6)
n_out_filters = 7
filter_shape = (3, 3)
l_in = lasagne.layers.InputLayer(
(n_batch, n_steps, n_channels, width, height))
l_in_to_hid = lasagne.layers.Conv2DLayer(
lasagne.layers.InputLayer((None, n_channels, width, height)),
n_out_filters, filter_shape, pad='same')
l_hid_to_hid = lasagne.layers.Conv2DLayer(
lasagne.layers.InputLayer((None, n_out_filters, width, height)),
n_out_filters, filter_shape, pad='same')
l_rec = lasagne.layers.CustomRecurrentLayer(
l_in, l_in_to_hid, l_hid_to_hid)
assert l_rec.output_shape == (n_batch, n_steps, n_out_filters, width,
height)
out = theano.function([l_in.input_var], lasagne.layers.get_output(l_rec))
out_shape = out(np.zeros((n_batch, n_steps, n_channels, width, height),
dtype=theano.config.floatX)).shape
assert out_shape == (n_batch, n_steps, n_out_filters, width, height)
def test_custom_recurrent_arbitrary_depth():
# Check that the custom recurrent layer can handle a hidden-to-hidden
# network with an arbitrary depth
n_batch, n_steps, n_channels, width, height = (2, 3, 4, 5, 6)
n_out_filters = 7
n_in_hid_filters_0 = 11
n_hid_hid_filters_0 = 13
filter_shape = (3, 3)
l_in = lasagne.layers.InputLayer(
(n_batch, n_steps, n_channels, width, height))
# Expect the output shape of `l_in` as input shape for input-to-hidden
l_in_to_hid = lasagne.layers.InputLayer((None, n_channels, width, height))
# Two conv layers; first to `n_hid_filters_0` channels
l_in_to_hid = lasagne.layers.Conv2DLayer(
l_in_to_hid, n_in_hid_filters_0, filter_shape, pad='same')
# then to `n_out_filters` channels
l_in_to_hid = lasagne.layers.Conv2DLayer(
l_in_to_hid, n_out_filters, filter_shape, pad='same')
# Expect the output shape of `l_in_to_hid` as input shape for
# hidden-to-hidden
l_hid_to_hid = lasagne.layers.InputLayer((None, n_out_filters,
width, height))
# Two conv layers; first to `n_hid_hid_filters_0` channels
l_hid_to_hid = lasagne.layers.Conv2DLayer(
l_hid_to_hid, n_hid_hid_filters_0, filter_shape, pad='same')
# then to `n_out_filters` channels
l_hid_to_hid = lasagne.layers.Conv2DLayer(
l_hid_to_hid, n_out_filters, filter_shape, pad='same')
l_rec = lasagne.layers.CustomRecurrentLayer(
l_in, l_in_to_hid, l_hid_to_hid)
assert l_rec.output_shape == (n_batch, n_steps, n_out_filters, width,
height)
out = theano.function([l_in.input_var], lasagne.layers.get_output(l_rec))
out_shape = out(np.zeros((n_batch, n_steps, n_channels, width, height),
dtype=theano.config.floatX)).shape
assert out_shape == (n_batch, n_steps, n_out_filters, width, height)
def test_custom_recurrent_non_unique_inputs():
# Check that the custom recurrent layer constructor detects non-unique
# input layers within the input-to-hidden and hidden-to-hidden graphs
# and raises ValueError
n_batch, n_steps, n_channels, width, height = (2, 3, 4, 5, 6)
n_out_filters = 7
n_in_hid_filters_0 = 11
n_hid_hid_filters_0 = 13
filter_shape = (3, 3)
l_in = lasagne.layers.InputLayer(
(n_batch, n_steps, n_channels, width, height))
# Bad input-to-hidden graph with multiple input layers
# Expect the output shape of `l_in` as input shape for input-to-hidden
l_in_to_hid_bad_0 = lasagne.layers.InputLayer(
(None, n_channels, width, height))
l_in_to_hid_bad_1 = lasagne.layers.InputLayer(
(None, n_channels, width, height))
l_in_to_hid_bad = lasagne.layers.ConcatLayer(
[l_in_to_hid_bad_0, l_in_to_hid_bad_1], axis=1)
# Two conv layers; first to `n_hid_filters_0` channels
l_in_to_hid_bad = lasagne.layers.Conv2DLayer(
l_in_to_hid_bad, n_in_hid_filters_0, filter_shape, pad='same')
# then to `n_out_filters` channels
l_in_to_hid_bad = lasagne.layers.Conv2DLayer(
l_in_to_hid_bad, n_out_filters, filter_shape, pad='same')
# Expect the output shape of `l_in` as input shape for input-to-hidden
l_in_to_hid = lasagne.layers.InputLayer((None, n_channels, width, height))
# Two conv layers; first to `n_hid_filters_0` channels
l_in_to_hid = lasagne.layers.Conv2DLayer(
l_in_to_hid, n_in_hid_filters_0, filter_shape, pad='same')
# then to `n_out_filters` channels
l_in_to_hid = lasagne.layers.Conv2DLayer(
l_in_to_hid, n_out_filters, filter_shape, pad='same')
# Bad hidden-to-hidden graph with multiple input layers
# Expect the output shape of `l_in_to_hid` as input shape for
# hidden-to-hidden
l_hid_to_hid_bad_0 = lasagne.layers.InputLayer(
(None, n_out_filters, width, height))
l_hid_to_hid_bad_1 = lasagne.layers.InputLayer(
(None, n_out_filters, width, height))
l_hid_to_hid_bad = lasagne.layers.ConcatLayer(
[l_hid_to_hid_bad_0, l_hid_to_hid_bad_1], axis=1)
# Two conv layers; first to `n_hid_hid_filters_0` channels
l_hid_to_hid_bad = lasagne.layers.Conv2DLayer(
l_hid_to_hid_bad, n_hid_hid_filters_0, filter_shape, pad='same')
# then to `n_out_filters` channels
l_hid_to_hid_bad = lasagne.layers.Conv2DLayer(
l_hid_to_hid_bad, n_out_filters, filter_shape, pad='same')
# Expect the output shape of `l_in_to_hid` as input shape for
# hidden-to-hidden
l_hid_to_hid = lasagne.layers.InputLayer((None, n_out_filters,
width, height))
# Two conv layers; first to `n_hid_hid_filters_0` channels
l_hid_to_hid = lasagne.layers.Conv2DLayer(
l_hid_to_hid, n_hid_hid_filters_0, filter_shape, pad='same')
# then to `n_out_filters` channels
l_hid_to_hid = lasagne.layers.Conv2DLayer(
l_hid_to_hid, n_out_filters, filter_shape, pad='same')
# Ensure that trying to use either 'bad' graph raises ValueError
with pytest.raises(ValueError):
l_rec = lasagne.layers.CustomRecurrentLayer(
l_in, l_in_to_hid_bad, l_hid_to_hid)
with pytest.raises(ValueError):
l_rec = lasagne.layers.CustomRecurrentLayer(
l_in, l_in_to_hid, l_hid_to_hid_bad)
with pytest.raises(ValueError):
l_rec = lasagne.layers.CustomRecurrentLayer(
l_in, l_in_to_hid_bad, l_hid_to_hid_bad)
l_rec = lasagne.layers.CustomRecurrentLayer(
l_in, l_in_to_hid, l_hid_to_hid)
def test_custom_recurrent_init_shape_error():
# Check that the custom recurrent layer throws errors for invalid shapes
n_batch, n_steps, n_channels, width, height = (2, 3, 4, 5, 6)
n_out_filters = 7
filter_shape = (3, 3)
l_in = lasagne.layers.InputLayer(
(n_batch, n_steps, n_channels, width, height))
l_hid_to_hid = lasagne.layers.Conv2DLayer(
lasagne.layers.InputLayer((n_batch, n_out_filters, width, height)),
n_out_filters, filter_shape, pad='same')
# When precompute_input == True, input_to_hidden.shape[0] must be None
# or n_batch*n_steps
l_in_to_hid = lasagne.layers.Conv2DLayer(
lasagne.layers.InputLayer((n_batch, n_channels, width, height)),
n_out_filters, filter_shape, pad='same')
with pytest.raises(ValueError):
l_rec = lasagne.layers.CustomRecurrentLayer(
l_in, l_in_to_hid, l_hid_to_hid, precompute_input=True)
# When precompute_input = False, input_to_hidden.shape[0] must be None
# or match hidden_to_hidden.shape[0] (the batch size)
l_in_to_hid = lasagne.layers.Conv2DLayer(
lasagne.layers.InputLayer((n_batch + 1, n_channels, width, height)),
n_out_filters, filter_shape, pad='same')
with pytest.raises(ValueError):
l_rec = lasagne.layers.CustomRecurrentLayer(
l_in, l_in_to_hid, l_hid_to_hid, precompute_input=False)
# In any case, input_to_hidden and hidden_to_hidden's output shapes after
# the first dimension must match
l_in_to_hid = lasagne.layers.Conv2DLayer(
lasagne.layers.InputLayer((None, n_channels, width + 1, height)),
n_out_filters, filter_shape, pad='same')
with pytest.raises(ValueError):
l_rec = lasagne.layers.CustomRecurrentLayer(
l_in, l_in_to_hid, l_hid_to_hid)
# And, the output shape of input_to_hidden must match the input shape
# of hidden_to_hidden past the first dimension. By not using padding,
# the output of l_in_to_hid will be cropped, which will make the
# shape inappropriate.
l_in_to_hid = lasagne.layers.Conv2DLayer(
lasagne.layers.InputLayer((None, n_channels, width, height)),
n_out_filters, filter_shape)
l_hid_to_hid = lasagne.layers.Conv2DLayer(
lasagne.layers.InputLayer((n_batch, n_out_filters, width, height)),
n_out_filters, filter_shape)
with pytest.raises(ValueError):
l_rec = lasagne.layers.CustomRecurrentLayer(
l_in, l_in_to_hid, l_hid_to_hid)
def test_recurrent_grad_clipping():
num_units = 5
batch_size = 3
seq_len = 2
n_inputs = 4
in_shp = (batch_size, seq_len, n_inputs)
l_inp = InputLayer(in_shp)
x = T.tensor3()
l_rec = RecurrentLayer(l_inp, num_units, grad_clipping=1.0)
output = lasagne.layers.get_output(l_rec, x)
def test_recurrent_bck():
num_batch, seq_len, n_features1 = 2, 3, 4
num_units = 2
x = T.tensor3()
in_shp = (num_batch, seq_len, n_features1)
l_inp = InputLayer(in_shp)
x_in = np.ones(in_shp).astype('float32')
# need to set random seed.
lasagne.random.get_rng().seed(1234)
l_rec_fwd = RecurrentLayer(l_inp, num_units=num_units, backwards=False)
lasagne.random.get_rng().seed(1234)
l_rec_bck = RecurrentLayer(l_inp, num_units=num_units, backwards=True)
l_out_fwd = helper.get_output(l_rec_fwd, x)
l_out_bck = helper.get_output(l_rec_bck, x)
output_fwd = l_out_fwd.eval({x: x_in})
output_bck = l_out_bck.eval({x: x_in})
# test that the backwards model reverses its final input
np.testing.assert_almost_equal(output_fwd, output_bck[:, ::-1])
def test_recurrent_variable_input_size():
# check that seqlen and batchsize None works
num_batch, n_features1 = 6, 5
num_units = 13
x = T.tensor3()
in_shp = (None, None, n_features1)
l_inp = InputLayer(in_shp)
x_in1 = np.ones((num_batch+1, 10, n_features1)).astype('float32')
x_in2 = np.ones((num_batch, 15, n_features1)).astype('float32')
l_rec = RecurrentLayer(l_inp, num_units=num_units, backwards=False)
output = helper.get_output(l_rec, x)
output_val1 = output.eval({x: x_in1})
output_val2 = output.eval({x: x_in2})
def test_recurrent_unroll_scan_fwd():
num_batch, seq_len, n_features1 = 2, 3, 4
num_units = 2
in_shp = (num_batch, seq_len, n_features1)
l_inp = InputLayer(in_shp)
l_mask_inp = InputLayer(in_shp[:2])
x_in = np.random.random(in_shp).astype('float32')
mask_in = np.ones(in_shp[:2]).astype('float32')
# need to set random seed.
lasagne.random.get_rng().seed(1234)
l_rec_scan = RecurrentLayer(l_inp, num_units=num_units, backwards=False,
unroll_scan=False, mask_input=l_mask_inp)
lasagne.random.get_rng().seed(1234)
l_rec_unroll = RecurrentLayer(l_inp, num_units=num_units, backwards=False,
unroll_scan=True, mask_input=l_mask_inp)
output_scan = helper.get_output(l_rec_scan)
output_unrolled = helper.get_output(l_rec_unroll)
output_scan_val = output_scan.eval(
{l_inp.input_var: x_in, l_mask_inp.input_var: mask_in})
output_unrolled_val = output_unrolled.eval(
{l_inp.input_var: x_in, l_mask_inp.input_var: mask_in})
np.testing.assert_almost_equal(output_scan_val, output_unrolled_val)
def test_recurrent_unroll_scan_bck():
num_batch, seq_len, n_features1 = 2, 3, 4
num_units = 2
x = T.tensor3()
in_shp = (num_batch, seq_len, n_features1)
l_inp = InputLayer(in_shp)
x_in = np.random.random(in_shp).astype('float32')
# need to set random seed.
lasagne.random.get_rng().seed(1234)
l_rec_scan = RecurrentLayer(l_inp, num_units=num_units, backwards=True,
unroll_scan=False)
lasagne.random.get_rng().seed(1234)
l_rec_unroll = RecurrentLayer(l_inp, num_units=num_units, backwards=True,
unroll_scan=True)
output_scan = helper.get_output(l_rec_scan, x)
output_unrolled = helper.get_output(l_rec_unroll, x)
output_scan_val = output_scan.eval({x: x_in})
output_unrolled_val = output_unrolled.eval({x: x_in})
np.testing.assert_almost_equal(output_scan_val, output_unrolled_val)
def test_recurrent_precompute():
num_batch, seq_len, n_features1 = 2, 3, 4
num_units = 2
in_shp = (num_batch, seq_len, n_features1)
l_inp = InputLayer(in_shp)
l_mask_inp = InputLayer(in_shp[:2])
x_in = np.random.random(in_shp).astype('float32')
mask_in = np.ones((num_batch, seq_len), dtype='float32')
# need to set random seed.
lasagne.random.get_rng().seed(1234)
l_rec_precompute = RecurrentLayer(l_inp, num_units=num_units,
precompute_input=True,
mask_input=l_mask_inp)
lasagne.random.get_rng().seed(1234)
l_rec_no_precompute = RecurrentLayer(l_inp, num_units=num_units,
precompute_input=False,
mask_input=l_mask_inp)
output_precompute = helper.get_output(
l_rec_precompute).eval({l_inp.input_var: x_in,
l_mask_inp.input_var: mask_in})
output_no_precompute = helper.get_output(
l_rec_no_precompute).eval({l_inp.input_var: x_in,
l_mask_inp.input_var: mask_in})
np.testing.assert_almost_equal(output_precompute, output_no_precompute)
def test_recurrent_return_final():
num_batch, seq_len, n_features = 2, 3, 4
num_units = 2
in_shp = (num_batch, seq_len, n_features)
x_in = np.random.random(in_shp).astype('float32')
l_inp = InputLayer(in_shp)
lasagne.random.get_rng().seed(1234)
l_rec_final = RecurrentLayer(l_inp, num_units, only_return_final=True)
lasagne.random.get_rng().seed(1234)
l_rec_all = RecurrentLayer(l_inp, num_units, only_return_final=False)
output_final = helper.get_output(l_rec_final).eval({l_inp.input_var: x_in})
output_all = helper.get_output(l_rec_all).eval({l_inp.input_var: x_in})
assert output_final.shape == (output_all.shape[0], output_all.shape[2])
assert output_final.shape == lasagne.layers.get_output_shape(l_rec_final)
assert np.allclose(output_final, output_all[:, -1])
def test_lstm_return_shape():
num_batch, seq_len, n_features1, n_features2 = 5, 3, 10, 11
num_units = 6
x = T.tensor4()
in_shp = (num_batch, seq_len, n_features1, n_features2)
l_inp = InputLayer(in_shp)
x_in = np.random.random(in_shp).astype('float32')
l_lstm = LSTMLayer(l_inp, num_units=num_units)
output = helper.get_output(l_lstm, x)
output_val = output.eval({x: x_in})
assert helper.get_output_shape(l_lstm, x_in.shape) == output_val.shape
assert output_val.shape == (num_batch, seq_len, num_units)
def test_lstm_grad():
num_batch, seq_len, n_features = 5, 3, 10
num_units = 6
l_inp = InputLayer((num_batch, seq_len, n_features))
l_lstm = LSTMLayer(l_inp, num_units=num_units)
output = helper.get_output(l_lstm)
g = T.grad(T.mean(output), lasagne.layers.get_all_params(l_lstm))
assert isinstance(g, (list, tuple))
def test_lstm_nparams_no_peepholes():
l_inp = InputLayer((2, 2, 3))
l_lstm = LSTMLayer(l_inp, 5, peepholes=False, learn_init=False)
# 3*n_gates
# the 3 is because we have hid_to_gate, in_to_gate and bias for each gate
assert len(lasagne.layers.get_all_params(l_lstm, trainable=True)) == 12
# bias params + init params
assert len(lasagne.layers.get_all_params(l_lstm, regularizable=False)) == 6
def test_lstm_nparams_peepholes():
l_inp = InputLayer((2, 2, 3))
l_lstm = LSTMLayer(l_inp, 5, peepholes=True, learn_init=False)
# 3*n_gates + peepholes(3).
# the 3 is because we have hid_to_gate, in_to_gate and bias for each gate
assert len(lasagne.layers.get_all_params(l_lstm, trainable=True)) == 15
# bias params(4) + init params(2)
assert len(lasagne.layers.get_all_params(l_lstm, regularizable=False)) == 6
def test_lstm_nparams_learn_init():
l_inp = InputLayer((2, 2, 3))
l_lstm = LSTMLayer(l_inp, 5, peepholes=False, learn_init=True)
# 3*n_gates + inits(2).
# the 3 is because we have hid_to_gate, in_to_gate and bias for each gate
assert len(lasagne.layers.get_all_params(l_lstm, trainable=True)) == 14
# bias params(4) + init params(2)
assert len(lasagne.layers.get_all_params(l_lstm, regularizable=False)) == 6
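# Worked count for the three LSTM parameter tests above (4 gates: input,
# forget, cell, output): 4 gates x (W_in, W_hid, b) = 12 tensors;
# peepholes=True adds 3 cell weights (12 + 3 = 15); learn_init=True adds
# hid_init and cell_init (12 + 2 = 14), matching the asserted totals.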
def test_lstm_hid_init_layer():
# test that you can set hid_init to be a layer
l_inp = InputLayer((2, 2, 3))
l_inp_h = InputLayer((2, 5))
l_cell_h = InputLayer((2, 5))
l_lstm = LSTMLayer(l_inp, 5, hid_init=l_inp_h, cell_init=l_cell_h)
x = T.tensor3()
h = T.matrix()
output = lasagne.layers.get_output(l_lstm, {l_inp: x, l_inp_h: h})
def test_lstm_nparams_hid_init_layer():
# test that you can see layers through hid_init
l_inp = InputLayer((2, 2, 3))
l_inp_h = InputLayer((2, 5))
l_inp_h_de = DenseLayer(l_inp_h, 7)
l_inp_cell = InputLayer((2, 5))
l_inp_cell_de = DenseLayer(l_inp_cell, 7)
l_lstm = LSTMLayer(l_inp, 7, hid_init=l_inp_h_de, cell_init=l_inp_cell_de)
# directly check the layers can be seen through hid_init
layers_to_find = [l_inp, l_inp_h, l_inp_h_de, l_inp_cell, l_inp_cell_de,
l_lstm]
assert lasagne.layers.get_all_layers(l_lstm) == layers_to_find
# 3*n_gates + peepholes(3) + 4
# the 3 is because we have hid_to_gate, in_to_gate and bias for each gate
# 4 is for the W and b parameters in the two DenseLayer layers
assert len(lasagne.layers.get_all_params(l_lstm, trainable=True)) == 19
# LSTM bias params(4) + Dense bias params(1) * 2
assert len(lasagne.layers.get_all_params(l_lstm, regularizable=False)) == 6
def test_lstm_hid_init_mask():
# test that you can set hid_init to be a layer when a mask is provided
l_inp = InputLayer((2, 2, 3))
l_inp_h = InputLayer((2, 5))
l_inp_msk = InputLayer((2, 2))
l_cell_h = InputLayer((2, 5))
l_lstm = LSTMLayer(l_inp, 5, hid_init=l_inp_h, mask_input=l_inp_msk,
cell_init=l_cell_h)
x = T.tensor3()
h = T.matrix()
msk = T.matrix()
inputs = {l_inp: x, l_inp_h: h, l_inp_msk: msk}
output = lasagne.layers.get_output(l_lstm, inputs)
def test_lstm_hid_init_layer_eval():
# Test `hid_init` as a `Layer` with some dummy input. Compare the output of
# a network with a `Layer` as input to `hid_init` to a network with a
# `np.array` as input to `hid_init`
n_units = 7
n_test_cases = 2
in_shp = (n_test_cases, 2, 3)
in_h_shp = (1, n_units)
in_cell_shp = (1, n_units)
# dummy inputs
X_test = np.ones(in_shp, dtype=theano.config.floatX)
Xh_test = np.ones(in_h_shp, dtype=theano.config.floatX)
Xc_test = np.ones(in_cell_shp, dtype=theano.config.floatX)
Xh_test_batch = np.tile(Xh_test, (n_test_cases, 1))
Xc_test_batch = np.tile(Xc_test, (n_test_cases, 1))
# network with `Layer` initializer for hid_init
l_inp = InputLayer(in_shp)
l_inp_h = InputLayer(in_h_shp)
l_inp_cell = InputLayer(in_cell_shp)
l_rec_inp_layer = LSTMLayer(l_inp, n_units, hid_init=l_inp_h,
cell_init=l_inp_cell, nonlinearity=None)
# network with `np.array` initializer for hid_init
l_rec_nparray = LSTMLayer(l_inp, n_units, hid_init=Xh_test,
cell_init=Xc_test, nonlinearity=None)
# copy network parameters from l_rec_inp_layer to l_rec_nparray
l_il_param = dict([(p.name, p) for p in l_rec_inp_layer.get_params()])
l_rn_param = dict([(p.name, p) for p in l_rec_nparray.get_params()])
for k, v in l_rn_param.items():
if k in l_il_param:
v.set_value(l_il_param[k].get_value())
# build the theano functions
X = T.tensor3()
Xh = T.matrix()
Xc = T.matrix()
output_inp_layer = lasagne.layers.get_output(l_rec_inp_layer,
{l_inp: X, l_inp_h:
Xh, l_inp_cell: Xc})
output_nparray = lasagne.layers.get_output(l_rec_nparray, {l_inp: X})
# test both nets with dummy input
output_val_inp_layer = output_inp_layer.eval({X: X_test, Xh: Xh_test_batch,
Xc: Xc_test_batch})
output_val_nparray = output_nparray.eval({X: X_test})
# check output given `Layer` is the same as with `np.array`
assert np.allclose(output_val_inp_layer, output_val_nparray)
def test_lstm_grad_clipping():
# test that you can set the grad_clipping argument
x = T.tensor3()
l_rec = LSTMLayer(InputLayer((2, 2, 3)), 5, grad_clipping=1)
output = lasagne.layers.get_output(l_rec, x)
def test_lstm_bck():
num_batch, seq_len, n_features1 = 2, 3, 4
num_units = 2
x = T.tensor3()
in_shp = (num_batch, seq_len, n_features1)
l_inp = InputLayer(in_shp)
x_in = np.ones(in_shp).astype('float32')
# need to set random seed.
lasagne.random.get_rng().seed(1234)
l_lstm_fwd = LSTMLayer(l_inp, num_units=num_units, backwards=False)
lasagne.random.get_rng().seed(1234)
l_lstm_bck = LSTMLayer(l_inp, num_units=num_units, backwards=True)
output_fwd = helper.get_output(l_lstm_fwd, x)
output_bck = helper.get_output(l_lstm_bck, x)
output_fwd_val = output_fwd.eval({x: x_in})
output_bck_val = output_bck.eval({x: x_in})
# test that the backwards model reverses its final input
np.testing.assert_almost_equal(output_fwd_val, output_bck_val[:, ::-1])
def test_lstm_precompute():
num_batch, seq_len, n_features1 = 2, 3, 4
num_units = 2
in_shp = (num_batch, seq_len, n_features1)
l_inp = InputLayer(in_shp)
l_mask_inp = InputLayer(in_shp[:2])
x_in = np.random.random(in_shp).astype('float32')
mask_in = np.ones((num_batch, seq_len), dtype='float32')
# need to set random seed.
lasagne.random.get_rng().seed(1234)
l_lstm_precompute = LSTMLayer(
l_inp, num_units=num_units, precompute_input=True,
mask_input=l_mask_inp)
lasagne.random.get_rng().seed(1234)
l_lstm_no_precompute = LSTMLayer(
l_inp, num_units=num_units, precompute_input=False,
mask_input=l_mask_inp)
output_precompute = helper.get_output(
l_lstm_precompute).eval({l_inp.input_var: x_in,
l_mask_inp.input_var: mask_in})
output_no_precompute = helper.get_output(
l_lstm_no_precompute).eval({l_inp.input_var: x_in,
l_mask_inp.input_var: mask_in})
# test that precomputing the input gives the same output
np.testing.assert_almost_equal(output_precompute, output_no_precompute)
def test_lstm_variable_input_size():
# check that seqlen and batchsize None works
num_batch, n_features1 = 6, 5
num_units = 13
x = T.tensor3()
in_shp = (None, None, n_features1)
l_inp = InputLayer(in_shp)
x_in1 = np.ones((num_batch+1, 3+1, n_features1)).astype('float32')
x_in2 = np.ones((num_batch, 3, n_features1)).astype('float32')
l_rec = LSTMLayer(l_inp, num_units=num_units, backwards=False)
output = helper.get_output(l_rec, x)
output_val1 = output.eval({x: x_in1})
output_val2 = output.eval({x: x_in2})
def test_lstm_unroll_scan_fwd():
num_batch, seq_len, n_features1 = 2, 3, 4
num_units = 2
in_shp = (num_batch, seq_len, n_features1)
l_inp = InputLayer(in_shp)
l_mask_inp = InputLayer(in_shp[:2])
x_in = np.random.random(in_shp).astype('float32')
mask_in = np.ones(in_shp[:2]).astype('float32')
# need to set random seed.
lasagne.random.get_rng().seed(1234)
l_lstm_scan = LSTMLayer(l_inp, num_units=num_units, backwards=False,
unroll_scan=False, mask_input=l_mask_inp)
lasagne.random.get_rng().seed(1234)
l_lstm_unrolled = LSTMLayer(l_inp, num_units=num_units, backwards=False,
unroll_scan=True, mask_input=l_mask_inp)
output_scan = helper.get_output(l_lstm_scan)
output_unrolled = helper.get_output(l_lstm_unrolled)
output_scan_val = output_scan.eval({l_inp.input_var: x_in,
l_mask_inp.input_var: mask_in})
output_unrolled_val = output_unrolled.eval({l_inp.input_var: x_in,
l_mask_inp.input_var: mask_in})
np.testing.assert_almost_equal(output_scan_val, output_unrolled_val)
def test_lstm_unroll_scan_bck():
num_batch, seq_len, n_features1 = 2, 3, 4
num_units = 2
x = T.tensor3()
in_shp = (num_batch, seq_len, n_features1)
l_inp = InputLayer(in_shp)
x_in = np.random.random(in_shp).astype('float32')
# need to set random seed.
lasagne.random.get_rng().seed(1234)
l_lstm_scan = LSTMLayer(l_inp, num_units=num_units, backwards=True,
unroll_scan=False)
lasagne.random.get_rng().seed(1234)
l_lstm_unrolled = LSTMLayer(l_inp, num_units=num_units, backwards=True,
unroll_scan=True)
output_scan = helper.get_output(l_lstm_scan, x)
output_scan_unrolled = helper.get_output(l_lstm_unrolled, x)
output_scan_val = output_scan.eval({x: x_in})
output_unrolled_val = output_scan_unrolled.eval({x: x_in})
np.testing.assert_almost_equal(output_scan_val, output_unrolled_val)
def test_lstm_passthrough():
# Tests that the LSTM can simply pass through its input
l_in = InputLayer((4, 5, 6))
zero = lasagne.init.Constant(0.)
one = lasagne.init.Constant(1.)
pass_gate = Gate(zero, zero, zero, one, None)
no_gate = Gate(zero, zero, zero, zero, None)
in_pass_gate = Gate(
np.eye(6).astype(theano.config.floatX), zero, zero, zero, None)
l_rec = LSTMLayer(
l_in, 6, pass_gate, no_gate, in_pass_gate, pass_gate, None)
out = lasagne.layers.get_output(l_rec)
inp = np.arange(4*5*6).reshape(4, 5, 6).astype(theano.config.floatX)
np.testing.assert_almost_equal(out.eval({l_in.input_var: inp}), inp)
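# Why the passthrough works: with all nonlinearities set to None, pass_gate
# (zero weights, bias one) always outputs 1, no_gate outputs 0, and
# in_pass_gate applies an identity W_in. Hence c_t = 0 * c_{t-1} + 1 * x_t
# = x_t and h_t = 1 * c_t = x_t, i.e. the layer is the identity map.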
def test_lstm_return_final():
num_batch, seq_len, n_features = 2, 3, 4
num_units = 2
in_shp = (num_batch, seq_len, n_features)
x_in = np.random.random(in_shp).astype('float32')
l_inp = InputLayer(in_shp)
lasagne.random.get_rng().seed(1234)
l_rec_final = LSTMLayer(l_inp, num_units, only_return_final=True)
lasagne.random.get_rng().seed(1234)
l_rec_all = LSTMLayer(l_inp, num_units, only_return_final=False)
output_final = helper.get_output(l_rec_final).eval({l_inp.input_var: x_in})
output_all = helper.get_output(l_rec_all).eval({l_inp.input_var: x_in})
assert output_final.shape == (output_all.shape[0], output_all.shape[2])
assert output_final.shape == lasagne.layers.get_output_shape(l_rec_final)
assert np.allclose(output_final, output_all[:, -1])
def test_gru_return_shape():
num_batch, seq_len, n_features1, n_features2 = 5, 3, 10, 11
num_units = 6
x = T.tensor4()
in_shp = (num_batch, seq_len, n_features1, n_features2)
l_inp = InputLayer(in_shp)
l_rec = GRULayer(l_inp, num_units=num_units)
x_in = np.random.random(in_shp).astype('float32')
output = helper.get_output(l_rec, x)
output_val = output.eval({x: x_in})
assert helper.get_output_shape(l_rec, x_in.shape) == output_val.shape
assert output_val.shape == (num_batch, seq_len, num_units)
def test_gru_grad():
num_batch, seq_len, n_features = 5, 3, 10
num_units = 6
l_inp = InputLayer((num_batch, seq_len, n_features))
l_gru = GRULayer(l_inp,
num_units=num_units)
output = helper.get_output(l_gru)
g = T.grad(T.mean(output), lasagne.layers.get_all_params(l_gru))
assert isinstance(g, (list, tuple))
def test_gru_nparams_learn_init_false():
l_inp = InputLayer((2, 2, 3))
l_gru = GRULayer(l_inp, 5, learn_init=False)
# 3*n_gates
# the 3 is because we have hid_to_gate, in_to_gate and bias for each gate
assert len(lasagne.layers.get_all_params(l_gru, trainable=True)) == 9
# bias params(3) + hid_init
assert len(lasagne.layers.get_all_params(l_gru, regularizable=False)) == 4
def test_gru_nparams_learn_init_true():
l_inp = InputLayer((2, 2, 3))
l_gru = GRULayer(l_inp, 5, learn_init=True)
# 3*n_gates + hid_init
# the 3 is because we have hid_to_gate, in_to_gate and bias for each gate
assert len(lasagne.layers.get_all_params(l_gru, trainable=True)) == 10
# bias params(3) + init params(1)
assert len(lasagne.layers.get_all_params(l_gru, regularizable=False)) == 4
def test_gru_hid_init_layer():
# test that you can set hid_init to be a layer
l_inp = InputLayer((2, 2, 3))
l_inp_h = InputLayer((2, 5))
l_gru = GRULayer(l_inp, 5, hid_init=l_inp_h)
x = T.tensor3()
h = T.matrix()
output = lasagne.layers.get_output(l_gru, {l_inp: x, l_inp_h: h})
def test_gru_nparams_hid_init_layer():
# test that you can see layers through hid_init
l_inp = InputLayer((2, 2, 3))
l_inp_h = InputLayer((2, 5))
l_inp_h_de = DenseLayer(l_inp_h, 7)
l_gru = GRULayer(l_inp, 7, hid_init=l_inp_h_de)
# directly check the layers can be seen through hid_init
assert lasagne.layers.get_all_layers(l_gru) == [l_inp, l_inp_h, l_inp_h_de,
l_gru]
# 3*n_gates + 2
# the 3 is because we have hid_to_gate, in_to_gate and bias for each gate
# 2 is for the W and b parameters in the DenseLayer
assert len(lasagne.layers.get_all_params(l_gru, trainable=True)) == 11
# GRU bias params(3) + Dense bias params(1)
assert len(lasagne.layers.get_all_params(l_gru, regularizable=False)) == 4
def test_gru_hid_init_layer_eval():
# Test `hid_init` as a `Layer` with some dummy input. Compare the output of
# a network with a `Layer` as input to `hid_init` to a network with a
# `np.array` as input to `hid_init`
n_units = 7
n_test_cases = 2
in_shp = (n_test_cases, 2, 3)
in_h_shp = (1, n_units)
# dummy inputs
X_test = np.ones(in_shp, dtype=theano.config.floatX)
Xh_test = np.ones(in_h_shp, dtype=theano.config.floatX)
Xh_test_batch = np.tile(Xh_test, (n_test_cases, 1))
# network with `Layer` initializer for hid_init
l_inp = InputLayer(in_shp)
l_inp_h = InputLayer(in_h_shp)
l_rec_inp_layer = GRULayer(l_inp, n_units, hid_init=l_inp_h)
# network with `np.array` initializer for hid_init
l_rec_nparray = GRULayer(l_inp, n_units, hid_init=Xh_test)
# copy network parameters from l_rec_inp_layer to l_rec_nparray
l_il_param = dict([(p.name, p) for p in l_rec_inp_layer.get_params()])
l_rn_param = dict([(p.name, p) for p in l_rec_nparray.get_params()])
for k, v in l_rn_param.items():
if k in l_il_param:
v.set_value(l_il_param[k].get_value())
# build the theano functions
X = T.tensor3()
Xh = T.matrix()
output_inp_layer = lasagne.layers.get_output(l_rec_inp_layer,
{l_inp: X, l_inp_h: Xh})
output_nparray = lasagne.layers.get_output(l_rec_nparray, {l_inp: X})
# test both nets with dummy input
output_val_inp_layer = output_inp_layer.eval({X: X_test,
Xh: Xh_test_batch})
output_val_nparray = output_nparray.eval({X: X_test})
# check output given `Layer` is the same as with `np.array`
assert np.allclose(output_val_inp_layer, output_val_nparray)
def test_gru_hid_init_mask():
# test that you can set hid_init to be a layer when a mask is provided
l_inp = InputLayer((2, 2, 3))
l_inp_h = InputLayer((2, 5))
l_inp_msk = InputLayer((2, 2))
l_gru = GRULayer(l_inp, 5, hid_init=l_inp_h, mask_input=l_inp_msk)
x = T.tensor3()
h = T.matrix()
msk = T.matrix()
inputs = {l_inp: x, l_inp_h: h, l_inp_msk: msk}
output = lasagne.layers.get_output(l_gru, inputs)
def test_gru_grad_clipping():
# test that you can set the grad_clipping argument
x = T.tensor3()
l_rec = GRULayer(InputLayer((2, 2, 3)), 5, grad_clipping=1)
output = lasagne.layers.get_output(l_rec, x)
def test_gru_bck():
num_batch, seq_len, n_features1 = 2, 3, 4
num_units = 2
x = T.tensor3()
in_shp = (num_batch, seq_len, n_features1)
l_inp = InputLayer(in_shp)
x_in = np.ones(in_shp).astype('float32')
# need to set random seed.
lasagne.random.get_rng().seed(1234)
l_gru_fwd = GRULayer(l_inp, num_units=num_units, backwards=False)
lasagne.random.get_rng().seed(1234)
l_gru_bck = GRULayer(l_inp, num_units=num_units, backwards=True)
output_fwd = helper.get_output(l_gru_fwd, x)
output_bck = helper.get_output(l_gru_bck, x)
output_fwd_val = output_fwd.eval({x: x_in})
output_bck_val = output_bck.eval({x: x_in})
# test that the backwards model reverses its final input
np.testing.assert_almost_equal(output_fwd_val, output_bck_val[:, ::-1])
def test_gru_variable_input_size():
# check that seqlen and batchsize None works
num_batch, n_features1 = 6, 5
num_units = 13
x = T.tensor3()
in_shp = (None, None, n_features1)
l_inp = InputLayer(in_shp)
x_in1 = np.ones((num_batch+1, 10, n_features1)).astype('float32')
x_in2 = np.ones((num_batch, 15, n_features1)).astype('float32')
l_rec = GRULayer(l_inp, num_units=num_units, backwards=False)
output = helper.get_output(l_rec, x)
output.eval({x: x_in1})
output.eval({x: x_in2})
def test_gru_unroll_scan_fwd():
num_batch, seq_len, n_features1 = 2, 3, 4
num_units = 2
in_shp = (num_batch, seq_len, n_features1)
l_inp = InputLayer(in_shp)
l_mask_inp = InputLayer(in_shp[:2])
x_in = np.random.random(in_shp).astype('float32')
mask_in = np.ones(in_shp[:2]).astype('float32')
# need to set random seed.
lasagne.random.get_rng().seed(1234)
l_gru_scan = GRULayer(l_inp, num_units=num_units, backwards=False,
unroll_scan=False, mask_input=l_mask_inp)
lasagne.random.get_rng().seed(1234)
l_gru_unrolled = GRULayer(l_inp, num_units=num_units, backwards=False,
unroll_scan=True, mask_input=l_mask_inp)
output_scan = helper.get_output(l_gru_scan)
output_unrolled = helper.get_output(l_gru_unrolled)
output_scan_val = output_scan.eval({l_inp.input_var: x_in,
l_mask_inp.input_var: mask_in})
output_unrolled_val = output_unrolled.eval({l_inp.input_var: x_in,
l_mask_inp.input_var: mask_in})
np.testing.assert_almost_equal(output_scan_val, output_unrolled_val)
def test_gru_unroll_scan_bck():
num_batch, seq_len, n_features1 = 2, 5, 4
num_units = 2
x = T.tensor3()
in_shp = (num_batch, seq_len, n_features1)
l_inp = InputLayer(in_shp)
x_in = np.random.random(in_shp).astype('float32')
# need to set random seed.
lasagne.random.get_rng().seed(1234)
l_gru_scan = GRULayer(l_inp, num_units=num_units, backwards=True,
unroll_scan=False)
lasagne.random.get_rng().seed(1234)
l_gru_unrolled = GRULayer(l_inp, num_units=num_units, backwards=True,
unroll_scan=True)
output_scan = helper.get_output(l_gru_scan, x)
output_unrolled = helper.get_output(l_gru_unrolled, x)
output_scan_val = output_scan.eval({x: x_in})
output_unrolled_val = output_unrolled.eval({x: x_in})
np.testing.assert_almost_equal(output_scan_val, output_unrolled_val)
def test_gru_precompute():
num_batch, seq_len, n_features1 = 2, 3, 4
num_units = 2
in_shp = (num_batch, seq_len, n_features1)
l_inp = InputLayer(in_shp)
l_mask_inp = InputLayer(in_shp[:2])
x_in = np.random.random(in_shp).astype('float32')
mask_in = np.ones((num_batch, seq_len), dtype='float32')
# need to set random seed.
lasagne.random.get_rng().seed(1234)
l_gru_precompute = GRULayer(l_inp, num_units=num_units,
precompute_input=True, mask_input=l_mask_inp)
lasagne.random.get_rng().seed(1234)
l_gru_no_precompute = GRULayer(l_inp, num_units=num_units,
precompute_input=False,
mask_input=l_mask_inp)
output_precompute = helper.get_output(
l_gru_precompute).eval({l_inp.input_var: x_in,
l_mask_inp.input_var: mask_in})
output_no_precompute = helper.get_output(
l_gru_no_precompute).eval({l_inp.input_var: x_in,
l_mask_inp.input_var: mask_in})
# test that precomputing the input gives the same output
np.testing.assert_almost_equal(output_precompute, output_no_precompute)
def test_gru_passthrough():
# Tests that the GRU can simply pass through its input
l_in = InputLayer((4, 5, 6))
zero = lasagne.init.Constant(0.)
one = lasagne.init.Constant(1.)
pass_gate = Gate(zero, zero, None, one, None)
no_gate = Gate(zero, zero, None, zero, None)
in_pass_gate = Gate(
np.eye(6).astype(theano.config.floatX), zero, None, zero, None)
l_rec = GRULayer(l_in, 6, no_gate, pass_gate, in_pass_gate)
out = lasagne.layers.get_output(l_rec)
inp = np.arange(4*5*6).reshape(4, 5, 6).astype(theano.config.floatX)
np.testing.assert_almost_equal(out.eval({l_in.input_var: inp}), inp)
def test_gru_return_final():
num_batch, seq_len, n_features = 2, 3, 4
num_units = 2
in_shp = (num_batch, seq_len, n_features)
x_in = np.random.random(in_shp).astype('float32')
l_inp = InputLayer(in_shp)
lasagne.random.get_rng().seed(1234)
l_rec_final = GRULayer(l_inp, num_units, only_return_final=True)
lasagne.random.get_rng().seed(1234)
l_rec_all = GRULayer(l_inp, num_units, only_return_final=False)
output_final = helper.get_output(l_rec_final).eval({l_inp.input_var: x_in})
output_all = helper.get_output(l_rec_all).eval({l_inp.input_var: x_in})
assert output_final.shape == (output_all.shape[0], output_all.shape[2])
assert output_final.shape == lasagne.layers.get_output_shape(l_rec_final)
assert np.allclose(output_final, output_all[:, -1])
def test_gradient_steps_error():
# Check that error is raised if gradient_steps is not -1 and unroll_scan
# is true
l_in = InputLayer((2, 2, 3))
with pytest.raises(ValueError):
RecurrentLayer(l_in, 5, gradient_steps=3, unroll_scan=True)
with pytest.raises(ValueError):
LSTMLayer(l_in, 5, gradient_steps=3, unroll_scan=True)
with pytest.raises(ValueError):
GRULayer(l_in, 5, gradient_steps=3, unroll_scan=True)
def test_unroll_none_input_error():
# Test that a ValueError is raised if unroll scan is True and the input
# sequence length is specified as None.
l_in = InputLayer((2, None, 3))
with pytest.raises(ValueError):
RecurrentLayer(l_in, 5, unroll_scan=True)
with pytest.raises(ValueError):
LSTMLayer(l_in, 5, unroll_scan=True)
with pytest.raises(ValueError):
GRULayer(l_in, 5, unroll_scan=True)
def test_CustomRecurrentLayer_child_kwargs():
in_shape = (2, 3, 4)
n_hid = 5
# Construct mock for input-to-hidden layer
in_to_hid = Mock(
Layer,
output_shape=(in_shape[0]*in_shape[1], n_hid),
input_shape=(in_shape[0]*in_shape[1], in_shape[2]),
input_layer=InputLayer((in_shape[0]*in_shape[1], in_shape[2])),
get_output_kwargs=['foo'])
# These two functions get called, need to return dummy values for them
in_to_hid.get_output_for.return_value = T.matrix()
in_to_hid.get_params.return_value = []
# As above, for hidden-to-hidden layer
hid_to_hid = Mock(
Layer,
output_shape=(in_shape[0], n_hid),
input_shape=(in_shape[0], n_hid),
input_layer=InputLayer((in_shape[0], n_hid)),
get_output_kwargs=[])
hid_to_hid.get_output_for.return_value = T.matrix()
hid_to_hid.get_params.return_value = []
# Construct a CustomRecurrentLayer using these Mocks
l_rec = lasagne.layers.CustomRecurrentLayer(
InputLayer(in_shape), in_to_hid, hid_to_hid)
# Call get_output with a kwarg, should be passed to in_to_hid and hid_to_hid
helper.get_output(l_rec, foo='bar')
# Retrieve the arguments used to call in_to_hid.get_output_for
args, kwargs = in_to_hid.get_output_for.call_args
# Should be one argument - the Theano expression
assert len(args) == 1
# One keyword argument - should be 'foo' -> 'bar'
assert kwargs == {'foo': 'bar'}
# Same as with in_to_hid
args, kwargs = hid_to_hid.get_output_for.call_args
assert len(args) == 1
assert kwargs == {'foo': 'bar'}
| 38.887347 | 79 | 0.680395 | 7,672 | 47,637 | 3.880344 | 0.038843 | 0.027141 | 0.019819 | 0.022573 | 0.907155 | 0.892241 | 0.876486 | 0.855459 | 0.838025 | 0.815385 | 0 | 0.020368 | 0.214665 | 47,637 | 1,224 | 80 | 38.919118 | 0.775387 | 0.141592 | 0 | 0.654921 | 0 | 0 | 0.008836 | 0 | 0 | 0 | 0 | 0 | 0.085055 | 1 | 0.069259 | false | 0.009721 | 0.010936 | 0 | 0.080194 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
90574bfee9dab136f797c5275b2b563b84a6cd18 | 109 | py | Python | src/decko/pytest.py | JWLee89/yeezy | b64d9ee65c5abd2d38c10c47bda5e65a83826cb2 | [
"MIT"
] | null | null | null | src/decko/pytest.py | JWLee89/yeezy | b64d9ee65c5abd2d38c10c47bda5e65a83826cb2 | [
"MIT"
] | null | null | null | src/decko/pytest.py | JWLee89/yeezy | b64d9ee65c5abd2d38c10c47bda5e65a83826cb2 | [
"MIT"
] | null | null | null | """
These methods all wrap pytest functions:
TODO
"""
import typing as t
from .decorators import deckorator
| 13.625 | 40 | 0.761468 | 15 | 109 | 5.533333 | 0.933333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.165138 | 109 | 7 | 41 | 15.571429 | 0.912088 | 0.412844 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.142857 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
9073d9c97cd34095c889f8ba64f24b75d898a397 | 218 | py | Python | model/transform_twice.py | kamiyakenta/knowledge-distillation-pytorch | 749c6bb353961147718371b2b694046af0a6e3f1 | [
"MIT"
] | null | null | null | model/transform_twice.py | kamiyakenta/knowledge-distillation-pytorch | 749c6bb353961147718371b2b694046af0a6e3f1 | [
"MIT"
] | 1 | 2021-06-28T10:17:20.000Z | 2021-06-28T10:17:20.000Z | model/transform_twice.py | kamiyakenta/knowledge-distillation-pytorch | 749c6bb353961147718371b2b694046af0a6e3f1 | [
"MIT"
] | null | null | null | class TransformTwice:
def __init__(self, transform):
self.transform = transform
def __call__(self, inp):
out1 = self.transform(inp)
out2 = self.transform(inp)
return out1, out2
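# Usage sketch (hypothetical, assuming torchvision-style callables): wrap an
# augmentation pipeline so a dataset yields two independently augmented views
# of the same input, e.g.
#   transform = TransformTwice(my_augmentation)
#   view1, view2 = transform(image)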
| 24.222222 | 34 | 0.62844 | 24 | 218 | 5.375 | 0.458333 | 0.403101 | 0.248062 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.025478 | 0.279817 | 218 | 8 | 35 | 27.25 | 0.796178 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0 | 0 | 0 | 0.571429 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 6 |
90b5d9b42901f8dec9f037437d7942deb298002f | 9 | py | Python | settings/sample_translation/start_settings.py | bopopescu/Lauecollect | 60ae2b05ea8596ba0decf426e37aeaca0bc8b6be | [
"MIT"
] | null | null | null | settings/sample_translation/start_settings.py | bopopescu/Lauecollect | 60ae2b05ea8596ba0decf426e37aeaca0bc8b6be | [
"MIT"
] | 1 | 2019-10-22T21:28:31.000Z | 2019-10-22T21:39:12.000Z | settings/sample_translation/start_settings.py | bopopescu/Lauecollect | 60ae2b05ea8596ba0decf426e37aeaca0bc8b6be | [
"MIT"
] | 2 | 2019-06-06T15:06:46.000Z | 2020-07-20T02:03:22.000Z | = 11.75
| 4.5 | 8 | 0.444444 | 2 | 9 | 2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.666667 | 0.333333 | 9 | 1 | 9 | 9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
90dc9dc53de6bcbff71fbea8bb67a4fb87e359e2 | 117 | py | Python | poiolib/__init__.py | Poio-NLP/poio-lib | 2af55c863593511dbcf4c611c9265072022d8cdb | [
"Apache-2.0"
] | 1 | 2019-11-05T09:49:13.000Z | 2019-11-05T09:49:13.000Z | poiolib/__init__.py | Poio-NLP/poio-lib | 2af55c863593511dbcf4c611c9265072022d8cdb | [
"Apache-2.0"
] | null | null | null | poiolib/__init__.py | Poio-NLP/poio-lib | 2af55c863593511dbcf4c611c9265072022d8cdb | [
"Apache-2.0"
] | null | null | null | import poiolib.langinfo
import poiolib.corpus
import poiolib.ngrams
import poiolib.capitals
import poiolib.wikipedia
| 19.5 | 24 | 0.871795 | 15 | 117 | 6.8 | 0.466667 | 0.637255 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.08547 | 117 | 5 | 25 | 23.4 | 0.953271 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
90e4a7e8d8475309f5f82c55864d63bfab80d454 | 9,801 | py | Python | tests/unit/network_graph/test_interests.py | weilbith/relay | ab1fc05cbb0ce664409a055f18a67255917c6959 | [
"MIT"
] | null | null | null | tests/unit/network_graph/test_interests.py | weilbith/relay | ab1fc05cbb0ce664409a055f18a67255917c6959 | [
"MIT"
] | null | null | null | tests/unit/network_graph/test_interests.py | weilbith/relay | ab1fc05cbb0ce664409a055f18a67255917c6959 | [
"MIT"
] | null | null | null | import math
import pytest
from conftest import addresses
from relay.blockchain.currency_network_proxy import Trustline
from relay.network_graph.graph import Account, NetworkGraphConfig
from relay.network_graph.graph_constants import (
balance_ab,
creditline_ab,
creditline_ba,
fees_outstanding_a,
fees_outstanding_b,
interest_ab,
interest_ba,
m_time,
)
from relay.network_graph.interests import (
DELTA_TIME_MINIMAL_ALLOWED_VALUE,
calculate_interests,
)
A, B, C, D, E, F, G, H = addresses
SECONDS_PER_YEAR = 60 * 60 * 24 * 365
@pytest.fixture(params=[0, -1, DELTA_TIME_MINIMAL_ALLOWED_VALUE])
def small_non_positive_delta_time(request):
return request.param
@pytest.fixture
def basic_data():
data = {
creditline_ab: 0,
creditline_ba: 0,
interest_ab: 0,
interest_ba: 0,
fees_outstanding_a: 0,
fees_outstanding_b: 0,
m_time: 0,
balance_ab: 0,
}
return data
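# All-zero fields give a neutral A<->B trustline; the keys mirror the
# graph_constants names imported above.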
@pytest.fixture()
def basic_account(basic_data):
return Account(basic_data, A, B)
def test_interests_calculation_zero_interest_rate():
assert (
calculate_interests(
balance=1000,
internal_interest_rate=0,
delta_time_in_seconds=SECONDS_PER_YEAR,
)
== 0
)
def test_interests_calculation_returns_integer():
assert isinstance(
calculate_interests(
balance=1000,
internal_interest_rate=100,
delta_time_in_seconds=SECONDS_PER_YEAR,
),
int,
)
def test_interests_calculation_low_interest_rate():
assert (
calculate_interests(
balance=1000,
internal_interest_rate=100,
delta_time_in_seconds=SECONDS_PER_YEAR,
)
== 10
)
def test_interests_calculation_high_interest_rate():
assert calculate_interests(
balance=1000000000000000000,
internal_interest_rate=2000,
delta_time_in_seconds=SECONDS_PER_YEAR,
) == pytest.approx(1000000000000000000 * (math.exp(0.20) - 1), rel=0.01)
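# The expected value above follows from continuous compounding, assuming
# internal_interest_rate is expressed in hundredths of a percent per year
# (2000 -> 20%; the low-rate test above, 100 -> 1%, is consistent):
# interests = balance * (exp(rate * dt / SECONDS_PER_YEAR) - 1)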
def test_interests_calculation_gives_same_result_as_smart_contracts():
assert (
calculate_interests(
balance=1000000000000000000,
internal_interest_rate=2000,
delta_time_in_seconds=SECONDS_PER_YEAR,
)
== 221402758160169828
) # taken from contract calculation
def test_interests_calculation_no_time():
assert (
calculate_interests(
balance=1000, internal_interest_rate=100, delta_time_in_seconds=0
)
== 0
)
def test_interests_calculation_negative_balance():
assert (
calculate_interests(
balance=-1000,
internal_interest_rate=100,
delta_time_in_seconds=SECONDS_PER_YEAR,
)
== -10
)
def test_interests_calculation_from_A_balance_positive_relevant_interests(
basic_account
):
basic_account.balance = 100 # B owes to A
basic_account.interest_rate = 100 # interest given by A to B
assert basic_account.balance_with_interests(SECONDS_PER_YEAR) == 101
def test_interests_calculation_from_A_balance_negative_relevant_interests(
basic_account
):
basic_account.balance = -100 # A owes to B
basic_account.reverse_interest_rate = 100 # interest given by B to A
assert basic_account.balance_with_interests(SECONDS_PER_YEAR) == -101
def test_interests_calculation_from_A_balance_positive_irrelevant_interests(
basic_account
):
basic_account.balance = 100 # B owes to A
basic_account.reverse_interest_rate = 100 # interest given by B to A
assert basic_account.balance_with_interests(SECONDS_PER_YEAR) == 100
def test_interests_calculation_from_A_balance_negative_irrelevant_interests(
basic_account
):
basic_account.balance = -100 # A owes to B
basic_account.interest_rate = 100 # interest given by A to B
assert basic_account.balance_with_interests(SECONDS_PER_YEAR) == -100
def test_interests_calculation_delta_time(basic_account):
basic_account.balance = 100
basic_account.m_time = SECONDS_PER_YEAR
basic_account.interest_rate = 100
assert basic_account.balance_with_interests(2 * SECONDS_PER_YEAR) == 101
@pytest.mark.parametrize(
"configurable_community",
[
NetworkGraphConfig(
trustlines=[
Trustline(
A, B, 200, 200, balance=100, m_time=0, interest_rate_given=100
)
]
)
],
indirect=["configurable_community"],
)
def test_interests_path_from_A_balance_positive_relevant_interests(
configurable_community
):
# B owes to A
# 1% interest given by A to B
cost, path = configurable_community.find_transfer_path_sender_pays_fees(
A, B, 100, timestamp=SECONDS_PER_YEAR
)
assert path == [A, B]
@pytest.mark.parametrize(
"configurable_community",
[
NetworkGraphConfig(
trustlines=[
Trustline(
A, B, 200, 200, balance=-100, m_time=0, interest_rate_received=100
)
]
)
],
indirect=["configurable_community"],
)
def test_interests_path_from_A_balance_negative_relevant_interests(
configurable_community
):
# A owes to B
# 1% interest given by B to A
cost, path = configurable_community.find_transfer_path_sender_pays_fees(
A, B, 100, timestamp=SECONDS_PER_YEAR
)
assert path == []
@pytest.mark.parametrize(
"configurable_community",
[
NetworkGraphConfig(
trustlines=[
Trustline(
A, B, 200, 200, balance=100, m_time=0, interest_rate_received=100
)
]
)
],
indirect=["configurable_community"],
)
def test_interests_path_from_A_balance_positive_irrelevant_interests(
configurable_community
):
# B owes to A
# 1% interest given by B to A
cost, path = configurable_community.find_transfer_path_sender_pays_fees(
A, B, 100, timestamp=SECONDS_PER_YEAR
)
assert path == [A, B]
@pytest.mark.parametrize(
"configurable_community",
[
NetworkGraphConfig(
trustlines=[
Trustline(
A, B, 200, 200, balance=-100, m_time=0, interest_rate_given=100
)
]
)
],
indirect=["configurable_community"],
)
def test_interests_path_from_A_balance_negative_irrelevant_interests(
configurable_community
):
# A owes to B
# 1% interest given by A to B
cost, path = configurable_community.find_transfer_path_sender_pays_fees(
A, B, 100, timestamp=SECONDS_PER_YEAR
)
assert path == [A, B]
@pytest.mark.parametrize(
"configurable_community",
[
NetworkGraphConfig(
trustlines=[
Trustline(
A, B, 200, 200, balance=100, m_time=0, interest_rate_given=100
)
]
)
],
indirect=["configurable_community"],
)
def test_interests_path_from_B_balance_positive_relevant_interests(
configurable_community
):
# B owes to A
# 1% interest given by A to B
cost, path = configurable_community.find_transfer_path_sender_pays_fees(
B, A, 100, timestamp=SECONDS_PER_YEAR
)
assert path == []
@pytest.mark.parametrize(
"configurable_community",
[
NetworkGraphConfig(
trustlines=[
Trustline(
A, B, 200, 200, balance=-100, m_time=0, interest_rate_received=100
)
]
)
],
indirect=["configurable_community"],
)
def test_interests_path_from_B_balance_negative_relevant_interests(
configurable_community
):
# A owes to B
# 1% interest given by B to A
cost, path = configurable_community.find_transfer_path_sender_pays_fees(
B, A, 100, timestamp=SECONDS_PER_YEAR
)
assert path == [B, A]
@pytest.mark.parametrize(
"configurable_community",
[
NetworkGraphConfig(
trustlines=[
Trustline(
A, B, 200, 200, balance=100, m_time=0, interest_rate_received=100
)
]
)
],
indirect=["configurable_community"],
)
def test_interests_path_from_B_balance_positive_irrelevant_interests(
configurable_community
):
# B owes to A
# 1% interest given by B to A
cost, path = configurable_community.find_transfer_path_sender_pays_fees(
B, A, 100, timestamp=SECONDS_PER_YEAR
)
assert path == [B, A]
@pytest.mark.parametrize(
"configurable_community",
[
NetworkGraphConfig(
trustlines=[
Trustline(
A, B, 200, 200, balance=-100, m_time=0, interest_rate_given=100
)
]
)
],
indirect=["configurable_community"],
)
def test_interests_path_from_B_balance_negative_irrelevant_interests(
configurable_community
):
# A owes to B
# 1% interest given by A to B
cost, path = configurable_community.find_transfer_path_sender_pays_fees(
B, A, 100, timestamp=SECONDS_PER_YEAR
)
assert path == [B, A]
def test_calculate_interests_time_glitch(small_non_positive_delta_time):
assert calculate_interests(
balance=1000000000,
internal_interest_rate=1000,
delta_time_in_seconds=small_non_positive_delta_time,
) == 0
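# Small non-positive elapsed times down to DELTA_TIME_MINIMAL_ALLOWED_VALUE
# appear to be tolerated as "no time passed" (clock glitches), while anything
# below that bound raises, as the next test checks.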
def test_calculate_interests_delta_time_out_of_bounds():
with pytest.raises(ValueError):
calculate_interests(
balance=1000000000,
internal_interest_rate=1000,
delta_time_in_seconds=DELTA_TIME_MINIMAL_ALLOWED_VALUE - 1,
)
| 26.276139 | 86 | 0.658096 | 1,127 | 9,801 | 5.33984 | 0.110914 | 0.111665 | 0.048853 | 0.049352 | 0.826022 | 0.777168 | 0.76321 | 0.757893 | 0.742606 | 0.742606 | 0 | 0.050258 | 0.267116 | 9,801 | 372 | 87 | 26.346774 | 0.787554 | 0.050913 | 0 | 0.523333 | 0 | 0 | 0.037947 | 0.037947 | 0 | 0 | 0 | 0 | 0.066667 | 1 | 0.083333 | false | 0 | 0.023333 | 0.006667 | 0.116667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
2913850cced6c8a7a03644d07790d7c9e4d87147 | 96 | py | Python | venv/lib/python3.8/site-packages/yapftests/__init__.py | Retraces/UkraineBot | 3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71 | [
"MIT"
] | 2 | 2022-03-13T01:58:52.000Z | 2022-03-31T06:07:54.000Z | venv/lib/python3.8/site-packages/yapftests/__init__.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | 19 | 2021-11-20T04:09:18.000Z | 2022-03-23T15:05:55.000Z | venv/lib/python3.8/site-packages/yapftests/__init__.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | null | null | null | /home/runner/.cache/pip/pool/69/b0/6a/86aa8f3f5d232baa4d084b795435597d82b4bb47d0ba04e9d800b42b89 | 96 | 96 | 0.895833 | 9 | 96 | 9.555556 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.427083 | 0 | 96 | 1 | 96 | 96 | 0.46875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
291996842f4a2b5217165cdca25c43167a436ffd | 30 | py | Python | gtm/__init__.py | TimurGimadiev/GTM | 6fbf7de9c9e90a2a8702dbd93da9020e670f04f6 | [
"MIT"
] | null | null | null | gtm/__init__.py | TimurGimadiev/GTM | 6fbf7de9c9e90a2a8702dbd93da9020e670f04f6 | [
"MIT"
] | null | null | null | gtm/__init__.py | TimurGimadiev/GTM | 6fbf7de9c9e90a2a8702dbd93da9020e670f04f6 | [
"MIT"
] | 1 | 2021-07-19T15:34:13.000Z | 2021-07-19T15:34:13.000Z | from .GTM import GTMEstimator
| 15 | 29 | 0.833333 | 4 | 30 | 6.25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.133333 | 30 | 1 | 30 | 30 | 0.961538 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
2935e4550b4fb0896ed412fc0d3927fc756683c4 | 20,594 | py | Python | freezer-api-7.1.0/freezer_api/tests/unit/v1/test_sessions.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | null | null | null | freezer-api-7.1.0/freezer_api/tests/unit/v1/test_sessions.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 5 | 2019-08-14T06:46:03.000Z | 2021-12-13T20:01:25.000Z | freezer-api-7.1.0/freezer_api/tests/unit/v1/test_sessions.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 2 | 2020-03-15T01:24:15.000Z | 2020-07-22T20:34:26.000Z | """
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import random
import falcon
import mock
from mock import patch
from freezer_api.api.v1 import sessions as v1_sessions
from freezer_api.common import exceptions
from freezer_api.tests.unit import common
class TestSessionsCollectionResource(common.FreezerBaseTestCase):
def setUp(self):
super(TestSessionsCollectionResource, self).setUp()
self.mock_db = mock.Mock()
self.mock_req = mock.MagicMock()
self.mock_req.env.__getitem__.side_effect = common.get_req_items
self.mock_req.get_header.return_value = common.fake_session_0[
'user_id']
self.mock_req.status = falcon.HTTP_200
self.resource = v1_sessions.SessionsCollectionResource(self.mock_db)
self.mock_json_body = mock.Mock()
self.mock_json_body.return_value = {}
self.resource.json_body = self.mock_json_body
def test_on_get_return_empty_list(self):
self.mock_db.search_session.return_value = []
expected_result = {'sessions': []}
self.resource.on_get(self.mock_req, self.mock_req)
result = self.mock_req.body
self.assertEqual(expected_result, result)
self.assertEqual(falcon.HTTP_200, self.mock_req.status)
def test_on_get_return_correct_list(self):
self.mock_db.search_session.return_value = [
common.get_fake_session_0(), common.get_fake_session_1()]
expected_result = {'sessions': [common.get_fake_session_0(),
common.get_fake_session_1()]}
self.resource.on_get(self.mock_req, self.mock_req)
result = self.mock_req.body
self.assertEqual(expected_result, result)
self.assertEqual(falcon.HTTP_200, self.mock_req.status)
def test_on_post_raises_when_missing_body(self):
self.mock_db.add_session.return_value = common.fake_session_0[
'session_id']
self.assertRaises(exceptions.BadDataFormat, self.resource.on_post,
self.mock_req, self.mock_req)
def test_on_post_inserts_correct_data(self):
session = common.get_fake_session_0()
self.mock_json_body.return_value = session
self.mock_db.add_session.return_value = 'pjiofrdslaikfunr'
expected_result = {'session_id': 'pjiofrdslaikfunr'}
self.resource.on_post(self.mock_req, self.mock_req)
self.assertEqual(falcon.HTTP_201, self.mock_req.status)
self.assertEqual(expected_result, self.mock_req.body)
class TestSessionsResource(common.FreezerBaseTestCase):
def setUp(self):
super(TestSessionsResource, self).setUp()
self.mock_db = mock.Mock()
self.mock_req = mock.MagicMock()
self.mock_req.env.__getitem__.side_effect = common.get_req_items
self.mock_req.get_header.return_value = common.fake_session_0[
'user_id']
self.mock_req.status = falcon.HTTP_200
self.resource = v1_sessions.SessionsResource(self.mock_db)
self.mock_json_body = mock.Mock()
self.mock_json_body.return_value = {}
self.resource.json_body = self.mock_json_body
def test_create_resource(self):
self.assertIsInstance(self.resource, v1_sessions.SessionsResource)
def test_on_get_return_no_result_and_404_when_not_found(self):
self.mock_db.get_session.return_value = None
self.mock_req.body = None
self.resource.on_get(self.mock_req, self.mock_req,
common.fake_session_0['session_id'])
self.assertIsNone(self.mock_req.body)
self.assertEqual(falcon.HTTP_404, self.mock_req.status)
def test_on_get_return_correct_data(self):
self.mock_db.get_session.return_value = common.get_fake_session_0()
self.resource.on_get(self.mock_req, self.mock_req,
common.fake_session_0['session_id'])
result = self.mock_req.body
self.assertEqual(common.get_fake_session_0(), result)
self.assertEqual(falcon.HTTP_200, self.mock_req.status)
def test_on_delete_removes_proper_data(self):
self.resource.on_delete(self.mock_req, self.mock_req,
common.fake_session_0['session_id'])
result = self.mock_req.body
expected_result = {'session_id': common.fake_session_0['session_id']}
self.assertEqual(falcon.HTTP_204, self.mock_req.status)
self.assertEqual(expected_result, result)
def test_on_patch_ok_with_some_fields(self):
new_version = random.randint(0, 99)
self.mock_db.update_session.return_value = new_version
patch_doc = {'some_field': 'some_value',
'because': 'size_matters'}
self.mock_json_body.return_value = patch_doc
expected_result = {'session_id': common.fake_session_0['session_id'],
'version': new_version}
self.resource.on_patch(self.mock_req, self.mock_req,
common.fake_session_0['session_id'])
self.mock_db.update_session.assert_called_with(
user_id=common.fake_session_0['user_id'],
session_id=common.fake_session_0['session_id'],
patch_doc=patch_doc)
self.assertEqual(falcon.HTTP_200, self.mock_req.status)
result = self.mock_req.body
self.assertEqual(expected_result, result)
def test_on_post_ok(self):
new_version = random.randint(0, 99)
self.mock_db.replace_session.return_value = new_version
session = common.get_fake_session_0()
self.mock_json_body.return_value = session
expected_result = {'session_id': common.fake_session_0['session_id'],
'version': new_version}
self.resource.on_post(self.mock_req, self.mock_req,
common.fake_session_0['session_id'])
self.assertEqual(falcon.HTTP_201, self.mock_req.status)
self.assertEqual(expected_result, self.mock_req.body)
def test_on_post_raises_when_db_replace_session_raises(self):
self.mock_db.replace_session.side_effect = exceptions.AccessForbidden(
'regular test failure')
session = common.get_fake_session_0()
self.mock_json_body.return_value = session
self.assertRaises(exceptions.AccessForbidden, self.resource.on_post,
self.mock_req,
self.mock_req,
common.fake_session_0['session_id'])
class TestSessionsAction(common.FreezerBaseTestCase):
def setUp(self):
super(TestSessionsAction, self).setUp()
self.mock_db = mock.Mock()
self.mock_req = mock.MagicMock()
self.mock_req.env.__getitem__.side_effect = common.get_req_items
self.mock_req.get_header.return_value = common.fake_session_0[
'user_id']
self.mock_req.status = falcon.HTTP_200
self.resource = v1_sessions.SessionsAction(self.mock_db)
self.mock_json_body = mock.Mock()
self.mock_json_body.return_value = {}
self.resource.json_body = self.mock_json_body
def test_create_resource(self):
self.assertIsInstance(self.resource, v1_sessions.SessionsAction)
def test_on_post_raises_when_unable_to_read_action_from_body(self):
self.mock_json_body.return_value = {}
self.assertRaises(exceptions.BadDataFormat, self.resource.on_post,
self.mock_req,
self.mock_req,
common.fake_session_0['session_id'])
def test_on_post_start_action_ok(self):
new_version = random.randint(0, 99)
self.mock_db.get_session.return_value = common.get_fake_session_0()
self.mock_db.update_session.return_value = new_version
action = {"start": {
"job_id": 'job_id_2',
"current_tag": 5
}}
self.mock_json_body.return_value = action
expected_result = {'result': 'success',
'session_tag': 6}
self.resource.on_post(self.mock_req, self.mock_req,
common.fake_session_0['session_id'])
self.assertEqual(falcon.HTTP_202, self.mock_req.status)
self.assertEqual(expected_result, self.mock_req.body)
def test_on_post_start_action_raises_BadDataFormat_when_job_not_in_session(
self):
new_version = random.randint(0, 99)
self.mock_db.get_session.return_value = common.get_fake_session_0()
self.mock_db.update_session.return_value = new_version
action = {"start": {
"job_id": 'missedme',
"current_tag": 5
}}
self.mock_json_body.return_value = action
self.assertRaises(exceptions.BadDataFormat, self.resource.on_post,
self.mock_req,
self.mock_req, common.fake_session_0['session_id'])
def test_on_post_start_action_raises_BadDataFormat_when_curr_tag_too_high(
self):
new_version = random.randint(0, 99)
self.mock_db.get_session.return_value = common.get_fake_session_0()
self.mock_db.update_session.return_value = new_version
action = {"start": {
"job_id": 'missedme',
"current_tag": 6
}}
self.mock_json_body.return_value = action
self.assertRaises(exceptions.BadDataFormat, self.resource.on_post,
self.mock_req,
self.mock_req, common.fake_session_0['session_id'])
def test_on_post_end_action_ok(self):
new_version = random.randint(0, 99)
self.mock_db.get_session.return_value = common.get_fake_session_0()
self.mock_db.update_session.return_value = new_version
action = {"end": {
"job_id": 'job_id_2',
"current_tag": 5,
"result": "success"
}}
self.mock_json_body.return_value = action
expected_result = {'result': 'success',
'session_tag': 5}
self.resource.on_post(self.mock_req, self.mock_req,
common.fake_session_0['session_id'])
self.assertEqual(falcon.HTTP_202, self.mock_req.status)
self.assertEqual(expected_result, self.mock_req.body)
def test_on_post_end_action_raises_BadDataFormat_when_job_not_in_session(
self):
new_version = random.randint(0, 99)
self.mock_db.get_session.return_value = common.get_fake_session_0()
self.mock_db.update_session.return_value = new_version
action = {"end": {
"job_id": 'ahahahahah',
"current_tag": 5,
"result": "success"
}}
self.mock_json_body.return_value = action
self.assertRaises(exceptions.BadDataFormat, self.resource.on_post,
self.mock_req,
self.mock_req, common.fake_session_0['session_id'])
def test_on_post_raises_MethodNotImplemented_when_method_not_implemented(
self):
new_version = random.randint(0, 99)
self.mock_db.get_session.return_value = common.get_fake_session_0()
self.mock_db.update_session.return_value = new_version
action = {"method_not_implemented": {
"job_id": 'ahahahahah',
"current_tag": 5,
"result": "success"
}}
self.mock_json_body.return_value = action
self.assertRaises(exceptions.MethodNotImplemented,
self.resource.on_post, self.mock_req,
self.mock_req, common.fake_session_0['session_id'])
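# The next three tests exercise the session hold-off window: shortly after
# time_start, a "start" that would increment the session tag is answered
# 'hold-off', while once outside the window a stale tag is answered
# 'out-of-sync'. (The window length is defined in the sessions module, not
# shown here.)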
@patch('freezer_api.api.v1.sessions.time')
def test_on_post_start_succeeds_in_holdoff_if_tag_needs_not_increment(
self, mock_time):
mock_time.time.return_value = 1000
new_version = random.randint(0, 99)
session_doc = common.get_fake_session_0()
session_doc['time_start'] = 999
self.mock_db.get_session.return_value = session_doc
self.mock_db.update_session.return_value = new_version
action = {"start": {
"job_id": 'job_id_2',
"current_tag": 4
}}
self.mock_json_body.return_value = action
expected_result = {'result': 'success',
'session_tag': 5}
self.resource.on_post(self.mock_req, self.mock_req,
common.fake_session_0['session_id'])
self.assertEqual(falcon.HTTP_202, self.mock_req.status)
self.assertEqual(expected_result, self.mock_req.body)
@patch('freezer_api.api.v1.sessions.time')
def test_on_post_start_replies_holdoff_if_tag_would_increment(self,
mock_time):
mock_time.time.return_value = 1000
new_version = random.randint(0, 99)
session_doc = common.get_fake_session_0()
session_doc['time_start'] = 999
self.mock_db.get_session.return_value = session_doc
self.mock_db.update_session.return_value = new_version
action = {"start": {
"job_id": 'job_id_2',
"current_tag": 5
}}
self.mock_json_body.return_value = action
expected_result = {'result': 'hold-off',
'session_tag': 5}
self.resource.on_post(self.mock_req, self.mock_req,
common.fake_session_0['session_id'])
self.assertEqual(falcon.HTTP_202, self.mock_req.status)
self.assertEqual(expected_result, self.mock_req.body)
@patch('freezer_api.api.v1.sessions.time')
def test_on_post_start_outofholdoff_replies_outofsync_when_tag_too_low(
self, mock_time):
mock_time.time.return_value = 2000
new_version = random.randint(0, 99)
session_doc = common.get_fake_session_0()
session_doc['time_start'] = 999
self.mock_db.get_session.return_value = session_doc
self.mock_db.update_session.return_value = new_version
action = {"start": {
"job_id": 'job_id_2',
"current_tag": 2
}}
self.mock_json_body.return_value = action
expected_result = {'result': 'out-of-sync',
'session_tag': 5}
self.resource.on_post(self.mock_req, self.mock_req,
common.fake_session_0['session_id'])
self.assertEqual(falcon.HTTP_202, self.mock_req.status)
self.assertEqual(expected_result, self.mock_req.body)
class TestSessions(common.FreezerBaseTestCase):
def setUp(self):
super(TestSessions, self).setUp()
self.session_doc = {}
self.session = v1_sessions.Session(self.session_doc)
def test_create_resource(self):
self.assertIsInstance(self.session, v1_sessions.Session)
def test_overall_result_running(self):
self.session_doc['jobs'] = {'job1': {'status': 'completed',
'result': 'success'},
'job2': {'status': 'running',
'result': ''}}
res = self.session.get_job_overall_result()
self.assertEqual('running', res)
def test_overall_result_fail(self):
self.session_doc['jobs'] = {'job1': {'status': 'completed',
'result': 'success'},
'job2': {'status': 'completed',
'result': 'fail'}}
res = self.session.get_job_overall_result()
self.assertEqual('fail', res)
def test_overall_result_success(self):
self.session_doc['jobs'] = {'job1': {'status': 'completed',
'result': 'success'},
'job2': {'status': 'completed',
'result': 'success'}}
res = self.session.get_job_overall_result()
self.assertEqual('success', res)
class TestSessionsJobs(common.FreezerBaseTestCase):
def setUp(self):
super(TestSessionsJobs, self).setUp()
self.mock_db = mock.Mock()
self.mock_req = mock.MagicMock()
self.mock_req.env.__getitem__.side_effect = common.get_req_items
self.mock_req.get_header.return_value = common.fake_session_0[
'user_id']
self.mock_req.status = falcon.HTTP_200
self.resource = v1_sessions.SessionsJob(self.mock_db)
def test_create_resource(self):
self.assertIsInstance(self.resource, v1_sessions.SessionsJob)
def test_on_put_adds_job_to_session_jobs(self):
session = common.get_fake_session_0()
job = common.get_fake_job_0()
job_info = {job['job_id']: {'client_id': job['client_id'],
'status': job['job_schedule']['status'],
'result': job['job_schedule']['result'],
'time_started': job['job_schedule'][
'time_started'],
'time_ended': job['job_schedule'][
'time_ended']}}
session_update_doc = {'jobs': job_info}
self.mock_db.get_session.return_value = session
self.mock_db.get_job.return_value = job
self.resource.on_put(self.mock_req, self.mock_req,
session['session_id'],
job['job_id'])
self.mock_db.update_session.assert_called_with(
user_id=session['user_id'],
session_id=session['session_id'],
patch_doc=session_update_doc)
def test_on_put_updates_job_with_session_info(self):
session = common.get_fake_session_0()
job = common.get_fake_job_0()
self.mock_db.get_session.return_value = session
self.mock_db.get_job.return_value = job
job_update_doc = {
'session_id': session['session_id'],
'session_tag': session['session_tag'],
'job_schedule': session['schedule']
}
self.resource.on_put(self.mock_req, self.mock_req,
session['session_id'],
job['job_id'])
self.mock_db.update_job.assert_called_with(user_id=session['user_id'],
job_id=job['job_id'],
patch_doc=job_update_doc)
def test_on_delete_removes_job_from_session_jobs(self):
session = common.get_fake_session_0()
updated_session = common.get_fake_session_1()
job = common.get_fake_job_0()
self.mock_db.get_session.return_value = session
self.mock_db.get_job.return_value = job
self.resource.on_delete(self.mock_req, self.mock_req,
session['session_id'],
'job_id_2')
self.mock_db.replace_session.assert_called_with(
user_id=session['user_id'],
session_id=session['session_id'],
doc=updated_session)
def test_on_delete_removes_session_info_from_job_and_stops_job(self):
session = common.get_fake_session_0()
job = common.get_fake_job_0()
self.mock_db.get_session.return_value = session
self.mock_db.get_job.return_value = job
job_update_doc = {
'session_id': '',
'session_tag': 0,
'job_schedule': {
'event': 'stop'
}
}
self.resource.on_delete(self.mock_req, self.mock_req,
session['session_id'],
job['job_id'])
self.mock_db.update_job.assert_called_with(user_id=session['user_id'],
job_id=job['job_id'],
patch_doc=job_update_doc)
| 43.631356 | 79 | 0.623434 | 2,500 | 20,594 | 4.7632 | 0.0888 | 0.11085 | 0.084061 | 0.039301 | 0.826923 | 0.791317 | 0.762429 | 0.751008 | 0.731273 | 0.698606 | 0 | 0.014148 | 0.279256 | 20,594 | 471 | 80 | 43.723992 | 0.788116 | 0.02695 | 0 | 0.683544 | 0 | 0 | 0.078233 | 0.005891 | 0 | 0 | 0 | 0 | 0.113924 | 1 | 0.091139 | false | 0 | 0.017722 | 0 | 0.121519 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
294af699408cea7e94bcfe99fb3066d1937639d0 | 2,807 | py | Python | epytope/Data/pssms/smmpmbec/mat/B_08_01_11.py | christopher-mohr/epytope | 8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd | [
"BSD-3-Clause"
] | 7 | 2021-02-01T18:11:28.000Z | 2022-01-31T19:14:07.000Z | epytope/Data/pssms/smmpmbec/mat/B_08_01_11.py | christopher-mohr/epytope | 8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd | [
"BSD-3-Clause"
] | 22 | 2021-01-02T15:25:23.000Z | 2022-03-14T11:32:53.000Z | epytope/Data/pssms/smmpmbec/mat/B_08_01_11.py | christopher-mohr/epytope | 8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd | [
"BSD-3-Clause"
] | 4 | 2021-05-28T08:50:38.000Z | 2022-03-14T11:45:32.000Z | B_08_01_11 = {0: {'A': -0.026, 'C': -0.004, 'E': -0.048, 'D': -0.021, 'G': 0.016, 'F': -0.001, 'I': -0.0, 'H': 0.026, 'K': 0.036, 'M': 0.004, 'L': -0.047, 'N': 0.004, 'Q': -0.001, 'P': -0.0, 'S': 0.017, 'R': 0.054, 'T': -0.013, 'W': 0.015, 'V': -0.045, 'Y': 0.034}, 1: {'A': 0.024, 'C': 0.015, 'E': -0.003, 'D': -0.026, 'G': 0.017, 'F': 0.045, 'I': -0.023, 'H': 0.037, 'K': 0.018, 'M': -0.013, 'L': -0.02, 'N': -0.005, 'Q': -0.033, 'P': -0.154, 'S': 0.049, 'R': 0.027, 'T': 0.02, 'W': 0.005, 'V': -0.041, 'Y': 0.063}, 2: {'A': -0.002, 'C': -0.015, 'E': 0.042, 'D': 0.051, 'G': -0.013, 'F': -0.131, 'I': -0.098, 'H': -0.01, 'K': -0.028, 'M': -0.02, 'L': 0.005, 'N': 0.035, 'Q': 0.113, 'P': 0.109, 'S': 0.039, 'R': -0.006, 'T': 0.043, 'W': -0.029, 'V': -0.016, 'Y': -0.07}, 3: {'A': 0.096, 'C': -0.012, 'E': -0.019, 'D': -0.042, 'G': 0.038, 'F': -0.032, 'I': 0.014, 'H': -0.001, 'K': 0.043, 'M': 0.001, 'L': -0.005, 'N': -0.067, 'Q': -0.019, 'P': -0.062, 'S': 0.06, 'R': 0.036, 'T': 0.021, 'W': -0.07, 'V': 0.032, 'Y': -0.009}, 4: {'A': 0.041, 'C': 0.001, 'E': 0.002, 'D': -0.022, 'G': 0.025, 'F': -0.111, 'I': -0.008, 'H': -0.019, 'K': -0.049, 'M': -0.025, 'L': -0.031, 'N': 0.045, 'Q': 0.053, 'P': 0.139, 'S': 0.053, 'R': -0.026, 'T': 0.013, 'W': -0.031, 'V': 0.012, 'Y': -0.062}, 5: {'A': -0.057, 'C': 0.021, 'E': -0.002, 'D': 0.01, 'G': 0.016, 'F': -0.01, 'I': 0.002, 'H': 0.026, 'K': 0.018, 'M': -0.005, 'L': -0.044, 'N': 0.014, 'Q': 0.007, 'P': 0.002, 'S': -0.008, 'R': 0.019, 'T': -0.034, 'W': 0.04, 'V': -0.04, 'Y': 0.025}, 6: {'A': -0.027, 'C': -0.052, 'E': 0.004, 'D': 0.007, 'G': -0.004, 'F': -0.066, 'I': 0.031, 'H': -0.017, 'K': 0.009, 'M': -0.007, 'L': 0.007, 'N': 0.006, 'Q': 0.052, 'P': 0.078, 'S': 0.023, 'R': 0.01, 'T': 0.038, 'W': -0.029, 'V': 0.03, 'Y': -0.091}, 7: {'A': 0.06, 'C': 0.009, 'E': 0.043, 'D': 0.017, 'G': 0.019, 'F': -0.034, 'I': -0.0, 'H': -0.043, 'K': -0.112, 'M': -0.012, 'L': 0.01, 'N': -0.029, 'Q': 0.047, 'P': 0.075, 'S': 0.017, 'R': -0.072, 'T': 0.007, 'W': -0.021, 'V': 0.025, 'Y': -0.004}, 8: {'A': 0.049, 'C': -0.001, 'E': 0.014, 'D': 0.034, 'G': 0.009, 'F': -0.104, 'I': 0.007, 'H': -0.037, 'K': -0.054, 'M': -0.006, 'L': -0.026, 'N': 0.014, 'Q': 0.038, 'P': 0.08, 'S': 0.045, 'R': -0.032, 'T': 0.053, 'W': -0.019, 'V': 0.029, 'Y': -0.094}, 9: {'A': -0.107, 'C': 0.002, 'E': 0.024, 'D': 0.022, 'G': -0.037, 'F': 0.01, 'I': 0.003, 'H': 0.026, 'K': 0.006, 'M': 0.021, 'L': -0.012, 'N': 0.044, 'Q': 0.024, 'P': 0.03, 'S': -0.041, 'R': 0.011, 'T': -0.064, 'W': 0.045, 'V': -0.042, 'Y': 0.035}, 10: {'A': 0.101, 'C': 0.021, 'E': -0.061, 'D': -0.013, 'G': 0.014, 'F': 0.034, 'I': -0.008, 'H': 0.035, 'K': 0.112, 'M': -0.067, 'L': -0.139, 'N': -0.055, 'Q': -0.159, 'P': -0.014, 'S': 0.012, 'R': 0.118, 'T': -0.005, 'W': 0.009, 'V': -0.0, 'Y': 0.067}, -1: {'con': 4.2866}} | 2,807 | 2,807 | 0.393659 | 679 | 2,807 | 1.622975 | 0.172312 | 0.019964 | 0.013612 | 0.016334 | 0.225953 | 0 | 0 | 0 | 0 | 0 | 0 | 0.373299 | 0.162095 | 2,807 | 1 | 2,807 | 2,807 | 0.095238 | 0 | 0 | 0 | 0 | 0 | 0.079416 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
465f958545af93c73d463a336f2ff9fcee147926 | 138 | py | Python | Modulo4/modulos/mas/despedida.py | DiegoAV95/python_curso_-domingos | 3e0cf0d4c08aab797a3defde8af44e9243987b4d | [
"Apache-2.0"
] | null | null | null | Modulo4/modulos/mas/despedida.py | DiegoAV95/python_curso_-domingos | 3e0cf0d4c08aab797a3defde8af44e9243987b4d | [
"Apache-2.0"
] | null | null | null | Modulo4/modulos/mas/despedida.py | DiegoAV95/python_curso_-domingos | 3e0cf0d4c08aab797a3defde8af44e9243987b4d | [
"Apache-2.0"
] | null | null | null | import os
import numpy as np
def chau():
print('este es el adios')  # "this is the farewell"
def otro_saludo():
print('otro saludo!!!!')  # "another greeting!!!!"
# despedida() | 10.615385 | 29 | 0.623188 | 20 | 138 | 4.25 | 0.75 | 0.235294 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.224638 | 138 | 13 | 30 | 10.615385 | 0.794393 | 0.07971 | 0 | 0 | 0 | 0 | 0.246032 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0.333333 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
4681854a6c823b8ab6cbb78bafbf7c1afd17f986 | 109 | py | Python | src/cmp/cool_lang/utils/__init__.py | codestrange/cool-compiler-2020 | 30508965d75a1a1d1362d0b51bef8da3978fd0c2 | [
"MIT"
] | 3 | 2020-01-14T04:47:32.000Z | 2020-09-10T17:57:20.000Z | src/cmp/cool_lang/utils/__init__.py | codestrange/cool-compiler-2020 | 30508965d75a1a1d1362d0b51bef8da3978fd0c2 | [
"MIT"
] | 5 | 2020-01-14T06:06:35.000Z | 2020-02-19T01:01:33.000Z | src/cmp/cool_lang/utils/__init__.py | codestrange/cool-compiler-2020 | 30508965d75a1a1d1362d0b51bef8da3978fd0c2 | [
"MIT"
] | 3 | 2020-01-14T04:58:24.000Z | 2020-01-14T16:23:41.000Z | from .attribute_dict import AttributeDict
from .find_column import find_column
from .visitor import on, when
| 27.25 | 41 | 0.844037 | 16 | 109 | 5.5625 | 0.625 | 0.224719 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.119266 | 109 | 3 | 42 | 36.333333 | 0.927083 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
d3c81ac291bcca5e02197346a5692a957c79a294 | 123 | py | Python | canvas/tools/__init__.py | SilicalNZ/canvas | 44d1eee02c334aae6b41aeba01ed0ecdf83aed21 | [
"MIT"
] | 7 | 2019-08-04T20:37:55.000Z | 2020-03-05T08:36:10.000Z | canvas/tools/__init__.py | SilicalNZ/canvas | 44d1eee02c334aae6b41aeba01ed0ecdf83aed21 | [
"MIT"
] | 1 | 2019-10-21T05:43:28.000Z | 2019-10-21T05:43:28.000Z | canvas/tools/__init__.py | SilicalNZ/canvas | 44d1eee02c334aae6b41aeba01ed0ecdf83aed21 | [
"MIT"
] | null | null | null | from .alterations import *
from .shapes import *
from .sorters import *
from .transformers import *
from .geometry import * | 24.6 | 27 | 0.764228 | 15 | 123 | 6.266667 | 0.466667 | 0.425532 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.154472 | 123 | 5 | 28 | 24.6 | 0.903846 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
d3cad218ce4eb7e184da64dfcffa0a1d5e24f619 | 40,594 | py | Python | trait_browser/test_searches.py | UW-GAC/pie | 89ae277f5ba1357580d78c3527f26200686308a6 | [
"MIT"
] | null | null | null | trait_browser/test_searches.py | UW-GAC/pie | 89ae277f5ba1357580d78c3527f26200686308a6 | [
"MIT"
] | 3 | 2020-01-02T20:17:06.000Z | 2020-01-04T21:13:09.000Z | trait_browser/test_searches.py | UW-GAC/pie | 89ae277f5ba1357580d78c3527f26200686308a6 | [
"MIT"
] | 1 | 2021-10-29T22:15:27.000Z | 2021-10-29T22:15:27.000Z | """Test the functions in searches.py."""
from django.test import TestCase
from watson.models import SearchEntry
from . import factories
from . import models
from . import searches
class ClearSearchIndexMixin(object):
"""Clear django-watson search index records in tests.
Normally, django runs the TestCase tests in a transaction, but this doesn't
work for the watson search records because they are stored in a MyISAM
table, which doesn't use transactions. The records in the table therefore
need to be cleared after each test.
"""
def tearDown(self):
super(ClearSearchIndexMixin, self).tearDown()
SearchEntry.objects.all().delete()
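# A minimal usage sketch (hypothetical test class): list the mixin before
# TestCase so this tearDown runs after every search test:
#
#   class MySearchTest(ClearSearchIndexMixin, TestCase):
#       def test_something(self):
#           ...  # SearchEntry rows created here are removed in tearDown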
class SearchSourceDatasetsTest(ClearSearchIndexMixin, TestCase):
def test_returns_all_datasets_with_no_input(self):
"""All datasets are returned if nothing is passed to search."""
datasets = factories.SourceDatasetFactory.create_batch(10)
qs = searches.search_source_datasets()
self.assertEqual(qs.count(), models.SourceDataset.objects.current().count())
def test_does_not_find_deprecated_datasets(self):
"""No deprecated datasets are returned if nothing is passed to search."""
dataset = factories.SourceDatasetFactory.create()
dataset.source_study_version.i_is_deprecated = True
dataset.source_study_version.save()
qs = searches.search_source_datasets()
self.assertEqual(qs.count(), 0)
def test_description_no_matches(self):
"""No results are found if the search query doesn't match the dataset description."""
dataset = factories.SourceDatasetFactory.create(i_dbgap_description='lorem')
qs = searches.search_source_datasets(description='foobar')
self.assertQuerysetEqual(qs, [])
def test_description_one_word_exact_match(self):
"""Only the dataset whose description that matches the search query is found."""
factories.SourceDatasetFactory.create(i_dbgap_description='other dataset')
dataset = factories.SourceDatasetFactory.create(i_dbgap_description='lorem')
qs = searches.search_source_datasets(description='lorem')
self.assertQuerysetEqual(qs, [repr(dataset)])
def test_description_one_word_substring_match(self):
"""Only the dataset whose description contains words that begin with the search query is found."""
factories.SourceDatasetFactory.create(i_dbgap_description='other dataset')
dataset = factories.SourceDatasetFactory.create(i_dbgap_description='lorem')
qs = searches.search_source_datasets(description='lore')
self.assertQuerysetEqual(qs, [repr(dataset)])
def test_description_one_word_substring_matches_beginning_of_word_only(self):
"""Only datasets whose descriptions contains words that end with the search query are not found."""
factories.SourceDatasetFactory.create(i_dbgap_description='other dataset')
dataset = factories.SourceDatasetFactory.create(i_dbgap_description='lorem')
qs = searches.search_source_datasets(description='orem')
self.assertEqual(qs.count(), 0)
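# This prefix-only behavior is consistent with django-watson's MySQL backend,
# which appears to run boolean-mode full-text queries with a trailing
# wildcard (e.g. 'orem*'), so word suffixes never match.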
def test_description_one_word_substring_match_short_search(self):
"""Only datasets whose description contains words that begin with a (short) search query are found."""
factories.SourceDatasetFactory.create(i_dbgap_description='other dataset')
dataset = factories.SourceDatasetFactory.create(i_dbgap_description='lorem')
qs = searches.search_source_datasets(description='lo')
self.assertQuerysetEqual(qs, [repr(dataset)])
def test_description_one_word_substring_match_short_word(self):
"""Short word with three letters in the description are found."""
factories.SourceDatasetFactory.create(i_dbgap_description='other dataset')
dataset = factories.SourceDatasetFactory.create(i_dbgap_description='abc')
qs = searches.search_source_datasets(description='abc')
self.assertQuerysetEqual(qs, [repr(dataset)])
def test_description_multiple_words_exact_match(self):
"""Only datasets whose description contains words that exactly match multiple search terms is found."""
factories.SourceDatasetFactory.create(i_dbgap_description='other dataset')
dataset = factories.SourceDatasetFactory.create(i_dbgap_description='lorem ipsum')
qs = searches.search_source_datasets(description='lorem ipsum')
self.assertQuerysetEqual(qs, [repr(dataset)])
def test_description_multiple_words_substring_match(self):
"""Only datasets whose description contains words that begin with multiple search terms is found."""
factories.SourceDatasetFactory.create(i_dbgap_description='other dataset')
dataset = factories.SourceDatasetFactory.create(i_dbgap_description='lorem ipsum')
qs = searches.search_source_datasets(description='lore ipsu')
self.assertQuerysetEqual(qs, [repr(dataset)])
def test_description_match_can_be_anywhere(self):
"""Datasets are found when the search query term is not the first word."""
factories.SourceDatasetFactory.create(i_dbgap_description='other dataset')
dataset = factories.SourceDatasetFactory.create(i_dbgap_description='lorem ipsum')
qs = searches.search_source_datasets(description='ipsu')
self.assertQuerysetEqual(qs, [repr(dataset)])
def test_description_finds_only_descriptions_with_all_search_terms(self):
"""Dataset whose descriptions contain all words in the search query is found."""
factories.SourceDatasetFactory.create(i_dbgap_description='lorem other words')
dataset = factories.SourceDatasetFactory.create(i_dbgap_description='lorem ipsum other words')
qs = searches.search_source_datasets(description='lorem ipsum')
self.assertQuerysetEqual(qs, [repr(dataset)])
def test_description_matches_search_terms_in_any_order(self):
"""Datasets whose descriptions contain all search query words in any order are found."""
factories.SourceDatasetFactory.create(i_dbgap_description='lorem other words')
dataset_1 = factories.SourceDatasetFactory.create(i_dbgap_description='lorem ipsum other words')
dataset_2 = factories.SourceDatasetFactory.create(i_dbgap_description='ipsum lorem other words')
qs = searches.search_source_datasets(description='ipsum lorem')
self.assertIn(dataset_1, qs)
self.assertIn(dataset_2, qs)
def test_description_stop_words(self):
"""Dataset whose description contains common default stop words is found."""
# However is a stopword in MySQL by default.
dataset = factories.SourceDatasetFactory.create(i_dbgap_description='however has stop words')
qs = searches.search_source_datasets(description='however')
self.assertQuerysetEqual(qs, [repr(dataset)])
def test_description_is_case_insensitive(self):
"""Datasets whose descriptions match search term but with different case are found."""
dataset_1 = factories.SourceDatasetFactory.create(i_dbgap_description='lorem ipsum')
dataset_2 = factories.SourceDatasetFactory.create(i_dbgap_description='LOREM other')
qs = searches.search_source_datasets(description='lorem')
self.assertIn(dataset_1, qs)
self.assertIn(dataset_2, qs)
def test_description_does_not_match_dataset_name_field(self):
"""Datasets whose name field matches description query are not found."""
factories.SourceDatasetFactory.create(
dataset_name='lorem',
i_dbgap_description='other description')
qs = searches.search_source_datasets(description='lorem')
self.assertEqual(len(qs), 0)
def test_dataset_name_does_not_match_description_field(self):
"""Datasets whose description field matches name query are not found."""
factories.SourceDatasetFactory.create(
dataset_name='other',
i_dbgap_description='lorem')
qs = searches.search_source_datasets(name='lorem')
self.assertEqual(len(qs), 0)
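# The two tests above show that name and description are indexed as separate
# search fields: a query against one field never matches content that only
# appears in the other.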
def test_description_can_include_a_number(self):
"""Can search for "words" that contain both letters and numbers."""
dataset = factories.SourceDatasetFactory.create(i_dbgap_description='abcd123')
qs = searches.search_source_datasets(description='abcd123')
self.assertQuerysetEqual(qs, [repr(dataset)])
def test_description_can_be_only_numbers(self):
"""Can search for "words" that contain only letters."""
dataset = factories.SourceDatasetFactory.create(i_dbgap_description='123456')
qs = searches.search_source_datasets(description='123456')
self.assertQuerysetEqual(qs, [repr(dataset)])
def test_finds_matching_dataset_in_one_specified_study(self):
"""Datasets only in the requested study are found."""
factories.StudyFactory.create()
dataset = factories.SourceDatasetFactory.create()
qs = searches.search_source_datasets(studies=[dataset.source_study_version.study.pk])
self.assertQuerysetEqual(qs, [repr(dataset)])
def test_finds_matching_dataset_in_two_specified_studies(self):
"""Datasets in two requested studies are found."""
dataset_1 = factories.SourceDatasetFactory.create()
dataset_2 = factories.SourceDatasetFactory.create()
studies = [
dataset_1.source_study_version.study.pk,
dataset_2.source_study_version.study.pk,
]
qs = searches.search_source_datasets(studies=studies)
self.assertEqual(qs.count(), 2)
self.assertIn(dataset_1, qs)
self.assertIn(dataset_2, qs)
def test_finds_only_exact_match_name(self):
"""Dataset name must be an exact match."""
dataset = factories.SourceDatasetFactory.create(dataset_name='ipsum')
factories.SourceDatasetFactory.create(dataset_name='other')
qs = searches.search_source_datasets(name='ipsum')
self.assertQuerysetEqual(qs, [repr(dataset)])
def test_name_finds_case_insensitive_match(self):
"""Dataset name can be case insensitive."""
dataset = factories.SourceDatasetFactory.create(dataset_name='IpSuM')
factories.SourceDatasetFactory.create(dataset_name='other')
qs = searches.search_source_datasets(name='ipsum')
self.assertQuerysetEqual(qs, [repr(dataset)])
def test_does_not_find_substring_name_match(self):
"""Substrings of dataset names are not matched by default."""
dataset = factories.SourceDatasetFactory.create(dataset_name='ipsum')
qs = searches.search_source_datasets(name='ipsu')
self.assertEqual(len(qs), 0)
def test_finds_name_beginning_with_requested_string_if_specified(self):
"""Substrings of at the beginning of dataset names are matched if requested."""
dataset = factories.SourceDatasetFactory.create(dataset_name='ipsum')
qs = searches.search_source_datasets(name='ipsu', match_exact_name=False)
self.assertQuerysetEqual(qs, [repr(dataset)])
def test_finds_name_containing_requested_string_if_specified(self):
"""Substrings of dataset names are matched if requested."""
dataset = factories.SourceDatasetFactory.create(dataset_name='ipsum')
qs = searches.search_source_datasets(name='psu', match_exact_name=False)
self.assertQuerysetEqual(qs, [repr(dataset)])
def test_works_with_both_dataset_name_and_description(self):
"""Searching works when dataset name and description are both specified."""
dataset = factories.SourceDatasetFactory.create(dataset_name='ipsum', i_dbgap_description='lorem')
factories.SourceDatasetFactory.create(dataset_name='ipsum', i_dbgap_description='other')
factories.SourceDatasetFactory.create(dataset_name='other', i_dbgap_description='lorem')
qs = searches.search_source_datasets(name='ipsum', description='lorem')
self.assertQuerysetEqual(qs, [repr(dataset)])
def test_works_with_dataset_name_description_and_study(self):
"""Searching works when dataset name, description, and study are all specified."""
dataset = factories.SourceDatasetFactory.create(dataset_name='ipsum', i_dbgap_description='lorem')
factories.SourceDatasetFactory.create(dataset_name='ipsum', i_dbgap_description='lorem')
study = dataset.source_study_version.study
qs = searches.search_source_datasets(name='ipsum', description='lorem', studies=[study.pk])
self.assertQuerysetEqual(qs, [repr(dataset)])
def test_default_ordering_by_dataset_accession(self):
"""Datasets are ordered by dataset accession."""
study = factories.StudyFactory.create()
dataset_1 = factories.SourceDatasetFactory.create(i_accession=2, source_study_version__study=study)
dataset_2 = factories.SourceDatasetFactory.create(i_accession=1, source_study_version__study=study)
qs = searches.search_source_datasets()
self.assertEqual(list(qs), [dataset_2, dataset_1])
def test_default_ordering_by_study_and_dataset_accession(self):
"""Datasets are ordered by dataset accession."""
study_1 = factories.StudyFactory.create(i_accession=2)
study_2 = factories.StudyFactory.create(i_accession=1)
dataset_1 = factories.SourceDatasetFactory.create(i_accession=1, source_study_version__study=study_1)
dataset_2 = factories.SourceDatasetFactory.create(i_accession=2, source_study_version__study=study_2)
qs = searches.search_source_datasets()
self.assertEqual(list(qs), [dataset_2, dataset_1])
class SearchSourceTraitsTest(ClearSearchIndexMixin, TestCase):
def test_returns_all_traits_with_no_input(self):
"""All traits are returned if nothing is passed to search."""
traits = factories.SourceTraitFactory.create_batch(10)
qs = searches.search_source_traits()
self.assertEqual(qs.count(), models.SourceTrait.objects.current().count())
def test_does_not_find_deprecated_traits(self):
"""No deprecated traits are returned if nothing is passed to search."""
trait = factories.SourceTraitFactory.create()
trait.source_dataset.source_study_version.i_is_deprecated = True
trait.source_dataset.source_study_version.save()
qs = searches.search_source_traits()
self.assertEqual(qs.count(), 0)
def test_description_no_matches(self):
"""No results are found if the search query doesn't match the trait description."""
trait = factories.SourceTraitFactory.create(i_description='lorem')
qs = searches.search_source_traits(description='foobar')
self.assertQuerysetEqual(qs, [])
def test_description_one_word_exact_match(self):
"""Only the trait whose description that matches the search query is found."""
factories.SourceTraitFactory.create(i_description='other trait')
trait = factories.SourceTraitFactory.create(i_description='lorem')
qs = searches.search_source_traits(description='lorem')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_one_word_substring_match(self):
"""Trait whose description contains words that begin with the search query is found."""
factories.SourceTraitFactory.create(i_description='other trait')
trait = factories.SourceTraitFactory.create(i_description='lorem')
qs = searches.search_source_traits(description='lore')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_one_word_substring_matches_beginning_of_word_only(self):
"""Traits whose descriptions contains words that end with the search query are not found."""
factories.SourceTraitFactory.create(i_description='other trait')
trait = factories.SourceTraitFactory.create(i_description='lorem')
qs = searches.search_source_traits(description='orem')
self.assertEqual(qs.count(), 0)
def test_description_one_word_substring_match_short_search(self):
"""Traits whose description contains words that begin with a (short) search query are found."""
factories.SourceTraitFactory.create(i_description='other trait')
trait = factories.SourceTraitFactory.create(i_description='lorem')
qs = searches.search_source_traits(description='lo')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_one_word_substring_match_short_word(self):
"""Short word with three letters in the description are found."""
factories.SourceTraitFactory.create(i_description='other trait')
trait = factories.SourceTraitFactory.create(i_description='abc')
qs = searches.search_source_traits(description='abc')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_multiple_words_exact_match(self):
"""Trait whose description contains words that exactly match multiple search terms is found."""
factories.SourceTraitFactory.create(i_description='other trait')
trait = factories.SourceTraitFactory.create(i_description='lorem ipsum')
qs = searches.search_source_traits(description='lorem ipsum')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_multiple_words_substring_match(self):
"""Trait whose description contains words that begin with multiple search terms is found."""
factories.SourceTraitFactory.create(i_description='other trait')
trait = factories.SourceTraitFactory.create(i_description='lorem ipsum')
qs = searches.search_source_traits(description='lore ipsu')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_match_can_be_anywhere(self):
"""Trait when the search query term is not the first word is found."""
factories.SourceTraitFactory.create(i_description='other trait')
trait = factories.SourceTraitFactory.create(i_description='lorem ipsum')
qs = searches.search_source_traits(description='ipsu')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_finds_only_descriptions_with_all_search_terms(self):
"""Trait whose descriptions contain all words in the search query is found."""
factories.SourceTraitFactory.create(i_description='lorem other words')
trait = factories.SourceTraitFactory.create(i_description='lorem ipsum other words')
qs = searches.search_source_traits(description='lorem ipsum')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_matches_search_terms_in_any_order(self):
"""Traits whose descriptions contain all search query words in any order are found."""
factories.SourceTraitFactory.create(i_description='lorem other words')
trait_1 = factories.SourceTraitFactory.create(i_description='lorem ipsum other words')
trait_2 = factories.SourceTraitFactory.create(i_description='ipsum lorem other words')
qs = searches.search_source_traits(description='ipsum lorem')
self.assertIn(trait_1, qs)
self.assertIn(trait_2, qs)
def test_description_stop_words(self):
"""Trait whose description contains common default stop words is found."""
# "however" is a stop word in MySQL by default.
trait = factories.SourceTraitFactory.create(i_description='however has stop words')
qs = searches.search_source_traits(description='however')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_is_case_insensitive(self):
"""Traits whose descriptions match the search term regardless of case are found."""
trait_1 = factories.SourceTraitFactory.create(i_description='lorem ipsum')
trait_2 = factories.SourceTraitFactory.create(i_description='LOREM other')
qs = searches.search_source_traits(description='lorem')
self.assertIn(trait_1, qs)
self.assertIn(trait_2, qs)
def test_description_does_not_match_trait_name_field(self):
"""Traits whose name field matches description query are not found."""
factories.SourceTraitFactory.create(
i_trait_name='lorem',
i_description='other description')
qs = searches.search_source_traits(description='lorem')
self.assertEqual(len(qs), 0)
def test_trait_name_does_not_match_description_field(self):
"""Traits whose description field matches name query are not found."""
factories.SourceTraitFactory.create(
i_trait_name='other',
i_description='lorem')
qs = searches.search_source_traits(name='lorem')
self.assertEqual(len(qs), 0)
def test_description_can_include_a_number(self):
"""Can search for "words" that contain both letters and numbers."""
trait = factories.SourceTraitFactory.create(i_description='abcd123')
qs = searches.search_source_traits(description='abcd123')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_can_be_only_numbers(self):
"""Can search for "words" that contain only numbers."""
trait = factories.SourceTraitFactory.create(i_description='123456')
qs = searches.search_source_traits(description='123456')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_finds_matching_trait_in_one_specified_dataset(self):
"""Only traits in the requested dataset are found."""
factories.SourceDatasetFactory.create()
trait = factories.SourceTraitFactory.create()
qs = searches.search_source_traits(datasets=[trait.source_dataset])
self.assertQuerysetEqual(qs, [repr(trait)])
def test_finds_matching_trait_in_two_specified_datasets(self):
"""Traits in two requested datasets are found."""
trait_1 = factories.SourceTraitFactory.create()
trait_2 = factories.SourceTraitFactory.create()
datasets = [
trait_1.source_dataset,
trait_2.source_dataset,
]
qs = searches.search_source_traits(datasets=datasets)
self.assertEqual(qs.count(), 2)
self.assertIn(trait_1, qs)
self.assertIn(trait_2, qs)
def test_finds_only_exact_match_name(self):
"""Trait name must be an exact match."""
trait = factories.SourceTraitFactory.create(i_trait_name='ipsum')
factories.SourceTraitFactory.create(i_trait_name='other')
qs = searches.search_source_traits(name='ipsum')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_name_finds_case_insensitive_match(self):
"""Trait name matching is case insensitive."""
trait = factories.SourceTraitFactory.create(i_trait_name='IpSuM')
factories.SourceTraitFactory.create(i_trait_name='other')
qs = searches.search_source_traits(name='ipsum')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_does_not_find_substring_name_match(self):
"""Substrings of trait names are not matched by default."""
trait = factories.SourceTraitFactory.create(i_trait_name='ipsum')
qs = searches.search_source_traits(name='ipsu')
self.assertEqual(len(qs), 0)
def test_finds_name_beginning_with_requested_string_if_specified(self):
"""Substrings at the beginning of trait names are matched if requested."""
trait = factories.SourceTraitFactory.create(i_trait_name='ipsum')
qs = searches.search_source_traits(name='ipsu', match_exact_name=False)
self.assertQuerysetEqual(qs, [repr(trait)])
def test_finds_name_containing_requested_string_if_specified(self):
"""Substrings of trait names are matched if requested."""
trait = factories.SourceTraitFactory.create(i_trait_name='ipsum')
qs = searches.search_source_traits(name='psu', match_exact_name=False)
self.assertQuerysetEqual(qs, [repr(trait)])
def test_works_with_both_trait_name_and_description(self):
"""Searching works when trait name and description are both specified."""
trait = factories.SourceTraitFactory.create(i_trait_name='ipsum', i_description='lorem')
factories.SourceTraitFactory.create(i_trait_name='ipsum', i_description='other')
factories.SourceTraitFactory.create(i_trait_name='other', i_description='lorem')
qs = searches.search_source_traits(name='ipsum', description='lorem')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_works_with_trait_name_description_and_dataset(self):
"""Searching works when trait name, description, and dataset are all specified."""
trait = factories.SourceTraitFactory.create(i_trait_name='ipsum', i_description='lorem')
factories.SourceTraitFactory.create(i_trait_name='ipsum', i_description='lorem')
dataset = trait.source_dataset
qs = searches.search_source_traits(name='ipsum', description='lorem', datasets=[dataset])
self.assertQuerysetEqual(qs, [repr(trait)])
def test_default_ordering_by_trait(self):
"""Traits within a dataset are ordered by variable accession."""
dataset = factories.SourceDatasetFactory.create()
trait_1 = factories.SourceTraitFactory.create(
i_dbgap_variable_accession=2,
source_dataset=dataset)
trait_2 = factories.SourceTraitFactory.create(
i_dbgap_variable_accession=1,
source_dataset=dataset)
qs = searches.search_source_traits()
self.assertEqual(list(qs), [trait_2, trait_1])
def test_default_ordering_by_dataset_and_trait(self):
"""Traits are ordered by dataset accession and then variable accession."""
study = factories.StudyFactory.create()
dataset_1 = factories.SourceDatasetFactory.create(i_accession=2, source_study_version__study=study)
dataset_2 = factories.SourceDatasetFactory.create(i_accession=1, source_study_version__study=study)
trait_1 = factories.SourceTraitFactory.create(
i_dbgap_variable_accession=1,
source_dataset=dataset_1)
trait_2 = factories.SourceTraitFactory.create(
i_dbgap_variable_accession=2,
source_dataset=dataset_2)
qs = searches.search_source_traits()
self.assertEqual(list(qs), [trait_2, trait_1])
def test_default_ordering_by_study_dataset_and_trait(self):
"""Traits are ordered by study accession, dataset accession, and then variable accession."""
study_1 = factories.StudyFactory.create(i_accession=2)
study_2 = factories.StudyFactory.create(i_accession=1)
dataset_1 = factories.SourceDatasetFactory.create(i_accession=1, source_study_version__study=study_1)
dataset_2 = factories.SourceDatasetFactory.create(i_accession=2, source_study_version__study=study_2)
trait_1 = factories.SourceTraitFactory.create(
i_dbgap_variable_accession=1,
source_dataset=dataset_1)
trait_2 = factories.SourceTraitFactory.create(
i_dbgap_variable_accession=2,
source_dataset=dataset_2)
qs = searches.search_source_traits()
self.assertEqual(list(qs), [trait_2, trait_1])
def test_does_not_find_harmonized_traits(self):
"""Source trait search function does not find matching harmonized traits."""
trait = factories.HarmonizedTraitFactory.create(i_trait_name='lorem')
self.assertEqual(searches.search_source_traits(name='lorem').count(), 0)
def test_filters_to_selected_datasets_only(self):
"""Only traits in the specified datasets are returned."""
dataset = factories.SourceDatasetFactory.create()
traits = factories.SourceTraitFactory.create_batch(5, source_dataset=dataset)
other_dataset = factories.SourceDatasetFactory.create()
other_traits = factories.SourceTraitFactory.create_batch(5, source_dataset=other_dataset)
qs = searches.search_source_traits(datasets=[dataset])
self.assertEqual(len(qs), len(traits))
for trait in traits:
self.assertIn(trait, qs)
for trait in other_traits:
self.assertNotIn(trait, qs)
def test_works_with_dataset_querysets(self):
"""Finds expected traits when a dataset queryset is passed."""
dataset = factories.SourceDatasetFactory.create()
traits = factories.SourceTraitFactory.create_batch(5, source_dataset=dataset)
other_dataset = factories.SourceDatasetFactory.create()
other_traits = factories.SourceTraitFactory.create_batch(5, source_dataset=other_dataset)
dataset_qs = models.SourceDataset.objects.filter(pk=dataset.pk)
qs = searches.search_source_traits(datasets=dataset_qs)
self.assertEqual(len(qs), len(traits))
for trait in traits:
self.assertIn(trait, qs)
for trait in other_traits:
self.assertNotIn(trait, qs)
def test_finds_no_matching_traits_with_empty_dataset_array(self):
"""No traits are found when an empty list of datasets is given."""
trait = factories.SourceTraitFactory.create(i_trait_name='lorem')
qs = searches.search_source_traits(name='lorem', datasets=[])
self.assertEqual(len(qs), 0)
class SearchHarmonizedTraitsTest(ClearSearchIndexMixin, TestCase):
"""Tests of the harmonized trait search function."""
def test_returns_all_traits_with_no_input(self):
"""All traits are returned if nothing is passed to search."""
traits = factories.HarmonizedTraitFactory.create_batch(10)
qs = searches.search_harmonized_traits()
self.assertEqual(qs.count(), models.HarmonizedTrait.objects.current().count())
def test_does_not_find_deprecated_traits(self):
"""No deprecated traits are returned if nothing is passed to search."""
trait = factories.HarmonizedTraitFactory.create()
trait.harmonized_trait_set_version.i_is_deprecated = True
trait.harmonized_trait_set_version.save()
qs = searches.search_harmonized_traits()
self.assertEqual(qs.count(), 0)
def test_description_no_matches(self):
"""No results are found if the search query doesn't match the trait description."""
trait = factories.HarmonizedTraitFactory.create(i_description='lorem')
qs = searches.search_harmonized_traits(description='foobar')
self.assertQuerysetEqual(qs, [])
def test_description_one_word_exact_match(self):
"""Only the trait whose description matches the search query is found."""
factories.HarmonizedTraitFactory.create(i_description='other trait')
trait = factories.HarmonizedTraitFactory.create(i_description='lorem')
qs = searches.search_harmonized_traits(description='lorem')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_one_word_substring_match(self):
"""Trait whose description contains words that begin with the search query is found."""
factories.HarmonizedTraitFactory.create(i_description='other trait')
trait = factories.HarmonizedTraitFactory.create(i_description='lorem')
qs = searches.search_harmonized_traits(description='lore')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_one_word_substring_matches_beginning_of_word_only(self):
"""Traits whose descriptions contain words that end with the search query are not found."""
factories.HarmonizedTraitFactory.create(i_description='other trait')
trait = factories.HarmonizedTraitFactory.create(i_description='lorem')
qs = searches.search_harmonized_traits(description='orem')
self.assertEqual(qs.count(), 0)
def test_description_one_word_substring_match_short_search(self):
"""Traits whose descriptions contain words that begin with a (short) search query are found."""
factories.HarmonizedTraitFactory.create(i_description='other trait')
trait = factories.HarmonizedTraitFactory.create(i_description='lorem')
qs = searches.search_harmonized_traits(description='lo')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_one_word_substring_match_short_word(self):
"""A short, three-letter word in the description is found."""
factories.HarmonizedTraitFactory.create(i_description='other trait')
trait = factories.HarmonizedTraitFactory.create(i_description='abc')
qs = searches.search_harmonized_traits(description='abc')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_multiple_words_exact_match(self):
"""Trait whose description contains words that exactly match multiple search terms is found."""
factories.HarmonizedTraitFactory.create(i_description='other trait')
trait = factories.HarmonizedTraitFactory.create(i_description='lorem ipsum')
qs = searches.search_harmonized_traits(description='lorem ipsum')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_multiple_words_substring_match(self):
"""Trait whose description contains words that begin with multiple search terms is found."""
factories.HarmonizedTraitFactory.create(i_description='other trait')
trait = factories.HarmonizedTraitFactory.create(i_description='lorem ipsum')
qs = searches.search_harmonized_traits(description='lore ipsu')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_match_can_be_anywhere(self):
"""Trait is found when the matching search term is not the first word of the description."""
factories.HarmonizedTraitFactory.create(i_description='other trait')
trait = factories.HarmonizedTraitFactory.create(i_description='lorem ipsum')
qs = searches.search_harmonized_traits(description='ipsu')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_finds_only_descriptions_with_all_search_terms(self):
"""Only traits whose descriptions contain all words in the search query are found."""
factories.HarmonizedTraitFactory.create(i_description='lorem other words')
trait = factories.HarmonizedTraitFactory.create(i_description='lorem ipsum other words')
qs = searches.search_harmonized_traits(description='lorem ipsum')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_matches_search_terms_in_any_order(self):
"""Traits whose descriptions contain all search query words in any order are found."""
factories.HarmonizedTraitFactory.create(i_description='lorem other words')
trait_1 = factories.HarmonizedTraitFactory.create(i_description='lorem ipsum other words')
trait_2 = factories.HarmonizedTraitFactory.create(i_description='ipsum lorem other words')
qs = searches.search_harmonized_traits(description='ipsum lorem')
self.assertIn(trait_1, qs)
self.assertIn(trait_2, qs)
def test_description_stop_words(self):
"""Trait whose description contains common default stop words is found."""
# "however" is a stop word in MySQL by default.
trait = factories.HarmonizedTraitFactory.create(i_description='however has stop words')
qs = searches.search_harmonized_traits(description='however')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_is_case_insensitive(self):
"""Traits whose descriptions match the search term regardless of case are found."""
trait_1 = factories.HarmonizedTraitFactory.create(i_description='lorem ipsum')
trait_2 = factories.HarmonizedTraitFactory.create(i_description='LOREM other')
qs = searches.search_harmonized_traits(description='lorem')
self.assertIn(trait_1, qs)
self.assertIn(trait_2, qs)
def test_description_does_not_match_trait_name_field(self):
"""Traits whose name field matches description query are not found."""
factories.HarmonizedTraitFactory.create(
i_trait_name='lorem',
i_description='other description')
qs = searches.search_harmonized_traits(description='lorem')
self.assertEqual(len(qs), 0)
def test_trait_name_does_not_match_description_field(self):
"""Traits whose description field matches name query are not found."""
factories.HarmonizedTraitFactory.create(
i_trait_name='other',
i_description='lorem')
qs = searches.search_harmonized_traits(name='lorem')
self.assertEqual(len(qs), 0)
def test_description_can_include_a_number(self):
"""Can search for "words" that contain both letters and numbers."""
trait = factories.HarmonizedTraitFactory.create(i_description='abcd123')
qs = searches.search_harmonized_traits(description='abcd123')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_description_can_be_only_numbers(self):
"""Can search for "words" that contain only numbers."""
trait = factories.HarmonizedTraitFactory.create(i_description='123456')
qs = searches.search_harmonized_traits(description='123456')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_finds_only_exact_match_name(self):
"""Trait name must be an exact match."""
trait = factories.HarmonizedTraitFactory.create(i_trait_name='ipsum')
factories.HarmonizedTraitFactory.create(i_trait_name='other')
qs = searches.search_harmonized_traits(name='ipsum')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_name_finds_case_insensitive_match(self):
"""Trait name matching is case insensitive."""
trait = factories.HarmonizedTraitFactory.create(i_trait_name='IpSuM')
factories.HarmonizedTraitFactory.create(i_trait_name='other')
qs = searches.search_harmonized_traits(name='ipsum')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_does_not_find_substring_name_match(self):
"""Substrings of trait names are not matched by default."""
trait = factories.HarmonizedTraitFactory.create(i_trait_name='ipsum')
qs = searches.search_harmonized_traits(name='ipsu')
self.assertEqual(len(qs), 0)
def test_finds_name_beginning_with_requested_string_if_specified(self):
"""Substrings at the beginning of trait names are matched if requested."""
trait = factories.HarmonizedTraitFactory.create(i_trait_name='ipsum')
qs = searches.search_harmonized_traits(name='ipsu', match_exact_name=False)
self.assertQuerysetEqual(qs, [repr(trait)])
def test_finds_name_containing_requested_string_if_specified(self):
"""Substrings of trait names are matched if requested."""
trait = factories.HarmonizedTraitFactory.create(i_trait_name='ipsum')
qs = searches.search_harmonized_traits(name='psu', match_exact_name=False)
self.assertQuerysetEqual(qs, [repr(trait)])
def test_works_with_both_trait_name_and_description(self):
"""Searching works when trait name and description are both specified."""
trait = factories.HarmonizedTraitFactory.create(i_trait_name='ipsum', i_description='lorem')
factories.HarmonizedTraitFactory.create(i_trait_name='ipsum', i_description='other')
factories.HarmonizedTraitFactory.create(i_trait_name='other', i_description='lorem')
qs = searches.search_harmonized_traits(name='ipsum', description='lorem')
self.assertQuerysetEqual(qs, [repr(trait)])
def test_default_ordering(self):
"""Traits are ordered by harmonized trait set ID."""
trait_set_1 = factories.HarmonizedTraitSetFactory.create(i_id=2)
trait_set_2 = factories.HarmonizedTraitSetFactory.create(i_id=1)
trait_set_version_1 = factories.HarmonizedTraitSetVersionFactory.create(
harmonized_trait_set=trait_set_1
)
trait_set_version_2 = factories.HarmonizedTraitSetVersionFactory.create(
harmonized_trait_set=trait_set_2
)
trait_1 = factories.HarmonizedTraitFactory.create(
i_trait_id=1,
harmonized_trait_set_version=trait_set_version_1
)
trait_2 = factories.HarmonizedTraitFactory.create(
i_trait_id=2,
harmonized_trait_set_version=trait_set_version_2
)
qs = searches.search_harmonized_traits()
self.assertEqual(list(qs), [trait_2, trait_1])
def test_does_not_find_source_traits(self):
"""Harmonized trait search function does not find matching source traits."""
trait = factories.SourceTraitFactory.create(i_trait_name='lorem')
self.assertEqual(searches.search_harmonized_traits(name='lorem').count(), 0)
| 55.380628 | 111 | 0.72964 | 4,705 | 40,594 | 6.045484 | 0.043996 | 0.032485 | 0.050626 | 0.049501 | 0.939003 | 0.918436 | 0.887498 | 0.838982 | 0.801856 | 0.74768 | 0 | 0.005687 | 0.177021 | 40,594 | 732 | 112 | 55.456284 | 0.845726 | 0.159974 | 0 | 0.653409 | 0 | 0 | 0.051523 | 0 | 0 | 0 | 0 | 0 | 0.200758 | 1 | 0.176136 | false | 0 | 0.00947 | 0 | 0.193182 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
d3e4f7c92fc9e7dd6d6fdcadea1cb145c15a815c | 90 | py | Python | brawlbracket/routes/test.py | TheLastBanana/BrawlBracket | 1cad26b6499352b1b282388f4f76bfb4b2b6b4fe | [
"BSD-3-Clause"
] | null | null | null | brawlbracket/routes/test.py | TheLastBanana/BrawlBracket | 1cad26b6499352b1b282388f4f76bfb4b2b6b4fe | [
"BSD-3-Clause"
] | null | null | null | brawlbracket/routes/test.py | TheLastBanana/BrawlBracket | 1cad26b6499352b1b282388f4f76bfb4b2b6b4fe | [
"BSD-3-Clause"
] | null | null | null | from brawlbracket.app import app
print('\n\n\n----------------test----------------\n\n\n') | 45 | 57 | 0.477778 | 13 | 90 | 3.307692 | 0.538462 | 0.186047 | 0.139535 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.044444 | 90 | 2 | 57 | 45 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0.527473 | 0.527473 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0.5 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 6 |
d3eb1d1666bc60916979183c8fc2be7611060750 | 354 | py | Python | renormalizer/mps/__init__.py | liwt31/Renormalizer | 123a9d53f4f5f32c0088c255475f0ee60d02c745 | [
"Apache-2.0"
] | null | null | null | renormalizer/mps/__init__.py | liwt31/Renormalizer | 123a9d53f4f5f32c0088c255475f0ee60d02c745 | [
"Apache-2.0"
] | null | null | null | renormalizer/mps/__init__.py | liwt31/Renormalizer | 123a9d53f4f5f32c0088c255475f0ee60d02c745 | [
"Apache-2.0"
] | null | null | null | from renormalizer.mps.backend import backend
from renormalizer.mps.mpo import Mpo
from renormalizer.mps.mps import Mps, BraKetPair
from renormalizer.mps.mpdm import MpDm, MpDmFull
from renormalizer.mps.thermalprop import ThermalProp, load_thermal_state
from renormalizer.mps.supermpo import SuperLiouville
from renormalizer.mps.solver import optimize_mps | 50.571429 | 72 | 0.867232 | 48 | 354 | 6.333333 | 0.354167 | 0.368421 | 0.4375 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.084746 | 354 | 7 | 73 | 50.571429 | 0.938272 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
31057aaf5c592240d3a230082e9f3cdab5add908 | 151 | py | Python | easygraph/functions/structural_holes/__init__.py | coreturn/Easy-Graph | ee46d84250c4d4cf22271ca13449b15fad88ad7b | [
"BSD-3-Clause"
] | null | null | null | easygraph/functions/structural_holes/__init__.py | coreturn/Easy-Graph | ee46d84250c4d4cf22271ca13449b15fad88ad7b | [
"BSD-3-Clause"
] | null | null | null | easygraph/functions/structural_holes/__init__.py | coreturn/Easy-Graph | ee46d84250c4d4cf22271ca13449b15fad88ad7b | [
"BSD-3-Clause"
] | null | null | null | from .HIS import *
from .MaxD import *
from .AP_Greedy import *
from .HAM import *
from .evaluation import *
from .metrics import *
from .ICC import * | 18.875 | 25 | 0.721854 | 22 | 151 | 4.909091 | 0.454545 | 0.555556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.18543 | 151 | 8 | 26 | 18.875 | 0.878049 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
31138988fc12976cd03c967511413c1f58791dbc | 46 | py | Python | SubShell.py | Anirban83314/OutLook-Automation-All-scopes-with-APP-utility. | 91b5f8edbc6136be82b1f78fa8f5ce40cb1158ec | [
"MIT"
] | 1 | 2019-04-16T10:31:41.000Z | 2019-04-16T10:31:41.000Z | SubShell.py | Anirban83314/OutLook-Automation-All-scopes-with-APP-utility. | 91b5f8edbc6136be82b1f78fa8f5ce40cb1158ec | [
"MIT"
] | 1 | 2017-12-28T13:25:50.000Z | 2017-12-28T13:25:50.000Z | SubShell.py | Anirban83314/OutLook-Automation-All-scopes-with-APP-utility. | 91b5f8edbc6136be82b1f78fa8f5ce40cb1158ec | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import os
import sys
| 11.5 | 22 | 0.695652 | 8 | 46 | 4 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.195652 | 46 | 3 | 23 | 15.333333 | 0.864865 | 0.434783 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
312e51705255540888acb615c2ac6aa14ccf7602 | 104 | py | Python | bitmovin_api_sdk/encoding/encodings/streams/sprites/customdata/__init__.py | jaythecaesarean/bitmovin-api-sdk-python | 48166511fcb9082041c552ace55a9b66cc59b794 | [
"MIT"
] | 11 | 2019-07-03T10:41:16.000Z | 2022-02-25T21:48:06.000Z | bitmovin_api_sdk/encoding/encodings/streams/sprites/customdata/__init__.py | jaythecaesarean/bitmovin-api-sdk-python | 48166511fcb9082041c552ace55a9b66cc59b794 | [
"MIT"
] | 8 | 2019-11-23T00:01:25.000Z | 2021-04-29T12:30:31.000Z | bitmovin_api_sdk/encoding/encodings/streams/sprites/customdata/__init__.py | jaythecaesarean/bitmovin-api-sdk-python | 48166511fcb9082041c552ace55a9b66cc59b794 | [
"MIT"
] | 13 | 2020-01-02T14:58:18.000Z | 2022-03-26T12:10:30.000Z | from bitmovin_api_sdk.encoding.encodings.streams.sprites.customdata.customdata_api import CustomdataApi
| 52 | 103 | 0.903846 | 13 | 104 | 7 | 0.846154 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.038462 | 104 | 1 | 104 | 104 | 0.91 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
313b13b8a6b1c9a192dca6413b60af94714f3b5c | 94 | py | Python | data/demos/loading_models/scripts/python/constantly_rotate.py | Jean-LouisH/Omnia | e637746839801eb73707d10e3243d4a430dfea78 | [
"MIT"
] | null | null | null | data/demos/loading_models/scripts/python/constantly_rotate.py | Jean-LouisH/Omnia | e637746839801eb73707d10e3243d4a430dfea78 | [
"MIT"
] | null | null | null | data/demos/loading_models/scripts/python/constantly_rotate.py | Jean-LouisH/Omnia | e637746839801eb73707d10e3243d4a430dfea78 | [
"MIT"
] | null | null | null | import omnia
def on_logic_frame():
omnia.get_component("Transform").rotate_y(0.325)
| 15.666667 | 52 | 0.712766 | 14 | 94 | 4.5 | 0.928571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.050633 | 0.159574 | 94 | 5 | 53 | 18.8 | 0.746835 | 0 | 0 | 0 | 0 | 0 | 0.095745 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
317c0b10c3c9b6a80c20d82b58dff7dc4b3bf23a | 32 | py | Python | python/testData/refactoring/introduceParameter/simple.after.py | jnthn/intellij-community | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | [
"Apache-2.0"
] | 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z | python/testData/refactoring/introduceParameter/simple.after.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | [
"Apache-2.0"
] | 173 | 2018-07-05T13:59:39.000Z | 2018-08-09T01:12:03.000Z | python/testData/refactoring/introduceParameter/simple.after.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | [
"Apache-2.0"
] | 2 | 2020-03-15T08:57:37.000Z | 2020-04-07T04:48:14.000Z | def f(x, a="test"):
return a | 16 | 19 | 0.53125 | 7 | 32 | 2.428571 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 32 | 2 | 20 | 16 | 0.708333 | 0 | 0 | 0 | 0 | 0 | 0.121212 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 6 |
7dff284ba8c3b8def671f13b064630d09c7a40c6 | 11,892 | py | Python | esp_sdk/apis/stat_signatures_api.py | zimmermanc/esp-sdk-python | cdef13c0dc6c3996b6c444160c71b2f1e3910c97 | [
"MIT"
] | 6 | 2017-06-05T20:37:19.000Z | 2019-04-10T08:43:59.000Z | esp_sdk/apis/stat_signatures_api.py | zimmermanc/esp-sdk-python | cdef13c0dc6c3996b6c444160c71b2f1e3910c97 | [
"MIT"
] | 18 | 2016-06-22T16:14:33.000Z | 2018-10-29T21:53:15.000Z | esp_sdk/apis/stat_signatures_api.py | zimmermanc/esp-sdk-python | cdef13c0dc6c3996b6c444160c71b2f1e3910c97 | [
"MIT"
] | 18 | 2016-07-27T19:20:01.000Z | 2020-11-17T02:09:58.000Z | # coding: utf-8
"""
ESP Documentation
The Evident Security Platform API (version 2.0) is designed to give users granular control over their Amazon Web Services security experience by allowing them to review alerts, monitor signatures, and create custom signatures.
OpenAPI spec version: v2_sdk
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class StatSignaturesApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def list_for_stat(self, stat_id, **kwargs):
"""
Get a list of statistics for signatures
A successful call to this API returns all the statistics of all the signatures for a report identified by the stat_id parameter. The report contains statistics for alerts triggered by each of those signatures during the selected hour.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_for_stat(stat_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int stat_id: The ID of the stat to retrieve signature statistics for (required)
:param str include: Related objects that can be included in the response: signature, stat See Including Objects for more information.
:param dict(str, str) filter: Filter Params for Searching. Equality Searchable Attributes: [stat_id, type_id]
:param str page: Page Number and Page Size. Number is the page number of the collection to return, size is the number of items to return per page.
:return: PaginatedCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_for_stat_with_http_info(stat_id, **kwargs)
else:
(data) = self.list_for_stat_with_http_info(stat_id, **kwargs)
return data
def list_for_stat_with_http_info(self, stat_id, **kwargs):
"""
Get a list of statistics for signatures
A successful call to this API returns all the statistics of all the signatures for a report identified by the stat_id parameter. The report contains statistics for alerts triggered by each of those signatures during the selected hour.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_for_stat_with_http_info(stat_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int stat_id: The ID of the stat to retrieve signature statistics for (required)
:param str include: Related objects that can be included in the response: signature, stat See Including Objects for more information.
:param dict(str, str) filter: Filter Params for Searching. Equality Searchable Attributes: [stat_id, type_id]
:param str page: Page Number and Page Size. Number is the page number of the collection to return, size is the number of items to return per page.
:return: PaginatedCollection
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['stat_id', 'include', 'filter', 'page']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_for_stat" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'stat_id' is set
if ('stat_id' not in params) or (params['stat_id'] is None):
raise ValueError("Missing the required parameter `stat_id` when calling `list_for_stat`")
collection_formats = {}
resource_path = '/api/v2/stats/{stat_id}/signatures.json_api'.replace('{format}', 'json_api')
path_params = {}
if 'stat_id' in params:
path_params['stat_id'] = params['stat_id']
query_params = {}
if 'include' in params:
query_params['include'] = params['include']
header_params = {}
form_params = []
local_var_files = {}
if 'filter' in params:
form_params.append(('filter', params['filter']))
if 'page' in params:
form_params.append(('page', params['page']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.api+json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.api+json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PaginatedCollection',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def show(self, id, **kwargs):
"""
Show a single Stat Signature
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.show(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Stat Signature ID (required)
:param str include: Related objects that can be included in the response: signature, stat See Including Objects for more information.
:return: StatSignature
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.show_with_http_info(id, **kwargs)
else:
(data) = self.show_with_http_info(id, **kwargs)
return data
def show_with_http_info(self, id, **kwargs):
"""
Show a single Stat Signature
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.show_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Stat Signature ID (required)
:param str include: Related objects that can be included in the response: signature, stat See Including Objects for more information.
:return: StatSignature
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'include']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method show" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `show`")
collection_formats = {}
resource_path = '/api/v2/stats/signatures/{id}.json_api'.replace('{format}', 'json_api')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'include' in params:
query_params['include'] = params['include']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.api+json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.api+json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='StatSignature',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 43.720588 | 264 | 0.596283 | 1,327 | 11,892 | 5.16428 | 0.16202 | 0.018386 | 0.017073 | 0.021013 | 0.864293 | 0.834817 | 0.825332 | 0.795272 | 0.793521 | 0.767547 | 0 | 0.000999 | 0.326606 | 11,892 | 271 | 265 | 43.881919 | 0.85477 | 0.404726 | 0 | 0.653846 | 0 | 0 | 0.15682 | 0.048168 | 0 | 0 | 0 | 0 | 0 | 1 | 0.038462 | false | 0 | 0.053846 | 0 | 0.146154 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
b42221ff0c0bf2d355e7be8bf5e1be706c4355d2 | 40 | py | Python | fivempy/__init__.py | itasli/fivempy | 827abb3b76a761c5a3cde68cc66fbe564bdc7c96 | [
"MIT"
] | 1 | 2021-04-26T10:45:33.000Z | 2021-04-26T10:45:33.000Z | fivempy/__init__.py | itasli/fivempy | 827abb3b76a761c5a3cde68cc66fbe564bdc7c96 | [
"MIT"
] | null | null | null | fivempy/__init__.py | itasli/fivempy | 827abb3b76a761c5a3cde68cc66fbe564bdc7c96 | [
"MIT"
] | null | null | null | from fivempy.Server import Server, Fivem | 40 | 40 | 0.85 | 6 | 40 | 5.666667 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.1 | 40 | 1 | 40 | 40 | 0.944444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
b4426d5527b28b94f8d7aabf96a5359a50fe0766 | 35 | py | Python | src/python/stup/core/__init__.py | Wizmann/STUP-Protocol | e06a3442082e5061d2be32be3ffd681675e7ffb5 | [
"MIT"
] | 14 | 2017-05-06T10:14:32.000Z | 2018-07-17T02:58:00.000Z | src/python/stup/core/__init__.py | Wizmann/STUP-Protocol | e06a3442082e5061d2be32be3ffd681675e7ffb5 | [
"MIT"
] | 2 | 2017-06-13T05:40:18.000Z | 2017-06-13T16:23:01.000Z | src/python/stup/core/__init__.py | Wizmann/STUP-Protocol | e06a3442082e5061d2be32be3ffd681675e7ffb5 | [
"MIT"
] | 4 | 2017-06-09T20:20:54.000Z | 2018-07-17T02:58:10.000Z | #coding=utf-8
from .core import *
| 8.75 | 19 | 0.685714 | 6 | 35 | 4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.034483 | 0.171429 | 35 | 3 | 20 | 11.666667 | 0.793103 | 0.342857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
b47e1b88222d9d7fda7443528ce58f54e4f348df | 9,059 | py | Python | model/multitaskmodel.py | afprati/Bayesian-Causal-Inference | 385d42f27fe736c4147cffa6f23d3ee338a54b1c | [
"MIT",
"Unlicense"
] | 1 | 2021-04-22T02:09:48.000Z | 2021-04-22T02:09:48.000Z | model/multitaskmodel.py | afprati/Bayesian-Causal-Inference | 385d42f27fe736c4147cffa6f23d3ee338a54b1c | [
"MIT",
"Unlicense"
] | null | null | null | model/multitaskmodel.py | afprati/Bayesian-Causal-Inference | 385d42f27fe736c4147cffa6f23d3ee338a54b1c | [
"MIT",
"Unlicense"
] | 1 | 2021-02-14T20:30:19.000Z | 2021-02-14T20:30:19.000Z | import torch
import gpytorch
from torch.nn import ModuleList
import json
import numpy as np
from model.customizedkernel import myIndexKernel, constantKernel, myIndicatorKernel
from model.customizedkernel import ConstantVectorMean, DriftScaleKernel, DriftIndicatorKernel
class MultitaskGPModel(gpytorch.models.ExactGP):
def __init__(self, train_x, train_y, X_max_v, likelihood, MAP=True):
'''
Inputs:
    - train_x: training inputs (categorical feature columns, followed by
      unit id, group indicator, and time in the last column)
    - train_y: training targets
    - X_max_v: maximum values of the categorical features to marginalize over
    - likelihood: GPyTorch likelihood
    - MAP: if True, place priors on kernel hyperparameters for MAP estimation
'''
super(MultitaskGPModel, self).__init__(train_x, train_y, likelihood)
# define priors
outputscale_prior = gpytorch.priors.GammaPrior(concentration=1,rate=10)
lengthscale_prior = gpytorch.priors.GammaPrior(concentration=4,rate=1/5)
rho_prior = gpytorch.priors.UniformPrior(-1, 1)
unit_outputscale_prior = gpytorch.priors.GammaPrior(concentration=1,rate=10)
unit_lengthscale_prior = gpytorch.priors.GammaPrior(concentration=4,rate=1/5)
drift_outputscale_prior = gpytorch.priors.GammaPrior(concentration=1,rate=20)
drift_lengthscale_prior = gpytorch.priors.GammaPrior(concentration=5,rate=1/5)
weekday_prior = gpytorch.priors.GammaPrior(concentration=1,rate=10)
day_prior = gpytorch.priors.GammaPrior(concentration=1,rate=10)
# treatment/control groups
self.num_groups = 2
self.num_units = len(train_x[:,-3].unique())
# categorical features: group/weekday/day/unit id
self.X_max_v = X_max_v
# dim of covariates
self.d = list(train_x.shape)[1] - 1
# per-unit and per-group constant mean functions
self.unit_mean_module = ConstantVectorMean(d=self.num_units)
self.group_mean_module = ConstantVectorMean(d=self.num_groups)
# marginalize weekday/day/unit id effects
self.x_covar_module = ModuleList([constantKernel(num_tasks=v+1) for v in self.X_max_v])
# self.x_covar_module = ModuleList([constantKernel(num_tasks=X_max_v[0]+1, prior=weekday_prior),
# constantKernel(num_tasks=X_max_v[1]+1, prior=day_prior),
# constantKernel(num_tasks=X_max_v[2]+1)])
# group-level time trend
self.group_t_covar_module = gpytorch.kernels.ScaleKernel(gpytorch.kernels.RBFKernel(\
active_dims=torch.tensor([self.d]),\
lengthscale_prior=lengthscale_prior if MAP else None),\
outputscale_prior=outputscale_prior if MAP else None)
# indicator covariances
self.x_indicator_module = ModuleList([myIndicatorKernel(num_tasks=v+1) for v in X_max_v])
self.group_index_module = myIndexKernel(num_tasks=self.num_groups,\
rho_prior=rho_prior if MAP else None)
# unit-level zero-meaned time trend
self.unit_t_covar_module = gpytorch.kernels.ScaleKernel(gpytorch.kernels.RBFKernel(\
active_dims=torch.tensor([self.d]),\
lengthscale_prior=unit_lengthscale_prior if MAP else None),\
outputscale_prior=unit_outputscale_prior if MAP else None)
self.unit_indicator_module = myIndicatorKernel(num_tasks=len(train_x[:,-3].unique()))
# drift process for treatment effect
self.drift_t_module = DriftScaleKernel(gpytorch.kernels.RBFKernel(\
active_dims=torch.tensor([self.d]),\
lengthscale_prior=drift_lengthscale_prior if MAP else None),\
outputscale_prior=drift_outputscale_prior if MAP else None)
self.drift_indicator_module = DriftIndicatorKernel(num_tasks=self.num_groups)
def forward(self, x):
if len(x.shape)==2:
group = x[:,-2].reshape((-1,1)).long()
units = x[:,-3].reshape((-1,1)).long()
ts = x[:,-1]
else:
group = x[0,:,-2].reshape((-1,1)).long()
units = x[0,:,-3].reshape((-1,1)).long()
ts = x[0,:,-1]
# constant group- and unit-level means
mu = self.group_mean_module(group) + self.unit_mean_module(units)
mu = mu.reshape(-1,)
# covariance for time trends
covar_group_t = self.group_t_covar_module(x)
covar_group_index = self.group_index_module(group)
covar_unit_t = self.unit_t_covar_module(x)
covar_unit_indicator = self.unit_indicator_module(units)
covar = covar_group_t.mul(covar_group_index) + covar_unit_t.mul(covar_unit_indicator)
if self.drift_t_module.T0 is not None:
covar_drift_indicator = self.drift_indicator_module(group)
covar_drift_t = self.drift_t_module(x)
covar += covar_drift_t.mul(covar_drift_indicator)
# marginalize weekday/day/unit id effects
for j in range(len(self.X_max_v)):
if len(x.shape)==2:
covar_c = self.x_covar_module[j](x[:,j].long())
indicator = self.x_indicator_module[j](x[:,j].long())
else:
# batch realization
num_samples = x.shape[0]
n = x.shape[1]
tmp = x[:,:,j].reshape(num_samples,n).long()
covar_c = self.x_covar_module[j].forward(tmp, tmp)
tmp = x[:,:,j].reshape(num_samples,n,1).long()
indicator = self.x_indicator_module[j].forward(tmp, tmp)
covar += indicator.mul(covar_c)
return gpytorch.distributions.MultivariateNormal(mu.double(), covar.double())
class PresentationModel(gpytorch.models.ExactGP):
def __init__(self, train_x, train_y, X_max_v, likelihood, MAP=True):
super(PresentationModel, self).__init__(train_x, train_y, likelihood)
# define priors
outputscale_prior = gpytorch.priors.GammaPrior(concentration=1,rate=10)
lengthscale_prior = gpytorch.priors.GammaPrior(concentration=3,rate=1/5)
rho_prior = gpytorch.priors.UniformPrior(-1, 1)
unit_outputscale_prior = gpytorch.priors.GammaPrior(concentration=1,rate=10)
unit_lengthscale_prior = gpytorch.priors.GammaPrior(concentration=4,rate=1/5)
weekday_prior = gpytorch.priors.GammaPrior(concentration=1,rate=10)
day_prior = gpytorch.priors.GammaPrior(concentration=1,rate=10)
# treatment/control groups
self.num_groups = 2
self.num_units = len(train_x[:,-3].unique())
# categorical features: group/weekday/day/unit id
self.X_max_v = X_max_v
# dim of covariates
self.d = list(train_x.shape)[1] - 1
# per-unit and per-group constant mean functions
self.unit_mean_module = ConstantVectorMean(d=self.num_units)
self.group_mean_module = ConstantVectorMean(d=self.num_groups)
# marginalize weekday/day/unit id effects
# self.x_covar_module = ModuleList([constantKernel(num_tasks=v+1) for v in self.X_max_v])
# group-level time trend
self.group_t_covar_module = gpytorch.kernels.ScaleKernel(gpytorch.kernels.RBFKernel(\
active_dims=torch.tensor([self.d]),\
lengthscale_prior=lengthscale_prior if MAP else None),\
outputscale_prior=outputscale_prior if MAP else None)
# indicator covariances
# self.x_indicator_module = ModuleList([myIndicatorKernel(num_tasks=v+1) for v in X_max_v])
self.group_index_module = myIndexKernel(num_tasks=self.num_groups,\
rho_prior=rho_prior if MAP else None)
# unit-level zero-meaned time trend
self.unit_t_covar_module = gpytorch.kernels.ScaleKernel(gpytorch.kernels.RBFKernel(\
active_dims=torch.tensor([self.d]),\
lengthscale_prior=unit_lengthscale_prior if MAP else None),\
outputscale_prior=unit_outputscale_prior if MAP else None)
self.unit_indicator_module = myIndicatorKernel(num_tasks=len(train_x[:,-3].unique()))
def forward(self, x):
group = x[:,-2].reshape((-1,1)).long()
units = x[:,-3].reshape((-1,1)).long()
# constant unit- and group-level means
mu = self.unit_mean_module(units) + self.group_mean_module(group)
mu = mu.reshape(-1,)
# covariance for time trends
covar_group_t = self.group_t_covar_module(x)
covar_group_index = self.group_index_module(group)
covar_unit_t = self.unit_t_covar_module(x)
covar_unit_indicator = self.unit_indicator_module(units)
covar = covar_group_t.mul(covar_group_index) + covar_unit_t.mul(covar_unit_indicator)
# marginalize weekday/day/unit id effects
# for j in range(len(self.X_max_v)):
# covar_c = self.x_covar_module[j](x[:,j].long())
# indicator = self.x_indicator_module[j](x[:,j].long())
# covar += indicator.mul(covar_c)
return gpytorch.distributions.MultivariateNormal(mu.double(), covar.double())
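
# Minimal usage sketch (added for illustration; not part of the original
# module, and the variable names below are assumptions):
#
#   likelihood = gpytorch.likelihoods.GaussianLikelihood()
#   model = MultitaskGPModel(train_x, train_y, X_max_v, likelihood, MAP=True)
#   model.eval()
#   with torch.no_grad():
#       posterior = likelihood(model(test_x))  # MultivariateNormal posterior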
| 46.45641 | 104 | 0.660338 | 1,176 | 9,059 | 4.845238 | 0.118197 | 0.016673 | 0.053352 | 0.071253 | 0.870481 | 0.840821 | 0.830642 | 0.788522 | 0.762022 | 0.762022 | 0 | 0.013965 | 0.233249 | 9,059 | 194 | 105 | 46.695876 | 0.806363 | 0.172094 | 0 | 0.640351 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.035088 | false | 0 | 0.061404 | 0 | 0.131579 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
81ff2418aba47a23ba1363578637085bfc1be891 | 4,610 | py | Python | data/diffusion_model_flags.py | clintonjwang/clip-guided-diffusion | 14910f3d41fb951565d0f15ed0585fb787377a94 | [
"MIT"
] | 291 | 2021-07-21T04:15:14.000Z | 2022-03-29T23:19:34.000Z | data/diffusion_model_flags.py | clintonjwang/clip-guided-diffusion | 14910f3d41fb951565d0f15ed0585fb787377a94 | [
"MIT"
] | 13 | 2021-08-21T22:23:39.000Z | 2022-02-19T09:51:49.000Z | data/diffusion_model_flags.py | clintonjwang/clip-guided-diffusion | 14910f3d41fb951565d0f15ed0585fb787377a94 | [
"MIT"
] | 43 | 2021-07-27T18:28:05.000Z | 2022-03-30T16:16:05.000Z | DIFFUSION_LOOKUP = {
'cond': {
64: {
'url': 'https://openaipublic.blob.core.windows.net/diffusion/jul-2021/64x64_diffusion.pt',
"filename": '64x64_diffusion.pt',
'model_flags': {
"attention_resolutions": '32,16,8',
"class_cond": True,
"diffusion_steps": 1000,
"dropout": 0.1,
"image_size": 64,
"learn_sigma": True,
"noise_schedule": 'cosine',
"num_channels": 192,
"num_head_channels": 64,
"num_res_blocks": 3,
"resblock_updown": True,
"use_new_attention_order": True,
"use_fp16": True,
"use_scale_shift_norm": True,
},
},
128: {
"url": 'https://openaipublic.blob.core.windows.net/diffusion/jul-2021/128x128_diffusion.pt',
"filename": '128x128_diffusion.pt',
"model_flags": {
"attention_resolutions": '32,16,8',
"class_cond": True,
"diffusion_steps": 1000,
"image_size": 128,
"learn_sigma": True,
"noise_schedule": 'linear',
"num_channels": 256,
"num_heads": 4,
"num_res_blocks": 2,
"resblock_updown": True,
"use_fp16": True,
"use_scale_shift_norm": True,
},
},
256: {
"url": "https://openaipublic.blob.core.windows.net/diffusion/jul-2021/256x256_diffusion.pt",
"filename": '256x256_diffusion.pt',
"model_flags": {
"attention_resolutions": "32,16,8",
"class_cond": True,
"diffusion_steps": 1000,
"image_size": 256,
"learn_sigma": True,
"noise_schedule": "linear",
"num_channels": 256,
"num_head_channels": 64,
"num_res_blocks": 2,
"resblock_updown": True,
"use_fp16": True,
"use_scale_shift_norm": True
}
},
512: {
"url": "https://openaipublic.blob.core.windows.net/diffusion/jul-2021/512x512_diffusion.pt",
"filename": '512x512_diffusion.pt',
"model_flags": {
'attention_resolutions': '32, 16, 8',
'class_cond': True,
'diffusion_steps': 1000,
'rescale_timesteps': True,
'timestep_respacing': '1000',
'image_size': 512,
'learn_sigma': True,
'noise_schedule': 'linear',
'num_channels': 256,
'num_head_channels': 64,
'num_res_blocks': 2,
'resblock_updown': True,
'use_fp16': True,
'use_scale_shift_norm': True,
},
},
},
'uncond': {
256: {
"url": "https://openaipublic.blob.core.windows.net/diffusion/jul-2021/256x256_diffusion_uncond.pt",
"filename": '256x256_diffusion_uncond.pt',
"model_flags": {
"attention_resolutions": "32,16,8",
"class_cond": False,
"diffusion_steps": 1000,
"image_size": 256,
"learn_sigma": True,
"noise_schedule": "linear",
"num_channels": 256,
"num_head_channels": 64,
"num_res_blocks": 2,
"resblock_updown": True,
"use_fp16": True,
"use_scale_shift_norm": True
},
},
512: {
"url": 'https://the-eye.eu/public/AI/models/512x512_diffusion_unconditional_ImageNet/512x512_diffusion_uncond_finetune_008100.pt',
"filename": '512x512_diffusion_uncond_finetune_008100.pt',
"model_flags": {
'attention_resolutions': '32, 16, 8',
'class_cond': False,
'diffusion_steps': 1000,
'rescale_timesteps': True,
'timestep_respacing': '1000',
'image_size': 512,
'learn_sigma': True,
'noise_schedule': 'linear',
'num_channels': 256,
'num_head_channels': 64,
'num_res_blocks': 2,
'resblock_updown': True,
'use_fp16': True,
'use_scale_shift_norm': True,
}
}
}
}
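
# Example lookup (added for illustration; keys follow the structure above):
#   flags = DIFFUSION_LOOKUP["uncond"][256]["model_flags"]
#   checkpoint_url = DIFFUSION_LOOKUP["uncond"][256]["url"]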
| 38.099174 | 142 | 0.468764 | 405 | 4,610 | 5.009877 | 0.195062 | 0.04485 | 0.035485 | 0.0621 | 0.834894 | 0.821587 | 0.785116 | 0.770823 | 0.770823 | 0.75308 | 0 | 0.091874 | 0.407375 | 4,610 | 120 | 143 | 38.416667 | 0.650805 | 0 | 0 | 0.625 | 0 | 0.041667 | 0.435792 | 0.047505 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
c35bb403fe2a38b9a514b01e62464851a4a7990d | 10,414 | py | Python | tests/intensive/model_tests.py | FLIR/fiftyone | eeed8bc9dbdada0530036ae5b3afbbe7ab423ce3 | [
"Apache-2.0"
] | 11 | 2021-08-18T08:33:40.000Z | 2022-02-15T12:28:19.000Z | tests/intensive/model_tests.py | FLIR/fiftyone | eeed8bc9dbdada0530036ae5b3afbbe7ab423ce3 | [
"Apache-2.0"
] | 1 | 2022-03-25T19:27:53.000Z | 2022-03-25T19:27:53.000Z | tests/intensive/model_tests.py | FLIR/fiftyone | eeed8bc9dbdada0530036ae5b3afbbe7ab423ce3 | [
"Apache-2.0"
] | 1 | 2022-03-01T07:54:21.000Z | 2022-03-01T07:54:21.000Z | """
Model inference/embeddings tests.
All of these tests are designed to be run manually via::
pytest tests/intensive/model_tests.py -s -k test_<name>
| Copyright 2017-2021, Voxel51, Inc.
| `voxel51.com <https://voxel51.com/>`_
|
"""
import unittest
import numpy as np
import fiftyone as fo
import fiftyone.zoo as foz
def test_apply_model():
dataset = foz.load_zoo_dataset("quickstart")
view = dataset.take(50)
model = foz.load_zoo_model("inception-v3-imagenet-torch")
view.apply_model(model, "predictions1", batch_size=8)
print(view.count_values("predictions1.label"))
model = foz.load_zoo_model("ssd-mobilenet-v1-coco-tf")
view.apply_model(model, "predictions2")
print(view.count_values("predictions2.detections.label"))
def test_compute_embeddings():
dataset = foz.load_zoo_dataset("quickstart")
view = dataset.take(50)
model = foz.load_zoo_model("mobilenet-v2-imagenet-tf1")
embeddings1a = view.compute_embeddings(model)
view.compute_embeddings(model, embeddings_field="embeddings1")
embeddings1b = np.stack(view.values("embeddings1"))
# embeddings1a and embeddings1b should match
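# Added sanity check (a sketch, not in the original test): both routes
# should produce the same dense (num_samples x dim) array.
np.testing.assert_allclose(embeddings1a, embeddings1b, rtol=1e-5)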
embeddings2a = view.compute_embeddings(model, batch_size=8)
view.compute_embeddings(
model, embeddings_field="embeddings2", batch_size=8
)
embeddings2b = np.stack(view.values("embeddings2"))
# embeddings2a and embeddings2b should match
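# Same added check as above, for the batched variant.
np.testing.assert_allclose(embeddings2a, embeddings2b, rtol=1e-5)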
def test_compute_patch_embeddings():
dataset = foz.load_zoo_dataset("quickstart")
view = dataset.take(50)
model = foz.load_zoo_model("mobilenet-v2-imagenet-tf1")
patch_embeddings1a = view.compute_patch_embeddings(model, "ground_truth")
view.compute_patch_embeddings(
model, "ground_truth", embeddings_field="patch_embeddings1"
)
patch_embeddings1b = {
_id: e
for _id, e in zip(view.values("id"), view.values("patch_embeddings1"))
}
# patch_embeddings1a and patch_embeddings1b should match
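# Added sanity check (a sketch; assumes both are dicts mapping sample ID
# to a patches-by-dim array):
assert set(patch_embeddings1a) == set(patch_embeddings1b)
for _id in patch_embeddings1a:
    np.testing.assert_allclose(patch_embeddings1a[_id], patch_embeddings1b[_id], rtol=1e-5)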
patch_embeddings2a = view.compute_patch_embeddings(
model, "ground_truth", batch_size=8
)
view.compute_patch_embeddings(
model, "ground_truth", embeddings_field="patch_embeddings2"
)
patch_embeddings2b = {
_id: e
for _id, e in zip(view.values("id"), view.values("patch_embeddings2"))
}
# patch_embeddings2a and patch_embeddings2b should match
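# Same added check as above, for the batched variant.
assert set(patch_embeddings2a) == set(patch_embeddings2b)
for _id in patch_embeddings2a:
    np.testing.assert_allclose(patch_embeddings2a[_id], patch_embeddings2b[_id], rtol=1e-5)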
def test_apply_model_frames():
dataset = foz.load_zoo_dataset("quickstart-video")
view = dataset.take(2)
model = foz.load_zoo_model("inception-v3-imagenet-torch")
view.apply_model(model, "predictions1", batch_size=8)
print(view.count_values("frames.predictions1.label"))
model = foz.load_zoo_model("ssd-mobilenet-v1-coco-tf")
view.apply_model(model, "predictions2")
print(view.count_values("frames.predictions2.detections.label"))
def test_compute_embeddings_frames():
dataset = foz.load_zoo_dataset("quickstart-video")
view = dataset.take(2)
model = foz.load_zoo_model("mobilenet-v2-imagenet-tf1")
embeddings1a = view.compute_embeddings(model)
view.compute_embeddings(model, embeddings_field="embeddings1")
embeddings1b = {
_id: np.stack(e)
for _id, e in zip(view.values("id"), view.values("frames.embeddings1"))
}
# embeddings1a and embeddings1b should match
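# Added sanity check (a sketch; assumes both are dicts mapping sample ID
# to a frames-by-dim array):
assert set(embeddings1a) == set(embeddings1b)
for _id in embeddings1a:
    np.testing.assert_allclose(embeddings1a[_id], embeddings1b[_id], rtol=1e-5)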
embeddings2a = view.compute_embeddings(model, batch_size=8)
view.compute_embeddings(
model, embeddings_field="embeddings2", batch_size=8
)
embeddings2b = {
_id: np.stack(e)
for _id, e in zip(view.values("id"), view.values("frames.embeddings2"))
}
# embeddings2a and embeddings2b should match
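# Same added check as above, for the batched variant.
assert set(embeddings2a) == set(embeddings2b)
for _id in embeddings2a:
    np.testing.assert_allclose(embeddings2a[_id], embeddings2b[_id], rtol=1e-5)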
def test_compute_patch_embeddings_frames():
dataset = foz.load_zoo_dataset("quickstart-video")
view = dataset.take(2)
model = foz.load_zoo_model("mobilenet-v2-imagenet-tf1")
patch_embeddings1a = view.compute_patch_embeddings(
model, "ground_truth_detections"
)
view.compute_patch_embeddings(
model, "ground_truth_detections", embeddings_field="patch_embeddings1"
)
patch_embeddings1b = {
_id: {fn: p for fn, p in enumerate(e, 1)}
for _id, e in zip(
view.values("id"), view.values("frames.patch_embeddings1")
)
}
# patch_embeddings1a and patch_embeddings1b should match
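# Added sanity check (a sketch; assumes nested dicts keyed by sample ID
# and then by frame number):
assert set(patch_embeddings1a) == set(patch_embeddings1b)
for _id, frames in patch_embeddings1a.items():
    for fn, patches in frames.items():
        np.testing.assert_allclose(patches, patch_embeddings1b[_id][fn], rtol=1e-5)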
patch_embeddings2a = view.compute_patch_embeddings(
model, "ground_truth_detections", batch_size=8
)
view.compute_patch_embeddings(
model, "ground_truth_detections", embeddings_field="patch_embeddings2"
)
patch_embeddings2b = {
_id: {fn: p for fn, p in enumerate(e, 1)}
for _id, e in zip(
view.values("id"), view.values("frames.patch_embeddings2")
)
}
# patch_embeddings2a and patch_embeddings2b should match
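# Same added check as above, for the batched variant.
assert set(patch_embeddings2a) == set(patch_embeddings2b)
for _id, frames in patch_embeddings2a.items():
    for fn, patches in frames.items():
        np.testing.assert_allclose(patches, patch_embeddings2b[_id][fn], rtol=1e-5)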
def test_apply_model_skip_failures():
dataset = fo.Dataset()
dataset.add_samples(
[
fo.Sample(filepath="non-existent1.png"),
fo.Sample(filepath="non-existent2.png"),
fo.Sample(filepath="non-existent3.png"),
fo.Sample(filepath="non-existent4.png"),
]
)
# torch, data loader, single batches
model = foz.load_zoo_model("inception-v3-imagenet-torch")
dataset.apply_model(model, "predictions1")
# torch, data loader, batches
model = foz.load_zoo_model("inception-v3-imagenet-torch")
dataset.apply_model(model, "predictions2", batch_size=2)
# TF, single inference
model = foz.load_zoo_model("ssd-mobilenet-v1-coco-tf")
dataset.apply_model(model, "predictions3")
# TF, batch inference
model = foz.load_zoo_model("resnet-v2-50-imagenet-tf1")
dataset.apply_model(model, "predictions4", batch_size=2)
def test_compute_embeddings_skip_failures():
dataset = fo.Dataset()
dataset.add_samples(
[
fo.Sample(filepath="non-existent1.png"),
fo.Sample(filepath="non-existent2.png"),
fo.Sample(filepath="non-existent3.png"),
fo.Sample(filepath="non-existent4.png"),
]
)
# torch, data loader, single batches
model = foz.load_zoo_model("inception-v3-imagenet-torch")
dataset.compute_embeddings(model)
# torch, data loader, batches
model = foz.load_zoo_model("inception-v3-imagenet-torch")
dataset.compute_embeddings(model, batch_size=2)
# TF, batch inference
model = foz.load_zoo_model("resnet-v2-50-imagenet-tf1")
dataset.compute_embeddings(model, batch_size=2)
def test_compute_patch_embeddings_skip_failures():
dataset = fo.Dataset()
dataset.add_samples(
[
fo.Sample(filepath="non-existent1.png"),
fo.Sample(filepath="non-existent2.png"),
fo.Sample(filepath="non-existent3.png"),
fo.Sample(filepath="non-existent4.png"),
]
)
for sample in dataset:
sample["ground_truth"] = fo.Detections(
detections=[fo.Detection(bounding_box=[0.1, 0.1, 0.8, 0.8])]
)
sample.save()
# torch, data loader, single batches
model = foz.load_zoo_model("inception-v3-imagenet-torch")
dataset.compute_patch_embeddings(model, "ground_truth")
# torch, data loader, batches
model = foz.load_zoo_model("inception-v3-imagenet-torch")
dataset.compute_patch_embeddings(model, "ground_truth", batch_size=2)
# TF, batch inference
model = foz.load_zoo_model("resnet-v2-50-imagenet-tf1")
dataset.compute_patch_embeddings(model, "ground_truth", batch_size=2)
def test_apply_model_frames_skip_failures():
dataset = fo.Dataset()
dataset.add_samples(
[
fo.Sample(filepath="non-existent1.mp4"),
fo.Sample(filepath="non-existent2.mp4"),
fo.Sample(filepath="non-existent3.mp4"),
fo.Sample(filepath="non-existent4.mp4"),
]
)
# torch, data loader, single batches
model = foz.load_zoo_model("inception-v3-imagenet-torch")
dataset.apply_model(model, "predictions1")
# torch, data loader, batches
model = foz.load_zoo_model("inception-v3-imagenet-torch")
dataset.apply_model(model, "predictions2", batch_size=2)
# TF, single inference
model = foz.load_zoo_model("ssd-mobilenet-v1-coco-tf")
dataset.apply_model(model, "predictions3")
# TF, batch inference
model = foz.load_zoo_model("resnet-v2-50-imagenet-tf1")
dataset.apply_model(model, "predictions4", batch_size=2)
def test_compute_embeddings_frames_skip_failures():
dataset = fo.Dataset()
dataset.add_samples(
[
fo.Sample(filepath="non-existent1.mp4"),
fo.Sample(filepath="non-existent2.mp4"),
fo.Sample(filepath="non-existent3.mp4"),
fo.Sample(filepath="non-existent4.mp4"),
]
)
# torch, data loader, single batches
model = foz.load_zoo_model("inception-v3-imagenet-torch")
dataset.compute_embeddings(model)
# torch, data loader, batches
model = foz.load_zoo_model("inception-v3-imagenet-torch")
dataset.compute_embeddings(model, batch_size=2)
# TF, batch inference
model = foz.load_zoo_model("resnet-v2-50-imagenet-tf1")
dataset.compute_embeddings(model, batch_size=2)
def test_compute_patch_embeddings_frames_skip_failures():
dataset = fo.Dataset()
dataset.add_samples(
[
fo.Sample(filepath="non-existent1.mp4"),
fo.Sample(filepath="non-existent2.mp4"),
fo.Sample(filepath="non-existent3.mp4"),
fo.Sample(filepath="non-existent4.mp4"),
]
)
for sample in dataset:
frame = sample.frames[1]
frame["ground_truth"] = fo.Detections(
detections=[fo.Detection(bounding_box=[0.1, 0.1, 0.8, 0.8])]
)
sample.save()
# torch, data loader, single batches
model = foz.load_zoo_model("inception-v3-imagenet-torch")
dataset.compute_patch_embeddings(model, "ground_truth")
# torch, data loader, batches
model = foz.load_zoo_model("inception-v3-imagenet-torch")
dataset.compute_patch_embeddings(model, "ground_truth", batch_size=2)
# TF, batch inference
model = foz.load_zoo_model("resnet-v2-50-imagenet-tf1")
dataset.compute_patch_embeddings(model, "ground_truth", batch_size=2)
if __name__ == "__main__":
fo.config.show_progress_bars = True
unittest.main(verbosity=2)
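
# The paired "should match" comments in the tests above are manual checks.
# A minimal helper sketch for automating the array-valued comparisons,
# assuming deterministic model outputs (hypothetical; not part of the
# original suite):
#
#   def assert_embeddings_match(a, b, tol=1e-6):
#       assert np.allclose(np.asarray(a), np.asarray(b), atol=tol)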
| 30.810651 | 79 | 0.681582 | 1,290 | 10,414 | 5.287597 | 0.09845 | 0.034892 | 0.049846 | 0.061575 | 0.939598 | 0.935347 | 0.935347 | 0.906905 | 0.903973 | 0.903973 | 0 | 0.025478 | 0.197234 | 10,414 | 337 | 80 | 30.902077 | 0.790431 | 0.111869 | 0 | 0.62037 | 0 | 0 | 0.213805 | 0.103538 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0 | 0.018519 | 0 | 0.074074 | 0.018519 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
6f05e9fe7e34dd00bb4349f29a647ca50077e531 | 133 | py | Python | t/stdout_stderr_fatalexit.py | rbriski/asvab | 13504980d6c7af86a1122af6a2b7489d91bad16f | [
"WTFPL",
"Unlicense"
] | 1 | 2016-05-08T06:22:28.000Z | 2016-05-08T06:22:28.000Z | t/stdout_stderr_fatalexit.py | rbriski/asvab | 13504980d6c7af86a1122af6a2b7489d91bad16f | [
"WTFPL",
"Unlicense"
] | null | null | null | t/stdout_stderr_fatalexit.py | rbriski/asvab | 13504980d6c7af86a1122af6a2b7489d91bad16f | [
"WTFPL",
"Unlicense"
] | 4 | 2020-09-30T19:55:10.000Z | 2021-07-13T19:19:29.000Z | #!/usr/local/bin/python
import sys
print "fatalexit prints to stdout"
sys.stderr.write('fatalexit prints to stderr\n')
print 1/0
| 13.3 | 48 | 0.744361 | 22 | 133 | 4.5 | 0.727273 | 0.30303 | 0.343434 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.017391 | 0.135338 | 133 | 9 | 49 | 14.777778 | 0.843478 | 0.165414 | 0 | 0 | 0 | 0 | 0.490909 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.25 | null | null | 0.75 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 6 |
6f1448876baee2381665878d58edec2b7fa395b0 | 101 | py | Python | RemoveWindowsLockScreenAds/__init__.py | clarkb7/RemoveWindowsLockScreenAds | 83382f78f63b17e452c02c1b9bb7a197900a4703 | [
"MIT"
] | null | null | null | RemoveWindowsLockScreenAds/__init__.py | clarkb7/RemoveWindowsLockScreenAds | 83382f78f63b17e452c02c1b9bb7a197900a4703 | [
"MIT"
] | null | null | null | RemoveWindowsLockScreenAds/__init__.py | clarkb7/RemoveWindowsLockScreenAds | 83382f78f63b17e452c02c1b9bb7a197900a4703 | [
"MIT"
] | null | null | null | from RemoveWindowsLockScreenAds.RemoveWindowsLockScreenAds import GetAdSettingsDirectory, AdRemover
| 33.666667 | 99 | 0.920792 | 6 | 101 | 15.5 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.059406 | 101 | 2 | 100 | 50.5 | 0.978947 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
488bb0fb12b7ff7abae3d505c2e5eb56855297de | 221 | py | Python | syft/frameworks/tensorflow/__init__.py | MariaRigaki/PySyft | 8d8baa440f7afce7efedee3f402551853cb7c910 | [
"Apache-2.0"
] | null | null | null | syft/frameworks/tensorflow/__init__.py | MariaRigaki/PySyft | 8d8baa440f7afce7efedee3f402551853cb7c910 | [
"Apache-2.0"
] | 1 | 2019-07-05T09:49:48.000Z | 2019-07-05T09:49:48.000Z | syft/frameworks/tensorflow/__init__.py | MariaRigaki/PySyft | 8d8baa440f7afce7efedee3f402551853cb7c910 | [
"Apache-2.0"
] | 1 | 2021-04-18T15:27:15.000Z | 2021-04-18T15:27:15.000Z | import syft
if syft.dependency_check.tensorflow_available:
from syft_tensorflow.hook import TensorFlowHook
from syft_tensorflow.tensor import TensorFlowTensor
setattr(syft, "TensorFlowHook", TensorFlowHook)
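
# The guard above keeps TensorFlow optional: TensorFlowHook is attached to
# the top-level syft namespace only when the dependency check passes, so
# callers can use syft.TensorFlowHook without importing syft_tensorflow
# themselves.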
| 27.625 | 55 | 0.819005 | 24 | 221 | 7.375 | 0.541667 | 0.090395 | 0.20339 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.131222 | 221 | 7 | 56 | 31.571429 | 0.921875 | 0 | 0 | 0 | 0 | 0 | 0.063348 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.6 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
48b004cfab6d65f8fbe5140bd30fa5d0779a075e | 43 | py | Python | nfspy/__init__.py | SKsample/NfSpy | a588acbe471229c9dce0472d32055d30fe671f2f | [
"MIT"
] | 254 | 2015-01-09T17:50:20.000Z | 2022-03-25T03:18:27.000Z | nfspy/__init__.py | SKsample/NfSpy | a588acbe471229c9dce0472d32055d30fe671f2f | [
"MIT"
] | 5 | 2015-06-07T09:57:45.000Z | 2021-01-29T19:32:21.000Z | nfspy/__init__.py | SKsample/NfSpy | a588acbe471229c9dce0472d32055d30fe671f2f | [
"MIT"
] | 68 | 2015-03-11T15:25:10.000Z | 2022-02-04T01:30:30.000Z | #!/usr/bin/env python
from nfspy import *
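# The star import re-exports the sibling nfspy module's public names at
# package level (relying on Python 2's implicit relative imports).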
| 10.75 | 21 | 0.697674 | 7 | 43 | 4.285714 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.162791 | 43 | 3 | 22 | 14.333333 | 0.833333 | 0.465116 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
48c445d582ec5e860f03cf876f0dd95d502faa24 | 73 | py | Python | systemtests/__init__.py | nliao6622/QuantaDB-1 | e5db80c7b9e9f5b3c2c6715ce77c56d56e4c4c94 | [
"Apache-2.0"
] | 12 | 2021-01-20T23:20:27.000Z | 2021-12-10T12:14:26.000Z | systemtests/__init__.py | behnamm/cs244b_project | 957e8b3979e4ca24814edd73254cc4c69ea14126 | [
"0BSD"
] | null | null | null | systemtests/__init__.py | behnamm/cs244b_project | 957e8b3979e4ca24814edd73254cc4c69ea14126 | [
"0BSD"
] | 2 | 2021-01-13T02:03:32.000Z | 2022-01-20T17:26:55.000Z | import sys
sys.path.append('scripts')          # make the project scripts importable in tests
sys.path.append('bindings/python')  # make the Python bindings importable in tests
| 18.25 | 34 | 0.767123 | 11 | 73 | 5.090909 | 0.636364 | 0.25 | 0.464286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.054795 | 73 | 3 | 35 | 24.333333 | 0.811594 | 0 | 0 | 0 | 0 | 0 | 0.30137 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
5b112612f28c86255eba872de32695351fc8ad59 | 211 | py | Python | app/spot/exceptions.py | valeriansaliou/waaave-web | 8a0cde773563865a905af38f5a0b723a43b17341 | [
"RSA-MD"
] | 1 | 2020-04-06T10:04:43.000Z | 2020-04-06T10:04:43.000Z | app/spot/exceptions.py | valeriansaliou/waaave-web | 8a0cde773563865a905af38f5a0b723a43b17341 | [
"RSA-MD"
] | null | null | null | app/spot/exceptions.py | valeriansaliou/waaave-web | 8a0cde773563865a905af38f5a0b723a43b17341 | [
"RSA-MD"
] | null | null | null | class SpotNotFound(Exception):
"""
Exception raised when spot cannot be found
"""
pass
class SpotDataOverflow(Exception):
"""
Exception raised when spot data overflows
"""
pass
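
# A minimal illustration (hypothetical; not part of the original module) of
# how these exceptions are meant to be raised and handled:
#
#   try:
#       raise SpotNotFound("spot could not be found")
#   except SpotNotFound:
#       ...  # e.g. return a 404 response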
| 16.230769 | 46 | 0.64455 | 21 | 211 | 6.47619 | 0.619048 | 0.264706 | 0.352941 | 0.411765 | 0.470588 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.270142 | 211 | 12 | 47 | 17.583333 | 0.883117 | 0.398104 | 0 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.5 | 0 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 6 |
8285b463740685aa6b9fe37e8926d42e7791be33 | 15,295 | py | Python | cased/tests/test_sensitive.py | cased/cased-python | e3c529e3fe816331277812bf4e3db537eb5a54fc | [
"MIT"
] | null | null | null | cased/tests/test_sensitive.py | cased/cased-python | e3c529e3fe816331277812bf4e3db537eb5a54fc | [
"MIT"
] | null | null | null | cased/tests/test_sensitive.py | cased/cased-python | e3c529e3fe816331277812bf4e3db537eb5a54fc | [
"MIT"
] | null | null | null | import re
import cased
from cased.data.sensitive import SensitiveDataHandler, SensitiveDataProcessor
username_regex = r"@([A-Za-z0-9_]+)"  # matches @-prefixed usernames, e.g. "@someusername"
name_regex = r"Smith"  # matches the literal surname "Smith"
event = {
"actor": "some-actor",
"action": "user.create",
"new_username": "@someusername",
}
event_with_two_usernames = {
"actor": "some-actor",
"action": "user.create",
"new_username": "@someusername and also @anotherusername",
}
event_with_multiple_keys_matching = {
"actor": "some-actor",
"action": "user.create",
"new_username": "@someusername and also @anotherusername",
"friend_username": "@friendusername",
}
event_with_multiple_keys_of_different_matches = {
"actor": "some-actor",
"action": "user.create",
"name": "Jane Smith",
"new_username": "@someusername",
"phone": "555-555-5555",
}
event_with_email_field = {
"actor": "some-actor",
"action": "user.create",
"email": "example@example.com",
}
event_with_email_and_phone_field = {
"actor": "some-actor",
"action": "user.create",
"email": "example@example.com",
"phone": "555-555-5555",
}
class TestSensitiveData(object):
def teardown_method(self, method):
cased.Context.clear()
cased.redact_before_publishing = False
def test_data_handler_can_be_created(self):
handler = SensitiveDataHandler("username", username_regex)
assert handler.label == "username"
assert handler.pattern == username_regex
def test_data_handler_finds_matches(self):
handler = SensitiveDataHandler("username", username_regex)
string = "@someusername"
match_obj = self._create_match_obj(username_regex, string)
matches = handler.find_matches(string)
assert len(matches) == 1
assert matches[0].span() == match_obj.span()
def test_data_handler_finds_multiple_matches(self):
handler = SensitiveDataHandler("username", username_regex)
string = "@someusername @anotherusername"
matches = handler.find_matches(string)
assert len(matches) == 2
def test_data_handler_finds_mixed_matches(self):
handler = SensitiveDataHandler("username", username_regex)
string = "someusername @anotherusername"
matches = handler.find_matches(string)
assert len(matches) == 1
def test_data_handler_finds_no_matches(self):
handler = SensitiveDataHandler("username", username_regex)
string = "nada nope"
matches = handler.find_matches(string)
assert len(matches) == 0
def test_data_handler_works_with_empty_string(self):
handler = SensitiveDataHandler("username", username_regex)
string = ""
matches = handler.find_matches(string)
assert len(matches) == 0
def test_sensitive_data_processor_can_be_created(self):
processor = SensitiveDataProcessor(event)
assert processor.audit_event == event
def test_sensitive_data_processor_can_be_created_with_a_handler(self):
handler = SensitiveDataHandler("username", username_regex)
processor = SensitiveDataProcessor(event, [handler])
assert processor.audit_event == event
assert processor.data_handlers == [handler]
def test_sensitive_data_processor_can_be_created_with_mutiple_handlers(self):
handler1 = SensitiveDataHandler("username", username_regex)
handler2 = SensitiveDataHandler("name", name_regex)
processor = SensitiveDataProcessor(event, [handler1, handler2])
assert processor.audit_event == event
assert processor.data_handlers == [handler1, handler2]
def test_handlers_can_be_added_and_removed_globally(self):
assert cased.sensitive_data_handlers == []
handler = SensitiveDataHandler("username", username_regex)
cased.add_handler(handler)
assert cased.sensitive_data_handlers == [handler]
cased.clear_handlers()
assert cased.sensitive_data_handlers == []
def test_sensitive_data_processor_has_default_handlers_if_set(self):
handler = SensitiveDataHandler("username", username_regex)
cased.add_handler(handler)
processor = SensitiveDataProcessor(event)
assert processor.data_handlers[0].label == handler.label
cased.clear_handlers()
def test_ranges_from_event(self):
handler = SensitiveDataHandler("username", username_regex)
processor = SensitiveDataProcessor(event, [handler])
assert processor.ranges_from_event(event, handler) == {
"new_username": [{"begin": 0, "end": 13, "label": "username"}]
}
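        # "begin" is an inclusive and "end" an exclusive character offset into
        # the field value ("@someusername" spans [0, 13)), and "label" echoes
        # the handler's label.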
def test_ranges_from_event_works_with_multiple_matches(self):
handler = SensitiveDataHandler("username", username_regex)
processor = SensitiveDataProcessor(event_with_two_usernames, [handler])
assert processor.ranges_from_event(event_with_two_usernames, handler) == {
"new_username": [
{"begin": 0, "end": 13, "label": "username"},
{"begin": 23, "end": 39, "label": "username"},
]
}
def test_ranges_from_event_works_with_multiple_handlers(self):
handler1 = SensitiveDataHandler("username", username_regex)
handler2 = SensitiveDataHandler("name", name_regex)
processor = SensitiveDataProcessor(
event_with_multiple_keys_of_different_matches.copy(), [handler1, handler2]
)
assert processor.process()[".cased"]["pii"] == {
"new_username": [{"begin": 0, "end": 13, "label": "username"}],
"name": [{"begin": 5, "end": 10, "label": "name"}],
}
def test_ranges_from_event_works_with_multiple_handlers_and_field_setting(self):
handler1 = SensitiveDataHandler("username", username_regex)
handler2 = SensitiveDataHandler("name", name_regex)
cased.add_sensitive_field("phone")
processor = SensitiveDataProcessor(
event_with_multiple_keys_of_different_matches.copy(), [handler1, handler2]
)
assert processor.process()[".cased"]["pii"] == {
"new_username": [{"begin": 0, "end": 13, "label": "username"}],
"name": [{"begin": 5, "end": 10, "label": "name"}],
"phone": [{"begin": 0, "end": 12, "label": "phone"}],
}
cased.sensitive_fields = set()
def test_ranges_from_event_works_with_multiple_key_matches(self):
handler = SensitiveDataHandler("username", username_regex)
processor = SensitiveDataProcessor(
event_with_multiple_keys_matching.copy(), [handler]
)
assert processor.ranges_from_event(
event_with_multiple_keys_matching.copy(), handler
) == {
"friend_username": [{"begin": 0, "end": 15, "label": "username"}],
"new_username": [
{"begin": 0, "end": 13, "label": "username"},
{"begin": 23, "end": 39, "label": "username"},
],
}
def test_add_ranges_to_events(self):
handler = SensitiveDataHandler("username", username_regex)
processor = SensitiveDataProcessor(event.copy(), [handler])
ranges = {"new_username": [{"begin": 0, "end": 13, "label": "username"}]}
assert processor.add_ranges_to_event(ranges) == {
".cased": {
"pii": {
"new_username": [{"begin": 0, "end": 13, "label": "username"}],
},
},
"action": "user.create",
"actor": "some-actor",
"new_username": "@someusername",
}
def test_add_ranges_to_event_with_multiple_key_matches(self):
handler = SensitiveDataHandler("username", username_regex)
processor = SensitiveDataProcessor(
event_with_multiple_keys_matching.copy(), [handler]
)
ranges = {
"friend_username": [{"begin": 0, "end": 15, "label": "username"}],
"new_username": [
{"begin": 0, "end": 13, "label": "username"},
{"begin": 23, "end": 39, "label": "username"},
],
}
assert processor.add_ranges_to_event(ranges) == {
".cased": {
"pii": {
"friend_username": [{"begin": 0, "end": 15, "label": "username"}],
"new_username": [
{"begin": 0, "end": 13, "label": "username"},
{"begin": 23, "end": 39, "label": "username"},
],
}
},
"action": "user.create",
"actor": "some-actor",
"friend_username": "@friendusername",
"new_username": "@someusername and also @anotherusername",
}
def test_redact_data(self):
handler = SensitiveDataHandler("username", username_regex)
processor = SensitiveDataProcessor(
event_with_multiple_keys_matching.copy(), [handler]
)
ranges = {
"friend_username": [{"begin": 0, "end": 15, "label": "username"}],
"new_username": [
{"begin": 0, "end": 13, "label": "username"},
{"begin": 23, "end": 39, "label": "username"},
],
}
assert processor.redact_data(ranges) == {
"action": "user.create",
"actor": "some-actor",
"friend_username": "XXXXXXXXXXXXXXX",
"new_username": "XXXXXXXXXXXXX and also XXXXXXXXXXXXXXXX",
}
def test_redact_data_with_multiple_key_matches(self):
handler = SensitiveDataHandler("username", username_regex)
processor = SensitiveDataProcessor(
event_with_multiple_keys_matching.copy(), [handler]
)
ranges = {
"friend_username": [{"begin": 0, "end": 15, "label": "username"}],
"new_username": [
{"begin": 0, "end": 13, "label": "username"},
{"begin": 23, "end": 39, "label": "username"},
],
}
assert processor.redact_data(ranges) == {
"action": "user.create",
"actor": "some-actor",
"friend_username": "XXXXXXXXXXXXXXX",
"new_username": "XXXXXXXXXXXXX and also XXXXXXXXXXXXXXXX",
}
def test_redact_data_with_multiple_handlers(self):
cased.redact_before_publishing = True
handler1 = SensitiveDataHandler("username", username_regex)
handler2 = SensitiveDataHandler("name", name_regex)
processor = SensitiveDataProcessor(
event_with_multiple_keys_of_different_matches.copy(), [handler1, handler2]
)
assert processor.process() == {
".cased": {
"pii": {
"new_username": [{"begin": 0, "end": 13, "label": "username"}],
"name": [{"begin": 5, "end": 10, "label": "name"}],
}
},
"name": "Jane XXXXX",
"action": "user.create",
"actor": "some-actor",
"phone": "555-555-5555",
"new_username": "XXXXXXXXXXXXX",
}
def test_process_does_everything_needed(self):
handler = SensitiveDataHandler("username", username_regex)
processor = SensitiveDataProcessor(
event_with_multiple_keys_matching.copy(), [handler]
)
assert processor.process() == {
".cased": {
"pii": {
"friend_username": [{"begin": 0, "end": 15, "label": "username"}],
"new_username": [
{"begin": 0, "end": 13, "label": "username"},
{"begin": 23, "end": 39, "label": "username"},
],
}
},
"action": "user.create",
"actor": "some-actor",
"friend_username": "@friendusername",
"new_username": "@someusername and also @anotherusername",
}
def test_process_does_everything_needed_with_redact_configured(self):
cased.redact_before_publishing = True
handler = SensitiveDataHandler("username", username_regex)
processor = SensitiveDataProcessor(
event_with_multiple_keys_matching.copy(), [handler]
)
assert processor.process() == {
".cased": {
"pii": {
"friend_username": [{"begin": 0, "end": 15, "label": "username"}],
"new_username": [
{"begin": 0, "end": 13, "label": "username"},
{"begin": 23, "end": 39, "label": "username"},
],
}
},
"action": "user.create",
"actor": "some-actor",
"friend_username": "XXXXXXXXXXXXXXX",
"new_username": "XXXXXXXXXXXXX and also XXXXXXXXXXXXXXXX",
}
def test_no_fields_are_marked_as_sensitive_by_default(self):
assert cased.sensitive_fields == set()
def test_fields_can_be_marked_as_sensitive(self):
cased.add_sensitive_field("email")
assert cased.sensitive_fields == {"email"}
def test_sensitive_fields_can_be_emptied(self):
cased.add_sensitive_field("email")
assert cased.sensitive_fields != set()
cased.clear_sensitive_fields()
assert cased.sensitive_fields == set()
def test_sensitive_data_fields_get_marked_when_added(self):
cased.add_sensitive_field("email")
processor = SensitiveDataProcessor(event_with_email_field)
assert processor.process() == {
".cased": {"pii": {"email": [{"begin": 0, "end": 19, "label": "email"}]},},
"action": "user.create",
"actor": "some-actor",
"email": "example@example.com",
}
assert len(event_with_email_field["email"]) == 19
def test_sensitive_data_fields_get_marked_with_explicit_setting(self):
cased.clear_sensitive_fields()
cased.sensitive_fields = {"email"}
processor = SensitiveDataProcessor(event_with_email_field)
assert processor.process() == {
".cased": {"pii": {"email": [{"begin": 0, "end": 19, "label": "email"}]},},
"action": "user.create",
"actor": "some-actor",
"email": "example@example.com",
}
assert (
len(event_with_email_field["email"]) == 19
) # to confirm a match with the "end" parameter in pii
def test_multiple_sensitive_data_fields_get_marked_when_added(self):
cased.add_sensitive_field("email")
cased.add_sensitive_field("phone")
processor = SensitiveDataProcessor(event_with_email_and_phone_field)
assert processor.process() == {
".cased": {
"pii": {
"email": [{"begin": 0, "end": 19, "label": "email"}],
"phone": [{"begin": 0, "end": 12, "label": "phone"}],
}
},
"action": "user.create",
"actor": "some-actor",
"email": "example@example.com",
"phone": "555-555-5555",
}
# Helpers
def _create_match_obj(self, regex, string):
return re.match(regex, string)
| 36.50358 | 87 | 0.586597 | 1,449 | 15,295 | 5.906832 | 0.096618 | 0.045566 | 0.02734 | 0.105386 | 0.860381 | 0.814932 | 0.771001 | 0.737352 | 0.703937 | 0.619114 | 0 | 0.01721 | 0.278195 | 15,295 | 418 | 88 | 36.590909 | 0.758062 | 0.003792 | 0 | 0.622807 | 0 | 0 | 0.188197 | 0 | 0 | 0 | 0 | 0 | 0.111111 | 1 | 0.090643 | false | 0 | 0.008772 | 0.002924 | 0.105263 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
829aab6b165065a64e2c8da5d5e7795d55835615 | 162 | py | Python | Condition expressions/Boolean operators/boolean_operators.py | kislyakovm/introduction-to-python | 2b44da4eb5a4fc1cba7676db5f49b651fa130b87 | [
"MIT"
] | null | null | null | Condition expressions/Boolean operators/boolean_operators.py | kislyakovm/introduction-to-python | 2b44da4eb5a4fc1cba7676db5f49b651fa130b87 | [
"MIT"
] | null | null | null | Condition expressions/Boolean operators/boolean_operators.py | kislyakovm/introduction-to-python | 2b44da4eb5a4fc1cba7676db5f49b651fa130b87 | [
"MIT"
] | null | null | null | name = "John"
age = 17
print(name == "John" or age == 17) # Checks that either name equals to "John" OR age equals to 17
print(name == "John" and age != 23)
| 23.142857 | 100 | 0.623457 | 28 | 162 | 3.607143 | 0.464286 | 0.237624 | 0.217822 | 0.29703 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.064516 | 0.234568 | 162 | 6 | 101 | 27 | 0.75 | 0.37037 | 0 | 0 | 0 | 0 | 0.12 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.5 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 6 |
82afec1e0fb140d195d87e043b8d91bcd91c1bf3 | 49 | py | Python | market_values_api/parsers/__init__.py | johnjichaowei/market-values-api | 4b675736348fdb1d3f98ea7e1e040f6343ba9abd | [
"MIT"
] | null | null | null | market_values_api/parsers/__init__.py | johnjichaowei/market-values-api | 4b675736348fdb1d3f98ea7e1e040f6343ba9abd | [
"MIT"
] | null | null | null | market_values_api/parsers/__init__.py | johnjichaowei/market-values-api | 4b675736348fdb1d3f98ea7e1e040f6343ba9abd | [
"MIT"
] | null | null | null | from .parse_market_value import ParseMarketValue
| 24.5 | 48 | 0.897959 | 6 | 49 | 7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.081633 | 49 | 1 | 49 | 49 | 0.933333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
7d59d2027efa0f8a4aaa10930f9757f2649ba609 | 185,874 | py | Python | webapp/tests/test_functions.py | ctavan/graphite-web | 337eacf8ec4507fea097e08ca875306b19426e84 | [
"Apache-2.0"
] | null | null | null | webapp/tests/test_functions.py | ctavan/graphite-web | 337eacf8ec4507fea097e08ca875306b19426e84 | [
"Apache-2.0"
] | null | null | null | webapp/tests/test_functions.py | ctavan/graphite-web | 337eacf8ec4507fea097e08ca875306b19426e84 | [
"Apache-2.0"
] | null | null | null | import copy
import math
import pytz
from datetime import datetime
from fnmatch import fnmatch
from mock import patch, call, MagicMock
from django.test import TestCase
from django.conf import settings
from graphite.render.datalib import TimeSeries
from graphite.render import functions
from graphite.render.functions import NormalizeEmptyResultError
def return_greater(series, value):
return [i for i in series if i is not None and i > value]
def return_less(series, value):
return [i for i in series if i is not None and i < value]
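
# The two helpers above filter a series down to its non-None values strictly
# greater (or less) than the given threshold, for use by the tests below.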
class FunctionsTest(TestCase):
#
# Test safeSum()
#
def test_safeSum_None(self):
with self.assertRaises(TypeError):
functions.safeSum(None)
def test_safeSum_empty_list(self):
self.assertEqual(functions.safeSum([]), None)
def test_safeSum_all_numbers(self):
self.assertEqual(functions.safeSum([1,2,3,4]), 10)
def test_safeSum_all_None(self):
self.assertEqual(functions.safeSum([None,None,None,None]), None)
def test_safeSum_mixed(self):
self.assertEqual(functions.safeSum([10,None,5,None]), 15)
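
    # The tests above pin down safeSum's contract: None entries are skipped,
    # an empty or all-None input yields None, and a non-iterable argument
    # raises TypeError. A minimal sketch consistent with that contract (not
    # necessarily graphite's exact implementation):
    #
    #   def safeSum(values):
    #       safe = [v for v in values if v is not None]
    #       return sum(safe) if safe else None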
#
# Test safeDiff()
#
def test_safeDiff_None(self):
with self.assertRaises(TypeError):
functions.safeDiff(None)
def test_safeDiff_empty_list(self):
self.assertEqual(functions.safeDiff([]), None)
def test_safeDiff_all_numbers(self):
self.assertEqual(functions.safeDiff([1,2,3,4]), -8)
def test_safeDiff_all_None(self):
self.assertEqual(functions.safeDiff([None,None,None,None]), None)
def test_safeDiff_mixed(self):
self.assertEqual(functions.safeDiff([10,None,5,None]), 5)
#
# Test safeLen()
#
def test_safeLen_None(self):
with self.assertRaises(TypeError):
functions.safeLen(None)
def test_safeLen_empty_list(self):
self.assertEqual(functions.safeLen([]), 0)
def test_safeLen_all_numbers(self):
self.assertEqual(functions.safeLen([1,2,3,4]), 4)
def test_safeLen_all_None(self):
self.assertEqual(functions.safeLen([None,None,None,None]), 0)
def test_safeLen_mixed(self):
self.assertEqual(functions.safeLen([10,None,5,None]), 2)
#
# Test safeDiv()
#
def test_safeDiv_None_None(self):
self.assertEqual(functions.safeDiv(None, None), None)
def test_safeDiv_5_None(self):
self.assertEqual(functions.safeDiv(5, None), None)
def test_safeDiv_5_0(self):
self.assertEqual(functions.safeDiv(5, 0), None)
def test_safeDiv_0_10(self):
self.assertEqual(functions.safeDiv(0,10), 0)
def test_safeDiv_10_5(self):
self.assertEqual(functions.safeDiv(10,5), 2)
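
    # As the tests above show, safeDiv treats a missing operand and a zero
    # divisor alike, returning None. A sketch consistent with those fixtures
    # (not necessarily graphite's exact implementation):
    #
    #   def safeDiv(a, b):
    #       if a is None or b is None or b == 0:
    #           return None
    #       return a / b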
#
# Test safePow()
#
def test_safePow_None_None(self):
self.assertEqual(functions.safePow(None, None), None)
def test_safePow_5_None(self):
self.assertEqual(functions.safePow(5, None), None)
def test_safePow_5_0(self):
self.assertEqual(functions.safePow(5, 0), 1.0)
def test_safePow_0_10(self):
self.assertEqual(functions.safePow(0,10), 0)
def test_safePow_10_5(self):
self.assertEqual(functions.safePow(10,5), 100000.0)
#
# Test safeMul()
#
def test_safeMul_None_None(self):
self.assertEqual(functions.safeMul(None, None), None)
def test_safeMul_5_None(self):
self.assertEqual(functions.safeMul(5, None), None)
def test_safeMul_5_0(self):
self.assertEqual(functions.safeMul(5, 0), 0.0)
def test_safeMul_0_10(self):
self.assertEqual(functions.safeMul(0,10), 0)
def test_safeMul_10_5(self):
self.assertEqual(functions.safeMul(10,5), 50.0)
#
# Test safeSubtract()
#
def test_safeSubtract_None_None(self):
self.assertEqual(functions.safeSubtract(None, None), None)
def test_safeSubtract_5_None(self):
self.assertEqual(functions.safeSubtract(5, None), None)
def test_safeSubtract_5_0(self):
self.assertEqual(functions.safeSubtract(5, 0), 5.0)
def test_safeSubtract_0_10(self):
self.assertEqual(functions.safeSubtract(0,10), -10)
def test_safeSubtract_10_5(self):
self.assertEqual(functions.safeSubtract(10,5), 5)
#
# Test safeAvg()
#
def test_safeAvg_None(self):
with self.assertRaises(TypeError):
functions.safeAvg(None)
def test_safeAvg_empty_list(self):
self.assertEqual(functions.safeAvg([]), None)
def test_safeAvg_all_numbers(self):
self.assertEqual(functions.safeAvg([1,2,3,4]), 2.5)
def test_safeAvg_all_None(self):
self.assertEqual(functions.safeAvg([None,None,None,None]), None)
def test_safeAvg_mixed(self):
self.assertEqual(functions.safeAvg([10,None,5,None]), 7.5)
#
# Test safeStdDev()
#
def test_safeStdDev_None(self):
with self.assertRaises(TypeError):
functions.safeStdDev(None)
def test_safeStdDev_empty_list(self):
self.assertEqual(functions.safeStdDev([]), None)
def test_safeStdDev_all_numbers(self):
self.assertEqual(functions.safeStdDev([1,2,3,4]), 1.118033988749895)
def test_safeStdDev_all_None(self):
self.assertEqual(functions.safeStdDev([None,None,None,None]), None)
def test_safeStdDev_mixed(self):
self.assertEqual(functions.safeStdDev([10,None,5,None]), 2.5)
#
# Test safeLast()
#
def test_safeLast_None(self):
with self.assertRaises(TypeError):
functions.safeLast(None)
def test_safeLast_empty_list(self):
self.assertEqual(functions.safeLast([]), None)
def test_safeLast_all_numbers(self):
self.assertEqual(functions.safeLast([1,2,3,4]), 4)
def test_safeLast_all_None(self):
self.assertEqual(functions.safeLast([None,None,None,None]), None)
def test_safeLast_mixed(self):
self.assertEqual(functions.safeLast([10,None,5,None]), 5)
#
# Test safeMin()
#
def test_safeMin_None(self):
with self.assertRaises(TypeError):
functions.safeMin(None)
def test_safeMin_empty_list(self):
self.assertEqual(functions.safeMin([]), None)
def test_safeMin_all_numbers(self):
self.assertEqual(functions.safeMin([1,2,3,4]), 1)
def test_safeMin_all_None(self):
self.assertEqual(functions.safeMin([None,None,None,None]), None)
def test_safeMin_mixed(self):
self.assertEqual(functions.safeMin([10,None,5,None]), 5)
#
# Test safeMax()
#
def test_safeMax_None(self):
with self.assertRaises(TypeError):
functions.safeMax(None)
def test_safeMax_empty_list(self):
self.assertEqual(functions.safeMax([]), None)
def test_safeMax_all_numbers(self):
self.assertEqual(functions.safeMax([1,2,3,4]), 4)
def test_safeMax_all_None(self):
self.assertEqual(functions.safeMax([None,None,None,None]), None)
def test_safeMax_mixed(self):
self.assertEqual(functions.safeMax([10,None,5,None]), 10)
#
# Test safeAbs()
#
def test_safeAbs_None(self):
self.assertEqual(functions.safeAbs(None), None)
def test_safeAbs_empty_list(self):
with self.assertRaises(TypeError):
functions.safeAbs([])
def test_safeAbs_pos_number(self):
self.assertEqual(functions.safeAbs(1), 1)
def test_safeAbs_neg_numbers(self):
self.assertEqual(functions.safeAbs(-1), 1)
def test_safeAbs_zero(self):
self.assertEqual(functions.safeAbs(0), 0)
#
# Test safeMap()
#
def test_safeMap_None(self):
with self.assertRaises(TypeError):
functions.safeMap(abs, None)
def test_safeMap_empty_list(self):
self.assertEqual(functions.safeMap(abs, []), None)
def test_safeMap_all_numbers(self):
self.assertEqual(functions.safeMap(abs, [1,2,3,4]), [1,2,3,4])
def test_safeMap_all_None(self):
self.assertEqual(functions.safeMap(abs, [None,None,None,None]), None)
def test_safeMap_mixed(self):
self.assertEqual(functions.safeMap(abs, [10,None,5,None]), [10,5])
#
# Test gcd()
#
def test_gcd_None_None(self):
with self.assertRaises(TypeError):
functions.gcd(None, None)
def test_gcd_5_None(self):
with self.assertRaises(TypeError):
functions.gcd(5, None)
def test_gcd_5_0(self):
self.assertEqual(functions.gcd(5, 0), 5)
def test_gcd_0_10(self):
self.assertEqual(functions.gcd(0,10), 10)
def test_gcd_10_5(self):
self.assertEqual(functions.gcd(10,5), 5)
#
# Test lcm()
#
def test_lcm_None_None(self):
self.assertEqual(functions.lcm(None, None), None)
def test_lcm_5_None(self):
with self.assertRaises(TypeError):
functions.lcm(5, None)
def test_lcm_5_0(self):
self.assertEqual(functions.lcm(5, 0), 0)
def test_lcm_0_10(self):
self.assertEqual(functions.lcm(0,10), 0)
def test_lcm_10_5(self):
self.assertEqual(functions.lcm(10,5), 10)
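
    # The gcd/lcm tests above imply the classic Euclid recursion, with the
    # quirk that lcm(None, None) returns None because equal arguments are
    # returned before any arithmetic happens. A sketch consistent with those
    # fixtures (not necessarily graphite's exact implementation):
    #
    #   def gcd(a, b):
    #       return a if b == 0 else gcd(b, a % b)
    #
    #   def lcm(a, b):
    #       if a == b:
    #           return a
    #       if a < b:
    #           a, b = b, a
    #       return a // gcd(a, b) * b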
#
# Test normalize()
#
def test_normalize_empty(self):
with self.assertRaises(NormalizeEmptyResultError):
functions.normalize([])
def test_normalize_None_values(self):
seriesList = []
seriesList.append(TimeSeries("collectd.test-db{0}.load.value", 0, 5, 1, [None, None, None, None, None]))
self.assertEqual(functions.normalize([seriesList]), (seriesList, 0, 5, 1))
def test_normalize_generate_series_list_input(self):
seriesList = self._generate_series_list()
self.assertEqual(functions.normalize([seriesList]), (seriesList, 0, 101, 1))
#
# Test matchSeries()
#
def test_matchSeries_assert(self):
seriesList = self._generate_series_list()
with self.assertRaisesRegexp(AssertionError, 'The number of series in each argument must be the same'):
functions.matchSeries(seriesList[0], [])
def test_matchSeries_empty(self):
        results = functions.matchSeries([], [])
for i, (series1, series2) in enumerate(results):
self.assertEqual(series1, [])
self.assertEqual(series2, [])
def test_matchSeries(self):
seriesList1 = [
TimeSeries('collectd.test-db3.load.value',0,1,1,[3,30,31]),
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,10,11]),
TimeSeries('collectd.test-db2.load.value',0,1,1,[2,20,21]),
TimeSeries('collectd.test-db4.load.value',0,1,1,[4,40,41]),
]
seriesList2 = [
TimeSeries('collectd.test-db4.load.value',0,1,1,[4,8,12]),
TimeSeries('collectd.test-db3.load.value',0,1,1,[3,7,11]),
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,5,9]),
TimeSeries('collectd.test-db2.load.value',0,1,1,[2,6,10]),
]
expectedResult = [
[
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,10,11]),
TimeSeries('collectd.test-db2.load.value',0,1,1,[2,20,21]),
TimeSeries('collectd.test-db3.load.value',0,1,1,[3,30,31]),
TimeSeries('collectd.test-db4.load.value',0,1,1,[4,40,41]),
],
[
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,5,9]),
TimeSeries('collectd.test-db2.load.value',0,1,1,[2,6,10]),
TimeSeries('collectd.test-db3.load.value',0,1,1,[3,7,11]),
TimeSeries('collectd.test-db4.load.value',0,1,1,[4,8,12]),
]]
results = functions.matchSeries(copy.deepcopy(seriesList1), copy.deepcopy(seriesList2))
for i, (series1, series2) in enumerate(results):
self.assertEqual(series1, expectedResult[0][i])
self.assertEqual(series2, expectedResult[1][i])
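
    # As the fixtures above show, matchSeries pairs the two argument lists by
    # sorting both on series name, so test-db1 lines up with test-db1
    # regardless of input ordering.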
#
# Test formatPathExpressions()
#
def test_formatPathExpressions_empty_list(self):
self.assertEqual(functions.formatPathExpressions([]), '')
def test_formatPathExpressions(self):
seriesList = self._generate_series_list()
self.assertEqual(functions.formatPathExpressions(seriesList), "collectd.test-db1.load.value,collectd.test-db2.load.value,collectd.test-db3.load.value")
#
# Test sumSeries()
#
def test_sumSeries_empty(self):
self.assertEqual(functions.sumSeries({}, []), [])
def test_sumSeries(self):
seriesList = self._generate_series_list()
data = range(0,202,2)
expected_name = "sumSeries(collectd.test-db1.load.value,collectd.test-db2.load.value)"
expectedList = [TimeSeries(expected_name, 0, len(data), 1, data)]
result = functions.sumSeries({}, [seriesList[0], seriesList[1]])
self.assertEqual(result, expectedList)
def test_sumSeriesWithWildcards_empty_series_int_position(self):
self.assertEqual(functions.sumSeriesWithWildcards({}, [], 0), [])
def test_sumSeriesWithWildcards(self):
seriesList = self._generate_series_list()
data = range(0,202,2)
expected_name = "load.value"
expectedList = [TimeSeries(expected_name, 0, len(data), 1, data)]
result = functions.sumSeriesWithWildcards({}, [seriesList[0], seriesList[1]], 0,1)
self.assertEqual(result, expectedList)
def test_averageSeriesWithWildcards_empty_series_int_position(self):
self.assertEqual(functions.averageSeriesWithWildcards({}, [], 0), [])
def test_averageSeriesWithWildcards(self):
seriesList = self._generate_series_list()
data = range(0,101,1)
expected_name = "load.value"
expectedList = [TimeSeries(expected_name, 0, len(data), 1, data)]
result = functions.averageSeriesWithWildcards({}, [seriesList[0], seriesList[1]], 0,1)
self.assertEqual(result, expectedList)
def test_multiplySeriesWithWildcards(self):
seriesList1 = [
TimeSeries('web.host-1.avg-response.value',0,1,1,[1,10,11]),
TimeSeries('web.host-2.avg-response.value',0,1,1,[2,20,21]),
TimeSeries('web.host-3.avg-response.value',0,1,1,[3,30,31]),
TimeSeries('web.host-4.avg-response.value',0,1,1,[4,40,41]),
]
seriesList2 = [
TimeSeries('web.host-4.total-request.value',0,1,1,[4,8,12]),
TimeSeries('web.host-3.total-request.value',0,1,1,[3,7,11]),
TimeSeries('web.host-1.total-request.value',0,1,1,[1,5,9]),
TimeSeries('web.host-2.total-request.value',0,1,1,[2,6,10]),
]
expectedResult = [
TimeSeries('web.host-1',0,1,1,[1,50,99]),
TimeSeries('web.host-2',0,1,1,[4,120,210]),
TimeSeries('web.host-3',0,1,1,[9,210,341]),
TimeSeries('web.host-4',0,1,1,[16,320,492]),
]
results = functions.multiplySeriesWithWildcards({}, copy.deepcopy(seriesList1+seriesList2), 2,3)
self.assertEqual(results,expectedResult)
def test_diffSeries(self):
seriesList = self._generate_series_list()
data = [0] * 101
expected_name = "diffSeries(collectd.test-db1.load.value,collectd.test-db2.load.value)"
expectedList = [TimeSeries(expected_name, 0, len(data), 1, data)]
result = functions.diffSeries({}, [seriesList[0], seriesList[1]])
self.assertEqual(result, expectedList)
def test_averageSeries(self):
seriesList = self._generate_series_list()
data = range(0,101)
expected_name = "averageSeries(collectd.test-db1.load.value,collectd.test-db2.load.value)"
expectedList = [TimeSeries(expected_name, 0, len(data), 1, data)]
result = functions.averageSeries({}, [seriesList[0], seriesList[1]])
self.assertEqual(result, expectedList)
def test_stddevSeries(self):
seriesList = self._generate_series_list()
data = [0.0] * 101
expected_name = "stddevSeries(collectd.test-db1.load.value,collectd.test-db2.load.value)"
expectedList = [TimeSeries(expected_name, 0, len(data), 1, data)]
result = functions.stddevSeries({}, [seriesList[0], seriesList[1]])
self.assertEqual(result, expectedList)
def test_minSeries(self):
seriesList = self._generate_series_list()
data = range(0,101)
expected_name = "minSeries(collectd.test-db1.load.value,collectd.test-db2.load.value)"
expectedList = [TimeSeries(expected_name, 0, len(data), 1, data)]
result = functions.minSeries({}, [seriesList[0], seriesList[1]])
self.assertEqual(result, expectedList)
def test_maxSeries(self):
seriesList = self._generate_series_list()
data = range(0,101)
expected_name = "maxSeries(collectd.test-db1.load.value,collectd.test-db2.load.value)"
expectedList = [TimeSeries(expected_name, 0, len(data), 1, data)]
result = functions.maxSeries({}, [seriesList[0], seriesList[1]])
self.assertEqual(result, expectedList)
def test_rangeOfSeries(self):
seriesList = self._generate_series_list()
data = [0.0] * 101
expected_name = "rangeOfSeries(collectd.test-db1.load.value,collectd.test-db2.load.value)"
expectedList = [TimeSeries(expected_name, 0, len(data), 1, data)]
result = functions.rangeOfSeries({}, [seriesList[0], seriesList[1]])
self.assertEqual(result, expectedList)
def test_percentileOfSeries_0th_percentile(self):
with self.assertRaisesRegexp(ValueError, 'The requested percent is required to be greater than 0'):
functions.percentileOfSeries({}, [], 0)
def test_percentileOfSeries(self):
seriesList = self._generate_series_list()
data = range(0,101)
expected_name = "percentileOfSeries(collectd.test-db1.load.value,90)"
expectedList = [TimeSeries(expected_name, 0, len(data), 1, data)]
result = functions.percentileOfSeries({}, [seriesList[0], seriesList[1]], 90)
self.assertEqual(result, expectedList)
def testGetPercentile_empty_points(self):
self.assertEqual(functions._getPercentile([], 30), None)
def testGetPercentile_percentile_0(self):
seriesList = [
([None, None, 15, 20, 35, 40, 50], 15),
(range(100), 0),
(range(200), 0),
(range(300), 0),
(range(1, 101), 1),
(range(1, 201), 1),
(range(1, 301), 1),
(range(0, 102), 0),
(range(1, 203), 1),
(range(1, 303), 1),
]
for index, conf in enumerate(seriesList):
series, expected = conf
result = functions._getPercentile(series, 0, True)
self.assertEqual(expected, result, 'For series index <%s> the 0th percentile ordinal is not %d, but %d ' % (index, expected, result))
def testGetPercentile_interpolated(self):
seriesList = [
([None, None, 15, 20, 35, 40, 50], 19.0),
(range(100), 29.3),
(range(200), 59.3),
(range(300), 89.3),
(range(1, 101), 30.3),
(range(1, 201), 60.3),
(range(1, 301), 90.3),
(range(0, 102), 29.9),
(range(1, 203), 60.9),
(range(1, 303), 90.9),
]
for index, conf in enumerate(seriesList):
series, expected = conf
result = functions._getPercentile(series, 30, True)
self.assertAlmostEqual(expected, result, 4, 'For series index <%s> the 30th percentile ordinal is not %g, but %g' % (index, expected, result))
def testGetPercentile(self):
seriesList = [
([None, None, 15, 20, 35, 40, 50], 20),
(range(100), 30),
(range(200), 60),
(range(300), 90),
(range(1, 101), 31),
(range(1, 201), 61),
(range(1, 301), 91),
(range(0, 102), 30),
(range(1, 203), 61),
(range(1, 303), 91),
]
for index, conf in enumerate(seriesList):
series, expected = conf
result = functions._getPercentile(series, 30)
self.assertEqual(expected, result, 'For series index <%s> the 30th percentile ordinal is not %d, but %d ' % (index, expected, result))
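
    # The three _getPercentile tests above pin down a rank-based percentile
    # with an optional linear-interpolation step. A sketch consistent with
    # those fixtures (not necessarily graphite's exact implementation):
    #
    #   def _getPercentile(points, n, interpolate=False):
    #       sortedPoints = sorted(p for p in points if p is not None)
    #       if not sortedPoints:
    #           return None
    #       fractionalRank = (n / 100.0) * (len(sortedPoints) + 1)
    #       rank = int(fractionalRank)
    #       rankFraction = fractionalRank - rank
    #       if not interpolate:
    #           rank += int(math.ceil(rankFraction))
    #       percentile = sortedPoints[min(max(rank, 1), len(sortedPoints)) - 1]
    #       if interpolate and rank != len(sortedPoints):
    #           nextValue = sortedPoints[rank]
    #           percentile += rankFraction * (nextValue - percentile)
    #       return percentile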
def test_keepLastValue(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('collectd.test-db2.load.value',0,1,1,[None,2,None,4,None,6,None,8,None,10,None,12,None,14,None,16,None,18,None,20]),
TimeSeries('collectd.test-db3.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,None,None,None]),
TimeSeries('collectd.test-db4.load.value',0,1,1,[1,2,3,4,None,6,None,None,9,10,11,None,13,None,None,None,None,18,19,20]),
TimeSeries('collectd.test-db5.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,18,None,None]),
]
expectedResult = [
TimeSeries('keepLastValue(collectd.test-db1.load.value)',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('keepLastValue(collectd.test-db2.load.value)',0,1,1,[None,2,2,4,4,6,6,8,8,10,10,12,12,14,14,16,16,18,18,20]),
TimeSeries('keepLastValue(collectd.test-db3.load.value)',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,None,None,None]),
TimeSeries('keepLastValue(collectd.test-db4.load.value)',0,1,1,[1,2,3,4,4,6,6,6,9,10,11,11,13,None,None,None,None,18,19,20]),
TimeSeries('keepLastValue(collectd.test-db5.load.value)',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,18,18,18]),
]
results = functions.keepLastValue({}, seriesList, 2)
self.assertEqual(results, expectedResult)
def test_interpolate(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('collectd.test-db2.load.value',0,1,1,[None,2,None,4,None,6,None,8,None,10,None,12,None,14,None,16,None,18,None,20]),
TimeSeries('collectd.test-db3.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,None,None,None]),
TimeSeries('collectd.test-db4.load.value',0,1,1,[1,2,3,4,None,6,None,None,9,10,11,None,13,None,None,None,None,18,19,20]),
TimeSeries('collectd.test-db5.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,18,None,None]),
]
expectedResult = [
TimeSeries('interpolate(collectd.test-db1.load.value)',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('interpolate(collectd.test-db2.load.value)',0,1,1,[None,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('interpolate(collectd.test-db3.load.value)',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,None,None,None]),
TimeSeries('interpolate(collectd.test-db4.load.value)',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('interpolate(collectd.test-db5.load.value)',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,None,None]),
]
results = functions.interpolate({}, seriesList)
self.assertEqual(results, expectedResult)
def test_changed(self):
config = [
[[1,2,3,4,4,5,5,5,6,7], [0,1,1,1,0,1,0,0,1,1]],
[[None,None,None,None,0,0,0,None,None,1], [0,0,0,0,0,0,0,0,0,1]]
]
for i, c in enumerate(config):
name = "collectd.test-db{0}.load.value".format(i + 1)
series = [TimeSeries(name,0,1,1,c[0])]
expected = [TimeSeries("changed(%s)" % name,0,1,1,c[1])]
result = functions.changed({}, series)
self.assertEqual(result, expected)
def test_delay(self):
source = [
            TimeSeries('collectd.test-db1.load.value',0,1,1,list(range(18)) + [None, None]),
]
delay = 2
expectedList = [
            TimeSeries('delay(collectd.test-db1.load.value,2)',0,1,1,[None, None] + list(range(18))),
]
gotList = functions.delay({}, source, delay)
self.assertEqual(len(gotList), len(expectedList))
for got, expected in zip(gotList, expectedList):
self.assertListEqual(got, expected)
def test_asPercent_error(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('collectd.test-db2.load.value',0,1,1,[None,2,None,4,None,6,None,8,None,10,None,12,None,14,None,16,None,18,None,20]),
TimeSeries('collectd.test-db3.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,None,None,None]),
TimeSeries('collectd.test-db4.load.value',0,1,1,[1,2,3,4,None,6,None,None,9,10,11,None,13,None,None,None,None,18,19,20]),
TimeSeries('collectd.test-db5.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,18,None,None]),
]
seriesList2 = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
]
with self.assertRaisesRegexp(ValueError, "asPercent second argument must be missing, a single digit, reference exactly 1 series or reference the same number of series as the first argument"):
functions.asPercent({}, seriesList, seriesList2)
def test_asPercent_no_seriesList2(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('collectd.test-db2.load.value',0,1,1,[None,2,None,4,None,6,None,8,None,10,None,12,None,14,None,16,None,18,None,20]),
TimeSeries('collectd.test-db3.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,None,None,None]),
TimeSeries('collectd.test-db4.load.value',0,1,1,[1,2,3,4,None,6,None,None,9,10,11,None,13,None,None,None,None,18,19,20]),
TimeSeries('collectd.test-db5.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,18,None,None]),
]
for series in seriesList:
series.pathExpression = series.name
expectedResult = [
TimeSeries('asPercent(collectd.test-db1.load.value,sumSeries(collectd.test-db1.load.value,collectd.test-db2.load.value,collectd.test-db3.load.value,collectd.test-db4.load.value,collectd.test-db5.load.value))',0,1,1,[25.0, 20.0, 50.0, 33.33, 100.0, 20.0, 33.33, 25.0, 25.0, 20.0, 25.0, 25.0, 25.0, 25.0, 33.33, 25.0, 33.33, 25.0, 50.0, 33.33]),
TimeSeries('asPercent(collectd.test-db2.load.value,sumSeries(collectd.test-db1.load.value,collectd.test-db2.load.value,collectd.test-db3.load.value,collectd.test-db4.load.value,collectd.test-db5.load.value))',0,1,1,[None, 20.0, None, 33.33, None, 20.0, None, 25.0, None, 20.0, None, 25.0, None, 25.0, None, 25.0, None, 25.0, None, 33.33]),
TimeSeries('asPercent(collectd.test-db3.load.value,sumSeries(collectd.test-db1.load.value,collectd.test-db2.load.value,collectd.test-db3.load.value,collectd.test-db4.load.value,collectd.test-db5.load.value))',0,1,1,[25.0, 20.0, None, None, None, 20.0, 33.33, 25.0, 25.0, 20.0, 25.0, 25.0, 25.0, 25.0, 33.33, 25.0, 33.33, None, None, None]),
TimeSeries('asPercent(collectd.test-db4.load.value,sumSeries(collectd.test-db1.load.value,collectd.test-db2.load.value,collectd.test-db3.load.value,collectd.test-db4.load.value,collectd.test-db5.load.value))',0,1,1,[25.0, 20.0, 50.0, 33.33, None, 20.0, None, None, 25.0, 20.0, 25.0, None, 25.0, None, None, None, None, 25.0, 50.0, 33.33]),
TimeSeries('asPercent(collectd.test-db5.load.value,sumSeries(collectd.test-db1.load.value,collectd.test-db2.load.value,collectd.test-db3.load.value,collectd.test-db4.load.value,collectd.test-db5.load.value))',0,1,1,[25.0, 20.0, None, None, None, 20.0, 33.33, 25.0, 25.0, 20.0, 25.0, 25.0, 25.0, 25.0, 33.33, 25.0, 33.33, 25.0, None, None]),
]
result = functions.asPercent({}, seriesList)
for i, series in enumerate(result):
for k, v in enumerate(series):
if type(v) is float:
series[k] = round(v,2)
self.assertEqual(result, expectedResult)
def test_asPercent_integer(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('collectd.test-db2.load.value',0,1,1,[None,2,None,4,None,6,None,8,None,10,None,12,None,14,None,16,None,18,None,20]),
TimeSeries('collectd.test-db3.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,None,None,None]),
TimeSeries('collectd.test-db4.load.value',0,1,1,[1,2,3,4,None,6,None,None,9,10,11,None,13,None,None,None,None,18,19,20]),
TimeSeries('collectd.test-db5.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,18,None,None]),
]
expectedResult = [
TimeSeries('asPercent(collectd.test-db1.load.value,10)',0,1,1,[10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0, 110.0, 120.0, 130.0, 140.0, 150.0, 160.0, 170.0, 180.0, 190.0, 200.0]),
TimeSeries('asPercent(collectd.test-db2.load.value,10)',0,1,1,[None, 20.0, None, 40.0, None, 60.0, None, 80.0, None, 100.0, None, 120.0, None, 140.0, None, 160.0, None, 180.0, None, 200.0]),
TimeSeries('asPercent(collectd.test-db3.load.value,10)',0,1,1,[10.0, 20.0, None, None, None, 60.0, 70.0, 80.0, 90.0, 100.0, 110.0, 120.0, 130.0, 140.0, 150.0, 160.0, 170.0, None, None, None]),
TimeSeries('asPercent(collectd.test-db4.load.value,10)',0,1,1,[10.0, 20.0, 30.0, 40.0, None, 60.0, None, None, 90.0, 100.0, 110.0, None, 130.0, None, None, None, None, 180.0, 190.0, 200.0]),
TimeSeries('asPercent(collectd.test-db5.load.value,10)',0,1,1,[10.0, 20.0, None, None, None, 60.0, 70.0, 80.0, 90.0, 100.0, 110.0, 120.0, 130.0, 140.0, 150.0, 160.0, 170.0, 180.0, None, None])
]
result = functions.asPercent({}, seriesList, 10)
for i, series in enumerate(result):
for k, v in enumerate(series):
if type(v) is float:
series[k] = round(v,2)
self.assertEqual(result, expectedResult)
def test_asPercent_seriesList2_single(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('collectd.test-db2.load.value',0,1,1,[None,2,None,4,None,6,None,8,None,10,None,12,None,14,None,16,None,18,None,20]),
TimeSeries('collectd.test-db3.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,None,None,None]),
TimeSeries('collectd.test-db4.load.value',0,1,1,[1,2,3,4,None,6,None,None,9,10,11,None,13,None,None,None,None,18,19,20]),
TimeSeries('collectd.test-db5.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,18,None,None]),
]
seriesList2 = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
]
expectedResult = [
TimeSeries('asPercent(collectd.test-db1.load.value,collectd.test-db1.load.value)',0,1,1,[100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0]),
TimeSeries('asPercent(collectd.test-db2.load.value,collectd.test-db1.load.value)',0,1,1,[None, 100.0, None, 100.0, None, 100.0, None, 100.0, None, 100.0, None, 100.0, None, 100.0, None, 100.0, None, 100.0, None, 100.0]),
TimeSeries('asPercent(collectd.test-db3.load.value,collectd.test-db1.load.value)',0,1,1,[100.0, 100.0, None, None, None, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, None, None, None]),
TimeSeries('asPercent(collectd.test-db4.load.value,collectd.test-db1.load.value)',0,1,1,[100.0, 100.0, 100.0, 100.0, None, 100.0, None, None, 100.0, 100.0, 100.0, None, 100.0, None, None, None, None, 100.0, 100.0, 100.0]),
TimeSeries('asPercent(collectd.test-db5.load.value,collectd.test-db1.load.value)',0,1,1,[100.0, 100.0, None, None, None, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, None, None])
]
result = functions.asPercent({}, seriesList, seriesList2)
for i, series in enumerate(result):
for k, v in enumerate(series):
if type(v) is float:
series[k] = round(v,2)
self.assertEqual(result, expectedResult)
def test_asPercent_seriesList2_multi(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('collectd.test-db2.load.value',0,1,1,[None,2,None,4,None,6,None,8,None,10,None,12,None,14,None,16,None,18,None,20]),
TimeSeries('collectd.test-db3.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,None,None,None]),
TimeSeries('collectd.test-db4.load.value',0,1,1,[1,2,3,4,None,6,None,None,9,10,11,None,13,None,None,None,None,18,19,20]),
TimeSeries('collectd.test-db5.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,18,None,None]),
]
seriesList2 = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('collectd.test-db2.load.value',0,1,1,[None,2,None,4,None,6,None,8,None,10,None,12,None,14,None,16,None,18,None,20]),
TimeSeries('collectd.test-db3.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,None,None,None]),
TimeSeries('collectd.test-db4.load.value',0,1,1,[1,2,3,4,None,6,None,None,9,10,11,None,13,None,None,None,None,18,19,20]),
TimeSeries('collectd.test-db5.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,18,None,None]),
]
expectedResult = [
TimeSeries('asPercent(collectd.test-db1.load.value,collectd.test-db1.load.value)',0,1,1,[100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0]),
TimeSeries('asPercent(collectd.test-db2.load.value,collectd.test-db2.load.value)',0,1,1,[None, 100.0, None, 100.0, None, 100.0, None, 100.0, None, 100.0, None, 100.0, None, 100.0, None, 100.0, None, 100.0, None, 100.0]),
TimeSeries('asPercent(collectd.test-db3.load.value,collectd.test-db3.load.value)',0,1,1,[100.0, 100.0, None, None, None, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, None, None, None]),
TimeSeries('asPercent(collectd.test-db4.load.value,collectd.test-db4.load.value)',0,1,1,[100.0, 100.0, 100.0, 100.0, None, 100.0, None, None, 100.0, 100.0, 100.0, None, 100.0, None, None, None, None, 100.0, 100.0, 100.0]),
TimeSeries('asPercent(collectd.test-db5.load.value,collectd.test-db5.load.value)',0,1,1,[100.0, 100.0, None, None, None, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, None, None])
]
result = functions.asPercent({}, seriesList, seriesList2)
for i, series in enumerate(result):
for k, v in enumerate(series):
if type(v) is float:
series[k] = round(v,2)
self.assertEqual(result, expectedResult)
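# divideSeries requires its second argument to reference exactly one
# divisor series; passing two must raise the ValueError asserted below.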
def test_divideSeries_error(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('collectd.test-db2.load.value',0,1,1,[None,2,None,4,None,6,None,8,None,10,None,12,None,14,None,16,None,18,None,20]),
TimeSeries('collectd.test-db3.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,None,None,None]),
TimeSeries('collectd.test-db4.load.value',0,1,1,[1,2,3,4,None,6,None,None,9,10,11,None,13,None,None,None,None,18,19,20]),
TimeSeries('collectd.test-db5.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,18,None,None]),
]
seriesList2 = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
]
with self.assertRaisesRegexp(ValueError, r"divideSeries second argument must reference exactly 1 series \(got 2\)"):
functions.divideSeries({}, seriesList, seriesList2)
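# With a single divisor series, divideSeries divides each series
# point-by-point; a None in either operand yields None in the result.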
def test_divideSeries_seriesList2_single(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('collectd.test-db2.load.value',0,1,1,[None,2,None,4,None,6,None,8,None,10,None,12,None,14,None,16,None,18,None,20]),
TimeSeries('collectd.test-db3.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,None,None,None]),
TimeSeries('collectd.test-db4.load.value',0,1,1,[1,2,3,4,None,6,None,None,9,10,11,None,13,None,None,None,None,18,19,20]),
TimeSeries('collectd.test-db5.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,18,None,None]),
]
seriesList2 = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
]
expectedResult = [
TimeSeries('divideSeries(collectd.test-db1.load.value,collectd.test-db1.load.value)',0,1,1,[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]),
TimeSeries('divideSeries(collectd.test-db2.load.value,collectd.test-db1.load.value)',0,1,1,[None, 1.0, None, 1.0, None, 1.0, None, 1.0, None, 1.0, None, 1.0, None, 1.0, None, 1.0, None, 1.0, None, 1.0]),
TimeSeries('divideSeries(collectd.test-db3.load.value,collectd.test-db1.load.value)',0,1,1,[1.0, 1.0, None, None, None, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, None, None, None]),
TimeSeries('divideSeries(collectd.test-db4.load.value,collectd.test-db1.load.value)',0,1,1,[1.0, 1.0, 1.0, 1.0, None, 1.0, None, None, 1.0, 1.0, 1.0, None, 1.0, None, None, None, None, 1.0, 1.0, 1.0]),
TimeSeries('divideSeries(collectd.test-db5.load.value,collectd.test-db1.load.value)',0,1,1,[1.0, 1.0, None, None, None, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, None, None])
]
result = functions.divideSeries({}, seriesList, seriesList2)
for i, series in enumerate(result):
for k, v in enumerate(series):
if type(v) is float:
series[k] = round(v,2)
self.assertEqual(result, expectedResult)
def test_multiplySeries_single(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
]
self.assertEqual(functions.multiplySeries({}, seriesList), seriesList)
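# multiplySeries multiplies matching datapoints across all inputs, and a
# None anywhere makes that point None: index 1 is 2*2*2*2*2 = 32, index 5
# is 6**5 = 7776, index 9 is 10**5 = 100000, and every other point is None.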
def test_multiplySeries(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('collectd.test-db2.load.value',0,1,1,[None,2,None,4,None,6,None,8,None,10,None,12,None,14,None,16,None,18,None,20]),
TimeSeries('collectd.test-db3.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,None,None,None]),
TimeSeries('collectd.test-db4.load.value',0,1,1,[1,2,3,4,None,6,None,None,9,10,11,None,13,None,None,None,None,18,19,20]),
TimeSeries('collectd.test-db5.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,18,None,None]),
]
expectedResult = [
TimeSeries('multiplySeries(collectd.test-db1.load.value,collectd.test-db2.load.value,collectd.test-db3.load.value,collectd.test-db4.load.value,collectd.test-db5.load.value)',0,1,1,[None, 32.0, None, None, None, 7776.0, None, None, None, 100000.0, None, None, None, None, None, None, None, None, None, None]),
]
result = functions.multiplySeries({}, seriesList)
self.assertEqual(result, expectedResult)
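# cumulative() is shorthand for consolidateBy(series, 'sum'): both only
# change the consolidationFunc used when rendering, not the datapoints.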
def _verify_series_consolidationFunc(self, seriesList, value):
"""
Verify the consolidationFunc is set to the specified value
"""
for series in seriesList:
self.assertEqual(series.consolidationFunc, value)
def test_cumulative(self):
seriesList = self._generate_series_list()
self._verify_series_consolidationFunc(seriesList, "average")
results = functions.cumulative({}, seriesList)
self._verify_series_consolidationFunc(results, "sum")
def test_consolidateBy(self):
seriesList = self._generate_series_list()
self._verify_series_consolidationFunc(seriesList, "average")
avail_funcs = ['sum', 'average', 'min', 'max']
for func in avail_funcs:
results = functions.consolidateBy({}, seriesList, func)
self._verify_series_consolidationFunc(results, func)
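# weightedAverage matches the two lists on the given node and computes
# sum(series * weight) / sum(weights) per point; with identical inputs the
# result reproduces the original values. The mismatched-series test further
# down suggests unmatched weight series still count in the denominator
# (e.g. its first point is 3/4 = 0.75).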
def test_weightedAverage(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('collectd.test-db2.load.value',0,1,1,[None,2,None,4,None,6,None,8,None,10,None,12,None,14,None,16,None,18,None,20]),
TimeSeries('collectd.test-db3.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,None,None,None]),
TimeSeries('collectd.test-db4.load.value',0,1,1,[1,2,3,4,None,6,None,None,9,10,11,None,13,None,None,None,None,18,19,20]),
TimeSeries('collectd.test-db5.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,18,None,None]),
]
for series in seriesList:
series.pathExpression = series.name
seriesList2 = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('collectd.test-db2.load.value',0,1,1,[None,2,None,4,None,6,None,8,None,10,None,12,None,14,None,16,None,18,None,20]),
TimeSeries('collectd.test-db3.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,None,None,None]),
TimeSeries('collectd.test-db4.load.value',0,1,1,[1,2,3,4,None,6,None,None,9,10,11,None,13,None,None,None,None,18,19,20]),
TimeSeries('collectd.test-db5.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,18,None,None]),
]
for series in seriesList2:
series.pathExpression = series.name
expectedResult = [
TimeSeries('weightedAverage(collectd.test-db1.load.value,collectd.test-db2.load.value,collectd.test-db3.load.value,collectd.test-db4.load.value,collectd.test-db5.load.value, collectd.test-db1.load.value,collectd.test-db2.load.value,collectd.test-db3.load.value,collectd.test-db4.load.value,collectd.test-db5.load.value, 1)',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
]
result = functions.weightedAverage({}, seriesList, seriesList2, 1)
self.assertEqual(result, expectedResult)
def test_weightedAverage_mismatched_series(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('collectd.test-db2.load.value',0,1,1,[None,2,None,4,None,6,None,8,None,10,None,12,None,14,None,16,None,18,None,20]),
TimeSeries('collectd.test-db3.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,None,None,None]),
TimeSeries('collectd.test-db5.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,18,None,None]),
]
for series in seriesList:
series.pathExpression = series.name
seriesList2 = [
TimeSeries('collectd.test-db1.load.value',0,1,1,[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
TimeSeries('collectd.test-db3.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,None,None,None]),
TimeSeries('collectd.test-db4.load.value',0,1,1,[1,2,3,4,None,6,None,None,9,10,11,None,13,None,None,None,None,18,19,20]),
TimeSeries('collectd.test-db5.load.value',0,1,1,[1,2,None,None,None,6,7,8,9,10,11,12,13,14,15,16,17,18,None,None]),
]
for series in seriesList2:
series.pathExpression = series.name
expectedResult = [
TimeSeries('weightedAverage(collectd.test-db1.load.value,collectd.test-db2.load.value,collectd.test-db3.load.value,collectd.test-db5.load.value, collectd.test-db1.load.value,collectd.test-db3.load.value,collectd.test-db4.load.value,collectd.test-db5.load.value, 1)',0,1,1,[0.75,1.5,1.5,2.0,5.0,4.5,7.0,8.0,6.75,7.5,8.25,12.0,9.75,14.0,15.0,16.0,17.0,12.0,9.5,10.0]),
]
result = functions.weightedAverage({}, seriesList, seriesList2, 1)
self.assertEqual(result, expectedResult)
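# scaleToSeconds(series, N) rescales each point by N / step; the step here
# is 60s and N is 30, so every value is halved.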
def test_scaleToSeconds(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,2,None,4,None,6,None,8,None,10]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
for series in seriesList:
series.pathExpression = series.name
expectedResult = [
TimeSeries('scaleToSeconds(collectd.test-db1.load.value,30)',0,600,60,[0.5,1.0,1.5,2.0,2.5,3.0,3.5,4.0,4.5,5.0]),
TimeSeries('scaleToSeconds(collectd.test-db2.load.value,30)',0,600,60,[None,1.0,None,2.0,None,3.0,None,4.0,None,5.0]),
TimeSeries('scaleToSeconds(collectd.test-db3.load.value,30)',0,600,60,[0.5,1.0,None,None,None,3.0,3.5,4.0,4.5,5.0]),
TimeSeries('scaleToSeconds(collectd.test-db4.load.value,30)',0,600,60,[0.5,1.0,1.5,2.0,2.5,3.0,3.5,4.0,4.5,None]),
]
result = functions.scaleToSeconds({}, seriesList, 30)
self.assertEqual(result, expectedResult)
def test_absolute(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,21,1,[-10,-9,-8,-7,None,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8,9,10]),
]
expected = [
TimeSeries('absolute(collectd.test-db1.load.value)',0,21,1,[10,9,8,7,None,5,4,3,2,1,0,1,2,3,4,5,6,7,8,9,10]),
]
self.assertEqual(functions.absolute({}, seriesList), expected)
def test_offset(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,21,1,[-10,-9,-8,-7,None,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8,9,10]),
]
expected = [
TimeSeries('offset(collectd.test-db1.load.value,10)',0,21,1,[0,1,2,3,None,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
]
self.assertEqual(functions.offset({}, seriesList, 10), expected)
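# offsetToZero shifts a series so its minimum becomes zero; the minimum
# here is -10, so the result matches offset(series, 10) above.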
def test_offsetToZero(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,21,1,[-10,-9,-8,-7,None,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8,9,10]),
]
expected = [
TimeSeries('offsetToZero(collectd.test-db1.load.value)',0,21,1,[0,1,2,3,None,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]),
]
self.assertEqual(functions.offsetToZero({}, seriesList), expected)
def test_derivative(self):
seriesList = [TimeSeries('test', 0, 600, 60, [None, 1, 2, 3, 4, 5, None, 6, 7, 8])]
expected = [TimeSeries('derivative(test)', 0, 600, 60, [None, None, 1, 1, 1, 1, None, None, 1, 1])]
result = functions.derivative({}, seriesList)
self.assertEqual(expected, result, 'derivative result incorrect')
def test_nonNegativeDerivative(self):
seriesList = [TimeSeries('test', 0, 600, 60, [None, 1, 2, 3, 4, 5, None, 3, 2, 1])]
expected = [TimeSeries('nonNegativeDerivative(test)', 0, 600, 60, [None, None, 1, 1, 1, 1, None, None, None, None])]
result = functions.nonNegativeDerivative({}, seriesList)
self.assertEqual(expected, result, 'nonNegativeDerivative result incorrect')
def test_nonNegativeDerivative_max(self):
seriesList = [TimeSeries('test', 0, 600, 60, [0, 1, 2, 3, 4, 5, 0, 1, 2, 3])]
expected = [TimeSeries('nonNegativeDerivative(test)', 0, 600, 60, [None, 1, 1, 1, 1, 1, 1, 1, 1, 1])]
result = functions.nonNegativeDerivative({}, seriesList,5)
self.assertEqual(expected, result, 'nonNegativeDerivative result incorrect')
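# perSecond converts a counter to a per-second rate: (current - previous)
# divided by the 60s step, with None for the first point. 0 -> 120 over
# 60 seconds gives 2, 240 -> 480 gives 4, and so on.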
def test_perSecond(self):
seriesList = [TimeSeries('test', 0, 600, 60, [0, 120, 240, 480, 960, 1920, 3840, 7680, 15360, 30720])]
expected = [TimeSeries('perSecond(test)', 0, 600, 60, [None, 2, 2, 4, 8, 16, 32, 64, 128, 256])]
result = functions.perSecond({}, seriesList)
self.assertEqual(expected, result, 'perSecond result incorrect')
def test_perSecond_nones(self):
seriesList = [TimeSeries('test', 0, 600, 60, [0, 60, None, 180, None, 300, None, 420, None, 540])]
expected = [TimeSeries('perSecond(test)', 0, 600, 60, [None, 1, None, 1, None, 1, None, 1, None, 1])]
result = functions.perSecond({}, seriesList)
self.assertEqual(expected, result, 'perSecond result incorrect')
def test_perSecond_max(self):
seriesList = [TimeSeries('test', 0, 600, 60, [0, 120, 240, 480, 960, 900, 120, 240, 120, 0])]
expected = [TimeSeries('perSecond(test)', 0, 600, 60, [None, 2, 2, 4, 8, None, -5, 2, 6, 6])]
result = functions.perSecond({}, seriesList, 480)
self.assertEqual(expected, result, 'perSecond result incorrect')
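# integral keeps a running sum of the series; a None produces a gap in
# the output but does not reset the accumulator (15 is followed by 21).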
def test_integral(self):
seriesList = [TimeSeries('test', 0, 600, 60, [None, 1, 2, 3, 4, 5, None, 6, 7, 8])]
expected = [TimeSeries('integral(test)', 0, 600, 60, [None, 1, 3, 6, 10, 15, None, 21, 28, 36])]
result = functions.integral({}, seriesList)
self.assertEqual(expected, result, 'integral result incorrect')
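# integralByInterval restarts the running sum at each interval boundary:
# with a 60s step and a '2min' interval the sum resets every two points,
# and a None within an interval contributes nothing.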
def test_integralByInterval(self):
seriesList = [TimeSeries('test', 0, 600, 60, [None, 1, 2, 3, 4, 5, None, 6, 7, 8])]
expected = [TimeSeries("integralByInterval(test,'2min')", 0, 600, 60, [0, 1, 2, 5, 4, 9, 0, 6, 7, 15])]
result = functions.integralByInterval({'startTime' : datetime(1970,1,1)}, seriesList, '2min')
self.assertEqual(expected, result, 'integralByInterval result incorrect %s %s' % (result, expected))
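# stacked turns each series into the running vertical total of itself plus
# every series before it; None points stay None and add nothing to the
# total, and the final totals are stored in requestContext['totalStack'].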
def test_stacked(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,2,None,4,None,6,None,8,None,10]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
for series in seriesList:
series.pathExpression = series.name
expectedResult = [
TimeSeries('stacked(collectd.test-db1.load.value)',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('stacked(collectd.test-db2.load.value)',0,600,60,[None,4,None,8,None,12,None,16,None,20]),
TimeSeries('stacked(collectd.test-db3.load.value)',0,600,60,[2,6,None,None,None,18,14,24,18,30]),
TimeSeries('stacked(collectd.test-db4.load.value)',0,600,60,[3,8,6,12,10,24,21,32,27,None]),
]
for series in expectedResult:
series.options = {'stacked': True}
request_context = {}
result = functions.stacked(request_context, seriesList)
self.assertEqual(result, expectedResult)
self.assertEqual(request_context, {'totalStack': {'__DEFAULT__': [3,8,6,12,10,24,21,32,27,30]}})
def test_stacked_with_name(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,2,None,4,None,6,None,8,None,10]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
for series in seriesList:
series.pathExpression = series.name
expectedResult = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,4,None,8,None,12,None,16,None,20]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[2,6,None,None,None,18,14,24,18,30]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[3,8,6,12,10,24,21,32,27,None]),
]
for series in expectedResult:
series.options = {'stacked': True}
request_context = {'totalStack': {'my_fun_stack': [0,0,0,0,0,0,0,0,0,0]}}
result = functions.stacked(request_context, seriesList, 'my_fun_stack')
self.assertEqual(result, expectedResult)
self.assertEqual(request_context, {'totalStack': {'my_fun_stack': [3,8,6,12,10,24,21,32,27,30]}})
def test_areaBetween(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
]
for series in seriesList:
series.pathExpression = series.name
expectedResult = [
TimeSeries('areaBetween(collectd.test-db2.load.value)',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('areaBetween(collectd.test-db2.load.value)',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
]
expectedResult[0].options = {'stacked': True, 'invisible': True}
expectedResult[1].options = {'stacked': True}
request_context = {}
result = functions.areaBetween(request_context, seriesList)
self.assertEqual(result, expectedResult)
def test_cactiStyle(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
for series in seriesList:
series.pathExpression = series.name
expectedResult = [
TimeSeries('collectd.test-db1.load.value Current:10.00 Max:10.00 Min:1.00 ',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value Current:nan Max:nan Min:nan ',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value Current:10.00 Max:10.00 Min:1.00 ',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value Current:9.00 Max:9.00 Min:1.00 ',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
for series in expectedResult:
series.options = {}
request_context = {}
result = functions.cactiStyle(request_context, seriesList)
self.assertEqual(result, expectedResult)
def test_cactiStyle_units(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
for series in seriesList:
series.pathExpression = series.name
expectedResult = [
TimeSeries('collectd.test-db1.load.value Current:10.00 b Max:10.00 b Min:1.00 b ',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value Current:nan Max:nan Min:nan ',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value Current:10.00 b Max:10.00 b Min:1.00 b ',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value Current:9.00 b Max:9.00 b Min:1.00 b ',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
for series in expectedResult:
series.options = {}
request_context = {}
result = functions.cactiStyle(request_context, seriesList, units="b")
self.assertEqual(result, expectedResult)
def test_cactiStyle_emptyList(self):
result = functions.cactiStyle({}, [])
self.assertEqual(result, [])
def test_cactiStyle_binary(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
for series in seriesList:
series.pathExpression = series.name
expectedResult = [
TimeSeries('collectd.test-db1.load.value Current:10.00 Max:10.00 Min:1.00 ',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value Current:nan Max:nan Min:nan ',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value Current:10.00 Max:10.00 Min:1.00 ',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value Current:9.00 Max:9.00 Min:1.00 ',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
for series in expectedResult:
series.options = {}
request_context = {}
result = functions.cactiStyle(request_context, seriesList, "binary")
self.assertEqual(result, expectedResult)
def test_cactiStyle_binary_units(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
for series in seriesList:
series.pathExpression = series.name
expectedResult = [
TimeSeries('collectd.test-db1.load.value Current:10.00 b Max:10.00 b Min:1.00 b ',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value Current:nan Max:nan Min:nan ',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value Current:10.00 b Max:10.00 b Min:1.00 b ',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value Current:9.00 b Max:9.00 b Min:1.00 b ',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
for series in expectedResult:
series.options = {}
request_context = {}
result = functions.cactiStyle(request_context, seriesList, "binary", "b")
self.assertEqual(result, expectedResult)
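# nPercentile replaces every datapoint with the series' n-th percentile,
# producing a flat line; the last config entry checks that leading None
# values do not shift the percentile.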
def test_n_percentile(self):
config = [
[15, 35, 20, 40, 50],
range(1, 101),
range(1, 201),
range(1, 301),
range(0, 100),
range(0, 200),
range(0, 300),
# Ensure None values in the list have no effect.
[None, None, None] + range(0, 300),
]
def n_percentile(perc, expect):
seriesList = []
expected = []
for i, c in enumerate(config):
seriesList.append(TimeSeries('Test(%d)' % i, 0, len(c), 1, c))
expected.append(TimeSeries('nPercentile(Test(%d), %d)' % (i, perc), 0, len(c), 1, expect[i]*len(c)))
result = functions.nPercentile({}, seriesList, perc)
self.assertEqual(expected, result)
n_percentile(30, [[20], [31], [61], [91], [30], [60], [90], [90]])
n_percentile(90, [[50], [91], [181], [271], [90], [180], [270], [270]])
n_percentile(95, [[50], [96], [191], [286], [95], [190], [285], [285]])
def test_averageOutsidePercentile_30(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,100,1,[7]*100),
TimeSeries('collectd.test-db2.load.value',0,100,1,[5]*100),
TimeSeries('collectd.test-db3.load.value',0,100,1,[10]*100),
TimeSeries('collectd.test-db4.load.value',0,100,1,[1]*100),
]
expectedResult = [
TimeSeries('collectd.test-db2.load.value',0,100,1,[5]*100),
TimeSeries('collectd.test-db3.load.value',0,100,1,[10]*100),
TimeSeries('collectd.test-db4.load.value',0,100,1,[1]*100),
]
result = functions.averageOutsidePercentile({}, seriesList, 30)
self.assertEqual(result, expectedResult)
def test_averageOutsidePercentile_70(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,100,1,[7]*100),
TimeSeries('collectd.test-db2.load.value',0,100,1,[5]*100),
TimeSeries('collectd.test-db3.load.value',0,100,1,[10]*100),
TimeSeries('collectd.test-db4.load.value',0,100,1,[1]*100),
]
expectedResult = [
TimeSeries('collectd.test-db2.load.value',0,100,1,[5]*100),
TimeSeries('collectd.test-db3.load.value',0,100,1,[10]*100),
TimeSeries('collectd.test-db4.load.value',0,100,1,[1]*100),
]
result = functions.averageOutsidePercentile({}, seriesList, 70)
self.assertEqual(result, expectedResult)
def test_removeBetweenPercentile_30(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,100,1,[7]*100),
TimeSeries('collectd.test-db2.load.value',0,100,1,[5]*100),
TimeSeries('collectd.test-db3.load.value',0,100,1,[10]*100),
TimeSeries('collectd.test-db4.load.value',0,100,1,[1]*100),
]
expectedResult = [
TimeSeries('collectd.test-db2.load.value',0,100,1,[5]*100),
TimeSeries('collectd.test-db3.load.value',0,100,1,[10]*100),
TimeSeries('collectd.test-db4.load.value',0,100,1,[1]*100),
]
result = functions.removeBetweenPercentile({}, seriesList, 30)
self.assertEqual(result, expectedResult)
def test_removeBetweenPercentile_70(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,100,1,[7]*100),
TimeSeries('collectd.test-db2.load.value',0,100,1,[5]*100),
TimeSeries('collectd.test-db3.load.value',0,100,1,[10]*100),
TimeSeries('collectd.test-db4.load.value',0,100,1,[1]*100),
]
expectedResult = [
TimeSeries('collectd.test-db2.load.value',0,100,1,[5]*100),
TimeSeries('collectd.test-db3.load.value',0,100,1,[10]*100),
TimeSeries('collectd.test-db4.load.value',0,100,1,[1]*100),
]
result = functions.removeBetweenPercentile({}, seriesList, 70)
self.assertEqual(result, expectedResult)
def test_sortByName(self):
seriesList = [
TimeSeries('collectd.test-db3.load.value',0,100,1,[10]*100),
TimeSeries('collectd.test-db2.load.value',0,100,1,[5]*100),
TimeSeries('collectd.test-db4.load.value',0,100,1,[1]*100),
TimeSeries('collectd.test-db1.load.value',0,100,1,[7]*100),
]
expectedResult = [
TimeSeries('collectd.test-db1.load.value',0,100,1,[7]*100),
TimeSeries('collectd.test-db2.load.value',0,100,1,[5]*100),
TimeSeries('collectd.test-db3.load.value',0,100,1,[10]*100),
TimeSeries('collectd.test-db4.load.value',0,100,1,[1]*100),
]
result = functions.sortByName({}, seriesList)
self.assertEqual(result, expectedResult)
def test_sortByName_natural(self):
seriesList = [
TimeSeries('collectd.test-db3.load.value',0,100,1,[10]*100),
TimeSeries('collectd.test-db2.load.value',0,100,1,[5]*100),
TimeSeries('collectd.test-db4.load.value',0,100,1,[1]*100),
TimeSeries('collectd.test-db1.load.value',0,100,1,[7]*100),
]
expectedResult = [
TimeSeries('collectd.test-db1.load.value',0,100,1,[7]*100),
TimeSeries('collectd.test-db2.load.value',0,100,1,[5]*100),
TimeSeries('collectd.test-db3.load.value',0,100,1,[10]*100),
TimeSeries('collectd.test-db4.load.value',0,100,1,[1]*100),
]
result = functions.sortByName({}, seriesList, True)
self.assertEqual(result, expectedResult)
def test_sorting_by_total(self):
seriesList = []
config = [[1000, 100, 10, 0], [1000, 100, 10, 1]]
for i, c in enumerate(config):
seriesList.append(TimeSeries('Test(%d)' % i, 0, 0, 0, c))
self.assertEqual(1110, functions.safeSum(seriesList[0]))
result = functions.sortByTotal({}, seriesList)
self.assertEqual(1111, functions.safeSum(result[0]))
self.assertEqual(1110, functions.safeSum(result[1]))
def test_sortByMaxima(self):
seriesList = [
TimeSeries('collectd.test-db3.load.value',0,100,1,[10]*100),
TimeSeries('collectd.test-db2.load.value',0,100,1,[5]*100),
TimeSeries('collectd.test-db4.load.value',0,100,1,[1]*100),
TimeSeries('collectd.test-db1.load.value',0,100,1,[7]*100),
]
expectedResult = [
TimeSeries('collectd.test-db3.load.value',0,100,1,[10]*100),
TimeSeries('collectd.test-db1.load.value',0,100,1,[7]*100),
TimeSeries('collectd.test-db2.load.value',0,100,1,[5]*100),
TimeSeries('collectd.test-db4.load.value',0,100,1,[1]*100),
]
result = functions.sortByMaxima({}, seriesList)
self.assertEqual(result, expectedResult)
def test_sortByMinima(self):
seriesList = [
TimeSeries('collectd.test-db3.load.value',0,100,1,[10]*100),
TimeSeries('collectd.test-db2.load.value',0,100,1,[5]*100),
TimeSeries('collectd.test-db4.load.value',0,100,1,[1]*100),
TimeSeries('collectd.test-db1.load.value',0,100,1,[7]*100),
]
expectedResult = [
TimeSeries('collectd.test-db4.load.value',0,100,1,[1]*100),
TimeSeries('collectd.test-db2.load.value',0,100,1,[5]*100),
TimeSeries('collectd.test-db1.load.value',0,100,1,[7]*100),
TimeSeries('collectd.test-db3.load.value',0,100,1,[10]*100),
]
result = functions.sortByMinima({}, seriesList)
self.assertEqual(result, expectedResult)
def _generate_series_list(self):
seriesList = []
config = [range(101), range(101), [1, None, None, None, None]]
for i, c in enumerate(config):
name = "collectd.test-db{0}.load.value".format(i + 1)
seriesList.append(TimeSeries(name, 0, len(c), 1, c))
for series in seriesList:
series.pathExpression = series.name
return seriesList
def test_check_empty_lists(self):
seriesList = []
config = [[1000, 100, 10, 0], []]
for i, c in enumerate(config):
seriesList.append(TimeSeries('Test(%d)' % i, 0, 0, 0, c))
self.assertTrue(functions.safeIsNotEmpty(seriesList[0]))
self.assertFalse(functions.safeIsNotEmpty(seriesList[1]))
result = functions.removeEmptySeries({}, seriesList)
self.assertEqual(1, len(result))
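# The removeAbove/removeBelow family nulls out datapoints beyond the given
# percentile or value threshold and records the threshold in the series
# name, as the expected_name assertions below verify.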
def test_remove_above_percentile(self):
seriesList = self._generate_series_list()
percent = 50
results = functions.removeAbovePercentile({}, seriesList, percent)
for i, result in enumerate(results):
self.assertEqual(return_greater(result, percent), [])
expected_name = "removeAbovePercentile(collectd.test-db{0}.load.value, 50)".format(i + 1)
self.assertEqual(expected_name, result.name)
def test_remove_above_percentile_float(self):
seriesList = self._generate_series_list()
percent = 0.1
results = functions.removeAbovePercentile({}, seriesList, percent)
expected = [[], [], [1]]
for i, result in enumerate(results):
self.assertEqual(return_greater(result, percent), expected[i])
expected_name = "removeAbovePercentile(collectd.test-db{0}.load.value, 0.1)".format(i + 1)
self.assertEqual(expected_name, result.name)
def test_remove_below_percentile(self):
seriesList = self._generate_series_list()
percent = 50
results = functions.removeBelowPercentile({}, seriesList, percent)
expected = [[], [], [1]]
for i, result in enumerate(results):
self.assertEqual(return_less(result, percent), expected[i])
expected_name = "removeBelowPercentile(collectd.test-db{0}.load.value, 50)".format(i + 1)
self.assertEqual(expected_name, result.name)
def test_remove_below_percentile_float(self):
seriesList = self._generate_series_list()
percent = 0.1
results = functions.removeBelowPercentile({}, seriesList, percent)
expected = [[0], [0], []]
for i, result in enumerate(results):
self.assertEqual(return_less(result, percent), expected[i])
expected_name = "removeBelowPercentile(collectd.test-db{0}.load.value, 0.1)".format(i + 1)
self.assertEqual(expected_name, result.name)
def test_remove_above_value(self):
seriesList = self._generate_series_list()
value = 5
results = functions.removeAboveValue({}, seriesList, value)
for i, result in enumerate(results):
self.assertEqual(return_greater(result, value), [])
expected_name = "removeAboveValue(collectd.test-db{0}.load.value, 5)".format(i + 1)
self.assertEqual(expected_name, result.name)
def test_remove_above_value_float(self):
seriesList = self._generate_series_list()
value = 0.1
results = functions.removeAboveValue({}, seriesList, value)
for i, result in enumerate(results):
self.assertEqual(return_greater(result, value), [])
expected_name = "removeAboveValue(collectd.test-db{0}.load.value, 0.1)".format(i + 1)
self.assertEqual(expected_name, result.name)
def test_remove_below_value(self):
seriesList = self._generate_series_list()
value = 5
results = functions.removeBelowValue({}, seriesList, value)
for i, result in enumerate(results):
self.assertEqual(return_less(result, value), [])
expected_name = "removeBelowValue(collectd.test-db{0}.load.value, 5)".format(i + 1)
self.assertEqual(expected_name, result.name)
def test_remove_below_value_float(self):
seriesList = self._generate_series_list()
value = 0.1
results = functions.removeBelowValue({}, seriesList, value)
for i, result in enumerate(results):
self.assertEqual(return_less(result, value), [])
expected_name = "removeBelowValue(collectd.test-db{0}.load.value, 0.1)".format(i + 1)
self.assertEqual(expected_name, result.name)
def test_limit(self):
seriesList = self._generate_series_list()
limit = len(seriesList) - 1
results = functions.limit({}, seriesList, limit)
self.assertEqual(len(results), limit,
"More than {0} results returned".format(limit),
)
def _verify_series_options(self, seriesList, name, value):
"""
Verify a given option is set and True for each series in a
series list
"""
for series in seriesList:
self.assertIn(name, series.options)
if value is True:
test_func = self.assertTrue
else:
test_func = self.assertEqual
test_func(series.options.get(name), value)
def test_second_y_axis(self):
seriesList = self._generate_series_list()
results = functions.secondYAxis({}, seriesList)
self._verify_series_options(results, "secondYAxis", True)
def test_draw_as_infinite(self):
seriesList = self._generate_series_list()
results = functions.drawAsInfinite({}, seriesList)
self._verify_series_options(results, "drawAsInfinite", True)
def test_vertical_line(self):
requestContext = {
'startTime': datetime(1970,1,1,1,0,0,0,pytz.timezone(settings.TIME_ZONE)),
'endTime':datetime(1970,1,1,1,2,0,0,pytz.timezone(settings.TIME_ZONE)),
'tzinfo':pytz.utc,
}
result = functions.verticalLine(requestContext, "01:0019700101", "foo")
expectedResult = [ TimeSeries('foo',3600,3600,1.0,[1.0, 1.0]), ]
expectedResult[0].options = {'drawAsInfinite': True}
self.assertEqual(result, expectedResult)
def test_vertical_line_color(self):
requestContext = {
'startTime': datetime(1970,1,1,1,0,0,0,pytz.timezone(settings.TIME_ZONE)),
'endTime':datetime(1970,1,1,1,2,0,0,pytz.timezone(settings.TIME_ZONE)),
'tzinfo':pytz.utc,
}
result = functions.verticalLine(requestContext, "01:0019700101", "foo", "white")
expectedResult = [ TimeSeries('foo',3600,3600,1.0,[1.0, 1.0]), ]
expectedResult[0].options = {'drawAsInfinite': True}
expectedResult[0].color = "white"
self.assertEqual(result, expectedResult)
def test_vertical_line_before_start(self):
requestContext = {
'startTime': datetime(1971,1,1,1,0,0,0,pytz.timezone(settings.TIME_ZONE)),
'endTime':datetime(1971,1,1,1,2,0,0,pytz.timezone(settings.TIME_ZONE)),
'tzinfo':pytz.utc,
}
with self.assertRaisesRegexp(ValueError, r"verticalLine\(\): timestamp 3600 exists before start of range"):
result = functions.verticalLine(requestContext, "01:0019700101", "foo")
def test_vertical_line_after_end(self):
requestContext = {
'startTime': datetime(1970,1,1,1,0,0,0,pytz.timezone(settings.TIME_ZONE)),
'endTime':datetime(1970,1,1,1,2,0,0,pytz.timezone(settings.TIME_ZONE)),
'tzinfo':pytz.utc,
}
with self.assertRaisesRegexp(ValueError, r"verticalLine\(\): timestamp 31539600 exists after end of range"):
result = functions.verticalLine(requestContext, "01:0019710101", "foo")
def test_line_width(self):
seriesList = self._generate_series_list()
width = 10
results = functions.lineWidth({}, seriesList, width)
self._verify_series_options(results, "lineWidth", width)
def test_dashed(self):
seriesList = self._generate_series_list()
dashLength = 3
results = functions.dashed({}, seriesList, dashLength)
self._verify_series_options(results, "dashed", 3)
for i, result in enumerate(results):
expected_name = "dashed(collectd.test-db{0}.load.value, 3)".format(i + 1)
self.assertEqual(expected_name, result.name)
def test_dashed_default(self):
seriesList = self._generate_series_list()
results = functions.dashed({}, seriesList)
self._verify_series_options(results, "dashed", 5)
for i, result in enumerate(results):
expected_name = "dashed(collectd.test-db{0}.load.value, 5)".format(i + 1)
self.assertEqual(expected_name, result.name)
def test_dashed_float(self):
seriesList = self._generate_series_list()
dashLength = 3.5
results = functions.dashed({}, seriesList, dashLength)
self._verify_series_options(results, "dashed", 3.5)
for i, result in enumerate(results):
expected_name = "dashed(collectd.test-db{0}.load.value, 3.5)".format(i + 1)
self.assertEqual(expected_name, result.name)
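# transformNull substitutes the given default for None values; when a
# reference series is supplied, only points where the reference is
# non-None are transformed, and an empty reference transforms everything.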
def test_transform_null(self):
seriesList = self._generate_series_list()
transform = -5
results = functions.transformNull({}, copy.deepcopy(seriesList), transform)
for counter, series in enumerate(seriesList):
if None not in series:
continue
# If the None values weren't transformed, there is a problem
self.assertNotIn(None, results[counter],
"tranformNull should remove all None values",
)
# Anywhere a None was in the original series, verify it
# was transformed to the given value.
for i, value in enumerate(series):
if value is None:
result_val = results[counter][i]
self.assertEqual(transform, result_val,
"Transformed value should be {0}, not {1}".format(transform, result_val),
)
def test_transform_null_reference(self):
seriesList = self._generate_series_list()
transform = -5
referenceSeries = copy.deepcopy(seriesList[0])
for k, v in enumerate(referenceSeries):
if k % 2 != 0:
referenceSeries[k] = None
results = functions.transformNull({}, copy.deepcopy(seriesList), transform, [referenceSeries])
for counter, series in enumerate(seriesList):
if None not in series:
continue
# Anywhere a None was in the original series, verify it
# was transformed to the given value if a value existed
# in the reference series
for i, value in enumerate(series):
if value is None and referenceSeries[i] is not None:
result_val = results[counter][i]
self.assertEqual(transform, result_val,
"Transformed value should be {0}, not {1}".format(transform, result_val),
)
def test_transform_null_reference_empty(self):
seriesList = self._generate_series_list()
transform = -5
referenceSeries = []
results = functions.transformNull({}, copy.deepcopy(seriesList), transform, [referenceSeries])
for counter, series in enumerate(seriesList):
if None not in series:
continue
# If the None values weren't transformed, there is a problem
self.assertNotIn(None, results[counter],
"tranformNull should remove all None values",
)
# Anywhere a None was in the original series, verify it was
# transformed to the given value; an empty reference series
# means every None is transformed.
for i, value in enumerate(series):
if value is None:
result_val = results[counter][i]
self.assertEqual(transform, result_val,
"Transformed value should be {0}, not {1}".format(transform, result_val),
)
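# isNonNull maps each datapoint to 1 where a value exists and 0 where it
# is None.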
def test_isNonNull(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
for series in seriesList:
series.pathExpression = series.name
expectedResult = [
TimeSeries('isNonNull(collectd.test-db1.load.value)',0,600,60,[1,1,1,1,1,1,1,1,1,1]),
TimeSeries('isNonNull(collectd.test-db2.load.value)',0,600,60,[0,0,0,0,0,0,0,0,0,0]),
TimeSeries('isNonNull(collectd.test-db3.load.value)',0,600,60,[1,1,0,0,0,1,1,1,1,1]),
TimeSeries('isNonNull(collectd.test-db4.load.value)',0,600,60,[1,1,1,1,1,1,1,1,1,0]),
]
for series in expectedResult:
series.options = {}
request_context = {}
result = functions.isNonNull(request_context, seriesList)
self.assertEqual(result, expectedResult)
def test_identity(self):
expectedResult = [
TimeSeries('my_series', 3600, 3660, 60, [3600]),
]
requestContext = {
'startTime': datetime(1970,1,1,1,0,0,0,pytz.timezone(settings.TIME_ZONE)),
'endTime':datetime(1970,1,1,1,1,0,0,pytz.timezone(settings.TIME_ZONE))
}
result = functions.identity(requestContext, "my_series")
self.assertEqual(result, expectedResult)
def test_countSeries(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
for series in seriesList:
series.pathExpression = series.name
expectedResult = [
TimeSeries('countSeries(collectd.test-db1.load.value,collectd.test-db2.load.value,collectd.test-db3.load.value,collectd.test-db4.load.value)',0,600,60,[4,4,4,4,4,4,4,4,4,4]),
]
for series in expectedResult:
series.options = {}
request_context = {}
result = functions.countSeries(request_context, seriesList)
self.assertEqual(result, expectedResult)
def test_empty_countSeries(self):
expectedResult = [
TimeSeries('0',0,600,300,[0,0,0]),
]
request_context = {
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 10, 0, 0, pytz.timezone(settings.TIME_ZONE)),
}
result = functions.countSeries(request_context)
self.assertEqual(result, expectedResult)
def test_group(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
for series in seriesList:
series.pathExpression = series.name
request_context = {}
result = functions.group(request_context, seriesList[0], seriesList[1], seriesList[2], seriesList[3])
self.assertEqual(result, [1,2,3,4,5,6,7,8,9,10,None,None,None,None,None,None,None,None,None,None,1,2,None,None,None,6,7,8,9,10,1,2,3,4,5,6,7,8,9,None])
def test_alias(self):
seriesList = self._generate_series_list()
substitution = "Ni!"
results = functions.alias({}, seriesList, substitution)
for series in results:
self.assertEqual(series.name, substitution)
def test_alias_sub(self):
seriesList = self._generate_series_list()
substitution = "Shrubbery"
results = functions.aliasSub({}, seriesList, r"^\w+", substitution)
for series in results:
self.assertTrue(series.name.startswith(substitution),
"aliasSub should replace the name with {0}".format(substitution),
)
# TODO: Add tests for * globbing and {} matching to this
def test_alias_by_node(self):
seriesList = self._generate_series_list()
def verify_node_name(*nodes):
# Use deepcopy so the original seriesList is unmodified
results = functions.aliasByNode({}, copy.deepcopy(seriesList), *nodes)
for i, series in enumerate(results):
fragments = seriesList[i].name.split('.')
# Super simplistic. Doesn't match {thing1,thing2}
# or glob with *, both of which graphite allows you to use
expected_name = '.'.join([fragments[i] for i in nodes])
self.assertEqual(series.name, expected_name)
verify_node_name(1)
verify_node_name(1, 0)
verify_node_name(-1, 0)
# Verify broken input causes broken output
with self.assertRaises(IndexError):
verify_node_name(10000)
def test_aliasByMetric(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
for series in seriesList:
series.pathExpression = series.name
expectedResult = [
TimeSeries('value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
request_context = {}
result = functions.aliasByMetric(request_context, seriesList)
self.assertEqual(result, expectedResult)
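# groupByNode groups series sharing the same value at the given node index
# and reduces each group with the named callback ('keepLastValue' here);
# groupByNodes, tested next, generalizes this to several node indexes.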
def test_groupByNode(self):
seriesList, inputList = self._generate_mr_series()
def verify_groupByNode(expectedResult, nodeNum):
results = functions.groupByNode({}, copy.deepcopy(seriesList), nodeNum, "keepLastValue")
self.assertEqual(results, expectedResult)
expectedResult = [
TimeSeries('group',0,1,1,[None]),
]
verify_groupByNode(expectedResult, 0)
expectedResult = [
TimeSeries('server1',0,1,1,[None]),
TimeSeries('server2',0,1,1,[None]),
]
verify_groupByNode(expectedResult, 1)
def test_groupByNodes(self):
seriesList, inputList = self._generate_mr_series()
def verify_groupByNodes(expectedResult, *nodes):
results = functions.groupByNodes({}, copy.deepcopy(seriesList), "keepLastValue", *nodes)
self.assertEqual(results, expectedResult)
expectedResult = [
TimeSeries('server1',0,1,1,[None]),
TimeSeries('server2',0,1,1,[None]),
]
verify_groupByNodes(expectedResult, 1)
expectedResult = [
TimeSeries('server1.metric1',0,1,1,[None]),
TimeSeries('server1.metric2',0,1,1,[None]),
TimeSeries('server2.metric1',0,1,1,[None]),
TimeSeries('server2.metric2',0,1,1,[None]),
]
verify_groupByNodes(expectedResult, 1, 2)
expectedResult = [
TimeSeries('server1.group',0,1,1,[None]),
TimeSeries('server2.group',0,1,1,[None]),
]
verify_groupByNodes(expectedResult, 1, 0)
def test_exclude(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
for series in seriesList:
series.pathExpression = series.name
expectedResult = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
request_context = {}
result = functions.exclude(request_context, seriesList, '.*db2')
self.assertEqual(result, expectedResult)
def test_grep(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
for series in seriesList:
series.pathExpression = series.name
expectedResult = [
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
]
request_context = {}
result = functions.grep(request_context, seriesList, '.*db2')
self.assertEqual(result, expectedResult)
def test_alpha(self):
seriesList = self._generate_series_list()
alpha = 0.5
results = functions.alpha({}, seriesList, alpha)
self._verify_series_options(results, "alpha", alpha)
def test_color(self):
seriesList = self._generate_series_list()
color = "red"
# Leave the original seriesList unmodified
results = functions.color({}, copy.deepcopy(seriesList), color)
for i, series in enumerate(results):
self.assertTrue(hasattr(series, "color"),
"The transformed seriesList is missing the 'color' attribute",
)
self.assertFalse(hasattr(seriesList[i], "color"),
"The original seriesList shouldn't have a 'color' attribute",
)
self.assertEqual(series.color, color)
def test_substr(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
expectedResult = [
TimeSeries('test-db1.load',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('test-db2.load',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('test-db3.load',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('test-db4.load',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
request_context = {}
result = functions.substr(request_context, seriesList, 1, 3)
self.assertEqual(result, expectedResult)
def test_substr_no_args(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
expectedResult = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
request_context = {}
result = functions.substr(request_context, seriesList)
self.assertEqual(result, expectedResult)
def test_substr_function_no_args(self):
seriesList = [
TimeSeries('scaleToSeconds(collectd.test-db1.load.value,60)',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('scaleToSeconds(collectd.test-db2.load.value,60)',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('scaleToSeconds(collectd.test-db3.load.value,60)',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('scaleToSeconds(collectd.test-db4.load.value,60)',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
expectedResult = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
request_context = {}
result = functions.substr(request_context, seriesList)
self.assertEqual(result, expectedResult)
def test_substr_function(self):
seriesList = [
TimeSeries('scaleToSeconds(collectd.test-db1.load.value,60)',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('scaleToSeconds(collectd.test-db2.load.value,60)',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('scaleToSeconds(collectd.test-db3.load.value,60)',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('scaleToSeconds(collectd.test-db4.load.value,60)',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
expectedResult = [
TimeSeries('test-db1.load',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('test-db2.load',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('test-db3.load',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('test-db4.load',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
request_context = {}
result = functions.substr(request_context, seriesList, 1, 3)
self.assertEqual(result, expectedResult)
def test_logarithm(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[-1,-2,None,None,None,-6,-7,-8,-9,-10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,None]),
]
expectedResult = [
TimeSeries('log(collectd.test-db1.load.value, 10)',0,600,60,[0.0,0.30103,0.4771213,0.60206,0.69897,0.7781513,0.845098,0.90309,0.9542425,1.0]),
TimeSeries('log(collectd.test-db2.load.value, 10)',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('log(collectd.test-db3.load.value, 10)',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('log(collectd.test-db4.load.value, 10)',0,600,60,[0.0,0.30103,0.4771213,0.60206,0.69897,0.7781513,0.845098,0.90309,0.9542425,None]),
]
request_context = {}
result = functions.logarithm(request_context, seriesList)
# Round values to 7 digits for easier equality testing
for i, series in enumerate(result):
for k, v in enumerate(series):
if type(v) is float:
series[k] = round(v,7)
self.assertEqual(result, expectedResult)
def test_maximumAbove(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[1,2,3,4,5,4,3,2,1,None]),
]
expectedResult = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,6,7,8,9,10]),
]
request_context = {}
result = functions.maximumAbove(request_context, seriesList, 5)
self.assertEqual(result, expectedResult)
def test_maximumAbove_empty_list(self):
# Test the function works properly with an empty seriesList provided.
self.assertEqual([], functions.maximumAbove({}, [], 1))
def test_minimumAbove(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,6,7,8,9,10]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[10,9,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
expectedResult = [
TimeSeries('collectd.test-db3.load.value',0,600,60,[10,9,None,None,None,6,7,8,9,10]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
request_context = {}
result = functions.minimumAbove(request_context, seriesList, 5)
self.assertEqual(result, expectedResult)
def test_minimumAbove_empty_list(self):
# Test the function works properly with an empty seriesList provided.
self.assertEqual([], functions.minimumAbove({}, [], 1))
def test_maximumBelow(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,2,1,0]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
expectedResult = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,2,1,0]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
]
request_context = {}
result = functions.maximumBelow(request_context, seriesList, 5)
self.assertEqual(result, expectedResult)
def test_maximumBelow_empty_list(self):
# Test the function works properly with an empty seriesList provided.
self.assertEqual([], functions.maximumBelow({}, [], 1))
def test_minimumBelow(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,2,1,0]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
expectedResult = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,2,1,0]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
]
request_context = {}
result = functions.minimumBelow(request_context, seriesList, 5)
self.assertEqual(result, expectedResult)
def test_minimumBelow_empty_list(self):
# Test the function works properly with an empty seriesList provided.
self.assertEqual([], functions.minimumBelow({}, [], 1))
def test_highestCurrent(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,5,6,7]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
expectedResult = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,5,6,7]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
request_context = {}
result = functions.highestCurrent(request_context, seriesList, 2)
self.assertEqual(result, expectedResult)
def test_highestCurrent_empty_list(self):
# Test the function works properly with an empty seriesList provided.
self.assertEqual([], functions.highestCurrent({}, [], 1))
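# highestMax ranks series by their maximum value and returns the top N in
# descending order; asking for more series than exist returns them all.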
def test_highest_max(self):
config = [20, 50, 30, 40]
seriesList = [range(max_val) for max_val in config]
# Expect the test results to be returned in descending order
expected = [
[seriesList[1]],
[seriesList[1], seriesList[3]],
[seriesList[1], seriesList[3], seriesList[2]],
# Test where num_return == len(seriesList)
[seriesList[1], seriesList[3], seriesList[2], seriesList[0]],
# Test where num_return > len(seriesList)
[seriesList[1], seriesList[3], seriesList[2], seriesList[0]],
]
for index, test in enumerate(expected):
results = functions.highestMax({}, seriesList, index + 1)
self.assertEqual(test, results)
def test_highest_max_empty_series_list(self):
# Test the function works properly with an empty seriesList provided.
self.assertEqual([], functions.highestMax({}, [], 1))
def test_lowestCurrent(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,5,6,7]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
expectedResult = [
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
]
request_context = {}
result = functions.lowestCurrent(request_context, seriesList, 2)
self.assertEqual(result, expectedResult)
def test_lowestCurrent_empty_list(self):
# Test the function works properly with an empty seriesList provided.
self.assertEqual([], functions.lowestCurrent({}, [], 1))
def test_currentAbove(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,5,6,7]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
expectedResult = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,5,6,7]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
request_context = {}
result = functions.currentAbove(request_context, seriesList, 2)
self.assertEqual(result, expectedResult)
def test_currentAbove_empty_list(self):
# Test the function works properly with an empty seriesList provided.
self.assertEqual([], functions.currentAbove({}, [], 1))
def test_currentBelow(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,5,6,7]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
expectedResult = [
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
]
request_context = {}
result = functions.currentBelow(request_context, seriesList, 2)
self.assertEqual(result, expectedResult)
def test_currentBelow_empty_list(self):
# Test the function works properly with an empty seriesList provided.
self.assertEqual([], functions.currentBelow({}, [], 1))
def test_highestAverage(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,5,6,7]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
expectedResult = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,5,6,7]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
request_context = {}
result = functions.highestAverage(request_context, seriesList, 2)
self.assertEqual(result, expectedResult)
def test_highestAverage_empty_list(self):
# Test the function works properly with an empty seriesList provided.
self.assertEqual([], functions.highestAverage({}, [], 1))
def test_lowestAverage(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,5,6,7]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
expectedResult = [
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
]
request_context = {}
result = functions.lowestAverage(request_context, seriesList, 2)
self.assertEqual(result, expectedResult)
def test_lowestAverage_empty_list(self):
# Test the function works properly with an empty seriesList provided.
self.assertEqual([], functions.lowestAverage({}, [], 1))
def test_averageAbove(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,5,6,7]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
expectedResult = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,5,6,7]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
request_context = {}
result = functions.averageAbove(request_context, seriesList, 2)
self.assertEqual(result, expectedResult)
def test_averageAbove_empty_list(self):
# Test the function works properly with an empty seriesList provided.
self.assertEqual([], functions.averageAbove({}, [], 1))
def test_averageBelow(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,5,6,7]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
expectedResult = [
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
]
request_context = {}
result = functions.averageBelow(request_context, seriesList, 2)
self.assertEqual(result, expectedResult)
def test_averageBelow_empty_list(self):
# Test the function works properly with an empty seriesList provided.
self.assertEqual([], functions.averageBelow({}, [], 1))
def test_constantLine(self):
requestContext = {
'startTime': datetime(2014,3,12,2,0,0,2,pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(2014,3,12,3,0,0,2,pytz.timezone(settings.TIME_ZONE))
}
results = functions.constantLine(requestContext, [1])
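# Smoke test only: constantLine must evaluate without raising; the result is not asserted.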
def test_aggregateLine_default(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,5,6,7]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
for series in seriesList:
series.pathExpression = series.name
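# The default aggregation is average: db1 -> 40/10 = 4.0, db2 -> all None,
# db3 -> 13/7 ~= 1.85714, db4 -> 74/9 ~= 8.22222 (the rounded value appears in the name).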
expectedResult = [
TimeSeries('aggregateLine(collectd.test-db1.load.value, 4)', 3600, 3660, 30, [4.0, 4.0, 4.0]),
TimeSeries('aggregateLine(collectd.test-db2.load.value, None)', 3600, 3660, 30, [None, None, None]),
TimeSeries('aggregateLine(collectd.test-db3.load.value, 1.85714)', 3600, 3660, 30, [1.8571428571428572, 1.8571428571428572, 1.8571428571428572]),
TimeSeries('aggregateLine(collectd.test-db4.load.value, 8.22222)', 3600, 3660, 30, [8.222222222222221, 8.222222222222221, 8.222222222222221]),
]
requestContext = {
'startTime': datetime(1970,1,1,1,0,0,0,pytz.timezone(settings.TIME_ZONE)),
'endTime':datetime(1970,1,1,1,1,0,0,pytz.timezone(settings.TIME_ZONE))
}
result = functions.aggregateLine(requestContext, seriesList)
self.assertEqual(result, expectedResult)
def test_aggregateLine_avg(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,5,6,7]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
for series in seriesList:
series.pathExpression = series.name
expectedResult = [
TimeSeries('aggregateLine(collectd.test-db1.load.value, 4)', 3600, 3600, 0, [4.0, 4.0, 4.0]),
TimeSeries('aggregateLine(collectd.test-db2.load.value, None)', 3600, 3600, 0, [None, None, None]),
TimeSeries('aggregateLine(collectd.test-db3.load.value, 1.85714)', 3600, 3600, 0, [1.8571428571428572, 1.8571428571428572, 1.8571428571428572]),
TimeSeries('aggregateLine(collectd.test-db4.load.value, 8.22222)', 3600, 3600, 0, [8.222222222222221, 8.222222222222221, 8.222222222222221]),
]
requestContext = {
'startTime': datetime(1970,1,1,1,0,0,0,pytz.timezone(settings.TIME_ZONE)),
'endTime':datetime(1970,1,1,1,0,0,0,pytz.timezone(settings.TIME_ZONE))
}
result = functions.aggregateLine(requestContext, seriesList, 'avg')
self.assertEqual(result, expectedResult)
def test_aggregateLine_min(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,5,6,7]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
for series in seriesList:
series.pathExpression = series.name
expectedResult = [
TimeSeries('aggregateLine(collectd.test-db1.load.value, 1)', 3600, 3600, 0, [1.0, 1.0, 1.0]),
TimeSeries('aggregateLine(collectd.test-db2.load.value, None)', 3600, 3600, 0, [None, None, None]),
TimeSeries('aggregateLine(collectd.test-db3.load.value, 0)', 3600, 3600, 0, [0.0, 0.0, 0.0]),
TimeSeries('aggregateLine(collectd.test-db4.load.value, 6)', 3600, 3600, 0, [6.0, 6.0, 6.0]),
]
requestContext = {
'startTime': datetime(1970,1,1,1,0,0,0,pytz.timezone(settings.TIME_ZONE)),
'endTime':datetime(1970,1,1,1,0,0,0,pytz.timezone(settings.TIME_ZONE))
}
result = functions.aggregateLine(requestContext, seriesList, 'min')
self.assertEqual(result, expectedResult)
def test_aggregateLine_max(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,5,6,7]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
for series in seriesList:
series.pathExpression = series.name
expectedResult = [
TimeSeries('aggregateLine(collectd.test-db1.load.value, 7)', 3600, 3600, 0, [7.0, 7.0, 7.0]),
TimeSeries('aggregateLine(collectd.test-db2.load.value, None)', 3600, 3600, 0, [None, None, None]),
TimeSeries('aggregateLine(collectd.test-db3.load.value, 4)', 3600, 3600, 0, [4.0, 4.0, 4.0]),
TimeSeries('aggregateLine(collectd.test-db4.load.value, 10)', 3600, 3600, 0, [10.0, 10.0, 10.0]),
]
requestContext = {
'startTime': datetime(1970,1,1,1,0,0,0,pytz.timezone(settings.TIME_ZONE)),
'endTime':datetime(1970,1,1,1,0,0,0,pytz.timezone(settings.TIME_ZONE))
}
result = functions.aggregateLine(requestContext, seriesList, 'max')
self.assertEqual(result, expectedResult)
def test_aggregateLine_bad(self):
seriesList = [
TimeSeries('collectd.test-db1.load.value',0,600,60,[1,2,3,4,5,4,3,5,6,7]),
TimeSeries('collectd.test-db2.load.value',0,600,60,[None,None,None,None,None,None,None,None,None,None]),
TimeSeries('collectd.test-db3.load.value',0,600,60,[1,2,None,None,None,4,3,2,1,0]),
TimeSeries('collectd.test-db4.load.value',0,600,60,[10,9,8,7,6,7,8,9,10,None]),
]
for series in seriesList:
series.pathExpression = series.name
requestContext = {
'startTime': datetime(1970,1,1,1,0,0,0,pytz.timezone(settings.TIME_ZONE)),
'endTime':datetime(1970,1,1,1,0,0,0,pytz.timezone(settings.TIME_ZONE))
}
with self.assertRaisesRegexp(ValueError, 'Invalid function bad'):
result = functions.aggregateLine(requestContext, seriesList, 'bad')
def test_threshold_default(self):
expectedResult = [
TimeSeries('7', 3600, 3600, 0, [7.0, 7.0, 7.0]),
]
requestContext = {
'startTime': datetime(1970,1,1,1,0,0,0,pytz.timezone(settings.TIME_ZONE)),
'endTime':datetime(1970,1,1,1,0,0,0,pytz.timezone(settings.TIME_ZONE))
}
result = functions.threshold(requestContext, 7)
self.assertEqual(result, expectedResult)
def test_threshold_label_color(self):
expectedResult = [
TimeSeries('MyLine', 3600, 3600, 0, [7.0, 7.0, 7.0]),
]
expectedResult[0].color='blue'
requestContext = {
'startTime': datetime(1970,1,1,1,0,0,0,pytz.timezone(settings.TIME_ZONE)),
'endTime':datetime(1970,1,1,1,0,0,0,pytz.timezone(settings.TIME_ZONE))
}
result = functions.threshold(requestContext, 7, 'MyLine', 'blue')
self.assertEqual(result, expectedResult)
def test_scale(self):
seriesList = self._generate_series_list()
multiplier = 2
# Leave the original seriesList undisturbed for verification
results = functions.scale({}, copy.deepcopy(seriesList), multiplier)
for i, series in enumerate(results):
for counter, value in enumerate(series):
if value is None:
continue
original_value = seriesList[i][counter]
expected_value = original_value * multiplier
self.assertEqual(value, expected_value)
def _generate_mr_series(self):
seriesList = [
TimeSeries('group.server1.metric1',0,1,1,[None]),
TimeSeries('group.server1.metric2',0,1,1,[None]),
TimeSeries('group.server2.metric1',0,1,1,[None]),
TimeSeries('group.server2.metric2',0,1,1,[None]),
]
mappedResult = [
[seriesList[0],seriesList[1]],
[seriesList[2],seriesList[3]]
]
return (seriesList,mappedResult)
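# mapSeries with mapNode=1 groups on the server-name node, yielding the two
# per-server lists built above.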
def test_mapSeries(self):
seriesList, expectedResult = self._generate_mr_series()
results = functions.mapSeries({}, copy.deepcopy(seriesList), 1)
self.assertEqual(results,expectedResult)
def test_reduceSeries(self):
sl, inputList = self._generate_mr_series()
expectedResult = [
TimeSeries('group.server2.reduce.mock',0,1,1,[None]),
TimeSeries('group.server2.reduce.mock',0,1,1,[None])
]
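# Both expected names read server2 deliberately: the mock returns the same
# TimeSeries object on every call, and reduceSeries renames that shared object
# in place, so the second rename overwrites the first for both list entries.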
resultSeriesList = [TimeSeries('mock(series)',0,1,1,[None])]
mock = MagicMock(return_value = resultSeriesList)
with patch.dict(functions.SeriesFunctions,{ 'mock': mock }):
results = functions.reduceSeries({}, copy.deepcopy(inputList), "mock", 2, "metric1","metric2" )
self.assertEqual(results,expectedResult)
self.assertEqual(mock.mock_calls,
[call({},[inputList[0][0]],[inputList[0][1]]),
call({},[inputList[1][0]],[inputList[1][1]])])
def test_reduceSeries_asPercent(self):
seriesList = [
TimeSeries('group.server1.bytes_used',0,1,1,[1]),
TimeSeries('group.server1.total_bytes',0,1,1,[2]),
TimeSeries('group.server2.bytes_used',0,1,1,[3]),
TimeSeries('group.server2.total_bytes',0,1,1,[4]),
]
for series in seriesList:
series.pathExpression = "tempPath"
expectedResult = [
TimeSeries('group.server1.reduce.asPercent',0,1,1,[50]), #100*1/2
TimeSeries('group.server2.reduce.asPercent',0,1,1,[75]) #100*3/4
]
mappedResult = [seriesList[0]],[seriesList[1]], [seriesList[2]],[seriesList[3]]
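# Four singleton groups are fine here: reduceSeries regroups members by their
# name prefix at reduceNode before reducing, so two per-server results come out.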
results = functions.reduceSeries({}, copy.deepcopy(mappedResult), "asPercent", 2, "bytes_used", "total_bytes")
self.assertEqual(results,expectedResult)
def test_pow(self):
seriesList = self._generate_series_list()
factor = 2
# Leave the original seriesList undisturbed for verification
results = functions.pow({}, copy.deepcopy(seriesList), factor)
for i, series in enumerate(results):
for counter, value in enumerate(series):
if value is None:
continue
original_value = seriesList[i][counter]
expected_value = math.pow(original_value, factor)
self.assertEqual(value, expected_value)
def test_squareRoot(self):
seriesList = self._generate_series_list()
# Leave the original seriesList undisturbed for verification
results = functions.squareRoot({}, copy.deepcopy(seriesList))
for i, series in enumerate(results):
for counter, value in enumerate(series):
original_value = seriesList[i][counter]
if value is None:
self.assertEqual(original_value, None)
continue
expected_value = math.pow(original_value, 0.5)
self.assertEqual(value, expected_value)
def test_invert(self):
seriesList = self._generate_series_list()
# Leave the original seriesList undisturbed for verification
results = functions.invert({}, copy.deepcopy(seriesList))
for i, series in enumerate(results):
for counter, value in enumerate(series):
original_value = seriesList[i][counter]
if value is None:
continue
expected_value = math.pow(original_value, -1)
self.assertEqual(value, expected_value)
def test_timeSlice(self):
seriesList = [
# series covers seconds 0 through 600 past the epoch (ten minutes)
# steps are every 60 seconds
TimeSeries('test.value',0,600,60,[None,1,2,3,None,5,6,None,7,8,9]),
]
# we're going to slice such that we only include minutes 3 to 8 (of 0 to 9)
expectedResult = [
TimeSeries('timeSlice(test.value, 180, 480)',0,600,60,[None,None,None,3,None,5,6,None,7,None,None])
]
results = functions.timeSlice({
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 9, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
'data': [],
}, seriesList, '00:03 19700101', '00:08 19700101')
self.assertEqual(results, expectedResult)
def test_legendValue_with_system_preserves_sign(self):
seriesList = [TimeSeries("foo", 0, 3, 1, [-10000, -20000, -30000, -40000])]
expectedResult = [TimeSeries("foo avg -25.00k ", 0, 3, 1, [-10000, -20000, -30000, -40000])]
result = functions.legendValue({}, seriesList, "avg", "si")
self.assertEqual(result, expectedResult)
def test_legendValue_all(self):
seriesList = [TimeSeries("foo", 0, 4, 1, [10000, 20000, -30000, -40000, None]),
TimeSeries("bar", 0, 4, 1, [0, 10000, 20000, -30000, -40000]),
TimeSeries("baz", 0, 4, 1, [None, None, None, None, None])]
expectedResult = [TimeSeries("foo (avg: -10000.0) (total: -40000) (min: -40000) (max: 20000) (last: -40000)", 0, 4, 1, [10000, 20000, -30000, -40000, None]),
TimeSeries("bar (avg: -8000.0) (total: -40000) (min: -40000) (max: 20000) (last: -40000)", 0, 4, 1, [0, 10000, 20000, -30000, -40000]),
TimeSeries("baz (avg: None) (total: None) (min: None) (max: None) (last: None)", 0, 4, 1, [None, None, None, None, None])]
result = functions.legendValue({}, seriesList, "avg", "total", "min", "max", "last")
self.assertEqual(result, expectedResult)
def test_legendValue_all_si(self):
seriesList = [TimeSeries("foo", 0, 4, 1, [10000, 20000, -30000, -40000, None]),
TimeSeries("bar", 0, 4, 1, [0, 10000, 20000, -30000, -40000]),
TimeSeries("baz", 0, 4, 1, [None, None, None, None, None])]
expectedResult = [TimeSeries("foo avg -10.00k total-40.00k min -40.00k max 20.00k last -40.00k ", 0, 4, 1, [10000, 20000, -30000, -40000, None]),
TimeSeries("bar avg -8.00k total-40.00k min -40.00k max 20.00k last -40.00k ", 0, 4, 1, [0, 10000, 20000, -30000, -40000]),
TimeSeries("baz avg None totalNone min None max None last None ", 0, 4, 1, [None, None, None, None, None])]
result = functions.legendValue({}, seriesList, "avg", "total", "min", "max", "last", "si")
self.assertEqual(result, expectedResult)
def test_legendValue_all_binary(self):
seriesList = [TimeSeries("foo", 0, 4, 1, [10000, 20000, -30000, -40000, None]),
TimeSeries("bar", 0, 4, 1, [0, 10000, 20000, -30000, -40000]),
TimeSeries("baz", 0, 4, 1, [None, None, None, None, None])]
expectedResult = [TimeSeries("foo avg -9.77Ki total-39.06Ki min -39.06Ki max 19.53Ki last -39.06Ki ", 0, 4, 1, [10000, 20000, -30000, -40000, None]),
TimeSeries("bar avg -7.81Ki total-39.06Ki min -39.06Ki max 19.53Ki last -39.06Ki ", 0, 4, 1, [0, 10000, 20000, -30000, -40000]),
TimeSeries("baz avg None totalNone min None max None last None ", 0, 4, 1, [None, None, None, None, None])]
result = functions.legendValue({}, seriesList, "avg", "total", "min", "max", "last", "binary")
self.assertEqual(result, expectedResult)
def test_legendValue_invalid_none(self):
seriesList = [TimeSeries("foo", 0, 4, 1, [10000, 20000, -30000, -40000, None]),
TimeSeries("bar", 0, 4, 1, [0, 10000, 20000, -30000, -40000]),
TimeSeries("baz", 0, 4, 1, [None, None, None, None, None])]
expectedResult = [TimeSeries("foo (avg: -10000.0) (bogus: (?))", 0, 4, 1, [10000, 20000, -30000, -40000, None]),
TimeSeries("bar (avg: -8000.0) (bogus: (?))", 0, 4, 1, [0, 10000, 20000, -30000, -40000]),
TimeSeries("baz (avg: None) (bogus: (?))", 0, 4, 1, [None, None, None, None, None])]
result = functions.legendValue({}, seriesList, "avg", "bogus")
self.assertEqual(result, expectedResult)
def test_linearRegression(self):
original = functions.evaluateTarget
try:
# saved series covers seconds 180 through 480 past the epoch (minutes 3 to 8)
# steps are every 60 seconds
savedSeries = TimeSeries('test.value',180,480,60,[3,None,5,6,None,8]),  # trailing comma: a 1-tuple, since evaluateTarget must return an iterable of series
functions.evaluateTarget = lambda x, y: savedSeries
# input values will be ignored and replaced by regression function
inputSeries = TimeSeries('test.value',1200,1500,60,[123,None,None,456,None,None,None])
inputSeries.pathExpression = 'test.value'
results = functions.linearRegression({
'startTime': datetime(1970, 1, 1, 0, 20, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 25, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
'data': [],
}, [ inputSeries ], '00:03 19700101', '00:08 19700101')
# regression function calculated from datapoints on minutes 3 to 8
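# every sample satisfies value = t/60 (180->3, 300->5, 360->6, 480->8),
# so the fitted line extends to 20.0..26.0 over minutes 20 through 26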
expectedResult = [
TimeSeries('linearRegression(test.value, 180, 480)',1200,1500,60,[20.0,21.0,22.0,23.0,24.0,25.0,26.0])
]
self.assertEqual(results, expectedResult)
finally:
functions.evaluateTarget = original
def test_applyByNode(self):
seriesList = [
TimeSeries('servers.s1.disk.bytes_used', 0, 3, 1, [10, 20, 30]),
TimeSeries('servers.s1.disk.bytes_free', 0, 3, 1, [90, 80, 70]),
TimeSeries('servers.s2.disk.bytes_used', 0, 3, 1, [1, 2, 3]),
TimeSeries('servers.s2.disk.bytes_free', 0, 3, 1, [99, 98, 97])
]
for series in seriesList:
series.pathExpression = series.name
def mock_data_fetcher(reqCtx, path_expression):
rv = []
for s in seriesList:
if s.name == path_expression or fnmatch(s.name, path_expression):
rv.append(s)
if rv:
return rv
raise KeyError('{} not found!'.format(path_expression))
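# expected ratios: s1 used/(used+free) = 10/100, 20/100, 30/100;
# s2 = 1/100, 2/100, 3/100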
expectedResults = [
TimeSeries('divideSeries(servers.s1.disk.bytes_used,sumSeries(servers.s1.disk.bytes_used,servers.s1.disk.bytes_free))', 0, 3, 1, [0.10, 0.20, 0.30]),
TimeSeries('divideSeries(servers.s2.disk.bytes_used,sumSeries(servers.s2.disk.bytes_used,servers.s2.disk.bytes_free))', 0, 3, 1, [0.01, 0.02, 0.03])
]
with patch('graphite.render.evaluator.fetchData', mock_data_fetcher):
result = functions.applyByNode(
{
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 9, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
},
seriesList, 1,
'divideSeries(%.disk.bytes_used, sumSeries(%.disk.bytes_*))'
)
self.assertEqual(result, expectedResults)
def test_applyByNode_newName(self):
seriesList = [
TimeSeries('servers.s1.disk.bytes_used', 0, 3, 1, [10, 20, 30]),
TimeSeries('servers.s1.disk.bytes_free', 0, 3, 1, [90, 80, 70]),
TimeSeries('servers.s2.disk.bytes_used', 0, 3, 1, [1, 2, 3]),
TimeSeries('servers.s2.disk.bytes_free', 0, 3, 1, [99, 98, 97])
]
for series in seriesList:
series.pathExpression = series.name
def mock_data_fetcher(reqCtx, path_expression):
rv = []
for s in seriesList:
if s.name == path_expression or fnmatch(s.name, path_expression):
rv.append(s)
if rv:
return rv
raise KeyError('{} not found!'.format(path_expression))
expectedResults = [
TimeSeries('servers.s1.disk.pct_used', 0, 3, 1, [0.10, 0.20, 0.30]),
TimeSeries('servers.s2.disk.pct_used', 0, 3, 1, [0.01, 0.02, 0.03])
]
with patch('graphite.render.evaluator.fetchData', mock_data_fetcher):
result = functions.applyByNode(
{
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 9, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
},
seriesList, 1,
'divideSeries(%.disk.bytes_used, sumSeries(%.disk.bytes_*))',
'%.disk.pct_used'
)
self.assertEqual(result, expectedResults)
def test_movingMedian_emptySeriesList(self):
self.assertEqual(functions.movingMedian({},[],""), [])
def test_movingMedian_evaluateTokens_returns_none(self):
def gen_seriesList(start=0):
seriesList = [
TimeSeries('collectd.test-db0.load.value', start+10, start+15, 1, range(start, start+15)),
]
for series in seriesList:
series.pathExpression = series.name
return seriesList
seriesList = gen_seriesList(10)
def mock_evaluateTokens(reqCtx, tokens, replacements=None):
seriesList = [
TimeSeries('collectd.test-db0.load.value', 10, 25, 1, [None, None, None, None, None, None, None, None, None, None, None, None, None, None, None])
]
for series in seriesList:
series.pathExpression = series.name
return seriesList
expectedResults = [
TimeSeries('movingMedian(collectd.test-db0.load.value,10)', 20, 25, 1, [None, None, None, None, None])
]
with patch('graphite.render.functions.evaluateTokens', mock_evaluateTokens):
result = functions.movingMedian(
{
'template': {},
'args': ({},{}),
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 9, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
'data': []
},
seriesList, 10
)
self.assertEqual(result, expectedResults)
def test_movingMedian_evaluateTokens_returns_half_none(self):
def gen_seriesList(start=0):
seriesList = [
TimeSeries('collectd.test-db0.load.value', start+10, start+20, 1, range(0, 10)),
]
for series in seriesList:
series.pathExpression = series.name
return seriesList
seriesList = gen_seriesList(10)
def mock_evaluateTokens(reqCtx, tokens, replacements=None):
seriesList = [
TimeSeries('collectd.test-db0.load.value', 10, 30, 1, [None] * 10 + range(0, 10))
]
for series in seriesList:
series.pathExpression = series.name
return seriesList
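# With a 10-point window sliding over ten Nones then 0..9, each output is the
# median of the non-None prefix: [0]->0, [0,1]->1 (even-sized windows resolve
# to the upper middle), [0,1,2]->1, [0..3]->2, and so on.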
expectedResults = [
TimeSeries('movingMedian(collectd.test-db0.load.value,10)', 20, 30, 1, [None, 0, 1, 1, 2, 2, 3, 3, 4, 4])
]
with patch('graphite.render.functions.evaluateTokens', mock_evaluateTokens):
result = functions.movingMedian(
{
'template': {},
'args': ({},{}),
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 9, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
'data': []
},
seriesList, 10
)
self.assertEqual(result, expectedResults)
def test_movingMedian_evaluateTokens_returns_empty_list(self):
def gen_seriesList(start=0):
seriesList = [
TimeSeries('collectd.test-db0.load.value', start+600, start+700, 1, range(start, start+100)),
]
for series in seriesList:
series.pathExpression = series.name
return seriesList
seriesList = gen_seriesList(10)
def mock_evaluateTokens(reqCtx, tokens, replacements=None):
return []
expectedResults = []
with patch('graphite.render.functions.evaluateTokens', mock_evaluateTokens):
result = functions.movingMedian(
{
'template': {},
'args': ({},{}),
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 9, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
'data': []
},
seriesList, 60
)
self.assertEqual(result, expectedResults)
def test_movingMedian_integerWindowSize(self):
def gen_seriesList(start=0):
seriesList = [
TimeSeries('collectd.test-db0.load.value', start+600, start+700, 1, range(start, start+100)),
]
for series in seriesList:
series.pathExpression = series.name
return seriesList
seriesList = gen_seriesList(10)
def mock_evaluateTokens(reqCtx, tokens, replacements=None):
return gen_seriesList()
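# The median of the 60-value window [n, n+59] is its upper-middle element
# n + 30, so the outputs run 30 through 69.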
expectedResults = [
TimeSeries('movingMedian(collectd.test-db0.load.value,60)', 660, 700, 1, range(30, 70)),
]
with patch('graphite.render.functions.evaluateTokens', mock_evaluateTokens):
result = functions.movingMedian(
{
'template': {},
'args': ({},{}),
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 9, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
'data': []
},
seriesList, 60
)
self.assertEqual(result, expectedResults)
def test_movingMedian_stringWindowSize(self):
def gen_seriesList(start=0):
seriesList = [
TimeSeries('collectd.test-db0.load.value', start+600, start+700, 1, range(start, start+100)),
]
for series in seriesList:
series.pathExpression = series.name
return seriesList
seriesList = gen_seriesList(10)
def mock_evaluateTokens(reqCtx, tokens, replacements=None):
return gen_seriesList()
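# A "-1min" string window resolves to the same 60 one-second points as the
# integer window in the previous test, so the expected values match range(30, 70).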
expectedResults = [
TimeSeries('movingMedian(collectd.test-db0.load.value,"-1min")', 660, 700, 1, range(30, 70)),
]
with patch('graphite.render.functions.evaluateTokens', mock_evaluateTokens):
result = functions.movingMedian(
{
'template': {},
'args': ({},{}),
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 9, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
'data': []
},
seriesList, "-1min"
)
self.assertEqual(result, expectedResults)
def test_movingAverage_emptySeriesList(self):
self.assertEqual(functions.movingAverage({},[],""), [])
def test_movingAverage_evaluateTokens_returns_none(self):
def gen_seriesList(start=0):
seriesList = [
TimeSeries('collectd.test-db0.load.value', start+10, start+15, 1, range(start, start+15)),
]
for series in seriesList:
series.pathExpression = series.name
return seriesList
seriesList = gen_seriesList(10)
def mock_evaluateTokens(reqCtx, tokens, replacements=None):
seriesList = [
TimeSeries('collectd.test-db0.load.value', 10, 25, 1, [None, None, None, None, None, None, None, None, None, None, None, None, None, None, None])
]
for series in seriesList:
series.pathExpression = series.name
return seriesList
expectedResults = [
TimeSeries('movingAverage(collectd.test-db0.load.value,10)', 20, 25, 1, [None, None, None, None, None])
]
with patch('graphite.render.functions.evaluateTokens', mock_evaluateTokens):
result = functions.movingAverage(
{
'template': {},
'args': ({},{}),
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 9, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
'data': []
},
seriesList, 10
)
self.assertEqual(result, expectedResults)
def test_movingAverage_evaluateTokens_returns_half_none(self):
def gen_seriesList(start=0):
seriesList = [
TimeSeries('collectd.test-db0.load.value', start+10, start+20, 1, range(0, 10)),
]
for series in seriesList:
series.pathExpression = series.name
return seriesList
seriesList = gen_seriesList(10)
def mock_evaluateTokens(reqCtx, tokens, replacements=None):
seriesList = [
TimeSeries('collectd.test-db0.load.value', 10, 30, 1, [None] * 10 + range(0, 10))
]
for series in seriesList:
series.pathExpression = series.name
return seriesList
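# Each output averages the non-None prefix of the window: the mean of 0..k is
# k/2, giving 0.0, 0.5, 1.0, ... after the all-None first point.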
expectedResults = [
TimeSeries('movingAverage(collectd.test-db0.load.value,10)', 20, 30, 1, [None, 0.0, 0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0])
]
with patch('graphite.render.functions.evaluateTokens', mock_evaluateTokens):
result = functions.movingAverage(
{
'template': {},
'args': ({},{}),
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 9, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
'data': []
},
seriesList, 10
)
self.assertEqual(result, expectedResults)
def test_movingAverage_evaluateTokens_returns_empty_list(self):
def gen_seriesList(start=0):
seriesList = [
TimeSeries('collectd.test-db0.load.value', start+600, start+700, 1, range(start, start+100)),
]
for series in seriesList:
series.pathExpression = series.name
return seriesList
seriesList = gen_seriesList(10)
def mock_evaluateTokens(reqCtx, tokens, replacements=None):
return []
expectedResults = []
with patch('graphite.render.functions.evaluateTokens', mock_evaluateTokens):
result = functions.movingAverage(
{
'template': {},
'args': ({},{}),
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 9, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
'data': []
},
seriesList, 60
)
self.assertEqual(result, expectedResults)
def test_movingAverage_integerWindowSize(self):
def gen_seriesList(start=0):
seriesList = [
TimeSeries('collectd.test-db0.load.value', start+600, start+700, 1, range(start, start+100)),
]
for series in seriesList:
series.pathExpression = series.name
return seriesList
seriesList = gen_seriesList(10)
def mock_evaluateTokens(reqCtx, tokens, replacements=None):
return gen_seriesList()
def frange(x,y,jump):
while x<y:
yield x
x+=jump
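# The mean of the 60-value window [n, n+59] is n + 29.5, so the outputs climb
# from 29.5 to 68.5.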
expectedResults = [
TimeSeries('movingAverage(collectd.test-db0.load.value,60)', 660, 700, 1, frange(29.5, 69.5, 1)),
]
with patch('graphite.render.functions.evaluateTokens', mock_evaluateTokens):
result = functions.movingAverage(
{
'template': {},
'args': ({},{}),
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 9, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
'data': []
},
seriesList, 60
)
self.assertEqual(result, expectedResults)
def test_movingAverage_stringWindowSize(self):
def gen_seriesList(start=0):
seriesList = [
TimeSeries('collectd.test-db0.load.value', start+600, start+700, 1, range(start, start+100)),
]
for series in seriesList:
series.pathExpression = series.name
return seriesList
seriesList = gen_seriesList(10)
def mock_evaluateTokens(reqCtx, tokens, replacements=None):
return gen_seriesList()
def frange(x,y,jump):
while x<y:
yield x
x+=jump
expectedResults = [
TimeSeries('movingAverage(collectd.test-db0.load.value,"-1min")', 660, 700, 1, frange(29.5, 69.5, 1)),
]
with patch('graphite.render.functions.evaluateTokens', mock_evaluateTokens):
result = functions.movingAverage(
{
'template': {},
'args': ({},{}),
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 9, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
'data': []
},
seriesList, "-1min"
)
self.assertEqual(result, expectedResults)
def test_holtWintersAnalysis_None(self):
series = TimeSeries('collectd.test-db0.load.value', 660, 700, 1, [None])  # a single TimeSeries: holtWintersAnalysis operates on one series, not a list
expectedResults = {
'predictions': TimeSeries('holtWintersForecast(collectd.test-db0.load.value)', 660, 700, 1, [None]),
'deviations': TimeSeries('holtWintersDeviation(collectd.test-db0.load.value)', 660, 700, 1, [0]),
'seasonals': [0],
'slopes': [0],
'intercepts': [None]
}
result = functions.holtWintersAnalysis(series)
self.assertEqual(result, expectedResults)
def test_holtWintersForecast(self):
def gen_seriesList(start=0):
seriesList = [
TimeSeries('collectd.test-db0.load.value', start+600, start+700, 1, range(start, start+100)),
]
for series in seriesList:
series.pathExpression = series.name
return seriesList
seriesList = gen_seriesList(10)
def mock_evaluateTokens(reqCtx, tokens, replacements=None):
return gen_seriesList()
expectedResults = [
TimeSeries('holtWintersForecast(collectd.test-db0.load.value)', 605400, 700, 1, [])
]
with patch('graphite.render.functions.evaluateTokens', mock_evaluateTokens):
result = functions.holtWintersForecast(
{
'template': {},
'args': ({},{}),
'startTime': datetime(1970, 2, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 2, 1, 0, 9, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
'data': []
},
seriesList
)
self.assertEqual(result, expectedResults)
def test_holtWintersConfidenceBands(self):
points=10
step=600
start_time=2678400 # 1970-02-01
week_seconds=7*86400
def hw_range(x,y,jump):
while x<y:
yield (x/jump)%10
x+=jump
def gen_seriesList(start=0, points=10):
seriesList = [
TimeSeries('collectd.test-db0.load.value', start, start+(points*step), step, hw_range(0, points*step, step)),
]
for series in seriesList:
series.pathExpression = series.name
return seriesList
seriesList = gen_seriesList(start_time, points)
def mock_evaluateTokens(reqCtx, tokens, replacements=None):
return gen_seriesList(start_time-week_seconds, (week_seconds/step)+points)
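# The mock hands back the series with a full week of history prepended,
# mirroring the 7-day seasonal bootstrap that the holt-winters functions fetch.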
expectedResults = [
TimeSeries('holtWintersConfidenceLower(collectd.test-db0.load.value)', start_time, start_time+(points*step), step, [0.2841206166091448, 1.0581027098774411, 0.3338172102994683, 0.5116859493263242, -0.18199175514936972, 0.2366173792019426, -1.2941554508809152, -0.513426806531049, -0.7970905542723132, 0.09868900726536012]),
TimeSeries('holtWintersConfidenceUpper(collectd.test-db0.load.value)', start_time, start_time+(points*step), step, [8.424944558327624, 9.409422251880809, 10.607070189221787, 10.288439865038768, 9.491556863132963, 9.474595784593738, 8.572310478053845, 8.897670449095346, 8.941566968508148, 9.409728797779282])
]
with patch('graphite.render.functions.evaluateTokens', mock_evaluateTokens):
result = functions.holtWintersConfidenceBands(
{
'template': {},
'args': ({},{}),
'startTime': datetime(1970, 2, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 2, 1, 0, 9, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
'data': []
},
seriesList
)
self.assertEqual(result, expectedResults)
def test_holtWintersConfidenceArea(self):
points=10
step=600
start_time=2678400 # 1970-02-01
week_seconds=7*86400
def hw_range(x,y,jump):
while x<y:
yield (x/jump)%10
x+=jump
def gen_seriesList(start=0, points=10):
seriesList = [
TimeSeries('collectd.test-db0.load.value', start, start+(points*step), step, hw_range(0, points*step, step)),
]
for series in seriesList:
series.pathExpression = series.name
return seriesList
seriesList = gen_seriesList(start_time, points)
def mock_evaluateTokens(reqCtx, tokens, replacements=None):
return gen_seriesList(start_time-week_seconds, (week_seconds/step)+points)
expectedResults = [
TimeSeries('holtWintersConfidenceArea(collectd.test-db0.load.value)', start_time, start_time+(points*step), step, [0.2841206166091448, 1.0581027098774411, 0.3338172102994683, 0.5116859493263242, -0.18199175514936972, 0.2366173792019426, -1.2941554508809152, -0.513426806531049, -0.7970905542723132, 0.09868900726536012]),
TimeSeries('holtWintersConfidenceArea(collectd.test-db0.load.value)', start_time, start_time+(points*step), step, [8.424944558327624, 9.409422251880809, 10.607070189221787, 10.288439865038768, 9.491556863132963, 9.474595784593738, 8.572310478053845, 8.897670449095346, 8.941566968508148, 9.409728797779282]),
]
expectedResults[0].options = {'invisible': True, 'stacked': True}
expectedResults[1].options = {'stacked': True}
with patch('graphite.render.functions.evaluateTokens', mock_evaluateTokens):
result = functions.holtWintersConfidenceArea(
{
'template': {},
'args': ({},{}),
'startTime': datetime(1970, 2, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 2, 1, 0, 9, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
'data': []
},
seriesList
)
self.assertEqual(result, expectedResults)
def test_holtWintersAberration(self):
points=10
step=600
start_time=2678400 # 1970-02-01
week_seconds=7*86400
def hw_range(x,y,jump):
while x<y:
yield (x/jump)%10
x+=jump
def gen_seriesList(start=0, points=10):
seriesList = [
TimeSeries('collectd.test-db0.load.value', start, start+(points*step), step, hw_range(0, points*step, step)),
]
for series in seriesList:
series.pathExpression = series.name
return seriesList
seriesList = gen_seriesList(start_time, points)
def mock_evaluateTokens(reqCtx, tokens, replacements=None):
return gen_seriesList(start_time-week_seconds, (week_seconds/step)+points)
expectedResults = [
TimeSeries('holtWintersAberration(collectd.test-db0.load.value)', start_time, start_time+(points*step), step, [-0.2841206166091448, -0.05810270987744115, 0, 0, 0, 0, 0, 0, 0, 0])
]
with patch('graphite.render.functions.evaluateTokens', mock_evaluateTokens):
result = functions.holtWintersAberration(
{
'template': {},
'args': ({},{}),
'startTime': datetime(1970, 2, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 2, 1, 0, 9, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
'data': []
},
seriesList
)
self.assertEqual(result, expectedResults)
def test_smartSummarize_1day(self):
seriesList = [
TimeSeries('servers.s1.disk.bytes_used', 0, 86400, 60, range(0,86400,60)),
TimeSeries('servers.s1.disk.bytes_free', 0, 86400, 60, range(0, -86400, -60)),
TimeSeries('servers.s2.disk.bytes_used', 0, 86400, 60, [None]*1440),
TimeSeries('servers.s2.disk.bytes_free', 0, 86400, 60, range(0,1440))
]
for series in seriesList:
series.pathExpression = series.name
def mock_data_fetcher(reqCtx, path_expression):
rv = []
for s in seriesList:
if s.name == path_expression or fnmatch(s.name, path_expression):
rv.append(s)
if rv:
return rv
raise KeyError('{} not found!'.format(path_expression))
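# Sanity of the expected numbers: bytes_used sums 60*(0+1+...+1439)
# = 60*1036080 = 62164800, and its mean over 1440 points is 43170.0;
# s2 bytes_free sums 0+1+...+1439 = 1036080.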
expectedResults = {'sum' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1d", "sum")', 0, 86400, 86400, [62164800]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1d", "sum")', 0, 86400, 86400, [-62164800]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1d", "sum")', 0, 86400, 86400, [None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1d", "sum")', 0, 86400, 86400, [1036080])
],
'avg' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1d", "avg")', 0, 86400, 86400, [43170.0]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1d", "avg")', 0, 86400, 86400, [-43170.0]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1d", "avg")', 0, 86400, 86400, [None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1d", "avg")', 0, 86400, 86400, [719.5])
],
'last' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1d", "last")', 0, 86400, 86400, [86340]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1d", "last")', 0, 86400, 86400, [-86340]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1d", "last")', 0, 86400, 86400, [None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1d", "last")', 0, 86400, 86400, [1439])
],
'max' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1d", "max")', 0, 86400, 86400, [86340]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1d", "max")', 0, 86400, 86400, [0]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1d", "max")', 0, 86400, 86400, [None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1d", "max")', 0, 86400, 86400, [1439])
],
'min' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1d", "min")', 0, 86400, 86400, [0]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1d", "min")', 0, 86400, 86400, [-86340]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1d", "min")', 0, 86400, 86400, [None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1d", "min")', 0, 86400, 86400, [0])
],
}
for func in expectedResults:
with patch('graphite.render.evaluator.fetchData', mock_data_fetcher):
result = functions.smartSummarize(
{
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 2, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
},
seriesList, "1d", func)
self.assertEqual(result, expectedResults[func])
def test_smartSummarize_1hour(self):
seriesList = [
TimeSeries('servers.s1.disk.bytes_used', 0, 14400, 1, range(0,14400,1)),
TimeSeries('servers.s1.disk.bytes_free', 0, 14400, 1, range(0, -14400, -1)),
TimeSeries('servers.s2.disk.bytes_used', 0, 14400, 1, [None]*14400),
TimeSeries('servers.s2.disk.bytes_free', 0, 14400, 1, range(0,14400*2,2))
]
for series in seriesList:
series.pathExpression = series.name
def mock_data_fetcher(reqCtx, path_expression):
rv = []
for s in seriesList:
if s.name == path_expression or fnmatch(s.name, path_expression):
rv.append(s)
if rv:
return rv
raise KeyError('{} not found!'.format(path_expression))
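# Each hourly bucket of bytes_used sums 3600 consecutive integers:
# 0..3599 -> 6478200, 3600..7199 -> 19438200, and so on.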
expectedResults = {'sum' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1hour", "sum")', 0, 14400, 3600, [6478200, 19438200, 32398200, 45358200]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1hour", "sum")', 0, 14400, 3600, [-6478200, -19438200, -32398200, -45358200]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1hour", "sum")', 0, 14400, 3600, [None, None, None, None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1hour", "sum")', 0, 14400, 3600, [12956400, 38876400, 64796400, 90716400])
],
'avg' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1hour", "avg")', 0, 14400, 3600, [1799.5, 5399.5, 8999.5, 12599.5]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1hour", "avg")', 0, 14400, 3600, [-1799.5, -5399.5, -8999.5, -12599.5]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1hour", "avg")', 0, 14400, 3600, [None, None, None, None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1hour", "avg")', 0, 14400, 3600, [3599.0, 10799.0, 17999.0, 25199.0])
],
'last' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1hour", "last")', 0, 14400, 3600, [3599, 7199, 10799, 14399]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1hour", "last")', 0, 14400, 3600, [-3599, -7199, -10799, -14399]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1hour", "last")', 0, 14400, 3600, [None, None, None, None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1hour", "last")', 0, 14400, 3600, [7198, 14398, 21598, 28798])
],
'max' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1hour", "max")', 0, 14400, 3600, [3599, 7199, 10799, 14399]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1hour", "max")', 0, 14400, 3600, [0, -3600, -7200, -10800]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1hour", "max")', 0, 14400, 3600, [None, None, None, None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1hour", "max")', 0, 14400, 3600, [7198, 14398, 21598, 28798])
],
'min' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1hour", "min")', 0, 14400, 3600, [0, 3600, 7200, 10800]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1hour", "min")', 0, 14400, 3600, [-3599, -7199, -10799, -14399]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1hour", "min")', 0, 14400, 3600, [None, None, None, None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1hour", "min")', 0, 14400, 3600, [0, 7200, 14400, 21600])
],
}
for func in expectedResults:
with patch('graphite.render.evaluator.fetchData', mock_data_fetcher):
result = functions.smartSummarize(
{
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 4, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
},
seriesList, "1hour", func)
self.assertEqual(result, expectedResults[func])
def test_smartSummarize_1minute(self):
seriesList = [
TimeSeries('servers.s1.disk.bytes_used', 0, 240, 1, range(0,240)),
TimeSeries('servers.s1.disk.bytes_free', 0, 240, 1, range(0, -240, -1)),
TimeSeries('servers.s2.disk.bytes_used', 0, 240, 1, [None]*240),
TimeSeries('servers.s2.disk.bytes_free', 0, 240, 1, range(0,480,2))
]
for series in seriesList:
series.pathExpression = series.name
def mock_data_fetcher(reqCtx, path_expression):
rv = []
for s in seriesList:
if s.name == path_expression or fnmatch(s.name, path_expression):
rv.append(s)
if rv:
return rv
raise KeyError('{} not found!'.format(path_expression))
expectedResults = {'sum' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1minute", "sum")', 0, 240, 60, [1770, 5370, 8970, 12570]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1minute", "sum")', 0, 240, 60, [-1770, -5370, -8970, -12570]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1minute", "sum")', 0, 240, 60, [None, None, None, None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1minute", "sum")', 0, 240, 60, [3540, 10740, 17940, 25140])
],
'avg' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1minute", "avg")', 0, 240, 60, [29.5, 89.5, 149.5, 209.5]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1minute", "avg")', 0, 240, 60, [-29.5, -89.5, -149.5, -209.5]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1minute", "avg")', 0, 240, 60, [None, None, None, None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1minute", "avg")', 0, 240, 60, [59.0, 179.0, 299.0, 419.0])
],
'last' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1minute", "last")', 0, 240, 60, [59, 119, 179, 239]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1minute", "last")', 0, 240, 60, [-59, -119, -179, -239]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1minute", "last")', 0, 240, 60, [None, None, None, None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1minute", "last")', 0, 240, 60, [118, 238, 358, 478])
],
'max' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1minute", "max")', 0, 240, 60, [59, 119, 179, 239]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1minute", "max")', 0, 240, 60, [0, -60, -120, -180]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1minute", "max")', 0, 240, 60, [None, None, None, None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1minute", "max")', 0, 240, 60, [118, 238, 358, 478])
],
'min' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1minute", "min")', 0, 240, 60, [0, 60, 120, 180]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1minute", "min")', 0, 240, 60, [-59, -119, -179, -239]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1minute", "min")', 0, 240, 60, [None, None, None, None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1minute", "min")', 0, 240, 60, [0, 120, 240, 360])
],
}
for func in expectedResults:
with patch('graphite.render.evaluator.fetchData', mock_data_fetcher):
result = functions.smartSummarize(
{
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 4, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
},
seriesList, "1minute", func)
self.assertEqual(result, expectedResults[func])
def test_smartSummarize_1minute_alignToFrom(self):
seriesList = [
TimeSeries('servers.s1.disk.bytes_used', 0, 240, 1, range(0,240)),
TimeSeries('servers.s1.disk.bytes_free', 0, 240, 1, range(0, -240, -1)),
TimeSeries('servers.s2.disk.bytes_used', 0, 240, 1, [None]*240),
TimeSeries('servers.s2.disk.bytes_free', 0, 240, 1, range(0,480,2))
]
for series in seriesList:
series.pathExpression = series.name
def mock_data_fetcher(reqCtx, path_expression):
rv = []
for s in seriesList:
if s.name == path_expression or fnmatch(s.name, path_expression):
rv.append(s)
if rv:
return rv
raise KeyError('{} not found!'.format(path_expression))
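# The data begins exactly on the requested start, so alignToFrom=True produces
# the same buckets as the default-aligned test above.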
expectedResults = {'sum' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1minute", "sum")', 0, 240, 60, [1770, 5370, 8970, 12570]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1minute", "sum")', 0, 240, 60, [-1770, -5370, -8970, -12570]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1minute", "sum")', 0, 240, 60, [None, None, None, None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1minute", "sum")', 0, 240, 60, [3540, 10740, 17940, 25140])
],
'avg' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1minute", "avg")', 0, 240, 60, [29.5, 89.5, 149.5, 209.5]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1minute", "avg")', 0, 240, 60, [-29.5, -89.5, -149.5, -209.5]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1minute", "avg")', 0, 240, 60, [None, None, None, None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1minute", "avg")', 0, 240, 60, [59.0, 179.0, 299.0, 419.0])
],
'last' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1minute", "last")', 0, 240, 60, [59, 119, 179, 239]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1minute", "last")', 0, 240, 60, [-59, -119, -179, -239]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1minute", "last")', 0, 240, 60, [None, None, None, None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1minute", "last")', 0, 240, 60, [118, 238, 358, 478])
],
'max' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1minute", "max")', 0, 240, 60, [59, 119, 179, 239]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1minute", "max")', 0, 240, 60, [0, -60, -120, -180]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1minute", "max")', 0, 240, 60, [None, None, None, None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1minute", "max")', 0, 240, 60, [118, 238, 358, 478])
],
'min' : [
TimeSeries('smartSummarize(servers.s1.disk.bytes_used, "1minute", "min")', 0, 240, 60, [0, 60, 120, 180]),
TimeSeries('smartSummarize(servers.s1.disk.bytes_free, "1minute", "min")', 0, 240, 60, [-59, -119, -179, -239]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_used, "1minute", "min")', 0, 240, 60, [None, None, None, None]),
TimeSeries('smartSummarize(servers.s2.disk.bytes_free, "1minute", "min")', 0, 240, 60, [0, 120, 240, 360])
],
}
for func in expectedResults:
with patch('graphite.render.evaluator.fetchData', mock_data_fetcher):
result = functions.smartSummarize(
{
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 4, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
},
seriesList, "1minute", func, True)
self.assertEqual(result, expectedResults[func])
def test_hitcount_1day(self):
seriesList = [
TimeSeries('servers.s1.disk.bytes_used', 0, 86400, 60, range(0,86400,60)),
TimeSeries('servers.s1.disk.bytes_free', 0, 86400, 60, range(0, -86400, -60)),
TimeSeries('servers.s2.disk.bytes_used', 0, 86400, 60, [None]*1440),
TimeSeries('servers.s2.disk.bytes_free', 0, 86400, 60, range(0,1440))
]
for series in seriesList:
series.pathExpression = series.name
def mock_data_fetcher(reqCtx, path_expression):
rv = []
for s in seriesList:
if s.name == path_expression or fnmatch(s.name, path_expression):
rv.append(s)
if rv:
return rv
raise KeyError('{} not found!'.format(path_expression))
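# hitcount scales each datapoint by the 60s step, turning per-second rates into
# totals: 62164800 * 60 = 3729888000; the second "1d" bucket lies beyond the
# data, hence None.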
expectedResults = [
TimeSeries('hitcount(servers.s1.disk.bytes_used, "1d", true)', 0, 172800, 86400, [3729888000, None]),
TimeSeries('hitcount(servers.s1.disk.bytes_free, "1d", true)', 0, 172800, 86400, [-3729888000, None]),
TimeSeries('hitcount(servers.s2.disk.bytes_used, "1d", true)', 0, 172800, 86400, [None, None]),
TimeSeries('hitcount(servers.s2.disk.bytes_free, "1d", true)', 0, 172800, 86400, [62164800, None])
]
with patch('graphite.render.evaluator.fetchData', mock_data_fetcher):
result = functions.hitcount(
{
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 2, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
},
seriesList, "1d", True)
self.assertEqual(result, expectedResults)
def test_hitcount_1hour(self):
seriesList = [
TimeSeries('servers.s1.disk.bytes_used', 0, 14400, 1, range(0,14400,1)),
TimeSeries('servers.s1.disk.bytes_free', 0, 14400, 1, range(0, -14400, -1)),
TimeSeries('servers.s2.disk.bytes_used', 0, 14400, 1, [None]*14400),
TimeSeries('servers.s2.disk.bytes_free', 0, 14400, 1, range(0,14400*2,2))
]
for series in seriesList:
series.pathExpression = series.name
def mock_data_fetcher(reqCtx, path_expression):
rv = []
for s in seriesList:
if s.name == path_expression or fnmatch(s.name, path_expression):
rv.append(s)
if rv:
return rv
raise KeyError('{} not found!'.format(path_expression))
expectedResults = [
TimeSeries('hitcount(servers.s1.disk.bytes_used, "1hour", true)', 0, 18000, 3600, [6478200, 19438200, 32398200, 45358200, None]),
TimeSeries('hitcount(servers.s1.disk.bytes_free, "1hour", true)', 0, 18000, 3600, [-6478200, -19438200, -32398200, -45358200, None]),
TimeSeries('hitcount(servers.s2.disk.bytes_used, "1hour", true)', 0, 18000, 3600, [None, None, None, None, None]),
TimeSeries('hitcount(servers.s2.disk.bytes_free, "1hour", true)', 0, 18000, 3600, [12956400, 38876400, 64796400, 90716400, None])
]
with patch('graphite.render.evaluator.fetchData', mock_data_fetcher):
result = functions.hitcount(
{
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 4, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
},
seriesList, "1hour", True)
self.assertEqual(result, expectedResults)
def test_hitcount_1minute(self):
seriesList = [
TimeSeries('servers.s1.disk.bytes_used', 0, 240, 1, range(0,240)),
TimeSeries('servers.s1.disk.bytes_free', 0, 240, 1, range(0, -240, -1)),
TimeSeries('servers.s2.disk.bytes_used', 0, 240, 1, [None]*240),
TimeSeries('servers.s2.disk.bytes_free', 0, 240, 1, range(0,480,2))
]
for series in seriesList:
series.pathExpression = series.name
def mock_data_fetcher(reqCtx, path_expression):
rv = []
for s in seriesList:
if s.name == path_expression or fnmatch(s.name, path_expression):
rv.append(s)
if rv:
return rv
raise KeyError('{} not found!'.format(path_expression))
expectedResults = [
TimeSeries('hitcount(servers.s1.disk.bytes_used, "1minute", true)', 0, 300, 60, [1770, 5370, 8970, 12570, None]),
TimeSeries('hitcount(servers.s1.disk.bytes_free, "1minute", true)', 0, 300, 60, [-1770, -5370, -8970, -12570, None]),
TimeSeries('hitcount(servers.s2.disk.bytes_used, "1minute", true)', 0, 300, 60, [None, None, None, None, None]),
TimeSeries('hitcount(servers.s2.disk.bytes_free, "1minute", true)', 0, 300, 60, [3540, 10740, 17940, 25140, None])
]
with patch('graphite.render.evaluator.fetchData', mock_data_fetcher):
result = functions.hitcount(
{
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 4, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
},
seriesList, "1minute", True)
self.assertEqual(result, expectedResults)
def test_hitcount_1minute_alignToFrom_false(self):
seriesList = [
TimeSeries('servers.s1.disk.bytes_used', 0, 240, 1, range(0,240)),
TimeSeries('servers.s1.disk.bytes_free', 0, 240, 1, range(0, -240, -1)),
TimeSeries('servers.s2.disk.bytes_used', 0, 240, 1, [None]*240),
TimeSeries('servers.s2.disk.bytes_free', 0, 240, 1, range(0,480,2))
]
for series in seriesList:
series.pathExpression = series.name
def mock_data_fetcher(reqCtx, path_expression):
rv = []
for s in seriesList:
if s.name == path_expression or fnmatch(s.name, path_expression):
rv.append(s)
if rv:
return rv
raise KeyError('{} not found!'.format(path_expression))
expectedResults = [
TimeSeries('hitcount(servers.s1.disk.bytes_used, "1minute")', 0, 240, 60, [1770, 5370, 8970, 12570]),
TimeSeries('hitcount(servers.s1.disk.bytes_free, "1minute")', 0, 240, 60, [-1770, -5370, -8970, -12570]),
TimeSeries('hitcount(servers.s2.disk.bytes_used, "1minute")', 0, 240, 60, [None, None, None, None]),
TimeSeries('hitcount(servers.s2.disk.bytes_free, "1minute")', 0, 240, 60, [3540, 10740, 17940, 25140])
]
with patch('graphite.render.evaluator.fetchData', mock_data_fetcher):
result = functions.hitcount(
{
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 4, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
},
seriesList, "1minute", False)
self.assertEqual(result, expectedResults)
def test_summarize_1minute(self):
seriesList = [
TimeSeries('servers.s1.disk.bytes_used', 0, 240, 1, range(0,240)),
TimeSeries('servers.s1.disk.bytes_free', 0, 240, 1, range(0, -240, -1)),
TimeSeries('servers.s2.disk.bytes_used', 0, 240, 1, [None]*240),
TimeSeries('servers.s2.disk.bytes_free', 0, 240, 1, range(0,480,2))
]
for series in seriesList:
series.pathExpression = series.name
expectedResults = {'sum' : [
TimeSeries('summarize(servers.s1.disk.bytes_used, "1minute", "sum")', 0, 300, 60, [1770, 5370, 8970, 12570, None]),
TimeSeries('summarize(servers.s1.disk.bytes_free, "1minute", "sum")', 0, 300, 60, [-1770, -5370, -8970, -12570, None]),
TimeSeries('summarize(servers.s2.disk.bytes_used, "1minute", "sum")', 0, 300, 60, [None, None, None, None, None]),
TimeSeries('summarize(servers.s2.disk.bytes_free, "1minute", "sum")', 0, 300, 60, [3540, 10740, 17940, 25140, None])
],
'avg' : [
TimeSeries('summarize(servers.s1.disk.bytes_used, "1minute", "avg")', 0, 300, 60, [29.5, 89.5, 149.5, 209.5, None]),
TimeSeries('summarize(servers.s1.disk.bytes_free, "1minute", "avg")', 0, 300, 60, [-29.5, -89.5, -149.5, -209.5, None]),
TimeSeries('summarize(servers.s2.disk.bytes_used, "1minute", "avg")', 0, 300, 60, [None, None, None, None, None]),
TimeSeries('summarize(servers.s2.disk.bytes_free, "1minute", "avg")', 0, 300, 60, [59.0, 179.0, 299.0, 419.0, None])
],
'last' : [
TimeSeries('summarize(servers.s1.disk.bytes_used, "1minute", "last")', 0, 300, 60, [59, 119, 179, 239, None]),
TimeSeries('summarize(servers.s1.disk.bytes_free, "1minute", "last")', 0, 300, 60, [-59, -119, -179, -239, None]),
TimeSeries('summarize(servers.s2.disk.bytes_used, "1minute", "last")', 0, 300, 60, [None, None, None, None, None]),
TimeSeries('summarize(servers.s2.disk.bytes_free, "1minute", "last")', 0, 300, 60, [118, 238, 358, 478, None])
],
'max' : [
TimeSeries('summarize(servers.s1.disk.bytes_used, "1minute", "max")', 0, 300, 60, [59, 119, 179, 239, None]),
TimeSeries('summarize(servers.s1.disk.bytes_free, "1minute", "max")', 0, 300, 60, [0, -60, -120, -180, None]),
TimeSeries('summarize(servers.s2.disk.bytes_used, "1minute", "max")', 0, 300, 60, [None, None, None, None, None]),
TimeSeries('summarize(servers.s2.disk.bytes_free, "1minute", "max")', 0, 300, 60, [118, 238, 358, 478, None])
],
'min' : [
TimeSeries('summarize(servers.s1.disk.bytes_used, "1minute", "min")', 0, 300, 60, [0, 60, 120, 180, None]),
TimeSeries('summarize(servers.s1.disk.bytes_free, "1minute", "min")', 0, 300, 60, [-59, -119, -179, -239, None]),
TimeSeries('summarize(servers.s2.disk.bytes_used, "1minute", "min")', 0, 300, 60, [None, None, None, None, None]),
TimeSeries('summarize(servers.s2.disk.bytes_free, "1minute", "min")', 0, 300, 60, [0, 120, 240, 360, None])
],
}
for func in expectedResults:
for series in expectedResults[func]:
series.pathExpression = series.name
result = functions.summarize(
{
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 4, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
},
seriesList, "1minute", func)
self.assertEqual(result, expectedResults[func])
def test_summarize_1minute_alignToFrom(self):
seriesList = [
TimeSeries('servers.s1.disk.bytes_used', 0, 240, 1, range(0,240)),
TimeSeries('servers.s1.disk.bytes_free', 0, 240, 1, range(0, -240, -1)),
TimeSeries('servers.s2.disk.bytes_used', 0, 240, 1, [None]*240),
TimeSeries('servers.s2.disk.bytes_free', 0, 240, 1, range(0,480,2))
]
for series in seriesList:
series.pathExpression = series.name
expectedResults = {'sum' : [
TimeSeries('summarize(servers.s1.disk.bytes_used, "1minute", "sum", true)', 0, 240, 60, [1770, 5370, 8970, 12570]),
TimeSeries('summarize(servers.s1.disk.bytes_free, "1minute", "sum", true)', 0, 240, 60, [-1770, -5370, -8970, -12570]),
TimeSeries('summarize(servers.s2.disk.bytes_used, "1minute", "sum", true)', 0, 240, 60, [None, None, None, None]),
TimeSeries('summarize(servers.s2.disk.bytes_free, "1minute", "sum", true)', 0, 240, 60, [3540, 10740, 17940, 25140])
],
'avg' : [
TimeSeries('summarize(servers.s1.disk.bytes_used, "1minute", "avg", true)', 0, 240, 60, [29.5, 89.5, 149.5, 209.5]),
TimeSeries('summarize(servers.s1.disk.bytes_free, "1minute", "avg", true)', 0, 240, 60, [-29.5, -89.5, -149.5, -209.5]),
TimeSeries('summarize(servers.s2.disk.bytes_used, "1minute", "avg", true)', 0, 240, 60, [None, None, None, None]),
TimeSeries('summarize(servers.s2.disk.bytes_free, "1minute", "avg", true)', 0, 240, 60, [59.0, 179.0, 299.0, 419.0])
],
'last' : [
TimeSeries('summarize(servers.s1.disk.bytes_used, "1minute", "last", true)', 0, 240, 60, [59, 119, 179, 239]),
TimeSeries('summarize(servers.s1.disk.bytes_free, "1minute", "last", true)', 0, 240, 60, [-59, -119, -179, -239]),
TimeSeries('summarize(servers.s2.disk.bytes_used, "1minute", "last", true)', 0, 240, 60, [None, None, None, None]),
TimeSeries('summarize(servers.s2.disk.bytes_free, "1minute", "last", true)', 0, 240, 60, [118, 238, 358, 478])
],
'max' : [
TimeSeries('summarize(servers.s1.disk.bytes_used, "1minute", "max", true)', 0, 240, 60, [59, 119, 179, 239]),
TimeSeries('summarize(servers.s1.disk.bytes_free, "1minute", "max", true)', 0, 240, 60, [0, -60, -120, -180]),
TimeSeries('summarize(servers.s2.disk.bytes_used, "1minute", "max", true)', 0, 240, 60, [None, None, None, None]),
TimeSeries('summarize(servers.s2.disk.bytes_free, "1minute", "max", true)', 0, 240, 60, [118, 238, 358, 478])
],
'min' : [
TimeSeries('summarize(servers.s1.disk.bytes_used, "1minute", "min", true)', 0, 240, 60, [0, 60, 120, 180]),
TimeSeries('summarize(servers.s1.disk.bytes_free, "1minute", "min", true)', 0, 240, 60, [-59, -119, -179, -239]),
TimeSeries('summarize(servers.s2.disk.bytes_used, "1minute", "min", true)', 0, 240, 60, [None, None, None, None]),
TimeSeries('summarize(servers.s2.disk.bytes_free, "1minute", "min", true)', 0, 240, 60, [0, 120, 240, 360])
],
}
for func in expectedResults:
for series in expectedResults[func]:
series.pathExpression = series.name
result = functions.summarize(
{
'startTime': datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'endTime': datetime(1970, 1, 1, 0, 4, 0, 0, pytz.timezone(settings.TIME_ZONE)),
'localOnly': False,
},
seriesList, "1minute", func, True)
self.assertEqual(result, expectedResults[func])
| 50.688301 | 396 | 0.597351 | 24,799 | 185,874 | 4.418485 | 0.031171 | 0.078048 | 0.081589 | 0.07082 | 0.876914 | 0.853213 | 0.810092 | 0.78087 | 0.752843 | 0.726815 | 0 | 0.119313 | 0.239802 | 185,874 | 3,666 | 397 | 50.702128 | 0.656152 | 0.016371 | 0 | 0.56345 | 0 | 0.017845 | 0.190055 | 0.145045 | 0 | 0 | 0 | 0.000273 | 0.097819 | 1 | 0.108063 | false | 0 | 0.003635 | 0.003966 | 0.125909 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
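A quick sanity check on the expected values in the record above (a standalone sketch, not part of the original test file): hitcount weights each datapoint by the series step, while summarize(..., "sum") adds raw values, which is where constants such as 3729888000 and 1770 come from.

# First "1d" hitcount bucket for servers.s1.disk.bytes_used:
# datapoints range(0, 86400, 60) at step 60, each weighted by that step.
assert sum(v * 60 for v in range(0, 86400, 60)) == 3729888000

# First "1minute" bucket for both hitcount (step 1) and summarize "sum":
# sixty one-second datapoints 0..59.
assert sum(range(60)) == 1770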
7d805abebc4ad9cb7fab34d4c5c4a12fc16557b1 | 25 | py | Python | tests/test.py | alanwuha/map-reduce | 50b459839159b8cb3b39020bd034b542535e48c3 | [
"MIT"
] | null | null | null | tests/test.py | alanwuha/map-reduce | 50b459839159b8cb3b39020bd034b542535e48c3 | [
"MIT"
] | null | null | null | tests/test.py | alanwuha/map-reduce | 50b459839159b8cb3b39020bd034b542535e48c3 | [
"MIT"
] | null | null | null | from .context import src
| 12.5 | 24 | 0.8 | 4 | 25 | 5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.16 | 25 | 1 | 25 | 25 | 0.952381 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
7d9279ebf4ad2535bf7112c8640e1d1c728993f2 | 33 | py | Python | limbo/slackclient/__init__.py | dorian1453/limbo | 0c17b6d431ea33dfe96e4b258767636ca05b6e0d | [
"MIT"
] | 3 | 2015-03-11T05:01:55.000Z | 2021-04-29T01:52:52.000Z | limbo/slackclient/__init__.py | dorian1453/limbo | 0c17b6d431ea33dfe96e4b258767636ca05b6e0d | [
"MIT"
] | null | null | null | limbo/slackclient/__init__.py | dorian1453/limbo | 0c17b6d431ea33dfe96e4b258767636ca05b6e0d | [
"MIT"
] | null | null | null | from ._client import SlackClient
| 16.5 | 32 | 0.848485 | 4 | 33 | 6.75 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.121212 | 33 | 1 | 33 | 33 | 0.931034 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
7dbe03973507c3eb995e28c4dd53f1c2dbd70b97 | 2,421 | py | Python | invoke_based/tasks/utils/aws.py | netpyoung/nf.task-flow | 1b8beb231e310d35848326b89944761f2919450a | [
"MIT"
] | null | null | null | invoke_based/tasks/utils/aws.py | netpyoung/nf.task-flow | 1b8beb231e310d35848326b89944761f2919450a | [
"MIT"
] | null | null | null | invoke_based/tasks/utils/aws.py | netpyoung/nf.task-flow | 1b8beb231e310d35848326b89944761f2919450a | [
"MIT"
] | null | null | null | from boto3.session import Session
import json
import hashlib
import requests
ACCESS_KEY=''
SECRET_KEY=''
def upload_db_to_aws(fpath, version):
try:
file_bytes = open(fpath, "rb").read()
m = hashlib.md5()
m.update(file_bytes)
md5 = m.hexdigest().upper()
json_txt = json.dumps({"version": version, "md5": md5})
print(json_txt)
with open('__DB/LATEST_DB.txt', 'w') as f:
f.write(json_txt)
session = Session(aws_access_key_id=ACCESS_KEY,
aws_secret_access_key=SECRET_KEY)
s3 = session.resource('s3')
bucket = s3.Bucket(BUCKET_NAME)
bucket.upload_file(
fpath,
f"game_db/{version}/client.db",
ExtraArgs={'ACL':'public-read'}
)
bucket.upload_file(
'__DB/LATEST_DB.txt',
f"game_db/{version}/LATEST_DB.txt",
ExtraArgs={'ACL':'public-read'}
)
bucket.upload_file(
'__DB/LATEST_DB.txt',
f"game_db/LATEST_DB.txt",
ExtraArgs={'ACL':'public-read'}
)
data = {"version": version, "hash": md5}
return (data, None)
except Exception as e:
return (None, e)
def upload_locale_to_aws(fpath, version):
try:
file_bytes = open(fpath, "rb").read()
m = hashlib.md5()
m.update(file_bytes)
md5 = m.hexdigest().upper()
json_txt = json.dumps({"version": version, "md5": md5})
print(json_txt)
with open('__DB/LATEST_LOCALE.txt', 'w') as f:
f.write(json_txt)
session = Session(aws_access_key_id=ACCESS_KEY,
aws_secret_access_key=SECRET_KEY)
s3 = session.resource('s3')
bucket = s3.Bucket(BUCKET_NAME)
bucket.upload_file(
fpath,
f"locale_db/{version}/locale.db",
ExtraArgs={'ACL':'public-read'}
)
bucket.upload_file(
'__DB/LATEST_LOCALE.txt',
f"locale_db/{version}/LATEST_LOCALE.txt",
ExtraArgs={'ACL':'public-read'}
)
bucket.upload_file(
'__DB/LATEST_LOCALE.txt',
f"locale_db/LATEST_LOCALE.txt",
ExtraArgs={'ACL':'public-read'}
)
data = {"version": version, "hash": md5}
return (data, None)
except Exception as e:
return (None, e)
| 24.958763 | 63 | 0.547708 | 287 | 2,421 | 4.376307 | 0.191638 | 0.050955 | 0.076433 | 0.105096 | 0.863057 | 0.863057 | 0.863057 | 0.83121 | 0.83121 | 0.83121 | 0 | 0.010322 | 0.319703 | 2,421 | 96 | 64 | 25.21875 | 0.752277 | 0 | 0 | 0.694444 | 0 | 0 | 0.176786 | 0.098306 | 0 | 0 | 0 | 0 | 0 | 1 | 0.027778 | false | 0 | 0.055556 | 0 | 0.138889 | 0.027778 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
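upload_db_to_aws and upload_locale_to_aws in the record above differ only in key prefixes and file names. A possible consolidation, sketched under the same (data, error) return convention — the helper name and its parameters are illustrative, not part of the original module:

def _upload_versioned(fpath, version, prefix, remote_name, manifest):
    # Hypothetical shared body for the two upload functions above.
    try:
        file_bytes = open(fpath, "rb").read()
        md5 = hashlib.md5(file_bytes).hexdigest().upper()
        with open(f'__DB/{manifest}', 'w') as f:
            f.write(json.dumps({"version": version, "md5": md5}))
        session = Session(aws_access_key_id=ACCESS_KEY,
                          aws_secret_access_key=SECRET_KEY)
        bucket = session.resource('s3').Bucket(BUCKET_NAME)
        extra = {'ACL': 'public-read'}
        bucket.upload_file(fpath, f"{prefix}/{version}/{remote_name}", ExtraArgs=extra)
        bucket.upload_file(f'__DB/{manifest}', f"{prefix}/{version}/{manifest}", ExtraArgs=extra)
        bucket.upload_file(f'__DB/{manifest}', f"{prefix}/{manifest}", ExtraArgs=extra)
        return ({"version": version, "hash": md5}, None)
    except Exception as e:
        return (None, e)

# upload_db_to_aws(fpath, version) would then reduce to:
#     _upload_versioned(fpath, version, "game_db", "client.db", "LATEST_DB.txt")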
81913bc3ae06a5693a14d13c32ace27310dec35c | 30,172 | py | Python | cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_crypto_macsec_mka_oper.py | tkamata-test/ydk-py | b637e7853a8edbbd31fbc05afa3aa4110b31c5f9 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_crypto_macsec_mka_oper.py | tkamata-test/ydk-py | b637e7853a8edbbd31fbc05afa3aa4110b31c5f9 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_crypto_macsec_mka_oper.py | tkamata-test/ydk-py | b637e7853a8edbbd31fbc05afa3aa4110b31c5f9 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
import re
import collections
from enum import Enum
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION
from ydk.errors import YPYError, YPYModelError
from ydk.providers._importer import _yang_ns
_meta_table = {
'Macsec.Mka.Interfaces.Interface.Session.SessionSummary.OuterTag' : {
'meta_info' : _MetaInfoClass('Macsec.Mka.Interfaces.Interface.Session.SessionSummary.OuterTag',
False,
[
_MetaInfoClassMember('cfi', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' cfi
''',
'cfi',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('etype', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' etype
''',
'etype',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('priority', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' priority
''',
'priority',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('vlan-id', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' vlan id
''',
'vlan_id',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
],
'Cisco-IOS-XR-crypto-macsec-mka-oper',
'outer-tag',
_yang_ns._namespaces['Cisco-IOS-XR-crypto-macsec-mka-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper'
),
},
'Macsec.Mka.Interfaces.Interface.Session.SessionSummary.InnerTag' : {
'meta_info' : _MetaInfoClass('Macsec.Mka.Interfaces.Interface.Session.SessionSummary.InnerTag',
False,
[
_MetaInfoClassMember('cfi', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' cfi
''',
'cfi',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('etype', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' etype
''',
'etype',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('priority', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' priority
''',
'priority',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('vlan-id', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' vlan id
''',
'vlan_id',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
],
'Cisco-IOS-XR-crypto-macsec-mka-oper',
'inner-tag',
_yang_ns._namespaces['Cisco-IOS-XR-crypto-macsec-mka-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper'
),
},
'Macsec.Mka.Interfaces.Interface.Session.SessionSummary' : {
'meta_info' : _MetaInfoClass('Macsec.Mka.Interfaces.Interface.Session.SessionSummary',
False,
[
_MetaInfoClassMember('algo-agility', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Alogorithm Agility
''',
'algo_agility',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('capability', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' MACSec Capability
''',
'capability',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('cipher-str', ATTRIBUTE, 'str' , None, None,
[], [],
''' Cipher String
''',
'cipher_str',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('confidentiality-offset', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Confidentiality Offset
''',
'confidentiality_offset',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('delay-protect', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Delay Protect
''',
'delay_protect',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('inherited-policy', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is Inherited Policy
''',
'inherited_policy',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('inner-tag', REFERENCE_CLASS, 'InnerTag' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper', 'Macsec.Mka.Interfaces.Interface.Session.SessionSummary.InnerTag',
[], [],
''' VLAN Inner TAG
''',
'inner_tag',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' macsec configured interface
''',
'interface_name',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('key-chain', ATTRIBUTE, 'str' , None, None,
[], [],
''' Key Chain name
''',
'key_chain',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('mac-sec-desired', ATTRIBUTE, 'bool' , None, None,
[], [],
''' MACSec Desired
''',
'mac_sec_desired',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('my-mac', ATTRIBUTE, 'str' , None, None,
[], [],
''' My MAC
''',
'my_mac',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('outer-tag', REFERENCE_CLASS, 'OuterTag' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper', 'Macsec.Mka.Interfaces.Interface.Session.SessionSummary.OuterTag',
[], [],
''' VLAN Outer TAG
''',
'outer_tag',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' Policy Name
''',
'policy',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('priority', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Key Server Priority
''',
'priority',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('replay-protect', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Replay Protect
''',
'replay_protect',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('window-size', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Replay Window Size
''',
'window_size',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
],
'Cisco-IOS-XR-crypto-macsec-mka-oper',
'session-summary',
_yang_ns._namespaces['Cisco-IOS-XR-crypto-macsec-mka-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper'
),
},
'Macsec.Mka.Interfaces.Interface.Session.Vp' : {
'meta_info' : _MetaInfoClass('Macsec.Mka.Interfaces.Interface.Session.Vp',
False,
[
_MetaInfoClassMember('cipher-suite', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' SAK Cipher Suite
''',
'cipher_suite',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('latest-an', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Latest SAK AN
''',
'latest_an',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('latest-ki', ATTRIBUTE, 'str' , None, None,
[], [],
''' Latest SAK KI
''',
'latest_ki',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('latest-kn', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Latest SAK KN
''',
'latest_kn',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('latest-rx', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Latest Rx status
''',
'latest_rx',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('latest-tx', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Latest Tx status
''',
'latest_tx',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('my-sci', ATTRIBUTE, 'str' , None, None,
[], [],
''' Local SCI(MAC)
''',
'my_sci',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('old-an', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Old SAK AN
''',
'old_an',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('old-ki', ATTRIBUTE, 'str' , None, None,
[], [],
''' Old SAK KI
''',
'old_ki',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('old-kn', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Old SAK KN
''',
'old_kn',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('old-rx', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Old Rx status
''',
'old_rx',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('old-tx', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Old Tx status
''',
'old_tx',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('retire-time', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' SAK Retire time
''',
'retire_time',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('ssci', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' SSCI of the Local TxSC
''',
'ssci',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('virtual-port-id', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Virtual Port ID
''',
'virtual_port_id',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('wait-time', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' SAK Transmit Wait Time
''',
'wait_time',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
],
'Cisco-IOS-XR-crypto-macsec-mka-oper',
'vp',
_yang_ns._namespaces['Cisco-IOS-XR-crypto-macsec-mka-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper'
),
},
'Macsec.Mka.Interfaces.Interface.Session.Ca.LivePeer' : {
'meta_info' : _MetaInfoClass('Macsec.Mka.Interfaces.Interface.Session.Ca.LivePeer',
False,
[
_MetaInfoClassMember('mi', ATTRIBUTE, 'str' , None, None,
[], [],
''' Member ID
''',
'mi',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('mn', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Message Number
''',
'mn',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('priority', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' KS Priority
''',
'priority',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('sci', ATTRIBUTE, 'str' , None, None,
[], [],
''' Rx SCI
''',
'sci',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('ssci', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Peer SSCI
''',
'ssci',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
],
'Cisco-IOS-XR-crypto-macsec-mka-oper',
'live-peer',
_yang_ns._namespaces['Cisco-IOS-XR-crypto-macsec-mka-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper'
),
},
'Macsec.Mka.Interfaces.Interface.Session.Ca.PotentialPeer' : {
'meta_info' : _MetaInfoClass('Macsec.Mka.Interfaces.Interface.Session.Ca.PotentialPeer',
False,
[
_MetaInfoClassMember('mi', ATTRIBUTE, 'str' , None, None,
[], [],
''' Member ID
''',
'mi',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('mn', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Message Number
''',
'mn',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('priority', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' KS Priority
''',
'priority',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('sci', ATTRIBUTE, 'str' , None, None,
[], [],
''' Rx SCI
''',
'sci',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('ssci', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Peer SSCI
''',
'ssci',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
],
'Cisco-IOS-XR-crypto-macsec-mka-oper',
'potential-peer',
_yang_ns._namespaces['Cisco-IOS-XR-crypto-macsec-mka-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper'
),
},
'Macsec.Mka.Interfaces.Interface.Session.Ca.DormantPeer' : {
'meta_info' : _MetaInfoClass('Macsec.Mka.Interfaces.Interface.Session.Ca.DormantPeer',
False,
[
_MetaInfoClassMember('mi', ATTRIBUTE, 'str' , None, None,
[], [],
''' Member ID
''',
'mi',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('mn', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Message Number
''',
'mn',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('priority', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' KS Priority
''',
'priority',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('sci', ATTRIBUTE, 'str' , None, None,
[], [],
''' Rx SCI
''',
'sci',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('ssci', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Peer SSCI
''',
'ssci',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
],
'Cisco-IOS-XR-crypto-macsec-mka-oper',
'dormant-peer',
_yang_ns._namespaces['Cisco-IOS-XR-crypto-macsec-mka-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper'
),
},
'Macsec.Mka.Interfaces.Interface.Session.Ca' : {
'meta_info' : _MetaInfoClass('Macsec.Mka.Interfaces.Interface.Session.Ca',
False,
[
_MetaInfoClassMember('authenticator', ATTRIBUTE, 'bool' , None, None,
[], [],
''' authenticator
''',
'authenticator',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('ckn', ATTRIBUTE, 'str' , None, None,
[], [],
''' CKN
''',
'ckn',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('dormant-peer', REFERENCE_LIST, 'DormantPeer' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper', 'Macsec.Mka.Interfaces.Interface.Session.Ca.DormantPeer',
[], [],
''' Dormant Peer List
''',
'dormant_peer',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('first-ca', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is First CA
''',
'first_ca',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('is-key-server', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is Key Server
''',
'is_key_server',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('live-peer', REFERENCE_LIST, 'LivePeer' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper', 'Macsec.Mka.Interfaces.Interface.Session.Ca.LivePeer',
[], [],
''' Live Peer List
''',
'live_peer',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('my-mi', ATTRIBUTE, 'str' , None, None,
[], [],
''' Member Identifier
''',
'my_mi',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('my-mn', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Message Number
''',
'my_mn',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('num-live-peers', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of Live Peers
''',
'num_live_peers',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('num-live-peers-responded', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of Live Peers responded
''',
'num_live_peers_responded',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('peer-sci', ATTRIBUTE, 'str' , None, None,
[], [],
''' Peer SCI(MAC)
''',
'peer_sci',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('potential-peer', REFERENCE_LIST, 'PotentialPeer' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper', 'Macsec.Mka.Interfaces.Interface.Session.Ca.PotentialPeer',
[], [],
''' Potential Peer List
''',
'potential_peer',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('status', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Session Status [Secured/Not Secured]
''',
'status',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('status-description', ATTRIBUTE, 'str' , None, None,
[], [],
''' Status Description
''',
'status_description',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
],
'Cisco-IOS-XR-crypto-macsec-mka-oper',
'ca',
_yang_ns._namespaces['Cisco-IOS-XR-crypto-macsec-mka-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper'
),
},
'Macsec.Mka.Interfaces.Interface.Session' : {
'meta_info' : _MetaInfoClass('Macsec.Mka.Interfaces.Interface.Session',
False,
[
_MetaInfoClassMember('ca', REFERENCE_LIST, 'Ca' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper', 'Macsec.Mka.Interfaces.Interface.Session.Ca',
[], [],
''' CA List for a Session
''',
'ca',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('session-summary', REFERENCE_CLASS, 'SessionSummary' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper', 'Macsec.Mka.Interfaces.Interface.Session.SessionSummary',
[], [],
''' Session summary
''',
'session_summary',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
_MetaInfoClassMember('vp', REFERENCE_CLASS, 'Vp' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper', 'Macsec.Mka.Interfaces.Interface.Session.Vp',
[], [],
''' Virtual Pointer Info
''',
'vp',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
],
'Cisco-IOS-XR-crypto-macsec-mka-oper',
'session',
_yang_ns._namespaces['Cisco-IOS-XR-crypto-macsec-mka-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper'
),
},
'Macsec.Mka.Interfaces.Interface' : {
'meta_info' : _MetaInfoClass('Macsec.Mka.Interfaces.Interface',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface Name
''',
'name',
'Cisco-IOS-XR-crypto-macsec-mka-oper', True),
_MetaInfoClassMember('session', REFERENCE_CLASS, 'Session' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper', 'Macsec.Mka.Interfaces.Interface.Session',
[], [],
''' MKA Session Data
''',
'session',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
],
'Cisco-IOS-XR-crypto-macsec-mka-oper',
'interface',
_yang_ns._namespaces['Cisco-IOS-XR-crypto-macsec-mka-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper'
),
},
'Macsec.Mka.Interfaces' : {
'meta_info' : _MetaInfoClass('Macsec.Mka.Interfaces',
False,
[
_MetaInfoClassMember('interface', REFERENCE_LIST, 'Interface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper', 'Macsec.Mka.Interfaces.Interface',
[], [],
''' MKA Data for the Interface
''',
'interface',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
],
'Cisco-IOS-XR-crypto-macsec-mka-oper',
'interfaces',
_yang_ns._namespaces['Cisco-IOS-XR-crypto-macsec-mka-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper'
),
},
'Macsec.Mka' : {
'meta_info' : _MetaInfoClass('Macsec.Mka',
False,
[
_MetaInfoClassMember('interfaces', REFERENCE_CLASS, 'Interfaces' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper', 'Macsec.Mka.Interfaces',
[], [],
''' MKA Data
''',
'interfaces',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
],
'Cisco-IOS-XR-crypto-macsec-mka-oper',
'mka',
_yang_ns._namespaces['Cisco-IOS-XR-crypto-macsec-mka-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper'
),
},
'Macsec' : {
'meta_info' : _MetaInfoClass('Macsec',
False,
[
_MetaInfoClassMember('mka', REFERENCE_CLASS, 'Mka' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper', 'Macsec.Mka',
[], [],
''' MKA Data
''',
'mka',
'Cisco-IOS-XR-crypto-macsec-mka-oper', False),
],
'Cisco-IOS-XR-crypto-macsec-mka-oper',
'macsec',
_yang_ns._namespaces['Cisco-IOS-XR-crypto-macsec-mka-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_macsec_mka_oper'
),
},
}
_meta_table['Macsec.Mka.Interfaces.Interface.Session.SessionSummary.OuterTag']['meta_info'].parent = _meta_table['Macsec.Mka.Interfaces.Interface.Session.SessionSummary']['meta_info']
_meta_table['Macsec.Mka.Interfaces.Interface.Session.SessionSummary.InnerTag']['meta_info'].parent = _meta_table['Macsec.Mka.Interfaces.Interface.Session.SessionSummary']['meta_info']
_meta_table['Macsec.Mka.Interfaces.Interface.Session.Ca.LivePeer']['meta_info'].parent = _meta_table['Macsec.Mka.Interfaces.Interface.Session.Ca']['meta_info']
_meta_table['Macsec.Mka.Interfaces.Interface.Session.Ca.PotentialPeer']['meta_info'].parent = _meta_table['Macsec.Mka.Interfaces.Interface.Session.Ca']['meta_info']
_meta_table['Macsec.Mka.Interfaces.Interface.Session.Ca.DormantPeer']['meta_info'].parent = _meta_table['Macsec.Mka.Interfaces.Interface.Session.Ca']['meta_info']
_meta_table['Macsec.Mka.Interfaces.Interface.Session.SessionSummary']['meta_info'].parent = _meta_table['Macsec.Mka.Interfaces.Interface.Session']['meta_info']
_meta_table['Macsec.Mka.Interfaces.Interface.Session.Vp']['meta_info'].parent = _meta_table['Macsec.Mka.Interfaces.Interface.Session']['meta_info']
_meta_table['Macsec.Mka.Interfaces.Interface.Session.Ca']['meta_info'].parent = _meta_table['Macsec.Mka.Interfaces.Interface.Session']['meta_info']
_meta_table['Macsec.Mka.Interfaces.Interface.Session']['meta_info'].parent = _meta_table['Macsec.Mka.Interfaces.Interface']['meta_info']
_meta_table['Macsec.Mka.Interfaces.Interface']['meta_info'].parent = _meta_table['Macsec.Mka.Interfaces']['meta_info']
_meta_table['Macsec.Mka.Interfaces']['meta_info'].parent = _meta_table['Macsec.Mka']['meta_info']
_meta_table['Macsec.Mka']['meta_info'].parent = _meta_table['Macsec']['meta_info']
| 47.514961 | 245 | 0.47733 | 2,646 | 30,172 | 5.251323 | 0.058957 | 0.121123 | 0.110112 | 0.147391 | 0.828068 | 0.799064 | 0.776538 | 0.757179 | 0.721626 | 0.626125 | 0 | 0.01909 | 0.37326 | 30,172 | 634 | 246 | 47.589905 | 0.715706 | 0 | 0 | 0.530909 | 0 | 0.001818 | 0.388853 | 0.300808 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.014545 | 0 | 0.014545 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
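The parent assignments at the end of the record above link the flat _meta_table into a tree. A short sketch of walking that chain (it assumes only the parent attribute those assignments set; nothing else about _MetaInfoClass is relied on):

# Climb LivePeer -> Ca -> Session -> Interface -> Interfaces -> Mka -> Macsec.
node = _meta_table['Macsec.Mka.Interfaces.Interface.Session.Ca.LivePeer']['meta_info']
while node is not None:
    node = getattr(node, 'parent', None)  # parent was attached above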
819b6215d21515bee2bf6ee1a39962875c0b50b3 | 56 | py | Python | src/ts_analysis/utilities/__init__.py | tedchengf/ts_analysis | b1ed127b5392d177c51bd136107aa0fec4a1759c | [
"MIT"
] | 1 | 2022-01-11T00:19:26.000Z | 2022-01-11T00:19:26.000Z | src/ts_analysis/utilities/__init__.py | tedchengf/ts_analysis | b1ed127b5392d177c51bd136107aa0fec4a1759c | [
"MIT"
] | null | null | null | src/ts_analysis/utilities/__init__.py | tedchengf/ts_analysis | b1ed127b5392d177c51bd136107aa0fec4a1759c | [
"MIT"
] | null | null | null | from . import aux
from . import func
from . import matop | 18.666667 | 19 | 0.75 | 9 | 56 | 4.666667 | 0.555556 | 0.714286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.196429 | 56 | 3 | 19 | 18.666667 | 0.933333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
81d0d785ff4514acf137b58d87005e3f5cf5dd0f | 30 | py | Python | colab_ssh/utils/logger/__init__.py | mushonnip/colab-ssh | a7ec5877e486da1cdf07e38294e450e64b0bf81e | [
"MIT"
] | 623 | 2020-06-22T10:47:07.000Z | 2022-03-31T15:23:08.000Z | colab_ssh/utils/logger/__init__.py | mushonnip/colab-ssh | a7ec5877e486da1cdf07e38294e450e64b0bf81e | [
"MIT"
] | 63 | 2020-07-16T16:15:03.000Z | 2022-03-29T22:54:46.000Z | colab_ssh/utils/logger/__init__.py | mushonnip/colab-ssh | a7ec5877e486da1cdf07e38294e450e64b0bf81e | [
"MIT"
] | 135 | 2020-06-29T18:13:31.000Z | 2022-03-25T10:41:48.000Z | from .logger import get_logger | 30 | 30 | 0.866667 | 5 | 30 | 5 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.1 | 30 | 1 | 30 | 30 | 0.925926 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
81d214843941b30400727b1176a926486b1e574f | 194 | py | Python | util/__init__.py | mlnyang/AE-NeRF | 08778d8c37b06c9cea2346c68318bcb1e6816237 | [
"MIT"
] | null | null | null | util/__init__.py | mlnyang/AE-NeRF | 08778d8c37b06c9cea2346c68318bcb1e6816237 | [
"MIT"
] | null | null | null | util/__init__.py | mlnyang/AE-NeRF | 08778d8c37b06c9cea2346c68318bcb1e6816237 | [
"MIT"
] | null | null | null | # from .iter_counter import IterationCounter
# from .visualizer import Visualizer
# from .metric_tracker import MetricTracker
from .util import *
# from .html import HTML
# from .pca import PCA
| 27.714286 | 44 | 0.78866 | 25 | 194 | 6.04 | 0.48 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.149485 | 194 | 6 | 45 | 32.333333 | 0.915152 | 0.840206 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
81d5fe506aa48c889c5c6f1995ef2dd33924c714 | 73 | py | Python | exercises/palindrome-products/palindrome_products.py | RJTK/python | f9678d629735f75354bbd543eb7f10220a498dae | [
"MIT"
] | 1 | 2021-05-15T19:59:04.000Z | 2021-05-15T19:59:04.000Z | exercises/palindrome-products/palindrome_products.py | RJTK/python | f9678d629735f75354bbd543eb7f10220a498dae | [
"MIT"
] | null | null | null | exercises/palindrome-products/palindrome_products.py | RJTK/python | f9678d629735f75354bbd543eb7f10220a498dae | [
"MIT"
] | 2 | 2018-03-03T08:32:12.000Z | 2019-08-22T11:55:53.000Z | def largest_palindrome():
pass
def smallest_palindrome():
pass
| 10.428571 | 26 | 0.69863 | 8 | 73 | 6.125 | 0.625 | 0.571429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.219178 | 73 | 6 | 27 | 12.166667 | 0.859649 | 0 | 0 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0.5 | 0 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 6 |
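The two stubs above are unimplemented. A minimal brute-force sketch of the usual palindrome-products computation, assuming the caller supplies a factor range (the min_factor/max_factor parameters are illustrative — the original stubs take no arguments):

def palindrome_products(min_factor, max_factor):
    # Map each palindromic product to the factor pairs that produce it.
    products = {}
    for i in range(min_factor, max_factor + 1):
        for j in range(i, max_factor + 1):
            p = i * j
            if str(p) == str(p)[::-1]:
                products.setdefault(p, []).append((i, j))
    return products

# largest_palindrome/smallest_palindrome would then reduce to max()/min()
# over the keys of the returned dict.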
81d6d8a95e3ab41aec48897e16153762e4941865 | 426 | py | Python | Losses/GeometricMeanRelativeAbsoluteError.py | recep-yildirim/Machine-Learning-Algorithms | dc4f4e6939631468246efc7537b1569007fee792 | [
"MIT"
] | 3 | 2021-05-12T13:13:52.000Z | 2022-01-19T19:54:16.000Z | Losses/GeometricMeanRelativeAbsoluteError.py | recep-yildirim/Machine-Learning-Algorithms | dc4f4e6939631468246efc7537b1569007fee792 | [
"MIT"
] | null | null | null | Losses/GeometricMeanRelativeAbsoluteError.py | recep-yildirim/Machine-Learning-Algorithms | dc4f4e6939631468246efc7537b1569007fee792 | [
"MIT"
] | null | null | null | import numpy as np
from Losses import Loss
class GeometricMeanRelativeAbsoluteError(Loss):
def call(self, true_labels, predicted_labels):
result = np.abs(true_labels - predicted_labels) / np.abs(true_labels - np.mean(true_labels))
return np.power(np.prod(result), (1 / true_labels.shape[0]))
def __call__(self, true_labels, predicted_labels):
return self.call(true_labels, predicted_labels) | 35.5 | 100 | 0.734742 | 58 | 426 | 5.137931 | 0.413793 | 0.234899 | 0.255034 | 0.33557 | 0.241611 | 0.241611 | 0.241611 | 0 | 0 | 0 | 0 | 0.005618 | 0.164319 | 426 | 12 | 101 | 35.5 | 0.831461 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.25 | 0.125 | 0.875 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 6 |
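A usage sketch for the loss above (the sample arrays are illustrative, and the Loss base class is assumed to need no constructor arguments):

import numpy as np

gmrae = GeometricMeanRelativeAbsoluteError()
y_true = np.array([3.0, 5.0, 2.5, 7.0])
y_pred = np.array([2.6, 4.2, 3.1, 7.9])
print(gmrae(y_true, y_pred))

# np.prod can underflow or overflow on long series; the same geometric mean
# can be computed more stably in log space:
# np.exp(np.mean(np.log(np.abs(y_true - y_pred) / np.abs(y_true - y_true.mean()))))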
c4943e4b04d06ba5f49272963be970403e803554 | 21,031 | py | Python | retro_data_structures/conversion/part.py | duncathan/Retro-data-structures | 88c0a685e45c9c6935c1bd9d95ba549849947beb | [
"MIT"
] | 2 | 2021-06-18T16:47:00.000Z | 2021-07-06T22:36:32.000Z | retro_data_structures/conversion/part.py | duncathan/Retro-data-structures | 88c0a685e45c9c6935c1bd9d95ba549849947beb | [
"MIT"
] | 1 | 2021-10-01T20:26:01.000Z | 2021-10-01T20:26:01.000Z | retro_data_structures/conversion/part.py | duncathan/Retro-data-structures | 88c0a685e45c9c6935c1bd9d95ba549849947beb | [
"MIT"
] | 5 | 2021-08-23T17:01:01.000Z | 2021-11-20T03:57:14.000Z | import copy
from retro_data_structures.conversion.asset_converter import AssetConverter, Resource, AssetDetails
from retro_data_structures.game_check import Game
def upgrade(data, converter: AssetConverter, source_game: Game):
if source_game < Game.ECHOES <= converter.target_game:
for element in data["elements"]:
if element["type"] == "KSSM" and element["body"]["magic"] != "NONE":
for spawn in element["body"]["value"]["spawns"]:
for t in spawn["v2"]:
t["type"] = "PART"
def downgrade(data, converter: AssetConverter, source_game: Game):
if converter.target_game <= Game.PRIME < source_game:
for element in data["elements"][:]:
if element["type"] == "KSSM" and element["body"]["magic"] != "NONE":
for spawn in element["body"]["value"]["spawns"]:
for t in spawn["v2"]:
t["type"] = 0
if element["type"] == "RDOP":
data["elements"].remove(element)
if element["type"] == "XTAD":
data["elements"].remove(element)
if element["type"] == "INDM":
data["elements"].remove(element)
if element["type"] == "VMPC":
data["elements"].remove(element)
if element["type"] == "EMTR":
if element["body"]["type"] == "SEMR":
if (
element["body"]["body"]["a"]["type"] == "RNDV"
and element["body"]["body"]["b"]["type"] == "RNDV"
):
element["body"]["type"] = "SPHE"
element["body"]["body"] = {
"a": {
"type": "RTOV",
"body" : {
"type": "CNST",
"body": 0,
},
},
"b": element["body"]["body"]["a"]["body"],
"c": {
"type": "RAND",
"body": {
"a": {
"type": "CNST",
"body": 0,
},
"b": element["body"]["body"]["b"]["body"],
},
},
}
if (
element["body"]["body"]["a"]["type"] == "RNDV"
and element["body"]["body"]["b"]["type"] == "CNST"
):
element["body"]["type"] = "SPHE"
element["body"]["body"] = {
"a": {
"type": "RTOV",
"body": {
"type": "CNST",
"body": 0
}
},
"b": element["body"]["body"]["a"]["body"],
"c": {
"type": "RAND",
"body": {
"a": {
"type": "CNST",
"body": 0
},
"b": element["body"]["body"]["b"]["body"]["a"]
}
}
}
if element["body"]["type"] == "ELPS":
element["body"]["type"] = "SPHE"
element["body"]["body"]["b"] = element["body"]["body"]["b"]["body"]["a"]
element["body"]["body"]["c"] = element["body"]["body"]["d"].copy()
del element["body"]["body"]["d"]
del element["body"]["body"]["e"]
if element["type"] == "COLR":
if element["body"]["type"] == "MDAO":
if (
element["body"]["body"]["a"]["type"] == "KEYE"
and element["body"]["body"]["b"]["type"] == "KEYP"
):
org_colr_mado_a_keye = element["body"]["body"]["a"]["body"]["keys"]
new_colr_cnst_a_keyp_a = copy.deepcopy(element["body"]["body"]["a"])
new_colr_cnst_a_keyp_b = copy.deepcopy(element["body"]["body"]["a"])
new_colr_cnst_a_keyp_c = copy.deepcopy(element["body"]["body"]["a"])
new_colr_cnst_a_keyp_d = copy.deepcopy(element["body"]["body"]["a"])
new_colr_cnst_a_keyp_a["body"]["keys"] = [None] * len(element["body"]["body"]["a"]["body"]["keys"])
new_colr_cnst_a_keyp_b["body"]["keys"] = [None] * len(element["body"]["body"]["a"]["body"]["keys"])
new_colr_cnst_a_keyp_c["body"]["keys"] = [None] * len(element["body"]["body"]["a"]["body"]["keys"])
new_colr_cnst_a_keyp_d["body"]["keys"] = [None] * len(element["body"]["body"]["a"]["body"]["keys"])
element["body"]["body"]["a"]["type"] = "CNST"
for i,key in enumerate(org_colr_mado_a_keye):
new_colr_cnst_a_keyp_a["body"]["keys"][i] = key[0]
new_colr_cnst_a_keyp_b["body"]["keys"][i] = key[1]
new_colr_cnst_a_keyp_c["body"]["keys"][i] = key[2]
new_colr_cnst_a_keyp_d["body"]["keys"][i] = key[3]
element["body"]["body"]["a"]["body"] = {
"a": new_colr_cnst_a_keyp_a,
"b": new_colr_cnst_a_keyp_b,
"c": new_colr_cnst_a_keyp_c,
"d": {
"type": "MULT",
"body": {
"a": new_colr_cnst_a_keyp_d,
"b": element["body"]["body"]["b"],
},
},
}
element["body"] = element["body"]["body"]["a"]
if element["body"]["type"] == "MULT":
if (
element["body"]["body"]["a"]["type"] == "PULS"
and element["body"]["body"]["b"]["type"] == "KEYP"
):
org_colr_mult_b_keyp = element["body"]["body"]["b"]["body"]["keys"]
new_colr_a_c_mult_b_keyp_a = copy.deepcopy(element["body"]["body"]["b"])
new_colr_a_c_mult_b_keyp_b = copy.deepcopy(element["body"]["body"]["b"])
new_colr_a_c_mult_b_keyp_c = copy.deepcopy(element["body"]["body"]["b"])
new_colr_a_c_mult_b_keyp_d = copy.deepcopy(element["body"]["body"]["b"])
num_keys = len(element["body"]["body"]["b"]["body"]["keys"])
new_colr_a_c_mult_b_keyp_a["body"]["keys"] = [None] * num_keys
new_colr_a_c_mult_b_keyp_b["body"]["keys"] = [None] * num_keys
new_colr_a_c_mult_b_keyp_c["body"]["keys"] = [None] * num_keys
new_colr_a_c_mult_b_keyp_d["body"]["keys"] = [None] * num_keys
for i, key in enumerate(org_colr_mult_b_keyp):
new_colr_a_c_mult_b_keyp_a["body"]["keys"][i] = key[0]
new_colr_a_c_mult_b_keyp_b["body"]["keys"][i] = key[1]
new_colr_a_c_mult_b_keyp_c["body"]["keys"][i] = key[2]
new_colr_a_c_mult_b_keyp_d["body"]["keys"][i] = key[3]
if (
element["body"]["body"]["a"]["body"]["c"]["type"] == "KEYP"
and element["body"]["body"]["a"]["body"]["d"]["type"] == "KEYP"
):
org_colr_mult_a_c_keyp = element["body"]["body"]["a"]["body"]["c"]["body"]["keys"]
new_colr_a_c_mult_a_keyp_c_a = copy.deepcopy(element["body"]["body"]["a"]["body"]["c"])
new_colr_a_c_mult_a_keyp_c_b = copy.deepcopy(element["body"]["body"]["a"]["body"]["c"])
new_colr_a_c_mult_a_keyp_c_c = copy.deepcopy(element["body"]["body"]["a"]["body"]["c"])
new_colr_a_c_mult_a_keyp_c_d = copy.deepcopy(element["body"]["body"]["a"]["body"]["c"])
new_colr_a_c_mult_a_keyp_c_a["body"]["keys"] = [None] * len(org_colr_mult_a_c_keyp)
new_colr_a_c_mult_a_keyp_c_b["body"]["keys"] = [None] * len(org_colr_mult_a_c_keyp)
new_colr_a_c_mult_a_keyp_c_c["body"]["keys"] = [None] * len(org_colr_mult_a_c_keyp)
new_colr_a_c_mult_a_keyp_c_d["body"]["keys"] = [None] * len(org_colr_mult_a_c_keyp)
element["body"]["body"]["a"]["body"]["c"]["type"] = "CNST"
for i, key in enumerate(org_colr_mult_a_c_keyp):
new_colr_a_c_mult_a_keyp_c_a["body"]["keys"][i] = key[0]
new_colr_a_c_mult_a_keyp_c_b["body"]["keys"][i] = key[1]
new_colr_a_c_mult_a_keyp_c_c["body"]["keys"][i] = key[2]
new_colr_a_c_mult_a_keyp_c_d["body"]["keys"][i] = key[3]
element["body"]["body"]["a"]["body"]["c"]["body"] = {
"a": {
"type": "MULT",
"body": {
"a": new_colr_a_c_mult_a_keyp_c_a,
"b": new_colr_a_c_mult_b_keyp_a,
},
},
"b": {
"type": "MULT",
"body": {
"a": new_colr_a_c_mult_a_keyp_c_b,
"b": new_colr_a_c_mult_b_keyp_b,
},
},
"c": {
"type": "MULT",
"body": {
"a": new_colr_a_c_mult_a_keyp_c_c,
"b": new_colr_a_c_mult_b_keyp_c,
},
},
"d": {
"type": "MULT",
"body": {
"a": new_colr_a_c_mult_a_keyp_c_d,
"b": new_colr_a_c_mult_b_keyp_d,
},
},
}
# ================================================
org_colr_mult_a_d_keyp = element["body"]["body"]["a"]["body"]["d"]["body"]["keys"]
new_colr_a_c_mult_a_keyp_d_a = copy.deepcopy(element["body"]["body"]["a"]["body"]["d"])
new_colr_a_c_mult_a_keyp_d_b = copy.deepcopy(element["body"]["body"]["a"]["body"]["d"])
new_colr_a_c_mult_a_keyp_d_c = copy.deepcopy(element["body"]["body"]["a"]["body"]["d"])
new_colr_a_c_mult_a_keyp_d_d = copy.deepcopy(element["body"]["body"]["a"]["body"]["d"])
new_colr_a_c_mult_a_keyp_d_a["body"]["keys"] = [None] * len(org_colr_mult_a_d_keyp)
new_colr_a_c_mult_a_keyp_d_b["body"]["keys"] = [None] * len(org_colr_mult_a_d_keyp)
new_colr_a_c_mult_a_keyp_d_c["body"]["keys"] = [None] * len(org_colr_mult_a_d_keyp)
new_colr_a_c_mult_a_keyp_d_d["body"]["keys"] = [None] * len(org_colr_mult_a_d_keyp)
element["body"]["body"]["a"]["body"]["d"]["type"] = "CNST"
for i, key in enumerate(org_colr_mult_a_d_keyp):
new_colr_a_c_mult_a_keyp_d_a["body"]["keys"][i] = key[0]
new_colr_a_c_mult_a_keyp_d_b["body"]["keys"][i] = key[1]
new_colr_a_c_mult_a_keyp_d_c["body"]["keys"][i] = key[2]
new_colr_a_c_mult_a_keyp_d_d["body"]["keys"][i] = key[3]
element["body"]["body"]["a"]["body"]["d"]["body"] = {
"a": {
"type": "MULT",
"body": {
"a": new_colr_a_c_mult_a_keyp_d_a,
"b": new_colr_a_c_mult_b_keyp_a,
},
},
"b": {
"type": "MULT",
"body": {
"a": new_colr_a_c_mult_a_keyp_d_b,
"b": new_colr_a_c_mult_b_keyp_b,
},
},
"c": {
"type": "MULT",
"body": {
"a": new_colr_a_c_mult_a_keyp_d_c,
"b": new_colr_a_c_mult_b_keyp_c,
},
},
"d": {
"type": "MULT",
"body": {
"a": new_colr_a_c_mult_a_keyp_d_d,
"b": new_colr_a_c_mult_b_keyp_d,
},
},
}
else:
element["body"]["body"]["a"]["body"]["c"]["type"] = "CNST"
element["body"]["body"]["a"]["body"]["c"]["body"] = {
"a": {
"type": "MULT",
"body": {
"a": {
"type": "CNST",
"body": element["body"]["body"]["a"]["body"]["c"]["body"]["a"]["body"],
},
"b": new_colr_a_c_mult_b_keyp_a,
},
},
"b": {
"type": "MULT",
"body": {
"a": {
"type": "CNST",
"body": element["body"]["body"]["a"]["body"]["c"]["body"]["b"]["body"],
},
"b": new_colr_a_c_mult_b_keyp_b,
},
},
"c": {
"type": "MULT",
"body": {
"a": {
"type": "CNST",
"body": element["body"]["body"]["a"]["body"]["c"]["body"]["c"]["body"],
},
"b": new_colr_a_c_mult_b_keyp_c,
},
},
"d": {
"type": "MULT",
"body": {
"a": {
"type": "CNST",
"body": element["body"]["body"]["a"]["body"]["c"]["body"]["d"]["body"],
},
"b": new_colr_a_c_mult_b_keyp_d,
},
},
}
element["body"]["body"]["a"]["body"]["d"]["type"] = "CNST"
element["body"]["body"]["a"]["body"]["d"]["body"] = {
"a": {
"type": "MULT",
"body": {
"a": {
"type": "CNST",
"body": element["body"]["body"]["a"]["body"]["d"]["body"]["a"]["body"],
},
"b": new_colr_a_c_mult_b_keyp_a,
},
},
"b": {
"type": "MULT",
"body": {
"a": {
"type": "CNST",
"body": element["body"]["body"]["a"]["body"]["d"]["body"]["b"]["body"],
},
"b": new_colr_a_c_mult_b_keyp_b,
},
},
"c": {
"type": "MULT",
"body": {
"a": {
"type": "CNST",
"body": element["body"]["body"]["a"]["body"]["d"]["body"]["c"]["body"],
},
"b": new_colr_a_c_mult_b_keyp_c,
},
},
"d": {
"type": "MULT",
"body": {
"a": {
"type": "CNST",
"body": element["body"]["body"]["a"]["body"]["d"]["body"]["d"]["body"],
},
"b": new_colr_a_c_mult_b_keyp_d,
},
},
}
element["body"] = element["body"]["body"]["a"]
if element["type"] == "ADV1":
if element["body"]["type"] == "KPIN":
element["body"] = element["body"]["body"]
return data
def convert(data: Resource, details: AssetDetails, converter: AssetConverter):
source_game = details.original_game
if source_game.value < converter.target_game.value:
upgrade(data, converter, source_game)
elif source_game.value > converter.target_game.value:
downgrade(data, converter, source_game)
# convert asset references
for element in data["elements"]:
if element["type"] in ("TEXR", "TIND"):
body = element["body"]["body"]
if body is not None:
if body["id"] is not None:
body["id"] = converter.convert_id(body["id"], source_game)
if element["type"] == "KSSM" and element["body"]["magic"] != "NONE":
for spawn in element["body"]["value"]["spawns"]:
for t in spawn["v2"]:
t["id"] = converter.convert_id(t["id"], source_game)
if element["type"] in ("SSWH", "PMDL", "SELC", "IDTS", "ICTS", "IITS"):
body = element["body"]
if body["body"] is not None and source_game.is_valid_asset_id(body["body"]):
body["body"] = converter.convert_id(body["body"], source_game)
return data
class PARTConverter(dict):
def __missing__(self, key: Game):
if isinstance(key, Game):
return convert
else:
raise KeyError(key)
CONVERTERS = PARTConverter()
| 54.911227 | 123 | 0.329323 | 1,863 | 21,031 | 3.37037 | 0.057434 | 0.154165 | 0.167224 | 0.086001 | 0.833254 | 0.819239 | 0.794394 | 0.721452 | 0.683548 | 0.641503 | 0 | 0.002471 | 0.518853 | 21,031 | 382 | 124 | 55.054974 | 0.618045 | 0.003471 | 0 | 0.441595 | 0 | 0 | 0.105178 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.011396 | false | 0 | 0.008547 | 0 | 0.031339 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
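A usage sketch for the converter table that closes the record above — indexing CONVERTERS with any Game key falls through __missing__ to the module-level convert(); the data, details, and asset_converter arguments stand in for whatever the surrounding conversion pipeline supplies:

from retro_data_structures.game_check import Game

handler = CONVERTERS[Game.ECHOES]  # __missing__ returns convert for Game keys
# converted = handler(data, details, asset_converter)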
c4ba6be963f594a0882f4c18ef20916d0f27cfb5 | 39 | py | Python | app/models/__init__.py | lucademian/spotifycards | 86bb76b491f5d66f563294ac5b63d97fa5306284 | [
"MIT"
] | null | null | null | app/models/__init__.py | lucademian/spotifycards | 86bb76b491f5d66f563294ac5b63d97fa5306284 | [
"MIT"
] | null | null | null | app/models/__init__.py | lucademian/spotifycards | 86bb76b491f5d66f563294ac5b63d97fa5306284 | [
"MIT"
] | null | null | null | """Models package"""
from . import user | 19.5 | 20 | 0.692308 | 5 | 39 | 5.4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.128205 | 39 | 2 | 21 | 19.5 | 0.794118 | 0.358974 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
f200740dc34d8bca9d0e1d3672b8b010b761e013 | 911 | py | Python | edabit/very_hard/advanced_sort/test_advanced_sort.py | ticotheps/practice_problems | 943c5ab9eebeac4e5cf162adbdc681119603dc36 | [
"MIT"
] | null | null | null | edabit/very_hard/advanced_sort/test_advanced_sort.py | ticotheps/practice_problems | 943c5ab9eebeac4e5cf162adbdc681119603dc36 | [
"MIT"
] | null | null | null | edabit/very_hard/advanced_sort/test_advanced_sort.py | ticotheps/practice_problems | 943c5ab9eebeac4e5cf162adbdc681119603dc36 | [
"MIT"
] | null | null | null | import unittest
from advanced_sort import advanced_sort
class Test(unittest.TestCase):
def test_advanced_sort(self):
self.assertEqual(advanced_sort([1,2,1,2]) , [[1,1],[2,2]])
self.assertEqual(advanced_sort([2,1,2,1]) , [[2,2],[1,1]])
self.assertEqual(advanced_sort([3,2,1,3,2,1]) , [[3,3],[2,2],[1,1]])
self.assertEqual(advanced_sort([5,5,4,3,4,4]) , [[5,5],[4,4,4],[3]])
self.assertEqual(advanced_sort([80,80,4,60,60,3]),[[80,80],[4],[60,60],[3]])
self.assertEqual(advanced_sort(['c','c','b','c','b',1,1]),[['c','c','c'],['b','b'],[1,1]])
self.assertEqual(advanced_sort([1234, 1235, 1234, 1235, 1236, 1235]),[[1234, 1234],[1235, 1235, 1235],[1236]])
self.assertEqual(advanced_sort(['1234', '1235', '1234', '1235', '1236', '1235']),[['1234', '1234'],['1235', '1235', '1235'],['1236']])
if __name__ == "__main__":
unittest.main() | 56.9375 | 142 | 0.575192 | 144 | 911 | 3.5 | 0.180556 | 0.261905 | 0.365079 | 0.428571 | 0.573413 | 0.464286 | 0.420635 | 0.420635 | 0.297619 | 0.297619 | 0 | 0.204342 | 0.140505 | 911 | 16 | 143 | 56.9375 | 0.439336 | 0 | 0 | 0 | 0 | 0 | 0.072368 | 0 | 0 | 0 | 0 | 0 | 0.571429 | 1 | 0.071429 | false | 0 | 0.142857 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
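The assertions above pin down the contract of advanced_sort: equal values are grouped together, and groups appear in order of first occurrence. A minimal implementation consistent with those tests (a sketch — the actual advanced_sort module is not shown in this record), relying on dict insertion order (Python 3.7+):

def advanced_sort(lst):
    groups = {}
    for item in lst:
        groups.setdefault(item, []).append(item)  # group by value, keep first-seen order
    return list(groups.values())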
f2053315c07d95b148c00bf6011ab87adcfa1848 | 155 | py | Python | petrophys/vshale/clavier.py | smiles21/petrophys | 03ccf2c65cd472013596b293cb2daaa95a1a05a6 | [
"MIT"
] | null | null | null | petrophys/vshale/clavier.py | smiles21/petrophys | 03ccf2c65cd472013596b293cb2daaa95a1a05a6 | [
"MIT"
] | null | null | null | petrophys/vshale/clavier.py | smiles21/petrophys | 03ccf2c65cd472013596b293cb2daaa95a1a05a6 | [
"MIT"
] | null | null | null | from .linear import linear
def clavier(df, gamma_ray_col):
gr_index = linear(df, gamma_ray_col)
return 1.7 - ((3.38 - (gr_index + 0.7) ** 2)) ** 0.5
| 22.142857 | 54 | 0.645161 | 29 | 155 | 3.241379 | 0.655172 | 0.148936 | 0.212766 | 0.276596 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.079365 | 0.187097 | 155 | 6 | 55 | 25.833333 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.25 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 6 |
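A worked value for the Clavier formula above (standalone; a raw gamma-ray index stands in for the package's linear() helper here):

igr = 0.5
vsh = 1.7 - (3.38 - (igr + 0.7) ** 2) ** 0.5
print(round(vsh, 3))  # 0.307 — below the linear estimate of 0.5, as expected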
f208bd01be99e2c11b4f552fa2af015a7c1c7d01 | 3,886 | py | Python | tests/test_admin.py | Microdisseny/django-adminfilters | ec7f3ed5b0730d394bc5d14ce85765dbbbe2a49e | [
"BSD-1-Clause"
] | 17 | 2015-03-03T23:15:31.000Z | 2022-03-02T16:55:18.000Z | tests/test_admin.py | Microdisseny/django-adminfilters | ec7f3ed5b0730d394bc5d14ce85765dbbbe2a49e | [
"BSD-1-Clause"
] | 9 | 2015-11-10T15:30:27.000Z | 2022-02-12T20:55:39.000Z | tests/test_admin.py | Microdisseny/django-adminfilters | ec7f3ed5b0730d394bc5d14ce85765dbbbe2a49e | [
"BSD-1-Clause"
] | 14 | 2015-04-07T13:52:42.000Z | 2022-02-03T17:54:42.000Z | from django.contrib.auth.models import User
from django.test import RequestFactory, TestCase
from django.urls import reverse
class AdminFilterTests(TestCase):
fixtures = ['demoproject']
def setUp(self):
# Every test needs access to the request factory.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='sax', email='sax@sax.com', password='top_secret')
self.user.is_superuser = True
self.user.save()
def test_admin_filter_RelatedFieldRadioFilter(self):
"""
        test if the admin page with RelatedFieldRadioFilter filters loads successfully
"""
self.assertTrue(self.client.login(
username='sax', password='top_secret'))
response = self.client.get(
reverse('admin:demoapp_demomodel_relatedfieldradiofilter_changelist'))
self.assertEqual(response.status_code, 200)
response = self.client.get(reverse(
'admin:demoapp_demomodel_relatedfieldradiofilter_changelist') + "?demo_related__id__exact=1")
self.assertEqual(response.status_code, 200)
response = self.client.get(reverse('admin:demoapp_demomodel_relatedfieldradiofilter_changelist') +
"?demo_related__id__exact=1&demo_related__id__exact=2")
self.assertEqual(response.status_code, 200)
def test_admin_RelatedFieldCheckbox(self):
"""
        test if the admin page with RelatedFieldCheckbox filters loads successfully
"""
self.assertTrue(self.client.login(
username='sax', password='top_secret'))
response = self.client.get(
reverse('admin:demoapp_demomodel_relatedfieldcheckboxfilter_changelist'))
self.assertEqual(response.status_code, 200)
response = self.client.get(reverse(
'admin:demoapp_demomodel_relatedfieldcheckboxfilter_changelist') + "?demo_related__id__exact=1")
self.assertEqual(response.status_code, 200)
def test_admin_UnionFieldListFilter(self):
"""
        test if the admin page with UnionFieldListFilter filters loads successfully
"""
self.assertTrue(self.client.login(
username='sax', password='top_secret'))
response = self.client.get(
reverse('admin:demoapp_demomodel_unionfieldlistfilter_changelist'))
self.assertEqual(response.status_code, 200)
response = self.client.get(reverse(
'admin:demoapp_demomodel_unionfieldlistfilter_changelist') + "?demo_related_filter=1%2C2")
self.assertEqual(response.status_code, 200)
def test_admin_IntersectionFieldListFilter(self):
"""
        test if the admin page with IntersectionFieldListFilter filter loads successfully
"""
self.assertTrue(self.client.login(
username='sax', password='top_secret'))
response = self.client.get(
reverse('admin:demoapp_demomodel_intersectionfieldlistfilter_changelist'))
self.assertEqual(response.status_code, 200)
response = self.client.get(reverse(
'admin:demoapp_demomodel_intersectionfieldlistfilter_changelist') + "?demo_related_filter=1%2C2")
self.assertEqual(response.status_code, 200)
def test_admin_TextFieldFilter(self):
"""
        test if the admin page with TextFieldFilter filter loads successfully
"""
self.assertTrue(self.client.login(
username='sax', password='top_secret'))
response = self.client.get(
reverse('admin:demoapp_demomodel_intersectionfieldlistfilter_changelist'))
self.assertEqual(response.status_code, 200)
response = self.client.get(reverse(
'admin:demoapp_demomodel_intersectionfieldlistfilter_changelist') + "?name=ccccc")
self.assertEqual(response.status_code, 200)
| 46.261905 | 109 | 0.689655 | 394 | 3,886 | 6.581218 | 0.187817 | 0.061705 | 0.076359 | 0.089086 | 0.78789 | 0.78789 | 0.774007 | 0.743926 | 0.743926 | 0.720787 | 0 | 0.014131 | 0.216933 | 3,886 | 83 | 110 | 46.819277 | 0.837989 | 0.112712 | 0 | 0.559322 | 0 | 0 | 0.276411 | 0.243097 | 0 | 0 | 0 | 0 | 0.271186 | 1 | 0.101695 | false | 0.101695 | 0.050847 | 0 | 0.186441 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 6 |
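For orientation, a minimal sketch of the ModelAdmin wiring these tests exercise; the import path and filter pairings are assumptions inferred from the changelist URLs, not verified against the demo project:

from django.contrib import admin
from adminfilters.filters import RelatedFieldRadioFilter, UnionFieldListFilter

class DemoModelAdmin(admin.ModelAdmin):
    list_filter = (
        ('demo_related', RelatedFieldRadioFilter),  # radio-style related-field filter
        ('demo_related', UnionFieldListFilter),     # OR-combines the selected values
    )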
481aed7dd984b03d32b9c2fcc3be993465e5fbaf | 150 | py | Python | hecuba_py/tests/app/result.py | cugni/hecuba | 5f4654d068dff0ef641d37d98bdac46e539fea48 | [
"Apache-2.0"
] | 6 | 2017-11-09T12:59:54.000Z | 2022-02-03T14:04:29.000Z | hecuba_py/tests/app/result.py | cugni/hecuba | 5f4654d068dff0ef641d37d98bdac46e539fea48 | [
"Apache-2.0"
] | 150 | 2017-10-18T09:24:46.000Z | 2021-11-02T13:28:50.000Z | hecuba_py/tests/app/result.py | cugni/hecuba | 5f4654d068dff0ef641d37d98bdac46e539fea48 | [
"Apache-2.0"
] | 3 | 2017-11-10T18:56:46.000Z | 2021-11-02T10:35:14.000Z | from hecuba.storageobj import StorageObj
class Result(StorageObj):
'''
@ClassField instances dict<<word:str>,instances:int>
'''
pass
| 18.75 | 56 | 0.686667 | 16 | 150 | 6.4375 | 0.8125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.193333 | 150 | 7 | 57 | 21.428571 | 0.85124 | 0.346667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 6 |
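A hedged sketch of how a Hecuba StorageObj subclass like this is typically used; make_persistent() follows Hecuba's documented pattern, but the storage name and field access below are assumptions:

# 'my_app.result' is a hypothetical keyspace.table name.
r = Result()
r.make_persistent('my_app.result')  # back the object with Cassandra storage
r.instances['word'] = 3             # the dict<<word:str>, instances:int> field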
485d0851bdfb83f8314c870bf54a745abacd9a31 | 31 | py | Python | main.py | mtracy/dockerpy | 07183f65b3450f8f1cc5f0d8638d8a0c17a690ae | [
"Apache-2.0"
] | null | null | null | main.py | mtracy/dockerpy | 07183f65b3450f8f1cc5f0d8638d8a0c17a690ae | [
"Apache-2.0"
] | null | null | null | main.py | mtracy/dockerpy | 07183f65b3450f8f1cc5f0d8638d8a0c17a690ae | [
"Apache-2.0"
] | null | null | null | print("hello docker world!!!")
| 15.5 | 30 | 0.677419 | 4 | 31 | 5.25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.096774 | 31 | 1 | 31 | 31 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0.677419 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 6 |
487721698ced2f8a8914eb7d4cfc08d38595b80c | 281 | py | Python | PythonExercicios/ex014.py | lordvinick/Python | c03fd08d4c204104bf0196b0bd129427fd2067ae | [
"MIT"
] | null | null | null | PythonExercicios/ex014.py | lordvinick/Python | c03fd08d4c204104bf0196b0bd129427fd2067ae | [
"MIT"
] | null | null | null | PythonExercicios/ex014.py | lordvinick/Python | c03fd08d4c204104bf0196b0bd129427fd2067ae | [
"MIT"
] | null | null | null | print('\033[33m=\033[m' * 12, '\033[1;31mTemperature Converter\033[m', '\033[33m=\033[m' * 12)
c = float(input('\033[7;30mEnter the temperature in °C: \033[m'))
f = (c * 9 / 5) + 32  # Celsius to Fahrenheit
print('\033[4;33mThe temperature of \033[4;31m{}°C \033[4;33mcorresponds to \033[4;31m{}°F!'.format(c, f))
| 56.2 | 103 | 0.647687 | 59 | 281 | 3.135593 | 0.457627 | 0.086486 | 0.097297 | 0.108108 | 0.12973 | 0 | 0 | 0 | 0 | 0 | 0 | 0.257813 | 0.088968 | 281 | 4 | 104 | 70.25 | 0.453125 | 0 | 0 | 0 | 0 | 0.25 | 0.697509 | 0.074733 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.5 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 6 |
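A quick sanity check of the conversion formula used above, at a point where the floating-point arithmetic is exact:

# 100 °C is the boiling point of water, 212 °F.
assert (100 * 9 / 5) + 32 == 212.0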
6fa9f30cb9b087ef5e948a9dd241b9e2c5621cec | 16,299 | py | Python | src/securityinsight/azext_sentinel/generated/action.py | Mannan2812/azure-cli-extensions | e2b34efe23795f6db9c59100534a40f0813c3d95 | [
"MIT"
] | 207 | 2017-11-29T06:59:41.000Z | 2022-03-31T10:00:53.000Z | src/securityinsight/azext_sentinel/generated/action.py | Mannan2812/azure-cli-extensions | e2b34efe23795f6db9c59100534a40f0813c3d95 | [
"MIT"
] | 4,061 | 2017-10-27T23:19:56.000Z | 2022-03-31T23:18:30.000Z | src/securityinsight/azext_sentinel/generated/action.py | Mannan2812/azure-cli-extensions | e2b34efe23795f6db9c59100534a40f0813c3d95 | [
"MIT"
] | 802 | 2017-10-11T17:36:26.000Z | 2022-03-31T22:24:32.000Z | # --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=protected-access
import argparse
from collections import defaultdict
from knack.util import CLIError
class AddFusionAlertRule(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.fusion_alert_rule = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'alert-rule-template-name':
d['alert_rule_template_name'] = v[0]
elif kl == 'enabled':
d['enabled'] = v[0]
elif kl == 'etag':
d['etag'] = v[0]
d['kind'] = 'Fusion'
return d
class AddMicrosoftSecurityIncidentCreationAlertRule(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.microsoft_security_incident_creation_alert_rule = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'display-names-filter':
d['display_names_filter'] = v
elif kl == 'display-names-exclude-filter':
d['display_names_exclude_filter'] = v
elif kl == 'product-filter':
d['product_filter'] = v[0]
elif kl == 'severities-filter':
d['severities_filter'] = v
elif kl == 'alert-rule-template-name':
d['alert_rule_template_name'] = v[0]
elif kl == 'description':
d['description'] = v[0]
elif kl == 'display-name':
d['display_name'] = v[0]
elif kl == 'enabled':
d['enabled'] = v[0]
elif kl == 'etag':
d['etag'] = v[0]
d['kind'] = 'MicrosoftSecurityIncidentCreation'
return d
class AddScheduledAlertRule(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.scheduled_alert_rule = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'query':
d['query'] = v[0]
elif kl == 'query-frequency':
d['query_frequency'] = v[0]
elif kl == 'query-period':
d['query_period'] = v[0]
elif kl == 'severity':
d['severity'] = v[0]
elif kl == 'trigger-operator':
d['trigger_operator'] = v[0]
elif kl == 'trigger-threshold':
d['trigger_threshold'] = v[0]
elif kl == 'alert-rule-template-name':
d['alert_rule_template_name'] = v[0]
elif kl == 'description':
d['description'] = v[0]
elif kl == 'display-name':
d['display_name'] = v[0]
elif kl == 'enabled':
d['enabled'] = v[0]
elif kl == 'suppression-duration':
d['suppression_duration'] = v[0]
elif kl == 'suppression-enabled':
d['suppression_enabled'] = v[0]
elif kl == 'tactics':
d['tactics'] = v
elif kl == 'etag':
d['etag'] = v[0]
d['kind'] = 'Scheduled'
return d
class AddIncidentInfo(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.incident_info = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'incident-id':
d['incident_id'] = v[0]
elif kl == 'severity':
d['severity'] = v[0]
elif kl == 'title':
d['title'] = v[0]
elif kl == 'relation-name':
d['relation_name'] = v[0]
return d
class AddAadDataConnector(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.aad_data_connector = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'tenant-id':
d['tenant_id'] = v[0]
elif kl == 'state':
d['state'] = v[0]
elif kl == 'etag':
d['etag'] = v[0]
d['kind'] = 'AzureActiveDirectory'
return d
class AddAatpDataConnector(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.aatp_data_connector = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'tenant-id':
d['tenant_id'] = v[0]
elif kl == 'state':
d['state'] = v[0]
elif kl == 'etag':
d['etag'] = v[0]
d['kind'] = 'AzureAdvancedThreatProtection'
return d
class AddAscDataConnector(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.asc_data_connector = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'subscription-id':
d['subscription_id'] = v[0]
elif kl == 'state':
d['state'] = v[0]
elif kl == 'etag':
d['etag'] = v[0]
d['kind'] = 'AzureSecurityCenter'
return d
class AddAwsCloudTrailDataConnector(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.aws_cloud_trail_data_connector = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'aws-role-arn':
d['aws_role_arn'] = v[0]
elif kl == 'state':
d['state'] = v[0]
elif kl == 'etag':
d['etag'] = v[0]
d['kind'] = 'AmazonWebServicesCloudTrail'
return d
class AddMcasDataConnector(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.mcas_data_connector = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'tenant-id':
d['tenant_id'] = v[0]
elif kl == 'state-data-types-alerts-state':
d['state_data_types_alerts_state'] = v[0]
elif kl == 'state-data-types-discovery-logs-state':
d['state_data_types_discovery_logs_state'] = v[0]
elif kl == 'etag':
d['etag'] = v[0]
d['kind'] = 'MicrosoftCloudAppSecurity'
return d
class AddMdatpDataConnector(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.mdatp_data_connector = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'tenant-id':
d['tenant_id'] = v[0]
elif kl == 'state':
d['state'] = v[0]
elif kl == 'etag':
d['etag'] = v[0]
d['kind'] = 'MicrosoftDefenderAdvancedThreatProtection'
return d
class AddOfficeDataConnector(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.office_data_connector = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {
'dataTypes': {
'sharePoint': {'state': 'Disabled'},
'exchange': {'state': 'Disabled'}
}
}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'tenant-id':
d['tenantId'] = v[0]
elif kl == 'sharepoint-enabled':
d['dataTypes']['sharePoint']['state'] = 'Enabled'
elif kl == 'exchange-enabled':
d['dataTypes']['exchange']['state'] = 'Enabled'
elif kl == 'etag':
d['etag'] = v[0]
d['kind'] = 'Office365'
return d
class AddTiDataConnector(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.ti_data_connector = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'tenant-id':
d['tenant_id'] = v[0]
elif kl == 'state':
d['state'] = v[0]
elif kl == 'etag':
d['etag'] = v[0]
d['kind'] = 'ThreatIntelligence'
return d
class AddLabels(argparse._AppendAction):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
super(AddLabels, self).__call__(parser, namespace, action, option_string)
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'label-name':
d['label_name'] = v[0]
return d
class AddOwner(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.owner = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'email':
d['email'] = v[0]
elif kl == 'assigned-to':
d['assigned_to'] = v[0]
elif kl == 'object-id':
d['object_id'] = v[0]
elif kl == 'user-principal-name':
d['user_principal_name'] = v[0]
return d
| 37.5553 | 83 | 0.52764 | 1,793 | 16,299 | 4.672616 | 0.10039 | 0.081642 | 0.090236 | 0.03915 | 0.772738 | 0.754118 | 0.74445 | 0.74075 | 0.74075 | 0.735498 | 0 | 0.006652 | 0.33591 | 16,299 | 433 | 84 | 37.642032 | 0.767369 | 0.053009 | 0 | 0.728947 | 0 | 0 | 0.140947 | 0.031603 | 0 | 0 | 0 | 0 | 0 | 1 | 0.073684 | false | 0 | 0.007895 | 0 | 0.155263 | 0.002632 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
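A minimal sketch of how these KEY=VALUE actions behave when attached to a parser; the option name and values below are illustrative, not the extension's actual CLI wiring:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--owner', nargs='+', action=AddOwner)
ns = parser.parse_args(['--owner', 'email=sample@example.com', 'assigned-to=analyst'])
# ns.owner == {'email': 'sample@example.com', 'assigned_to': 'analyst'}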