Dataset columns and types:

| Column | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
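A minimal sketch of loading such a shard and selecting a few of these columns with pandas; the Parquet filename is hypothetical, and note that `effective` is declared as a string, not an integer:

```python
import pandas as pd

# Hypothetical shard name; the real file layout is not specified here.
df = pd.read_parquet("data-00000-of-00001.parquet")

# A few of the columns declared above: file metadata plus quality signals.
cols = ["hexsha", "size", "lang", "alphanum_fraction", "effective", "hits"]
print(df[cols].head())

# `effective` is typed as string in the schema, so compare against "0"/"1".
kept = df[df["effective"] != "0"]
print(f"{len(kept)} of {len(df)} rows marked effective")
```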
Row 1:
- hexsha: e6e35dac14aba464df4ec53ca5df32587a892f2c
- size: 141
- ext: py
- lang: Python
- max_stars_repo_path: lisc/urls/__init__.py
- max_stars_repo_name: koudyk/lisc
- max_stars_repo_head_hexsha: 679ba6624bb300f1b971be04fbf46e1fcfee65c9
- max_stars_repo_licenses: ["Apache-2.0"]
- max_stars_count: 1
- max_stars_repo_stars_event_min_datetime: 2020-05-11T18:36:16.000Z
- max_stars_repo_stars_event_max_datetime: 2020-05-11T18:36:16.000Z
- max_issues_repo_path: lisc/urls/__init__.py
- max_issues_repo_name: ryanhammonds/lisc
- max_issues_repo_head_hexsha: 56714291855164c8059486da44d8e239e5e920d6
- max_issues_repo_licenses: ["Apache-2.0"]
- max_issues_count: null
- max_issues_repo_issues_event_min_datetime: null
- max_issues_repo_issues_event_max_datetime: null
- max_forks_repo_path: lisc/urls/__init__.py
- max_forks_repo_name: ryanhammonds/lisc
- max_forks_repo_head_hexsha: 56714291855164c8059486da44d8e239e5e920d6
- max_forks_repo_licenses: ["Apache-2.0"]
- max_forks_count: null
- max_forks_repo_forks_event_min_datetime: null
- max_forks_repo_forks_event_max_datetime: null
- content:

```python
"""URLs object and associated functionality."""

from .urls import URLs
from .eutils import EUtils
from .open_citations import OpenCitations
```

- avg_line_length: 23.5
- max_line_length: 47
- alphanum_fraction: 0.801418
- quality signals (schema order, qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal): 18 | 141 | 6.222222 | 0.611111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.12766 | 141 | 5 | 48 | 28.2 | 0.910569 | 0.29078 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0
- filter flags (schema order, qsc_code_num_words through qsc_codepython_frac_lines_print): 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0
- effective: 0
- hits: 5
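In this row, exactly five of the flag columns are 1 (qsc_code_num_words, qsc_code_num_lines, qsc_codepython_cate_var_zero, qsc_codepython_frac_lines_import, qsc_codepython_score_lines_no_logic) and `hits` is 5, so `hits` appears to count the fired filters. A sketch that recomputes it under that assumption (hypothetical shard name again):

```python
import pandas as pd

df = pd.read_parquet("data-00000-of-00001.parquet")  # hypothetical shard name

# Flag columns: the qsc_* names *without* the _quality_signal suffix hold
# 0/1 hits (with nulls for frac_words_unique and frac_lines_string_concat).
flag_cols = [c for c in df.columns
             if c.startswith("qsc_") and not c.endswith("_quality_signal")]

recomputed = df[flag_cols].fillna(0).astype(int).sum(axis=1)
assert (recomputed == df["hits"]).all()  # holds for the rows shown here
```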
Row 2:
- hexsha: e6fb0a58ef1ddf85af3e0c1e578c67f3942ac816
- size: 196
- ext: py
- lang: Python
- max_stars_repo_path: movo_demos/setup.py
- max_stars_repo_name: syc7446/kinova-movo
- max_stars_repo_head_hexsha: 28bec5bb61517f970071782a32ac58e92c67f0df
- max_stars_repo_licenses: ["BSD-3-Clause"]
- max_stars_count: 1
- max_stars_repo_stars_event_min_datetime: 2021-06-24T19:20:01.000Z
- max_stars_repo_stars_event_max_datetime: 2021-06-24T19:20:01.000Z
- max_issues_repo_path: movo_demos/setup.py
- max_issues_repo_name: syc7446/kinova-movo
- max_issues_repo_head_hexsha: 28bec5bb61517f970071782a32ac58e92c67f0df
- max_issues_repo_licenses: ["BSD-3-Clause"]
- max_issues_count: null
- max_issues_repo_issues_event_min_datetime: null
- max_issues_repo_issues_event_max_datetime: null
- max_forks_repo_path: movo_demos/setup.py
- max_forks_repo_name: syc7446/kinova-movo
- max_forks_repo_head_hexsha: 28bec5bb61517f970071782a32ac58e92c67f0df
- max_forks_repo_licenses: ["BSD-3-Clause"]
- max_forks_count: 1
- max_forks_repo_forks_event_min_datetime: 2020-01-21T11:05:24.000Z
- max_forks_repo_forks_event_max_datetime: 2020-01-21T11:05:24.000Z
- content:

```python
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
d = generate_distutils_setup(
    packages=['movo_demo'],
    package_dir={'': 'scripts'}
)
setup(**d)
```

- avg_line_length: 28
- max_line_length: 60
- alphanum_fraction: 0.765306
- quality signals (schema order, qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal): 26 | 196 | 5.461538 | 0.615385 | 0.239437 | 0.309859 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.117347 | 196 | 7 | 61 | 28 | 0.820809 | 0 | 0 | 0 | 1 | 0 | 0.081218 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.285714 | 0 | 0.285714 | 0
- filter flags (schema order, qsc_code_num_words through qsc_codepython_frac_lines_print): 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0
- hits: 5
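For this row the stored statistics are consistent with simple definitions over `content`: `avg_line_length` equals `size` divided by the line count (196 / 7 = 28), `max_line_length` is the longest line (60), and `alphanum_fraction` the share of alphanumeric characters (0.765306). A sketch that recomputes them, assuming exactly those definitions:

```python
def line_stats(content: str) -> tuple[float, int, float]:
    """Recompute avg/max line length and alphanum fraction for a row."""
    lines = content.splitlines()
    avg_line_length = len(content) / max(len(lines), 1)
    max_line_length = max((len(line) for line in lines), default=0)
    alphanum_fraction = sum(ch.isalnum() for ch in content) / len(content)
    return avg_line_length, max_line_length, alphanum_fraction
```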
Row 3:
- hexsha: fc0b456dc5aeddb727d952ed713977641a1989c6
- size: 41068
- ext: py
- lang: Python
- max_stars_repo_path: mars/dataframe/base/tests/test_base.py
- max_stars_repo_name: vibhatha/mars
- max_stars_repo_head_hexsha: 7a6b78ca4befd1a46d82cfb0163ffcd49293f7b5
- max_stars_repo_licenses: ["Apache-2.0"]
- max_stars_count: null
- max_stars_repo_stars_event_min_datetime: null
- max_stars_repo_stars_event_max_datetime: null
- max_issues_repo_path: mars/dataframe/base/tests/test_base.py
- max_issues_repo_name: vibhatha/mars
- max_issues_repo_head_hexsha: 7a6b78ca4befd1a46d82cfb0163ffcd49293f7b5
- max_issues_repo_licenses: ["Apache-2.0"]
- max_issues_count: null
- max_issues_repo_issues_event_min_datetime: null
- max_issues_repo_issues_event_max_datetime: null
- max_forks_repo_path: mars/dataframe/base/tests/test_base.py
- max_forks_repo_name: vibhatha/mars
- max_forks_repo_head_hexsha: 7a6b78ca4befd1a46d82cfb0163ffcd49293f7b5
- max_forks_repo_licenses: ["Apache-2.0"]
- max_forks_count: null
- max_forks_repo_forks_event_min_datetime: null
- max_forks_repo_forks_event_max_datetime: null
- content:

```python
# Copyright 1999-2020 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
import numpy as np
import pandas as pd
from mars import opcodes
from mars.config import options, option_context
from mars.core import OutputType
from mars.dataframe.core import DATAFRAME_TYPE, SERIES_TYPE, SERIES_CHUNK_TYPE, \
INDEX_TYPE, CATEGORICAL_TYPE, CATEGORICAL_CHUNK_TYPE
from mars.dataframe.datasource.dataframe import from_pandas as from_pandas_df
from mars.dataframe.datasource.series import from_pandas as from_pandas_series
from mars.dataframe.datasource.index import from_pandas as from_pandas_index
from mars.dataframe.base import to_gpu, to_cpu, cut, astype
from mars.operands import OperandStage
from mars.tensor.core import TENSOR_TYPE
from mars.tests.core import TestBase
from mars.tiles import get_tiled
class Test(TestBase):
def testToGPU(self):
# test dataframe
data = pd.DataFrame(np.random.rand(10, 10), index=np.random.randint(-100, 100, size=(10,)),
columns=[np.random.bytes(10) for _ in range(10)])
df = from_pandas_df(data)
cdf = to_gpu(df)
self.assertEqual(df.index_value, cdf.index_value)
self.assertEqual(df.columns_value, cdf.columns_value)
self.assertTrue(cdf.op.gpu)
pd.testing.assert_series_equal(df.dtypes, cdf.dtypes)
cdf = cdf.tiles()
df = get_tiled(df)
self.assertEqual(df.nsplits, cdf.nsplits)
self.assertEqual(df.chunks[0].index_value, cdf.chunks[0].index_value)
self.assertEqual(df.chunks[0].columns_value, cdf.chunks[0].columns_value)
self.assertTrue(cdf.chunks[0].op.gpu)
pd.testing.assert_series_equal(df.chunks[0].dtypes, cdf.chunks[0].dtypes)
self.assertIs(cdf, to_gpu(cdf))
# test series
sdata = data.iloc[:, 0]
series = from_pandas_series(sdata)
cseries = to_gpu(series)
self.assertEqual(series.index_value, cseries.index_value)
self.assertTrue(cseries.op.gpu)
cseries = cseries.tiles()
series = get_tiled(series)
self.assertEqual(series.nsplits, cseries.nsplits)
self.assertEqual(series.chunks[0].index_value, cseries.chunks[0].index_value)
self.assertTrue(cseries.chunks[0].op.gpu)
self.assertIs(cseries, to_gpu(cseries))
def testToCPU(self):
data = pd.DataFrame(np.random.rand(10, 10), index=np.random.randint(-100, 100, size=(10,)),
columns=[np.random.bytes(10) for _ in range(10)])
df = from_pandas_df(data)
cdf = to_gpu(df)
df2 = to_cpu(cdf)
self.assertEqual(df.index_value, df2.index_value)
self.assertEqual(df.columns_value, df2.columns_value)
self.assertFalse(df2.op.gpu)
pd.testing.assert_series_equal(df.dtypes, df2.dtypes)
df2 = df2.tiles()
df = get_tiled(df)
self.assertEqual(df.nsplits, df2.nsplits)
self.assertEqual(df.chunks[0].index_value, df2.chunks[0].index_value)
self.assertEqual(df.chunks[0].columns_value, df2.chunks[0].columns_value)
self.assertFalse(df2.chunks[0].op.gpu)
pd.testing.assert_series_equal(df.chunks[0].dtypes, df2.chunks[0].dtypes)
self.assertIs(df2, to_cpu(df2))
def testRechunk(self):
raw = pd.DataFrame(np.random.rand(10, 10))
df = from_pandas_df(raw, chunk_size=3)
df2 = df.rechunk(4).tiles()
self.assertEqual(df2.shape, (10, 10))
self.assertEqual(len(df2.chunks), 9)
self.assertEqual(df2.chunks[0].shape, (4, 4))
pd.testing.assert_index_equal(df2.chunks[0].index_value.to_pandas(), pd.RangeIndex(4))
pd.testing.assert_index_equal(df2.chunks[0].columns_value.to_pandas(), pd.RangeIndex(4))
pd.testing.assert_series_equal(df2.chunks[0].dtypes, raw.dtypes[:4])
self.assertEqual(df2.chunks[2].shape, (4, 2))
pd.testing.assert_index_equal(df2.chunks[2].index_value.to_pandas(), pd.RangeIndex(4))
pd.testing.assert_index_equal(df2.chunks[2].columns_value.to_pandas(), pd.RangeIndex(8, 10))
pd.testing.assert_series_equal(df2.chunks[2].dtypes, raw.dtypes[-2:])
self.assertEqual(df2.chunks[-1].shape, (2, 2))
pd.testing.assert_index_equal(df2.chunks[-1].index_value.to_pandas(), pd.RangeIndex(8, 10))
pd.testing.assert_index_equal(df2.chunks[-1].columns_value.to_pandas(), pd.RangeIndex(8, 10))
pd.testing.assert_series_equal(df2.chunks[-1].dtypes, raw.dtypes[-2:])
for c in df2.chunks:
self.assertEqual(c.shape[1], len(c.dtypes))
self.assertEqual(len(c.columns_value.to_pandas()), len(c.dtypes))
columns = [np.random.bytes(10) for _ in range(10)]
index = np.random.randint(-100, 100, size=(4,))
raw = pd.DataFrame(np.random.rand(4, 10), index=index, columns=columns)
df = from_pandas_df(raw, chunk_size=3)
df2 = df.rechunk(6).tiles()
self.assertEqual(df2.shape, (4, 10))
self.assertEqual(len(df2.chunks), 2)
self.assertEqual(df2.chunks[0].shape, (4, 6))
pd.testing.assert_index_equal(df2.chunks[0].index_value.to_pandas(), df.index_value.to_pandas())
pd.testing.assert_index_equal(df2.chunks[0].columns_value.to_pandas(), pd.Index(columns[:6]))
pd.testing.assert_series_equal(df2.chunks[0].dtypes, raw.dtypes[:6])
self.assertEqual(df2.chunks[1].shape, (4, 4))
pd.testing.assert_index_equal(df2.chunks[1].index_value.to_pandas(), df.index_value.to_pandas())
pd.testing.assert_index_equal(df2.chunks[1].columns_value.to_pandas(), pd.Index(columns[6:]))
pd.testing.assert_series_equal(df2.chunks[1].dtypes, raw.dtypes[-4:])
for c in df2.chunks:
self.assertEqual(c.shape[1], len(c.dtypes))
self.assertEqual(len(c.columns_value.to_pandas()), len(c.dtypes))
# test Series rechunk
series = from_pandas_series(pd.Series(np.random.rand(10,)), chunk_size=3)
series2 = series.rechunk(4).tiles()
self.assertEqual(series2.shape, (10,))
self.assertEqual(len(series2.chunks), 3)
pd.testing.assert_index_equal(series2.index_value.to_pandas(), pd.RangeIndex(10))
self.assertEqual(series2.chunk_shape, (3,))
self.assertEqual(series2.nsplits, ((4, 4, 2), ))
self.assertEqual(series2.chunks[0].shape, (4,))
pd.testing.assert_index_equal(series2.chunks[0].index_value.to_pandas(), pd.RangeIndex(4))
self.assertEqual(series2.chunks[1].shape, (4,))
pd.testing.assert_index_equal(series2.chunks[1].index_value.to_pandas(), pd.RangeIndex(4, 8))
self.assertEqual(series2.chunks[2].shape, (2,))
pd.testing.assert_index_equal(series2.chunks[2].index_value.to_pandas(), pd.RangeIndex(8, 10))
series2 = series.rechunk(1).tiles()
self.assertEqual(series2.shape, (10,))
self.assertEqual(len(series2.chunks), 10)
pd.testing.assert_index_equal(series2.index_value.to_pandas(), pd.RangeIndex(10))
self.assertEqual(series2.chunk_shape, (10,))
self.assertEqual(series2.nsplits, ((1,) * 10, ))
self.assertEqual(series2.chunks[0].shape, (1,))
pd.testing.assert_index_equal(series2.chunks[0].index_value.to_pandas(), pd.RangeIndex(1))
# no need to rechunk
series2 = series.rechunk(3).tiles()
series = get_tiled(series)
self.assertEqual(series2.chunk_shape, series.chunk_shape)
self.assertEqual(series2.nsplits, series.nsplits)
def testFillNA(self):
df_raw = pd.DataFrame(np.nan, index=range(0, 20), columns=list('ABCDEFGHIJ'))
for _ in range(20):
df_raw.iloc[random.randint(0, 19), random.randint(0, 9)] = random.randint(0, 99)
value_df_raw = pd.DataFrame(np.random.randint(0, 100, (10, 7)).astype(np.float32),
columns=list('ABCDEFG'))
series_raw = pd.Series(np.nan, index=range(20))
for _ in range(3):
series_raw.iloc[random.randint(0, 19)] = random.randint(0, 99)
value_series_raw = pd.Series(np.random.randint(0, 100, (10,)).astype(np.float32),
index=list('ABCDEFGHIJ'))
df = from_pandas_df(df_raw)
series = from_pandas_series(series_raw)
# when nothing supplied, raise
with self.assertRaises(ValueError):
df.fillna()
# when both values and methods supplied, raises
with self.assertRaises(ValueError):
df.fillna(value=1, method='ffill')
# when call on series, cannot supply DataFrames
with self.assertRaises(ValueError):
series.fillna(value=df)
with self.assertRaises(ValueError):
series.fillna(value=df_raw)
with self.assertRaises(NotImplementedError):
series.fillna(value=series_raw, downcast='infer')
with self.assertRaises(NotImplementedError):
series.ffill(limit=1)
df2 = df.fillna(value_series_raw).tiles()
self.assertEqual(len(df2.chunks), 1)
self.assertEqual(df2.chunks[0].shape, df2.shape)
self.assertIsNone(df2.chunks[0].op.stage)
series2 = series.fillna(value_series_raw).tiles()
self.assertEqual(len(series2.chunks), 1)
self.assertEqual(series2.chunks[0].shape, series2.shape)
self.assertIsNone(series2.chunks[0].op.stage)
df = from_pandas_df(df_raw, chunk_size=5)
df2 = df.fillna(value_series_raw).tiles()
self.assertEqual(len(df2.chunks), 8)
self.assertEqual(df2.chunks[0].shape, (5, 5))
self.assertIsNone(df2.chunks[0].op.stage)
series = from_pandas_series(series_raw, chunk_size=5)
series2 = series.fillna(value_series_raw).tiles()
self.assertEqual(len(series2.chunks), 4)
self.assertEqual(series2.chunks[0].shape, (5,))
self.assertIsNone(series2.chunks[0].op.stage)
df2 = df.ffill(axis='columns').tiles()
self.assertEqual(len(df2.chunks), 8)
self.assertEqual(df2.chunks[0].shape, (5, 5))
self.assertEqual(df2.chunks[0].op.axis, 1)
self.assertEqual(df2.chunks[0].op.stage, OperandStage.combine)
self.assertEqual(df2.chunks[0].op.method, 'ffill')
self.assertIsNone(df2.chunks[0].op.limit)
series2 = series.bfill().tiles()
self.assertEqual(len(series2.chunks), 4)
self.assertEqual(series2.chunks[0].shape, (5,))
self.assertEqual(series2.chunks[0].op.stage, OperandStage.combine)
self.assertEqual(series2.chunks[0].op.method, 'bfill')
self.assertIsNone(series2.chunks[0].op.limit)
value_df = from_pandas_df(value_df_raw, chunk_size=7)
value_series = from_pandas_series(value_series_raw, chunk_size=7)
df2 = df.fillna(value_df).tiles()
self.assertEqual(df2.shape, df.shape)
self.assertIsNone(df2.chunks[0].op.stage)
df2 = df.fillna(value_series).tiles()
self.assertEqual(df2.shape, df.shape)
self.assertIsNone(df2.chunks[0].op.stage)
value_series_raw.index = list(range(10))
value_series = from_pandas_series(value_series_raw)
series2 = series.fillna(value_series).tiles()
self.assertEqual(series2.shape, series.shape)
self.assertIsNone(series2.chunks[0].op.stage)
def testDataFrameApply(self):
cols = [chr(ord('A') + i) for i in range(10)]
df_raw = pd.DataFrame(dict((c, [i ** 2 for i in range(20)]) for c in cols))
old_chunk_store_limit = options.chunk_store_limit
try:
options.chunk_store_limit = 20
df = from_pandas_df(df_raw, chunk_size=5)
r = df.apply('ffill')
self.assertEqual(r.op._op_type_, opcodes.FILL_NA)
r = df.apply(np.sqrt).tiles()
self.assertTrue(all(v == np.dtype('float64') for v in r.dtypes))
self.assertEqual(r.shape, df.shape)
self.assertEqual(r.op._op_type_, opcodes.APPLY)
self.assertEqual(r.op.output_types[0], OutputType.dataframe)
self.assertTrue(r.op.elementwise)
r = df.apply(lambda x: pd.Series([1, 2])).tiles()
self.assertTrue(all(v == np.dtype('int64') for v in r.dtypes))
self.assertEqual(r.shape, (np.nan, df.shape[1]))
self.assertEqual(r.op.output_types[0], OutputType.dataframe)
self.assertEqual(r.chunks[0].shape, (np.nan, 1))
self.assertEqual(r.chunks[0].inputs[0].shape[0], df_raw.shape[0])
self.assertEqual(r.chunks[0].inputs[0].op._op_type_, opcodes.CONCATENATE)
self.assertFalse(r.op.elementwise)
r = df.apply(np.sum, axis='index').tiles()
self.assertTrue(np.dtype('int64'), r.dtype)
self.assertEqual(r.shape, (df.shape[1],))
self.assertEqual(r.op.output_types[0], OutputType.series)
self.assertEqual(r.chunks[0].shape, (20 // df.shape[0],))
self.assertEqual(r.chunks[0].inputs[0].shape[0], df_raw.shape[0])
self.assertEqual(r.chunks[0].inputs[0].op._op_type_, opcodes.CONCATENATE)
self.assertFalse(r.op.elementwise)
r = df.apply(np.sum, axis='columns').tiles()
self.assertTrue(np.dtype('int64'), r.dtype)
self.assertEqual(r.shape, (df.shape[0],))
self.assertEqual(r.op.output_types[0], OutputType.series)
self.assertEqual(r.chunks[0].shape, (20 // df.shape[1],))
self.assertEqual(r.chunks[0].inputs[0].shape[1], df_raw.shape[1])
self.assertEqual(r.chunks[0].inputs[0].op._op_type_, opcodes.CONCATENATE)
self.assertFalse(r.op.elementwise)
r = df.apply(lambda x: pd.Series([1, 2], index=['foo', 'bar']), axis=1).tiles()
self.assertTrue(all(v == np.dtype('int64') for v in r.dtypes))
self.assertEqual(r.shape, (df.shape[0], np.nan))
self.assertEqual(r.op.output_types[0], OutputType.dataframe)
self.assertEqual(r.chunks[0].shape, (20 // df.shape[1], np.nan))
self.assertEqual(r.chunks[0].inputs[0].shape[1], df_raw.shape[1])
self.assertEqual(r.chunks[0].inputs[0].op._op_type_, opcodes.CONCATENATE)
self.assertFalse(r.op.elementwise)
r = df.apply(lambda x: [1, 2], axis=1, result_type='expand').tiles()
self.assertTrue(all(v == np.dtype('int64') for v in r.dtypes))
self.assertEqual(r.shape, (df.shape[0], np.nan))
self.assertEqual(r.op.output_types[0], OutputType.dataframe)
self.assertEqual(r.chunks[0].shape, (20 // df.shape[1], np.nan))
self.assertEqual(r.chunks[0].inputs[0].shape[1], df_raw.shape[1])
self.assertEqual(r.chunks[0].inputs[0].op._op_type_, opcodes.CONCATENATE)
self.assertFalse(r.op.elementwise)
r = df.apply(lambda x: list(range(10)), axis=1, result_type='reduce').tiles()
self.assertTrue(np.dtype('object'), r.dtype)
self.assertEqual(r.shape, (df.shape[0],))
self.assertEqual(r.op.output_types[0], OutputType.series)
self.assertEqual(r.chunks[0].shape, (20 // df.shape[1],))
self.assertEqual(r.chunks[0].inputs[0].shape[1], df_raw.shape[1])
self.assertEqual(r.chunks[0].inputs[0].op._op_type_, opcodes.CONCATENATE)
self.assertFalse(r.op.elementwise)
r = df.apply(lambda x: list(range(10)), axis=1, result_type='broadcast').tiles()
self.assertTrue(all(v == np.dtype('int64') for v in r.dtypes))
self.assertEqual(r.shape, (df.shape[0], np.nan))
self.assertEqual(r.op.output_types[0], OutputType.dataframe)
self.assertEqual(r.chunks[0].shape, (20 // df.shape[1], np.nan))
self.assertEqual(r.chunks[0].inputs[0].shape[1], df_raw.shape[1])
self.assertEqual(r.chunks[0].inputs[0].op._op_type_, opcodes.CONCATENATE)
self.assertFalse(r.op.elementwise)
finally:
options.chunk_store_limit = old_chunk_store_limit
def testSeriesApply(self):
idxes = [chr(ord('A') + i) for i in range(20)]
s_raw = pd.Series([i ** 2 for i in range(20)], index=idxes)
series = from_pandas_series(s_raw, chunk_size=5)
r = series.apply('add', args=(1,)).tiles()
self.assertEqual(r.op._op_type_, opcodes.ADD)
r = series.apply(np.sqrt).tiles()
self.assertTrue(np.dtype('float64'), r.dtype)
self.assertEqual(r.shape, series.shape)
self.assertEqual(r.op._op_type_, opcodes.APPLY)
self.assertEqual(r.op.output_types[0], OutputType.series)
self.assertEqual(r.chunks[0].shape, (5,))
self.assertEqual(r.chunks[0].inputs[0].shape, (5,))
r = series.apply('sqrt').tiles()
self.assertTrue(np.dtype('float64'), r.dtype)
self.assertEqual(r.shape, series.shape)
self.assertEqual(r.op._op_type_, opcodes.APPLY)
self.assertEqual(r.op.output_types[0], OutputType.series)
self.assertEqual(r.chunks[0].shape, (5,))
self.assertEqual(r.chunks[0].inputs[0].shape, (5,))
r = series.apply(lambda x: [x, x + 1], convert_dtype=False).tiles()
self.assertTrue(np.dtype('object'), r.dtype)
self.assertEqual(r.shape, series.shape)
self.assertEqual(r.op._op_type_, opcodes.APPLY)
self.assertEqual(r.op.output_types[0], OutputType.series)
self.assertEqual(r.chunks[0].shape, (5,))
self.assertEqual(r.chunks[0].inputs[0].shape, (5,))
def testTransform(self):
cols = [chr(ord('A') + i) for i in range(10)]
df_raw = pd.DataFrame(dict((c, [i ** 2 for i in range(20)]) for c in cols))
df = from_pandas_df(df_raw, chunk_size=5)
idxes = [chr(ord('A') + i) for i in range(20)]
s_raw = pd.Series([i ** 2 for i in range(20)], index=idxes)
series = from_pandas_series(s_raw, chunk_size=5)
def rename_fn(f, new_name):
f.__name__ = new_name
return f
old_chunk_store_limit = options.chunk_store_limit
try:
options.chunk_store_limit = 20
# DATAFRAME CASES
# test transform scenarios on data frames
r = df.transform(lambda x: list(range(len(x)))).tiles()
self.assertTrue(all(v == np.dtype('int64') for v in r.dtypes))
self.assertEqual(r.shape, df.shape)
self.assertEqual(r.op._op_type_, opcodes.TRANSFORM)
self.assertEqual(r.op.output_types[0], OutputType.dataframe)
self.assertEqual(r.chunks[0].shape, (df.shape[0], 20 // df.shape[0]))
self.assertEqual(r.chunks[0].inputs[0].shape[0], df_raw.shape[0])
self.assertEqual(r.chunks[0].inputs[0].op._op_type_, opcodes.CONCATENATE)
r = df.transform(lambda x: list(range(len(x))), axis=1).tiles()
self.assertTrue(all(v == np.dtype('int64') for v in r.dtypes))
self.assertEqual(r.shape, df.shape)
self.assertEqual(r.op._op_type_, opcodes.TRANSFORM)
self.assertEqual(r.op.output_types[0], OutputType.dataframe)
self.assertEqual(r.chunks[0].shape, (20 // df.shape[1], df.shape[1]))
self.assertEqual(r.chunks[0].inputs[0].shape[1], df_raw.shape[1])
self.assertEqual(r.chunks[0].inputs[0].op._op_type_, opcodes.CONCATENATE)
r = df.transform(['cumsum', 'cummax', lambda x: x + 1]).tiles()
self.assertTrue(all(v == np.dtype('int64') for v in r.dtypes))
self.assertEqual(r.shape, (df.shape[0], df.shape[1] * 3))
self.assertEqual(r.op._op_type_, opcodes.TRANSFORM)
self.assertEqual(r.op.output_types[0], OutputType.dataframe)
self.assertEqual(r.chunks[0].shape, (df.shape[0], 20 // df.shape[0] * 3))
self.assertEqual(r.chunks[0].inputs[0].shape[0], df_raw.shape[0])
self.assertEqual(r.chunks[0].inputs[0].op._op_type_, opcodes.CONCATENATE)
r = df.transform({'A': 'cumsum', 'D': ['cumsum', 'cummax'], 'F': lambda x: x + 1}).tiles()
self.assertTrue(all(v == np.dtype('int64') for v in r.dtypes))
self.assertEqual(r.shape, (df.shape[0], 4))
self.assertEqual(r.op._op_type_, opcodes.TRANSFORM)
self.assertEqual(r.op.output_types[0], OutputType.dataframe)
self.assertEqual(r.chunks[0].shape, (df.shape[0], 1))
self.assertEqual(r.chunks[0].inputs[0].shape[0], df_raw.shape[0])
self.assertEqual(r.chunks[0].inputs[0].op._op_type_, opcodes.CONCATENATE)
# test agg scenarios on series
r = df.transform(lambda x: x.iloc[:-1], _call_agg=True).tiles()
self.assertTrue(all(v == np.dtype('int64') for v in r.dtypes))
self.assertEqual(r.shape, (np.nan, df.shape[1]))
self.assertEqual(r.op._op_type_, opcodes.TRANSFORM)
self.assertEqual(r.op.output_types[0], OutputType.dataframe)
self.assertEqual(r.chunks[0].shape, (np.nan, 1))
self.assertEqual(r.chunks[0].inputs[0].shape[0], df_raw.shape[0])
self.assertEqual(r.chunks[0].inputs[0].op._op_type_, opcodes.CONCATENATE)
r = df.transform(lambda x: x.iloc[:-1], axis=1, _call_agg=True).tiles()
self.assertTrue(all(v == np.dtype('int64') for v in r.dtypes))
self.assertEqual(r.shape, (df.shape[0], np.nan))
self.assertEqual(r.op._op_type_, opcodes.TRANSFORM)
self.assertEqual(r.op.output_types[0], OutputType.dataframe)
self.assertEqual(r.chunks[0].shape, (2, np.nan))
self.assertEqual(r.chunks[0].inputs[0].shape[1], df_raw.shape[1])
self.assertEqual(r.chunks[0].inputs[0].op._op_type_, opcodes.CONCATENATE)
fn_list = [rename_fn(lambda x: x.iloc[1:].reset_index(drop=True), 'f1'),
lambda x: x.iloc[:-1].reset_index(drop=True)]
r = df.transform(fn_list, _call_agg=True).tiles()
self.assertTrue(all(v == np.dtype('int64') for v in r.dtypes))
self.assertEqual(r.shape, (np.nan, df.shape[1] * 2))
self.assertEqual(r.op._op_type_, opcodes.TRANSFORM)
self.assertEqual(r.op.output_types[0], OutputType.dataframe)
self.assertEqual(r.chunks[0].shape, (np.nan, 2))
self.assertEqual(r.chunks[0].inputs[0].shape[0], df_raw.shape[0])
self.assertEqual(r.chunks[0].inputs[0].op._op_type_, opcodes.CONCATENATE)
r = df.transform(lambda x: x.sum(), _call_agg=True).tiles()
self.assertEqual(r.dtype, np.dtype('int64'))
self.assertEqual(r.shape, (df.shape[1],))
self.assertEqual(r.op._op_type_, opcodes.TRANSFORM)
self.assertEqual(r.op.output_types[0], OutputType.series)
self.assertEqual(r.chunks[0].shape, (20 // df.shape[0],))
self.assertEqual(r.chunks[0].inputs[0].shape[0], df_raw.shape[0])
self.assertEqual(r.chunks[0].inputs[0].op._op_type_, opcodes.CONCATENATE)
fn_dict = {
'A': rename_fn(lambda x: x.iloc[1:].reset_index(drop=True), 'f1'),
'D': [rename_fn(lambda x: x.iloc[1:].reset_index(drop=True), 'f1'),
lambda x: x.iloc[:-1].reset_index(drop=True)],
'F': lambda x: x.iloc[:-1].reset_index(drop=True),
}
r = df.transform(fn_dict, _call_agg=True).tiles()
self.assertTrue(all(v == np.dtype('int64') for v in r.dtypes))
self.assertEqual(r.shape, (np.nan, 4))
self.assertEqual(r.op._op_type_, opcodes.TRANSFORM)
self.assertEqual(r.op.output_types[0], OutputType.dataframe)
self.assertEqual(r.chunks[0].shape, (np.nan, 1))
self.assertEqual(r.chunks[0].inputs[0].shape[0], df_raw.shape[0])
self.assertEqual(r.chunks[0].inputs[0].op._op_type_, opcodes.CONCATENATE)
# SERIES CASES
# test transform scenarios on series
r = series.transform(lambda x: x + 1).tiles()
self.assertTrue(np.dtype('float64'), r.dtype)
self.assertEqual(r.shape, series.shape)
self.assertEqual(r.op._op_type_, opcodes.TRANSFORM)
self.assertEqual(r.op.output_types[0], OutputType.series)
self.assertEqual(r.chunks[0].shape, (5,))
self.assertEqual(r.chunks[0].inputs[0].shape, (5,))
finally:
options.chunk_store_limit = old_chunk_store_limit
def testStringMethod(self):
s = pd.Series(['a', 'b', 'c'], name='s')
series = from_pandas_series(s, chunk_size=2)
with self.assertRaises(AttributeError):
_ = series.str.non_exist
r = series.str.contains('c')
self.assertEqual(r.dtype, np.bool_)
self.assertEqual(r.name, s.name)
pd.testing.assert_index_equal(r.index_value.to_pandas(), s.index)
self.assertEqual(r.shape, s.shape)
r = r.tiles()
for i, c in enumerate(r.chunks):
self.assertEqual(c.index, (i,))
self.assertEqual(c.dtype, np.bool_)
self.assertEqual(c.name, s.name)
pd.testing.assert_index_equal(c.index_value.to_pandas(),
s.index[i * 2: (i + 1) * 2])
self.assertEqual(c.shape, (2,) if i == 0 else (1,))
r = series.str.split(',', expand=True, n=1)
self.assertEqual(r.op.output_types[0], OutputType.dataframe)
self.assertEqual(r.shape, (3, 2))
pd.testing.assert_index_equal(r.index_value.to_pandas(), s.index)
pd.testing.assert_index_equal(r.columns_value.to_pandas(), pd.RangeIndex(2))
r = r.tiles()
for i, c in enumerate(r.chunks):
self.assertEqual(c.index, (i, 0))
pd.testing.assert_index_equal(c.index_value.to_pandas(),
s.index[i * 2: (i + 1) * 2])
pd.testing.assert_index_equal(c.columns_value.to_pandas(), pd.RangeIndex(2))
self.assertEqual(c.shape, (2, 2) if i == 0 else (1, 2))
with self.assertRaises(TypeError):
_ = series.str.cat([['1', '2']])
with self.assertRaises(ValueError):
_ = series.str.cat(['1', '2'])
with self.assertRaises(ValueError):
_ = series.str.cat(',')
with self.assertRaises(TypeError):
_ = series.str.cat({'1', '2', '3'})
r = series.str.cat(sep=',')
self.assertEqual(r.op.output_types[0], OutputType.scalar)
self.assertEqual(r.dtype, s.dtype)
r = r.tiles()
self.assertEqual(len(r.chunks), 1)
self.assertEqual(r.chunks[0].op.output_types[0], OutputType.scalar)
self.assertEqual(r.chunks[0].dtype, s.dtype)
r = series.str.extract(r'[ab](\d)', expand=False)
self.assertEqual(r.op.output_types[0], OutputType.series)
self.assertEqual(r.dtype, s.dtype)
r = r.tiles()
for i, c in enumerate(r.chunks):
self.assertEqual(c.index, (i,))
self.assertEqual(c.dtype, s.dtype)
self.assertEqual(c.name, s.name)
pd.testing.assert_index_equal(c.index_value.to_pandas(),
s.index[i * 2: (i + 1) * 2])
self.assertEqual(c.shape, (2,) if i == 0 else (1,))
r = series.str.extract(r'[ab](\d)', expand=True)
self.assertEqual(r.op.output_types[0], OutputType.dataframe)
self.assertEqual(r.shape, (3, 1))
pd.testing.assert_index_equal(r.index_value.to_pandas(), s.index)
pd.testing.assert_index_equal(r.columns_value.to_pandas(), pd.RangeIndex(1))
r = r.tiles()
for i, c in enumerate(r.chunks):
self.assertEqual(c.index, (i, 0))
pd.testing.assert_index_equal(c.index_value.to_pandas(),
s.index[i * 2: (i + 1) * 2])
pd.testing.assert_index_equal(c.columns_value.to_pandas(), pd.RangeIndex(1))
self.assertEqual(c.shape, (2, 1) if i == 0 else (1, 1))
self.assertIn('lstrip', dir(series.str))
def testDatetimeMethod(self):
s = pd.Series([pd.Timestamp('2020-1-1'),
pd.Timestamp('2020-2-1'),
pd.Timestamp('2020-3-1')],
name='ss')
series = from_pandas_series(s, chunk_size=2)
r = series.dt.year
self.assertEqual(r.dtype, s.dt.year.dtype)
pd.testing.assert_index_equal(r.index_value.to_pandas(), s.index)
self.assertEqual(r.shape, s.shape)
self.assertEqual(r.op.output_types[0], OutputType.series)
self.assertEqual(r.name, s.dt.year.name)
r = r.tiles()
for i, c in enumerate(r.chunks):
self.assertEqual(c.index, (i,))
self.assertEqual(c.dtype, s.dt.year.dtype)
self.assertEqual(c.op.output_types[0], OutputType.series)
self.assertEqual(r.name, s.dt.year.name)
pd.testing.assert_index_equal(c.index_value.to_pandas(),
s.index[i * 2: (i + 1) * 2])
self.assertEqual(c.shape, (2,) if i == 0 else (1,))
with self.assertRaises(AttributeError):
_ = series.dt.non_exist
self.assertIn('ceil', dir(series.dt))
def testSeriesIsin(self):
# one chunk in multiple chunks
a = from_pandas_series(pd.Series([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), chunk_size=10)
b = from_pandas_series(pd.Series([2, 1, 9, 3]), chunk_size=2)
r = a.isin(b).tiles()
for i, c in enumerate(r.chunks):
self.assertEqual(c.index, (i,))
self.assertEqual(c.dtype, np.dtype('bool'))
self.assertEqual(c.shape, (10,))
self.assertEqual(len(c.op.inputs), 2)
self.assertEqual(c.op.output_types[0], OutputType.series)
self.assertEqual(c.op.inputs[0].index, (i,))
self.assertEqual(c.op.inputs[0].shape, (10,))
self.assertEqual(c.op.inputs[1].index, (0,))
self.assertEqual(c.op.inputs[1].shape, (4,)) # has been rechunked
# multiple chunk in one chunks
a = from_pandas_series(pd.Series([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), chunk_size=2)
b = from_pandas_series(pd.Series([2, 1, 9, 3]), chunk_size=4)
r = a.isin(b).tiles()
for i, c in enumerate(r.chunks):
self.assertEqual(c.index, (i,))
self.assertEqual(c.dtype, np.dtype('bool'))
self.assertEqual(c.shape, (2,))
self.assertEqual(len(c.op.inputs), 2)
self.assertEqual(c.op.output_types[0], OutputType.series)
self.assertEqual(c.op.inputs[0].index, (i,))
self.assertEqual(c.op.inputs[0].shape, (2,))
self.assertEqual(c.op.inputs[1].index, (0,))
self.assertEqual(c.op.inputs[1].shape, (4,))
# multiple chunk in multiple chunks
a = from_pandas_series(pd.Series([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), chunk_size=2)
b = from_pandas_series(pd.Series([2, 1, 9, 3]), chunk_size=2)
r = a.isin(b).tiles()
for i, c in enumerate(r.chunks):
self.assertEqual(c.index, (i,))
self.assertEqual(c.dtype, np.dtype('bool'))
self.assertEqual(c.shape, (2,))
self.assertEqual(len(c.op.inputs), 2)
self.assertEqual(c.op.output_types[0], OutputType.series)
self.assertEqual(c.op.inputs[0].index, (i,))
self.assertEqual(c.op.inputs[0].shape, (2,))
self.assertEqual(c.op.inputs[1].index, (0,))
self.assertEqual(c.op.inputs[1].shape, (4,)) # has been rechunked
with self.assertRaises(TypeError):
_ = a.isin('sth')
def testDropNA(self):
# dataframe cases
df_raw = pd.DataFrame(np.nan, index=range(0, 20), columns=list('ABCDEFGHIJ'))
for _ in range(30):
df_raw.iloc[random.randint(0, 19), random.randint(0, 9)] = random.randint(0, 99)
for rowid in range(random.randint(1, 5)):
row = random.randint(0, 19)
for idx in range(0, 10):
df_raw.iloc[row, idx] = random.randint(0, 99)
# not supporting drop with axis=1
with self.assertRaises(NotImplementedError):
from_pandas_df(df_raw).dropna(axis=1)
# only one chunk in columns, can run dropna directly
r = from_pandas_df(df_raw, chunk_size=(4, 10)).dropna().tiles()
self.assertEqual(r.shape, (np.nan, 10))
self.assertEqual(r.nsplits, ((np.nan,) * 5, (10,)))
for c in r.chunks:
self.assertIsInstance(c.op, type(r.op))
self.assertEqual(len(c.inputs), 1)
self.assertEqual(len(c.inputs[0].inputs), 0)
self.assertEqual(c.shape, (np.nan, 10))
# multiple chunks in columns, count() will be called first
r = from_pandas_df(df_raw, chunk_size=4).dropna().tiles()
self.assertEqual(r.shape, (np.nan, 10))
self.assertEqual(r.nsplits, ((np.nan,) * 5, (4, 4, 2)))
for c in r.chunks:
self.assertIsInstance(c.op, type(r.op))
self.assertEqual(len(c.inputs), 2)
self.assertEqual(len(c.inputs[0].inputs), 0)
self.assertEqual(c.inputs[1].op.stage, OperandStage.agg)
self.assertTrue(np.isnan(c.shape[0]))
# series cases
series_raw = pd.Series(np.nan, index=range(20))
for _ in range(10):
series_raw.iloc[random.randint(0, 19)] = random.randint(0, 99)
r = from_pandas_series(series_raw, chunk_size=4).dropna().tiles()
self.assertEqual(r.shape, (np.nan,))
self.assertEqual(r.nsplits, ((np.nan,) * 5,))
for c in r.chunks:
self.assertIsInstance(c.op, type(r.op))
self.assertEqual(len(c.inputs), 1)
self.assertEqual(len(c.inputs[0].inputs), 0)
self.assertEqual(c.shape, (np.nan,))
def testCut(self):
s = from_pandas_series(pd.Series([1., 2., 3., 4.]), chunk_size=2)
with self.assertRaises(ValueError):
_ = cut(s, -1)
with self.assertRaises(ValueError):
_ = cut([[1, 2], [3, 4]], 3)
with self.assertRaises(ValueError):
_ = cut([], 3)
r, b = cut(s, [1.5, 2.5], retbins=True)
self.assertIsInstance(r, SERIES_TYPE)
self.assertIsInstance(b, TENSOR_TYPE)
r = r.tiles()
self.assertEqual(len(r.chunks), 2)
for c in r.chunks:
self.assertIsInstance(c, SERIES_CHUNK_TYPE)
self.assertEqual(c.shape, (2,))
r = cut(s.to_tensor(), [1.5, 2.5])
self.assertIsInstance(r, CATEGORICAL_TYPE)
self.assertEqual(len(r), len(s))
self.assertIn('Categorical', repr(r))
r = r.tiles()
self.assertEqual(len(r.chunks), 2)
for c in r.chunks:
self.assertIsInstance(c, CATEGORICAL_CHUNK_TYPE)
self.assertEqual(c.shape, (2,))
self.assertEqual(c.ndim, 1)
# test serialize
g = r.build_graph(tiled=False)
g2 = type(g).from_pb(g.to_pb())
g2 = type(g).from_json(g2.to_json())
r2 = next(n for n in g2 if isinstance(n, CATEGORICAL_TYPE))
self.assertEqual(len(r2), len(r))
r = cut([0, 1, 1, 2], bins=4, labels=False)
self.assertIsInstance(r, TENSOR_TYPE)
e = pd.cut([0, 1, 1, 2], bins=4, labels=False)
self.assertEqual(r.dtype, e.dtype)
def testAstype(self):
s = from_pandas_series(pd.Series([1, 2, 1, 2], name='a'), chunk_size=2)
with self.assertRaises(KeyError):
astype(s, {'b': 'str'})
df = from_pandas_df(pd.DataFrame({'a': [1, 2, 1, 2],
'b': ['a', 'b', 'a', 'b']}), chunk_size=2)
with self.assertRaises(KeyError):
astype(df, {'c': 'str', 'a': 'str'})
def testDrop(self):
# test dataframe drop
rs = np.random.RandomState(0)
raw = pd.DataFrame(rs.randint(1000, size=(20, 8)),
columns=['c' + str(i + 1) for i in range(8)])
df = from_pandas_df(raw, chunk_size=8)
with self.assertRaises(KeyError):
df.drop(columns=['c9'])
with self.assertRaises(NotImplementedError):
df.drop(columns=from_pandas_series(pd.Series(['c9'])))
r = df.drop(columns=['c1'])
pd.testing.assert_index_equal(r.index_value.to_pandas(), raw.index)
tiled = r.tiles()
start = 0
for c in tiled.chunks:
raw_index = raw.index[start: start + c.shape[0]]
start += c.shape[0]
pd.testing.assert_index_equal(raw_index, c.index_value.to_pandas())
df = from_pandas_df(raw, chunk_size=3)
columns = ['c2', 'c4', 'c5', 'c6']
index = [3, 6, 7]
r = df.drop(columns=columns, index=index)
self.assertIsInstance(r, DATAFRAME_TYPE)
# test series drop
raw = pd.Series(rs.randint(1000, size=(20,)))
series = from_pandas_series(raw, chunk_size=3)
r = series.drop(index=index)
self.assertIsInstance(r, SERIES_TYPE)
# test index drop
ser = pd.Series(range(20))
rs.shuffle(ser)
raw = pd.Index(ser)
idx = from_pandas_index(raw)
r = idx.drop(index)
self.assertIsInstance(r, INDEX_TYPE)
def testDropDuplicates(self):
rs = np.random.RandomState(0)
raw = pd.DataFrame(rs.randint(1000, size=(20, 7)),
columns=['c' + str(i + 1) for i in range(7)])
raw['c7'] = [f's{j}' for j in range(20)]
df = from_pandas_df(raw, chunk_size=10)
with self.assertRaises(ValueError):
df.drop_duplicates(method='unknown')
with self.assertRaises(KeyError):
df.drop_duplicates(subset='c8')
# test auto method selection
self.assertEqual(df.drop_duplicates().tiles().chunks[0].op.method, 'tree')
# subset size less than chunk_store_limit
self.assertEqual(df.drop_duplicates(subset=['c1', 'c3']).tiles().chunks[0].op.method, 'subset_tree')
with option_context({'chunk_store_limit': 5}):
# subset size greater than chunk_store_limit
self.assertEqual(df.drop_duplicates(subset=['c1', 'c3']).tiles().chunks[0].op.method, 'tree')
self.assertEqual(df.drop_duplicates(subset=['c1', 'c7']).tiles().chunks[0].op.method, 'tree')
self.assertEqual(df['c7'].drop_duplicates().tiles().chunks[0].op.method, 'tree')
s = df['c7']
with self.assertRaises(ValueError):
s.drop_duplicates(method='unknown')
def testMemoryUsage(self):
dtypes = ['int64', 'float64', 'complex128', 'object', 'bool']
data = dict([(t, np.ones(shape=500).astype(t))
for t in dtypes])
raw = pd.DataFrame(data)
df = from_pandas_df(raw, chunk_size=(500, 2))
r = df.memory_usage().tiles()
self.assertIsInstance(r, SERIES_TYPE)
self.assertEqual(r.shape, (6,))
self.assertEqual(len(r.chunks), 3)
self.assertIsNone(r.chunks[0].op.stage)
df = from_pandas_df(raw, chunk_size=(100, 3))
r = df.memory_usage(index=True).tiles()
self.assertIsInstance(r, SERIES_TYPE)
self.assertEqual(r.shape, (6,))
self.assertEqual(len(r.chunks), 2)
self.assertEqual(r.chunks[0].op.stage, OperandStage.reduce)
r = df.memory_usage(index=False).tiles()
self.assertIsInstance(r, SERIES_TYPE)
self.assertEqual(r.shape, (5,))
self.assertEqual(len(r.chunks), 2)
self.assertEqual(r.chunks[0].op.stage, OperandStage.reduce)
raw = pd.Series(np.ones(shape=500).astype('object'), name='s')
series = from_pandas_series(raw)
r = series.memory_usage().tiles()
self.assertIsInstance(r, TENSOR_TYPE)
self.assertEqual(r.shape, ())
self.assertEqual(len(r.chunks), 1)
self.assertIsNone(r.chunks[0].op.stage)
series = from_pandas_series(raw, chunk_size=100)
r = series.memory_usage().tiles()
self.assertIsInstance(r, TENSOR_TYPE)
self.assertEqual(r.shape, ())
self.assertEqual(len(r.chunks), 1)
self.assertEqual(r.chunks[0].op.stage, OperandStage.reduce)
```

- avg_line_length: 45.580466
- max_line_length: 108
- alphanum_fraction: 0.609087
- quality signals (schema order, qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal): 5712 | 41068 | 4.266282 | 0.06215 | 0.171119 | 0.097173 | 0.05507 | 0.81419 | 0.757848 | 0.716238 | 0.68501 | 0.640937 | 0.60737 | 0 | 0.034029 | 0.237922 | 41068 | 900 | 109 | 45.631111 | 0.744608 | 0.033262 | 0 | 0.512857 | 0 | 0 | 0.013238 | 0 | 0 | 0 | 0 | 0 | 0.587143 | 1 | 0.024286 | false | 0 | 0.021429 | 0 | 0.048571 | 0
- filter flags (schema order, qsc_code_num_words through qsc_codepython_frac_lines_print): 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0
- hits: 5
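This row's duplicate n-gram signals are high (qsc_code_frac_chars_dupe_5grams_quality_signal = 0.81419, falling to 0.60737 at 10-grams), which fits a test suite built from repeated assertion patterns. A sketch of one common definition of this signal family, the fraction of word characters covered by n-grams that occur more than once; the exact tokenization behind these columns is an assumption here:

```python
from collections import Counter

def frac_chars_dupe_ngrams(content: str, n: int) -> float:
    """Fraction of word characters inside n-grams that appear 2+ times."""
    words = content.split()
    if len(words) < n:
        return 0.0
    ngrams = [tuple(words[i:i + n]) for i in range(len(words) - n + 1)]
    counts = Counter(ngrams)
    # Mark every word position covered by at least one duplicated n-gram.
    covered = [False] * len(words)
    for i, gram in enumerate(ngrams):
        if counts[gram] > 1:
            for j in range(i, i + n):
                covered[j] = True
    total = sum(len(w) for w in words)
    dup = sum(len(w) for w, hit in zip(words, covered) if hit)
    return dup / total if total else 0.0
```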
Row 4:
- hexsha: fc5815b3f566fa0122a0cd1d854c9fb573eb328b
- size: 149
- ext: py
- lang: Python
- max_stars_repo_path: Python/Set .symmetric_difference() Operation.py
- max_stars_repo_name: vipmunot/HackerRank
- max_stars_repo_head_hexsha: 39d1beb97545592da5cec6e4b9ae0fce32f5ec39
- max_stars_repo_licenses: ["MIT"]
- max_stars_count: null
- max_stars_repo_stars_event_min_datetime: null
- max_stars_repo_stars_event_max_datetime: null
- max_issues_repo_path: Python/Set .symmetric_difference() Operation.py
- max_issues_repo_name: vipmunot/HackerRank
- max_issues_repo_head_hexsha: 39d1beb97545592da5cec6e4b9ae0fce32f5ec39
- max_issues_repo_licenses: ["MIT"]
- max_issues_count: null
- max_issues_repo_issues_event_min_datetime: null
- max_issues_repo_issues_event_max_datetime: null
- max_forks_repo_path: Python/Set .symmetric_difference() Operation.py
- max_forks_repo_name: vipmunot/HackerRank
- max_forks_repo_head_hexsha: 39d1beb97545592da5cec6e4b9ae0fce32f5ec39
- max_forks_repo_licenses: ["MIT"]
- max_forks_count: null
- max_forks_repo_forks_event_min_datetime: null
- max_forks_repo_forks_event_max_datetime: null
- content:

```python
n = int(input())
s = set(map(int,input().split()))
m = int(input())
t = set(map(int,input().split()))
u = s.symmetric_difference(t)
print(len(u))
```

- avg_line_length: 24.833333
- max_line_length: 33
- alphanum_fraction: 0.604027
- quality signals (schema order, qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal): 26 | 149 | 3.423077 | 0.538462 | 0.359551 | 0.202247 | 0.314607 | 0.426966 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.127517 | 149 | 6 | 34 | 24.833333 | 0.684615 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.166667
- filter flags (schema order, qsc_code_num_words through qsc_codepython_frac_lines_print): 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0
- hits: 5
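The snippet reads two integer sets from stdin and prints the size of their symmetric difference. One way to exercise it with stdin prepared in-process: the sets {2, 4, 5, 9} and {2, 4, 11, 12} differ in {5, 9, 11, 12}, so it prints 4:

```python
import io
import sys

# Feed sample input through stdin: set sizes followed by the elements.
sys.stdin = io.StringIO("4\n2 4 5 9\n4\n2 4 11 12\n")

n = int(input())
s = set(map(int, input().split()))
m = int(input())
t = set(map(int, input().split()))
print(len(s.symmetric_difference(t)))  # -> 4
```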
Row 5:
- hexsha: fc743b488ee31b0d623d136d8e0a3e57b0e84e05
- size: 368
- ext: py
- lang: Python
- max_stars_repo_path: tests/fixtures.py
- max_stars_repo_name: mjoblin/neotiles
- max_stars_repo_head_hexsha: f7370aefb74d4d57692d0e8a302c8c95f817a61a
- max_stars_repo_licenses: ["MIT"]
- max_stars_count: 1
- max_stars_repo_stars_event_min_datetime: 2021-04-25T19:27:12.000Z
- max_stars_repo_stars_event_max_datetime: 2021-04-25T19:27:12.000Z
- max_issues_repo_path: tests/fixtures.py
- max_issues_repo_name: mjoblin/neotiles
- max_issues_repo_head_hexsha: f7370aefb74d4d57692d0e8a302c8c95f817a61a
- max_issues_repo_licenses: ["MIT"]
- max_issues_count: 1
- max_issues_repo_issues_event_min_datetime: 2016-12-27T00:35:51.000Z
- max_issues_repo_issues_event_max_datetime: 2017-01-02T06:30:04.000Z
- max_forks_repo_path: tests/fixtures.py
- max_forks_repo_name: mjoblin/neotiles
- max_forks_repo_head_hexsha: f7370aefb74d4d57692d0e8a302c8c95f817a61a
- max_forks_repo_licenses: ["MIT"]
- max_forks_count: null
- max_forks_repo_forks_event_min_datetime: null
- max_forks_repo_forks_event_max_datetime: null
- content:

```python
import pytest

from neotiles import TileManager, Tile
from neotiles.matrixes import NTNeoPixelMatrix, NTRGBMatrix


@pytest.fixture
def default_tile():
    return Tile()


@pytest.fixture
def manager_neopixel():
    return TileManager(NTNeoPixelMatrix(size=(10, 5), led_pin=18))


@pytest.fixture
def manager_rgb():
    return TileManager(NTRGBMatrix(chain_length=1))
```

- avg_line_length: 18.4
- max_line_length: 66
- alphanum_fraction: 0.774457
- quality signals (schema order, qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal): 45 | 368 | 6.222222 | 0.555556 | 0.139286 | 0.171429 | 0.164286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.01875 | 0.130435 | 368 | 19 | 67 | 19.368421 | 0.85625 | 0 | 0 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0.25 | 0.25 | 0.75 | 0
- filter flags (schema order, qsc_code_num_words through qsc_codepython_frac_lines_print): 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0
- effective: 0
- hits: 5
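These pytest fixtures are consumed by name: a test that declares `default_tile` or `manager_neopixel` as a parameter receives the constructed object. A hypothetical test (not part of this row) relying only on the types the fixtures themselves construct:

```python
from neotiles import Tile, TileManager

def test_fixture_types(default_tile, manager_neopixel, manager_rgb):
    # pytest matches parameter names to the fixture functions above
    # and injects their return values.
    assert isinstance(default_tile, Tile)
    assert isinstance(manager_neopixel, TileManager)
    assert isinstance(manager_rgb, TileManager)
```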
Row 6:
- hexsha: fc812c465b1da245529fc74f5339ff8a02e285e8
- size: 111
- ext: py
- lang: Python
- max_stars_repo_path: web-server/develop.py
- max_stars_repo_name: siret/prankweb
- max_stars_repo_head_hexsha: e36f1ca5cfbce2f8aa8dc89c04add0b4c550c266
- max_stars_repo_licenses: ["Apache-2.0"]
- max_stars_count: 2
- max_stars_repo_stars_event_min_datetime: 2019-10-15T11:09:30.000Z
- max_stars_repo_stars_event_max_datetime: 2019-10-15T20:31:52.000Z
- max_issues_repo_path: web-server/develop.py
- max_issues_repo_name: siret/p2rank-web
- max_issues_repo_head_hexsha: e36f1ca5cfbce2f8aa8dc89c04add0b4c550c266
- max_issues_repo_licenses: ["Apache-2.0"]
- max_issues_count: 23
- max_issues_repo_issues_event_min_datetime: 2019-09-25T10:25:16.000Z
- max_issues_repo_issues_event_max_datetime: 2020-10-06T12:49:25.000Z
- max_forks_repo_path: web-server/develop.py
- max_forks_repo_name: siret/prankweb
- max_forks_repo_head_hexsha: e36f1ca5cfbce2f8aa8dc89c04add0b4c550c266
- max_forks_repo_licenses: ["Apache-2.0"]
- max_forks_count: null
- max_forks_repo_forks_event_min_datetime: null
- max_forks_repo_forks_event_max_datetime: null
- content:

```python
#!/usr/bin/env python3
from src import create_app

if __name__ == '__main__':
    create_app().run(debug=True)
```

- avg_line_length: 18.5
- max_line_length: 32
- alphanum_fraction: 0.711712
- quality signals (schema order, qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal): 17 | 111 | 4.058824 | 0.882353 | 0.26087 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010526 | 0.144144 | 111 | 5 | 33 | 22.2 | 0.715789 | 0.189189 | 0 | 0 | 0 | 0 | 0.089888 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0
- filter flags (schema order, qsc_code_num_words through qsc_codepython_frac_lines_print): 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0
- effective: 0
- hits: 5
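develop.py expects an application factory `create_app` exported by the local `src` package and starts Flask's debug server when executed directly. The actual `src.create_app` is not shown in this row; a minimal hypothetical factory of that shape:

```python
# src/__init__.py (hypothetical; the real prankweb code is not shown here)
from flask import Flask

def create_app() -> Flask:
    app = Flask(__name__)

    @app.route("/")
    def index():
        return "ok"

    return app
```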
Row 7:
- hexsha: fc876e9f324e30f5f63e904d63aa62cd8475a543
- size: 36
- ext: py
- lang: Python
- max_stars_repo_path: image-segmentation/mask_rcnn/__init__.py
- max_stars_repo_name: swcho84/image-segmentation
- max_stars_repo_head_hexsha: ef9b9b3d832e9efe6f43522cc5ca0e17279d6608
- max_stars_repo_licenses: ["MIT"]
- max_stars_count: 64
- max_stars_repo_stars_event_min_datetime: 2019-03-09T08:55:11.000Z
- max_stars_repo_stars_event_max_datetime: 2022-01-27T07:08:02.000Z
- max_issues_repo_path: image-segmentation/mask_rcnn/__init__.py
- max_issues_repo_name: swcho84/image-segmentation
- max_issues_repo_head_hexsha: ef9b9b3d832e9efe6f43522cc5ca0e17279d6608
- max_issues_repo_licenses: ["MIT"]
- max_issues_count: 2
- max_issues_repo_issues_event_min_datetime: 2019-11-07T11:49:13.000Z
- max_issues_repo_issues_event_max_datetime: 2020-01-16T14:39:03.000Z
- max_forks_repo_path: image-segmentation/mask_rcnn/__init__.py
- max_forks_repo_name: swcho84/image-segmentation
- max_forks_repo_head_hexsha: ef9b9b3d832e9efe6f43522cc5ca0e17279d6608
- max_forks_repo_licenses: ["MIT"]
- max_forks_count: 21
- max_forks_repo_forks_event_min_datetime: 2019-03-09T08:56:35.000Z
- max_forks_repo_forks_event_max_datetime: 2022-03-02T12:24:43.000Z
- content:

```python
from .builder import build_maskrcnn
```

- avg_line_length: 18
- max_line_length: 35
- alphanum_fraction: 0.861111
- quality signals (schema order, qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal): 5 | 36 | 6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.111111 | 36 | 1 | 36 | 36 | 0.9375 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0
- filter flags (schema order, qsc_code_num_words through qsc_codepython_frac_lines_print): 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0
- effective: 0
- hits: 5
Row 8:
- hexsha: fc93317c10c58f89125f246a2c93a128cc925764
- size: 90
- ext: py
- lang: Python
- max_stars_repo_path: ptulsconv/pdf/recordist_log.py
- max_stars_repo_name: iluvcapra/ptulsconv
- max_stars_repo_head_hexsha: 66a71283d56d3664719def33504a07ad976187ff
- max_stars_repo_licenses: ["MIT"]
- max_stars_count: 3
- max_stars_repo_stars_event_min_datetime: 2020-07-30T10:54:45.000Z
- max_stars_repo_stars_event_max_datetime: 2022-01-20T13:20:00.000Z
- max_issues_repo_path: ptulsconv/pdf/recordist_log.py
- max_issues_repo_name: iluvcapra/ptulsconv
- max_issues_repo_head_hexsha: 66a71283d56d3664719def33504a07ad976187ff
- max_issues_repo_licenses: ["MIT"]
- max_issues_count: 4
- max_issues_repo_issues_event_min_datetime: 2020-10-19T04:58:31.000Z
- max_issues_repo_issues_event_max_datetime: 2022-01-17T01:12:03.000Z
- max_forks_repo_path: ptulsconv/pdf/recordist_log.py
- max_forks_repo_name: iluvcapra/ptulsconv
- max_forks_repo_head_hexsha: 66a71283d56d3664719def33504a07ad976187ff
- max_forks_repo_licenses: ["MIT"]
- max_forks_count: null
- max_forks_repo_forks_event_min_datetime: null
- max_forks_repo_forks_event_max_datetime: null
- content:

```python
# TODO: Complete Recordist Log


def output_report(records):
    # order by start
    pass
```

- avg_line_length: 15
- max_line_length: 30
- alphanum_fraction: 0.7
- quality signals (schema order, qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal): 12 | 90 | 5.166667 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.233333 | 90 | 6 | 31 | 15 | 0.898551 | 0.477778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.166667 | 0 | 1 | 0.5 | false | 0.5 | 0 | 0 | 0.5 | 0
- filter flags (schema order, qsc_code_num_words through qsc_codepython_frac_lines_print): 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0
- effective: 0
- hits: 5
Row 9:
- hexsha: 5dd69058dba31ba467cc9d38dc6d5ef8566a6efb
- size: 48663
- ext: py
- lang: Python
- max_stars_repo_path: tccli/services/postgres/postgres_client.py
- max_stars_repo_name: hapsyou/tencentcloud-cli-intl-en
- max_stars_repo_head_hexsha: fa8ba71164484f9a2be4b983080a1de08606c0b0
- max_stars_repo_licenses: ["Apache-2.0"]
- max_stars_count: null
- max_stars_repo_stars_event_min_datetime: null
- max_stars_repo_stars_event_max_datetime: null
- max_issues_repo_path: tccli/services/postgres/postgres_client.py
- max_issues_repo_name: hapsyou/tencentcloud-cli-intl-en
- max_issues_repo_head_hexsha: fa8ba71164484f9a2be4b983080a1de08606c0b0
- max_issues_repo_licenses: ["Apache-2.0"]
- max_issues_count: null
- max_issues_repo_issues_event_min_datetime: null
- max_issues_repo_issues_event_max_datetime: null
- max_forks_repo_path: tccli/services/postgres/postgres_client.py
- max_forks_repo_name: hapsyou/tencentcloud-cli-intl-en
- max_forks_repo_head_hexsha: fa8ba71164484f9a2be4b983080a1de08606c0b0
- max_forks_repo_licenses: ["Apache-2.0"]
- max_forks_count: null
- max_forks_repo_forks_event_min_datetime: null
- max_forks_repo_forks_event_max_datetime: null
- content:

```python
# -*- coding: utf-8 -*-
import os
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli.nice_command import NiceCommand
import tccli.error_msg as ErrorMsg
import tccli.help_template as HelpTemplate
from tccli import __version__
from tccli.utils import Utils
from tccli.configure import Configure
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.postgres.v20170312 import postgres_client as postgres_client_v20170312
from tencentcloud.postgres.v20170312 import models as models_v20170312
from tccli.services.postgres import v20170312
from tccli.services.postgres.v20170312 import help as v20170312_help
def doDescribeOrders(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("DescribeOrders", g_param[OptionsDefine.Version])
return
param = {
"DealNames": Utils.try_to_json(argv, "--DealNames"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeOrdersRequest()
model.from_json_string(json.dumps(param))
rsp = client.DescribeOrders(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDestroyDBInstance(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("DestroyDBInstance", g_param[OptionsDefine.Version])
return
param = {
"DBInstanceId": argv.get("--DBInstanceId"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DestroyDBInstanceRequest()
model.from_json_string(json.dumps(param))
rsp = client.DestroyDBInstance(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBBackups(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("DescribeDBBackups", g_param[OptionsDefine.Version])
return
param = {
"DBInstanceId": argv.get("--DBInstanceId"),
"Type": Utils.try_to_json(argv, "--Type"),
"StartTime": argv.get("--StartTime"),
"EndTime": argv.get("--EndTime"),
"Limit": Utils.try_to_json(argv, "--Limit"),
"Offset": Utils.try_to_json(argv, "--Offset"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDBBackupsRequest()
model.from_json_string(json.dumps(param))
rsp = client.DescribeDBBackups(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doResetAccountPassword(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("ResetAccountPassword", g_param[OptionsDefine.Version])
return
param = {
"DBInstanceId": argv.get("--DBInstanceId"),
"UserName": argv.get("--UserName"),
"Password": argv.get("--Password"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ResetAccountPasswordRequest()
model.from_json_string(json.dumps(param))
rsp = client.ResetAccountPassword(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBErrlogs(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("DescribeDBErrlogs", g_param[OptionsDefine.Version])
return
param = {
"DBInstanceId": argv.get("--DBInstanceId"),
"StartTime": argv.get("--StartTime"),
"EndTime": argv.get("--EndTime"),
"DatabaseName": argv.get("--DatabaseName"),
"SearchKeys": Utils.try_to_json(argv, "--SearchKeys"),
"Limit": Utils.try_to_json(argv, "--Limit"),
"Offset": Utils.try_to_json(argv, "--Offset"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDBErrlogsRequest()
model.from_json_string(json.dumps(param))
rsp = client.DescribeDBErrlogs(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRestartDBInstance(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("RestartDBInstance", g_param[OptionsDefine.Version])
return
param = {
"DBInstanceId": argv.get("--DBInstanceId"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.RestartDBInstanceRequest()
model.from_json_string(json.dumps(param))
rsp = client.RestartDBInstance(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doInquiryPriceCreateDBInstances(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("InquiryPriceCreateDBInstances", g_param[OptionsDefine.Version])
return
param = {
"Zone": argv.get("--Zone"),
"SpecCode": argv.get("--SpecCode"),
"Storage": Utils.try_to_json(argv, "--Storage"),
"InstanceCount": Utils.try_to_json(argv, "--InstanceCount"),
"Period": Utils.try_to_json(argv, "--Period"),
"Pid": Utils.try_to_json(argv, "--Pid"),
"InstanceChargeType": argv.get("--InstanceChargeType"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.InquiryPriceCreateDBInstancesRequest()
model.from_json_string(json.dumps(param))
rsp = client.InquiryPriceCreateDBInstances(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doOpenDBExtranetAccess(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("OpenDBExtranetAccess", g_param[OptionsDefine.Version])
return
param = {
"DBInstanceId": argv.get("--DBInstanceId"),
"IsIpv6": Utils.try_to_json(argv, "--IsIpv6"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.OpenDBExtranetAccessRequest()
model.from_json_string(json.dumps(param))
rsp = client.OpenDBExtranetAccess(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyDBInstancesProject(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("ModifyDBInstancesProject", g_param[OptionsDefine.Version])
return
param = {
"DBInstanceIdSet": Utils.try_to_json(argv, "--DBInstanceIdSet"),
"ProjectId": argv.get("--ProjectId"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyDBInstancesProjectRequest()
model.from_json_string(json.dumps(param))
rsp = client.ModifyDBInstancesProject(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyAccountRemark(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("ModifyAccountRemark", g_param[OptionsDefine.Version])
return
param = {
"DBInstanceId": argv.get("--DBInstanceId"),
"UserName": argv.get("--UserName"),
"Remark": argv.get("--Remark"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyAccountRemarkRequest()
model.from_json_string(json.dumps(param))
rsp = client.ModifyAccountRemark(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBXlogs(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("DescribeDBXlogs", g_param[OptionsDefine.Version])
return
param = {
"DBInstanceId": argv.get("--DBInstanceId"),
"StartTime": argv.get("--StartTime"),
"EndTime": argv.get("--EndTime"),
"Offset": Utils.try_to_json(argv, "--Offset"),
"Limit": Utils.try_to_json(argv, "--Limit"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDBXlogsRequest()
model.from_json_string(json.dumps(param))
rsp = client.DescribeDBXlogs(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doSetAutoRenewFlag(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("SetAutoRenewFlag", g_param[OptionsDefine.Version])
return
param = {
"DBInstanceIdSet": Utils.try_to_json(argv, "--DBInstanceIdSet"),
"AutoRenewFlag": Utils.try_to_json(argv, "--AutoRenewFlag"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.SetAutoRenewFlagRequest()
model.from_json_string(json.dumps(param))
rsp = client.SetAutoRenewFlag(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBInstanceAttribute(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("DescribeDBInstanceAttribute", g_param[OptionsDefine.Version])
return
param = {
"DBInstanceId": argv.get("--DBInstanceId"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDBInstanceAttributeRequest()
model.from_json_string(json.dumps(param))
rsp = client.DescribeDBInstanceAttribute(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyDBInstanceName(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("ModifyDBInstanceName", g_param[OptionsDefine.Version])
return
param = {
"DBInstanceId": argv.get("--DBInstanceId"),
"InstanceName": argv.get("--InstanceName"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyDBInstanceNameRequest()
model.from_json_string(json.dumps(param))
rsp = client.ModifyDBInstanceName(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateDBInstances(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("CreateDBInstances", g_param[OptionsDefine.Version])
return
param = {
"SpecCode": argv.get("--SpecCode"),
"DBVersion": argv.get("--DBVersion"),
"Storage": Utils.try_to_json(argv, "--Storage"),
"InstanceCount": Utils.try_to_json(argv, "--InstanceCount"),
"Period": Utils.try_to_json(argv, "--Period"),
"Zone": argv.get("--Zone"),
"ProjectId": Utils.try_to_json(argv, "--ProjectId"),
"InstanceChargeType": argv.get("--InstanceChargeType"),
"AutoVoucher": Utils.try_to_json(argv, "--AutoVoucher"),
"VoucherIds": Utils.try_to_json(argv, "--VoucherIds"),
"VpcId": argv.get("--VpcId"),
"SubnetId": argv.get("--SubnetId"),
"AutoRenewFlag": Utils.try_to_json(argv, "--AutoRenewFlag"),
"ActivityId": Utils.try_to_json(argv, "--ActivityId"),
"Name": argv.get("--Name"),
"NeedSupportIpv6": Utils.try_to_json(argv, "--NeedSupportIpv6"),
"TagList": Utils.try_to_json(argv, "--TagList"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateDBInstancesRequest()
model.from_json_string(json.dumps(param))
rsp = client.CreateDBInstances(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRenewInstance(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("RenewInstance", g_param[OptionsDefine.Version])
return
param = {
"DBInstanceId": argv.get("--DBInstanceId"),
"Period": Utils.try_to_json(argv, "--Period"),
"AutoVoucher": Utils.try_to_json(argv, "--AutoVoucher"),
"VoucherIds": Utils.try_to_json(argv, "--VoucherIds"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.RenewInstanceRequest()
model.from_json_string(json.dumps(param))
rsp = client.RenewInstance(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBInstances(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("DescribeDBInstances", g_param[OptionsDefine.Version])
return
param = {
"Filters": Utils.try_to_json(argv, "--Filters"),
"Limit": Utils.try_to_json(argv, "--Limit"),
"Offset": Utils.try_to_json(argv, "--Offset"),
"OrderBy": argv.get("--OrderBy"),
"OrderByType": argv.get("--OrderByType"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDBInstancesRequest()
model.from_json_string(json.dumps(param))
rsp = client.DescribeDBInstances(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeZones(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("DescribeZones", g_param[OptionsDefine.Version])
return
param = {
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeZonesRequest()
model.from_json_string(json.dumps(param))
rsp = client.DescribeZones(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doInitDBInstances(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("InitDBInstances", g_param[OptionsDefine.Version])
return
param = {
"DBInstanceIdSet": Utils.try_to_json(argv, "--DBInstanceIdSet"),
"AdminName": argv.get("--AdminName"),
"AdminPassword": argv.get("--AdminPassword"),
"Charset": argv.get("--Charset"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.InitDBInstancesRequest()
model.from_json_string(json.dumps(param))
rsp = client.InitDBInstances(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doInquiryPriceUpgradeDBInstance(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("InquiryPriceUpgradeDBInstance", g_param[OptionsDefine.Version])
return
param = {
"Storage": Utils.try_to_json(argv, "--Storage"),
"Memory": Utils.try_to_json(argv, "--Memory"),
"DBInstanceId": argv.get("--DBInstanceId"),
"InstanceChargeType": argv.get("--InstanceChargeType"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.InquiryPriceUpgradeDBInstanceRequest()
model.from_json_string(json.dumps(param))
rsp = client.InquiryPriceUpgradeDBInstance(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRegions(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("DescribeRegions", g_param[OptionsDefine.Version])
return
param = {
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeRegionsRequest()
model.from_json_string(json.dumps(param))
rsp = client.DescribeRegions(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doInquiryPriceRenewDBInstance(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("InquiryPriceRenewDBInstance", g_param[OptionsDefine.Version])
return
param = {
"DBInstanceId": argv.get("--DBInstanceId"),
"Period": Utils.try_to_json(argv, "--Period"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.InquiryPriceRenewDBInstanceRequest()
model.from_json_string(json.dumps(param))
rsp = client.InquiryPriceRenewDBInstance(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCloseDBExtranetAccess(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("CloseDBExtranetAccess", g_param[OptionsDefine.Version])
return
param = {
"DBInstanceId": argv.get("--DBInstanceId"),
"IsIpv6": Utils.try_to_json(argv, "--IsIpv6"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CloseDBExtranetAccessRequest()
model.from_json_string(json.dumps(param))
rsp = client.CloseDBExtranetAccess(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAccounts(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("DescribeAccounts", g_param[OptionsDefine.Version])
return
param = {
"DBInstanceId": argv.get("--DBInstanceId"),
"Limit": Utils.try_to_json(argv, "--Limit"),
"Offset": Utils.try_to_json(argv, "--Offset"),
"OrderBy": argv.get("--OrderBy"),
"OrderByType": argv.get("--OrderByType"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeAccountsRequest()
model.from_json_string(json.dumps(param))
rsp = client.DescribeAccounts(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDatabases(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("DescribeDatabases", g_param[OptionsDefine.Version])
return
param = {
"DBInstanceId": argv.get("--DBInstanceId"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDatabasesRequest()
model.from_json_string(json.dumps(param))
rsp = client.DescribeDatabases(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUpgradeDBInstance(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("UpgradeDBInstance", g_param[OptionsDefine.Version])
return
param = {
"Memory": Utils.try_to_json(argv, "--Memory"),
"Storage": Utils.try_to_json(argv, "--Storage"),
"DBInstanceId": argv.get("--DBInstanceId"),
"AutoVoucher": Utils.try_to_json(argv, "--AutoVoucher"),
"VoucherIds": Utils.try_to_json(argv, "--VoucherIds"),
"ActivityId": Utils.try_to_json(argv, "--ActivityId"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.UpgradeDBInstanceRequest()
model.from_json_string(json.dumps(param))
rsp = client.UpgradeDBInstance(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProductConfig(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("DescribeProductConfig", g_param[OptionsDefine.Version])
return
param = {
"Zone": argv.get("--Zone"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeProductConfigRequest()
model.from_json_string(json.dumps(param))
rsp = client.DescribeProductConfig(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBSlowlogs(argv, arglist):
g_param = parse_global_arg(argv)
if "help" in argv:
show_help("DescribeDBSlowlogs", g_param[OptionsDefine.Version])
return
param = {
"DBInstanceId": argv.get("--DBInstanceId"),
"StartTime": argv.get("--StartTime"),
"EndTime": argv.get("--EndTime"),
"DatabaseName": argv.get("--DatabaseName"),
"OrderBy": argv.get("--OrderBy"),
"OrderByType": argv.get("--OrderByType"),
"Limit": Utils.try_to_json(argv, "--Limit"),
"Offset": Utils.try_to_json(argv, "--Offset"),
}
cred = credential.Credential(g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey])
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile)
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.PostgresClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDBSlowlogsRequest()
model.from_json_string(json.dumps(param))
rsp = client.DescribeDBSlowlogs(model)
result = rsp.to_json_string()
jsonobj = None
try:
jsonobj = json.loads(result)
except TypeError as e:
jsonobj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", jsonobj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
CLIENT_MAP = {
"v20170312": postgres_client_v20170312,
}
MODELS_MAP = {
"v20170312": models_v20170312,
}
ACTION_MAP = {
"DescribeOrders": doDescribeOrders,
"DestroyDBInstance": doDestroyDBInstance,
"DescribeDBBackups": doDescribeDBBackups,
"ResetAccountPassword": doResetAccountPassword,
"DescribeDBErrlogs": doDescribeDBErrlogs,
"RestartDBInstance": doRestartDBInstance,
"InquiryPriceCreateDBInstances": doInquiryPriceCreateDBInstances,
"OpenDBExtranetAccess": doOpenDBExtranetAccess,
"ModifyDBInstancesProject": doModifyDBInstancesProject,
"ModifyAccountRemark": doModifyAccountRemark,
"DescribeDBXlogs": doDescribeDBXlogs,
"SetAutoRenewFlag": doSetAutoRenewFlag,
"DescribeDBInstanceAttribute": doDescribeDBInstanceAttribute,
"ModifyDBInstanceName": doModifyDBInstanceName,
"CreateDBInstances": doCreateDBInstances,
"RenewInstance": doRenewInstance,
"DescribeDBInstances": doDescribeDBInstances,
"DescribeZones": doDescribeZones,
"InitDBInstances": doInitDBInstances,
"InquiryPriceUpgradeDBInstance": doInquiryPriceUpgradeDBInstance,
"DescribeRegions": doDescribeRegions,
"InquiryPriceRenewDBInstance": doInquiryPriceRenewDBInstance,
"CloseDBExtranetAccess": doCloseDBExtranetAccess,
"DescribeAccounts": doDescribeAccounts,
"DescribeDatabases": doDescribeDatabases,
"UpgradeDBInstance": doUpgradeDBInstance,
"DescribeProductConfig": doDescribeProductConfig,
"DescribeDBSlowlogs": doDescribeDBSlowlogs,
}
AVAILABLE_VERSION_LIST = [
v20170312.version,
]
AVAILABLE_VERSIONS = {
'v' + v20170312.version.replace('-', ''): {"help": v20170312_help.INFO,"desc": v20170312_help.DESC},
}
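# How dispatch works: the parsed action name is looked up in ACTION_MAP and
# the matching do* callback is invoked with the argument dict.  A minimal
# sketch of that flow (the real entry point lives in the CLI framework and
# is an assumption here):
#
#     args = {"--DBInstanceId": "postgres-xxxx", "help": True}
#     ACTION_MAP["DescribeDBInstanceAttribute"](args, [])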
def postgres_action(argv, arglist):
if "help" in argv:
versions = sorted(AVAILABLE_VERSIONS.keys())
opt_v = "--" + OptionsDefine.Version
version = versions[-1]
if opt_v in argv:
version = 'v' + argv[opt_v].replace('-', '')
if version not in versions:
print("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
return
action_str = ""
docs = AVAILABLE_VERSIONS[version]["help"]
desc = AVAILABLE_VERSIONS[version]["desc"]
for action, info in docs.items():
action_str += " %s\n" % action
action_str += Utils.split_str(" ", info["desc"], 120)
helpstr = HelpTemplate.SERVICE % {"name": "postgres", "desc": desc, "actions": action_str}
print(helpstr)
else:
print(ErrorMsg.FEW_ARG)
def version_merge():
help_merge = {}
for v in AVAILABLE_VERSIONS:
for action in AVAILABLE_VERSIONS[v]["help"]:
if action not in help_merge:
help_merge[action] = {}
help_merge[action]["cb"] = ACTION_MAP[action]
help_merge[action]["params"] = []
for param in AVAILABLE_VERSIONS[v]["help"][action]["params"]:
if param["name"] not in help_merge[action]["params"]:
help_merge[action]["params"].append(param["name"])
return help_merge
def register_arg(command):
cmd = NiceCommand("postgres", postgres_action)
command.reg_cmd(cmd)
cmd.reg_opt("help", "bool")
cmd.reg_opt(OptionsDefine.Version, "string")
help_merge = version_merge()
for actionName, action in help_merge.items():
c = NiceCommand(actionName, action["cb"])
cmd.reg_cmd(c)
c.reg_opt("help", "bool")
for param in action["params"]:
c.reg_opt("--" + param, "string")
for opt in OptionsDefine.ACTION_GLOBAL_OPT:
stropt = "--" + opt
c.reg_opt(stropt, "string")
def parse_global_arg(argv):
params = {}
for opt in OptionsDefine.ACTION_GLOBAL_OPT:
stropt = "--" + opt
if stropt in argv:
params[opt] = argv[stropt]
else:
params[opt] = None
if params[OptionsDefine.Version]:
params[OptionsDefine.Version] = "v" + params[OptionsDefine.Version].replace('-', '')
config_handle = Configure()
profile = config_handle.profile
if ("--" + OptionsDefine.Profile) in argv:
profile = argv[("--" + OptionsDefine.Profile)]
is_conexist, conf_path = config_handle._profile_existed(profile + "." + config_handle.configure)
is_creexist, cred_path = config_handle._profile_existed(profile + "." + config_handle.credential)
config = {}
cred = {}
if is_conexist:
config = config_handle._load_json_msg(conf_path)
if is_creexist:
cred = config_handle._load_json_msg(cred_path)
if os.environ.get(OptionsDefine.ENV_SECRET_ID):
cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
if os.environ.get(OptionsDefine.ENV_SECRET_KEY):
cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
if os.environ.get(OptionsDefine.ENV_REGION):
config[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)
for param in params.keys():
if param == OptionsDefine.Version:
continue
if params[param] is None:
if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId]:
if param in cred:
params[param] = cred[param]
else:
raise Exception("%s is invalid" % param)
else:
if param in config:
params[param] = config[param]
elif param == OptionsDefine.Region:
raise Exception("%s is invalid" % OptionsDefine.Region)
try:
if params[OptionsDefine.Version] is None:
version = config["postgres"][OptionsDefine.Version]
params[OptionsDefine.Version] = "v" + version.replace('-', '')
if params[OptionsDefine.Endpoint] is None:
params[OptionsDefine.Endpoint] = config["postgres"][OptionsDefine.Endpoint]
except Exception as err:
raise Exception("config file:%s error, %s" % (conf_path, str(err)))
versions = sorted(AVAILABLE_VERSIONS.keys())
if params[OptionsDefine.Version] not in versions:
raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
return params
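# Precedence implemented by parse_global_arg, highest first:
#   1. explicit command-line options (--SecretId, --SecretKey, --Region, ...)
#   2. environment variables named by OptionsDefine.ENV_SECRET_ID /
#      ENV_SECRET_KEY / ENV_REGION
#   3. the profile's *.credential and *.configure files
# Version and Endpoint additionally fall back to the "postgres" section of
# the configure file.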
def show_help(action, version):
docs = AVAILABLE_VERSIONS[version]["help"][action]
desc = AVAILABLE_VERSIONS[version]["desc"]
docstr = ""
for param in docs["params"]:
docstr += " %s\n" % ("--" + param["name"])
docstr += Utils.split_str(" ", param["desc"], 120)
helpmsg = HelpTemplate.ACTION % {"name": action, "service": "postgres", "desc": desc, "params": docstr}
print(helpmsg)
def get_actions_info():
config = Configure()
new_version = max(AVAILABLE_VERSIONS.keys())
version = new_version
try:
profile = config._load_json_msg(os.path.join(config.cli_path, "default.configure"))
version = profile["postgres"]["version"]
version = "v" + version.replace('-', '')
except Exception:
pass
if version not in AVAILABLE_VERSIONS.keys():
version = new_version
return AVAILABLE_VERSIONS[version]["help"]
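# Version keys are normalized the same way everywhere in this module: an API
# date such as "2017-03-12" becomes "v20170312".  A quick sketch (the input
# string here is illustrative):
#
#     api_version = "2017-03-12"
#     key = "v" + api_version.replace("-", "")
#     assert key == "v20170312"   # matches the CLIENT_MAP / MODELS_MAP keys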
---- record: materialdjango/widgets.py | Colorless-Green-Ideas/MaterialDjango @ e7a69e968965d25198d90318623a828cff67f5dc | MIT | 3,873 bytes | stars 33, issues 34, forks 9 | quality-signal columns omitted ----
from django.forms.widgets import TextInput, PasswordInput, EmailInput, CheckboxInput, Textarea
from django.utils.html import format_html
# ref https://github.com/django/django/blob/stable/1.8.x/django/forms/widgets.py
# https://github.com/django/django/blob/stable/1.10.x/django/forms/widgets.py
class PaperTextInput(TextInput):
def render(self, name, value, attrs=None, renderer=None):
# Unlike inputs using paper-input-container directly,
# paper-input does not work out of the box with the native form
# element.
if value is None:
html = u"""<paper-input-container label='{0}' >
<label>{0}</label>
<input is="iron-input" name="{0}" class="paper-input-input">
</paper-input-container>"""
return format_html(html, name)
else:
html = u"""<paper-input-container label='{0}' attr-for-value="value">
<label>{0}</label>
<input is="iron-input" name="{0}" value="{1}">
</paper-input-container>"""
return format_html(html, name, value)
class PaperPasswordInput(PasswordInput):
def render(self, name, value, attrs=None, renderer=None):
if value is None:
html = u"""<paper-input-container label='{0}'>
<label>{0}</label>
<input is="iron-input" name="{0}" type="password"/>
</paper-input-container>"""
return format_html(html, name)
else:
html = u"""<paper-input-container label='{0}' type="password" attr-for-value="value">
<label>{0}</label>
<input is="iron-input" name="{0}" type="password" value="{1}"/>
</paper-input-container>"""
return format_html(html, name, value)
class PaperEmailInput(EmailInput):
def __init__(self, attrs=None):
if attrs is not None:
self.attrs = attrs.copy()
else:
self.attrs = {}
def render(self, name, value, attrs=None, renderer=None):
if value is None:
html = u"""<paper-input-container label='{0}' autoValidate>
<label>{0}</label>
<input is="iron-input" name="{1}" type="email">
</paper-input-container>"""
if 'label' in self.attrs:
return html.format(self.attrs['label'], name)
else:
return format_html(html, name, name)
else:
html = u"""<paper-input-container label='{0}' autoValidate attr-for-value="value">
<label>{0}</label>
<input is="iron-input" name="{0}" value="{1}" type="email">
</paper-input-container>"""
return format_html(html, name, value)
class PaperTextArea(Textarea):
def render(self, name, value, attrs=None, renderer=None):
if value is None:
html = u"""<paper-input-container>
<label>{0}</label>
<iron-autogrow-textarea class="paper-input-input" name="{1}" rows=3>
</iron-autogrow-textarea>
</paper-input-container>"""
if 'label' in self.attrs:
return html.format(self.attrs['label'], name)
else:
return format_html(html, name, name)
else:
html = u"""<paper-input-container>
<label>{0}</label>
<iron-autogrow-textarea class="paper-input-input" name="{0}" value="{1}" rows=3>
</iron-autogrow-textarea>
</paper-input-container>"""
return format_html(html, name, value)
class PaperCheckboxInput(CheckboxInput):
def __init__(self, attrs=None, check_test=None):
super(PaperCheckboxInput, self).__init__(attrs)
def render(self, name, value, attrs=None, renderer=None):
html = u"""<paper-checkbox>{0}</paper-checkbox>"""
return format_html(html, name)
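# A minimal usage sketch: wiring these widgets into an ordinary Django form.
# The form and its field names below are hypothetical, not part of this
# module:
#
#     from django import forms
#
#     class ExampleSignupForm(forms.Form):
#         username = forms.CharField(widget=PaperTextInput())
#         password = forms.CharField(widget=PaperPasswordInput())
#         email = forms.EmailField(widget=PaperEmailInput(attrs={'label': 'Email'}))
#         bio = forms.CharField(widget=PaperTextArea(attrs={'label': 'Bio'}))
#         subscribed = forms.BooleanField(widget=PaperCheckboxInput())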
---- record: sql_setup_dir.py | ylin00/seizurevista @ de4f167e217b06372e97fc9ac0553e4384953305 | MIT | 70 bytes | forks 2 | quality-signal columns omitted ----
from seizurecast.postgresql import setup_directory
setup_directory()
---- record: youtubeVideoDownloader/downloadFunc.py | suleymansenyer/youtubeVideoDownloader @ 3490e617cca15b5d7c7d9ad23e19f7edcc458a09 | MIT | 106 bytes | no star/issue/fork data | quality-signal columns omitted ----
from pytube import YouTube
def downloadFunc(url):
return YouTube(url).streams.first().download()
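# Hypothetical usage (the URL is a placeholder):
#     downloadFunc("https://www.youtube.com/watch?v=VIDEO_ID")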
---- record: src/clearskies/autodoc/__init__.py | cmancone/clearskies @ aaa33fef6d03205faf26f123183a46adc1dbef9c | MIT | 49 bytes | stars 4 | quality-signal columns omitted ----
from . import formats, request, response, schema
---- record: ccal/write_json.py | kberkey/ccal @ 92aa8372997dccec2908928f71a11b6c8327d7aa | MIT | 178 bytes | stars 9, issues 8, forks 4 | quality-signal columns omitted ----
from json import dump
def write_json(json_dict, json_file_path, indent=2):
with open(json_file_path, "w") as json_file:
dump(json_dict, json_file, indent=indent)
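# Usage sketch (the dict and file path are illustrative):
#     write_json({"run": 1, "score": 0.93}, "results.json", indent=4)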
---- record: src/core/app/app/db/base_class.py | WeAreBeep/FrontlineUkraine @ 9ace8222af347f8ebbcaf444f375b2736f49cd9f | MIT | 187 bytes | issues 15, forks 11 | quality-signal columns omitted ----
from typing import Any
from sqlalchemy.ext.declarative import as_declarative
@as_declarative()
class Base:
id: Any
@as_declarative()
class FLBase:
id: Any
timestamp: Any
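# Sketch of a model built on FLBase; the table and column definitions below
# are hypothetical, with SQLAlchemy's Column/Integer/DateTime assumed as the
# intended column types:
#
#     from sqlalchemy import Column, DateTime, Integer
#
#     class ExampleEvent(FLBase):
#         __tablename__ = "example_event"
#         id = Column(Integer, primary_key=True)
#         timestamp = Column(DateTime)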
---- record: test.py | partytrumpet/soundboard @ 3e903312096e29019318de64886bdaea71544311 | MIT | 188 bytes | no star/issue/fork data | quality-signal columns omitted ----
from pydub import AudioSegment, playback

song = AudioSegment.from_mp3("./sounds/mkultra.mp3")
playback.play(song)
---- record: tests/test_compound_losses.py | by-liu/SegLossBia @ 9cc639c04084cda9d5fb20ea34699db7e0beaf5c | MIT | 1,672 bytes | stars 18, forks 1 | quality-signal columns omitted ----
import unittest
import torch
from seglossbias.modeling.compound_losses import CrossEntropyWithL1, CrossEntropyWithKL
torch.manual_seed(101)
batch_size = 8
num_classes = 10
width, height = 512, 512
rand_max, rand_min = 2.5, -2.5
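# Note: (rand_max - rand_min) * torch.rand(shape) + rand_min draws each logit
# uniformly from [rand_min, rand_max), i.e. [-2.5, 2.5) with the values above.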
class TestCompoundLoss(unittest.TestCase):
def test_ce_l1(self):
mode = "binary"
loss_func = CrossEntropyWithL1(mode)
logits = (rand_max - rand_min) * torch.rand((batch_size, 1, height, width)) + rand_min
labels = torch.randint(0, 2, (batch_size, height, width))
loss, loss_ce, loss_reg = loss_func(logits, labels)
mode = "multiclass"
loss_func = CrossEntropyWithL1(mode)
logits = (rand_max - rand_min) * torch.rand((batch_size, num_classes, height, width)) + rand_min
labels = torch.randint(0, num_classes, (batch_size, height, width))
loss, loss_ce, loss_reg = loss_func(logits, labels)
self.assertTrue(True)
def test_ce_kl(self):
mode = "binary"
loss_func = CrossEntropyWithKL(mode)
logits = (rand_max - rand_min) * torch.rand((batch_size, 1, height, width)) + rand_min
labels = torch.randint(0, 2, (batch_size, height, width))
loss, loss_ce, loss_reg = loss_func(logits, labels)
mode = "multiclass"
loss_func = CrossEntropyWithKL(mode)
logits = (rand_max - rand_min) * torch.rand((batch_size, num_classes, height, width)) + rand_min
labels = torch.randint(0, num_classes, (batch_size, height, width))
loss, loss_ce, loss_reg = loss_func(logits, labels)
self.assertTrue(True)
if __name__ == "__main__":
unittest.main()
---- record: test/login.py | zaijianada/test001 @ 103e23cbea1debe6278f93f8fd74005d9c7c8db5 | MIT | 34 bytes | no star/issue/fork data | quality-signal columns omitted ----
num1 = 10
num2 = 20
num3 = 300
---- record: app/report/__init__.py | NickPTaylor/imbtools @ 2874c0cdaea311c3559a3634c9f383d81fde8a06 | MIT | 78 bytes | issues 8 | quality-signal columns omitted ----
"""
Blueprint for rota reports.
"""
from .routes import BP as ROTA_REPORT_BP
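# Hedged sketch of registering this blueprint on a Flask app (the app object
# below is an assumption, not part of this package):
#
#     from flask import Flask
#     app = Flask(__name__)
#     app.register_blueprint(ROTA_REPORT_BP)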
---- record: sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py | hengfengli/beam @ 83a8855e5997e0311e6274c03bcb38f94efbf8ef | PSF-2.0, Apache-2.0, BSD-3-Clause | 57,195 bytes | stars 2, issues 7, forks 17 | quality-signal columns omitted ----
"""Generated client library for dataflow version v1b3."""
# NOTE: This file is autogenerated and should not be edited by hand.
from __future__ import absolute_import
from apitools.base.py import base_api
from . import dataflow_v1b3_messages as messages
class DataflowV1b3(base_api.BaseApiClient):
"""Generated client library for service dataflow version v1b3."""
MESSAGES_MODULE = messages
BASE_URL = 'https://dataflow.googleapis.com/'
MTLS_BASE_URL = 'https://dataflow.mtls.googleapis.com/'
_PACKAGE = 'dataflow'
_SCOPES = ['https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/compute', 'https://www.googleapis.com/auth/compute.readonly', 'https://www.googleapis.com/auth/userinfo.email']
_VERSION = 'v1b3'
_CLIENT_ID = '1042881264118.apps.googleusercontent.com'
_CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
_USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
_CLIENT_CLASS_NAME = 'DataflowV1b3'
_URL_VERSION = 'v1b3'
_API_KEY = None
def __init__(self, url='', credentials=None,
get_credentials=True, http=None, model=None,
log_request=False, log_response=False,
credentials_args=None, default_global_params=None,
additional_http_headers=None, response_encoding=None):
"""Create a new dataflow handle."""
url = url or self.BASE_URL
super(DataflowV1b3, self).__init__(
url, credentials=credentials,
get_credentials=get_credentials, http=http, model=model,
log_request=log_request, log_response=log_response,
credentials_args=credentials_args,
default_global_params=default_global_params,
additional_http_headers=additional_http_headers,
response_encoding=response_encoding)
self.projects_jobs_debug = self.ProjectsJobsDebugService(self)
self.projects_jobs_messages = self.ProjectsJobsMessagesService(self)
self.projects_jobs_workItems = self.ProjectsJobsWorkItemsService(self)
self.projects_jobs = self.ProjectsJobsService(self)
self.projects_locations_flexTemplates = self.ProjectsLocationsFlexTemplatesService(self)
self.projects_locations_jobs_debug = self.ProjectsLocationsJobsDebugService(self)
self.projects_locations_jobs_messages = self.ProjectsLocationsJobsMessagesService(self)
self.projects_locations_jobs_snapshots = self.ProjectsLocationsJobsSnapshotsService(self)
self.projects_locations_jobs_stages = self.ProjectsLocationsJobsStagesService(self)
self.projects_locations_jobs_workItems = self.ProjectsLocationsJobsWorkItemsService(self)
self.projects_locations_jobs = self.ProjectsLocationsJobsService(self)
self.projects_locations_snapshots = self.ProjectsLocationsSnapshotsService(self)
self.projects_locations_sql = self.ProjectsLocationsSqlService(self)
self.projects_locations_templates = self.ProjectsLocationsTemplatesService(self)
self.projects_locations = self.ProjectsLocationsService(self)
self.projects_snapshots = self.ProjectsSnapshotsService(self)
self.projects_templates = self.ProjectsTemplatesService(self)
self.projects = self.ProjectsService(self)
class ProjectsJobsDebugService(base_api.BaseApiService):
"""Service class for the projects_jobs_debug resource."""
_NAME = 'projects_jobs_debug'
def __init__(self, client):
super(DataflowV1b3.ProjectsJobsDebugService, self).__init__(client)
self._upload_configs = {
}
def GetConfig(self, request, global_params=None):
r"""Get encoded debug configuration for component. Not cacheable.
Args:
request: (DataflowProjectsJobsDebugGetConfigRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GetDebugConfigResponse) The response message.
"""
config = self.GetMethodConfig('GetConfig')
return self._RunMethod(
config, request, global_params=global_params)
GetConfig.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dataflow.projects.jobs.debug.getConfig',
ordered_params=['projectId', 'jobId'],
path_params=['jobId', 'projectId'],
query_params=[],
relative_path='v1b3/projects/{projectId}/jobs/{jobId}/debug/getConfig',
request_field='getDebugConfigRequest',
request_type_name='DataflowProjectsJobsDebugGetConfigRequest',
response_type_name='GetDebugConfigResponse',
supports_download=False,
)
def SendCapture(self, request, global_params=None):
r"""Send encoded debug capture data for component.
Args:
request: (DataflowProjectsJobsDebugSendCaptureRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(SendDebugCaptureResponse) The response message.
"""
config = self.GetMethodConfig('SendCapture')
return self._RunMethod(
config, request, global_params=global_params)
SendCapture.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dataflow.projects.jobs.debug.sendCapture',
ordered_params=['projectId', 'jobId'],
path_params=['jobId', 'projectId'],
query_params=[],
relative_path='v1b3/projects/{projectId}/jobs/{jobId}/debug/sendCapture',
request_field='sendDebugCaptureRequest',
request_type_name='DataflowProjectsJobsDebugSendCaptureRequest',
response_type_name='SendDebugCaptureResponse',
supports_download=False,
)
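# Hedged usage sketch for this service; the project and job IDs below are
# placeholders.  apitools-generated clients are typically driven by
# constructing the message named in request_type_name and passing it to the
# method:
#
#     client = DataflowV1b3()
#     req = messages.DataflowProjectsJobsDebugGetConfigRequest(
#         projectId='my-project', jobId='my-job-id')
#     resp = client.projects_jobs_debug.GetConfig(req)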
class ProjectsJobsMessagesService(base_api.BaseApiService):
"""Service class for the projects_jobs_messages resource."""
_NAME = 'projects_jobs_messages'
def __init__(self, client):
super(DataflowV1b3.ProjectsJobsMessagesService, self).__init__(client)
self._upload_configs = {
}
def List(self, request, global_params=None):
r"""Request the job status. To request the status of a job, we recommend using `projects.locations.jobs.messages.list` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.messages.list` is not recommended, as you can only request the status of jobs that are running in `us-central1`.
Args:
request: (DataflowProjectsJobsMessagesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ListJobMessagesResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dataflow.projects.jobs.messages.list',
ordered_params=['projectId', 'jobId'],
path_params=['jobId', 'projectId'],
query_params=['endTime', 'location', 'minimumImportance', 'pageSize', 'pageToken', 'startTime'],
relative_path='v1b3/projects/{projectId}/jobs/{jobId}/messages',
request_field='',
request_type_name='DataflowProjectsJobsMessagesListRequest',
response_type_name='ListJobMessagesResponse',
supports_download=False,
)
class ProjectsJobsWorkItemsService(base_api.BaseApiService):
"""Service class for the projects_jobs_workItems resource."""
_NAME = 'projects_jobs_workItems'
def __init__(self, client):
super(DataflowV1b3.ProjectsJobsWorkItemsService, self).__init__(client)
self._upload_configs = {
}
def Lease(self, request, global_params=None):
r"""Leases a dataflow WorkItem to run.
Args:
request: (DataflowProjectsJobsWorkItemsLeaseRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(LeaseWorkItemResponse) The response message.
"""
config = self.GetMethodConfig('Lease')
return self._RunMethod(
config, request, global_params=global_params)
Lease.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dataflow.projects.jobs.workItems.lease',
ordered_params=['projectId', 'jobId'],
path_params=['jobId', 'projectId'],
query_params=[],
relative_path='v1b3/projects/{projectId}/jobs/{jobId}/workItems:lease',
request_field='leaseWorkItemRequest',
request_type_name='DataflowProjectsJobsWorkItemsLeaseRequest',
response_type_name='LeaseWorkItemResponse',
supports_download=False,
)
def ReportStatus(self, request, global_params=None):
r"""Reports the status of dataflow WorkItems leased by a worker.
Args:
request: (DataflowProjectsJobsWorkItemsReportStatusRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ReportWorkItemStatusResponse) The response message.
"""
config = self.GetMethodConfig('ReportStatus')
return self._RunMethod(
config, request, global_params=global_params)
ReportStatus.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dataflow.projects.jobs.workItems.reportStatus',
ordered_params=['projectId', 'jobId'],
path_params=['jobId', 'projectId'],
query_params=[],
relative_path='v1b3/projects/{projectId}/jobs/{jobId}/workItems:reportStatus',
request_field='reportWorkItemStatusRequest',
request_type_name='DataflowProjectsJobsWorkItemsReportStatusRequest',
response_type_name='ReportWorkItemStatusResponse',
supports_download=False,
)
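# Usage sketch (illustrative): the work-item lease/report cycle used by
# Dataflow workers. Field names on LeaseWorkItemRequest are assumptions
# based on the v1b3 message definitions; `client`/`messages` as above.
#
#   lease = client.projects_jobs_workItems.Lease(
#       messages.DataflowProjectsJobsWorkItemsLeaseRequest(
#           projectId='my-project', jobId='my-job-id',
#           leaseWorkItemRequest=messages.LeaseWorkItemRequest(
#               workerId='worker-0')))
#   for item in lease.workItems:
#       ...  # execute the work item, then report via ReportStatus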
class ProjectsJobsService(base_api.BaseApiService):
"""Service class for the projects_jobs resource."""
_NAME = 'projects_jobs'
def __init__(self, client):
super(DataflowV1b3.ProjectsJobsService, self).__init__(client)
self._upload_configs = {
}
def Aggregated(self, request, global_params=None):
r"""List the jobs of a project across all regions.
Args:
request: (DataflowProjectsJobsAggregatedRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ListJobsResponse) The response message.
"""
config = self.GetMethodConfig('Aggregated')
return self._RunMethod(
config, request, global_params=global_params)
Aggregated.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dataflow.projects.jobs.aggregated',
ordered_params=['projectId'],
path_params=['projectId'],
query_params=['filter', 'location', 'pageSize', 'pageToken', 'view'],
relative_path='v1b3/projects/{projectId}/jobs:aggregated',
request_field='',
request_type_name='DataflowProjectsJobsAggregatedRequest',
response_type_name='ListJobsResponse',
supports_download=False,
)
def Create(self, request, global_params=None):
r"""Creates a Cloud Dataflow job. To create a job, we recommend using `projects.locations.jobs.create` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.create` is not recommended, as your job will always start in `us-central1`.
Args:
request: (DataflowProjectsJobsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Job) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dataflow.projects.jobs.create',
ordered_params=['projectId'],
path_params=['projectId'],
query_params=['location', 'replaceJobId', 'view'],
relative_path='v1b3/projects/{projectId}/jobs',
request_field='job',
request_type_name='DataflowProjectsJobsCreateRequest',
response_type_name='Job',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the state of the specified Cloud Dataflow job. To get the state of a job, we recommend using `projects.locations.jobs.get` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.get` is not recommended, as you can only get the state of jobs that are running in `us-central1`.
Args:
request: (DataflowProjectsJobsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Job) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dataflow.projects.jobs.get',
ordered_params=['projectId', 'jobId'],
path_params=['jobId', 'projectId'],
query_params=['location', 'view'],
relative_path='v1b3/projects/{projectId}/jobs/{jobId}',
request_field='',
request_type_name='DataflowProjectsJobsGetRequest',
response_type_name='Job',
supports_download=False,
)
def GetMetrics(self, request, global_params=None):
r"""Request the job status. To request the status of a job, we recommend using `projects.locations.jobs.getMetrics` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.getMetrics` is not recommended, as you can only request the status of jobs that are running in `us-central1`.
Args:
request: (DataflowProjectsJobsGetMetricsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(JobMetrics) The response message.
"""
config = self.GetMethodConfig('GetMetrics')
return self._RunMethod(
config, request, global_params=global_params)
GetMetrics.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dataflow.projects.jobs.getMetrics',
ordered_params=['projectId', 'jobId'],
path_params=['jobId', 'projectId'],
query_params=['location', 'startTime'],
relative_path='v1b3/projects/{projectId}/jobs/{jobId}/metrics',
request_field='',
request_type_name='DataflowProjectsJobsGetMetricsRequest',
response_type_name='JobMetrics',
supports_download=False,
)
def List(self, request, global_params=None):
r"""List the jobs of a project. To list the jobs of a project in a region, we recommend using `projects.locations.jobs.list` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). To list the all jobs across all regions, use `projects.jobs.aggregated`. Using `projects.jobs.list` is not recommended, as you can only get the list of jobs that are running in `us-central1`.
Args:
request: (DataflowProjectsJobsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ListJobsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dataflow.projects.jobs.list',
ordered_params=['projectId'],
path_params=['projectId'],
query_params=['filter', 'location', 'pageSize', 'pageToken', 'view'],
relative_path='v1b3/projects/{projectId}/jobs',
request_field='',
request_type_name='DataflowProjectsJobsListRequest',
response_type_name='ListJobsResponse',
supports_download=False,
)
def Snapshot(self, request, global_params=None):
r"""Snapshot the state of a streaming job.
Args:
request: (DataflowProjectsJobsSnapshotRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Snapshot) The response message.
"""
config = self.GetMethodConfig('Snapshot')
return self._RunMethod(
config, request, global_params=global_params)
Snapshot.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dataflow.projects.jobs.snapshot',
ordered_params=['projectId', 'jobId'],
path_params=['jobId', 'projectId'],
query_params=[],
relative_path='v1b3/projects/{projectId}/jobs/{jobId}:snapshot',
request_field='snapshotJobRequest',
request_type_name='DataflowProjectsJobsSnapshotRequest',
response_type_name='Snapshot',
supports_download=False,
)
def Update(self, request, global_params=None):
r"""Updates the state of an existing Cloud Dataflow job. To update the state of an existing job, we recommend using `projects.locations.jobs.update` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.update` is not recommended, as you can only update the state of jobs that are running in `us-central1`.
Args:
request: (DataflowProjectsJobsUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Job) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method='PUT',
method_id='dataflow.projects.jobs.update',
ordered_params=['projectId', 'jobId'],
path_params=['jobId', 'projectId'],
query_params=['location'],
relative_path='v1b3/projects/{projectId}/jobs/{jobId}',
request_field='job',
request_type_name='DataflowProjectsJobsUpdateRequest',
response_type_name='Job',
supports_download=False,
)
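# Usage sketch (illustrative): creating and then polling a job through the
# non-regional service (per the docstrings above, the regional
# `projects.locations.jobs` methods are preferred). `client`, `messages`
# and all IDs are hypothetical placeholders.
#
#   created = client.projects_jobs.Create(
#       messages.DataflowProjectsJobsCreateRequest(
#           projectId='my-project', location='us-central1',
#           job=messages.Job(name='example-job')))
#   job = client.projects_jobs.Get(
#       messages.DataflowProjectsJobsGetRequest(
#           projectId='my-project', jobId=created.id))
#   print(job.currentState)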
class ProjectsLocationsFlexTemplatesService(base_api.BaseApiService):
"""Service class for the projects_locations_flexTemplates resource."""
_NAME = 'projects_locations_flexTemplates'
def __init__(self, client):
super(DataflowV1b3.ProjectsLocationsFlexTemplatesService, self).__init__(client)
self._upload_configs = {
}
def Launch(self, request, global_params=None):
r"""Launch a job with a FlexTemplate.
Args:
request: (DataflowProjectsLocationsFlexTemplatesLaunchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(LaunchFlexTemplateResponse) The response message.
"""
config = self.GetMethodConfig('Launch')
return self._RunMethod(
config, request, global_params=global_params)
Launch.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dataflow.projects.locations.flexTemplates.launch',
ordered_params=['projectId', 'location'],
path_params=['location', 'projectId'],
query_params=[],
relative_path='v1b3/projects/{projectId}/locations/{location}/flexTemplates:launch',
request_field='launchFlexTemplateRequest',
request_type_name='DataflowProjectsLocationsFlexTemplatesLaunchRequest',
response_type_name='LaunchFlexTemplateResponse',
supports_download=False,
)
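# Usage sketch (illustrative): launching a Flex Template. Message field
# names are assumptions based on the v1b3 LaunchFlexTemplateRequest
# definition; the GCS path and job name are placeholders.
#
#   resp = client.projects_locations_flexTemplates.Launch(
#       messages.DataflowProjectsLocationsFlexTemplatesLaunchRequest(
#           projectId='my-project', location='us-central1',
#           launchFlexTemplateRequest=messages.LaunchFlexTemplateRequest(
#               launchParameter=messages.LaunchFlexTemplateParameter(
#                   jobName='flex-example',
#                   containerSpecGcsPath='gs://my-bucket/template.json'))))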
class ProjectsLocationsJobsDebugService(base_api.BaseApiService):
"""Service class for the projects_locations_jobs_debug resource."""
_NAME = 'projects_locations_jobs_debug'
def __init__(self, client):
super(DataflowV1b3.ProjectsLocationsJobsDebugService, self).__init__(client)
self._upload_configs = {
}
def GetConfig(self, request, global_params=None):
r"""Get encoded debug configuration for component. Not cacheable.
Args:
request: (DataflowProjectsLocationsJobsDebugGetConfigRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GetDebugConfigResponse) The response message.
"""
config = self.GetMethodConfig('GetConfig')
return self._RunMethod(
config, request, global_params=global_params)
GetConfig.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dataflow.projects.locations.jobs.debug.getConfig',
ordered_params=['projectId', 'location', 'jobId'],
path_params=['jobId', 'location', 'projectId'],
query_params=[],
relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/debug/getConfig',
request_field='getDebugConfigRequest',
request_type_name='DataflowProjectsLocationsJobsDebugGetConfigRequest',
response_type_name='GetDebugConfigResponse',
supports_download=False,
)
def SendCapture(self, request, global_params=None):
r"""Send encoded debug capture data for component.
Args:
request: (DataflowProjectsLocationsJobsDebugSendCaptureRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(SendDebugCaptureResponse) The response message.
"""
config = self.GetMethodConfig('SendCapture')
return self._RunMethod(
config, request, global_params=global_params)
SendCapture.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dataflow.projects.locations.jobs.debug.sendCapture',
ordered_params=['projectId', 'location', 'jobId'],
path_params=['jobId', 'location', 'projectId'],
query_params=[],
relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/debug/sendCapture',
request_field='sendDebugCaptureRequest',
request_type_name='DataflowProjectsLocationsJobsDebugSendCaptureRequest',
response_type_name='SendDebugCaptureResponse',
supports_download=False,
)
class ProjectsLocationsJobsMessagesService(base_api.BaseApiService):
"""Service class for the projects_locations_jobs_messages resource."""
_NAME = 'projects_locations_jobs_messages'
def __init__(self, client):
super(DataflowV1b3.ProjectsLocationsJobsMessagesService, self).__init__(client)
self._upload_configs = {
}
def List(self, request, global_params=None):
r"""Request the job status. To request the status of a job, we recommend using `projects.locations.jobs.messages.list` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.messages.list` is not recommended, as you can only request the status of jobs that are running in `us-central1`.
Args:
request: (DataflowProjectsLocationsJobsMessagesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ListJobMessagesResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dataflow.projects.locations.jobs.messages.list',
ordered_params=['projectId', 'location', 'jobId'],
path_params=['jobId', 'location', 'projectId'],
query_params=['endTime', 'minimumImportance', 'pageSize', 'pageToken', 'startTime'],
relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/messages',
request_field='',
request_type_name='DataflowProjectsLocationsJobsMessagesListRequest',
response_type_name='ListJobMessagesResponse',
supports_download=False,
)
class ProjectsLocationsJobsSnapshotsService(base_api.BaseApiService):
"""Service class for the projects_locations_jobs_snapshots resource."""
_NAME = 'projects_locations_jobs_snapshots'
def __init__(self, client):
super(DataflowV1b3.ProjectsLocationsJobsSnapshotsService, self).__init__(client)
self._upload_configs = {
}
def List(self, request, global_params=None):
r"""Lists snapshots.
Args:
request: (DataflowProjectsLocationsJobsSnapshotsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ListSnapshotsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dataflow.projects.locations.jobs.snapshots.list',
ordered_params=['projectId', 'location', 'jobId'],
path_params=['jobId', 'location', 'projectId'],
query_params=[],
relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/snapshots',
request_field='',
request_type_name='DataflowProjectsLocationsJobsSnapshotsListRequest',
response_type_name='ListSnapshotsResponse',
supports_download=False,
)
class ProjectsLocationsJobsStagesService(base_api.BaseApiService):
"""Service class for the projects_locations_jobs_stages resource."""
_NAME = 'projects_locations_jobs_stages'
def __init__(self, client):
super(DataflowV1b3.ProjectsLocationsJobsStagesService, self).__init__(client)
self._upload_configs = {
}
def GetExecutionDetails(self, request, global_params=None):
r"""Request detailed information about the execution status of a stage of the job. EXPERIMENTAL. This API is subject to change or removal without notice.
Args:
request: (DataflowProjectsLocationsJobsStagesGetExecutionDetailsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(StageExecutionDetails) The response message.
"""
config = self.GetMethodConfig('GetExecutionDetails')
return self._RunMethod(
config, request, global_params=global_params)
GetExecutionDetails.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dataflow.projects.locations.jobs.stages.getExecutionDetails',
ordered_params=['projectId', 'location', 'jobId', 'stageId'],
path_params=['jobId', 'location', 'projectId', 'stageId'],
query_params=['endTime', 'pageSize', 'pageToken', 'startTime'],
relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/stages/{stageId}/executionDetails',
request_field='',
request_type_name='DataflowProjectsLocationsJobsStagesGetExecutionDetailsRequest',
response_type_name='StageExecutionDetails',
supports_download=False,
)
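# Usage sketch (illustrative): fetching per-stage execution details for
# the experimental API above; `stageId` values such as 'S01' come from
# the job's execution graph and are placeholders here.
#
#   details = client.projects_locations_jobs_stages.GetExecutionDetails(
#       messages.DataflowProjectsLocationsJobsStagesGetExecutionDetailsRequest(
#           projectId='my-project', location='us-central1',
#           jobId='my-job-id', stageId='S01', pageSize=50))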
class ProjectsLocationsJobsWorkItemsService(base_api.BaseApiService):
"""Service class for the projects_locations_jobs_workItems resource."""
_NAME = 'projects_locations_jobs_workItems'
def __init__(self, client):
super(DataflowV1b3.ProjectsLocationsJobsWorkItemsService, self).__init__(client)
self._upload_configs = {
}
def Lease(self, request, global_params=None):
r"""Leases a dataflow WorkItem to run.
Args:
request: (DataflowProjectsLocationsJobsWorkItemsLeaseRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(LeaseWorkItemResponse) The response message.
"""
config = self.GetMethodConfig('Lease')
return self._RunMethod(
config, request, global_params=global_params)
Lease.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dataflow.projects.locations.jobs.workItems.lease',
ordered_params=['projectId', 'location', 'jobId'],
path_params=['jobId', 'location', 'projectId'],
query_params=[],
relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/workItems:lease',
request_field='leaseWorkItemRequest',
request_type_name='DataflowProjectsLocationsJobsWorkItemsLeaseRequest',
response_type_name='LeaseWorkItemResponse',
supports_download=False,
)
def ReportStatus(self, request, global_params=None):
r"""Reports the status of dataflow WorkItems leased by a worker.
Args:
request: (DataflowProjectsLocationsJobsWorkItemsReportStatusRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ReportWorkItemStatusResponse) The response message.
"""
config = self.GetMethodConfig('ReportStatus')
return self._RunMethod(
config, request, global_params=global_params)
ReportStatus.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dataflow.projects.locations.jobs.workItems.reportStatus',
ordered_params=['projectId', 'location', 'jobId'],
path_params=['jobId', 'location', 'projectId'],
query_params=[],
relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/workItems:reportStatus',
request_field='reportWorkItemStatusRequest',
request_type_name='DataflowProjectsLocationsJobsWorkItemsReportStatusRequest',
response_type_name='ReportWorkItemStatusResponse',
supports_download=False,
)
class ProjectsLocationsJobsService(base_api.BaseApiService):
"""Service class for the projects_locations_jobs resource."""
_NAME = 'projects_locations_jobs'
def __init__(self, client):
super(DataflowV1b3.ProjectsLocationsJobsService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Creates a Cloud Dataflow job. To create a job, we recommend using `projects.locations.jobs.create` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.create` is not recommended, as your job will always start in `us-central1`.
Args:
request: (DataflowProjectsLocationsJobsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Job) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dataflow.projects.locations.jobs.create',
ordered_params=['projectId', 'location'],
path_params=['location', 'projectId'],
query_params=['replaceJobId', 'view'],
relative_path='v1b3/projects/{projectId}/locations/{location}/jobs',
request_field='job',
request_type_name='DataflowProjectsLocationsJobsCreateRequest',
response_type_name='Job',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the state of the specified Cloud Dataflow job. To get the state of a job, we recommend using `projects.locations.jobs.get` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.get` is not recommended, as you can only get the state of jobs that are running in `us-central1`.
Args:
request: (DataflowProjectsLocationsJobsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Job) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dataflow.projects.locations.jobs.get',
ordered_params=['projectId', 'location', 'jobId'],
path_params=['jobId', 'location', 'projectId'],
query_params=['view'],
relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}',
request_field='',
request_type_name='DataflowProjectsLocationsJobsGetRequest',
response_type_name='Job',
supports_download=False,
)
def GetExecutionDetails(self, request, global_params=None):
r"""Request detailed information about the execution status of the job. EXPERIMENTAL. This API is subject to change or removal without notice.
Args:
request: (DataflowProjectsLocationsJobsGetExecutionDetailsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(JobExecutionDetails) The response message.
"""
config = self.GetMethodConfig('GetExecutionDetails')
return self._RunMethod(
config, request, global_params=global_params)
GetExecutionDetails.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dataflow.projects.locations.jobs.getExecutionDetails',
ordered_params=['projectId', 'location', 'jobId'],
path_params=['jobId', 'location', 'projectId'],
query_params=['pageSize', 'pageToken'],
relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/executionDetails',
request_field='',
request_type_name='DataflowProjectsLocationsJobsGetExecutionDetailsRequest',
response_type_name='JobExecutionDetails',
supports_download=False,
)
def GetMetrics(self, request, global_params=None):
r"""Request the job status. To request the status of a job, we recommend using `projects.locations.jobs.getMetrics` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.getMetrics` is not recommended, as you can only request the status of jobs that are running in `us-central1`.
Args:
request: (DataflowProjectsLocationsJobsGetMetricsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(JobMetrics) The response message.
"""
config = self.GetMethodConfig('GetMetrics')
return self._RunMethod(
config, request, global_params=global_params)
GetMetrics.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dataflow.projects.locations.jobs.getMetrics',
ordered_params=['projectId', 'location', 'jobId'],
path_params=['jobId', 'location', 'projectId'],
query_params=['startTime'],
relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/metrics',
request_field='',
request_type_name='DataflowProjectsLocationsJobsGetMetricsRequest',
response_type_name='JobMetrics',
supports_download=False,
)
def List(self, request, global_params=None):
r"""List the jobs of a project. To list the jobs of a project in a region, we recommend using `projects.locations.jobs.list` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). To list the all jobs across all regions, use `projects.jobs.aggregated`. Using `projects.jobs.list` is not recommended, as you can only get the list of jobs that are running in `us-central1`.
Args:
request: (DataflowProjectsLocationsJobsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ListJobsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dataflow.projects.locations.jobs.list',
ordered_params=['projectId', 'location'],
path_params=['location', 'projectId'],
query_params=['filter', 'pageSize', 'pageToken', 'view'],
relative_path='v1b3/projects/{projectId}/locations/{location}/jobs',
request_field='',
request_type_name='DataflowProjectsLocationsJobsListRequest',
response_type_name='ListJobsResponse',
supports_download=False,
)
def Snapshot(self, request, global_params=None):
r"""Snapshot the state of a streaming job.
Args:
request: (DataflowProjectsLocationsJobsSnapshotRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Snapshot) The response message.
"""
config = self.GetMethodConfig('Snapshot')
return self._RunMethod(
config, request, global_params=global_params)
Snapshot.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dataflow.projects.locations.jobs.snapshot',
ordered_params=['projectId', 'location', 'jobId'],
path_params=['jobId', 'location', 'projectId'],
query_params=[],
relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}:snapshot',
request_field='snapshotJobRequest',
request_type_name='DataflowProjectsLocationsJobsSnapshotRequest',
response_type_name='Snapshot',
supports_download=False,
)
def Update(self, request, global_params=None):
r"""Updates the state of an existing Cloud Dataflow job. To update the state of an existing job, we recommend using `projects.locations.jobs.update` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.update` is not recommended, as you can only update the state of jobs that are running in `us-central1`.
Args:
request: (DataflowProjectsLocationsJobsUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Job) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method='PUT',
method_id='dataflow.projects.locations.jobs.update',
ordered_params=['projectId', 'location', 'jobId'],
path_params=['jobId', 'location', 'projectId'],
query_params=[],
relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}',
request_field='job',
request_type_name='DataflowProjectsLocationsJobsUpdateRequest',
response_type_name='Job',
supports_download=False,
)
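# Usage sketch (illustrative): requesting cancellation through the
# regional Update method. The `requestedState` enum access is an
# assumption based on the generated Job message; IDs are placeholders.
#
#   client.projects_locations_jobs.Update(
#       messages.DataflowProjectsLocationsJobsUpdateRequest(
#           projectId='my-project', location='us-central1',
#           jobId='my-job-id',
#           job=messages.Job(requestedState=messages.Job
#               .RequestedStateValueValuesEnum.JOB_STATE_CANCELLED)))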
class ProjectsLocationsSnapshotsService(base_api.BaseApiService):
"""Service class for the projects_locations_snapshots resource."""
_NAME = 'projects_locations_snapshots'
def __init__(self, client):
super(DataflowV1b3.ProjectsLocationsSnapshotsService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
r"""Deletes a snapshot.
Args:
request: (DataflowProjectsLocationsSnapshotsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(DeleteSnapshotResponse) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method='DELETE',
method_id='dataflow.projects.locations.snapshots.delete',
ordered_params=['projectId', 'location', 'snapshotId'],
path_params=['location', 'projectId', 'snapshotId'],
query_params=[],
relative_path='v1b3/projects/{projectId}/locations/{location}/snapshots/{snapshotId}',
request_field='',
request_type_name='DataflowProjectsLocationsSnapshotsDeleteRequest',
response_type_name='DeleteSnapshotResponse',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets information about a snapshot.
Args:
request: (DataflowProjectsLocationsSnapshotsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Snapshot) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dataflow.projects.locations.snapshots.get',
ordered_params=['projectId', 'location', 'snapshotId'],
path_params=['location', 'projectId', 'snapshotId'],
query_params=[],
relative_path='v1b3/projects/{projectId}/locations/{location}/snapshots/{snapshotId}',
request_field='',
request_type_name='DataflowProjectsLocationsSnapshotsGetRequest',
response_type_name='Snapshot',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists snapshots.
Args:
request: (DataflowProjectsLocationsSnapshotsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ListSnapshotsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dataflow.projects.locations.snapshots.list',
ordered_params=['projectId', 'location'],
path_params=['location', 'projectId'],
query_params=['jobId'],
relative_path='v1b3/projects/{projectId}/locations/{location}/snapshots',
request_field='',
request_type_name='DataflowProjectsLocationsSnapshotsListRequest',
response_type_name='ListSnapshotsResponse',
supports_download=False,
)
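# Usage sketch (illustrative): listing snapshots for a job and deleting
# one; `snapshots` on ListSnapshotsResponse is the expected repeated
# field, and all IDs are placeholders.
#
#   listing = client.projects_locations_snapshots.List(
#       messages.DataflowProjectsLocationsSnapshotsListRequest(
#           projectId='my-project', location='us-central1',
#           jobId='my-job-id'))
#   for snap in listing.snapshots:
#       client.projects_locations_snapshots.Delete(
#           messages.DataflowProjectsLocationsSnapshotsDeleteRequest(
#               projectId='my-project', location='us-central1',
#               snapshotId=snap.id))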
class ProjectsLocationsSqlService(base_api.BaseApiService):
"""Service class for the projects_locations_sql resource."""
_NAME = 'projects_locations_sql'
def __init__(self, client):
super(DataflowV1b3.ProjectsLocationsSqlService, self).__init__(client)
self._upload_configs = {
}
def Validate(self, request, global_params=None):
r"""Validates a GoogleSQL query for Cloud Dataflow syntax. Will always confirm the given query parses correctly, and if able to look up schema information from DataCatalog, will validate that the query analyzes properly as well.
Args:
request: (DataflowProjectsLocationsSqlValidateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ValidateResponse) The response message.
"""
config = self.GetMethodConfig('Validate')
return self._RunMethod(
config, request, global_params=global_params)
Validate.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dataflow.projects.locations.sql.validate',
ordered_params=['projectId', 'location'],
path_params=['location', 'projectId'],
query_params=['query'],
relative_path='v1b3/projects/{projectId}/locations/{location}/sql:validate',
request_field='',
request_type_name='DataflowProjectsLocationsSqlValidateRequest',
response_type_name='ValidateResponse',
supports_download=False,
)
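# Usage sketch (illustrative): validating a GoogleSQL query; note the
# query is passed as a query parameter on a GET request, per the method
# config above.
#
#   result = client.projects_locations_sql.Validate(
#       messages.DataflowProjectsLocationsSqlValidateRequest(
#           projectId='my-project', location='us-central1',
#           query='SELECT 1'))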
class ProjectsLocationsTemplatesService(base_api.BaseApiService):
"""Service class for the projects_locations_templates resource."""
_NAME = 'projects_locations_templates'
def __init__(self, client):
super(DataflowV1b3.ProjectsLocationsTemplatesService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Creates a Cloud Dataflow job from a template.
Args:
request: (DataflowProjectsLocationsTemplatesCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Job) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dataflow.projects.locations.templates.create',
ordered_params=['projectId', 'location'],
path_params=['location', 'projectId'],
query_params=[],
relative_path='v1b3/projects/{projectId}/locations/{location}/templates',
request_field='createJobFromTemplateRequest',
request_type_name='DataflowProjectsLocationsTemplatesCreateRequest',
response_type_name='Job',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Get the template associated with a template.
Args:
request: (DataflowProjectsLocationsTemplatesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GetTemplateResponse) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dataflow.projects.locations.templates.get',
ordered_params=['projectId', 'location'],
path_params=['location', 'projectId'],
query_params=['gcsPath', 'view'],
relative_path='v1b3/projects/{projectId}/locations/{location}/templates:get',
request_field='',
request_type_name='DataflowProjectsLocationsTemplatesGetRequest',
response_type_name='GetTemplateResponse',
supports_download=False,
)
def Launch(self, request, global_params=None):
r"""Launch a template.
Args:
request: (DataflowProjectsLocationsTemplatesLaunchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(LaunchTemplateResponse) The response message.
"""
config = self.GetMethodConfig('Launch')
return self._RunMethod(
config, request, global_params=global_params)
Launch.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dataflow.projects.locations.templates.launch',
ordered_params=['projectId', 'location'],
path_params=['location', 'projectId'],
query_params=['dynamicTemplate_gcsPath', 'dynamicTemplate_stagingLocation', 'gcsPath', 'validateOnly'],
relative_path='v1b3/projects/{projectId}/locations/{location}/templates:launch',
request_field='launchTemplateParameters',
request_type_name='DataflowProjectsLocationsTemplatesLaunchRequest',
response_type_name='LaunchTemplateResponse',
supports_download=False,
)
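# Usage sketch (illustrative): launching a classic template from GCS.
# `launchTemplateParameters` matches the request_field above; the template
# path, job name and parameter contents are placeholders.
#
#   resp = client.projects_locations_templates.Launch(
#       messages.DataflowProjectsLocationsTemplatesLaunchRequest(
#           projectId='my-project', location='us-central1',
#           gcsPath='gs://dataflow-templates/latest/Word_Count',
#           launchTemplateParameters=messages.LaunchTemplateParameters(
#               jobName='wordcount-example')))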
class ProjectsLocationsService(base_api.BaseApiService):
"""Service class for the projects_locations resource."""
_NAME = 'projects_locations'
def __init__(self, client):
super(DataflowV1b3.ProjectsLocationsService, self).__init__(client)
self._upload_configs = {
}
def WorkerMessages(self, request, global_params=None):
r"""Send a worker_message to the service.
Args:
request: (DataflowProjectsLocationsWorkerMessagesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(SendWorkerMessagesResponse) The response message.
"""
config = self.GetMethodConfig('WorkerMessages')
return self._RunMethod(
config, request, global_params=global_params)
WorkerMessages.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dataflow.projects.locations.workerMessages',
ordered_params=['projectId', 'location'],
path_params=['location', 'projectId'],
query_params=[],
relative_path='v1b3/projects/{projectId}/locations/{location}/WorkerMessages',
request_field='sendWorkerMessagesRequest',
request_type_name='DataflowProjectsLocationsWorkerMessagesRequest',
response_type_name='SendWorkerMessagesResponse',
supports_download=False,
)
class ProjectsSnapshotsService(base_api.BaseApiService):
"""Service class for the projects_snapshots resource."""
_NAME = 'projects_snapshots'
def __init__(self, client):
super(DataflowV1b3.ProjectsSnapshotsService, self).__init__(client)
self._upload_configs = {
}
def Get(self, request, global_params=None):
r"""Gets information about a snapshot.
Args:
request: (DataflowProjectsSnapshotsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Snapshot) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dataflow.projects.snapshots.get',
ordered_params=['projectId', 'snapshotId'],
path_params=['projectId', 'snapshotId'],
query_params=['location'],
relative_path='v1b3/projects/{projectId}/snapshots/{snapshotId}',
request_field='',
request_type_name='DataflowProjectsSnapshotsGetRequest',
response_type_name='Snapshot',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists snapshots.
Args:
request: (DataflowProjectsSnapshotsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ListSnapshotsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dataflow.projects.snapshots.list',
ordered_params=['projectId'],
path_params=['projectId'],
query_params=['jobId', 'location'],
relative_path='v1b3/projects/{projectId}/snapshots',
request_field='',
request_type_name='DataflowProjectsSnapshotsListRequest',
response_type_name='ListSnapshotsResponse',
supports_download=False,
)
class ProjectsTemplatesService(base_api.BaseApiService):
"""Service class for the projects_templates resource."""
_NAME = 'projects_templates'
def __init__(self, client):
super(DataflowV1b3.ProjectsTemplatesService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Creates a Cloud Dataflow job from a template.
Args:
request: (DataflowProjectsTemplatesCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Job) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dataflow.projects.templates.create',
ordered_params=['projectId'],
path_params=['projectId'],
query_params=[],
relative_path='v1b3/projects/{projectId}/templates',
request_field='createJobFromTemplateRequest',
request_type_name='DataflowProjectsTemplatesCreateRequest',
response_type_name='Job',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Get the template associated with a template.
Args:
request: (DataflowProjectsTemplatesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GetTemplateResponse) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dataflow.projects.templates.get',
ordered_params=['projectId'],
path_params=['projectId'],
query_params=['gcsPath', 'location', 'view'],
relative_path='v1b3/projects/{projectId}/templates:get',
request_field='',
request_type_name='DataflowProjectsTemplatesGetRequest',
response_type_name='GetTemplateResponse',
supports_download=False,
)
def Launch(self, request, global_params=None):
r"""Launch a template.
Args:
request: (DataflowProjectsTemplatesLaunchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(LaunchTemplateResponse) The response message.
"""
config = self.GetMethodConfig('Launch')
return self._RunMethod(
config, request, global_params=global_params)
Launch.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dataflow.projects.templates.launch',
ordered_params=['projectId'],
path_params=['projectId'],
query_params=['dynamicTemplate_gcsPath', 'dynamicTemplate_stagingLocation', 'gcsPath', 'location', 'validateOnly'],
relative_path='v1b3/projects/{projectId}/templates:launch',
request_field='launchTemplateParameters',
request_type_name='DataflowProjectsTemplatesLaunchRequest',
response_type_name='LaunchTemplateResponse',
supports_download=False,
)
class ProjectsService(base_api.BaseApiService):
"""Service class for the projects resource."""
_NAME = 'projects'
def __init__(self, client):
super(DataflowV1b3.ProjectsService, self).__init__(client)
self._upload_configs = {
}
def DeleteSnapshots(self, request, global_params=None):
r"""Deletes a snapshot.
Args:
request: (DataflowProjectsDeleteSnapshotsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(DeleteSnapshotResponse) The response message.
"""
config = self.GetMethodConfig('DeleteSnapshots')
return self._RunMethod(
config, request, global_params=global_params)
DeleteSnapshots.method_config = lambda: base_api.ApiMethodInfo(
http_method='DELETE',
method_id='dataflow.projects.deleteSnapshots',
ordered_params=['projectId'],
path_params=['projectId'],
query_params=['location', 'snapshotId'],
relative_path='v1b3/projects/{projectId}/snapshots',
request_field='',
request_type_name='DataflowProjectsDeleteSnapshotsRequest',
response_type_name='DeleteSnapshotResponse',
supports_download=False,
)
def WorkerMessages(self, request, global_params=None):
r"""Send a worker_message to the service.
Args:
request: (DataflowProjectsWorkerMessagesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(SendWorkerMessagesResponse) The response message.
"""
config = self.GetMethodConfig('WorkerMessages')
return self._RunMethod(
config, request, global_params=global_params)
WorkerMessages.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dataflow.projects.workerMessages',
ordered_params=['projectId'],
path_params=['projectId'],
query_params=[],
relative_path='v1b3/projects/{projectId}/WorkerMessages',
request_field='sendWorkerMessagesRequest',
request_type_name='DataflowProjectsWorkerMessagesRequest',
response_type_name='SendWorkerMessagesResponse',
supports_download=False,
)
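# Usage sketch (illustrative): sending worker messages through the
# top-level projects service. SendWorkerMessagesRequest fields are
# assumptions based on the v1b3 messages; `client`/`messages` as above.
#
#   client.projects.WorkerMessages(
#       messages.DataflowProjectsWorkerMessagesRequest(
#           projectId='my-project',
#           sendWorkerMessagesRequest=messages.SendWorkerMessagesRequest(
#               workerMessages=[messages.WorkerMessage()])))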
| 42.939189
| 419
| 0.70382
| 5,520
| 57,195
| 7.07663
| 0.059783
| 0.052531
| 0.040857
| 0.024729
| 0.794742
| 0.775542
| 0.756163
| 0.72442
| 0.666334
| 0.632158
| 0
| 0.003669
| 0.194597
| 57,195
| 1,331
| 420
| 42.97145
| 0.844329
| 0.272419
| 0
| 0.588506
| 1
| 0
| 0.255007
| 0.179549
| 0
| 0
| 0
| 0
| 0
| 1
| 0.070115
| false
| 0
| 0.005747
| 0
| 0.15977
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0750b7b38786ebfdc0ebee31118b5fff733d0b0c
| 1,096
|
py
|
Python
|
tests/test_area.py
|
jm66/home-assistant-cli
|
2c17482d0d02c66b43b820b1b49fcd077720de7a
|
[
"Apache-2.0"
] | null | null | null |
tests/test_area.py
|
jm66/home-assistant-cli
|
2c17482d0d02c66b43b820b1b49fcd077720de7a
|
[
"Apache-2.0"
] | null | null | null |
tests/test_area.py
|
jm66/home-assistant-cli
|
2c17482d0d02c66b43b820b1b49fcd077720de7a
|
[
"Apache-2.0"
] | 1
|
2020-08-13T21:45:48.000Z
|
2020-08-13T21:45:48.000Z
|
"""Testing Area operations."""
import json
import unittest.mock as mock
from click.testing import CliRunner
import homeassistant_cli.cli as cli
def test_area_list(default_areas) -> None:
"""Test Area List."""
with mock.patch(
'homeassistant_cli.remote.get_areas', return_value=default_areas
):
runner = CliRunner()
result = runner.invoke(
cli.cli, ["--output=json", "area", "list"], catch_exceptions=False
)
assert result.exit_code == 0
data = json.loads(result.output)
assert len(data) == 3
def test_area_list_filter(default_areas) -> None:
"""Test Area List."""
with mock.patch(
'homeassistant_cli.remote.get_areas', return_value=default_areas
):
runner = CliRunner()
result = runner.invoke(
cli.cli,
["--output=json", "area", "list", "Bed.*"],
catch_exceptions=False,
)
assert result.exit_code == 0
data = json.loads(result.output)
assert len(data) == 1
assert data[0]['name'] == "Bedroom"
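# Both tests depend on a `default_areas` pytest fixture provided by the
# project's conftest. A minimal sketch of such a fixture (shape inferred
# from the assertions above; the real fixture may return typed objects):
#
#   import pytest
#
#   @pytest.fixture
#   def default_areas():
#       return [
#           {'area_id': '1', 'name': 'Bedroom'},
#           {'area_id': '2', 'name': 'Kitchen'},
#           {'area_id': '3', 'name': 'Living Room'},
#       ]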
| 24.909091
| 78
| 0.601277
| 128
| 1,096
| 4.992188
| 0.351563
| 0.075117
| 0.075117
| 0.046948
| 0.719875
| 0.719875
| 0.719875
| 0.719875
| 0.719875
| 0.719875
| 0
| 0.006242
| 0.269161
| 1,096
| 43
| 79
| 25.488372
| 0.791511
| 0.051095
| 0
| 0.482759
| 0
| 0
| 0.123047
| 0.066406
| 0
| 0
| 0
| 0
| 0.172414
| 1
| 0.068966
| false
| 0
| 0.137931
| 0
| 0.206897
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
075e651de2100af7031bf6e5aa704b42330268dd
| 276
|
py
|
Python
|
bou/actions/__init__.py
|
Feastybeast/bou
|
6ea7d95cbc400fc1a0ebbad40fddad8c66717215
|
[
"MIT"
] | null | null | null |
bou/actions/__init__.py
|
Feastybeast/bou
|
6ea7d95cbc400fc1a0ebbad40fddad8c66717215
|
[
"MIT"
] | 2
|
2021-03-14T01:07:02.000Z
|
2021-03-16T08:12:08.000Z
|
bou/actions/__init__.py
|
Feastybeast/bou
|
6ea7d95cbc400fc1a0ebbad40fddad8c66717215
|
[
"MIT"
] | null | null | null |
""" bou.actions
~~~
Reexporting for syntactic sugar
"""
from bou.actions.create import create
from bou.actions.list import list
from bou.actions.manage import manage
from bou.actions.migrate import migrate, Direction
from bou.actions.version import version
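# Usage sketch (illustrative): thanks to the re-exports above, callers can
# import the actions directly from the package:
#
#   from bou.actions import create, migrate, Direction
#   migrate(Direction.UP)  # hypothetical arguments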
| 25.090909
| 51
| 0.75
| 37
| 276
| 5.594595
| 0.378378
| 0.289855
| 0.338164
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 276
| 10
| 52
| 27.6
| 0.907895
| 0.17029
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4afcbd36b379ab0a8d6cf4440a7f886e42ba8317
| 1,840
|
py
|
Python
|
test/unit/test_parse_replicas.py
|
KTH/aspen
|
3be9b55d21dfd950d1a82b2cf4f464cd1f1e9757
|
[
"MIT"
] | null | null | null |
test/unit/test_parse_replicas.py
|
KTH/aspen
|
3be9b55d21dfd950d1a82b2cf4f464cd1f1e9757
|
[
"MIT"
] | 8
|
2019-10-10T08:03:02.000Z
|
2022-01-11T11:28:58.000Z
|
test/unit/test_parse_replicas.py
|
KTH/aspen
|
3be9b55d21dfd950d1a82b2cf4f464cd1f1e9757
|
[
"MIT"
] | null | null | null |
__author__ = 'tinglev@kth.se'

import unittest
from test import mock_test_data
from modules.steps.parse_replicas import ParseReplicas
from modules.util import data_defs, exceptions


class TestParseReplicas(unittest.TestCase):

    def test_bad_replicas(self):
        step = ParseReplicas()
        pipeline_data = {data_defs.STACK_FILE_PARSED_CONTENT:
                         mock_test_data.get_parsed_stack_content()}
        service = pipeline_data[data_defs.STACK_FILE_PARSED_CONTENT]['services']['web']
        self.assertRaises(exceptions.DeploymentError, step.get_replicas, service, pipeline_data)
        service['deploy'] = {}
        self.assertRaises(exceptions.DeploymentError, step.get_replicas, service, pipeline_data)

    def test_good_replicas(self):
        step = ParseReplicas()
        pipeline_data = {data_defs.STACK_FILE_PARSED_CONTENT:
                         mock_test_data.get_parsed_stack_content()}
        pipeline_data[data_defs.STACK_FILE_PARSED_CONTENT]['services']['web']['deploy'] = {
            'replicas': 1
        }
        try:
            step.run_step(pipeline_data)
        except:
            self.fail()
        self.assertEqual(pipeline_data[data_defs.REPLICAS], 3)

    def test_global_mode(self):
        step = ParseReplicas()
        pipeline_data = {data_defs.STACK_FILE_PARSED_CONTENT:
                         mock_test_data.get_parsed_stack_content()}
        pipeline_data[data_defs.STACK_FILE_PARSED_CONTENT]['services']['web']['deploy'] = {
            'mode': 'global'
        }
        pipeline_data[data_defs.STACK_FILE_PARSED_CONTENT]['services']['api']['deploy'] = {
            'mode': 'global'
        }
        try:
            step.run_step(pipeline_data)
        except:
            self.fail()
        self.assertEqual(pipeline_data[data_defs.REPLICAS], 'global')
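# Illustrative sketch of driving the step outside unittest (mirrors the
# setup used in the tests above; the mock stack content shape comes from
# test.mock_test_data):
#
#   step = ParseReplicas()
#   data = {data_defs.STACK_FILE_PARSED_CONTENT:
#           mock_test_data.get_parsed_stack_content()}
#   step.run_step(data)
#   print(data[data_defs.REPLICAS])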
| 39.148936
| 96
| 0.653804
| 202
| 1,840
| 5.579208
| 0.237624
| 0.138421
| 0.127773
| 0.159716
| 0.746229
| 0.746229
| 0.746229
| 0.746229
| 0.746229
| 0.701863
| 0
| 0.001437
| 0.243478
| 1,840
| 46
| 97
| 40
| 0.80819
| 0
| 0
| 0.560976
| 0
| 0
| 0.063043
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 1
| 0.073171
| false
| 0
| 0.097561
| 0
| 0.195122
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ab067ad29a6722f6ff831b739adcd79d80853663
| 167
|
py
|
Python
|
solutions/PySolutions/SolEx12.py
|
ElLorans/PythonCrashCourse
|
e8158cdc376988ac18e3f68d628c2d19a43b8913
|
[
"MIT"
] | 2
|
2021-01-02T21:19:25.000Z
|
2021-02-18T23:10:30.000Z
|
solutions/PySolutions/SolEx12.py
|
ElLorans/PythonCrashCourse
|
e8158cdc376988ac18e3f68d628c2d19a43b8913
|
[
"MIT"
] | null | null | null |
solutions/PySolutions/SolEx12.py
|
ElLorans/PythonCrashCourse
|
e8158cdc376988ac18e3f68d628c2d19a43b8913
|
[
"MIT"
] | 4
|
2021-01-03T10:15:43.000Z
|
2021-01-07T23:10:40.000Z
|
# add to the dictionary a name with its phone number
# result should be phone_book = {'Python': 1}
phone_book = {}
phone_book['Python'] = 1
print(phone_book)
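# Looking the entry back up (illustrative follow-up):
print(phone_book['Python'])  # prints: 1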
| 20.875
| 53
| 0.682635
| 26
| 167
| 4.230769
| 0.653846
| 0.327273
| 0.272727
| 0.290909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015152
| 0.209581
| 167
| 7
| 54
| 23.857143
| 0.818182
| 0.562874
| 0
| 0
| 0
| 0
| 0.095238
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ab236c3f5cf409059809724d8804a9b39c997265
| 49,763
|
py
|
Python
|
cinder/tests/unit/volume/drivers/test_linstordrv.py
|
stackhpc/cinder
|
93f0ca4dc9eedee10df2f03dad834a31b7f09847
|
[
"Apache-2.0"
] | null | null | null |
cinder/tests/unit/volume/drivers/test_linstordrv.py
|
stackhpc/cinder
|
93f0ca4dc9eedee10df2f03dad834a31b7f09847
|
[
"Apache-2.0"
] | 1
|
2021-03-31T19:22:03.000Z
|
2021-03-31T19:22:03.000Z
|
cinder/tests/unit/volume/drivers/test_linstordrv.py
|
alokchandra11/cinder
|
121d9f512b4a6d1afe6a690effb7c2b379040a7b
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2018-2019 LINBIT HA Solutions GmbH
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from oslo_utils import timeutils
from cinder import exception as cinder_exception
from cinder import test
from cinder.volume import configuration as conf
from cinder.volume.drivers import linstordrv as drv
CONF = cfg.CONF
CINDER_UNKNOWN = 'unknown'
DISKLESS = 'DISKLESS'
LVM = 'LVM'
LVM_THIN = 'LVM_THIN'
ZFS = 'ZFS'
ZFS_THIN = 'ZFS_THIN'
DRIVER = 'cinder.volume.drivers.linstordrv.'
RESOURCE = {
'name': 'CV_0348a7d3-3bb9-452d-9f40-2cf5ebfe9131',
'volume': {
'device_path': '/dev/drbd1000'
}
}
RESOURCE_LIST = [{
'layer_object': {
'children': [{
'storage': {
'storage_volumes': [{
'allocated_size_kib': 1048576,
'device_path':
'/dev/vol/CV_0348a7d3-3bb9-452d-9f40-2cf5ebfe9131_00000',
'disk_state': '[]',
'usable_size_kib': 1048576,
'volume_number': 0}]},
'type': 'STORAGE'}],
'drbd': {
'al_size': 32,
'al_stripes': 1,
'drbd_resource_definition': {
'al_stripe_size_kib': 32,
'al_stripes': 1,
'down': False,
'peer_slots': 7,
'port': 7005,
'secret': 'poQZ0Ad/Bq8DT9fA7ydB',
'transport_type': 'IP'},
'drbd_volumes': [{
'allocated_size_kib': 1044740,
'backing_device':
'/dev/vol/CV_0348a7d3-3bb9-452d-9f40-2cf5ebfe9131_00000',
'device_path': '/dev/drbd1005',
'drbd_volume_definition': {
'minor_number': 1005,
'volume_number': 0},
'usable_size_kib': 1044480}],
'node_id': 0,
'peer_slots': 7},
'type': 'DRBD'},
'name': 'CV_0348a7d3-3bb9-452d-9f40-2cf5ebfe9131',
'node_name': 'node-2',
'state': {'in_use': False},
'uuid': 'a4ab4670-c5fc-4590-a3a2-39c4685c8c32',
'volumes': [{
'allocated_size_kib': 45403,
'device_path': '/dev/drbd1005',
'layer_data_list': [{
'data': {
'allocated_size_kib': 1044740,
'backing_device':
'/dev/vol/CV_0348a7d3-3bb9-452d-9f40-2cf5ebfe9131_00000',
'device_path': '/dev/drbd1005',
'drbd_volume_definition': {
'minor_number': 1005,
'volume_number': 0},
'usable_size_kib': 1044480},
'type': 'DRBD'}, {
'data': {
'allocated_size_kib': 1048576,
'device_path':
'/dev/vol/CV_0348a7d3-3bb9-452d-9f40-2cf5ebfe9131_00000',
'disk_state': '[]',
'usable_size_kib': 1048576,
'volume_number': 0},
'type': 'STORAGE'}
],
'props': {
'RestoreFromResource': 'CV_123a2fdc-365f-472e-bb8e-484788712abc',
'RestoreFromSnapshot': 'SN_68edb708-48de-4da1-9953-b9de9da9f1b8'
},
'provider_kind': 'LVM_THIN',
'state': {'disk_state': 'UpToDate'},
'storage_pool_name': 'DfltStorPool',
'uuid': 'e270ba0c-b284-4f21-85cc-602f132a2251',
'volume_number': 0}]}, {
'flags': ['DISKLESS'],
'layer_object': {
'children': [{
'storage': {
'storage_volumes': [{
'allocated_size_kib': 0,
'usable_size_kib': 1044480,
'volume_number': 0}]},
'type': 'STORAGE'}],
'drbd': {
'al_size': 32,
'al_stripes': 1,
'drbd_resource_definition': {
'al_stripe_size_kib': 32,
'al_stripes': 1,
'down': False,
'peer_slots': 7,
'port': 7005,
'secret': 'poQZ0Ad/Bq8DT9fA7ydB',
'transport_type': 'IP'},
'drbd_volumes': [{
'allocated_size_kib': 1044740,
'device_path': '/dev/drbd1005',
'drbd_volume_definition': {
'minor_number': 1005,
'volume_number': 0},
'usable_size_kib': 1044480}],
'flags': ['DISKLESS'],
'node_id': 1,
'peer_slots': 7},
'type': 'DRBD'},
'name': 'CV_0348a7d3-3bb9-452d-9f40-2cf5ebfe9131',
'node_name': 'node-1',
'state': {'in_use': False},
'uuid': '11e853df-6f66-4cd9-9fbc-f3f7cc98d5cf',
'volumes': [{
'allocated_size_kib': 45403,
'device_path': '/dev/drbd1005',
'layer_data_list': [
{
'data': {
'allocated_size_kib': 1044740,
'device_path': '/dev/drbd1005',
'drbd_volume_definition': {
'minor_number': 1005,
'volume_number': 0},
'usable_size_kib': 1044480},
'type': 'DRBD'
},
{
'data': {
'allocated_size_kib': 0,
'usable_size_kib': 1044480,
'volume_number': 0
},
'type': 'STORAGE'
}
],
'provider_kind': 'DISKLESS',
'state': {'disk_state': 'Diskless'},
'storage_pool_name': 'DfltStorPool',
'uuid': '27b4aeec-2b42-41c9-b186-86afc8778046',
'volume_number': 0
}]}]
RESOURCE_LIST_RESP = ['node-1', 'node-2']
SNAPSHOT_LIST_RESP = ['node-1']
DISKLESS_LIST_RESP = ['node-1']
RESOURCE_DFN_LIST = [{
'layer_data': [
{
'data': {
'al_stripe_size_kib': 32,
'al_stripes': 1,
'down': False,
'peer_slots': 7,
'port': 7005,
'secret': 'poQZ0Ad/Bq8DT9fA7ydB',
'transport_type': 'IP'
},
'type': 'DRBD'
},
{
'type': 'STORAGE'
}
],
'name': 'CV_0348a7d3-3bb9-452d-9f40-2cf5ebfe9131',
'props': {'DrbdPrimarySetOn': 'node-1'},
'uuid': '9a684294-6db4-40c8-bfeb-e5351200b9db'
}]
RESOURCE_DFN_LIST_RESP = [{
'rd_name': u'CV_0348a7d3-3bb9-452d-9f40-2cf5ebfe9131',
'rd_uuid': u'9a684294-6db4-40c8-bfeb-e5351200b9db',
}]
NODES_LIST = [
{
'connection_status': 'ONLINE',
'name': 'node-1',
'net_interfaces': [{
'address': '192.168.8.63',
'name': 'default',
'satellite_encryption_type': 'PLAIN',
'satellite_port': 3366,
'uuid': '9c5b727f-0c62-4040-9a33-96a4fd4aaac3'}],
'props': {'CurStltConnName': 'default'},
'type': 'COMBINED',
'uuid': '69b88ffb-50d9-4576-9843-d7bf4724d043'
},
{
'connection_status': 'ONLINE',
'name': 'node-2',
'net_interfaces': [{
'address': '192.168.8.102',
'name': 'default',
'satellite_encryption_type': 'PLAIN',
'satellite_port': 3366,
'uuid': '3f911fc9-4f9b-4155-b9da-047d5242484c'}],
'props': {'CurStltConnName': 'default'},
'type': 'SATELLITE',
'uuid': '26bde754-0f05-499c-a63c-9f4e5f30556e'
}
]
NODES_RESP = [
{'node_address': '192.168.8.63', 'node_name': 'node-1'},
{'node_address': '192.168.8.102', 'node_name': 'node-2'}
]
STORAGE_POOL_DEF = [{'storage_pool_name': 'DfltStorPool'}]
STORAGE_POOL_DEF_RESP = ['DfltStorPool']
STORAGE_POOL_LIST = [
{
'free_capacity': 104815656,
'free_space_mgr_name': 'node-2:DfltStorPool',
'node_name': 'node-2',
'props': {
'StorDriver/LvmVg': 'vol',
'StorDriver/ThinPool': 'thin_pool'
},
'provider_kind': 'LVM_THIN',
'static_traits': {
'Provisioning': 'Thin',
'SupportsSnapshots': 'true'
},
'storage_pool_name': 'DfltStorPool',
'total_capacity': 104857600,
'uuid': '004faf29-be1a-4d74-9470-038bcee2c611'
},
{
'free_capacity': 9223372036854775807,
'free_space_mgr_name': 'node-1:DfltStorPool',
'node_name': 'node-1',
'provider_kind': 'DISKLESS',
'static_traits': {'SupportsSnapshots': 'false'},
'storage_pool_name': 'DfltStorPool',
'total_capacity': 9223372036854775807,
'uuid': '897da09e-1316-45c0-a308-c07008af42df'
}
]
STORAGE_POOL_LIST_RESP = [
{
'driver_name': 'LVM_THIN',
'node_name': 'node-2',
'sp_uuid': '004faf29-be1a-4d74-9470-038bcee2c611',
'sp_cap': 100.0,
'sp_free': 100,
'sp_name': u'DfltStorPool'
},
{
'driver_name': 'DISKLESS',
'node_name': 'node-1',
'sp_uuid': '897da09e-1316-45c0-a308-c07008af42df',
'sp_allocated': 0.0,
'sp_cap': -1.0,
'sp_free': -1.0,
'sp_name': 'DfltStorPool'
}
]
VOLUME_STATS_RESP = {
'driver_version': '0.0.7',
'pools': [{
'QoS_support': False,
'backend_state': 'up',
'filter_function': None,
'free_capacity_gb': 100,
'goodness_function': None,
'location_info': 'linstor://localhost',
'max_over_subscription_ratio': 0,
'multiattach': False,
'pool_name': 'lin-test-driver',
'provisioned_capacity_gb': 0.0,
'reserved_percentage': 0,
'thick_provisioning_support': False,
'thin_provisioning_support': True,
'total_capacity_gb': 100.0,
'total_volumes': 1,
}],
'vendor_name': 'LINBIT',
'volume_backend_name': 'lin-test-driver'
}
CINDER_VOLUME = {
'id': '0348a7d3-3bb9-452d-9f40-2cf5ebfe9131',
'name': 'test-lin-vol',
'size': 1,
'volume_type_id': 'linstor',
'created_at': timeutils.utcnow()
}
SNAPSHOT = {
'id': '0348a7d3-3bb9-452d-9f40-2cf5ebfe9131',
'volume_id': '0348a7d3-3bb9-452d-9f40-2cf5ebfe9131',
'volume_size': 1
}
VOLUME_NAMES = {
'linstor': 'CV_0348a7d3-3bb9-452d-9f40-2cf5ebfe9131',
'cinder': '0348a7d3-3bb9-452d-9f40-2cf5ebfe9131',
'snap': 'SN_0348a7d3-3bb9-452d-9f40-2cf5ebfe9131',
}
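# Naming convention exercised below: LINSTOR resource and snapshot names are
# the Cinder UUIDs prefixed with 'CV_' and 'SN_' respectively (see the
# drv.DM_VN_PREFIX usage in the name-validation tests).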
class LinstorAPIFakeDriver(object):
def fake_api_ping(self):
return 1234
def fake_api_resource_list(self):
return RESOURCE_LIST
def fake_api_node_list(self):
return NODES_LIST
def fake_api_storage_pool_dfn_list(self):
return STORAGE_POOL_DEF
def fake_api_storage_pool_list(self):
return STORAGE_POOL_LIST
def fake_api_resource_dfn_list(self):
return RESOURCE_DFN_LIST
def fake_api_snapshot_list(self):
return SNAPSHOT_LIST_RESP
class LinstorFakeResource(object):
def __init__(self):
self.volumes = [{'size': 1069547520}]
self.id = 0
def delete(self):
return True
    def is_diskless(self, host):
        return host in DISKLESS_LIST_RESP
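# LinstorFakeResource stands in for a python-linstor Resource object: just
# enough surface (volumes, delete(), is_diskless()) for the code under test.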
class LinstorBaseDriverTestCase(test.TestCase):
def __init__(self, *args, **kwargs):
super(LinstorBaseDriverTestCase, self).__init__(*args, **kwargs)
def setUp(self):
super(LinstorBaseDriverTestCase, self).setUp()
if drv is None:
return
self._mock = mock.Mock()
self._fake_driver = LinstorAPIFakeDriver()
self.configuration = mock.Mock(conf.Configuration)
self.driver = drv.LinstorBaseDriver(
configuration=self.configuration)
self.driver.VERSION = '0.0.7'
self.driver.default_rsc_size = 1
self.driver.default_vg_name = 'vg-1'
        self.driver.default_downsize_factor = 4096
self.driver.default_pool = STORAGE_POOL_DEF_RESP[0]
self.driver.host_name = 'node-1'
self.driver.diskless = True
self.driver.default_uri = 'linstor://localhost'
self.driver.default_backend_name = 'lin-test-driver'
self.driver.configuration.reserved_percentage = 0
self.driver.configuration.max_over_subscription_ratio = 0
self.driver.ap_count = 0
@mock.patch(DRIVER + 'LinstorBaseDriver._ping')
def test_ping(self, m_ping):
m_ping.return_value = self._fake_driver.fake_api_ping()
val = self.driver._ping()
expected = 1234
self.assertEqual(expected, val)
@mock.patch('uuid.uuid4')
def test_clean_uuid(self, m_uuid):
m_uuid.return_value = u'bd6472d1-dc3c-4d41-a5f0-f44271c05680'
val = self.driver._clean_uuid()
expected = u'bd6472d1-dc3c-4d41-a5f0-f44271c05680'
self.assertEqual(expected, val)
@mock.patch('uuid.uuid4')
def test_clean_uuid_with_braces(self, m_uuid):
m_uuid.return_value = u'{bd6472d1-dc3c-4d41-a5f0-f44271c05680}'
val = self.driver._clean_uuid()
expected = u'bd6472d1-dc3c-4d41-a5f0-f44271c05680'
m_uuid.assert_called_once()
self.assertEqual(expected, val)
# Test volume size conversions
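    # Sketch of the conversion these tests pin down (assuming the driver's
    # formula): GiB -> KiB is size * 1024 * 1024, minus the configured
    # downsize factor (4096 KiB here) to leave headroom for DRBD metadata.
    #   1 GiB: 1 * 1048576 - 4096 = 1044480
    #   2 GiB: 2 * 1048576 - 4096 = 2093056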
def test_unit_conversions_to_linstor_1GiB(self):
val = self.driver._vol_size_to_linstor(1)
        expected = 1044480  # 1048576 - 4096
self.assertEqual(expected, val)
def test_unit_conversions_to_linstor_2GiB(self):
val = self.driver._vol_size_to_linstor(2)
expected = 2093056 # 2097152 - 4096
self.assertEqual(expected, val)
def test_unit_conversions_to_cinder(self):
val = self.driver._vol_size_to_cinder(1048576)
expected = 1
self.assertEqual(expected, val)
def test_unit_conversions_to_cinder_2GiB(self):
val = self.driver._vol_size_to_cinder(2097152)
expected = 2
self.assertEqual(expected, val)
def test_is_clean_volume_name(self):
val = self.driver._is_clean_volume_name(VOLUME_NAMES['cinder'],
drv.DM_VN_PREFIX)
expected = VOLUME_NAMES['linstor']
self.assertEqual(expected, val)
def test_is_clean_volume_name_invalid(self):
wrong_uuid = 'bc3015e6-695f-4688-91f2-invaliduuid1'
val = self.driver._is_clean_volume_name(wrong_uuid,
drv.DM_VN_PREFIX)
expected = None
self.assertEqual(expected, val)
def test_snapshot_name_from_cinder_snapshot(self):
val = self.driver._snapshot_name_from_cinder_snapshot(
SNAPSHOT)
expected = VOLUME_NAMES['snap']
self.assertEqual(expected, val)
def test_cinder_volume_name_from_drbd_resource(self):
val = self.driver._cinder_volume_name_from_drbd_resource(
VOLUME_NAMES['linstor'])
expected = VOLUME_NAMES['cinder']
self.assertEqual(expected, val)
def test_drbd_resource_name_from_cinder_snapshot(self):
val = self.driver._drbd_resource_name_from_cinder_snapshot(
SNAPSHOT)
expected = VOLUME_NAMES['linstor']
self.assertEqual(expected, val)
def test_drbd_resource_name_from_cinder_volume(self):
val = self.driver._drbd_resource_name_from_cinder_volume(
CINDER_VOLUME)
expected = VOLUME_NAMES['linstor']
self.assertEqual(expected, val)
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_resource_list')
    def test_get_rsc_path(self, m_rsc_list):
m_rsc_list.return_value = self._fake_driver.fake_api_resource_list()
val = self.driver._get_rsc_path(VOLUME_NAMES['linstor'])
expected = '/dev/drbd1005'
m_rsc_list.assert_called_once()
self.assertEqual(expected, val)
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_resource_list')
def test_get_local_path(self, m_rsc_list):
m_rsc_list.return_value = self._fake_driver.fake_api_resource_list()
val = self.driver._get_local_path(CINDER_VOLUME)
expected = '/dev/drbd1005'
m_rsc_list.assert_called_once()
self.assertEqual(expected, val)
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_storage_pool_dfn_list')
def test_get_spd(self, m_spd_list):
m_spd_list.return_value = (
self._fake_driver.fake_api_storage_pool_dfn_list())
val = self.driver._get_spd()
expected = STORAGE_POOL_DEF_RESP
m_spd_list.assert_called_once()
self.assertEqual(expected, val)
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_storage_pool_list')
def test_get_storage_pool(self, m_sp_list):
m_sp_list.return_value = (
self._fake_driver.fake_api_storage_pool_list())
val = self.driver._get_storage_pool()
expected = STORAGE_POOL_LIST_RESP
m_sp_list.assert_called_once()
self.assertEqual(expected, val)
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_resource_dfn_list')
def test_get_resource_definitions(self, m_rscd_list):
m_rscd_list.return_value = (
self._fake_driver.fake_api_resource_dfn_list())
val = self.driver._get_resource_definitions()
expected = RESOURCE_DFN_LIST_RESP
m_rscd_list.assert_called_once()
self.assertEqual(expected, val)
@mock.patch(DRIVER + 'LinstorBaseDriver._get_snapshot_nodes')
def test_get_snapshot_nodes(self, m_rsc_list):
m_rsc_list.return_value = self._fake_driver.fake_api_snapshot_list()
val = self.driver._get_snapshot_nodes(VOLUME_NAMES['linstor'])
expected = SNAPSHOT_LIST_RESP
m_rsc_list.assert_called_once()
self.assertEqual(expected, val)
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_resource_list')
def test_get_diskless_nodes(self, m_rsc_list):
m_rsc_list.return_value = self._fake_driver.fake_api_resource_list()
val = self.driver._get_diskless_nodes(RESOURCE['name'])
expected = DISKLESS_LIST_RESP
m_rsc_list.assert_called_once()
self.assertEqual(expected, val)
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_node_list')
def test_get_linstor_nodes(self, m_node_list):
m_node_list.return_value = self._fake_driver.fake_api_node_list()
val = self.driver._get_linstor_nodes()
expected = RESOURCE_LIST_RESP
m_node_list.assert_called_once()
self.assertEqual(expected, val)
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_node_list')
def test_get_nodes(self, m_node_list):
m_node_list.return_value = self._fake_driver.fake_api_node_list()
val = self.driver._get_nodes()
expected = NODES_RESP
m_node_list.assert_called_once()
self.assertEqual(expected, val)
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_size')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_is_diskless')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_resource_list')
@mock.patch(DRIVER + 'LinstorBaseDriver.get_goodness_function')
@mock.patch(DRIVER + 'LinstorBaseDriver.get_filter_function')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_resource_dfn_list')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_storage_pool_list')
def test_get_volume_stats(self,
m_sp_list,
m_rscd_list,
m_filter,
m_goodness,
m_rsc_list,
m_diskless,
m_rsc_size):
m_sp_list.return_value = (
self._fake_driver.fake_api_storage_pool_list())
m_rscd_list.return_value = (
self._fake_driver.fake_api_resource_dfn_list())
m_filter.return_value = None
m_goodness.return_value = None
m_rsc_list.return_value = RESOURCE_LIST
m_diskless.return_value = True
m_rsc_size.return_value = 1069547520
val = self.driver._get_volume_stats()
expected = VOLUME_STATS_RESP
m_sp_list.assert_called_once()
m_rscd_list.assert_called_once()
self.assertEqual(expected, val)
@mock.patch(DRIVER + 'LinstorBaseDriver._api_snapshot_create')
def test_create_snapshot_fail(self,
m_snap_create):
m_snap_create.return_value = False
self.assertRaises(cinder_exception.VolumeBackendAPIException,
self.driver.create_snapshot, SNAPSHOT)
@mock.patch(DRIVER + 'LinstorBaseDriver._api_snapshot_create')
def test_create_snapshot_success(self,
m_snap_create):
m_snap_create.return_value = True
# No exception should be raised
self.assertIsNone(self.driver.create_snapshot(SNAPSHOT))
@mock.patch(DRIVER + 'LinstorBaseDriver._api_snapshot_delete')
def test_delete_snapshot_fail(self,
m_snap_delete):
m_snap_delete.return_value = False
self.assertRaises(cinder_exception.VolumeBackendAPIException,
self.driver.delete_snapshot, SNAPSHOT)
@mock.patch(DRIVER + 'LinstorBaseDriver._get_snapshot_nodes')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_snapshot_delete')
def test_delete_snapshot_success(self,
m_snap_delete,
m_snap_nodes):
m_snap_delete.return_value = True
m_snap_nodes.return_value = self._fake_driver.fake_api_snapshot_list()
# No exception should be raised
self.driver.delete_snapshot(SNAPSHOT)
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_dfn_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_snapshot_nodes')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_snapshot_delete')
def test_delete_snapshot_success_cleanup_rd(self,
m_snap_delete,
m_snap_nodes,
m_rd_delete):
m_snap_delete.return_value = True
m_snap_nodes.return_value = []
m_rd_delete.return_value = None
# No exception should be raised
self.driver.delete_snapshot(SNAPSHOT)
# Resource Definition Delete should run once
m_rd_delete.assert_called_once()
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_storage_pool_list')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_volume_dfn_set_sp')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_volume_extend')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_create')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_snapshot_resource_restore')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_linstor_nodes')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_snapshot_volume_dfn_restore')
@mock.patch(DRIVER + 'LinstorBaseDriver._check_api_reply')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_dfn_create')
def test_create_volume_from_snapshot(self,
m_rsc_dfn_create,
m_api_reply,
m_snap_vd_restore,
m_lin_nodes,
m_snap_rsc_restore,
m_rsc_create,
m_vol_extend,
m_vol_dfn,
m_sp_list):
m_rsc_dfn_create.return_value = True
m_api_reply.return_value = True
m_snap_vd_restore.return_value = True
m_nodes = []
m_lin_nodes.return_value = m_nodes
m_snap_rsc_restore.return_value = True
m_rsc_create.return_value = True
m_vol_extend.return_value = True
m_vol_dfn.return_value = True
m_sp_list.return_value = (
self._fake_driver.fake_api_storage_pool_list())
# No exception should be raised
self.assertIsNone(self.driver.create_volume_from_snapshot(
CINDER_VOLUME, SNAPSHOT))
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_storage_pool_list')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_volume_dfn_set_sp')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_volume_extend')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_create')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_snapshot_resource_restore')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_linstor_nodes')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_snapshot_volume_dfn_restore')
@mock.patch(DRIVER + 'LinstorBaseDriver._check_api_reply')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_dfn_create')
def test_create_volume_from_snapshot_fail_restore(self,
m_rsc_dfn_create,
m_api_reply,
m_snap_vd_restore,
m_lin_nodes,
m_snap_rsc_restore,
m_rsc_create,
m_vol_extend,
m_vol_dfn,
m_sp_list):
m_rsc_dfn_create.return_value = True
m_api_reply.return_value = True
m_snap_vd_restore.return_value = True
m_nodes = []
m_lin_nodes.return_value = m_nodes
m_snap_rsc_restore.return_value = False
m_rsc_create.return_value = True
m_vol_extend.return_value = True
m_vol_dfn.return_value = True
m_sp_list.return_value = (
self._fake_driver.fake_api_storage_pool_list())
# Failing to restore a snapshot should raise an exception
self.assertRaises(cinder_exception.VolumeBackendAPIException,
self.driver.create_volume_from_snapshot,
CINDER_VOLUME, SNAPSHOT)
@mock.patch(DRIVER + 'LinstorBaseDriver.delete_volume')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_storage_pool_list')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_volume_dfn_set_sp')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_volume_extend')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_create')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_snapshot_resource_restore')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_linstor_nodes')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_snapshot_volume_dfn_restore')
@mock.patch(DRIVER + 'LinstorBaseDriver._check_api_reply')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_dfn_create')
def test_create_volume_from_snapshot_fail_extend(self,
m_rsc_dfn_create,
m_api_reply,
m_snap_vd_restore,
m_lin_nodes,
m_snap_rsc_restore,
m_rsc_create,
m_vol_extend,
m_vol_dfn,
m_sp_list,
m_delete_volume):
m_rsc_dfn_create.return_value = True
m_api_reply.return_value = False
m_snap_vd_restore.return_value = True
m_nodes = []
m_lin_nodes.return_value = m_nodes
m_snap_rsc_restore.return_value = True
m_rsc_create.return_value = True
m_vol_extend.return_value = True
m_vol_dfn.return_value = True
m_sp_list.return_value = (
self._fake_driver.fake_api_storage_pool_list())
m_delete_volume.return_value = True
# Failing to extend the volume after a snapshot restoration should
# raise an exception
        new_volume = CINDER_VOLUME.copy()  # copy: do not mutate the shared fixture
new_volume['size'] = 2
self.assertRaises(cinder_exception.VolumeBackendAPIException,
self.driver.create_volume_from_snapshot,
new_volume, SNAPSHOT)
@mock.patch(DRIVER + 'LinstorBaseDriver._check_api_reply')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_create')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_volume_dfn_create')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_dfn_create')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_storage_pool_create')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_storage_pool_dfn_list')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_node_list')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_storage_pool_list')
def test_create_volume_fail_no_linstor_nodes(self,
m_sp_list,
m_node_list,
m_spd_list,
m_sp_create,
m_rsc_dfn_create,
m_vol_dfn_create,
m_rsc_create,
m_api_reply):
m_sp_list.return_value = []
m_node_list.return_value = []
m_spd_list.return_value = (
self._fake_driver.fake_api_storage_pool_dfn_list())
m_sp_create.return_value = True
m_rsc_dfn_create.return_value = True
m_vol_dfn_create.return_value = True
m_rsc_create.return_value = True
m_api_reply.return_value = True
        test_volume = CINDER_VOLUME.copy()  # copy: do not mutate the shared fixture
test_volume['migration_status'] = ('migrating:',
str(VOLUME_NAMES['cinder']))
test_volume['display_name'] = 'test_volume'
test_volume['host'] = 'node_one'
test_volume['size'] = 1
self.assertRaises(cinder_exception.VolumeBackendAPIException,
self.driver.create_volume, test_volume)
@mock.patch(DRIVER + 'LinstorBaseDriver._check_api_reply')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_create')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_volume_dfn_create')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_dfn_create')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_storage_pool_create')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_storage_pool_dfn_list')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_node_list')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_storage_pool_list')
def test_create_volume_fail_rsc_create(self,
m_sp_list,
m_node_list,
m_spd_list,
m_sp_create,
m_rsc_dfn_create,
m_vol_dfn_create,
m_rsc_create,
m_api_reply):
m_sp_list.return_value = (
self._fake_driver.fake_api_storage_pool_list())
m_node_list.return_value = self._fake_driver.fake_api_node_list()
m_spd_list.return_value = (
self._fake_driver.fake_api_storage_pool_dfn_list())
m_sp_create.return_value = True
m_rsc_dfn_create.return_value = True
m_vol_dfn_create.return_value = True
m_rsc_create.return_value = True
m_api_reply.return_value = False
        test_volume = CINDER_VOLUME.copy()  # copy: do not mutate the shared fixture
test_volume['migration_status'] = ('migrating:',
str(VOLUME_NAMES['cinder']))
test_volume['display_name'] = 'test_volume'
test_volume['host'] = 'node_one'
test_volume['size'] = 1
self.assertRaises(cinder_exception.VolumeBackendAPIException,
self.driver.create_volume, test_volume)
@mock.patch(DRIVER + 'LinstorBaseDriver._check_api_reply')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_create')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_volume_dfn_create')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_dfn_create')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_storage_pool_create')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_storage_pool_dfn_list')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_node_list')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_storage_pool_list')
def test_create_volume(self,
m_sp_list,
m_node_list,
m_spd_list,
m_sp_create,
m_rsc_dfn_create,
m_vol_dfn_create,
m_rsc_create,
m_api_reply):
m_sp_list.return_value = (
self._fake_driver.fake_api_storage_pool_list())
m_node_list.return_value = self._fake_driver.fake_api_node_list()
m_spd_list.return_value = (
self._fake_driver.fake_api_storage_pool_dfn_list())
m_sp_create.return_value = True
m_rsc_dfn_create.return_value = True
m_vol_dfn_create.return_value = True
m_rsc_create.return_value = True
m_api_reply.return_value = True
        test_volume = CINDER_VOLUME.copy()  # copy: do not mutate the shared fixture
test_volume['migration_status'] = ('migrating:',
str(VOLUME_NAMES['cinder']))
test_volume['display_name'] = 'test_volume'
test_volume['host'] = 'node_one'
test_volume['size'] = 1
val = self.driver.create_volume(test_volume)
expected = {}
self.assertEqual(expected, val)
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_auto_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._check_api_reply')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_dfn_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_volume_dfn_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_resource_list')
def test_delete_volume_fail_incomplete(self,
m_rsc_list,
m_rsc_delete,
m_vol_dfn_delete,
m_rsc_dfn_delete,
m_api_reply,
m_rsc_auto_delete):
m_rsc_list.return_value = self._fake_driver.fake_api_resource_list()
m_rsc_delete.return_value = True
m_vol_dfn_delete.return_value = True
m_rsc_dfn_delete.return_value = True
m_api_reply.return_value = False
m_rsc_auto_delete.return_value = True
        test_volume = CINDER_VOLUME.copy()  # copy: do not mutate the shared fixture
test_volume['display_name'] = 'linstor_test'
test_volume['host'] = 'node_one'
test_volume['size'] = 1
self.assertRaises(cinder_exception.VolumeBackendAPIException,
self.driver.delete_volume, test_volume)
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_auto_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_diskless_nodes')
@mock.patch(DRIVER + 'LinstorBaseDriver._check_api_reply')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_dfn_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_volume_dfn_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_resource_list')
def test_delete_volume_fail_diskless_remove(self,
m_rsc_list,
m_rsc_delete,
m_vol_dfn_delete,
m_rsc_dfn_delete,
m_api_reply,
m_diskless,
m_rsc_auto_delete):
m_rsc_list.return_value = self._fake_driver.fake_api_resource_list()
m_rsc_delete.return_value = False
m_vol_dfn_delete.return_value = True
m_rsc_dfn_delete.return_value = True
m_api_reply.return_value = False
m_diskless.return_value = ['foo']
m_rsc_auto_delete.return_value = True
        test_volume = CINDER_VOLUME.copy()  # copy: do not mutate the shared fixture
test_volume['display_name'] = 'linstor_test'
test_volume['host'] = 'node_one'
test_volume['size'] = 1
# Raises exception for failing to delete a diskless resource
self.assertRaises(cinder_exception.VolumeBackendAPIException,
self.driver.delete_volume, test_volume)
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_auto_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_snapshot_nodes')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_diskless_nodes')
@mock.patch(DRIVER + 'LinstorBaseDriver._check_api_reply')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_dfn_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_volume_dfn_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_resource_list')
def test_delete_volume_fail_diskful_remove(self,
m_rsc_list,
m_rsc_delete,
m_vol_dfn_delete,
m_rsc_dfn_delete,
m_api_reply,
m_diskless,
m_snap_nodes,
m_rsc_auto_delete):
m_rsc_list.return_value = self._fake_driver.fake_api_resource_list()
m_rsc_delete.return_value = False
m_vol_dfn_delete.return_value = True
m_rsc_dfn_delete.return_value = True
m_api_reply.return_value = False
m_diskless.return_value = []
m_snap_nodes.return_value = ['foo']
m_rsc_auto_delete.return_value = True
        test_volume = CINDER_VOLUME.copy()  # copy: do not mutate the shared fixture
test_volume['display_name'] = 'linstor_test'
test_volume['host'] = 'node_one'
test_volume['size'] = 1
# Raises exception for failing to delete a diskful resource
self.assertRaises(cinder_exception.VolumeBackendAPIException,
self.driver.delete_volume, test_volume)
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_auto_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_snapshot_nodes')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_diskless_nodes')
@mock.patch(DRIVER + 'LinstorBaseDriver._check_api_reply')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_dfn_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_volume_dfn_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_resource_list')
def test_delete_volume_fail_volume_definition(self,
m_rsc_list,
m_rsc_delete,
m_vol_dfn_delete,
m_rsc_dfn_delete,
m_api_reply,
m_diskless,
m_snap_nodes,
m_rsc_auto_delete):
m_rsc_list.return_value = self._fake_driver.fake_api_resource_list()
m_rsc_delete.return_value = True
m_vol_dfn_delete.return_value = False
m_rsc_dfn_delete.return_value = True
m_api_reply.return_value = False
m_diskless.return_value = []
m_snap_nodes.return_value = []
m_rsc_auto_delete.return_value = True
        test_volume = CINDER_VOLUME.copy()  # copy: do not mutate the shared fixture
test_volume['display_name'] = 'linstor_test'
test_volume['host'] = 'node_one'
test_volume['size'] = 1
# Raises exception for failing to delete a volume definition
self.assertRaises(cinder_exception.VolumeBackendAPIException,
self.driver.delete_volume, test_volume)
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_auto_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._check_api_reply')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_dfn_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_volume_dfn_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._api_rsc_delete')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_resource_list')
def test_delete_volume(self,
m_rsc_list,
m_rsc_delete,
m_vol_dfn_delete,
m_rsc_dfn_delete,
m_api_reply,
m_rsc_auto_delete):
m_rsc_list.return_value = self._fake_driver.fake_api_resource_list()
m_rsc_delete.return_value = True
m_vol_dfn_delete.return_value = True
m_rsc_dfn_delete.return_value = True
m_api_reply.return_value = True
m_rsc_auto_delete.return_value = True
        test_volume = CINDER_VOLUME.copy()  # copy: do not mutate the shared fixture
test_volume['display_name'] = 'linstor_test'
test_volume['host'] = 'node_one'
test_volume['size'] = 1
val = self.driver.delete_volume(test_volume)
expected = True
self.assertEqual(expected, val)
@mock.patch(DRIVER + 'LinstorBaseDriver._check_api_reply')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_volume_extend')
def test_extend_volume_success(self, m_vol_extend, m_api_reply):
m_vol_extend.return_value = True
m_api_reply.return_value = True
# No exception should be raised
self.driver.extend_volume(CINDER_VOLUME, 2)
@mock.patch(DRIVER + 'LinstorBaseDriver._check_api_reply')
@mock.patch(DRIVER + 'LinstorBaseDriver._get_api_volume_extend')
def test_extend_volume_fail(self, m_vol_extend, m_api_reply):
m_vol_extend.return_value = False
m_api_reply.return_value = False
self.assertRaises(cinder_exception.VolumeBackendAPIException,
self.driver.extend_volume,
CINDER_VOLUME,
2)
def test_migrate_volume(self):
m_ctxt = {}
m_volume = {}
m_host = ''
val = self.driver.migrate_volume(m_ctxt, m_volume, m_host)
expected = (False, None)
self.assertEqual(expected, val)
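        # The base driver does not implement volume migration itself, so it
        # reports (False, None), which tells Cinder to fall back to its
        # generic host-based migration path.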
class LinstorIscsiDriverTestCase(test.TestCase):
def __init__(self, *args, **kwargs):
super(LinstorIscsiDriverTestCase, self).__init__(*args, **kwargs)
def setUp(self):
super(LinstorIscsiDriverTestCase, self).setUp()
self._mock = mock.Mock()
self._fake_driver = LinstorAPIFakeDriver()
self.configuration = mock.Mock(conf.Configuration)
self.configuration.iscsi_helper = 'tgtadm'
self.driver = drv.LinstorIscsiDriver(
configuration=self.configuration, h_name='tgtadm')
self.driver.VERSION = '0.0.7'
self.driver.default_rsc_size = 1
self.driver.default_vg_name = 'vg-1'
        self.driver.default_downsize_factor = 4096
self.driver.default_pool = STORAGE_POOL_DEF_RESP[0]
self.driver.host_name = 'node_one'
self.driver.diskless = True
self.driver.location_info = 'LinstorIscsi:linstor://localhost'
self.driver.default_backend_name = 'lin-test-driver'
        self.driver.configuration.reserved_percentage = 0
        self.driver.configuration.max_over_subscription_ratio = 0
@mock.patch(DRIVER + 'LinstorIscsiDriver._get_api_resource_list')
@mock.patch(DRIVER + 'LinstorIscsiDriver._get_volume_stats')
def test_iscsi_get_volume_stats(self, m_vol_stats, m_rsc_list):
m_vol_stats.return_value = VOLUME_STATS_RESP
m_rsc_list.return_value = RESOURCE_LIST
val = self.driver.get_volume_stats()
expected = VOLUME_STATS_RESP
expected["storage_protocol"] = 'iSCSI'
self.assertEqual(expected, val)
@mock.patch(DRIVER + 'linstor')
def test_iscsi_check_for_setup_error_pass(self, m_linstor):
m_linstor.return_value = True
# No exception should be raised
self.driver.check_for_setup_error()
class LinstorDrbdDriverTestCase(test.TestCase):
def __init__(self, *args, **kwargs):
super(LinstorDrbdDriverTestCase, self).__init__(*args, **kwargs)
def setUp(self):
super(LinstorDrbdDriverTestCase, self).setUp()
self._mock = mock.Mock()
self._fake_driver = LinstorAPIFakeDriver()
self.configuration = mock.Mock(conf.Configuration)
self.driver = drv.LinstorDrbdDriver(
configuration=self.configuration)
self.driver.VERSION = '0.0.7'
self.driver.default_rsc_size = 1
self.driver.default_vg_name = 'vg-1'
        self.driver.default_downsize_factor = 4096
self.driver.default_pool = STORAGE_POOL_DEF_RESP[0]
self.driver.host_name = 'node_one'
self.driver.diskless = True
self.driver.location_info = 'LinstorDrbd:linstor://localhost'
self.driver.default_backend_name = 'lin-test-driver'
        self.driver.configuration.reserved_percentage = 0
        self.driver.configuration.max_over_subscription_ratio = 0
@mock.patch(DRIVER + 'LinstorDrbdDriver._get_rsc_path')
def test_drbd_return_drbd_config(self, m_rsc_path):
m_rsc_path.return_value = '/dev/drbd1005'
val = self.driver._return_drbd_config(CINDER_VOLUME)
expected = {
'driver_volume_type': 'local',
'data': {
"device_path": str(m_rsc_path.return_value)
}
}
self.assertEqual(expected, val)
@mock.patch(DRIVER + 'LinstorDrbdDriver._get_api_storage_pool_list')
def test_drbd_node_in_sp(self, m_sp_list):
m_sp_list.return_value = (
self._fake_driver.fake_api_storage_pool_list())
val = self.driver._node_in_sp('node-1')
self.assertTrue(val)
@mock.patch(DRIVER + 'LinstorDrbdDriver._get_volume_stats')
def test_drbd_get_volume_stats(self, m_vol_stats):
m_vol_stats.return_value = VOLUME_STATS_RESP
val = self.driver.get_volume_stats()
expected = VOLUME_STATS_RESP
expected["storage_protocol"] = 'DRBD'
self.assertEqual(expected, val)
@mock.patch(DRIVER + 'linstor')
def test_drbd_check_for_setup_error_pass(self, m_linstor):
m_linstor.return_value = True
# No exception should be raised
self.driver.check_for_setup_error()
@mock.patch(DRIVER + 'LinstorDrbdDriver._get_rsc_path')
@mock.patch(DRIVER + 'LinstorDrbdDriver._check_api_reply')
@mock.patch(DRIVER + 'LinstorDrbdDriver._api_rsc_create')
@mock.patch(DRIVER + 'LinstorDrbdDriver._node_in_sp')
def test_drbd_initialize_connection_pass(self,
m_node_sp,
m_rsc_create,
m_check,
m_rsc_path):
m_node_sp.return_value = True
m_rsc_create.return_value = True
m_check.return_value = True
m_rsc_path.return_value = '/dev/drbd1000'
connector = {}
connector["host"] = 'wp-u16-cinder-dev-lg'
val = self.driver.initialize_connection(CINDER_VOLUME, connector)
expected = {
'driver_volume_type': 'local',
'data': {
"device_path": str(m_rsc_path.return_value)
}
}
self.assertEqual(expected, val)
@mock.patch(DRIVER + 'LinstorDrbdDriver._check_api_reply')
@mock.patch(DRIVER + 'LinstorDrbdDriver._api_rsc_delete')
@mock.patch(DRIVER + 'LinstorDrbdDriver._node_in_sp')
def test_drbd_terminate_connection_pass(self,
m_node_sp,
m_rsc_create,
m_check):
m_node_sp.return_value = True
m_rsc_create.return_value = True
m_check.return_value = True
connector = {}
connector["host"] = 'wp-u16-cinder-dev-lg'
# No exception should be raised
self.driver.terminate_connection(CINDER_VOLUME, connector)

==== corpus record: bin/cubes/soma-high-wall.py ====
ab31b285dd268f8dae04c4823de7c75db6608116 | 143 bytes | Python (.py) | tiwo/puzzler @ 7ad3d9a792f0635f7ec59ffa85fb46b54fd77a7e | license: Intel | 1 fork (2022-01-02)

#!/usr/bin/env python
# $Id$
"""46 solutions"""
import puzzler
from puzzler.puzzles.somacubes import SomaHighWall
puzzler.run(SomaHighWall)
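# Running the script enumerates the puzzle's solutions (46, per the module
# docstring) through the puzzler framework's standard entry point.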

==== corpus record: tests/__init__.py ====
ab42a4f3f6bf70978acf87e914a3fe46dbe5863e | 38 bytes | Python (.py) | julienpaul/USER2ERDDAP @ 93dbb84fcdd6716a087c1652991c12510cbf63af | license: MIT | 1 fork (2021-11-26)

"""Unit test package for user2edd."""

==== corpus record: tests/test_mints9_basisset.py ====
ab6036adb04938362807d82c7c9357b93d3158f4 | 19,484 bytes | Python (.py) | nuwandesilva/qcdb @ b47fb2ed550fc4176198ddb1dbea3724d6704d23 | license: BSD-3-Clause

import pytest
from .utils import *
from .addons import *
import qcdb
#! A test of the basis specification. Various basis sets are specified outright and in blocks, both
#! orbital and auxiliary. Constructs libmints BasisSet objects through the constructor that calls
#! qcdb.BasisSet infrastructure. Checks that the resulting bases are of the right size and checks
#! that symmetry of the Molecule observes the basis assignment to atoms.
# cc-pvdz aug-cc-pvdz
# BASIS H 5/ 5 C 14/15 H +4/ 4 C +9/10
# RIFIT H 14/15 C 56/66 H +9/10 C +16/20
# JKFIT H 23/25 C 70/81 H +9/10 C +16/20
smol = """
C 0.0 0.0 0.0
O 1.4 0.0 0.0
H_r -0.5 -0.7 0.0
H_l -0.5 0.7 0.0
"""
BASIS = 'cc-pvdz'
verbose = 2
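# pyconstruct(..., return_dict=True) yields both the qcdb BasisSet object and
# a plain-dict form; the dict is what psi4.core.BasisSet.construct_from_pydict
# consumes in the *_b Psi4 cross-check tests below.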
def test_1():
"""[1] <<< uniform cc-pVDZ >>>"""
qmol = qcdb.Molecule.from_string(smol)
wert, dwert = qcdb.BasisSet.pyconstruct(qmol, 'BASIS', BASIS, verbose=verbose, return_dict=True)
compare_strings('CC-PVDZ', BASIS, 'name')
compare_integers(38, wert.nbf(), 'nbf()')
compare_integers(40, wert.nao(), 'nao()')
compare_strings('c2v', wert.molecule.schoenflies_symbol(), 'symm')
compare_strings('CC-PVDZ', dwert['name'], 'callby')
compare_strings('CC-PVDZ', dwert['blend'], 'blend')
@using_psi4
def test_1b():
"""[1] <<< uniform cc-pVDZ >>>"""
import psi4
qmol = qcdb.Molecule.from_string(smol)
pmol = psi4.core.Molecule.from_string(smol)
wert, dwert = qcdb.BasisSet.pyconstruct(qmol, 'BASIS', BASIS, verbose=verbose, return_dict=True)
pwert = psi4.core.BasisSet.construct_from_pydict(pmol, dwert, -1)
compare_integers(38, pwert.nbf(), 'nbf()')
compare_integers(40, pwert.nao(), 'nao()')
compare_strings('c2v', pwert.molecule().schoenflies_symbol(), 'symm')
compare_strings('CC-PVDZ', pwert.name(), 'callby')
compare_strings('CC-PVDZ', pwert.blend(), 'blend')
def test_2():
"""[2] <<< RIFIT (default) >>>"""
qmol = qcdb.Molecule.from_string(smol)
wert, dwert = qcdb.BasisSet.pyconstruct(qmol, 'DF_BASIS_MP2', '', 'RIFIT', BASIS, verbose=verbose, return_dict=True)
compare_integers(140, wert.nbf(), 'nbf()')
compare_integers(162, wert.nao(), 'nao()')
compare_strings('c2v', wert.molecule.schoenflies_symbol(), 'symm')
compare_strings('(CC-PVDZ AUX)', dwert['name'], 'callby')
compare_strings('CC-PVDZ-RI', dwert['blend'], 'blend')
@using_psi4
def test_2b():
"""[2] <<< RIFIT (default) >>>"""
import psi4
qmol = qcdb.Molecule.from_string(smol)
pmol = psi4.core.Molecule.from_string(smol)
wert, dwert = qcdb.BasisSet.pyconstruct(qmol, 'DF_BASIS_MP2', '', 'RIFIT', BASIS, verbose=verbose, return_dict=True)
pwert = psi4.core.BasisSet.construct_from_pydict(pmol, dwert, -1)
compare_integers(140, pwert.nbf(), 'nbf()')
compare_integers(162, pwert.nao(), 'nao()')
compare_strings('c2v', pwert.molecule().schoenflies_symbol(), 'symm')
compare_strings('(CC-PVDZ AUX)', pwert.name(), 'callby')
compare_strings('CC-PVDZ-RI', pwert.blend(), 'blend')
def test_3():
"""[3] <<< cc-pVDZ w/ aug-cc-pVDZ on C >>>"""
def basisspec_psi4_yo__dz_plus(mol, role):
basstrings = {}
mol.set_basis_all_atoms("cc-pvdz", role=role)
mol.set_basis_by_symbol("c", "aug-cc-pvdz", role=role)
return basstrings
qcdb.libmintsbasisset.basishorde['DZ_PLUS'] = basisspec_psi4_yo__dz_plus
qmol = qcdb.Molecule.from_string(smol)
wert, dwert = qcdb.BasisSet.pyconstruct(qmol, 'BASIS', BASIS, verbose=verbose, return_dict=True)
compare_integers(47, wert.nbf(), 'nbf()')
compare_integers(50, wert.nao(), 'nao()')
compare_strings('c2v', wert.molecule.schoenflies_symbol(), 'symm')
compare_strings('DZ_PLUS', dwert['name'], 'callby')
compare_strings('AUG-CC-PVDZ + CC-PVDZ', dwert['blend'], 'blend')
@using_psi4
def test_3b():
"""[3] <<< cc-pVDZ w/ aug-cc-pVDZ on C >>>"""
import psi4
psi4.basis_helper("""
assign cc-pvdz
assign c aug-cc-pvdz
""", name='dz_PLUS')
qmol = qcdb.Molecule.from_string(smol)
pmol = psi4.core.Molecule.from_string(smol)
wert, dwert = qcdb.BasisSet.pyconstruct(qmol, 'BASIS', BASIS, verbose=verbose, return_dict=True)
pwert = psi4.core.BasisSet.construct_from_pydict(pmol, dwert, -1)
compare_integers(47, pwert.nbf(), 'nbf()')
compare_integers(50, pwert.nao(), 'nao()')
compare_strings('c2v', pwert.molecule().schoenflies_symbol(), 'symm')
compare_strings('DZ_PLUS', pwert.name(), 'callby')
compare_strings('AUG-CC-PVDZ + CC-PVDZ', pwert.blend(), 'blend')
#print('[4] <<< RIFIT (default) >>>')
#wert = psi4.core.BasisSet.build(mymol, 'DF_BASIS_MP2', '', 'RIFIT', psi4.core.get_global_option('BASIS'))
#mymol.print_out()
#wert.print_out()
#psi4.compare_integers(156, wert.nbf(), 'nbf()') #TEST
#psi4.compare_integers(182, wert.nao(), 'nao()') #TEST
#psi4.compare_strings('c2v', mymol.schoenflies_symbol(), 'symm') #TEST
#psi4.compare_strings('(DZ_PLUS AUX)', wert.name(), 'callby') #TEST
#psi4.compare_strings('AUG-CC-PVDZ-RI + CC-PVDZ-RI', wert.blend(), 'blend') #TEST
#mymol.print_out()
#
#
#print('[5] <<< cc-pVDZ w/ aug-cc-pVDZ on C, H_R >>>')
#psi4.basis_helper("""
# assign cc-pvdz
# assign c aug-cc-pvdz
# assign h_r aug-cc-pvdz
#""",
#name='dz_PLUSplus',
#key='BASis')
#wert = psi4.core.BasisSet.build(mymol, 'BASIS', psi4.core.get_global_option('BASIS'))
#psi4.compare_strings('DZ_PLUSPLUS', psi4.core.get_global_option('BASIS'), 'name') #TEST
#psi4.compare_integers(51, wert.nbf(), 'nbf()') #TEST
#psi4.compare_integers(54, wert.nao(), 'nao()') #TEST
#psi4.compare_strings('cs', mymol.schoenflies_symbol(), 'symm') #TEST
#psi4.compare_strings('DZ_PLUSPLUS', wert.name(), 'callby') #TEST
#psi4.compare_strings('AUG-CC-PVDZ + CC-PVDZ', wert.blend(), 'blend') #TEST
#mymol.print_out()
#
#
#print('[6] <<< RIFIT (custom: force cc-pVDZ on H, default on C, O) >>>')
#psi4.basis_helper("""
# assign h cc-pvdz-ri
#""",
#name='dz_PLUSplusRI',
#key='df_basis_mp2')
#wert = psi4.core.BasisSet.build(mymol, 'DF_BASIS_MP2', psi4.core.get_global_option('DF_BASIS_MP2'), 'RIFIT', psi4.core.get_global_option('BASIS'))
#mymol.print_out()
#psi4.compare_integers(156, wert.nbf(), 'nbf()') #TEST
#psi4.compare_integers(182, wert.nao(), 'nao()') #TEST
#psi4.compare_strings('cs', mymol.schoenflies_symbol(), 'symm') #TEST
#psi4.compare_strings('DZ_PLUSPLUSRI', wert.name(), 'callby') #TEST
#psi4.compare_strings('AUG-CC-PVDZ-RI + CC-PVDZ-RI', wert.blend(), 'blend') #TEST
#mymol.print_out()
#
#
#print('[7] <<< cc-pVDZ w/ aug-cc-pVDZ on C, H >>>')
#psi4.basis_helper("""
# assign cc-pvdz
# assign c aug-cc-pvdz
# assign h aug-cc-pvdz
#""",
#name = 'dz_PLUSplusplus')
#wert = psi4.core.BasisSet.build(mymol, 'BASIS', psi4.core.get_global_option('BASIS'))
#psi4.compare_integers(55, wert.nbf(), 'nbf()') #TEST
#psi4.compare_integers(58, wert.nao(), 'nao()') #TEST
#psi4.compare_strings('c2v', mymol.schoenflies_symbol(), 'symm') #TEST
#psi4.compare_strings('DZ_PLUSPLUSPLUS', wert.name(), 'callby') #TEST
#psi4.compare_strings('AUG-CC-PVDZ + CC-PVDZ', wert.blend(), 'blend') #TEST
#mymol.print_out()
#
#
#print('[8] <<< JKFIT (default) >>>')
#wert = psi4.core.BasisSet.build(mymol, 'DF_BASIS_SCF', '', 'JKFIT', psi4.core.get_global_option('BASIS'))
#psi4.compare_integers(220, wert.nbf(), 'nbf()') #TEST
#psi4.compare_integers(252, wert.nao(), 'nao()') #TEST
#psi4.compare_strings('c2v', mymol.schoenflies_symbol(), 'symm') #TEST
#psi4.compare_strings('(DZ_PLUSPLUSPLUS AUX)', wert.name(), 'callby') #TEST
#psi4.compare_strings('AUG-CC-PVDZ-JKFIT + CC-PVDZ-JKFIT', wert.blend(), 'blend') #TEST
#mymol.print_out()
#
#psi4.set_options({'basis': 'aug-cc-pvdz'})
#
#print('[9] <<< aug-cc-pVDZ >>>')
#wert = psi4.core.BasisSet.build(mymol, 'BASIS', psi4.core.get_global_option('BASIS'))
#psi4.compare_integers(64, wert.nbf(), 'nbf()') #TEST
#psi4.compare_integers(68, wert.nao(), 'nao()') #TEST
#psi4.compare_strings('c2v', mymol.schoenflies_symbol(), 'symm') #TEST
#psi4.compare_strings('AUG-CC-PVDZ', wert.name(), 'callby') #TEST
#psi4.compare_strings('AUG-CC-PVDZ', wert.blend(), 'blend') #TEST
#mymol.print_out()
#
#
#print('[10] <<< JKFIT (default) >>>')
#wert = psi4.core.BasisSet.build(mymol, 'DF_BASIS_SCF', '', 'JKFIT', psi4.core.get_global_option('BASIS'))
#psi4.compare_integers(236, wert.nbf(), 'nbf()') #TEST
#psi4.compare_integers(272, wert.nao(), 'nao()') #TEST
#psi4.compare_strings('c2v', mymol.schoenflies_symbol(), 'symm') #TEST
#psi4.compare_strings('(AUG-CC-PVDZ AUX)', wert.name(), 'callby') #TEST
#psi4.compare_strings('AUG-CC-PVDZ-JKFIT', wert.blend(), 'blend') #TEST
#mymol.print_out()
#
#
#mymol2 = psi4.geometry("""
#0 2
#C 0.0 0.0 0.0
#O 1.4 0.0 0.0
#H_r -0.5 -0.6 0.3
#H_l -0.5 0.6 0.3
#H_c -0.5 0.0 0.7
#""")
#
#psi4.set_options({'basis': 'dz_plusplusplus'})
#
#print('[11] <<< cc-pVDZ w/ aug-cc-pVDZ on C, H >>>')
#wert = psi4.core.BasisSet.build(mymol2, 'BASIS', psi4.core.get_global_option('BASIS'))
#psi4.compare_integers(64, wert.nbf(), 'nbf()') #TEST
#psi4.compare_integers(67, wert.nao(), 'nao()') #TEST
#psi4.compare_strings('cs', mymol2.schoenflies_symbol(), 'symm') #TEST
#psi4.compare_strings('DZ_PLUSPLUSPLUS', wert.name(), 'callby') #TEST
#psi4.compare_strings('AUG-CC-PVDZ + CC-PVDZ', wert.blend(), 'blend') #TEST
#mymol2.print_out()
#
#hene = psi4.geometry("""
#He
#Ne 1 2.0
#""")
#
#psi4.basis_helper("""
# assign cc-pv5z
#""", name='disguised5z')
#
#psi4.core.set_global_option('DF_BASIS_MP2', '') # clear df_basis_mp2 {...} to get autoaux below
#
#print('[12] <<< cc-pV5Z on HeNe >>>')
#wert = psi4.core.BasisSet.build(hene, 'BASIS', psi4.core.get_global_option('BASIS'))
#hene.print_out()
#psi4.compare_integers(146, wert.nbf(), 'nbf()') #TEST
#psi4.compare_integers(196, wert.nao(), 'nao()') #TEST
#psi4.compare_strings('DISGUISED5Z', wert.name(), 'callby') #TEST
#psi4.compare_strings('CC-PV5Z', wert.blend(), 'blend') #TEST
#
#print('[13] <<< RI for cc-pV5Z on HeNe >>>')
#wert = psi4.core.BasisSet.build(hene, 'DF_BASIS_MP2', '', 'RIFIT', psi4.core.get_global_option('BASIS'))
#hene.print_out()
#psi4.compare_integers(284, wert.nbf(), 'nbf()') #TEST
#psi4.compare_integers(413, wert.nao(), 'nao()') #TEST
#psi4.compare_strings('(DISGUISED5Z AUX)', wert.name(), 'callby') #TEST
#psi4.compare_strings('CC-PV5Z-RI', wert.blend(), 'blend') #TEST
#
#print('[14] <<< impossible JK for cc-pV5Z on HeNe >>>')
#error_tripped = 0
#try:
# wert = psi4.core.BasisSet.build(hene, 'DF_BASIS_SCF', '', 'JKFIT', psi4.core.get_global_option('BASIS'))
#except qcdb.BasisSetNotFound:
# error_tripped = 1
#psi4.compare_integers(1, error_tripped, 'squashed 4z aux for 5z orb') #TEST
#
#psi4.basis_helper(key='df_basis_scf', name='uggh', block="""
# assign he DEF2-QZVPP-JKFIT
#""")
#hene.print_out()
#
#print('[15] <<< forced JK for cc-pV5Z on HeNe >>>')
#wert = psi4.core.BasisSet.build(hene, 'DF_BASIS_SCF', '', 'JKFIT', psi4.core.get_global_option('BASIS'))
#psi4.compare_integers(169, wert.nbf(), 'nbf()') #TEST
#psi4.compare_integers(241, wert.nao(), 'nao()') #TEST
#psi4.compare_strings('UGGH', wert.name(), 'callby') #TEST
#psi4.compare_strings('CC-PV5Z-JKFIT + DEF2-QZVPP-JKFIT', wert.blend(), 'blend') #TEST
#
# print('[4] <<< RIFIT (default) >>>')
# wert = psi4.core.BasisSet.build(mymol, 'DF_BASIS_MP2', '', 'RIFIT', get_global_option('BASIS'))
# compare_integers(156, wert.nbf(), 'nbf()')
# compare_integers(182, wert.nao(), 'nao()')
# compare_strings('c2v', mymol.schoenflies_symbol(), 'symm')
# compare_strings('(DZ_PLUS AUX)', wert.name(), 'callby')
# compare_strings('AUG-CC-PVDZ-RI + CC-PVDZ-RI', wert.blend(), 'blend')
# mymol.print_out()
# print('[5] <<< cc-pVDZ w/ aug-cc-pVDZ on C, H_R >>>')
# def basisspec_psi4_yo__dz_plusplus(mol, role):
# basstrings = {}
# mol.set_basis_all_atoms("cc-pvdz", role=role)
# mol.set_basis_by_symbol("c", "aug-cc-pvdz", role=role)
# mol.set_basis_by_label("h_r", "aug-cc-pvdz", role=role)
# return basstrings
# qcdb.libmintsbasisset.basishorde['DZ_PLUSPLUS'] = basisspec_psi4_yo__dz_plusplus
# core.set_global_option("BASIS", "dz_PLUSplus")
# wert = psi4.core.BasisSet.build(mymol, 'BASIS', get_global_option('BASIS'))
# compare_strings('DZ_PLUSPLUS', get_global_option('BASIS'), 'name')
# compare_integers(51, wert.nbf(), 'nbf()')
# compare_integers(54, wert.nao(), 'nao()')
# compare_strings('cs', mymol.schoenflies_symbol(), 'symm')
# compare_strings('DZ_PLUSPLUS', wert.name(), 'callby')
# compare_strings('AUG-CC-PVDZ + CC-PVDZ', wert.blend(), 'blend')
# mymol.print_out()
# print('[6] <<< RIFIT (custom: force cc-pVDZ on H, default on C, O) >>>')
# def basisspec_psi4_yo__dz_plusplusri(mol, role):
# basstrings = {}
# mol.set_basis_by_symbol("h", "cc-pvdz-ri", role=role)
# return basstrings
# qcdb.libmintsbasisset.basishorde['DZ_PLUSPLUSRI'] = basisspec_psi4_yo__dz_plusplusri
# core.set_global_option("DF_BASIS_MP2", "dz_PLUSplusRI")
# wert = psi4.core.BasisSet.build(mymol, 'DF_BASIS_MP2', get_global_option('DF_BASIS_MP2'), 'RIFIT', get_global_option('BASIS'))
# compare_integers(156, wert.nbf(), 'nbf()')
# compare_integers(182, wert.nao(), 'nao()')
# compare_strings('cs', mymol.schoenflies_symbol(), 'symm')
# compare_strings('DZ_PLUSPLUSRI', wert.name(), 'callby')
# compare_strings('AUG-CC-PVDZ-RI + CC-PVDZ-RI', wert.blend(), 'blend')
# mymol.print_out()
# print('[7] <<< cc-pVDZ w/ aug-cc-pVDZ on C, H >>>')
# def basisspec_psi4_yo__dz_plusplusplus(mol, role):
# basstrings = {}
# mol.set_basis_all_atoms("cc-pvdz", role=role)
# mol.set_basis_by_symbol("c", "aug-cc-pvdz", role=role)
# mol.set_basis_by_symbol("h", "aug-cc-pvdz", role=role)
# return basstrings
# qcdb.libmintsbasisset.basishorde['DZ_PLUSPLUSPLUS'] = basisspec_psi4_yo__dz_plusplusplus
# core.set_global_option("BASIS", "dz_PLUSplusplus")
# wert = psi4.core.BasisSet.build(mymol, 'BASIS', get_global_option('BASIS'))
# compare_integers(55, wert.nbf(), 'nbf()')
# compare_integers(58, wert.nao(), 'nao()')
# compare_strings('c2v', mymol.schoenflies_symbol(), 'symm')
# compare_strings('DZ_PLUSPLUSPLUS', wert.name(), 'callby')
# compare_strings('AUG-CC-PVDZ + CC-PVDZ', wert.blend(), 'blend')
# mymol.print_out()
# print('[8] <<< JKFIT (default) >>>')
# wert = psi4.core.BasisSet.build(mymol, 'DF_BASIS_SCF', '', 'JKFIT', get_global_option('BASIS'))
# compare_integers(220, wert.nbf(), 'nbf()')
# compare_integers(252, wert.nao(), 'nao()')
# compare_strings('c2v', mymol.schoenflies_symbol(), 'symm')
# compare_strings('(DZ_PLUSPLUSPLUS AUX)', wert.name(), 'callby')
# compare_strings('AUG-CC-PVDZ-JKFIT + CC-PVDZ-JKFIT', wert.blend(), 'blend')
# mymol.print_out()
# core.set_global_option("BASIS", "aug-cc-pvdz")
# print('[9] <<< aug-cc-pVDZ >>>')
# wert = psi4.core.BasisSet.build(mymol, 'BASIS', get_global_option('BASIS'))
# compare_integers(64, wert.nbf(), 'nbf()')
# compare_integers(68, wert.nao(), 'nao()')
# compare_strings('c2v', mymol.schoenflies_symbol(), 'symm')
# compare_strings('AUG-CC-PVDZ', wert.name(), 'callby')
# compare_strings('AUG-CC-PVDZ', wert.blend(), 'blend')
# mymol.print_out()
# print('[10] <<< JKFIT (default) >>>')
# wert = psi4.core.BasisSet.build(mymol, 'DF_BASIS_SCF', '', 'JKFIT', get_global_option('BASIS'))
# compare_integers(236, wert.nbf(), 'nbf()')
# compare_integers(272, wert.nao(), 'nao()')
# compare_strings('c2v', mymol.schoenflies_symbol(), 'symm')
# compare_strings('(AUG-CC-PVDZ AUX)', wert.name(), 'callby')
# compare_strings('AUG-CC-PVDZ-JKFIT', wert.blend(), 'blend')
# mymol.print_out()
# mymol2 = geometry("""
# C 0.0 0.0 0.0
# O 1.4 0.0 0.0
# H_r -0.5 -0.6 0.3
# H_l -0.5 0.6 0.3
# H_c -0.5 0.0 0.7
# ""","mymol2")
# core.IO.set_default_namespace("mymol2")
# core.set_global_option("BASIS", "dz_plusplusplus")
# print('[11] <<< cc-pVDZ w/ aug-cc-pVDZ on C, H >>>')
# wert = psi4.core.BasisSet.build(mymol2, 'BASIS', get_global_option('BASIS'))
# compare_integers(64, wert.nbf(), 'nbf()')
# compare_integers(67, wert.nao(), 'nao()')
# compare_strings('cs', mymol2.schoenflies_symbol(), 'symm')
# compare_strings('DZ_PLUSPLUSPLUS', wert.name(), 'callby')
# compare_strings('AUG-CC-PVDZ + CC-PVDZ', wert.blend(), 'blend')
# mymol2.print_out()
# hene = geometry("""
# He
# Ne 1 2.0
# ""","hene")
# core.IO.set_default_namespace("hene")
# def basisspec_psi4_yo__disguised5z(mol, role):
# basstrings = {}
# mol.set_basis_all_atoms("cc-pv5z", role=role)
# return basstrings
# qcdb.libmintsbasisset.basishorde['DISGUISED5Z'] = basisspec_psi4_yo__disguised5z
# core.set_global_option("BASIS", "disguised5z")
# set_global_option('DF_BASIS_MP2', '')
# print('[12] <<< cc-pV5Z on HeNe >>>')
# wert = psi4.core.BasisSet.build(hene, 'BASIS', get_global_option('BASIS'))
# compare_integers(146, wert.nbf(), 'nbf()')
# compare_integers(196, wert.nao(), 'nao()')
# compare_strings('DISGUISED5Z', wert.name(), 'callby')
# compare_strings('CC-PV5Z', wert.blend(), 'blend')
# hene.print_out()
# print('[13] <<< RI for cc-pV5Z on HeNe >>>')
# wert = psi4.core.BasisSet.build(hene, 'DF_BASIS_MP2', '', 'RIFIT', get_global_option('BASIS'))
# compare_integers(284, wert.nbf(), 'nbf()')
# compare_integers(413, wert.nao(), 'nao()')
# compare_strings('(DISGUISED5Z AUX)', wert.name(), 'callby')
# compare_strings('CC-PV5Z-RI', wert.blend(), 'blend')
# hene.print_out()
# print('[14] <<< impossible JK for cc-pV5Z on HeNe >>>')
# error_tripped = 0
# try:
# wert = psi4.core.BasisSet.build(hene, 'DF_BASIS_SCF', '', 'JKFIT', get_global_option('BASIS'))
# except qcdb.BasisSetNotFound:
# error_tripped = 1
# compare_integers(1, error_tripped, 'squashed 4z aux for 5z orb')
# def basisspec_psi4_yo__uggh(mol, role):
# basstrings = {}
# mol.set_basis_by_symbol("he", "DEF2-QZVPP-JKFIT", role=role)
# return basstrings
# qcdb.libmintsbasisset.basishorde['UGGH'] = basisspec_psi4_yo__uggh
# core.set_global_option("DF_BASIS_SCF", "uggh")
# print('[15] <<< forced JK for cc-pV5Z on HeNe >>>')
# wert = psi4.core.BasisSet.build(hene, 'DF_BASIS_SCF', '', 'JKFIT', get_global_option('BASIS'))
# compare_integers(169, wert.nbf(), 'nbf()')
# compare_integers(241, wert.nao(), 'nao()')
# compare_strings('UGGH', wert.name(), 'callby')
# compare_strings('CC-PV5Z-JKFIT + DEF2-QZVPP-JKFIT', wert.blend(), 'blend')
# hene.print_out()

==== corpus record: src/molecule_containers/test/__init__.py ====
db4323edc2440265a240232682fbdb294fce850b | 40 bytes | Python (.py) | ansible-community/molecule-containers @ 30e6750d053f32d2a3738bb4286e2fba51b29862 | license: MIT | 8 stars, 8 issues, 1 fork (2020-2022)

"""Molecule Containers Driver Tests."""

==== corpus record: app/api/types/user_following.py ====
db5244a9f3507ac3dacacc3c7ae13efbffaaf077 | 190 bytes | Python (.py) | P4R/django-graphql-api-z1 @ 5b469384631e8e916567865b659641ac5710dfb3 | license: MIT

from core.models.user_following import UserFollowing
from graphene_django import DjangoObjectType
class UserFollowingType(DjangoObjectType):
class Meta:
model = UserFollowing
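# DjangoObjectType derives the GraphQL fields automatically from the
# UserFollowing model's fields; no explicit field list is needed here.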

==== corpus record: lib/junno/j_utils/ipython/__init__.py ====
db5811e66e52b3b31a0e8520aad8b189faf82c4a | 181 bytes | Python (.py) | LIV4D/JuNNo @ 7358f8344a7c125088e53aa1de0072c4699a9f07 | license: BSD-3-Clause | 1 issue (2019-03)

from .customwidgets import TinyLoading, RichLabel, TimerLabel, LogView, LogToolBar, HTMLButton, ToolButton, VSpace, HSpace
from .import_js import AutoImportDOMWidget, import_display
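# Package __init__: re-exports the custom Jupyter widget helpers so callers
# can import them directly from junno.j_utils.ipython.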

==== corpus record: weights_conversion.py ====
db617640f01973ba5f9d633da669dd1484dc635d | 22,502 bytes | Python (.py) | hiroyasuakada/Stylegan2-by-PyTorch @ 1866dd043a01b706f3af702f42cf0f0a6abbe30c | license: MIT

import numpy as np
import cv2
import torch
import torch.nn as nn
import torch.nn.functional as F
class WeightsConverter():
def __init__(self):
# 'style_mixing_rate' : ['uns', 'lod' ],
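        # Each entry maps a PyTorch parameter name to [transform_tag, TF variable name].
        # The tag presumably selects how the tensor is transformed during conversion
        # (e.g. 'fc_' for dense-weight transpose, 'con'/'mTc' for conv kernels,
        # 'uns' for unsqueezed scalars, 'any' for a plain copy); the exact
        # semantics live in the converter code below (truncated here).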
self.name_trans_dict = {
'synthesis_network.init_block.const_input' : ['any', 'G_synthesis/4x4/Const/const' ],
'mapping_network.blocks.1.fc.weight' : ['fc_', 'G_mapping/Dense0/weight' ],
'mapping_network.blocks.1.bias.bias' : ['any', 'G_mapping/Dense0/bias' ],
'mapping_network.blocks.2.fc.weight' : ['fc_', 'G_mapping/Dense1/weight' ],
'mapping_network.blocks.2.bias.bias' : ['any', 'G_mapping/Dense1/bias' ],
'mapping_network.blocks.3.fc.weight' : ['fc_', 'G_mapping/Dense2/weight' ],
'mapping_network.blocks.3.bias.bias' : ['any', 'G_mapping/Dense2/bias' ],
'mapping_network.blocks.4.fc.weight' : ['fc_', 'G_mapping/Dense3/weight' ],
'mapping_network.blocks.4.bias.bias' : ['any', 'G_mapping/Dense3/bias' ],
'mapping_network.blocks.5.fc.weight' : ['fc_', 'G_mapping/Dense4/weight' ],
'mapping_network.blocks.5.bias.bias' : ['any', 'G_mapping/Dense4/bias' ],
'mapping_network.blocks.6.fc.weight' : ['fc_', 'G_mapping/Dense5/weight' ],
'mapping_network.blocks.6.bias.bias' : ['any', 'G_mapping/Dense5/bias' ],
'mapping_network.blocks.7.fc.weight' : ['fc_', 'G_mapping/Dense6/weight' ],
'mapping_network.blocks.7.bias.bias' : ['any', 'G_mapping/Dense6/bias' ],
'mapping_network.blocks.8.fc.weight' : ['fc_', 'G_mapping/Dense7/weight' ],
'mapping_network.blocks.8.bias.bias' : ['any', 'G_mapping/Dense7/bias' ],
'mapping_network.blocks.9.avg_style' : ['any', 'dlatent_avg' ],
'synthesis_network.init_block.conv.conv.weight' : ['con', 'G_synthesis/4x4/Conv/weight' ],
'synthesis_network.init_block.conv.conv.fc.weight' : ['fc_', 'G_synthesis/4x4/Conv/mod_weight' ],
'synthesis_network.init_block.conv.conv.bias.bias' : ['any', 'G_synthesis/4x4/Conv/mod_bias' ],
'synthesis_network.init_block.conv.noise.noise_scaler' : ['uns', 'G_synthesis/4x4/Conv/noise_strength' ],
'synthesis_network.init_block.conv.noise.const_noise' : ['any', 'G_synthesis/noise0' ],
'synthesis_network.init_block.conv.bias.bias' : ['any', 'G_synthesis/4x4/Conv/bias' ],
'synthesis_network.blocks.0.conv0_up.conv.weight' : ['mTc', 'G_synthesis/8x8/Conv0_up/weight' ],
'synthesis_network.blocks.0.conv0_up.conv.fc.weight' : ['fc_', 'G_synthesis/8x8/Conv0_up/mod_weight' ],
'synthesis_network.blocks.0.conv0_up.conv.bias.bias' : ['any', 'G_synthesis/8x8/Conv0_up/mod_bias' ],
'synthesis_network.blocks.0.conv0_up.noise.noise_scaler' : ['uns', 'G_synthesis/8x8/Conv0_up/noise_strength' ],
'synthesis_network.blocks.0.conv0_up.noise.const_noise' : ['any', 'G_synthesis/noise1' ],
'synthesis_network.blocks.0.conv0_up.bias.bias' : ['any', 'G_synthesis/8x8/Conv0_up/bias' ],
'synthesis_network.blocks.0.conv1.conv.weight' : ['con', 'G_synthesis/8x8/Conv1/weight' ],
'synthesis_network.blocks.0.conv1.conv.fc.weight' : ['fc_', 'G_synthesis/8x8/Conv1/mod_weight' ],
'synthesis_network.blocks.0.conv1.conv.bias.bias' : ['any', 'G_synthesis/8x8/Conv1/mod_bias' ],
'synthesis_network.blocks.0.conv1.noise.noise_scaler' : ['uns', 'G_synthesis/8x8/Conv1/noise_strength' ],
'synthesis_network.blocks.0.conv1.noise.const_noise' : ['any', 'G_synthesis/noise2' ],
'synthesis_network.blocks.0.conv1.bias.bias' : ['any', 'G_synthesis/8x8/Conv1/bias' ],
'synthesis_network.blocks.1.conv0_up.conv.weight' : ['mTc', 'G_synthesis/16x16/Conv0_up/weight' ],
'synthesis_network.blocks.1.conv0_up.conv.fc.weight' : ['fc_', 'G_synthesis/16x16/Conv0_up/mod_weight' ],
'synthesis_network.blocks.1.conv0_up.conv.bias.bias' : ['any', 'G_synthesis/16x16/Conv0_up/mod_bias' ],
'synthesis_network.blocks.1.conv0_up.noise.noise_scaler' : ['uns', 'G_synthesis/16x16/Conv0_up/noise_strength' ],
'synthesis_network.blocks.1.conv0_up.noise.const_noise' : ['any', 'G_synthesis/noise3' ],
'synthesis_network.blocks.1.conv0_up.bias.bias' : ['any', 'G_synthesis/16x16/Conv0_up/bias' ],
'synthesis_network.blocks.1.conv1.conv.weight' : ['con', 'G_synthesis/16x16/Conv1/weight' ],
'synthesis_network.blocks.1.conv1.conv.fc.weight' : ['fc_', 'G_synthesis/16x16/Conv1/mod_weight' ],
'synthesis_network.blocks.1.conv1.conv.bias.bias' : ['any', 'G_synthesis/16x16/Conv1/mod_bias' ],
'synthesis_network.blocks.1.conv1.noise.noise_scaler' : ['uns', 'G_synthesis/16x16/Conv1/noise_strength' ],
'synthesis_network.blocks.1.conv1.noise.const_noise' : ['any', 'G_synthesis/noise4' ],
'synthesis_network.blocks.1.conv1.bias.bias' : ['any', 'G_synthesis/16x16/Conv1/bias' ],
'synthesis_network.blocks.2.conv0_up.conv.weight' : ['mTc', 'G_synthesis/32x32/Conv0_up/weight' ],
'synthesis_network.blocks.2.conv0_up.conv.fc.weight' : ['fc_', 'G_synthesis/32x32/Conv0_up/mod_weight' ],
'synthesis_network.blocks.2.conv0_up.conv.bias.bias' : ['any', 'G_synthesis/32x32/Conv0_up/mod_bias' ],
'synthesis_network.blocks.2.conv0_up.noise.noise_scaler' : ['uns', 'G_synthesis/32x32/Conv0_up/noise_strength' ],
'synthesis_network.blocks.2.conv0_up.noise.const_noise' : ['any', 'G_synthesis/noise5' ],
'synthesis_network.blocks.2.conv0_up.bias.bias' : ['any', 'G_synthesis/32x32/Conv0_up/bias' ],
'synthesis_network.blocks.2.conv1.conv.weight' : ['con', 'G_synthesis/32x32/Conv1/weight' ],
'synthesis_network.blocks.2.conv1.conv.fc.weight' : ['fc_', 'G_synthesis/32x32/Conv1/mod_weight' ],
'synthesis_network.blocks.2.conv1.conv.bias.bias' : ['any', 'G_synthesis/32x32/Conv1/mod_bias' ],
'synthesis_network.blocks.2.conv1.noise.noise_scaler' : ['uns', 'G_synthesis/32x32/Conv1/noise_strength' ],
'synthesis_network.blocks.2.conv1.noise.const_noise' : ['any', 'G_synthesis/noise6' ],
'synthesis_network.blocks.2.conv1.bias.bias' : ['any', 'G_synthesis/32x32/Conv1/bias' ],
'synthesis_network.blocks.3.conv0_up.conv.weight' : ['mTc', 'G_synthesis/64x64/Conv0_up/weight' ],
'synthesis_network.blocks.3.conv0_up.conv.fc.weight' : ['fc_', 'G_synthesis/64x64/Conv0_up/mod_weight' ],
'synthesis_network.blocks.3.conv0_up.conv.bias.bias' : ['any', 'G_synthesis/64x64/Conv0_up/mod_bias' ],
'synthesis_network.blocks.3.conv0_up.noise.noise_scaler' : ['uns', 'G_synthesis/64x64/Conv0_up/noise_strength' ],
'synthesis_network.blocks.3.conv0_up.noise.const_noise' : ['any', 'G_synthesis/noise7' ],
'synthesis_network.blocks.3.conv0_up.bias.bias' : ['any', 'G_synthesis/64x64/Conv0_up/bias' ],
'synthesis_network.blocks.3.conv1.conv.weight' : ['con', 'G_synthesis/64x64/Conv1/weight' ],
'synthesis_network.blocks.3.conv1.conv.fc.weight' : ['fc_', 'G_synthesis/64x64/Conv1/mod_weight' ],
'synthesis_network.blocks.3.conv1.conv.bias.bias' : ['any', 'G_synthesis/64x64/Conv1/mod_bias' ],
'synthesis_network.blocks.3.conv1.noise.noise_scaler' : ['uns', 'G_synthesis/64x64/Conv1/noise_strength' ],
'synthesis_network.blocks.3.conv1.noise.const_noise' : ['any', 'G_synthesis/noise8' ],
'synthesis_network.blocks.3.conv1.bias.bias' : ['any', 'G_synthesis/64x64/Conv1/bias' ],
'synthesis_network.blocks.4.conv0_up.conv.weight' : ['mTc', 'G_synthesis/128x128/Conv0_up/weight' ],
'synthesis_network.blocks.4.conv0_up.conv.fc.weight' : ['fc_', 'G_synthesis/128x128/Conv0_up/mod_weight' ],
'synthesis_network.blocks.4.conv0_up.conv.bias.bias' : ['any', 'G_synthesis/128x128/Conv0_up/mod_bias' ],
'synthesis_network.blocks.4.conv0_up.noise.noise_scaler' : ['uns', 'G_synthesis/128x128/Conv0_up/noise_strength' ],
'synthesis_network.blocks.4.conv0_up.noise.const_noise' : ['any', 'G_synthesis/noise9' ],
'synthesis_network.blocks.4.conv0_up.bias.bias' : ['any', 'G_synthesis/128x128/Conv0_up/bias' ],
'synthesis_network.blocks.4.conv1.conv.weight' : ['con', 'G_synthesis/128x128/Conv1/weight' ],
'synthesis_network.blocks.4.conv1.conv.fc.weight' : ['fc_', 'G_synthesis/128x128/Conv1/mod_weight' ],
'synthesis_network.blocks.4.conv1.conv.bias.bias' : ['any', 'G_synthesis/128x128/Conv1/mod_bias' ],
'synthesis_network.blocks.4.conv1.noise.noise_scaler' : ['uns', 'G_synthesis/128x128/Conv1/noise_strength' ],
'synthesis_network.blocks.4.conv1.noise.const_noise' : ['any', 'G_synthesis/noise10' ],
'synthesis_network.blocks.4.conv1.bias.bias' : ['any', 'G_synthesis/128x128/Conv1/bias' ],
'synthesis_network.blocks.5.conv0_up.conv.weight' : ['mTc', 'G_synthesis/256x256/Conv0_up/weight' ],
'synthesis_network.blocks.5.conv0_up.conv.fc.weight' : ['fc_', 'G_synthesis/256x256/Conv0_up/mod_weight' ],
'synthesis_network.blocks.5.conv0_up.conv.bias.bias' : ['any', 'G_synthesis/256x256/Conv0_up/mod_bias' ],
'synthesis_network.blocks.5.conv0_up.noise.noise_scaler' : ['uns', 'G_synthesis/256x256/Conv0_up/noise_strength' ],
'synthesis_network.blocks.5.conv0_up.noise.const_noise' : ['any', 'G_synthesis/noise11' ],
'synthesis_network.blocks.5.conv0_up.bias.bias' : ['any', 'G_synthesis/256x256/Conv0_up/bias' ],
'synthesis_network.blocks.5.conv1.conv.weight' : ['con', 'G_synthesis/256x256/Conv1/weight' ],
'synthesis_network.blocks.5.conv1.conv.fc.weight' : ['fc_', 'G_synthesis/256x256/Conv1/mod_weight' ],
'synthesis_network.blocks.5.conv1.conv.bias.bias' : ['any', 'G_synthesis/256x256/Conv1/mod_bias' ],
'synthesis_network.blocks.5.conv1.noise.noise_scaler' : ['uns', 'G_synthesis/256x256/Conv1/noise_strength' ],
'synthesis_network.blocks.5.conv1.noise.const_noise' : ['any', 'G_synthesis/noise12' ],
'synthesis_network.blocks.5.conv1.bias.bias' : ['any', 'G_synthesis/256x256/Conv1/bias' ],
'synthesis_network.blocks.6.conv0_up.conv.weight' : ['mTc', 'G_synthesis/512x512/Conv0_up/weight' ],
'synthesis_network.blocks.6.conv0_up.conv.fc.weight' : ['fc_', 'G_synthesis/512x512/Conv0_up/mod_weight' ],
'synthesis_network.blocks.6.conv0_up.conv.bias.bias' : ['any', 'G_synthesis/512x512/Conv0_up/mod_bias' ],
'synthesis_network.blocks.6.conv0_up.noise.noise_scaler' : ['uns', 'G_synthesis/512x512/Conv0_up/noise_strength' ],
'synthesis_network.blocks.6.conv0_up.noise.const_noise' : ['any', 'G_synthesis/noise13' ],
'synthesis_network.blocks.6.conv0_up.bias.bias' : ['any', 'G_synthesis/512x512/Conv0_up/bias' ],
'synthesis_network.blocks.6.conv1.conv.weight' : ['con', 'G_synthesis/512x512/Conv1/weight' ],
'synthesis_network.blocks.6.conv1.conv.fc.weight' : ['fc_', 'G_synthesis/512x512/Conv1/mod_weight' ],
'synthesis_network.blocks.6.conv1.conv.bias.bias' : ['any', 'G_synthesis/512x512/Conv1/mod_bias' ],
'synthesis_network.blocks.6.conv1.noise.noise_scaler' : ['uns', 'G_synthesis/512x512/Conv1/noise_strength' ],
'synthesis_network.blocks.6.conv1.noise.const_noise' : ['any', 'G_synthesis/noise14' ],
'synthesis_network.blocks.6.conv1.bias.bias' : ['any', 'G_synthesis/512x512/Conv1/bias' ],
'synthesis_network.blocks.7.conv0_up.conv.weight' : ['mTc', 'G_synthesis/1024x1024/Conv0_up/weight' ],
'synthesis_network.blocks.7.conv0_up.conv.fc.weight' : ['fc_', 'G_synthesis/1024x1024/Conv0_up/mod_weight' ],
'synthesis_network.blocks.7.conv0_up.conv.bias.bias' : ['any', 'G_synthesis/1024x1024/Conv0_up/mod_bias' ],
'synthesis_network.blocks.7.conv0_up.noise.noise_scaler' : ['uns', 'G_synthesis/1024x1024/Conv0_up/noise_strength'],
'synthesis_network.blocks.7.conv0_up.noise.const_noise' : ['any', 'G_synthesis/noise15' ],
'synthesis_network.blocks.7.conv0_up.bias.bias' : ['any', 'G_synthesis/1024x1024/Conv0_up/bias' ],
'synthesis_network.blocks.7.conv1.conv.weight' : ['con', 'G_synthesis/1024x1024/Conv1/weight' ],
'synthesis_network.blocks.7.conv1.conv.fc.weight' : ['fc_', 'G_synthesis/1024x1024/Conv1/mod_weight' ],
'synthesis_network.blocks.7.conv1.conv.bias.bias' : ['any', 'G_synthesis/1024x1024/Conv1/mod_bias' ],
'synthesis_network.blocks.7.conv1.noise.noise_scaler' : ['uns', 'G_synthesis/1024x1024/Conv1/noise_strength' ],
'synthesis_network.blocks.7.conv1.noise.const_noise' : ['any', 'G_synthesis/noise16' ],
'synthesis_network.blocks.7.conv1.bias.bias' : ['any', 'G_synthesis/1024x1024/Conv1/bias' ],
'synthesis_network.init_block.to_rgb.conv.weight' : ['con', 'G_synthesis/4x4/ToRGB/weight' ],
'synthesis_network.init_block.to_rgb.conv.fc.weight' : ['fc_', 'G_synthesis/4x4/ToRGB/mod_weight' ],
'synthesis_network.init_block.to_rgb.conv.bias.bias' : ['any', 'G_synthesis/4x4/ToRGB/mod_bias' ],
'synthesis_network.init_block.to_rgb.bias.bias' : ['any', 'G_synthesis/4x4/ToRGB/bias' ],
'synthesis_network.blocks.0.to_rgb.conv.weight' : ['con', 'G_synthesis/8x8/ToRGB/weight' ],
'synthesis_network.blocks.0.to_rgb.conv.fc.weight' : ['fc_', 'G_synthesis/8x8/ToRGB/mod_weight' ],
'synthesis_network.blocks.0.to_rgb.conv.bias.bias' : ['any', 'G_synthesis/8x8/ToRGB/mod_bias' ],
'synthesis_network.blocks.0.to_rgb.bias.bias' : ['any', 'G_synthesis/8x8/ToRGB/bias' ],
'synthesis_network.blocks.1.to_rgb.conv.weight' : ['con', 'G_synthesis/16x16/ToRGB/weight' ],
'synthesis_network.blocks.1.to_rgb.conv.fc.weight' : ['fc_', 'G_synthesis/16x16/ToRGB/mod_weight' ],
'synthesis_network.blocks.1.to_rgb.conv.bias.bias' : ['any', 'G_synthesis/16x16/ToRGB/mod_bias' ],
'synthesis_network.blocks.1.to_rgb.bias.bias' : ['any', 'G_synthesis/16x16/ToRGB/bias' ],
'synthesis_network.blocks.2.to_rgb.conv.weight' : ['con', 'G_synthesis/32x32/ToRGB/weight' ],
'synthesis_network.blocks.2.to_rgb.conv.fc.weight' : ['fc_', 'G_synthesis/32x32/ToRGB/mod_weight' ],
'synthesis_network.blocks.2.to_rgb.conv.bias.bias' : ['any', 'G_synthesis/32x32/ToRGB/mod_bias' ],
'synthesis_network.blocks.2.to_rgb.bias.bias' : ['any', 'G_synthesis/32x32/ToRGB/bias' ],
'synthesis_network.blocks.3.to_rgb.conv.weight' : ['con', 'G_synthesis/64x64/ToRGB/weight' ],
'synthesis_network.blocks.3.to_rgb.conv.fc.weight' : ['fc_', 'G_synthesis/64x64/ToRGB/mod_weight' ],
'synthesis_network.blocks.3.to_rgb.conv.bias.bias' : ['any', 'G_synthesis/64x64/ToRGB/mod_bias' ],
'synthesis_network.blocks.3.to_rgb.bias.bias' : ['any', 'G_synthesis/64x64/ToRGB/bias' ],
'synthesis_network.blocks.4.to_rgb.conv.weight' : ['con', 'G_synthesis/128x128/ToRGB/weight' ],
'synthesis_network.blocks.4.to_rgb.conv.fc.weight' : ['fc_', 'G_synthesis/128x128/ToRGB/mod_weight' ],
'synthesis_network.blocks.4.to_rgb.conv.bias.bias' : ['any', 'G_synthesis/128x128/ToRGB/mod_bias' ],
'synthesis_network.blocks.4.to_rgb.bias.bias' : ['any', 'G_synthesis/128x128/ToRGB/bias' ],
'synthesis_network.blocks.5.to_rgb.conv.weight' : ['con', 'G_synthesis/256x256/ToRGB/weight' ],
'synthesis_network.blocks.5.to_rgb.conv.fc.weight' : ['fc_', 'G_synthesis/256x256/ToRGB/mod_weight' ],
'synthesis_network.blocks.5.to_rgb.conv.bias.bias' : ['any', 'G_synthesis/256x256/ToRGB/mod_bias' ],
'synthesis_network.blocks.5.to_rgb.bias.bias' : ['any', 'G_synthesis/256x256/ToRGB/bias' ],
'synthesis_network.blocks.6.to_rgb.conv.weight' : ['con', 'G_synthesis/512x512/ToRGB/weight' ],
'synthesis_network.blocks.6.to_rgb.conv.fc.weight' : ['fc_', 'G_synthesis/512x512/ToRGB/mod_weight' ],
'synthesis_network.blocks.6.to_rgb.conv.bias.bias' : ['any', 'G_synthesis/512x512/ToRGB/mod_bias' ],
'synthesis_network.blocks.6.to_rgb.bias.bias' : ['any', 'G_synthesis/512x512/ToRGB/bias' ],
'synthesis_network.blocks.7.to_rgb.conv.weight' : ['con', 'G_synthesis/1024x1024/ToRGB/weight' ],
'synthesis_network.blocks.7.to_rgb.conv.fc.weight' : ['fc_', 'G_synthesis/1024x1024/ToRGB/mod_weight' ],
'synthesis_network.blocks.7.to_rgb.conv.bias.bias' : ['any', 'G_synthesis/1024x1024/ToRGB/mod_bias' ],
'synthesis_network.blocks.7.to_rgb.bias.bias' : ['any', 'G_synthesis/1024x1024/ToRGB/bias' ]
}
self.functions_dict = {
# EqualizedConv2DTranspose (iC,oC,kH,kW)
'mTc' : lambda weight: torch.flip(torch.from_numpy(weight.transpose((2,3,0,1))), [2, 3]),
# Conv2DTranspose (iC,oC,kH,kW)
'Tco' : lambda weight: torch.from_numpy(weight.transpose((2,3,0,1))),
# Conv2D (oC,iC,kH,kW)
'con' : lambda weight: torch.from_numpy(weight.transpose((3,2,0,1))),
# FullyConnect (oD, iD)
'fc_' : lambda weight: torch.from_numpy(weight.transpose((1, 0))),
# Bias, const_input, noise, v1 noise
'any' : lambda weight: torch.from_numpy(weight),
# Style-Mixing, v2 noise (scalar)
'uns' : lambda weight: torch.from_numpy(np.array(weight).reshape(1)),
}
def convert(self, src_dict):
new_dict_pt = { k : self.functions_dict[v[0]](src_dict[v[1]]) for k,v in self.name_trans_dict.items()}
return new_dict_pt
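# --- Hedged usage sketch (not part of the original file) ---
# The 'fc_' rule transposes fully-connected weights from TensorFlow's
# (in, out) layout to PyTorch's (out, in). The toy array below only checks
# that rule; loading the real TF parameter dict (tf_params) is assumed to
# happen elsewhere, e.g. from the official StyleGAN2 pickle.
converter = WeightsConverter()
w = np.zeros((3, 5), dtype=np.float32)   # stand-in TF dense weight, (in, out)
t = converter.functions_dict['fc_'](w)
print(t.shape)                           # torch.Size([5, 3])
# Full conversion, given tf_params: {tf_variable_name: np.ndarray}:
# state_dict = converter.convert(tf_params)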
| 106.64455
| 135
| 0.531375
| 2,400
| 22,502
| 4.727083
| 0.051667
| 0.166152
| 0.248215
| 0.063464
| 0.891847
| 0.832966
| 0.678096
| 0.27316
| 0.005818
| 0
| 0
| 0.064131
| 0.330593
| 22,502
| 211
| 136
| 106.64455
| 0.689039
| 0.011821
| 0
| 0
| 0
| 0
| 0.561344
| 0.524902
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011364
| false
| 0
| 0.028409
| 0
| 0.051136
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
db992ef8b9ae681cc01b907e87eb0610ff4c1bf4
| 39
|
py
|
Python
|
backend/api/app/tests/unit/__init__.py
|
jhouser/houseoffun
|
a5a9dab377864d4da15f7ba64b505d2db3af34ef
|
[
"MIT"
] | null | null | null |
backend/api/app/tests/unit/__init__.py
|
jhouser/houseoffun
|
a5a9dab377864d4da15f7ba64b505d2db3af34ef
|
[
"MIT"
] | 3
|
2018-03-31T09:52:03.000Z
|
2018-08-16T18:12:51.000Z
|
backend/api/app/tests/unit/__init__.py
|
jhouser/houseoffun
|
a5a9dab377864d4da15f7ba64b505d2db3af34ef
|
[
"MIT"
] | 1
|
2018-03-21T16:05:36.000Z
|
2018-03-21T16:05:36.000Z
|
from api.app.tests.unit.games import *
| 19.5
| 38
| 0.769231
| 7
| 39
| 4.285714
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
db9ad5a040f2528802272bc68bcb90564dbfea2b
| 9,193
|
py
|
Python
|
tests/test_BSpline_Curve2D.py
|
Nodli/NURBS-Python
|
27b0209d5c193936769bdc802c1fc50f34f678f2
|
[
"MIT"
] | null | null | null |
tests/test_BSpline_Curve2D.py
|
Nodli/NURBS-Python
|
27b0209d5c193936769bdc802c1fc50f34f678f2
|
[
"MIT"
] | null | null | null |
tests/test_BSpline_Curve2D.py
|
Nodli/NURBS-Python
|
27b0209d5c193936769bdc802c1fc50f34f678f2
|
[
"MIT"
] | null | null | null |
"""
Tests for the NURBS-Python package
Released under The MIT License. See LICENSE file for details.
Copyright (c) 2018 Onur Rauf Bingol
Tests the geomdl.BSpline.Curve module. Requires "pytest" to run.
"""
from geomdl import BSpline
from geomdl import evaluators
GEOMDL_DELTA = 0.001
OBJECT_INSTANCE = BSpline.Curve
CONTROL_POINTS = [[5.0, 5.0], [10.0, 10.0], [20.0, 15.0], [35.0, 15.0], [45.0, 10.0], [50.0, 5.0]]
def test_bspline_curve_name():
# Create a Curve instance
curve = OBJECT_INSTANCE()
curve.name = "Testing"
assert curve.name == "Testing"
def test_bspline_curve_degree():
# Create a curve instance
curve = OBJECT_INSTANCE()
# Set curve degree
curve.degree = 3
assert curve.degree == 3
def test_bspline_curve_ctrlpts():
# Create a curve instance
curve = OBJECT_INSTANCE()
# Set curve degree
curve.degree = 3
# Set control points
curve.ctrlpts = [[5.0, 5.0], [10.0, 10.0], [20.0, 15.0], [35.0, 15.0], [45.0, 10.0], [50.0, 5.0]]
assert curve.ctrlpts == ((5.0, 5.0), (10.0, 10.0), (20.0, 15.0), (35.0, 15.0), (45.0, 10.0), (50.0, 5.0))
assert curve.dimension == 2
def test_bspline_curve_knot_vector():
# Create a curve instance
curve = OBJECT_INSTANCE()
# Set curve degree
curve.degree = 3
# Set control points
curve.ctrlpts = [[5.0, 5.0], [10.0, 10.0], [20.0, 15.0], [35.0, 15.0], [45.0, 10.0], [50.0, 5.0]]
# Set knot vector
curve.knotvector = [0.0, 0.0, 0.0, 0.0, 0.33, 0.66, 1.0, 1.0, 1.0, 1.0]
assert curve.knotvector == (0.0, 0.0, 0.0, 0.0, 0.33, 0.66, 1.0, 1.0, 1.0, 1.0)
def test_bspline_curve2d_eval1():
# Create a curve instance
curve = OBJECT_INSTANCE()
# Set curve degree
curve.degree = 3
# Set control points
curve.ctrlpts = CONTROL_POINTS
# Set knot vector
curve.knotvector = [0.0, 0.0, 0.0, 0.0, 0.33, 0.66, 1.0, 1.0, 1.0, 1.0]
# Evaluate curve
evalpt = curve.curvept(0.0)
# Evaluation result
res = [5.0, 5.0]
assert abs(evalpt[0] - res[0]) < GEOMDL_DELTA
assert abs(evalpt[1] - res[1]) < GEOMDL_DELTA
def test_bspline_curve2d_eval2():
# Create a curve instance
curve = OBJECT_INSTANCE()
# Set curve degree
curve.degree = 3
# Set control points
curve.ctrlpts = CONTROL_POINTS
# Set knot vector
curve.knotvector = [0.0, 0.0, 0.0, 0.0, 0.33, 0.66, 1.0, 1.0, 1.0, 1.0]
# Evaluate curve
evalpt = curve.curvept(0.3)
# Evaluation result
res = [18.617, 13.377]
assert abs(evalpt[0] - res[0]) < GEOMDL_DELTA
assert abs(evalpt[1] - res[1]) < GEOMDL_DELTA
def test_bspline_curve2d_eval3():
# Create a curve instance
curve = OBJECT_INSTANCE()
# Set curve degree
curve.degree = 3
# Set control points
curve.ctrlpts = CONTROL_POINTS
# Set knot vector
curve.knotvector = [0.0, 0.0, 0.0, 0.0, 0.33, 0.66, 1.0, 1.0, 1.0, 1.0]
# Evaluate curve
evalpt = curve.curvept(0.5)
# Evaluation result
res = [27.645, 14.691]
assert abs(evalpt[0] - res[0]) < GEOMDL_DELTA
assert abs(evalpt[1] - res[1]) < GEOMDL_DELTA
def test_bspline_curve2d_eval4():
# Create a curve instance
curve = OBJECT_INSTANCE()
# Set curve degree
curve.degree = 3
# Set control points
curve.ctrlpts = CONTROL_POINTS
# Set knot vector
curve.knotvector = [0.0, 0.0, 0.0, 0.0, 0.33, 0.66, 1.0, 1.0, 1.0, 1.0]
# Evaluate curve
evalpt = curve.curvept(0.6)
# Evaluation result
res = [32.143, 14.328]
assert abs(evalpt[0] - res[0]) < GEOMDL_DELTA
assert abs(evalpt[1] - res[1]) < GEOMDL_DELTA
def test_bspline_curve2d_eval5():
# Create a curve instance
curve = OBJECT_INSTANCE()
# Set curve degree
curve.degree = 3
# Set control points
curve.ctrlpts = CONTROL_POINTS
# Set knot vector
curve.knotvector = [0.0, 0.0, 0.0, 0.0, 0.33, 0.66, 1.0, 1.0, 1.0, 1.0]
# Evaluate curve
evalpt = curve.curvept(1.0)
# Evaluation result
res = [50.0, 5.0]
assert abs(evalpt[0] - res[0]) < GEOMDL_DELTA
assert abs(evalpt[1] - res[1]) < GEOMDL_DELTA
def test_bspline_curve2d_deriv_ctrlpts():
test_degree = 3
test_knotvector = [0.0, 0.0, 0.0, 0.0, 0.33, 0.66, 1.0, 1.0, 1.0, 1.0]
test_u = 0.35
test_order = test_degree
# Create a curve instance
curve = OBJECT_INSTANCE()
# Set curve degree
curve.degree = test_degree
# Set control points
curve.ctrlpts = CONTROL_POINTS
# Set knot vector
curve.knotvector = test_knotvector
# Take the derivative
der1 = curve.derivatives(u=test_u, order=test_order)
# Compute control points of the derivative
deriv_ctrlpts = curve.derivatives_ctrlpts(order=test_order - 1)
for k in range(0, test_order):
curvek = OBJECT_INSTANCE()
curvek.degree = test_degree - k
# Cutting out None values in deriv_ctrlpts[k] and excess clamping values in test_knotvector
if k == 0:
curvek.ctrlpts = deriv_ctrlpts[k]
curvek.knotvector = test_knotvector
else:
curvek.ctrlpts = deriv_ctrlpts[k][:-k]
curvek.knotvector = test_knotvector[k:-k]
assert abs(curvek.curvept(test_u)[0] - der1[k][0]) < GEOMDL_DELTA
assert abs(curvek.curvept(test_u)[1] - der1[k][1]) < GEOMDL_DELTA
def test_bspline_curve2d_deriv1():
# Create a curve instance
curve = OBJECT_INSTANCE()
# Set curve degree
curve.degree = 3
# Set control points
curve.ctrlpts = CONTROL_POINTS
# Set knot vector
curve.knotvector = [0.0, 0.0, 0.0, 0.0, 0.33, 0.66, 1.0, 1.0, 1.0, 1.0]
# Take the derivative
der1 = curve.derivatives(u=0.35, order=2)
curve.evaluator = evaluators.CurveEvaluator2()
der2 = curve.derivatives(u=0.35, order=2)
assert abs(der1[0][0] - der2[0][0]) < GEOMDL_DELTA
assert abs(der1[0][1] - der2[0][1]) < GEOMDL_DELTA
assert abs(der1[1][0] - der2[1][0]) < GEOMDL_DELTA
assert abs(der1[1][1] - der2[1][1]) < GEOMDL_DELTA
assert abs(der1[2][0] - der2[2][0]) < GEOMDL_DELTA
assert abs(der1[2][1] - der2[2][1]) < GEOMDL_DELTA
def test_bspline_curve2d_deriv2():
# Create a curve instance
curve = OBJECT_INSTANCE()
# Set curve degree
curve.degree = 3
# Set control points
curve.ctrlpts = CONTROL_POINTS
# Set knot vector
curve.knotvector = [0.0, 0.0, 0.0, 0.0, 0.33, 0.66, 1.0, 1.0, 1.0, 1.0]
# Take the derivative
evalpt = curve.curvept(u=0.35)
der1 = curve.derivatives(u=0.35)
curve.evaluator = evaluators.CurveEvaluator2()
der2 = curve.derivatives(u=0.35)
assert abs(der1[0][0] - evalpt[0]) < GEOMDL_DELTA
assert abs(der1[0][1] - evalpt[1]) < GEOMDL_DELTA
assert abs(der2[0][0] - evalpt[0]) < GEOMDL_DELTA
assert abs(der2[0][1] - evalpt[1]) < GEOMDL_DELTA
def test_bspline_curve2d_insert_knot1():
# Create a curve instance
curve = OBJECT_INSTANCE()
# Set curve degree
curve.degree = 3
# Set control points
curve.ctrlpts = CONTROL_POINTS
# Set knot vector
curve.knotvector = [0.0, 0.0, 0.0, 0.0, 0.33, 0.66, 1.0, 1.0, 1.0, 1.0]
# Set evaluation parameter
u = 0.3
# Insert knot
curve.insert_knot(u)
# Evaluate curve at the given parameter
evalpt = curve.curvept(u)
# Evaluation result
res = [18.617, 13.377]
assert abs(evalpt[0] - res[0]) < GEOMDL_DELTA
assert abs(evalpt[1] - res[1]) < GEOMDL_DELTA
def test_bspline_curve2d_insert_knot2():
# Create a curve instance
curve = OBJECT_INSTANCE()
# Set curve degree
curve.degree = 3
# Set control points
curve.ctrlpts = CONTROL_POINTS
# Set knot vector
curve.knotvector = [0.0, 0.0, 0.0, 0.0, 0.33, 0.66, 1.0, 1.0, 1.0, 1.0]
# Set evaluation parameter
u = 0.6
# Insert knot
curve.insert_knot(u)
# Evaluate curve at the given parameter
evalpt = curve.curvept(u)
# Evaluation result
res = [32.143, 14.328]
assert abs(evalpt[0] - res[0]) < GEOMDL_DELTA
assert abs(evalpt[1] - res[1]) < GEOMDL_DELTA
def test_bspline_curve2d_insert_knot3():
# Create a curve instance
curve = OBJECT_INSTANCE()
# Set curve degree
curve.degree = 3
# Set control points
curve.ctrlpts = CONTROL_POINTS
# Set knot vector
curve.knotvector = [0.0, 0.0, 0.0, 0.0, 0.33, 0.66, 1.0, 1.0, 1.0, 1.0]
# Set evaluation parameter
u = 0.6
# Insert knot
curve.insert_knot(u, 2)
# Evaluate curve at the given parameter
evalpt = curve.curvept(u)
# Evaluation result
res = [32.143, 14.328]
assert abs(evalpt[0] - res[0]) < GEOMDL_DELTA
assert abs(evalpt[1] - res[1]) < GEOMDL_DELTA
def test_bspline_curve2d_insert_knot4():
# Create a curve instance
curve = OBJECT_INSTANCE()
# Set curve degree
curve.degree = 3
# Set control points
curve.ctrlpts = CONTROL_POINTS
# Set knot vector
curve.knotvector = [0.0, 0.0, 0.0, 0.0, 0.33, 0.66, 1.0, 1.0, 1.0, 1.0]
# Set evaluation parameter
u = 0.6
# Insert knot
curve.insert_knot(u, 2)
assert curve.knotvector[5] == u
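# --- Hedged standalone sketch (not a test) ---
# Mirrors the fixture used throughout this file; the printed point matches the
# value asserted in test_bspline_curve2d_eval2 above.
curve = OBJECT_INSTANCE()
curve.degree = 3
curve.ctrlpts = CONTROL_POINTS
curve.knotvector = [0.0, 0.0, 0.0, 0.0, 0.33, 0.66, 1.0, 1.0, 1.0, 1.0]
print(curve.curvept(0.3))  # approximately [18.617, 13.377]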
| 24.255937
| 109
| 0.622865
| 1,463
| 9,193
| 3.818182
| 0.084074
| 0.04189
| 0.052632
| 0.06015
| 0.821876
| 0.794128
| 0.753849
| 0.721805
| 0.698532
| 0.698532
| 0
| 0.100374
| 0.243555
| 9,193
| 378
| 110
| 24.320106
| 0.702905
| 0.215381
| 0
| 0.55625
| 0
| 0
| 0.001971
| 0
| 0
| 0
| 0
| 0
| 0.2125
| 1
| 0.1
| false
| 0
| 0.0125
| 0
| 0.1125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
db9c8aedd9a4b3bada7820aafd103e935331e982
| 193
|
py
|
Python
|
src/ansible_navigator/__main__.py
|
ekmixon/ansible-navigator
|
9903d82ac76a4aee61a64c2e5f19f5ccca3cf136
|
[
"Apache-2.0",
"MIT"
] | 134
|
2021-03-26T17:44:49.000Z
|
2022-03-31T13:15:52.000Z
|
src/ansible_navigator/__main__.py
|
cidrblock/ansible-navigator
|
674e5edce4d4181e6f79b6f24b590a347156665d
|
[
"Apache-2.0",
"MIT"
] | 631
|
2021-03-26T19:38:32.000Z
|
2022-03-31T22:57:36.000Z
|
src/ansible_navigator/__main__.py
|
cidrblock/ansible-navigator
|
674e5edce4d4181e6f79b6f24b590a347156665d
|
[
"Apache-2.0",
"MIT"
] | 48
|
2021-03-26T17:44:29.000Z
|
2022-03-08T21:12:26.000Z
|
"""A runpy entry point for ansible-navigator.
This makes it possible to invoke the CLI
via :command:`python -m ansible_navigator`.
"""
from .cli import main
if __name__ == "__main__":
main()
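# --- Hedged reference sketch (not part of the original file) ---
# The `python -m ansible_navigator` invocation described in the docstring can
# be reproduced programmatically with the standard-library runpy module.
import runpy

runpy.run_module("ansible_navigator", run_name="__main__")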
| 19.3
| 45
| 0.715026
| 28
| 193
| 4.607143
| 0.821429
| 0.248062
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170984
| 193
| 9
| 46
| 21.444444
| 0.80625
| 0.642487
| 0
| 0
| 0
| 0
| 0.129032
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
db9dd5c7685805e039685af877913f3d38b61eb3
| 762
|
py
|
Python
|
sem6/wbo/laby/lab4/src/task5.py
|
abrams27/mimuw
|
ad8b01b63c05d7903aab29fd145845cf97ac32d9
|
[
"MIT"
] | 3
|
2021-10-07T18:19:37.000Z
|
2021-10-07T19:02:14.000Z
|
sem6/wbo/laby/lab4/src/task5.py
|
abrams27/mimuw
|
ad8b01b63c05d7903aab29fd145845cf97ac32d9
|
[
"MIT"
] | null | null | null |
sem6/wbo/laby/lab4/src/task5.py
|
abrams27/mimuw
|
ad8b01b63c05d7903aab29fd145845cf97ac32d9
|
[
"MIT"
] | 3
|
2021-12-02T11:09:09.000Z
|
2022-01-25T21:31:23.000Z
|
from Bio import Phylo
from task3 import pah_paralogues_tree, h2bfs_paralogues_tree, pah_orthologues_30_tree
Phylo.write([pah_paralogues_tree], open("../output/Human_PAH_paralogues.nck", "w"), format="newick")
Phylo.write([h2bfs_paralogues_tree], open("../output/Human_H2BFS_paralogues.nck", "w"), format="newick")
Phylo.write([pah_orthologues_30_tree], open("../output/Human_PAH_orthologues_30.nck", "w"), format="newick")
Phylo.write([pah_paralogues_tree], open("../output/Human_PAH_paralogues.phyloxml", "w"), format="phyloxml")
Phylo.write([h2bfs_paralogues_tree], open("../output/Human_H2BFS_paralogues.phyloxml", "w"), format="phyloxml")
Phylo.write([pah_orthologues_30_tree], open("../output/Human_PAH_orthologues_30.phyloxml", "w"), format="phyloxml")
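# --- Hedged variant (not part of the original file) ---
# The calls above leave their file handles to be closed by garbage collection.
# Context managers close them deterministically; Bio.Phylo.write also accepts
# a plain filename, which avoids manual handles entirely. One representative
# pair is shown; the remaining trees follow the same pattern.
with open("../output/Human_PAH_paralogues.nck", "w") as handle:
    Phylo.write([pah_paralogues_tree], handle, format="newick")
Phylo.write([pah_paralogues_tree], "../output/Human_PAH_paralogues.phyloxml", format="phyloxml")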
| 63.5
| 115
| 0.775591
| 105
| 762
| 5.295238
| 0.190476
| 0.151079
| 0.151079
| 0.205036
| 0.798561
| 0.798561
| 0.798561
| 0.629496
| 0.629496
| 0.629496
| 0
| 0.022069
| 0.048556
| 762
| 11
| 116
| 69.272727
| 0.744828
| 0
| 0
| 0
| 0
| 0
| 0.366142
| 0.30315
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
db9ef149d809a565a625e650b8fbde81face0976
| 94
|
py
|
Python
|
UICPC/25/test.py
|
MilladMuhammadi/Competitive-Programming
|
9f84a2d2734a5efe0e1fde0062e51782cd5af2c6
|
[
"MIT"
] | null | null | null |
UICPC/25/test.py
|
MilladMuhammadi/Competitive-Programming
|
9f84a2d2734a5efe0e1fde0062e51782cd5af2c6
|
[
"MIT"
] | null | null | null |
UICPC/25/test.py
|
MilladMuhammadi/Competitive-Programming
|
9f84a2d2734a5efe0e1fde0062e51782cd5af2c6
|
[
"MIT"
] | null | null | null |
a,b = map(int,input().split())
li = list(map(int,input().split()))
ls = list(input().split())
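# --- Hedged test harness (not part of the original file) ---
# Feeds a synthetic stdin so the three parsing lines above can be exercised
# without a judge; the sample values are arbitrary.
import io
import sys

sys.stdin = io.StringIO("3 4\n1 2 3 4\nred green blue\n")
a, b = map(int, input().split())
li = list(map(int, input().split()))
ls = list(input().split())
print(a, b, li, ls)  # 3 4 [1, 2, 3, 4] ['red', 'green', 'blue']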
| 23.5
| 35
| 0.595745
| 16
| 94
| 3.5
| 0.5625
| 0.535714
| 0.392857
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095745
| 94
| 3
| 36
| 31.333333
| 0.658824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
dba371b933f6346af51835a5553b40040575995e
| 55
|
py
|
Python
|
dataprep/data_connector/__init__.py
|
Abhishek-pv/dataprep
|
9997fa3e46b82716caabeb697af012c8946136c5
|
[
"MIT"
] | 1
|
2020-11-29T08:15:57.000Z
|
2020-11-29T08:15:57.000Z
|
dataprep/data_connector/__init__.py
|
Abhishek-pv/dataprep
|
9997fa3e46b82716caabeb697af012c8946136c5
|
[
"MIT"
] | null | null | null |
dataprep/data_connector/__init__.py
|
Abhishek-pv/dataprep
|
9997fa3e46b82716caabeb697af012c8946136c5
|
[
"MIT"
] | null | null | null |
"""
DataConnector
"""
from .connector import Connector
| 11
| 32
| 0.745455
| 5
| 55
| 8.2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127273
| 55
| 4
| 33
| 13.75
| 0.854167
| 0.236364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
918f52edcbeb2a272a48ac93f13ab71549ba81ae
| 181
|
py
|
Python
|
pincer/middleware/voice_state_delete.py
|
ashu96902/Pincer
|
102ac4ff998cbb3c57a86b252439f69895650cf3
|
[
"MIT"
] | null | null | null |
pincer/middleware/voice_state_delete.py
|
ashu96902/Pincer
|
102ac4ff998cbb3c57a86b252439f69895650cf3
|
[
"MIT"
] | null | null | null |
pincer/middleware/voice_state_delete.py
|
ashu96902/Pincer
|
102ac4ff998cbb3c57a86b252439f69895650cf3
|
[
"MIT"
] | null | null | null |
# Copyright Pincer 2021-Present
# Full MIT License can be found in `LICENSE` at the project root.
""" sent when a user parts a subscribed voice channel"""
# TODO: Implement event
| 25.857143
| 65
| 0.740331
| 28
| 181
| 4.785714
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027027
| 0.18232
| 181
| 6
| 66
| 30.166667
| 0.878378
| 0.917127
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0.166667
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
919eb588964010155b0b7dd2a028b5b344bb2038
| 113
|
py
|
Python
|
zemberek/tokenization/__init__.py
|
Loodos/zemberek-python
|
4f6b47abda98ed5a4d440738d39a92374d50ef6b
|
[
"Apache-2.0"
] | 52
|
2020-08-24T09:52:58.000Z
|
2022-03-19T05:02:06.000Z
|
zemberek/tokenization/__init__.py
|
Loodos/zemberek-python
|
4f6b47abda98ed5a4d440738d39a92374d50ef6b
|
[
"Apache-2.0"
] | 7
|
2020-09-07T09:02:33.000Z
|
2021-11-26T14:15:41.000Z
|
zemberek/tokenization/__init__.py
|
Loodos/zemberek-python
|
4f6b47abda98ed5a4d440738d39a92374d50ef6b
|
[
"Apache-2.0"
] | 7
|
2020-09-23T19:27:55.000Z
|
2022-03-14T09:02:41.000Z
|
from .turkish_tokenizer import TurkishTokenizer
from .turkish_sentence_extractor import TurkishSentenceExtractor
| 37.666667
| 64
| 0.911504
| 11
| 113
| 9.090909
| 0.727273
| 0.22
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070796
| 113
| 2
| 65
| 56.5
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
37f039b075bca3696ce9bb084cf31f443216dbfc
| 6,951
|
py
|
Python
|
pytorch2keras/activation_layers.py
|
idearibosome/pytorch2keras-srzoo
|
ef0b98429142a6e62a64912b7edefd5ffff72ff3
|
[
"MIT"
] | null | null | null |
pytorch2keras/activation_layers.py
|
idearibosome/pytorch2keras-srzoo
|
ef0b98429142a6e62a64912b7edefd5ffff72ff3
|
[
"MIT"
] | null | null | null |
pytorch2keras/activation_layers.py
|
idearibosome/pytorch2keras-srzoo
|
ef0b98429142a6e62a64912b7edefd5ffff72ff3
|
[
"MIT"
] | null | null | null |
import tensorflow.keras.layers
import numpy as np
import random
import string
import tensorflow as tf
from .common import random_string
def convert_relu(params, w_name, scope_name, inputs, layers, weights, names):
"""
Convert relu layer.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary with keras tensors
weights: pytorch state_dict
names: use short names for keras layers
"""
print('Converting relu ...')
if names == 'short':
tf_name = 'RELU' + random_string(4)
elif names == 'keep':
tf_name = w_name
else:
tf_name = w_name + str(random.random())
relu = tensorflow.keras.layers.Activation('relu', name=tf_name)
layers[scope_name] = relu(layers[inputs[0]])
def convert_lrelu(params, w_name, scope_name, inputs, layers, weights, names):
"""
Convert leaky relu layer.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary with keras tensors
weights: pytorch state_dict
names: use short names for keras layers
"""
print('Converting lrelu ...')
if names == 'short':
tf_name = 'lRELU' + random_string(3)
elif names == 'keep':
tf_name = w_name
else:
tf_name = w_name + str(random.random())
leakyrelu = \
tensorflow.keras.layers.LeakyReLU(alpha=params['alpha'], name=tf_name)
layers[scope_name] = leakyrelu(layers[inputs[0]])
def convert_sigmoid(params, w_name, scope_name, inputs, layers, weights, names):
"""
Convert sigmoid layer.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary with keras tensors
weights: pytorch state_dict
names: use short names for keras layers
"""
print('Converting sigmoid ...')
if names == 'short':
tf_name = 'SIGM' + random_string(4)
elif names == 'keep':
tf_name = w_name
else:
tf_name = w_name + str(random.random())
sigmoid = tensorflow.keras.layers.Activation('sigmoid', name=tf_name)
layers[scope_name] = sigmoid(layers[inputs[0]])
def convert_softmax(params, w_name, scope_name, inputs, layers, weights, names):
"""
Convert softmax layer.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary with keras tensors
weights: pytorch state_dict
names: use short names for keras layers
"""
print('Converting softmax ...')
if names == 'short':
tf_name = 'SMAX' + random_string(4)
elif names == 'keep':
tf_name = w_name
else:
tf_name = w_name + str(random.random())
if 'axis' in params:
axis = params['axis']
if 'value' in params:
axis = params['value'].item()
else:
if len(inputs) > 1:
axis = layers[inputs[1] + '_np']
def target_layer(x, dim=axis):
import tensorflow.keras
return tensorflow.keras.activations.softmax(x, axis=dim)
lambda_layer = tensorflow.keras.layers.Lambda(target_layer)
layers[scope_name] = lambda_layer(layers[inputs[0]])
def convert_tanh(params, w_name, scope_name, inputs, layers, weights, names):
"""
Convert tanh layer.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary with keras tensors
weights: pytorch state_dict
names: use short names for keras layers
"""
print('Converting tanh ...')
if names == 'short':
tf_name = 'TANH' + random_string(4)
elif names == 'keep':
tf_name = w_name
else:
tf_name = w_name + str(random.random())
tanh = tensorflow.keras.layers.Activation('tanh', name=tf_name)
layers[scope_name] = tanh(layers[inputs[0]])
def convert_hardtanh(params, w_name, scope_name, inputs, layers, weights, names):
"""
Convert hardtanh layer.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary with keras tensors
weights: pytorch state_dict
names: use short names for keras layers
"""
print('Converting hardtanh (clip) ...')
def target_layer(x, max_val=float(params['max_val']), min_val=float(params['min_val'])):
return tf.minimum(max_val, tf.maximum(min_val, x))
lambda_layer = tensorflow.keras.layers.Lambda(target_layer)
layers[scope_name] = lambda_layer(layers[inputs[0]])
def convert_selu(params, w_name, scope_name, inputs, layers, weights, names):
"""
Convert selu layer.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary with keras tensors
weights: pytorch state_dict
names: use short names for keras layers
"""
print('Converting selu ...')
if names == 'short':
tf_name = 'SELU' + random_string(4)
elif names == 'keep':
tf_name = w_name
else:
tf_name = w_name + str(random.random())
selu = tensorflow.keras.layers.Activation('selu', name=tf_name)
layers[scope_name] = selu(layers[inputs[0]])
def convert_prelu(params, w_name, scope_name, inputs, layers, weights, names):
"""
Convert parametric relu layer.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary with keras tensors
weights: pytorch state_dict
names: use short names for keras layers
"""
print('Converting prelu ...')
if names == 'short':
tf_name = 'pRELU' + random_string(3)
elif names == 'keep':
tf_name = w_name
else:
tf_name = w_name + str(random.random())
input_name = inputs[0]
weight_name = inputs[1]
W = layers[weight_name]
if params['change_ordering']:
prelu = \
tensorflow.keras.layers.PReLU(weights=[W], shared_axes=[1, 2], name=tf_name)
layers[scope_name] = prelu(layers[input_name])
else:
prelu = \
tensorflow.keras.layers.PReLU(weights=[W], shared_axes=[2, 3], name=tf_name)
layers[scope_name] = prelu(layers[input_name])
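# --- Hedged toy driver (not part of the original file) ---
# The real dispatch loop lives elsewhere in the package; this sketch just
# builds a Keras input tensor, runs convert_relu on it, and reads the result
# back out of the layers dict. All names here are illustrative.
layers = {'input0': tensorflow.keras.layers.Input(shape=(8,))}
convert_relu(params={}, w_name='relu1', scope_name='relu1_out',
             inputs=['input0'], layers=layers, weights={}, names='short')
print(layers['relu1_out'].shape)  # (None, 8)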
| 29.578723
| 92
| 0.638469
| 881
| 6,951
| 4.880817
| 0.097616
| 0.06907
| 0.055814
| 0.035814
| 0.790465
| 0.734419
| 0.705349
| 0.705349
| 0.705349
| 0.682558
| 0
| 0.004289
| 0.261977
| 6,951
| 234
| 93
| 29.705128
| 0.833918
| 0.346137
| 0
| 0.441176
| 0
| 0
| 0.080668
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.098039
| false
| 0
| 0.068627
| 0.009804
| 0.186275
| 0.078431
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
37f827a760a644553b82ff61ff4de7c893c97cb6
| 9,490
|
py
|
Python
|
ontologyaccess/tests/test_views_ajax.py
|
bihealth/sodar-server
|
0c6a03c274ab34cd8987280fe97dc8989551d4bd
|
[
"MIT"
] | null | null | null |
ontologyaccess/tests/test_views_ajax.py
|
bihealth/sodar-server
|
0c6a03c274ab34cd8987280fe97dc8989551d4bd
|
[
"MIT"
] | 1
|
2021-05-28T10:59:49.000Z
|
2021-06-03T12:30:23.000Z
|
ontologyaccess/tests/test_views_ajax.py
|
bihealth/sodar-server
|
0c6a03c274ab34cd8987280fe97dc8989551d4bd
|
[
"MIT"
] | null | null | null |
"""Tests for Ajax API views in the ontologyaccess app"""
import json
from django.urls import reverse
from ontologyaccess.tests.test_views import (
TestOntologyAccessViewBase,
OBO_TERM_NAME,
)
OBO_ONTOLOGY_ID_ALT = 'alt.obo'
OBO_ONTOLOGY_NAME_ALT = 'ALT'
OBO_ONTOLOGY_FILE_ALT = 'alt.obo'
OBO_ONTOLOGY_TITLE_ALT = 'Alternative ontology'
OBO_TERM_ID_ALT = 'ALT:0000003'
OBO_TERM_NAME_ALT = 'Alt term'
class TestOBOOntologyListAjaxView(TestOntologyAccessViewBase):
"""Tests for OBOOntologyListAjaxView"""
def test_list(self):
"""Test listing ontologies"""
with self.login(self.superuser):
response = self.client.get(reverse('ontologyaccess:ajax_obo_list'))
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
expected = {
'ontologies': {
self.ontology.name: {
'sodar_uuid': str(self.ontology.sodar_uuid),
'file': self.ontology.file,
'title': self.ontology.title,
'ontology_id': self.ontology.ontology_id,
'description': self.ontology.description,
'data_version': self.ontology.data_version,
'term_url': self.ontology.term_url,
}
}
}
self.assertEqual(response_data, expected)
class TestOBOTermQueryAjaxView(TestOntologyAccessViewBase):
"""Tests for OBOTermQueryAjaxView"""
def setUp(self):
super().setUp()
# Create second ontology and term
self.ontology2 = self._make_obo_ontology(
name=OBO_ONTOLOGY_NAME_ALT,
file=OBO_ONTOLOGY_FILE_ALT,
ontology_id=OBO_ONTOLOGY_ID_ALT,
title=OBO_ONTOLOGY_TITLE_ALT,
)
self.term2 = self._make_obo_term(
ontology=self.ontology2,
term_id=OBO_TERM_ID_ALT,
name=OBO_TERM_NAME_ALT,
)
def test_query(self):
"""Test querying for a single term"""
query_data = {'s': self.term.name}
with self.login(self.superuser):
response = self.client.get(
reverse('ontologyaccess:ajax_obo_term_query'), data=query_data
)
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data['terms']), 1)
expected = {
'ontology_name': self.ontology.name,
'term_id': self.term.term_id,
'name': self.term.name,
# 'definition': self.term.definition,
'is_obsolete': self.term.is_obsolete,
'replaced_by': self.term.replaced_by,
'accession': self.term.get_url(),
}
self.assertEqual(response_data['terms'][0], expected)
def test_query_multiple(self):
"""Test querying for multiple terms"""
query_data = {'s': 'term'}
with self.login(self.superuser):
response = self.client.get(
reverse('ontologyaccess:ajax_obo_term_query'), data=query_data
)
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data['terms']), 2)
def test_query_limit(self):
"""Test querying limited to a specific ontology"""
query_data = {'s': 'term', 'o': self.ontology2.name}
with self.login(self.superuser):
response = self.client.get(
reverse('ontologyaccess:ajax_obo_term_query'),
data=query_data,
)
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data['terms']), 1)
expected = {
'ontology_name': self.ontology2.name,
'term_id': self.term2.term_id,
'name': self.term2.name,
# 'definition': self.term2.definition,
'is_obsolete': self.term2.is_obsolete,
'replaced_by': self.term2.replaced_by,
'accession': self.term2.get_url(),
}
self.assertEqual(response_data['terms'][0], expected)
def test_query_limit_multiple(self):
"""Test querying limited to a multiple ontologies"""
query_data = {
's': 'term',
'o': [self.ontology.name, self.ontology2.name],
}
with self.login(self.superuser):
response = self.client.get(
reverse('ontologyaccess:ajax_obo_term_query'),
data=query_data,
)
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data['terms']), 2)
def test_query_no_data(self):
"""Test querying without a query string (should fail)"""
query_data = {}
with self.login(self.superuser):
response = self.client.get(
reverse('ontologyaccess:ajax_obo_term_query'),
data=query_data,
)
self.assertEqual(response.status_code, 400)
def test_query_order(self):
"""Test querying with ordering by ontology"""
query_data = {
's': 'term',
'o': [self.ontology2.name, self.ontology.name],
'order': '1',
}
with self.login(self.superuser):
response = self.client.get(
reverse('ontologyaccess:ajax_obo_term_query'),
data=query_data,
)
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data['terms']), 2)
expected = {
'ontology_name': self.ontology2.name,
'term_id': self.term2.term_id,
'name': self.term2.name,
# 'definition': self.term2.definition,
'is_obsolete': self.term2.is_obsolete,
'replaced_by': self.term2.replaced_by,
'accession': self.term2.get_url(),
}
self.assertEqual(response_data['terms'][0], expected)
def test_query_id(self):
"""Test querying for a single term with term id"""
query_data = {'s': self.term.term_id}
with self.login(self.superuser):
response = self.client.get(
reverse('ontologyaccess:ajax_obo_term_query'), data=query_data
)
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data['terms']), 1)
expected = {
'ontology_name': self.ontology.name,
'term_id': self.term.term_id,
'name': self.term.name,
# 'definition': self.term.definition,
'is_obsolete': self.term.is_obsolete,
'replaced_by': self.term.replaced_by,
'accession': self.term.get_url(),
}
self.assertEqual(response_data['terms'][0], expected)
class TestOBOTermListAjaxView(TestOntologyAccessViewBase):
"""Tests for OBOTermListAjaxView"""
def setUp(self):
super().setUp()
# Create second ontology and term
self.ontology2 = self._make_obo_ontology(
name=OBO_ONTOLOGY_NAME_ALT,
file=OBO_ONTOLOGY_FILE_ALT,
ontology_id=OBO_ONTOLOGY_ID_ALT,
title=OBO_ONTOLOGY_TITLE_ALT,
)
self.term2 = self._make_obo_term(
ontology=self.ontology2,
term_id=OBO_TERM_ID_ALT,
name=OBO_TERM_NAME_ALT,
)
def test_list(self):
"""Test listing OBO ontology terms"""
query_data = {'t': [OBO_TERM_NAME, OBO_TERM_NAME_ALT]}
with self.login(self.superuser):
response = self.client.get(
reverse('ontologyaccess:ajax_obo_term_list'),
data=query_data,
)
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data['terms']), 2)
expected = [
{
'ontology_name': self.ontology.name,
'term_id': self.term.term_id,
'name': self.term.name,
# 'definition': self.term.definition,
'is_obsolete': self.term.is_obsolete,
'replaced_by': self.term.replaced_by,
'accession': self.term.get_url(),
},
{
'ontology_name': self.ontology2.name,
'term_id': self.term2.term_id,
'name': self.term2.name,
# 'definition': self.term2.definition,
'is_obsolete': self.term2.is_obsolete,
'replaced_by': self.term2.replaced_by,
'accession': self.term2.get_url(),
},
]
self.assertEqual(response_data['terms'], expected)
def test_list_inexact(self):
"""Test listing OBO ontology terms with an inexact key (should fail)"""
query_data = {'t': 'term'}
with self.login(self.superuser):
response = self.client.get(
reverse('ontologyaccess:ajax_obo_term_list'),
data=query_data,
)
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data['terms']), 0)
| 36.92607
| 79
| 0.588303
| 1,022
| 9,490
| 5.233855
| 0.098826
| 0.042064
| 0.068798
| 0.031782
| 0.794728
| 0.769303
| 0.73883
| 0.727613
| 0.727613
| 0.715835
| 0
| 0.012014
| 0.298314
| 9,490
| 256
| 80
| 37.070313
| 0.79126
| 0.088936
| 0
| 0.637681
| 0
| 0
| 0.103992
| 0.038749
| 0
| 0
| 0
| 0
| 0.115942
| 1
| 0.057971
| false
| 0
| 0.014493
| 0
| 0.086957
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
530a3ecb5f378a938392475b39b89d6cd8b75efa
| 271
|
py
|
Python
|
pandas/core/categorical.py
|
vimalromeo/pandas
|
7c14e4f14aff216be558bf5d4d2d00b4838c2360
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 69
|
2020-03-31T06:40:17.000Z
|
2022-02-25T11:48:18.000Z
|
venv/lib/python3.7/site-packages/pandas/core/categorical.py
|
John1001Song/Big-Data-Robo-Adviser
|
9444dce96954c546333d5aecc92a06c3bfd19aa5
|
[
"MIT"
] | 8
|
2019-12-04T23:44:11.000Z
|
2022-02-10T08:31:40.000Z
|
venv/lib/python3.7/site-packages/pandas/core/categorical.py
|
John1001Song/Big-Data-Robo-Adviser
|
9444dce96954c546333d5aecc92a06c3bfd19aa5
|
[
"MIT"
] | 28
|
2020-04-15T15:24:17.000Z
|
2021-12-26T04:05:02.000Z
|
import warnings
# TODO: Remove after 0.23.x
warnings.warn("'pandas.core' is private. Use 'pandas.Categorical'",
FutureWarning, stacklevel=2)
from pandas.core.arrays import Categorical # noqa
from pandas.core.dtypes.dtypes import CategoricalDtype # noqa
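# --- Hedged demonstration (not part of the original file) ---
# On a pandas version that still ships this shim (roughly the 0.23-0.25 era,
# an assumption), importing the module in a fresh interpreter surfaces the
# FutureWarning declared above:
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    import pandas.core.categorical  # noqa: F401
print(caught[0].category is FutureWarning)  # True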
| 30.111111
| 67
| 0.741697
| 35
| 271
| 5.742857
| 0.657143
| 0.149254
| 0.139303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017621
| 0.162362
| 271
| 8
| 68
| 33.875
| 0.867841
| 0.129151
| 0
| 0
| 0
| 0
| 0.215517
| 0
| 0
| 0
| 0
| 0.125
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5312d6d857638eaef4227f098e957bf0cbe14fcb
| 223
|
py
|
Python
|
src/moredataframes/encode.py
|
GlorifiedStatistics/MoreDataframes
|
147d5b8104d1cbd1cf2836220f43fb6c8ca099b7
|
[
"MIT"
] | null | null | null |
src/moredataframes/encode.py
|
GlorifiedStatistics/MoreDataframes
|
147d5b8104d1cbd1cf2836220f43fb6c8ca099b7
|
[
"MIT"
] | null | null | null |
src/moredataframes/encode.py
|
GlorifiedStatistics/MoreDataframes
|
147d5b8104d1cbd1cf2836220f43fb6c8ca099b7
|
[
"MIT"
] | null | null | null |
"""
Function to apply the encodings.
"""
from moredataframes.mdfutils.typing import ArrayLike, EncodingDict
def encode_df(df: ArrayLike, encodings: EncodingDict):
"""
Apply the given encodings to the dataframe (currently a stub).
:return: None
"""
pass
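# --- Hedged sketch (not the package's actual implementation) ---
# One plausible shape for the stub above, assuming EncodingDict maps column
# names to encoding callables and df behaves like a pandas DataFrame.
def encode_df_sketch(df, encodings):
    result = df.copy()
    for column, encoder in encodings.items():
        # Apply each column's encoder; unlisted columns pass through untouched.
        result[column] = encoder(result[column])
    return result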
| 17.153846
| 66
| 0.686099
| 24
| 223
| 6.333333
| 0.791667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.206278
| 223
| 12
| 67
| 18.583333
| 0.858757
| 0.255605
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
5358f9f570cc90ee01f9ef159a9bb3bc279a323a
| 233
|
py
|
Python
|
tests/context.py
|
hhatefi/sc_planner
|
d704cb8a5eb62075b992eb244ac7b45de52b1203
|
[
"MIT"
] | 2
|
2020-05-19T19:55:33.000Z
|
2020-11-17T20:02:32.000Z
|
tests/context.py
|
hhatefi/sc_planner
|
d704cb8a5eb62075b992eb244ac7b45de52b1203
|
[
"MIT"
] | null | null | null |
tests/context.py
|
hhatefi/sc_planner
|
d704cb8a5eb62075b992eb244ac7b45de52b1203
|
[
"MIT"
] | 1
|
2021-04-04T15:07:31.000Z
|
2021-04-04T15:07:31.000Z
|
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import lib.entities as entities
import lib.parser as parser
import lib.supply_chain as supply_chain
import lib.solver as solver
| 25.888889
| 82
| 0.785408
| 40
| 233
| 4.425
| 0.425
| 0.20339
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004785
| 0.103004
| 233
| 8
| 83
| 29.125
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0.008584
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.857143
| 0
| 0.857143
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
536a452d94d21b49a142da85f242635b35dcae94
| 168
|
py
|
Python
|
kinesis_producer/partitioner.py
|
Centriam/kinesis_producer
|
d6710d48f5d61ae3398843e92de4bc480d9f7109
|
[
"MIT"
] | 25
|
2016-09-21T11:27:05.000Z
|
2021-01-03T17:12:24.000Z
|
kinesis_producer/partitioner.py
|
Centriam/kinesis_producer
|
d6710d48f5d61ae3398843e92de4bc480d9f7109
|
[
"MIT"
] | 2
|
2016-04-15T18:17:43.000Z
|
2017-03-07T16:41:23.000Z
|
kinesis_producer/partitioner.py
|
Centriam/kinesis_producer
|
d6710d48f5d61ae3398843e92de4bc480d9f7109
|
[
"MIT"
] | 14
|
2016-04-19T21:18:17.000Z
|
2020-11-09T00:20:40.000Z
|
import random
def random_partitioner(stream_record):
"""Generate a random partition_key."""
random_key = str(random.randint(0, 10**12))
return random_key
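# --- Hedged usage sketch (not part of the original file) ---
# The partitioner ignores its argument and returns a random integer in
# [0, 10**12] rendered as a string, spreading records across Kinesis shards.
key = random_partitioner(stream_record=b'{"event": "demo"}')
print(key)  # e.g. '734519826304'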
| 21
| 47
| 0.720238
| 23
| 168
| 5.043478
| 0.695652
| 0.155172
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035714
| 0.166667
| 168
| 7
| 48
| 24
| 0.792857
| 0.190476
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
72bf7db6f0bc0d14da261a4f4d4942a2560df01b
| 210
|
py
|
Python
|
tickit/core/state_interfaces/__init__.py
|
dls-controls/tickit
|
00bb013e69674bcfe4926f365ecb3c65c080abe8
|
[
"Apache-2.0"
] | 4
|
2021-09-16T13:35:33.000Z
|
2022-02-01T23:35:53.000Z
|
tickit/core/state_interfaces/__init__.py
|
dls-controls/tickit
|
00bb013e69674bcfe4926f365ecb3c65c080abe8
|
[
"Apache-2.0"
] | 46
|
2021-09-16T13:44:58.000Z
|
2022-02-02T13:42:56.000Z
|
tickit/core/state_interfaces/__init__.py
|
dls-controls/tickit
|
00bb013e69674bcfe4926f365ecb3c65c080abe8
|
[
"Apache-2.0"
] | null | null | null |
from tickit.core.state_interfaces import internal, kafka
from tickit.core.state_interfaces.state_interface import StateConsumer, StateProducer
__all__ = ["StateConsumer", "StateProducer", "internal", "kafka"]
| 42
| 85
| 0.819048
| 23
| 210
| 7.173913
| 0.521739
| 0.121212
| 0.169697
| 0.230303
| 0.351515
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.080952
| 210
| 4
| 86
| 52.5
| 0.854922
| 0
| 0
| 0
| 0
| 0
| 0.185714
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
72c2f6343191af1ece96f0e5a84a56a7d824e482
| 204
|
py
|
Python
|
fdbk/data_tools/__init__.py
|
kangasta/fdbk
|
426a04131869ceefd3bd2c80d327b60a3a8e2d7b
|
[
"MIT"
] | 1
|
2019-05-04T09:18:48.000Z
|
2019-05-04T09:18:48.000Z
|
fdbk/data_tools/__init__.py
|
kangasta/fdbk
|
426a04131869ceefd3bd2c80d327b60a3a8e2d7b
|
[
"MIT"
] | 36
|
2018-10-25T13:29:12.000Z
|
2021-09-23T22:30:07.000Z
|
fdbk/data_tools/__init__.py
|
kangasta/fdbk
|
426a04131869ceefd3bd2c80d327b60a3a8e2d7b
|
[
"MIT"
] | null | null | null |
'''Data analysis tools
Functions to ease the simple data analysis done by the DBConnection.
'''
from .functions import *
from ._aggregate import *
from ._process import *
from ._run import *
| 18.545455
| 69
| 0.710784
| 26
| 204
| 5.461538
| 0.615385
| 0.211268
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.215686
| 204
| 10
| 70
| 20.4
| 0.8875
| 0.436275
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
72de1a2434cb99df43de8a500691468a91f7ce2d
| 262
|
py
|
Python
|
oauth/serializers.py
|
chr0nu5/review-api
|
ec391c642334ef37eca565fbd6df30ef80a256d5
|
[
"MIT"
] | null | null | null |
oauth/serializers.py
|
chr0nu5/review-api
|
ec391c642334ef37eca565fbd6df30ef80a256d5
|
[
"MIT"
] | null | null | null |
oauth/serializers.py
|
chr0nu5/review-api
|
ec391c642334ef37eca565fbd6df30ef80a256d5
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
class ClientSerializer(serializers.Serializer):
username = serializers.CharField(required=True, allow_blank=False, max_length=100)
password = serializers.CharField(required=True, allow_blank=False, max_length=100)
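# --- Hedged usage sketch (not part of the original file) ---
# Standard DRF validation calls against the serializer above; the payload is
# illustrative.
serializer = ClientSerializer(data={'username': 'alice', 'password': 's3cret'})
print(serializer.is_valid())             # True
print(dict(serializer.validated_data))   # {'username': 'alice', 'password': 's3cret'}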
| 43.666667
| 86
| 0.820611
| 31
| 262
| 6.774194
| 0.612903
| 0.190476
| 0.266667
| 0.304762
| 0.561905
| 0.561905
| 0.561905
| 0.561905
| 0.561905
| 0.561905
| 0
| 0.025316
| 0.09542
| 262
| 5
| 87
| 52.4
| 0.860759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.25
| 0.25
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
72e401364b75e7fb90de3427abeb342488bf5438
| 7,322
|
py
|
Python
|
src/the_tale/the_tale/game/quests/tests/test_person_quests.py
|
al-arz/the-tale
|
542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5
|
[
"BSD-3-Clause"
] | null | null | null |
src/the_tale/the_tale/game/quests/tests/test_person_quests.py
|
al-arz/the-tale
|
542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5
|
[
"BSD-3-Clause"
] | null | null | null |
src/the_tale/the_tale/game/quests/tests/test_person_quests.py
|
al-arz/the-tale
|
542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5
|
[
"BSD-3-Clause"
] | null | null | null |
import smart_imports
smart_imports.all()
class QuestsTests(utils_testcase.TestCase,
helpers.QuestTestsMixin):
def setUp(self):
super().setUp()
self.places = game_logic.create_test_map()
self.account = self.accounts_factory.create_account(is_fast=True)
self.storage = game_logic_storage.LogicStorage()
self.storage.load_account_data(self.account)
self.hero = self.storage.accounts_to_heroes[self.account.id]
self.hero_uid = uids.hero(self.hero.id)
self.knowledge_base = questgen_knowledge_base.KnowledgeBase()
self.hero_info = logic.create_hero_info(self.hero)
self.hero.premium_state_end_at = datetime.datetime.now() + datetime.timedelta(days=30)
self.random_person = random.choice(persons_storage.persons.all())
def test_create(self):
knowledge_base = logic.create_random_quest_for_person(hero_info=self.hero_info,
person=self.random_person,
person_action=relations.PERSON_ACTION.random(),
logger=mock.Mock())
self.assertNotEqual(knowledge_base, None)
enter_uids = set(jump.state_to for jump in knowledge_base.filter(questgen_facts.Jump))
starts = [start for start in knowledge_base.filter(questgen_facts.Start) if start.uid not in enter_uids]
self.assertEqual(len(starts), 1)
start = starts[0]
person_uid = uids.person(self.random_person.id)
        self.assertTrue(any(participant.start == start.uid and participant.participant == person_uid
                            for participant in knowledge_base.filter(questgen_facts.QuestParticipant)))
@mock.patch('the_tale.game.quests.prototypes.QuestPrototype.check_is_alive', lambda *argv, **kwargs: True)
def test_complete__help(self):
actions_logic.force_new_hero_quest(hero=self.hero,
logger=mock.Mock(),
person_id=self.random_person.id,
person_action=relations.PERSON_ACTION.HELP)
politic_power_storage.places.sync(force=True)
with self.check_increased(lambda: politic_power_storage.persons.outer_power(self.random_person.id)):
self.complete_quest(self.hero)
politic_power_storage.persons.sync(force=True)
@mock.patch('the_tale.game.quests.prototypes.QuestPrototype.check_is_alive', lambda *argv, **kwargs: True)
def test_complete__help__enemy(self):
self.hero.preferences.set(heroes_relations.PREFERENCE_TYPE.ENEMY, self.random_person)
actions_logic.force_new_hero_quest(hero=self.hero,
logger=mock.Mock(),
person_id=self.random_person.id,
person_action=relations.PERSON_ACTION.HELP)
politic_power_storage.places.sync(force=True)
with self.check_increased(lambda: politic_power_storage.persons.outer_power(self.random_person.id)):
self.complete_quest(self.hero)
politic_power_storage.persons.sync(force=True)
@mock.patch('the_tale.game.quests.prototypes.QuestPrototype.check_is_alive', lambda *argv, **kwargs: True)
def test_complete__harm(self):
actions_logic.force_new_hero_quest(hero=self.hero,
logger=mock.Mock(),
person_id=self.random_person.id,
person_action=relations.PERSON_ACTION.HARM)
politic_power_storage.places.sync(force=True)
with self.check_decreased(lambda: politic_power_storage.persons.outer_power(self.random_person.id)):
self.complete_quest(self.hero)
politic_power_storage.persons.sync(force=True)
@mock.patch('the_tale.game.quests.prototypes.QuestPrototype.check_is_alive', lambda *argv, **kwargs: True)
def test_complete__harm__friend(self):
self.hero.preferences.set(heroes_relations.PREFERENCE_TYPE.FRIEND, self.random_person)
actions_logic.force_new_hero_quest(hero=self.hero,
logger=mock.Mock(),
person_id=self.random_person.id,
person_action=relations.PERSON_ACTION.HARM)
politic_power_storage.places.sync(force=True)
with self.check_decreased(lambda: politic_power_storage.persons.outer_power(self.random_person.id)):
self.complete_quest(self.hero)
politic_power_storage.persons.sync(force=True)
@mock.patch('the_tale.game.quests.prototypes.QuestPrototype.check_is_alive', lambda *argv, **kwargs: True)
def test_complete__harm__hometown(self):
self.hero.preferences.set(heroes_relations.PREFERENCE_TYPE.PLACE, self.random_person.place)
actions_logic.force_new_hero_quest(hero=self.hero,
logger=mock.Mock(),
person_id=self.random_person.id,
person_action=relations.PERSON_ACTION.HARM)
politic_power_storage.places.sync(force=True)
with self.check_decreased(lambda: politic_power_storage.persons.outer_power(self.random_person.id)):
self.complete_quest(self.hero)
politic_power_storage.persons.sync(force=True)
@mock.patch('the_tale.game.quests.prototypes.QuestPrototype.check_is_alive', lambda *argv, **kwargs: True)
def test_hero_in_same_place(self):
self.hero.position.set_place(self.random_person.place)
actions_logic.force_new_hero_quest(hero=self.hero,
logger=mock.Mock(),
person_id=self.random_person.id,
person_action=relations.PERSON_ACTION.random())
politic_power_storage.places.sync(force=True)
with self.check_changed(lambda: politic_power_storage.persons.outer_power(self.random_person.id)):
self.complete_quest(self.hero)
politic_power_storage.persons.sync(force=True)
@mock.patch('the_tale.game.quests.prototypes.QuestPrototype.check_is_alive', lambda *argv, **kwargs: True)
def test_hero_in_other_place(self):
for place in self.places:
if place.id == self.random_person.place.id:
continue
            self.hero.position.set_place(place)  # a place other than the person's
break
actions_logic.force_new_hero_quest(hero=self.hero,
logger=mock.Mock(),
person_id=self.random_person.id,
person_action=relations.PERSON_ACTION.random())
politic_power_storage.places.sync(force=True)
with self.check_changed(lambda: politic_power_storage.persons.outer_power(self.random_person.id)):
self.complete_quest(self.hero)
politic_power_storage.persons.sync(force=True)
| 44.646341
| 112
| 0.629336
| 832
| 7,322
| 5.248798
| 0.141827
| 0.04763
| 0.084268
| 0.061827
| 0.754065
| 0.749027
| 0.716739
| 0.716739
| 0.716739
| 0.6643
| 0
| 0.00076
| 0.281071
| 7,322
| 163
| 113
| 44.920245
| 0.828837
| 0
| 0
| 0.601852
| 0
| 0
| 0.058325
| 0.058325
| 0
| 0
| 0
| 0
| 0.027778
| 1
| 0.083333
| false
| 0
| 0.018519
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f40da2e8a9a87b1a376bfdfea69d85ba8a6fb0cd
| 141
|
py
|
Python
|
technomarin_scraper/__init__.py
|
alsolovyev/parser
|
37a76f4b335020cdda9cef9788cf003ffdb8379b
|
[
"MIT"
] | null | null | null |
technomarin_scraper/__init__.py
|
alsolovyev/parser
|
37a76f4b335020cdda9cef9788cf003ffdb8379b
|
[
"MIT"
] | null | null | null |
technomarin_scraper/__init__.py
|
alsolovyev/parser
|
37a76f4b335020cdda9cef9788cf003ffdb8379b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf8 -*-
from .scraper import TechnomarinScraper
from .args import args
from .logger import logger
| 20.142857
| 39
| 0.716312
| 18
| 141
| 5.611111
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017094
| 0.170213
| 141
| 6
| 40
| 23.5
| 0.846154
| 0.297872
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f433773f43ce570efd1ef76fe325a8718a782033
| 75
|
py
|
Python
|
ifdemo.py
|
vishabsingh/Python
|
04514c2e6fd8471a299860d6457146bf961ec86b
|
[
"Apache-2.0"
] | null | null | null |
ifdemo.py
|
vishabsingh/Python
|
04514c2e6fd8471a299860d6457146bf961ec86b
|
[
"Apache-2.0"
] | null | null | null |
ifdemo.py
|
vishabsingh/Python
|
04514c2e6fd8471a299860d6457146bf961ec86b
|
[
"Apache-2.0"
] | 2
|
2020-10-27T06:19:16.000Z
|
2020-10-27T13:42:08.000Z
|
# Note: `a` and `b` were undefined in the original snippet; they are defined
# here with illustrative values so the comparisons are runnable.
a = 30
b = 10
if a > b:
    print("a")
elif 20 > 10:
    print("b")
elif 15 > 10:
    print("c")
| 7.5
| 13
| 0.506667
| 15
| 75
| 2.533333
| 0.6
| 0.368421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0.253333
| 75
| 9
| 14
| 8.333333
| 0.535714
| 0
| 0
| 0
| 0
| 0
| 0.041667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
f47587e80ae648256240e4fe9a640b23c26bec52
| 93
|
py
|
Python
|
pagefetch_project/pagefetch/__init__.py
|
leifos/pagefetch
|
1d8333bd8204dbe86dbcb12354ed26cc2c04506f
|
[
"MIT"
] | null | null | null |
pagefetch_project/pagefetch/__init__.py
|
leifos/pagefetch
|
1d8333bd8204dbe86dbcb12354ed26cc2c04506f
|
[
"MIT"
] | null | null | null |
pagefetch_project/pagefetch/__init__.py
|
leifos/pagefetch
|
1d8333bd8204dbe86dbcb12354ed26cc2c04506f
|
[
"MIT"
] | null | null | null |
from ifind.common.setuplogger import create_ifind_logger
logger = create_ifind_logger('log')
| 31
| 56
| 0.849462
| 13
| 93
| 5.769231
| 0.615385
| 0.293333
| 0.453333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075269
| 93
| 2
| 57
| 46.5
| 0.872093
| 0
| 0
| 0
| 0
| 0
| 0.032258
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
be47d3ff390f585a7be04383c4aaa9a459bd8043
| 249
|
py
|
Python
|
Code/node_distance.py
|
xiaoyanLi629/CSE_812
|
55b6358a9c96c665f572e8d6f15d926c6b0e4b63
|
[
"MIT"
] | null | null | null |
Code/node_distance.py
|
xiaoyanLi629/CSE_812
|
55b6358a9c96c665f572e8d6f15d926c6b0e4b63
|
[
"MIT"
] | null | null | null |
Code/node_distance.py
|
xiaoyanLi629/CSE_812
|
55b6358a9c96c665f572e8d6f15d926c6b0e4b63
|
[
"MIT"
] | null | null | null |
# import numpy as np
import math
def node_distance_function(node1, node2, position_matrix):
dis = math.sqrt((position_matrix[node1, 1]-position_matrix[node2, 1])**2 + (position_matrix[node1, 2]-position_matrix[node2, 2])**2)
return dis
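# Hedged usage sketch (not from the original repository): the function indexes
# columns 1 and 2, so each row is assumed to be [node_id, x, y] in a numpy array.
if __name__ == '__main__':
    import numpy as np
    positions = np.array([[0, 0.0, 0.0],
                          [1, 3.0, 4.0]])
    print(node_distance_function(0, 1, positions))  # 5.0, the 3-4-5 triangle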
| 41.5
| 137
| 0.726908
| 37
| 249
| 4.702703
| 0.486486
| 0.402299
| 0.218391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056075
| 0.140562
| 249
| 6
| 138
| 41.5
| 0.757009
| 0.072289
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
be550e9f32ebf12bcc8458a50608167032d76225
| 1,926
|
py
|
Python
|
test/features/db_utils.py
|
lyrl/mycli
|
d62eefdc819a11ecdb97d93dd7ad1922d28a3795
|
[
"BSD-3-Clause"
] | 10,997
|
2015-07-27T06:59:04.000Z
|
2022-03-31T07:49:26.000Z
|
test/features/db_utils.py
|
lyrl/mycli
|
d62eefdc819a11ecdb97d93dd7ad1922d28a3795
|
[
"BSD-3-Clause"
] | 937
|
2015-07-29T09:25:30.000Z
|
2022-03-30T23:54:03.000Z
|
test/features/db_utils.py
|
lyrl/mycli
|
d62eefdc819a11ecdb97d93dd7ad1922d28a3795
|
[
"BSD-3-Clause"
] | 799
|
2015-07-27T13:13:49.000Z
|
2022-03-29T21:24:39.000Z
|
import pymysql
def create_db(hostname='localhost', port=3306, username=None,
password=None, dbname=None):
"""Create test database.
:param hostname: string
:param port: int
:param username: string
:param password: string
:param dbname: string
:return:
"""
cn = pymysql.connect(
host=hostname,
port=port,
user=username,
password=password,
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor
)
with cn.cursor() as cr:
cr.execute('drop database if exists ' + dbname)
cr.execute('create database ' + dbname)
cn.close()
cn = create_cn(hostname, port, password, username, dbname)
return cn
def create_cn(hostname, port, password, username, dbname):
"""Open connection to database.
:param hostname:
:param port:
:param password:
:param username:
:param dbname: string
    :return: pymysql.connections.Connection
"""
cn = pymysql.connect(
host=hostname,
port=port,
user=username,
password=password,
db=dbname,
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor
)
return cn
def drop_db(hostname='localhost', port=3306, username=None,
password=None, dbname=None):
"""Drop database.
:param hostname: string
:param port: int
:param username: string
:param password: string
:param dbname: string
"""
cn = pymysql.connect(
host=hostname,
port=port,
user=username,
password=password,
db=dbname,
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor
)
with cn.cursor() as cr:
cr.execute('drop database if exists ' + dbname)
close_cn(cn)
def close_cn(cn=None):
"""Close connection.
    :param cn: pymysql.connections.Connection
"""
if cn:
cn.close()
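# Hedged usage sketch (illustrative only; host, credentials and dbname are
# assumptions and require a reachable MySQL server):
#
#   cn = create_db(username='root', password='secret', dbname='mycli_test')
#   with cn.cursor() as cr:
#       cr.execute('select 1')
#   close_cn(cn)
#   drop_db(username='root', password='secret', dbname='mycli_test')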
| 20.489362
| 62
| 0.601765
| 206
| 1,926
| 5.597087
| 0.199029
| 0.057242
| 0.05464
| 0.052038
| 0.738075
| 0.738075
| 0.738075
| 0.665221
| 0.665221
| 0.665221
| 0
| 0.010973
| 0.290239
| 1,926
| 93
| 63
| 20.709677
| 0.83248
| 0.244548
| 0
| 0.733333
| 0
| 0
| 0.076981
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088889
| false
| 0.155556
| 0.022222
| 0
| 0.155556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
bea98622513ac9669336fe31205f5507f89cd3bb
| 246
|
py
|
Python
|
terminio/commandexecutor/exit.py
|
SourishS/terminio
|
e512ef2dff9b47415565469e9a10ae5613af0975
|
[
"Apache-2.0"
] | 1
|
2019-07-24T02:29:39.000Z
|
2019-07-24T02:29:39.000Z
|
terminio/commandexecutor/exit.py
|
SourishS/terminio
|
e512ef2dff9b47415565469e9a10ae5613af0975
|
[
"Apache-2.0"
] | null | null | null |
terminio/commandexecutor/exit.py
|
SourishS/terminio
|
e512ef2dff9b47415565469e9a10ae5613af0975
|
[
"Apache-2.0"
] | null | null | null |
from terminio.commandexecutor.commandexecutor import CommandExecutor
import sys
class exit(CommandExecutor):
def __init__(self, session):
super(exit, self).__init__(session)
def execute_command(self, cwd, args):
sys.exit(0)
| 27.333333
| 68
| 0.743902
| 29
| 246
| 6
| 0.586207
| 0.241379
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004854
| 0.162602
| 246
| 9
| 69
| 27.333333
| 0.839806
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.285714
| 0
| 0.714286
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
beb834f31c73cd80f46a3c5c87d0dd8ffd4815d3
| 1,534
|
py
|
Python
|
keras/layers/merging/__init__.py
|
itsraina/keras
|
5e9376b5b94b6fb445dd52dbfafbc4e95bff5e35
|
[
"Apache-2.0"
] | null | null | null |
keras/layers/merging/__init__.py
|
itsraina/keras
|
5e9376b5b94b6fb445dd52dbfafbc4e95bff5e35
|
[
"Apache-2.0"
] | null | null | null |
keras/layers/merging/__init__.py
|
itsraina/keras
|
5e9376b5b94b6fb445dd52dbfafbc4e95bff5e35
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras merging layers."""
# Merging functions.
# Merging layers.
from keras.layers.merging.add import Add
from keras.layers.merging.add import add
from keras.layers.merging.average import Average
from keras.layers.merging.average import average
from keras.layers.merging.concatenate import Concatenate
from keras.layers.merging.concatenate import concatenate
from keras.layers.merging.dot import Dot
from keras.layers.merging.dot import dot
from keras.layers.merging.maximum import Maximum
from keras.layers.merging.maximum import maximum
from keras.layers.merging.minimum import Minimum
from keras.layers.merging.minimum import minimum
from keras.layers.merging.multiply import Multiply
from keras.layers.merging.multiply import multiply
from keras.layers.merging.subtract import Subtract
from keras.layers.merging.subtract import subtract
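# Hedged usage sketch (not part of the original file): each layer class above is
# re-exported next to its lowercase functional alias, e.g. for Keras tensors x1, x2:
#
#   summed = Add()([x1, x2])    # layer instance, composable in a model
#   summed = add([x1, x2])      # functional shortcut doing the same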
| 42.611111
| 80
| 0.767275
| 213
| 1,534
| 5.525822
| 0.375587
| 0.187766
| 0.203908
| 0.299065
| 0.564146
| 0.564146
| 0.564146
| 0.508071
| 0.508071
| 0.508071
| 0
| 0.005908
| 0.11734
| 1,534
| 35
| 81
| 43.828571
| 0.863368
| 0.468057
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fe2fae0768c6b7b8ce6d67ce3fd6d6cf6d52a5c0
| 86
|
py
|
Python
|
Server/utils/blueprints/__init__.py
|
thearyadev/Security-System
|
f9fa48196eef4dc83a9059e10e3c97e2f0842b8d
|
[
"MIT"
] | 1
|
2022-02-26T21:43:19.000Z
|
2022-02-26T21:43:19.000Z
|
Server/utils/blueprints/__init__.py
|
thearyadev/Security-System
|
f9fa48196eef4dc83a9059e10e3c97e2f0842b8d
|
[
"MIT"
] | null | null | null |
Server/utils/blueprints/__init__.py
|
thearyadev/Security-System
|
f9fa48196eef4dc83a9059e10e3c97e2f0842b8d
|
[
"MIT"
] | null | null | null |
from .API import API
from .Logging import Logging
from .Renderable import Renderable
| 17.2
| 34
| 0.813953
| 12
| 86
| 5.833333
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151163
| 86
| 4
| 35
| 21.5
| 0.958904
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fe55ff4902fb1825ad9f09d3702f3421f55e6fc6
| 3,169
|
py
|
Python
|
hc/front/tests/test_add_pushover.py
|
sumonst21/healthchecks
|
967ca840adee6c72addde46c944c88b1bd5484e2
|
[
"BSD-3-Clause"
] | 4
|
2021-03-27T09:40:00.000Z
|
2021-03-28T06:11:03.000Z
|
hc/front/tests/test_add_pushover.py
|
sumonst21/healthchecks
|
967ca840adee6c72addde46c944c88b1bd5484e2
|
[
"BSD-3-Clause"
] | 7
|
2020-06-05T23:16:36.000Z
|
2022-02-10T08:33:36.000Z
|
hc/front/tests/test_add_pushover.py
|
sumonst21/healthchecks
|
967ca840adee6c72addde46c944c88b1bd5484e2
|
[
"BSD-3-Clause"
] | 1
|
2021-01-29T13:36:14.000Z
|
2021-01-29T13:36:14.000Z
|
from django.test.utils import override_settings
from hc.api.models import Channel
from hc.test import BaseTestCase
@override_settings(
PUSHOVER_API_TOKEN="token", PUSHOVER_SUBSCRIPTION_URL="http://example.org"
)
class AddPushoverTestCase(BaseTestCase):
@override_settings(PUSHOVER_API_TOKEN=None)
def test_it_requires_api_token(self):
self.client.login(username="alice@example.org", password="password")
r = self.client.get("/integrations/add_pushover/")
self.assertEqual(r.status_code, 404)
def test_instructions_work_without_login(self):
r = self.client.get("/integrations/add_pushover/")
self.assertContains(r, "Setup Guide")
def test_it_shows_form(self):
self.client.login(username="alice@example.org", password="password")
r = self.client.get("/integrations/add_pushover/")
self.assertContains(r, "Subscribe with Pushover")
def test_post_redirects(self):
self.client.login(username="alice@example.org", password="password")
payload = {"po_priority": 2}
r = self.client.post("/integrations/add_pushover/", form=payload)
self.assertEqual(r.status_code, 302)
def test_post_requires_authenticated_user(self):
payload = {"po_priority": 2}
r = self.client.post("/integrations/add_pushover/", form=payload)
self.assertEqual(r.status_code, 200)
self.assertContains(r, "Setup Guide")
def test_it_adds_channel(self):
self.client.login(username="alice@example.org", password="password")
session = self.client.session
session["pushover"] = "foo"
session.save()
params = "pushover_user_key=a&state=foo&prio=0&prio_up=-1"
r = self.client.get("/integrations/add_pushover/?%s" % params)
self.assertEqual(r.status_code, 302)
channel = Channel.objects.get()
self.assertEqual(channel.value, "a|0|-1")
self.assertEqual(channel.project, self.project)
def test_it_validates_priority(self):
self.client.login(username="alice@example.org", password="password")
session = self.client.session
session["pushover"] = "foo"
session.save()
params = "pushover_user_key=a&state=foo&prio=abc"
r = self.client.get("/integrations/add_pushover/?%s" % params)
self.assertEqual(r.status_code, 400)
def test_it_validates_priority_up(self):
self.client.login(username="alice@example.org", password="password")
session = self.client.session
session["pushover"] = "foo"
session.save()
params = "pushover_user_key=a&state=foo&prio_up=abc"
r = self.client.get("/integrations/add_pushover/?%s" % params)
self.assertEqual(r.status_code, 400)
def test_it_validates_state(self):
self.client.login(username="alice@example.org", password="password")
session = self.client.session
session["pushover"] = "foo"
session.save()
params = "pushover_user_key=a&state=INVALID&prio=0"
r = self.client.get("/integrations/add_pushover/?%s" % params)
self.assertEqual(r.status_code, 400)
| 37.72619
| 78
| 0.675292
| 393
| 3,169
| 5.274809
| 0.201018
| 0.096479
| 0.047757
| 0.064158
| 0.783406
| 0.756392
| 0.711047
| 0.711047
| 0.68355
| 0.68355
| 0
| 0.010925
| 0.191228
| 3,169
| 83
| 79
| 38.180723
| 0.797893
| 0
| 0
| 0.578125
| 0
| 0
| 0.23225
| 0.132849
| 0
| 0
| 0
| 0
| 0.1875
| 1
| 0.140625
| false
| 0.109375
| 0.046875
| 0
| 0.203125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
fe592054e779830c217c3ed7a273be9290cf07e1
| 2,914
|
py
|
Python
|
tests/test_090-accounts.py
|
britive/python-api
|
2daa7693f1d4adf03626abd78598e30f62b6e2e6
|
[
"MIT"
] | null | null | null |
tests/test_090-accounts.py
|
britive/python-api
|
2daa7693f1d4adf03626abd78598e30f62b6e2e6
|
[
"MIT"
] | null | null | null |
tests/test_090-accounts.py
|
britive/python-api
|
2daa7693f1d4adf03626abd78598e30f62b6e2e6
|
[
"MIT"
] | null | null | null |
import json
from .cache import * # will also import some globals like `britive`
# starting with map so we can get a cached account to use for testing
def test_map(cached_application, cached_environment, cached_user, cached_account):
response = britive.accounts.map(
user_id=cached_user['userId'],
application_id=cached_application['appContainerId'],
environment_id=cached_environment['id'],
account_id=cached_account['accountId']
)
assert isinstance(response, list)
assert len(response) > 0
assert cached_user['userId'] in [u['userId'] for u in response]
def test_mapped_users(cached_application, cached_environment, cached_user, cached_account):
response = britive.accounts.mapped_users(
application_id=cached_application['appContainerId'],
environment_id=cached_environment['id'],
account_id=cached_account['accountId']
)
assert isinstance(response, list)
assert len(response) > 0
assert cached_user['userId'] in [u['userId'] for u in response]
def test_users_available_to_map(cached_application, cached_environment, cached_user, cached_account):
response = britive.accounts.users_available_to_map(
application_id=cached_application['appContainerId'],
environment_id=cached_environment['id'],
account_id=cached_account['accountId']
)
assert isinstance(response, list)
assert len(response) > 0
assert cached_user['userId'] not in [u['userId'] for u in response]
def test_unmap(cached_application, cached_environment, cached_user, cached_account):
response = britive.accounts.unmap(
user_id=cached_user['userId'],
application_id=cached_application['appContainerId'],
environment_id=cached_environment['id'],
account_id=cached_account['accountId']
)
assert isinstance(response, list)
assert cached_user['userId'] not in [u['userId'] for u in response]
def test_list(cached_application, cached_environment):
accounts = britive.accounts.list(
application_id=cached_application['appContainerId'],
environment_id=cached_environment['id']
)
assert isinstance(accounts, list)
assert len(accounts) > 0
assert isinstance(accounts[0], dict)
def test_permissions(cached_application, cached_environment, cached_account):
permissions = britive.accounts.permissions(
account_id=cached_account['accountId'],
application_id=cached_application['appContainerId'],
environment_id=cached_environment['id']
)
assert isinstance(permissions, list)
def test_groups(cached_application, cached_environment, cached_account):
groups = britive.accounts.groups(
account_id=cached_account['accountId'],
application_id=cached_application['appContainerId'],
environment_id=cached_environment['id']
)
assert isinstance(groups, list)
| 35.975309
| 101
| 0.731984
| 336
| 2,914
| 6.089286
| 0.14881
| 0.086022
| 0.07869
| 0.116325
| 0.786901
| 0.786901
| 0.740958
| 0.740958
| 0.740958
| 0.740958
| 0
| 0.00207
| 0.171242
| 2,914
| 80
| 102
| 36.425
| 0.845135
| 0.038435
| 0
| 0.540984
| 0
| 0
| 0.080772
| 0
| 0
| 0
| 0
| 0
| 0.262295
| 1
| 0.114754
| false
| 0
| 0.032787
| 0
| 0.147541
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fe5db97bd3429e3656a09d76517c338679f81add
| 207
|
py
|
Python
|
srilm/__init__.py
|
rcgale/srilm-python
|
1ac8ca6e5a743abd3692018e578aabf094750c89
|
[
"MIT"
] | 28
|
2015-04-09T23:11:23.000Z
|
2021-10-30T09:04:38.000Z
|
srilm/__init__.py
|
rcgale/srilm-python
|
1ac8ca6e5a743abd3692018e578aabf094750c89
|
[
"MIT"
] | 6
|
2015-10-27T07:40:12.000Z
|
2021-12-08T02:27:05.000Z
|
srilm/__init__.py
|
rcgale/srilm-python
|
1ac8ca6e5a743abd3692018e578aabf094750c89
|
[
"MIT"
] | 11
|
2015-09-22T05:01:31.000Z
|
2021-04-29T02:35:00.000Z
|
"""
Python binding for SRI LM Toolkit implemented in Cython
"""
__all__ = ["vocab", "stats", "discount", "base", "ngram", "maxent", "utils"]
from . import vocab, stats, discount, base, ngram, maxent, utils
| 29.571429
| 76
| 0.676329
| 26
| 207
| 5.230769
| 0.730769
| 0.147059
| 0.264706
| 0.323529
| 0.558824
| 0.558824
| 0.558824
| 0
| 0
| 0
| 0
| 0
| 0.149758
| 207
| 6
| 77
| 34.5
| 0.772727
| 0.2657
| 0
| 0
| 0
| 0
| 0.263889
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
fe5f14f54998fc32eaa2a918da0fccc1507c5a2e
| 151
|
py
|
Python
|
lesson16n2_projects/wcsc/auto_gen/code/states/reply_reject.py
|
muzudho/py-state-machine-practice
|
e31c066f4cf142b6b6c5ff273b56a0f89428c59e
|
[
"MIT"
] | null | null | null |
lesson16n2_projects/wcsc/auto_gen/code/states/reply_reject.py
|
muzudho/py-state-machine-practice
|
e31c066f4cf142b6b6c5ff273b56a0f89428c59e
|
[
"MIT"
] | null | null | null |
lesson16n2_projects/wcsc/auto_gen/code/states/reply_reject.py
|
muzudho/py-state-machine-practice
|
e31c066f4cf142b6b6c5ff273b56a0f89428c59e
|
[
"MIT"
] | null | null | null |
from lesson15_projects.wcsc.data.const import E_OVER
class ReplyRejectState():
def update(self, req):
        # Do nothing and finish
return E_OVER
| 16.777778
| 52
| 0.688742
| 19
| 151
| 5.315789
| 0.894737
| 0.09901
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017391
| 0.238411
| 151
| 8
| 53
| 18.875
| 0.86087
| 0.059603
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
fe69661798a6603dac4c2dcfe66ba72c3a08e888
| 318
|
py
|
Python
|
gaphor/UML/usecases/__init__.py
|
bertob/gaphor
|
a1d6f8dd8c878f299980bba6c055436148573274
|
[
"Apache-2.0"
] | 867
|
2018-01-09T00:19:09.000Z
|
2022-03-31T02:49:23.000Z
|
gaphor/UML/usecases/__init__.py
|
burakozturk16/gaphor
|
86267a5200ac4439626d35d306dbb376c3800107
|
[
"Apache-2.0"
] | 790
|
2018-01-13T23:47:07.000Z
|
2022-03-31T16:04:27.000Z
|
gaphor/UML/usecases/__init__.py
|
burakozturk16/gaphor
|
86267a5200ac4439626d35d306dbb376c3800107
|
[
"Apache-2.0"
] | 117
|
2018-01-09T02:24:49.000Z
|
2022-03-23T08:07:42.000Z
|
from gaphor.UML.usecases import usecaseconnect
from gaphor.UML.usecases.actor import ActorItem
from gaphor.UML.usecases.extend import ExtendItem
from gaphor.UML.usecases.include import IncludeItem
from gaphor.UML.usecases.usecase import UseCaseItem
__all__ = ["ActorItem", "ExtendItem", "IncludeItem", "UseCaseItem"]
| 39.75
| 67
| 0.827044
| 39
| 318
| 6.641026
| 0.384615
| 0.19305
| 0.250965
| 0.405405
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084906
| 318
| 7
| 68
| 45.428571
| 0.890034
| 0
| 0
| 0
| 0
| 0
| 0.128931
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.833333
| 0
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fe780921d0d07d4474ce650db8a3765ff4995dce
| 37
|
py
|
Python
|
api/__init__.py
|
markjmiller/startup-generator
|
d863c3ac0e5d6bc2a20aefb960f66dd2b35b563c
|
[
"MIT"
] | null | null | null |
api/__init__.py
|
markjmiller/startup-generator
|
d863c3ac0e5d6bc2a20aefb960f66dd2b35b563c
|
[
"MIT"
] | null | null | null |
api/__init__.py
|
markjmiller/startup-generator
|
d863c3ac0e5d6bc2a20aefb960f66dd2b35b563c
|
[
"MIT"
] | null | null | null |
# For Elastic Beanstalk and Gunicorn
| 18.5
| 36
| 0.810811
| 5
| 37
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162162
| 37
| 1
| 37
| 37
| 0.967742
| 0.918919
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fe8b4e5cb5d7a7fd7d1e9f2a48c1af1b97742f2c
| 40,775
|
py
|
Python
|
sds/distributions/composite.py
|
hanyas/sds
|
3c195fb9cbd88a9284287d62c0eacb6afc4598a7
|
[
"MIT"
] | 12
|
2019-09-21T13:52:09.000Z
|
2022-02-14T06:48:46.000Z
|
sds/distributions/composite.py
|
hanyas/sds
|
3c195fb9cbd88a9284287d62c0eacb6afc4598a7
|
[
"MIT"
] | 1
|
2020-01-22T12:34:52.000Z
|
2020-01-26T21:14:11.000Z
|
sds/distributions/composite.py
|
hanyas/sds
|
3c195fb9cbd88a9284287d62c0eacb6afc4598a7
|
[
"MIT"
] | 5
|
2019-09-18T15:11:26.000Z
|
2021-12-10T14:04:53.000Z
|
from abc import ABC
import numpy as np
import numpy.random as npr
import scipy as sc
from scipy import linalg
from scipy.special import digamma
from sds.distributions.gaussian import GaussianWithPrecision
from sds.distributions.gaussian import GaussianWithDiagonalPrecision
from sds.distributions.matrix import MatrixNormalWithPrecision
from sds.distributions.matrix import MatrixNormalWithDiagonalPrecision
from sds.distributions.lingauss import LinearGaussianWithPrecision
from sds.distributions.lingauss import SingleOutputLinearGaussianWithKnownPrecision
from sds.distributions.lingauss import SingleOutputLinearGaussianWithKnownMean
from sds.distributions.gaussian import GaussianWithKnownMeanAndDiagonalPrecision
from sds.distributions.wishart import Wishart
from sds.distributions.gamma import Gamma
from sds.utils.general import Statistics as Stats
from functools import partial
from copy import deepcopy
class NormalWishart:
def __init__(self, dim, mu=None, kappa=None,
psi=None, nu=None):
self.dim = dim
self.gaussian = GaussianWithPrecision(dim=dim, mu=mu)
self.wishart = Wishart(dim=dim, psi=psi, nu=nu)
self.kappa = kappa
@property
def params(self):
return self.gaussian.mu, self.kappa, self.wishart.psi, self.wishart.nu
@params.setter
def params(self, values):
self.gaussian.mu, self.kappa, self.wishart.psi, self.wishart.nu = values
@property
def nb_params(self):
raise NotImplementedError
@property
def nat_param(self):
return self.std_to_nat(self.params)
@nat_param.setter
def nat_param(self, natparam):
self.params = self.nat_to_std(natparam)
@staticmethod
def std_to_nat(params):
# stats = [mu.T @ lmbda,
# -0.5 * lmbda @ (mu @ mu.T),
# -0.5 * lmbda,
# 0.5 * logdet(lmbda)]
#
# nats = [kappa * m,
# kappa,
# psi^-1 + kappa * (m @ m.T),
# nu - d]
a = params[1] * params[0]
b = params[1]
c = np.linalg.inv(params[2]) + params[1] * np.outer(params[0], params[0])
d = params[3] - params[2].shape[0]
return Stats([a, b, c, d])
@staticmethod
def nat_to_std(natparam):
mu = natparam[0] / natparam[1]
kappa = natparam[1]
psi = np.linalg.inv(natparam[2] - kappa * np.outer(mu, mu))
nu = natparam[3] + natparam[2].shape[0]
return mu, kappa, psi, nu
def mean(self):
return self.gaussian.mean(), self.wishart.mean()
def mode(self):
mu = self.gaussian.mode()
lmbda = (self.wishart.nu - self.dim) * self.wishart.psi
return mu, lmbda
def rvs(self):
lmbda = self.wishart.rvs()
self.gaussian.lmbda = self.kappa * lmbda
mu = self.gaussian.rvs()
return mu, lmbda
@property
def base(self):
return self.gaussian.base * self.wishart.base
def log_base(self):
return np.log(self.base)
def log_partition(self):
_, kappa, psi, nu = self.params
return - 0.5 * self.dim * np.log(kappa)\
+ Wishart(dim=self.dim, psi=psi, nu=nu).log_partition()
def log_likelihood(self, x):
mu, lmbda = x
return GaussianWithPrecision(dim=self.dim, mu=self.gaussian.mu,
lmbda=self.kappa * lmbda).log_likelihood(mu) \
+ self.wishart.log_likelihood(lmbda)
def log_likelihood_grad(self, x):
mu, lmbda = x
a = lmbda @ (mu - self.gaussian.mu)
b = 0.5 * (self.dim / self.kappa - (mu - self.gaussian.mu).T @ lmbda @ (mu - self.gaussian.mu))
c = 0.5 * ((np.linalg.inv(self.wishart.psi) @ lmbda @ np.linalg.inv(self.wishart.psi)).T
- self.wishart.nu * np.linalg.inv(self.wishart.psi).T)
d = 0.5 * (np.linalg.slogdet(lmbda)[1] - self.dim * np.log(2.)
- np.linalg.slogdet(self.wishart.psi)[1] - digamma(self.wishart.nu / 2.))
return a, b, c, d
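# Hedged round-trip sketch (not from the original source): std_to_nat and
# nat_to_std are mutual inverses, which can be checked numerically, e.g.
#
#   dim = 2
#   params = (np.zeros(dim), 1.0, np.eye(dim), dim + 2.0)
#   mu, kappa, psi, nu = NormalWishart.nat_to_std(NormalWishart.std_to_nat(params))
#   # recovers mu == zeros, kappa == 1.0, psi == eye(dim), nu == dim + 2.0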
class StackedNormalWisharts:
def __init__(self, size, dim,
mus=None, kappas=None,
psis=None, nus=None):
self.size = size
self.dim = dim
mus = [None] * self.size if mus is None else mus
kappas = [None] * self.size if kappas is None else kappas
psis = [None] * self.size if psis is None else psis
nus = [None] * self.size if nus is None else nus
self.dists = [NormalWishart(dim, mus[k], kappas[k],
psis[k], nus[k])
for k in range(self.size)]
@property
def params(self):
return self.mus, self.kappas, self.psis, self.nus
@params.setter
def params(self, values):
self.mus, self.kappas, self.psis, self.nus = values
@property
def nat_param(self):
return self.std_to_nat(self.params)
@nat_param.setter
def nat_param(self, natparam):
self.params = self.nat_to_std(natparam)
def std_to_nat(self, params):
params_list = list(zip(*params))
natparams_list = [dist.std_to_nat(par) for dist, par in zip(self.dists, params_list)]
natparams_stack = Stats(map(partial(np.stack, axis=0), zip(*natparams_list)))
return natparams_stack
def nat_to_std(self, natparam):
natparams_list = list(zip(*natparam))
params_list = [dist.nat_to_std(par) for dist, par in zip(self.dists, natparams_list)]
params_stack = tuple(map(partial(np.stack, axis=0), zip(*params_list)))
return params_stack
@property
def mus(self):
return np.array([dist.gaussian.mu for dist in self.dists])
@mus.setter
def mus(self, value):
for k, dist in enumerate(self.dists):
dist.gaussian.mu = value[k, ...]
@property
def kappas(self):
return np.array([dist.kappa for dist in self.dists])
@kappas.setter
def kappas(self, value):
for k, dist in enumerate(self.dists):
dist.kappa = value[k, ...]
@property
def psis(self):
return np.array([dist.wishart.psi for dist in self.dists])
@psis.setter
def psis(self, value):
for k, dist in enumerate(self.dists):
dist.wishart.psi = value[k, ...]
@property
def nus(self):
return np.array([dist.wishart.nu for dist in self.dists])
@nus.setter
def nus(self, value):
for k, dist in enumerate(self.dists):
dist.wishart.nu = value[k, ...]
def mean(self):
zipped = zip(*[dist.mean() for dist in self.dists])
return tuple(map(partial(np.stack, axis=0), zipped))
def mode(self):
zipped = zip(*[dist.mode() for dist in self.dists])
return tuple(map(partial(np.stack, axis=0), zipped))
def rvs(self):
zipped = zip(*[dist.rvs() for dist in self.dists])
return tuple(map(partial(np.stack, axis=0), zipped))
@property
def base(self):
return np.array([dist.base for dist in self.dists])
def log_base(self):
return np.log(self.base)
def log_partition(self):
return np.array([dist.log_partition() for dist in self.dists])
def log_likelihood(self, x):
return np.sum([dist.log_likelihood(_x)
for dist, _x in zip(self.dists, list(zip(*x)))])
def log_likelihood_grad(self, x):
grad_list = [dist.log_likelihood_grad(_x)
for dist, _x in zip(self.dists, list(zip(*x)))]
grad_stack = tuple(map(partial(np.stack, axis=0), zip(*grad_list)))
return grad_stack
class TiedNormalWisharts(StackedNormalWisharts):
    def __init__(self, size, dim,
mus=None, kappas=None,
psis=None, nus=None):
super(TiedNormalWisharts, self).__init__(size, dim,
mus, kappas,
psis, nus)
def std_to_nat(self, params):
params_list = list(zip(*params))
natparams_list = [dist.std_to_nat(par) for dist, par in zip(self.dists, params_list)]
natparams_stack = Stats(map(partial(np.stack, axis=0), zip(*natparams_list)))
return natparams_stack
def nat_to_std(self, natparam):
mus = np.einsum('k,kd->kd', 1. / natparam[1], natparam[0])
kappas = natparam[1]
psi = np.linalg.inv(np.mean(natparam[2] - np.einsum('k,kd,kl->kdl', kappas, mus, mus), axis=0))
nu = np.mean(natparam[3] + self.dim)
psis = np.array(self.size * [psi])
nus = np.array(self.size * [nu])
return mus, kappas, psis, nus
class NormalGamma:
def __init__(self, dim, mu=None, kappas=None,
alphas=None, betas=None):
self.dim = dim
self.gaussian = GaussianWithDiagonalPrecision(dim=dim, mu=mu)
self.gamma = Gamma(dim=dim, alphas=alphas, betas=betas)
self.kappas = kappas
@property
def params(self):
return self.gaussian.mu, self.kappas, self.gamma.alphas, self.gamma.betas
@params.setter
def params(self, values):
self.gaussian.mu, self.kappas, self.gamma.alphas, self.gamma.betas = values
@property
def nat_param(self):
return self.std_to_nat(self.params)
@nat_param.setter
def nat_param(self, natparam):
self.params = self.nat_to_std(natparam)
@staticmethod
def std_to_nat(params):
# stats = [mu * lmbda_diag,
# -0.5 * lmbda_diag * mu * mu,
# 0.5 * log(lmbda_diag),
# -0.5 * lmbda_diag]
#
# nats = [kappa * m,
# kappa,
# 2. * alpha - 1.,
# 2. * beta + kappa * m * m]
a = params[1] * params[0]
b = params[1]
c = 2. * params[2] - 1.
d = 2. * params[3] + params[1] * params[0]**2
return Stats([a, b, c, d])
@staticmethod
def nat_to_std(natparam):
mu = natparam[0] / natparam[1]
kappas = natparam[1]
alphas = 0.5 * (natparam[2] + 1.)
betas = 0.5 * (natparam[3] - kappas * mu**2)
return mu, kappas, alphas, betas
def mean(self):
return self.gaussian.mean(), self.gamma.mean()
def mode(self):
mu = self.gaussian.mode()
lmbda_diag = (self.gamma.alphas - 1. / 2.) / self.gamma.betas
return mu, lmbda_diag
def rvs(self):
lmbda_diag = self.gamma.rvs()
self.gaussian.lmbda_diag = self.kappas * lmbda_diag
mu = self.gaussian.rvs()
return mu, lmbda_diag
@property
def base(self):
return self.gaussian.base * self.gamma.base
def log_base(self):
return np.log(self.base)
def log_partition(self):
mu, kappas, alphas, betas = self.params
return - 0.5 * np.sum(np.log(kappas))\
+ Gamma(dim=self.dim, alphas=alphas, betas=betas).log_partition()
def log_likelihood(self, x):
mu, lmbda_diag = x
return GaussianWithDiagonalPrecision(dim=self.dim, mu=self.gaussian.mu,
lmbda_diag=self.kappas * lmbda_diag).log_likelihood(mu)\
+ self.gamma.log_likelihood(lmbda_diag)
class StackedNormalGammas:
def __init__(self, size, dim,
mus=None, kappas=None,
alphas=None, betas=None):
self.size = size
self.dim = dim
mus = [None] * self.size if mus is None else mus
kappas = [None] * self.size if kappas is None else kappas
alphas = [None] * self.size if alphas is None else alphas
betas = [None] * self.size if betas is None else betas
self.dists = [NormalGamma(dim, mus[k], kappas[k],
alphas[k], betas[k])
for k in range(self.size)]
@property
def params(self):
return self.mus, self.kappas, self.alphas, self.betas
@params.setter
def params(self, values):
self.mus, self.kappas, self.alphas, self.betas = values
@property
def nat_param(self):
return self.std_to_nat(self.params)
@nat_param.setter
def nat_param(self, natparam):
self.params = self.nat_to_std(natparam)
def std_to_nat(self, params):
params_list = list(zip(*params))
natparams_list = [dist.std_to_nat(par) for dist, par in zip(self.dists, params_list)]
natparams_stack = Stats(map(partial(np.stack, axis=0), zip(*natparams_list)))
return natparams_stack
def nat_to_std(self, natparam):
natparams_list = list(zip(*natparam))
params_list = [dist.nat_to_std(par) for dist, par in zip(self.dists, natparams_list)]
params_stack = tuple(map(partial(np.stack, axis=0), zip(*params_list)))
return params_stack
@property
def mus(self):
return np.array([dist.gaussian.mu for dist in self.dists])
@mus.setter
def mus(self, value):
for k, dist in enumerate(self.dists):
dist.gaussian.mu = value[k, ...]
@property
def kappas(self):
return np.array([dist.kappas for dist in self.dists])
@kappas.setter
def kappas(self, value):
for k, dist in enumerate(self.dists):
dist.kappas = value[k, ...]
@property
def alphas(self):
return np.array([dist.gamma.alphas for dist in self.dists])
@alphas.setter
def alphas(self, value):
for k, dist in enumerate(self.dists):
dist.gamma.alphas = value[k, ...]
@property
def betas(self):
return np.array([dist.gamma.betas for dist in self.dists])
@betas.setter
def betas(self, value):
for k, dist in enumerate(self.dists):
dist.gamma.betas = value[k, ...]
def mean(self):
zipped = zip(*[dist.mean() for dist in self.dists])
return tuple(map(partial(np.stack, axis=0), zipped))
def mode(self):
zipped = zip(*[dist.mode() for dist in self.dists])
return tuple(map(partial(np.stack, axis=0), zipped))
def rvs(self):
zipped = zip(*[dist.rvs() for dist in self.dists])
return tuple(map(partial(np.stack, axis=0), zipped))
@property
def base(self):
return np.array([dist.base for dist in self.dists])
def log_base(self):
return np.log(self.base)
def log_partition(self):
return np.array([dist.log_partition() for dist in self.dists])
def log_likelihood(self, x):
return np.sum([dist.log_likelihood(_x)
for dist, _x in zip(self.dists, list(zip(*x)))])
class TiedNormalGammas(StackedNormalGammas):
    def __init__(self, size, dim,
mus=None, kappas=None,
alphas=None, betas=None):
super(TiedNormalGammas, self).__init__(size, dim,
mus, kappas,
alphas, betas)
def std_to_nat(self, params):
params_list = list(zip(*params))
natparams_list = [dist.std_to_nat(par) for dist, par in zip(self.dists, params_list)]
natparams_stack = Stats(map(partial(np.stack, axis=0), zip(*natparams_list)))
return natparams_stack
def nat_to_std(self, natparam):
mus = np.einsum('kd,kd->kd', 1. / natparam[1], natparam[0])
kappas = natparam[1]
alphas = np.mean(0.5 * (natparam[2] + 1.), axis=0)
betas = np.mean(0.5 * (natparam[3] - kappas * mus**2), axis=0)
alphas = np.array(self.size * [alphas])
betas = np.array(self.size * [betas])
return mus, kappas, alphas, betas
class MatrixNormalWishart:
def __init__(self, column_dim, row_dim,
M=None, K=None, psi=None, nu=None):
self.column_dim = column_dim
self.row_dim = row_dim
self.matnorm = MatrixNormalWithPrecision(column_dim, row_dim, M=M, K=K)
self.wishart = Wishart(dim=row_dim, psi=psi, nu=nu)
@property
def params(self):
return self.matnorm.M, self.matnorm.K, self.wishart.psi, self.wishart.nu
@params.setter
def params(self, values):
self.matnorm.M, self.matnorm.K, self.wishart.psi, self.wishart.nu = values
@property
def nat_param(self):
return self.std_to_nat(self.params)
@nat_param.setter
def nat_param(self, natparam):
self.params = self.nat_to_std(natparam)
def std_to_nat(self, params):
# stats = [A.T @ V,
# -0.5 * A.T @ V @ A,
# -0.5 * V,
# 0.5 * log_det(V)]
#
# nats = [M @ K,
# K,
# psi^-1 + M @ K @ M.T,
# nu - d - 1. + l]
a = params[0] @ params[1]
b = params[1]
c = np.linalg.inv(params[2]) + params[0] @ params[1] @ params[0].T
d = params[3] - self.row_dim - 1. + self.column_dim
return Stats([a, b, c, d])
def nat_to_std(self, natparam):
M = natparam[0] @ np.linalg.inv(natparam[1])
K = natparam[1]
psi = np.linalg.inv(natparam[2] - M @ K @ M.T)
nu = natparam[3] + self.row_dim + 1. - self.column_dim
return M, K, psi, nu
def mean(self):
return self.matnorm.mean(), self.wishart.mean()
def mode(self):
A = self.matnorm.mode()
lmbda = (self.wishart.nu - self.row_dim) * self.wishart.psi
return A, lmbda
def rvs(self, size=1):
lmbda = self.wishart.rvs()
self.matnorm.V = lmbda
A = self.matnorm.rvs()
return A, lmbda
@property
def base(self):
return self.matnorm.base * self.wishart.base
def log_base(self):
return np.log(self.base)
def log_partition(self):
_, K, psi, nu = self.params
return - 0.5 * self.row_dim * np.linalg.slogdet(K)[1]\
+ Wishart(dim=self.row_dim, psi=psi, nu=nu).log_partition()
def log_likelihood(self, x):
A, lmbda = x
return MatrixNormalWithPrecision(column_dim=self.column_dim,
row_dim=self.row_dim,
M=self.matnorm.M, V=lmbda,
K=self.matnorm.K).log_likelihood(A)\
+ self.wishart.log_likelihood(lmbda)
def log_likelihood_grad(self, x):
A, lmbda = x
a = 0.5 * (lmbda @ A @ self.matnorm.K + (self.matnorm.K @ A.T @ lmbda).T) \
- 0.5 * (lmbda @ self.matnorm.M @ self.matnorm.K + lmbda.T @ self.matnorm.M @ self.matnorm.K.T)
b = 0.5 * (self.row_dim * np.linalg.inv(self.matnorm.K).T
- ((A - self.matnorm.M).T @ lmbda @ (A - self.matnorm.M)).T)
c = 0.5 * ((np.linalg.inv(self.wishart.psi) @ lmbda @ np.linalg.inv(self.wishart.psi)).T
- self.wishart.nu * np.linalg.inv(self.wishart.psi).T)
d = 0.5 * (np.linalg.slogdet(lmbda)[1] - self.row_dim * np.log(2.)
- np.linalg.slogdet(self.wishart.psi)[1] - digamma(self.wishart.nu / 2.))
return a, b, c, d
class StackedMatrixNormalWisharts:
def __init__(self, size, column_dim, row_dim,
Ms=None, Ks=None, psis=None, nus=None):
self.size = size
self.column_dim = column_dim
self.row_dim = row_dim
Ms = [None] * self.size if Ms is None else Ms
Ks = [None] * self.size if Ks is None else Ks
psis = [None] * self.size if psis is None else psis
nus = [None] * self.size if nus is None else nus
self.dists = [MatrixNormalWishart(column_dim, row_dim,
Ms[k], Ks[k], psis[k], nus[k])
for k in range(self.size)]
@property
def params(self):
return self.Ms, self.Ks, self.psis, self.nus
@params.setter
def params(self, values):
self.Ms, self.Ks, self.psis, self.nus = values
@property
def nat_param(self):
return self.std_to_nat(self.params)
@nat_param.setter
def nat_param(self, natparam):
self.params = self.nat_to_std(natparam)
def std_to_nat(self, params):
params_list = list(zip(*params))
natparams_list = [dist.std_to_nat(par) for dist, par in zip(self.dists, params_list)]
natparams_stack = Stats(map(partial(np.stack, axis=0), zip(*natparams_list)))
return natparams_stack
def nat_to_std(self, natparam):
natparams_list = list(zip(*natparam))
params_list = [dist.nat_to_std(par) for dist, par in zip(self.dists, natparams_list)]
params_stack = tuple(map(partial(np.stack, axis=0), zip(*params_list)))
return params_stack
@property
def Ms(self):
return np.array([dist.matnorm.M for dist in self.dists])
@Ms.setter
def Ms(self, value):
for k, dist in enumerate(self.dists):
dist.matnorm.M = value[k, ...]
@property
def Ks(self):
return np.array([dist.matnorm.K for dist in self.dists])
@Ks.setter
def Ks(self, value):
for k, dist in enumerate(self.dists):
dist.matnorm.K = value[k, ...]
@property
def psis(self):
return np.array([dist.wishart.psi for dist in self.dists])
@psis.setter
def psis(self, value):
for k, dist in enumerate(self.dists):
dist.wishart.psi = value[k, ...]
@property
def nus(self):
return np.array([dist.wishart.nu for dist in self.dists])
@nus.setter
def nus(self, value):
for k, dist in enumerate(self.dists):
dist.wishart.nu = value[k, ...]
def mean(self):
zipped = zip(*[dist.mean() for dist in self.dists])
return tuple(map(partial(np.stack, axis=0), zipped))
def mode(self):
zipped = zip(*[dist.mode() for dist in self.dists])
return tuple(map(partial(np.stack, axis=0), zipped))
def rvs(self):
zipped = zip(*[dist.rvs() for dist in self.dists])
return tuple(map(partial(np.stack, axis=0), zipped))
@property
def base(self):
return np.array([dist.base for dist in self.dists])
def log_base(self):
return np.log(self.base)
def log_partition(self):
return np.array([dist.log_partition() for dist in self.dists])
def log_likelihood(self, x):
return np.sum([dist.log_likelihood(_x)
for dist, _x in zip(self.dists, list(zip(*x)))])
def log_likelihood_grad(self, x):
grad_list = [dist.log_likelihood_grad(_x)
for dist, _x in zip(self.dists, list(zip(*x)))]
grad_stack = tuple(map(partial(np.stack, axis=0), zip(*grad_list)))
return grad_stack
class TiedMatrixNormalWisharts(StackedMatrixNormalWisharts):
def __init__(self, size, column_dim, row_dim,
Ms=None, Ks=None, psis=None, nus=None):
super(TiedMatrixNormalWisharts, self).__init__(size, column_dim, row_dim,
Ms, Ks, psis, nus)
def std_to_nat(self, params):
params_list = list(zip(*params))
natparams_list = [dist.std_to_nat(par) for dist, par in zip(self.dists, params_list)]
natparams_stack = Stats(map(partial(np.stack, axis=0), zip(*natparams_list)))
return natparams_stack
def nat_to_std(self, natparam):
Ms = np.einsum('kdl,klh->kdh', natparam[0], np.linalg.inv(natparam[1]))
Ks = natparam[1]
psi = np.linalg.inv(np.mean(natparam[2] - np.einsum('kdl,klm,khm->kdh', Ms, Ks, Ms), axis=0))
nu = np.mean(natparam[3] + self.row_dim + 1 - self.column_dim)
psis = np.array(self.size * [psi])
nus = np.array(self.size * [nu])
return Ms, Ks, psis, nus
class MatrixNormalGamma:
def __init__(self, column_dim, row_dim,
M=None, K=None, alphas=None, betas=None):
self.column_dim = column_dim
self.row_dim = row_dim
self.matnorm = MatrixNormalWithDiagonalPrecision(column_dim, row_dim, M=M, K=K)
self.gamma = Gamma(dim=row_dim, alphas=alphas, betas=betas)
@property
def params(self):
return self.matnorm.M, self.matnorm.K, self.gamma.alphas, self.gamma.betas
@params.setter
def params(self, values):
self.matnorm.M, self.matnorm.K, self.gamma.alphas, self.gamma.betas = values
@property
def nat_param(self):
return self.std_to_nat(self.params)
@nat_param.setter
def nat_param(self, natparam):
self.params = self.nat_to_std(natparam)
@staticmethod
def std_to_nat(params):
# stats = [A.T * V_diag,
# -0.5 * A.T @ A,
# 0.5 * log(V_diag),
# -0.5 * V_diag]
#
# nats = [M @ K,
# K,
# 2. * alpha - 1.,
# 2. * beta + M @ K @ M.T]
a = params[0] @ params[1]
b = params[1]
c = 2. * params[2] - 1.
d = 2. * params[3] + np.einsum('dl,lm,dm->d', params[0], params[1], params[0])
return Stats([a, b, c, d])
@staticmethod
def nat_to_std(natparam):
M = natparam[0] @ np.linalg.inv(natparam[1])
K = natparam[1]
alphas = 0.5 * (natparam[2] + 1.)
betas = 0.5 * (natparam[3] - np.einsum('dl,lm,dm->d', M, K, M))
return M, K, alphas, betas
def mean(self):
return self.matnorm.mean(), self.gamma.mean()
def mode(self):
A = self.matnorm.mode()
lmbda_diag = (self.gamma.alphas - 1. / 2.) / self.gamma.betas
return A, lmbda_diag
def rvs(self, size=1):
lmbdas = self.gamma.rvs()
self.matnorm.V_diag = lmbdas
A = self.matnorm.rvs()
return A, lmbdas
@property
def base(self):
return self.matnorm.base * self.gamma.base
def log_base(self):
return np.log(self.base)
def log_partition(self):
_, K, alphas, betas = self.params
return - self.row_dim * (0.5 * self.column_dim * np.linalg.slogdet(K)[1])\
+ Gamma(dim=self.row_dim, alphas=alphas, betas=betas).log_partition()
def log_likelihood(self, x):
A, lmbda_diag = x
return MatrixNormalWithDiagonalPrecision(column_dim=self.column_dim,
row_dim=self.row_dim,
M=self.matnorm.M, V_diag=lmbda_diag,
K=self.matnorm.K).log_likelihood(A)\
+ self.gamma.log_likelihood(lmbda_diag)
class StackedMatrixNormalGammas:
def __init__(self, size,
column_dim, row_dim,
Ms=None, Ks=None,
alphas=None, betas=None):
self.size = size
self.column_dim = column_dim
self.row_dim = row_dim
Ms = [None] * self.size if Ms is None else Ms
Ks = [None] * self.size if Ks is None else Ks
alphas = [None] * self.size if alphas is None else alphas
betas = [None] * self.size if betas is None else betas
self.dists = [MatrixNormalGamma(column_dim, row_dim,
Ms[k], Ks[k], alphas[k], betas[k])
for k in range(self.size)]
@property
def params(self):
return self.Ms, self.Ks, self.alphas, self.betas
@params.setter
def params(self, values):
self.Ms, self.Ks, self.alphas, self.betas = values
@property
def nat_param(self):
return self.std_to_nat(self.params)
@nat_param.setter
def nat_param(self, natparam):
self.params = self.nat_to_std(natparam)
def std_to_nat(self, params):
params_list = list(zip(*params))
natparams_list = [dist.std_to_nat(par) for dist, par in zip(self.dists, params_list)]
natparams_stack = Stats(map(partial(np.stack, axis=0), zip(*natparams_list)))
return natparams_stack
def nat_to_std(self, natparam):
natparams_list = list(zip(*natparam))
params_list = [dist.nat_to_std(par) for dist, par in zip(self.dists, natparams_list)]
params_stack = tuple(map(partial(np.stack, axis=0), zip(*params_list)))
return params_stack
@property
def Ms(self):
return np.array([dist.matnorm.M for dist in self.dists])
@Ms.setter
def Ms(self, value):
for k, dist in enumerate(self.dists):
dist.matnorm.M = value[k, ...]
@property
def Ks(self):
return np.array([dist.matnorm.K for dist in self.dists])
@Ks.setter
def Ks(self, value):
for k, dist in enumerate(self.dists):
dist.matnorm.K = value[k, ...]
@property
def alphas(self):
return np.array([dist.gamma.alphas for dist in self.dists])
@alphas.setter
def alphas(self, value):
for k, dist in enumerate(self.dists):
dist.gamma.alphas = value[k, ...]
@property
def betas(self):
return np.array([dist.gamma.betas for dist in self.dists])
@betas.setter
def betas(self, value):
for k, dist in enumerate(self.dists):
dist.gamma.betas = value[k, ...]
def mean(self):
zipped = zip(*[dist.mean() for dist in self.dists])
return tuple(map(partial(np.stack, axis=0), zipped))
def mode(self):
zipped = zip(*[dist.mode() for dist in self.dists])
return tuple(map(partial(np.stack, axis=0), zipped))
def rvs(self):
zipped = zip(*[dist.rvs() for dist in self.dists])
return tuple(map(partial(np.stack, axis=0), zipped))
@property
def base(self):
return np.array([dist.base for dist in self.dists])
def log_base(self):
return np.log(self.base)
def log_partition(self):
return np.array([dist.log_partition() for dist in self.dists])
def log_likelihood(self, x):
return np.sum([dist.log_likelihood(_x)
for dist, _x in zip(self.dists, list(zip(*x)))])
class TiedMatrixNormalGammas(StackedMatrixNormalGammas):
def __init__(self, size, column_dim, row_dim,
Ms=None, Ks=None, alphas=None, betas=None):
super(TiedMatrixNormalGammas, self).__init__(size, column_dim, row_dim,
Ms, Ks, alphas, betas)
def std_to_nat(self, params):
params_list = list(zip(*params))
natparams_list = [dist.std_to_nat(par) for dist, par in zip(self.dists, params_list)]
natparams_stack = Stats(map(partial(np.stack, axis=0), zip(*natparams_list)))
return natparams_stack
def nat_to_std(self, natparam):
aT = np.transpose(natparam[0], (0, 2, 1))
bT = np.transpose(natparam[1], (0, 2, 1))
Ms = np.transpose(np.linalg.solve(bT, aT), (0, 2, 1))
Ks = natparam[1]
alphas = np.mean(0.5 * (natparam[2] + 1.), axis=0)
betas = np.mean(0.5 * (natparam[3] - np.einsum('kdl,klm,kdm->kd', Ms, Ks, Ms)), axis=0)
alphas = np.array(self.size * [alphas])
betas = np.array(self.size * [betas])
return Ms, Ks, alphas, betas
class SingleOutputLinearGaussianWithAutomaticRelevance:
def __init__(self, input_dim,
likelihood_precision_prior,
parameter_precision_prior, affine=True):
self.input_dim = input_dim
self.affine = affine
self.likelihood_precision_prior = likelihood_precision_prior
self.parameter_precision_prior = parameter_precision_prior
alphas = self.parameter_precision_prior.rvs()
self.parameter_prior = GaussianWithPrecision(dim=input_dim,
mu=np.zeros((self.input_dim, )),
lmbda=np.diag(alphas))
self.likelihood_precision_posterior = deepcopy(likelihood_precision_prior)
self.parameter_precision_posterior = deepcopy(parameter_precision_prior)
self.parameter_posterior = deepcopy(self.parameter_prior)
beta = self.likelihood_precision_prior.rvs()
self.likelihood_known_precision = SingleOutputLinearGaussianWithKnownPrecision(column_dim=input_dim,
lmbda=beta,
affine=affine)
coef = self.parameter_prior.rvs()
self.likelihood_known_mean = SingleOutputLinearGaussianWithKnownMean(column_dim=input_dim,
W=coef, affine=affine)
self.likelihood = LinearGaussianWithPrecision(column_dim=input_dim, row_dim=1,
A=np.expand_dims(coef, axis=0),
lmbda=np.diag(beta), affine=affine)
@property
def params(self):
return self.A, self.lmbda
@params.setter
def params(self, values):
self.A, self.lmbda = values
@property
def A(self):
return self.likelihood.A
@A.setter
def A(self, value):
# value is a single row 1d-array
self.likelihood.A = np.expand_dims(value, axis=0)
@property
def lmbda(self):
return self.likelihood.lmbda
@lmbda.setter
def lmbda(self, value):
# value is a 1d-array
self.likelihood.lmbda = np.diag(value)
@property
def sigma(self):
return self.likelihood.sigma
def predict(self, x):
return self.likelihood.predict(x)
def mean(self, x):
return self.likelihood.mean(x)
def mode(self, x):
return self.likelihood.mode(x)
def rvs(self, x):
return self.likelihood.rvs(x)
def log_likelihood(self, x, y):
if isinstance(x, np.ndarray) and isinstance(y, np.ndarray):
yi = np.expand_dims(y, axis=1)
return self.likelihood.log_likelihood(x, yi)
else:
return list(map(self.log_likelihood, x, y))
def _em(self, x, y, w=None, nb_iter=10):
# self.likelihood_precision_posterior = deepcopy(self.likelihood_precision_prior)
# self.parameter_precision_posterior = deepcopy(self.parameter_precision_prior)
# self.parameter_posterior = deepcopy(self.parameter_prior)
for i in range(nb_iter):
# variational e-step
# parameter posterior
alphas = self.parameter_precision_posterior.mean()
self.parameter_prior.lmbda = np.diag(alphas)
beta = self.likelihood_precision_posterior.mean()
self.likelihood_known_precision.lmbda = beta
stats = self.likelihood_known_precision.statistics(x, y) if w is None\
else self.likelihood_known_precision.weighted_statistics(x, y, w)
self.parameter_posterior.nat_param = self.parameter_prior.nat_param + stats
# variational m-step
# likelihood precision posterior
coef = self.parameter_posterior.mean()
self.likelihood_known_mean.W = coef
stats = self.likelihood_known_mean.statistics(x, y) if w is None\
else self.likelihood_known_mean.weighted_statistics(x, y, w)
self.likelihood_precision_posterior.nat_param = self.likelihood_precision_prior.nat_param + stats
# parameter precision posterior
parameter_likelihood = GaussianWithKnownMeanAndDiagonalPrecision(dim=self.input_dim)
stats = parameter_likelihood.statistics(coef)
self.parameter_precision_posterior.nat_param = self.parameter_precision_prior.nat_param + stats
def em(self, x, y, w=None, **kwargs):
nb_iter = kwargs.get('nb_iter', 10)
self._em(x, y, w, nb_iter)
values = kwargs.get('values', 'mode')
if values == 'mode':
coef = self.parameter_posterior.mode()
beta = self.likelihood_precision_posterior.mode()
else:
coef = self.parameter_posterior.rvs()
beta = self.likelihood_precision_posterior.rvs()
self.A, self.lmbda = coef, beta
class MultiOutputLinearGaussianWithAutomaticRelevance:
def __init__(self, input_dim, output_dim,
likelihood_precision_prior,
parameter_precision_prior, affine=True):
self.input_dim = input_dim
self.output_dim = output_dim
self.affine = affine
self.dists = []
for i in range(self.output_dim):
dist = SingleOutputLinearGaussianWithAutomaticRelevance(input_dim,
likelihood_precision_prior,
parameter_precision_prior,
affine)
self.dists.append(dist)
@property
def params(self):
return self.A, self.lmbda
@params.setter
def params(self, values):
self.A = values[0]
self.lmbda = values[1]
@property
def A(self):
return np.vstack([dist.A for dist in self.dists])
@A.setter
def A(self, values):
for i, dist in enumerate(self.dists):
dist.A = values[i]
@property
def lmbda(self):
lmbdas = [dist.lmbda for dist in self.dists]
return sc.linalg.block_diag(*lmbdas)
@lmbda.setter
def lmbda(self, values):
diags = np.diag(values)
for i, dist in enumerate(self.dists):
dist.lmbda = np.atleast_1d(diags[i])
def predict(self, x):
return np.hstack([dist.predict(x) for dist in self.dists])
def mean(self, x):
return self.predict(x)
def mode(self, x):
return self.predict(x)
def rvs(self, x):
# lmbda is diagonal by construction (a block_diag of scalars), so invert elementwise; taking 1/sqrt of the full matrix divides by zero off the diagonal
lmbda_chol_inv = 1. / np.sqrt(np.diag(self.lmbda))
return self.mean(x) + npr.normal(size=self.output_dim) * lmbda_chol_inv
def log_likelihood(self, x, y):
if isinstance(x, np.ndarray) and isinstance(y, np.ndarray):
log_lik = np.zeros((len(x), ))
for i, dist in enumerate(self.dists):
log_lik += dist.log_likelihood(x, y[:, i])
return log_lik
else:
return list(map(self.log_likelihood, x, y))
def em(self, x, y, w=None, **kwargs):
for i, dist in enumerate(self.dists):
dist.em(x, y[:, i], w, **kwargs)
class StackedMultiOutputLinearGaussianWithAutomaticRelevance:
def __init__(self, stack_size, input_dim, output_dim,
likelihood_precision_prior,
parameter_precision_prior, affine=True):
self.stack_size = stack_size
self.input_dim = input_dim
self.output_dim = output_dim
self.affine = affine
self.stack = []
for k in range(self.stack_size):
dist = MultiOutputLinearGaussianWithAutomaticRelevance(input_dim, output_dim,
likelihood_precision_prior,
parameter_precision_prior,
affine)
self.stack.append(dist)
@property
def params(self):
return self.As, self.lmbdas
@params.setter
def params(self, values):
self.As = values[0]
self.lmbdas = values[1]
@property
def As(self):
return np.array([dist.A for dist in self.stack])
@As.setter
def As(self, values):
for k, dist in enumerate(self.stack):
dist.A = values[k]
@property
def lmbdas(self):
return np.array([dist.lmbda for dist in self.stack])
@lmbdas.setter
def lmbdas(self, values):
for k, dist in enumerate(self.stack):
dist.lmbda = values[k]
def predict(self, z, x):
return self.stack[z].predict(x)
def mean(self, z, x):
return self.predict(z, x)
def mode(self, z, x):
return self.predict(z, x)
def rvs(self, z, x):
return self.stack[z].rvs(x)
def log_likelihood(self, x, y):
if isinstance(x, np.ndarray) and isinstance(y, np.ndarray):
log_lik = np.zeros((len(x), self.stack_size))
for k, dist in enumerate(self.stack):
log_lik[:, k] = dist.log_likelihood(x, y)
return log_lik
else:
return list(map(self.log_likelihood, x, y))
def em(self, x, y, w=None, **kwargs):
for k, dist in enumerate(self.stack):
dist.em(x, y, None if w is None else w[:, k], **kwargs)
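# --- Hedged usage sketch (an addition, not part of the original file) ---
# A minimal fit/score loop for the stacked ARD regression above, kept as
# comments because the concrete prior constructors are assumptions: any
# objects exposing rvs(), mean(), mode() and nat_param (as the classes
# above expect) will do.
#
# model = StackedMultiOutputLinearGaussianWithAutomaticRelevance(
#     stack_size=3, input_dim=2, output_dim=1,
#     likelihood_precision_prior=likelihood_prec_prior,   # assumed Gamma-like prior
#     parameter_precision_prior=parameter_prec_prior)     # assumed Gamma-like prior
# x, y = npr.randn(100, 2), npr.randn(100, 1)
# w = np.ones((100, 3)) / 3.              # responsibilities, one column per component
# model.em(x, y, w, nb_iter=10)           # runs per-component variational EM
# loglik = model.log_likelihood(x, y)     # shape (100, 3): one column per component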
| 32.962813
| 109
| 0.580846
| 5,357
| 40,775
| 4.307075
| 0.038641
| 0.032376
| 0.015993
| 0.023101
| 0.807567
| 0.737052
| 0.70316
| 0.681663
| 0.649244
| 0.612534
| 0
| 0.008365
| 0.299277
| 40,775
| 1,236
| 110
| 32.989482
| 0.799174
| 0.028449
| 0
| 0.684564
| 0
| 0
| 0.002906
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.225951
| false
| 0
| 0.021253
| 0.089485
| 0.425056
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
22cae21ce6cee62b39eb0bd31e27c1af4177c683
| 83
|
py
|
Python
|
interrogator/question/__init__.py
|
jackmaney/interrogator
|
1037b6b49542f1f309fbe18afe7ca305d1faabad
|
[
"MIT"
] | 2
|
2015-03-17T21:42:51.000Z
|
2018-02-24T22:54:11.000Z
|
interrogator/question/__init__.py
|
jackmaney/interrogator
|
1037b6b49542f1f309fbe18afe7ca305d1faabad
|
[
"MIT"
] | 5
|
2015-03-13T03:33:16.000Z
|
2015-03-20T22:17:48.000Z
|
interrogator/question/__init__.py
|
jackmaney/interrogator
|
1037b6b49542f1f309fbe18afe7ca305d1faabad
|
[
"MIT"
] | null | null | null |
"""
This module provides the ``Question`` class.
"""
from .question import Question
| 16.6
| 44
| 0.722892
| 10
| 83
| 6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144578
| 83
| 4
| 45
| 20.75
| 0.84507
| 0.53012
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
22cee56c4930839af828dd7cb5db0bfe53a61968
| 75
|
py
|
Python
|
distkit/__init__.py
|
abhinav314/distkit
|
56f0cfc5123d64b0d4a43195cd48028d8f5b2bb7
|
[
"MIT"
] | null | null | null |
distkit/__init__.py
|
abhinav314/distkit
|
56f0cfc5123d64b0d4a43195cd48028d8f5b2bb7
|
[
"MIT"
] | null | null | null |
distkit/__init__.py
|
abhinav314/distkit
|
56f0cfc5123d64b0d4a43195cd48028d8f5b2bb7
|
[
"MIT"
] | null | null | null |
from distkit.gaussian import Gaussian
from distkit.binomial import Binomial
| 37.5
| 37
| 0.88
| 10
| 75
| 6.6
| 0.5
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093333
| 75
| 2
| 38
| 37.5
| 0.970588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
22d672e3b8fe31416af76f4969f1d8b058510645
| 42
|
py
|
Python
|
slack_utils/exceptions.py
|
wilgucki/slack_utils
|
206ba34e54baa63ce44de6e9cdeb6c20c18d2f8c
|
[
"MIT"
] | null | null | null |
slack_utils/exceptions.py
|
wilgucki/slack_utils
|
206ba34e54baa63ce44de6e9cdeb6c20c18d2f8c
|
[
"MIT"
] | 30
|
2020-01-21T07:21:16.000Z
|
2020-10-12T06:07:07.000Z
|
slack_utils/exceptions.py
|
wilgucki/slack_utils
|
206ba34e54baa63ce44de6e9cdeb6c20c18d2f8c
|
[
"MIT"
] | null | null | null |
class SlackException(Exception):
pass
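# --- Hedged usage sketch (an addition, not part of the original file) ---
# SlackException is a plain marker type; callers raise and catch it to keep
# Slack-specific failures distinct from other exceptions.
if __name__ == "__main__":
    try:
        raise SlackException("channel not found")  # hypothetical error message
    except SlackException as err:
        print("slack error:", err)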
| 14
| 32
| 0.761905
| 4
| 42
| 8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 42
| 2
| 33
| 21
| 0.914286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
22ff240b5c4504d49ea670dc608bbcc935cdab9c
| 797
|
py
|
Python
|
datadog_checks_base/tests/test_config.py
|
mchelen-gov/integrations-core
|
81281600b3cc7025a7a32148c59620c9592a564f
|
[
"BSD-3-Clause"
] | 663
|
2016-08-23T05:23:45.000Z
|
2022-03-29T00:37:23.000Z
|
datadog_checks_base/tests/test_config.py
|
mchelen-gov/integrations-core
|
81281600b3cc7025a7a32148c59620c9592a564f
|
[
"BSD-3-Clause"
] | 6,642
|
2016-06-09T16:29:20.000Z
|
2022-03-31T22:24:09.000Z
|
datadog_checks_base/tests/test_config.py
|
mchelen-gov/integrations-core
|
81281600b3cc7025a7a32148c59620c9592a564f
|
[
"BSD-3-Clause"
] | 1,222
|
2017-01-27T15:51:38.000Z
|
2022-03-31T18:17:51.000Z
|
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from datadog_checks import config
def test_alias():
"""
Ensure the module exposes _is_affirmative as an alias of is_affirmative,
for backward compatibility with Agent 5.x
"""
assert getattr(config, "_is_affirmative", None) is not None
def test_is_affirmative():
assert config.is_affirmative(None) is False
assert config.is_affirmative(0) is False
assert config.is_affirmative("whatever, it could be 'off'") is False
assert config.is_affirmative(1) is True
assert config.is_affirmative('YES') is True
assert config.is_affirmative('True') is True
assert config.is_affirmative('On') is True
assert config.is_affirmative('1') is True
| 31.88
| 75
| 0.734003
| 118
| 797
| 4.813559
| 0.457627
| 0.274648
| 0.301056
| 0.352113
| 0.485915
| 0.401408
| 0.112676
| 0
| 0
| 0
| 0
| 0.013804
| 0.181932
| 797
| 24
| 76
| 33.208333
| 0.857362
| 0.27478
| 0
| 0
| 0
| 0
| 0.093525
| 0
| 0
| 0
| 0
| 0
| 0.75
| 1
| 0.166667
| true
| 0
| 0.083333
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a3b0a95b743ca84edb1ed51b46f5e46c7f1e30f2
| 7,369
|
py
|
Python
|
tests/test_integration_client.py
|
tylerthetiger/basketball_reference_web_scraper
|
2bf0a260e08b649326852c05f39da2922b912bfe
|
[
"MIT"
] | null | null | null |
tests/test_integration_client.py
|
tylerthetiger/basketball_reference_web_scraper
|
2bf0a260e08b649326852c05f39da2922b912bfe
|
[
"MIT"
] | null | null | null |
tests/test_integration_client.py
|
tylerthetiger/basketball_reference_web_scraper
|
2bf0a260e08b649326852c05f39da2922b912bfe
|
[
"MIT"
] | null | null | null |
from datetime import datetime
from unittest import TestCase
import basketball_reference_web_scraper.client as client
from basketball_reference_web_scraper.data import OutputWriteOption, OutputType
class TestClient(TestCase):
def test_schedules_from_2001(self):
now = datetime.now()
current_year = now.year
for year in range(2001, current_year + 1):
season_schedule = client.season_schedule(season_end_year=year)
self.assertIsNotNone(season_schedule)
def test_output_json_box_scores_to_file(self):
client.player_box_scores(
day=1,
month=1,
year=2001,
output_type=OutputType.JSON,
output_file_path="./foo.json",
output_write_option=OutputWriteOption.WRITE
)
def test_output_json_box_scores_to_memory(self):
january_first_box_scores = client.player_box_scores(
day=1,
month=1,
year=2001,
output_type=OutputType.JSON,
)
self.assertIsNotNone(january_first_box_scores)
def test_2001_season_schedule(self):
schedule = client.season_schedule(season_end_year=2001)
self.assertIsNotNone(schedule)
def test_2002_season_schedule(self):
schedule = client.season_schedule(season_end_year=2002)
self.assertIsNotNone(schedule)
def test_2003_season_schedule(self):
schedule = client.season_schedule(season_end_year=2003)
self.assertIsNotNone(schedule)
def test_2004_season_schedule(self):
schedule = client.season_schedule(season_end_year=2004)
self.assertIsNotNone(schedule)
def test_2005_season_schedule(self):
schedule = client.season_schedule(season_end_year=2005)
self.assertIsNotNone(schedule)
def test_2006_season_schedule(self):
schedule = client.season_schedule(season_end_year=2006)
self.assertIsNotNone(schedule)
def test_2007_season_schedule(self):
schedule = client.season_schedule(season_end_year=2007)
self.assertIsNotNone(schedule)
def test_2008_season_schedule(self):
schedule = client.season_schedule(season_end_year=2008)
self.assertIsNotNone(schedule)
def test_2009_season_schedule(self):
schedule = client.season_schedule(season_end_year=2009)
self.assertIsNotNone(schedule)
def test_2010_season_schedule(self):
schedule = client.season_schedule(season_end_year=2010)
self.assertIsNotNone(schedule)
def test_2011_season_schedule(self):
schedule = client.season_schedule(season_end_year=2011)
self.assertIsNotNone(schedule)
def test_2012_season_schedule(self):
schedule = client.season_schedule(season_end_year=2012)
self.assertIsNotNone(schedule)
def test_2013_season_schedule(self):
schedule = client.season_schedule(season_end_year=2013)
self.assertIsNotNone(schedule)
def test_2014_season_schedule(self):
schedule = client.season_schedule(season_end_year=2014)
self.assertIsNotNone(schedule)
def test_2015_season_schedule(self):
schedule = client.season_schedule(season_end_year=2015)
self.assertIsNotNone(schedule)
def test_2016_season_schedule(self):
schedule = client.season_schedule(season_end_year=2016)
self.assertIsNotNone(schedule)
def test_2017_season_schedule(self):
schedule = client.season_schedule(season_end_year=2017)
self.assertIsNotNone(schedule)
def test_2018_season_schedule(self):
schedule = client.season_schedule(season_end_year=2018)
self.assertIsNotNone(schedule)
# TODO: @jaebradley there's an open PR that's fixing this broken test
# def test_2019_season_schedule(self):
# schedule = client.season_schedule(season_end_year=2019)
# self.assertIsNotNone(schedule)
def test_2001_player_season_totals(self):
player_season_totals = client.players_season_totals(season_end_year=2001)
self.assertIsNotNone(player_season_totals)
def test_2002_player_season_totals(self):
player_season_totals = client.players_season_totals(season_end_year=2002)
self.assertIsNotNone(player_season_totals)
def test_2003_player_season_totals(self):
player_season_totals = client.players_season_totals(season_end_year=2003)
self.assertIsNotNone(player_season_totals)
def test_2004_player_season_totals(self):
player_season_totals = client.players_season_totals(season_end_year=2004)
self.assertIsNotNone(player_season_totals)
def test_2005_player_season_totals(self):
player_season_totals = client.players_season_totals(season_end_year=2005)
self.assertIsNotNone(player_season_totals)
def test_2006_player_season_totals(self):
player_season_totals = client.players_season_totals(season_end_year=2006)
self.assertIsNotNone(player_season_totals)
def test_2007_player_season_totals(self):
player_season_totals = client.players_season_totals(season_end_year=2007)
self.assertIsNotNone(player_season_totals)
def test_2008_player_season_totals(self):
player_season_totals = client.players_season_totals(season_end_year=2008)
self.assertIsNotNone(player_season_totals)
def test_2009_player_season_totals(self):
player_season_totals = client.players_season_totals(season_end_year=2009)
self.assertIsNotNone(player_season_totals)
def test_2010_player_season_totals(self):
player_season_totals = client.players_season_totals(season_end_year=2010)
self.assertIsNotNone(player_season_totals)
def test_2011_player_season_totals(self):
player_season_totals = client.players_season_totals(season_end_year=2011)
self.assertIsNotNone(player_season_totals)
def test_2012_player_season_totals(self):
player_season_totals = client.players_season_totals(season_end_year=2012)
self.assertIsNotNone(player_season_totals)
def test_2013_player_season_totals(self):
player_season_totals = client.players_season_totals(season_end_year=2013)
self.assertIsNotNone(player_season_totals)
def test_2014_player_season_totals(self):
player_season_totals = client.players_season_totals(season_end_year=2014)
self.assertIsNotNone(player_season_totals)
def test_2015_player_season_totals(self):
player_season_totals = client.players_season_totals(season_end_year=2015)
self.assertIsNotNone(player_season_totals)
def test_2016_player_season_totals(self):
player_season_totals = client.players_season_totals(season_end_year=2016)
self.assertIsNotNone(player_season_totals)
def test_2017_player_season_totals(self):
player_season_totals = client.players_season_totals(season_end_year=2017)
self.assertIsNotNone(player_season_totals)
def test_2018_player_season_totals(self):
player_season_totals = client.players_season_totals(season_end_year=2018)
self.assertIsNotNone(player_season_totals)
def test_2019_player_season_totals(self):
player_season_totals = client.players_season_totals(season_end_year=2019)
self.assertIsNotNone(player_season_totals)
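# --- Hedged refactoring sketch (an addition, not part of the original file) ---
# The per-year methods above differ only in the year; the same coverage can
# be written once with unittest's subTest, which still reports each failing
# year separately. Kept as comments since it duplicates existing coverage.
#
# class TestClientCompact(TestCase):
#     def test_season_schedules(self):
#         for year in range(2001, 2019):   # 2019 excluded, matching the TODO above
#             with self.subTest(year=year):
#                 self.assertIsNotNone(client.season_schedule(season_end_year=year))
#
#     def test_players_season_totals(self):
#         for year in range(2001, 2020):
#             with self.subTest(year=year):
#                 self.assertIsNotNone(client.players_season_totals(season_end_year=year))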
| 38.989418
| 81
| 0.751934
| 905
| 7,369
| 5.691713
| 0.088398
| 0.181712
| 0.206174
| 0.100951
| 0.864298
| 0.807028
| 0.705106
| 0.54106
| 0.54106
| 0.54106
| 0
| 0.053826
| 0.180622
| 7,369
| 188
| 82
| 39.196809
| 0.799271
| 0.027005
| 0
| 0.323741
| 0
| 0
| 0.001396
| 0
| 0
| 0
| 0
| 0.005319
| 0.280576
| 1
| 0.28777
| false
| 0
| 0.028777
| 0
| 0.323741
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a3c0ff2bf594c3260b361f9b1d4403a0af8b68a6
| 227
|
py
|
Python
|
ui/buttons.py
|
okkdev/concerto-mbaacc
|
2772111b0482a2024d14c370a5f6759df07f8460
|
[
"MIT"
] | null | null | null |
ui/buttons.py
|
okkdev/concerto-mbaacc
|
2772111b0482a2024d14c370a5f6759df07f8460
|
[
"MIT"
] | null | null | null |
ui/buttons.py
|
okkdev/concerto-mbaacc
|
2772111b0482a2024d14c370a5f6759df07f8460
|
[
"MIT"
] | null | null | null |
from kivy.uix.button import Button
from kivy.uix.anchorlayout import AnchorLayout
class MenuBtn(Button):
pass
class DummyBtn(Button):
pass
class LobbyBtn(Button):
pass
class PlayerRow(AnchorLayout):
pass
| 12.611111
| 46
| 0.740088
| 28
| 227
| 6
| 0.428571
| 0.178571
| 0.267857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189427
| 227
| 18
| 47
| 12.611111
| 0.913043
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.4
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
a3ca31991b55ed964f7cad9a846387eae13b8f12
| 7,698
|
py
|
Python
|
dojo/unittests/test_finding_helper.py
|
axelpavageau/django-DefectDojo
|
00b425742b783ada0f432241c2812ac1257feb73
|
[
"BSD-3-Clause"
] | 1,772
|
2018-01-22T23:32:15.000Z
|
2022-03-31T14:49:33.000Z
|
dojo/unittests/test_finding_helper.py
|
axelpavageau/django-DefectDojo
|
00b425742b783ada0f432241c2812ac1257feb73
|
[
"BSD-3-Clause"
] | 3,461
|
2018-01-20T19:12:28.000Z
|
2022-03-31T17:14:39.000Z
|
dojo/unittests/test_finding_helper.py
|
axelpavageau/django-DefectDojo
|
00b425742b783ada0f432241c2812ac1257feb73
|
[
"BSD-3-Clause"
] | 1,173
|
2018-01-23T07:10:23.000Z
|
2022-03-31T14:40:43.000Z
|
from django.test import TestCase
from dojo.models import Finding, Test
from django.contrib.auth.models import User
from unittest import mock
from crum import impersonate
import datetime
from django.utils import timezone
import logging
logger = logging.getLogger(__name__)
# frozen_datetime = timezone.make_aware(datetime.datetime(2021, 1, 1, 2, 2, 2), timezone.get_default_timezone())
frozen_datetime = timezone.now()
class TestUpdateFindingStatusSignal(TestCase):
fixtures = ['dojo_testdata.json']
def setUp(self):
self.user_1 = User.objects.get(id='1')
self.user_2 = User.objects.get(id='2')
def get_status_fields(self, finding):
logger.debug('%s, %s, %s, %s, %s, %s, %s, %s', finding.active, finding.verified, finding.false_p, finding.out_of_scope, finding.is_mitigated, finding.mitigated, finding.mitigated_by, finding.last_status_update)
return finding.active, finding.verified, finding.false_p, finding.out_of_scope, finding.is_mitigated, finding.mitigated, finding.mitigated_by, finding.last_status_update
@mock.patch('dojo.finding.helper.timezone.now')
def test_new_finding(self, mock_tz):
mock_tz.return_value = frozen_datetime
with impersonate(self.user_1):
test = Test.objects.last()
finding = Finding(test=test)
finding.save()
self.assertEqual(
self.get_status_fields(finding),
(True, True, False, False, False, None, None, frozen_datetime)
)
@mock.patch('dojo.finding.helper.timezone.now')
def test_no_status_change(self, mock_tz):
mock_tz.return_value = frozen_datetime
with impersonate(self.user_1):
test = Test.objects.last()
finding = Finding(test=test)
finding.save()
status_fields = self.get_status_fields(finding)
finding.title = finding.title + '!!!'
finding.save()
self.assertEqual(
self.get_status_fields(finding),
status_fields
)
@mock.patch('dojo.finding.helper.timezone.now')
def test_mark_fresh_as_mitigated(self, mock_dt):
mock_dt.return_value = frozen_datetime
with impersonate(self.user_1):
test = Test.objects.last()
finding = Finding(test=test, is_mitigated=True, active=False)
finding.save()
self.assertEqual(
self.get_status_fields(finding),
(False, True, False, False, True, frozen_datetime, self.user_1, frozen_datetime)
)
@mock.patch('dojo.finding.helper.timezone.now')
@mock.patch('dojo.finding.helper.can_edit_mitigated_data', return_value=False)
def test_mark_old_active_as_mitigated(self, mock_can_edit, mock_tz):
mock_tz.return_value = frozen_datetime
with impersonate(self.user_1):
test = Test.objects.last()
finding = Finding(test=test, is_mitigated=True, active=False)
finding.save()
finding.is_mitigated = True
finding.active = False
finding.save()
self.assertEqual(
self.get_status_fields(finding),
(False, True, False, False, True, frozen_datetime, self.user_1, frozen_datetime)
)
@mock.patch('dojo.finding.helper.timezone.now')
@mock.patch('dojo.finding.helper.can_edit_mitigated_data', return_value=True)
def test_mark_old_active_as_mitigated_custom_edit(self, mock_can_edit, mock_tz):
mock_tz.return_value = frozen_datetime
custom_mitigated = datetime.datetime.now()
with impersonate(self.user_1):
test = Test.objects.last()
finding = Finding(test=test)
finding.save()
finding.is_mitigated = True
finding.active = False
finding.mitigated = custom_mitigated
finding.mitigated_by = self.user_2
finding.save()
self.assertEqual(
self.get_status_fields(finding),
(False, True, False, False, True, custom_mitigated, self.user_2, frozen_datetime)
)
@mock.patch('dojo.finding.helper.timezone.now')
@mock.patch('dojo.finding.helper.can_edit_mitigated_data', return_value=True)
def test_update_old_mitigated_with_custom_edit(self, mock_can_edit, mock_tz):
mock_tz.return_value = frozen_datetime
custom_mitigated = datetime.datetime.now()
with impersonate(self.user_1):
test = Test.objects.last()
finding = Finding(test=test, is_mitigated=True, active=False, mitigated=frozen_datetime, mitigated_by=self.user_1)
finding.save()
finding.is_mitigated = True
finding.active = False
finding.mitigated = custom_mitigated
finding.mitigated_by = self.user_2
finding.save()
self.assertEqual(
self.get_status_fields(finding),
(False, True, False, False, True, custom_mitigated, self.user_2, frozen_datetime)
)
@mock.patch('dojo.finding.helper.timezone.now')
@mock.patch('dojo.finding.helper.can_edit_mitigated_data', return_value=True)
def test_update_old_mitigated_with_missing_data(self, mock_can_edit, mock_tz):
mock_tz.return_value = frozen_datetime
custom_mitigated = datetime.datetime.now()
with impersonate(self.user_1):
test = Test.objects.last()
finding = Finding(test=test, is_mitigated=True, active=False, mitigated=custom_mitigated, mitigated_by=self.user_2)
finding.save()
finding.is_mitigated = True
finding.active = False
# trying to remove mitigated fields will trigger the signal to set them to now/current user
finding.mitigated = None
finding.mitigated_by = None
finding.save()
self.assertEqual(
self.get_status_fields(finding),
(False, True, False, False, True, frozen_datetime, self.user_1, frozen_datetime)
)
@mock.patch('dojo.finding.helper.timezone.now')
@mock.patch('dojo.finding.helper.can_edit_mitigated_data', return_value=True)
def test_set_old_mitigated_as_active(self, mock_can_edit, mock_tz):
mock_tz.return_value = frozen_datetime
with impersonate(self.user_1):
test = Test.objects.last()
finding = Finding(test=test, is_mitigated=True, active=False, mitigated=frozen_datetime, mitigated_by=self.user_2)
logger.debug('save1')
finding.save()
finding.active = True
logger.debug('save2')
finding.save()
self.assertEqual(
self.get_status_fields(finding),
(True, True, False, False, False, None, None, frozen_datetime)
)
@mock.patch('dojo.finding.helper.timezone.now')
@mock.patch('dojo.finding.helper.can_edit_mitigated_data', return_value=False)
def test_set_active_as_false_p(self, mock_can_edit, mock_tz):
mock_tz.return_value = frozen_datetime
with impersonate(self.user_1):
test = Test.objects.last()
finding = Finding(test=test)
finding.save()
finding.false_p = True
finding.save()
self.assertEqual(
self.get_status_fields(finding),
# TODO marking as false positive resets verified to False, possible bug / undesired behaviour?
(False, False, True, False, True, frozen_datetime, self.user_1, frozen_datetime)
)
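# --- Reading aid (an addition, not part of the original file) ---
# Every tuple asserted above follows the get_status_fields() ordering:
# (active, verified, false_p, out_of_scope, is_mitigated,
#  mitigated, mitigated_by, last_status_update)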
| 40.303665
| 218
| 0.644713
| 920
| 7,698
| 5.152174
| 0.109783
| 0.07384
| 0.028481
| 0.063291
| 0.797679
| 0.791983
| 0.791983
| 0.775949
| 0.775949
| 0.713502
| 0
| 0.006124
| 0.257599
| 7,698
| 190
| 219
| 40.515789
| 0.823272
| 0.038062
| 0
| 0.673203
| 0
| 0.006536
| 0.082286
| 0.073774
| 0
| 0
| 0
| 0.005263
| 0.058824
| 1
| 0.071895
| false
| 0
| 0.052288
| 0
| 0.143791
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a3f6d2820735b11ca577de173506b4b4f34902d2
| 10,791
|
py
|
Python
|
mafipy/function/tests/test_payoff.py
|
i05nagai/mafipy
|
ea7312065b8abea4c7054203176269637ff346ca
|
[
"MIT"
] | 6
|
2017-01-15T05:05:09.000Z
|
2020-12-29T20:03:37.000Z
|
mafipy/function/tests/test_payoff.py
|
i05nagai/mafipy
|
ea7312065b8abea4c7054203176269637ff346ca
|
[
"MIT"
] | 77
|
2016-12-03T12:54:42.000Z
|
2018-06-15T14:44:14.000Z
|
mafipy/function/tests/test_payoff.py
|
i05nagai/mafipy
|
ea7312065b8abea4c7054203176269637ff346ca
|
[
"MIT"
] | 3
|
2016-12-17T11:09:38.000Z
|
2017-11-05T09:15:02.000Z
|
#!/bin/python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
from pytest import approx
import pytest
from . import util
import mafipy.function as target
class TestPayoff(object):
# before all tests starts
@classmethod
def setup_class(cls):
pass
# after all tests finish
@classmethod
def teardown_class(cls):
pass
# before each test start
def setup(self):
pass
# after each test finish
def teardown(self):
pass
@pytest.mark.parametrize(
"underlying, strike, gearing",
[
# underlying = strike
(2.0, 2.0, 1.0),
# underlying > strike
(3.0, 2.0, 1.0),
# underlying < strike
(1.0, 2.0, 1.0),
# gearing = 2
(2.0, 1.0, 2.0),
])
def test_payoff_call(self, underlying, strike, gearing):
expect = gearing * max(underlying - strike, 0.0)
actual = target.payoff_call(underlying, strike, gearing)
assert(expect == approx(actual))
@pytest.mark.parametrize(
"underlying, strike, gearing",
[
# underlying = strike
(2.0, 2.0, 1.0),
# underlying > strike
(3.0, 2.0, 1.0),
# underlying < strike
(1.0, 2.0, 1.0),
# gearing = 2
(2.0, 1.0, 2.0),
])
def test_payoff_call_fprime(self, underlying, strike, gearing):
expect = 0.0
if underlying > strike:
expect = gearing
actual = target.payoff_call_fprime(underlying, strike, gearing)
assert(expect == approx(actual))
@pytest.mark.parametrize(
"underlying, strike, gearing",
[
# underlying = strike
(2.0, 2.0, 1.0),
# underlying > strike
(3.0, 2.0, 1.0),
# underlying < strike
(1.0, 2.0, 1.0),
# gearing = 2
(2.0, 1.0, 2.0),
])
def test_payoff_put(self, underlying, strike, gearing):
expect = gearing * max(strike - underlying, 0.0)
actual = target.payoff_put(underlying, strike, gearing)
assert(expect == approx(actual))
@pytest.mark.parametrize(
"underlying, strike, gearing",
[
# underlying = strike
(2.0, 2.0, 1.0),
# underlying > strike
(3.0, 2.0, 1.0),
# underlying < strike
(1.0, 2.0, 1.0),
# gearing = 2
(2.0, 1.0, 2.0),
])
def test_payoff_put_fprime(self, underlying, strike, gearing):
expect = 0.0
if underlying < strike:
expect = -gearing
actual = target.payoff_put_fprime(underlying, strike, gearing)
assert(expect == approx(actual))
@pytest.mark.parametrize(
"underlying, lower_strike, upper_strike, gearing",
[
# underlying <= lower_strike,
(1.0, 1.0, 2.0, 1.0),
# lower_strike < underlying < upper_strike,
(1.5, 1.0, 2.0, 1.0),
# underlying >= upper_strike
(2.0, 1.0, 2.0, 1.0),
# lower_strike >= upper_strike
(2.0, 1.0, 1.0, 1.0),
# gearing = 2
(1.5, 1.0, 2.0, 2.0),
])
def test_payoff_bull_spread(self,
underlying,
lower_strike,
upper_strike,
gearing):
expect = (target.payoff_call(underlying, lower_strike, gearing)
- target.payoff_call(underlying, upper_strike, gearing))
if lower_strike >= upper_strike:
expect = 0.0
actual = target.payoff_bull_spread(
underlying, lower_strike, upper_strike, gearing)
assert(expect == approx(actual))
@pytest.mark.parametrize(
"underlying, lower_strike, upper_strike, gearing",
[
# underlying <= lower_strike,
(1.0, 1.0, 2.0, 1.0),
# lower_strike < underlying < upper_strike,
(1.5, 1.0, 2.0, 1.0),
# underlying >= upper_strike
(2.0, 1.0, 2.0, 1.0),
# lower_strike >= upper_strike
(2.0, 1.0, 1.0, 1.0),
# gearing = 2
(1.5, 1.0, 2.0, 2.0),
])
def test_payoff_bull_spread_fprime(self,
underlying,
lower_strike,
upper_strike,
gearing):
expect = 0.0
if lower_strike < underlying < upper_strike:
expect = gearing
actual = target.payoff_bull_spread_fprime(
underlying, lower_strike, upper_strike, gearing)
assert(expect == approx(actual))
@pytest.mark.parametrize(
"underlying, strike, gearing",
[
# underlying = strike
(2.0, 2.0, 1.0),
# underlying > strike
(3.0, 2.0, 1.0),
# underlying < strike
(1.0, 2.0, 1.0),
# gearing = 2
(3.0, 2.0, 2.0),
])
def test_payoff_straddle(self, underlying, strike, gearing):
expect = (target.payoff_call(underlying, strike, gearing)
+ target.payoff_put(underlying, strike, gearing))
actual = target.payoff_straddle(underlying, strike, gearing)
assert(expect == approx(actual))
@pytest.mark.parametrize(
"underlying, lower_strike, upper_strike, gearing",
[
# underlying <= lower_strike,
(1.0, 1.0, 2.0, 1.0),
# lower_strike < underlying < upper_strike,
(1.5, 1.0, 2.0, 1.0),
# underlying >= upper_strike
(2.0, 1.0, 2.0, 1.0),
# lower_strike >= upper_strike
(2.0, 1.0, 1.0, 1.0),
# gearing = 2
(1.5, 1.0, 2.0, 2.0),
])
def test_payoff_strangle(self,
underlying,
lower_strike,
upper_strike,
gearing):
expect = (target.payoff_put(underlying, lower_strike, gearing)
+ target.payoff_call(underlying, upper_strike, gearing))
if lower_strike >= upper_strike:
return  # degenerate strikes: skip the value check (the old 0.0 return value was ignored by pytest)
actual = target.payoff_strangle(
underlying, lower_strike, upper_strike, gearing)
assert(expect == approx(actual))
@pytest.mark.parametrize(
"underlying, spot_price, spread, gearing",
[
# underlying <= spot_price - spread
(1.0, 2.0, 1.0, 1.0),
# spot_price - spread < underlying < spot_price
(1.5, 2.0, 1.0, 1.0),
# spot_price + spread > underlying > spot_price
(2.5, 2.0, 1.0, 1.0),
# underlying >= spot_price + spread
(3.0, 2.0, 1.0, 1.0),
# spread = 0
(2.0, 2.0, 0.0, 1.0),
# spread < 0
(2.0, 1.0, -1.0, 1.0),
# gearing = 2
(1.5, 2.0, 1.0, 2.0),
])
def test_payoff_butterfly_spread(self,
underlying,
spot_price,
spread,
gearing):
expect = (target.payoff_call(underlying, spot_price - spread, gearing)
- 2.0 * target.payoff_call(underlying, spot_price, gearing)
+ target.payoff_call(
underlying, spot_price + spread, gearing))
if spread < 0.0:
return  # negative spread: skip the value check (the old 0.0 return value was ignored by pytest)
actual = target.payoff_butterfly_spread(
underlying, spot_price, spread, gearing)
assert(expect == approx(actual))
@pytest.mark.parametrize(
"underlying, lower_strike, upper_strike, gearing",
[
# underlying <= lower_strike,
(1.0, 1.0, 2.0, 1.0),
# lower_strike < underlying < upper_strike,
(1.5, 1.0, 2.0, 1.0),
# underlying >= upper_strike
(2.0, 1.0, 2.0, 1.0),
# lower_strike >= upper_strike
(2.0, 1.0, 1.0, 1.0),
# gearing = 2
(1.5, 1.0, 2.0, 2.0),
])
def test_payoff_risk_reversal(self,
underlying,
lower_strike,
upper_strike,
gearing):
expect = (-target.payoff_put(underlying, lower_strike, gearing)
+ target.payoff_call(underlying, upper_strike, gearing))
if lower_strike > upper_strike:
return  # inverted strikes: skip the value check (the old 0.0 return value was ignored by pytest)
actual = target.payoff_risk_reversal(
underlying, lower_strike, upper_strike, gearing)
assert(expect == approx(actual))
class TestBullSpreadUnderlyingPayoffHelper(object):
# before all tests starts
@classmethod
def setup_class(cls):
pass
# after all tests finish
@classmethod
def teardown_class(cls):
pass
# before each test start
def setup(self):
data = sorted(util.get_real(2))
self.lower_strike = data[0]
self.upper_strike = data[1]
data = util.get_real()
self.gearing = data[0]
params = {
"lower_strike": self.lower_strike,
"upper_strike": self.upper_strike,
"gearing": self.gearing
}
self.target = target.BullSpreadUnderlyingPayoffHelper(**params)
# after each test finish
def teardown(self):
pass
def test_make_func(self):
def case1():
swap_rate = self.lower_strike
actual = self.target.make_func()(swap_rate)
assert 0.0 == approx(actual)
case1()
def case2():
swap_rate = self.upper_strike
actual = self.target.make_func()(swap_rate)
expect = (self.upper_strike - self.lower_strike) * self.gearing
assert expect == approx(actual)
case2()
def case3():
swap_rate = util.get_real()[0]
actual = self.target.make_func()(swap_rate)
expect = target.payoff_bull_spread(
swap_rate, self.lower_strike, self.upper_strike, self.gearing)
assert expect == approx(actual)
case3()
def test_make_fprime(self):
swap_rate = util.get_real()[0]
actual = self.target.make_func()(swap_rate)
expect = target.payoff_bull_spread(
swap_rate, self.lower_strike, self.upper_strike, self.gearing)
assert expect == approx(actual)
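# --- Hedged worked example (an addition, not part of the original file) ---
# By hand, for the parametrized case (underlying=1.5, lower=1.0, upper=2.0,
# gearing=1.0): payoff = gearing * (max(u - lower, 0) - max(u - upper, 0))
#                      = 1.0 * (0.5 - 0.0) = 0.5,
# which is what test_payoff_bull_spread asserts against
# target.payoff_bull_spread(1.5, 1.0, 2.0, 1.0).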
| 33.512422
| 78
| 0.504772
| 1,204
| 10,791
| 4.375415
| 0.068937
| 0.031131
| 0.036446
| 0.034169
| 0.84833
| 0.811503
| 0.773538
| 0.733485
| 0.672551
| 0.668945
| 0
| 0.05779
| 0.382634
| 10,791
| 321
| 79
| 33.616822
| 0.732963
| 0.12288
| 0
| 0.617021
| 0
| 0
| 0.041773
| 0
| 0
| 0
| 0
| 0
| 0.059574
| 1
| 0.097872
| false
| 0.029787
| 0.021277
| 0
| 0.140426
| 0.004255
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
430a169f6b01a2c66a118a19133d68cf3b54adf5
| 184
|
py
|
Python
|
AI_RC/get_data.py
|
xiaoqian19940510/TASLP-EAREE
|
21b3b964e169156f69cc379110db4d63fa6dcff4
|
[
"Apache-2.0"
] | 15
|
2021-06-24T08:15:33.000Z
|
2022-03-23T11:52:03.000Z
|
AI_RC/get_data.py
|
RobertIBM/TASLP-EAREE
|
dda8b8de6383466663a90d00dcd9e27b1686cdd1
|
[
"Apache-2.0"
] | 2
|
2021-06-30T14:07:44.000Z
|
2021-07-27T02:39:29.000Z
|
AI_RC/get_data.py
|
RobertIBM/TASLP-EAREE
|
dda8b8de6383466663a90d00dcd9e27b1686cdd1
|
[
"Apache-2.0"
] | 2
|
2021-06-24T08:07:36.000Z
|
2021-07-21T02:35:54.000Z
|
from preprocessing.data_processor import read_squad_data
if __name__ == "__main__":
read_squad_data("AI_RC/data/squad-like_all_train_data.json", "AI_RC/data/",is_training=True)
| 26.285714
| 96
| 0.793478
| 29
| 184
| 4.37931
| 0.655172
| 0.141732
| 0.204724
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092391
| 184
| 6
| 97
| 30.666667
| 0.760479
| 0
| 0
| 0
| 0
| 0
| 0.32967
| 0.225275
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
43191611bf2174a75d5ade64e1c7ce6b97c1e82c
| 29
|
py
|
Python
|
autocalendar/autocalendar.py
|
danielqiang/autocalendar
|
32b268c0b0958be2cfb7e2891172195474feec86
|
[
"MIT"
] | null | null | null |
autocalendar/autocalendar.py
|
danielqiang/autocalendar
|
32b268c0b0958be2cfb7e2891172195474feec86
|
[
"MIT"
] | null | null | null |
autocalendar/autocalendar.py
|
danielqiang/autocalendar
|
32b268c0b0958be2cfb7e2891172195474feec86
|
[
"MIT"
] | null | null | null |
class AutoCalendar:
pass
| 9.666667
| 19
| 0.724138
| 3
| 29
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.241379
| 29
| 2
| 20
| 14.5
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
431b0c930ae29fdf5fd188d79dba3b7e29377101
| 186
|
py
|
Python
|
hubnspoke/common/exceptions.py
|
pawelpreczynski-digica/monaifl
|
992caa152dcee00c57cde5b1e08f2170ca177046
|
[
"Unlicense",
"MIT"
] | 17
|
2019-10-25T13:35:59.000Z
|
2021-01-06T09:18:07.000Z
|
hubnspoke/common/exceptions.py
|
pawelpreczynski-digica/monaifl
|
992caa152dcee00c57cde5b1e08f2170ca177046
|
[
"Unlicense",
"MIT"
] | 21
|
2019-11-05T20:39:47.000Z
|
2020-07-17T17:15:42.000Z
|
hubnspoke/common/exceptions.py
|
pawelpreczynski-digica/monaifl
|
992caa152dcee00c57cde5b1e08f2170ca177046
|
[
"Unlicense",
"MIT"
] | 5
|
2021-06-03T11:52:17.000Z
|
2022-02-22T21:21:58.000Z
|
class InvalidInterface(Exception):
pass
class EmptyInterface(InvalidInterface):
pass
class NotAFileError(Exception):
pass
class MissingFileError(Exception):
pass
| 10.941176
| 39
| 0.741935
| 16
| 186
| 8.625
| 0.4375
| 0.282609
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.193548
| 186
| 16
| 40
| 11.625
| 0.92
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
431ca4b47d906eb17f1341803677aa6870493cf4
| 55
|
py
|
Python
|
transmission_logging/middleware/__init__.py
|
samhelman/django-transmission-logging
|
8ff3227111b0701fa5c1165b7fb51fd0acefbd0f
|
[
"MIT"
] | null | null | null |
transmission_logging/middleware/__init__.py
|
samhelman/django-transmission-logging
|
8ff3227111b0701fa5c1165b7fb51fd0acefbd0f
|
[
"MIT"
] | null | null | null |
transmission_logging/middleware/__init__.py
|
samhelman/django-transmission-logging
|
8ff3227111b0701fa5c1165b7fb51fd0acefbd0f
|
[
"MIT"
] | null | null | null |
from .middleware import (
TransmissionMiddleware,
)
| 18.333333
| 27
| 0.763636
| 4
| 55
| 10.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163636
| 55
| 3
| 28
| 18.333333
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
433293c6ba3c99f41f09649a8592a749f08d3a09
| 177
|
py
|
Python
|
transcription_compare/tokenizer/abstract_tokenizer.py
|
HannaHUp/transcription-compare
|
e25d9651e604a854acba9659602ae1ea5497169e
|
[
"MIT"
] | 2
|
2019-09-03T13:26:55.000Z
|
2020-08-04T20:32:35.000Z
|
transcription_compare/tokenizer/abstract_tokenizer.py
|
HannaHUp/transcription-compare
|
e25d9651e604a854acba9659602ae1ea5497169e
|
[
"MIT"
] | null | null | null |
transcription_compare/tokenizer/abstract_tokenizer.py
|
HannaHUp/transcription-compare
|
e25d9651e604a854acba9659602ae1ea5497169e
|
[
"MIT"
] | null | null | null |
from abc import ABC, abstractmethod
from typing import List
class AbstractTokenizer(ABC):
@abstractmethod
def tokenize(self, token_string: str) -> List:
pass
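# --- Hedged example subclass (an addition, not part of the original file) ---
# A minimal concrete tokenizer showing the contract: take a string, return a
# list of tokens. Splitting on whitespace is an arbitrary illustrative choice.
class WhitespaceTokenizer(AbstractTokenizer):
    def tokenize(self, token_string: str) -> List:
        # any run of whitespace separates tokens
        return token_string.split()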
| 17.7
| 50
| 0.717514
| 21
| 177
| 6
| 0.714286
| 0.269841
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.214689
| 177
| 9
| 51
| 19.666667
| 0.906475
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0.166667
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
433c3c7227cf59add1b4f780682931562a6247c8
| 873
|
py
|
Python
|
Day 32 - Birthday Wisher/art.py
|
atulmkamble/100DaysOfCode
|
ccfb6cb8582be69c63c666a3b097e0130585e4c9
|
[
"MIT"
] | 2
|
2021-12-07T00:39:57.000Z
|
2021-12-07T01:44:21.000Z
|
Day 32 - Birthday Wisher/art.py
|
atulmkamble/100DaysOfCode
|
ccfb6cb8582be69c63c666a3b097e0130585e4c9
|
[
"MIT"
] | null | null | null |
Day 32 - Birthday Wisher/art.py
|
atulmkamble/100DaysOfCode
|
ccfb6cb8582be69c63c666a3b097e0130585e4c9
|
[
"MIT"
] | 1
|
2021-09-12T14:02:27.000Z
|
2021-09-12T14:02:27.000Z
|
"""
This file contains ASCII art used in the Birthday Wisher program
"""
logo = """
,-----. ,--. ,--. ,--. ,--. ,--. ,--.,--. ,--.
| |) /_ `--',--.--.,-' '-.| ,---. ,-| | ,--,--.,--. ,--. | | | |`--' ,---. | ,---. ,---. ,--.--.
| .-. \,--.| .--''-. .-'| .-. |' .-. |' ,-. | \ ' / | |.'.| |,--.( .-' | .-. || .-. :| .--'
| '--' /| || | | | | | | |\ `-' |\ '-' | \ ' | ,'. || |.-' `)| | | |\ --.| |
`------' `--'`--' `--' `--' `--' `---' `--`--'.-' / '--' '--'`--'`----' `--' `--' `----'`--'
`---'
"""
| 62.357143
| 111
| 0.067583
| 12
| 873
| 4.833333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.552119
| 873
| 13
| 112
| 67.153846
| 0.148338
| 0.07331
| 0
| 0
| 0
| 0.25
| 0.980025
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4a2e5ed93e2276995f5727ed501a43eae18a6d9a
| 50
|
py
|
Python
|
reid/models/__init__.py
|
zhangxinyu-tj/PAST
|
67f1f7a780e869aa7867167538edb03faa96dec5
|
[
"MIT"
] | 112
|
2019-08-01T01:18:42.000Z
|
2022-03-29T07:49:35.000Z
|
reid/models/__init__.py
|
zhangxinyu-tj/PAST
|
67f1f7a780e869aa7867167538edb03faa96dec5
|
[
"MIT"
] | 15
|
2019-08-22T09:17:52.000Z
|
2022-03-12T00:18:56.000Z
|
reid/models/__init__.py
|
zhangxinyu-tj/PAST
|
67f1f7a780e869aa7867167538edb03faa96dec5
|
[
"MIT"
] | 25
|
2019-08-27T19:07:04.000Z
|
2022-02-05T05:59:56.000Z
|
from .model import Model
from .resnet import names
| 25
| 25
| 0.82
| 8
| 50
| 5.125
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.14
| 50
| 2
| 25
| 25
| 0.953488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4a33da01d4b1ce12d32fbc07f96fd78d0f3f5f70
| 640
|
py
|
Python
|
kerasRun-test.py
|
newsgac/keras-runs
|
edd947e9ca61dcacc7f03f92612bbc03dfc972f4
|
[
"Apache-2.0"
] | null | null | null |
kerasRun-test.py
|
newsgac/keras-runs
|
edd947e9ca61dcacc7f03f92612bbc03dfc972f4
|
[
"Apache-2.0"
] | null | null | null |
kerasRun-test.py
|
newsgac/keras-runs
|
edd947e9ca61dcacc7f03f92612bbc03dfc972f4
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python3
"""
kerasRun-test.py: tests for kerasRun.py
usage: kerasRun-test.py
20171216 erikt(at)xs4all.nl
"""
import io
import re
import sys
import unittest
from contextlib import redirect_stdout
from kerasRun import makeNumeric
from kerasRun import predict
from kerasRun import readData
from kerasRun import run10cv
from kerasRun import runExperiment
class myTest(unittest.TestCase):
def testMakeNumeric(self): pass
def testPredict(self): pass
def testReadData(self): pass
def testRun10cv(self): pass
def testRunExperiment(self): pass
if __name__ == '__main__':
unittest.main()
| 20
| 43
| 0.742188
| 81
| 640
| 5.753086
| 0.518519
| 0.128755
| 0.193133
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026769
| 0.182813
| 640
| 31
| 44
| 20.645161
| 0.864245
| 0.170313
| 0
| 0
| 0
| 0
| 0.015656
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.277778
| false
| 0.277778
| 0.555556
| 0
| 0.888889
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
4a34387b0525de0477c90bbbf20ba9596f0c5357
| 78
|
py
|
Python
|
tests/test_nothin.py
|
saifuddin778/mkalgo
|
3271c0507680cb62ded3c17c76aee1fbd8050e0d
|
[
"MIT"
] | 21
|
2017-05-06T06:38:46.000Z
|
2021-12-14T10:04:06.000Z
|
tests/test_nothin.py
|
microprediction/mkalgo
|
08d72f690d9a328765871cbd61019dff1f694219
|
[
"MIT"
] | 2
|
2018-05-24T04:27:49.000Z
|
2021-03-01T17:26:34.000Z
|
tests/test_nothin.py
|
saifuddin778/mkalgo
|
3271c0507680cb62ded3c17c76aee1fbd8050e0d
|
[
"MIT"
] | 12
|
2017-07-10T05:37:32.000Z
|
2022-01-11T06:26:17.000Z
|
from mkalgo.utilities import utils, funcs
def test_nothin():
assert True
| 15.6
| 41
| 0.75641
| 11
| 78
| 5.272727
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179487
| 78
| 4
| 42
| 19.5
| 0.90625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4a503834f0b33528075af5bd1a28a491a6be9dfb
| 4,046
|
py
|
Python
|
tests/linear_swap/test_ws_account.py
|
hbdmapi/huobi_sdk_Python
|
a4ee876f947011fb5d66da32853cb3a21d852a4b
|
[
"MIT"
] | 1
|
2022-03-13T16:55:34.000Z
|
2022-03-13T16:55:34.000Z
|
tests/linear_swap/test_ws_account.py
|
hbdmapi/huobi_sdk_Python
|
a4ee876f947011fb5d66da32853cb3a21d852a4b
|
[
"MIT"
] | null | null | null |
tests/linear_swap/test_ws_account.py
|
hbdmapi/huobi_sdk_Python
|
a4ee876f947011fb5d66da32853cb3a21d852a4b
|
[
"MIT"
] | null | null | null |
import sys
import unittest
import time
sys.path.append('../../src')
sys.path.append('..')
from huobi.utils.logger import logger
from huobi.linear_swap.ws.account import Account
from config import ACCESS_KEY, SECRET_KEY
class TestWsAccount(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.api = Account(ACCESS_KEY, SECRET_KEY)
def test_isolated_sub_orders(self):
self.api.sub({"op": "sub", "topic": "orders.btc-usdt"},
lambda x: logger.info(x))
time.sleep(10)
self.api.unsub({"op": "unsub", "topic": "orders.btc-usdt"})
time.sleep(10)
def test_cross_sub_orders(self):
self.api.sub({"op": "sub", "topic": "orders_cross.btc-usdt"},
lambda x: logger.info(x))
time.sleep(10)
self.api.unsub({"op": "unsub", "topic": "orders_cross.btc-usdt"})
time.sleep(10)
def test_isolated_sub_accounts(self):
self.api.sub({"op": "sub", "topic": "accounts.btc-usdt"},
lambda x: logger.info(x))
time.sleep(10)
self.api.unsub({"op": "unsub", "topic": "accounts.btc-usdt"})
time.sleep(10)
def test_cross_sub_accounts(self):
self.api.sub({"op": "sub", "topic": "accounts_cross.usdt"},
lambda x: logger.info(x))
time.sleep(10)
self.api.unsub({"op": "unsub", "topic": "accounts_cross.usdt"})
time.sleep(10)
def test_isolated_sub_positions(self):
self.api.sub({"op": "sub", "topic": "positions.btc-usdt"},
lambda x: logger.info(x))
time.sleep(10)
self.api.unsub({"op": "unsub", "topic": "positions.btc-usdt"})
time.sleep(10)
def test_cross_sub_positions(self):
self.api.sub({"op": "sub", "topic": "positions_cross.btc-usdt"},
lambda x: logger.info(x))
time.sleep(10)
self.api.unsub({"op": "unsub", "topic": "positions_cross.btc-usdt"})
time.sleep(10)
def test_isolated_sub_matchOrders(self):
self.api.sub({"op": "sub", "topic": "matchOrders.btc-usdt"},
lambda x: logger.info(x))
time.sleep(10)
self.api.unsub({"op": "unsub", "topic": "matchOrders.btc-usdt"})
time.sleep(10)
def test_cross_sub_matchOrders(self):
self.api.sub({"op": "sub", "topic": "matchOrders_cross.btc-usdt"},
lambda x: logger.info(x))
time.sleep(10)
self.api.unsub({"op": "unsub", "topic": "matchOrders_cross.btc-usdt"})
time.sleep(10)
def test_sub_liquidation_orders(self):
self.api.sub({"op": "sub", "topic": "public.*.liquidation_orders"},
lambda x: logger.info(x))
time.sleep(10)
self.api.unsub({"op": "unsub", "topic": "public.*.liquidation_orders"})
time.sleep(10)
def test_sub_funding_rate(self):
self.api.sub({"op": "sub", "topic": "public.*.funding_rate"},
lambda x: logger.info(x))
time.sleep(10)
self.api.unsub({"op": "unsub", "topic": "public.*.funding_rate"})
time.sleep(10)
def test_sub_contract_info(self):
self.api.sub({"op": "sub", "topic": "public.*.contract_info"},
lambda x: logger.info(x))
time.sleep(10)
self.api.unsub({"op": "unsub", "topic": "public.*.contract_info"})
time.sleep(10)
def test_isolated_sub_trigger_order(self):
self.api.sub({"op": "sub", "topic": "trigger_order.*"},
lambda x: logger.info(x))
time.sleep(10)
self.api.unsub({"op": "unsub", "topic": "trigger_order.*"})
time.sleep(10)
def test_cross_sub_trigger_order(self):
self.api.sub({"op": "sub", "topic": "trigger_order_cross.btc-usdt"},
lambda x: logger.info(x))
time.sleep(10)
self.api.unsub({"op": "unsub", "topic": "trigger_order_cross.btc-usdt"})
time.sleep(10)
if __name__ == '__main__':
unittest.main(verbosity=2)
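# --- Hedged refactoring sketch (an addition, not part of the original file) ---
# Every test above repeats the same sub / sleep / unsub round trip; a single
# helper would keep each case to one line. The helper name is an assumption.
#
# def _roundtrip(self, topic):
#     self.api.sub({"op": "sub", "topic": topic}, lambda x: logger.info(x))
#     time.sleep(10)
#     self.api.unsub({"op": "unsub", "topic": topic})
#     time.sleep(10)
#
# def test_isolated_sub_orders(self):
#     self._roundtrip("orders.btc-usdt")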
| 36.125
| 80
| 0.57044
| 521
| 4,046
| 4.287908
| 0.111324
| 0.081468
| 0.128021
| 0.081468
| 0.806625
| 0.806625
| 0.776186
| 0.754252
| 0.684423
| 0.630707
| 0
| 0.017457
| 0.249629
| 4,046
| 111
| 81
| 36.45045
| 0.718379
| 0
| 0
| 0.423913
| 0
| 0
| 0.210331
| 0.083539
| 0
| 0
| 0
| 0
| 0
| 1
| 0.152174
| false
| 0
| 0.065217
| 0
| 0.228261
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4a578211848555c6c5fd44c956915c822d8d90ea
| 241
|
py
|
Python
|
backend/src/data/mongo/wrapped/__init__.py
|
rutvikpadhiyar000/github-trends
|
af66cd1419586c6c91b75c3e32013160b2c36bcb
|
[
"MIT"
] | 157
|
2021-09-11T15:53:52.000Z
|
2022-03-27T07:03:09.000Z
|
backend/src/data/mongo/wrapped/__init__.py
|
rutvikpadhiyar000/github-trends
|
af66cd1419586c6c91b75c3e32013160b2c36bcb
|
[
"MIT"
] | 120
|
2021-02-27T21:37:47.000Z
|
2022-03-25T14:44:08.000Z
|
backend/src/data/mongo/wrapped/__init__.py
|
rutvikpadhiyar000/github-trends
|
af66cd1419586c6c91b75c3e32013160b2c36bcb
|
[
"MIT"
] | 5
|
2021-12-06T18:43:01.000Z
|
2022-01-31T07:06:16.000Z
|
from src.data.mongo.wrapped.functions import set_wrapped_user
from src.data.mongo.wrapped.get import get_wrapped_user
from src.data.mongo.wrapped.models import WrappedModel
__all__ = ["set_wrapped_user", "get_wrapped_user", "WrappedModel"]
| 40.166667
| 66
| 0.829876
| 36
| 241
| 5.222222
| 0.361111
| 0.234043
| 0.175532
| 0.255319
| 0.484043
| 0.361702
| 0.361702
| 0
| 0
| 0
| 0
| 0
| 0.074689
| 241
| 5
| 67
| 48.2
| 0.843049
| 0
| 0
| 0
| 0
| 0
| 0.182573
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4ac5d25e7419a609e4ea6ae3d0eedf8eef92671a
| 426
|
py
|
Python
|
config/blog_system_api/base/permissions.py
|
StepanGavrilov/PyConnect-Test
|
b561490ba1df3c887f660aa086c940f9a8ab2158
|
[
"CC0-1.0"
] | null | null | null |
config/blog_system_api/base/permissions.py
|
StepanGavrilov/PyConnect-Test
|
b561490ba1df3c887f660aa086c940f9a8ab2158
|
[
"CC0-1.0"
] | null | null | null |
config/blog_system_api/base/permissions.py
|
StepanGavrilov/PyConnect-Test
|
b561490ba1df3c887f660aa086c940f9a8ab2158
|
[
"CC0-1.0"
] | null | null | null |
from rest_framework.permissions import BasePermission
class IsAuthorEntry(BasePermission):
"""
Check that the request user is the object's author
"""
def has_object_permission(self, request, view, obj):
return obj.owner == request.user
class IsAuthorCommentEntry(BasePermission):
"""
Check that the request user is the object's author
"""
def has_object_permission(self, request, view, obj):
return obj.author == request.user
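# --- Hedged usage sketch (an addition, not part of the original file) ---
# Object-level permissions are attached per view. The view and model below
# are assumptions; permission_classes itself is standard DRF API.
#
# from rest_framework import viewsets
#
# class EntryViewSet(viewsets.ModelViewSet):
#     queryset = Entry.objects.all()        # hypothetical Entry model with an owner field
#     permission_classes = [IsAuthorEntry]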
| 23.666667
| 56
| 0.701878
| 44
| 426
| 6.681818
| 0.545455
| 0.156463
| 0.197279
| 0.244898
| 0.578231
| 0.578231
| 0.578231
| 0.578231
| 0.578231
| 0.578231
| 0
| 0
| 0.206573
| 426
| 17
| 57
| 25.058824
| 0.869822
| 0.115023
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0.285714
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
434e3b223aebebca3834fd0cbb4f784fd407c275
| 159
|
py
|
Python
|
learnergy/models/extra/__init__.py
|
anukaal/learnergy
|
704fc2b3fcb80df41ed28d750dc4e6475df23315
|
[
"Apache-2.0"
] | 39
|
2020-02-27T00:47:45.000Z
|
2022-03-28T14:57:26.000Z
|
learnergy/models/extra/__init__.py
|
anukaal/learnergy
|
704fc2b3fcb80df41ed28d750dc4e6475df23315
|
[
"Apache-2.0"
] | 5
|
2021-05-11T08:23:37.000Z
|
2022-01-20T12:50:59.000Z
|
learnergy/models/extra/__init__.py
|
anukaal/learnergy
|
704fc2b3fcb80df41ed28d750dc4e6475df23315
|
[
"Apache-2.0"
] | 6
|
2020-04-15T00:23:13.000Z
|
2022-01-29T16:22:05.000Z
|
"""A package contaning additional RBM-based models (networks) for all common learnergy modules.
"""
from learnergy.models.extra.sigmoid_rbm import SigmoidRBM
| 31.8
| 95
| 0.805031
| 21
| 159
| 6.047619
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113208
| 159
| 4
| 96
| 39.75
| 0.900709
| 0.578616
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
438e4afa3f5807de59f4fe7aa637d73ddfeec755
| 414
|
py
|
Python
|
pandas/api/types/__init__.py
|
vimalromeo/pandas
|
7c14e4f14aff216be558bf5d4d2d00b4838c2360
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 6,989
|
2017-07-18T06:23:18.000Z
|
2022-03-31T15:58:36.000Z
|
venv/lib/python3.7/site-packages/pandas/api/types/__init__.py
|
John1001Song/Big-Data-Robo-Adviser
|
9444dce96954c546333d5aecc92a06c3bfd19aa5
|
[
"MIT"
] | 1,978
|
2017-07-18T09:17:58.000Z
|
2022-03-31T14:28:43.000Z
|
venv/lib/python3.7/site-packages/pandas/api/types/__init__.py
|
John1001Song/Big-Data-Robo-Adviser
|
9444dce96954c546333d5aecc92a06c3bfd19aa5
|
[
"MIT"
] | 1,228
|
2017-07-18T09:03:13.000Z
|
2022-03-29T05:57:40.000Z
|
""" public toolkit API """
from pandas.core.dtypes.api import * # noqa
from pandas.core.dtypes.dtypes import (CategoricalDtype, # noqa
DatetimeTZDtype,
PeriodDtype,
IntervalDtype)
from pandas.core.dtypes.concat import union_categoricals # noqa
from pandas._libs.lib import infer_dtype # noqa
| 41.4
| 64
| 0.562802
| 38
| 414
| 6.052632
| 0.526316
| 0.173913
| 0.182609
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.371981
| 414
| 9
| 65
| 46
| 0.884615
| 0.096618
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.571429
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4398635a712e73c92e44ba4950b5b52554081867
| 77
|
py
|
Python
|
examples/missingreturntype.py
|
quynhanh-ngx/pytago
|
de976ad8d85702ae665e97978bc4a75d282c857f
|
[
"MIT"
] | 206
|
2021-06-24T16:16:13.000Z
|
2022-03-31T07:44:17.000Z
|
examples/missingreturntype.py
|
quynhanh-ngx/pytago
|
de976ad8d85702ae665e97978bc4a75d282c857f
|
[
"MIT"
] | 13
|
2021-06-24T17:51:36.000Z
|
2022-02-23T10:07:17.000Z
|
examples/missingreturntype.py
|
quynhanh-ngx/pytago
|
de976ad8d85702ae665e97978bc4a75d282c857f
|
[
"MIT"
] | 14
|
2021-06-26T02:19:45.000Z
|
2022-03-30T03:02:49.000Z
|
def main():
print(add(1, 3))
def add(a: int, b: int):
return a + b
| 11
| 24
| 0.506494
| 15
| 77
| 2.6
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 0.298701
| 77
| 6
| 25
| 12.833333
| 0.685185
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.25
| 0.75
| 0.25
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
43b000028e2b2e5f9dc001cc8e99e7463504fe00
| 338
|
py
|
Python
|
useraudit/signals.py
|
solee-dev/django-useraudit
|
cd9114abb99755c0c43e13e67b02a2e783b4411c
|
[
"BSD-3-Clause"
] | null | null | null |
useraudit/signals.py
|
solee-dev/django-useraudit
|
cd9114abb99755c0c43e13e67b02a2e783b4411c
|
[
"BSD-3-Clause"
] | null | null | null |
useraudit/signals.py
|
solee-dev/django-useraudit
|
cd9114abb99755c0c43e13e67b02a2e783b4411c
|
[
"BSD-3-Clause"
] | null | null | null |
from django.dispatch import Signal
password_will_expire_warning = Signal(providing_args=["user", "days_left"])
password_has_expired = Signal(providing_args=["user"])
account_has_expired = Signal(providing_args=["user"])
login_failure_limit_reached = Signal(providing_args=["user"])
account_made_inactive = Signal(providing_args=["user"])
| 48.285714
| 75
| 0.810651
| 44
| 338
| 5.818182
| 0.522727
| 0.292969
| 0.371094
| 0.449219
| 0.402344
| 0.257813
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059172
| 338
| 7
| 76
| 48.285714
| 0.805031
| 0
| 0
| 0
| 0
| 0
| 0.085546
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.333333
| 0.166667
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
43d0a67e877a10b643419170198beb6f39958cd9
| 120
|
py
|
Python
|
commands/pre_1_13/parser/primitives/__init__.py
|
Red-Teapot/mc-commandblock-1.13-update
|
64106e1ecb5adca2aff1eeb3a1fcc11486940000
|
[
"MIT"
] | 1
|
2020-07-27T16:53:26.000Z
|
2020-07-27T16:53:26.000Z
|
commands/pre_1_13/parser/primitives/__init__.py
|
Red-Teapot/mc-commandblock-1.13-update
|
64106e1ecb5adca2aff1eeb3a1fcc11486940000
|
[
"MIT"
] | 5
|
2019-01-02T14:21:32.000Z
|
2019-07-07T05:39:39.000Z
|
commands/pre_1_13/parser/primitives/__init__.py
|
Red-Teapot/mc-commandblock-1.13-update
|
64106e1ecb5adca2aff1eeb3a1fcc11486940000
|
[
"MIT"
] | null | null | null |
from .id import ID
from .coordinate import Coordinate
from .selector import Selector
from .block_state import BlockState
| 30
| 35
| 0.841667
| 17
| 120
| 5.882353
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 120
| 4
| 35
| 30
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
78e79986957c4f4514302c7b8a799be18e718167
| 39
|
py
|
Python
|
codeup/1001.py
|
love-adela/algorithm
|
4ccd02173c96f8369962f1fd4e5166a221690fa2
|
[
"MIT"
] | 3
|
2019-03-09T05:19:23.000Z
|
2019-04-06T09:26:36.000Z
|
codeup/1001.py
|
love-adela/algorithm
|
4ccd02173c96f8369962f1fd4e5166a221690fa2
|
[
"MIT"
] | 1
|
2020-02-23T10:38:04.000Z
|
2020-02-23T10:38:04.000Z
|
codeup/1001.py
|
love-adela/algorithm
|
4ccd02173c96f8369962f1fd4e5166a221690fa2
|
[
"MIT"
] | 1
|
2019-05-22T13:47:53.000Z
|
2019-05-22T13:47:53.000Z
|
import sys
sys.stdout.write('Hello\n')
| 13
| 27
| 0.74359
| 7
| 39
| 4.142857
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 39
| 2
| 28
| 19.5
| 0.805556
| 0
| 0
| 0
| 0
| 0
| 0.179487
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
6014354053255457d1f2a74c2dd06cda6584a49b
| 114
|
py
|
Python
|
pycrostates/segmentation/__init__.py
|
vferat/pycrostates
|
ac324fa5c61c0024d3720e3956cb93258cb86f9c
|
[
"BSD-3-Clause"
] | 11
|
2021-11-27T04:02:55.000Z
|
2022-03-14T13:55:32.000Z
|
pycrostates/segmentation/__init__.py
|
vferat/pycrostates
|
ac324fa5c61c0024d3720e3956cb93258cb86f9c
|
[
"BSD-3-Clause"
] | 17
|
2021-01-05T15:20:19.000Z
|
2022-03-24T11:02:59.000Z
|
pycrostates/segmentation/__init__.py
|
vferat/pycrostates
|
ac324fa5c61c0024d3720e3956cb93258cb86f9c
|
[
"BSD-3-Clause"
] | 3
|
2021-12-13T15:19:13.000Z
|
2022-03-30T13:57:40.000Z
|
from .segmentation import RawSegmentation, EpochsSegmentation
__all__ = ('RawSegmentation', 'EpochsSegmentation')
| 38
| 61
| 0.833333
| 8
| 114
| 11.375
| 0.75
| 0.725275
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 114
| 3
| 62
| 38
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0.286957
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
603bf6e50cf4ee10163a6412a4c4d6f24b5c85a1
| 1,091
|
py
|
Python
|
python/anyascii/_data/_0a0.py
|
casept/anyascii
|
d4f426b91751254b68eaa84c6cd23099edd668e6
|
[
"ISC"
] | null | null | null |
python/anyascii/_data/_0a0.py
|
casept/anyascii
|
d4f426b91751254b68eaa84c6cd23099edd668e6
|
[
"ISC"
] | null | null | null |
python/anyascii/_data/_0a0.py
|
casept/anyascii
|
d4f426b91751254b68eaa84c6cd23099edd668e6
|
[
"ISC"
] | null | null | null |
b='it ix i ip iet iex ie iep at ax a ap uox uo uop ot ox o op ex e w bit bix bi bip biet biex bie biep bat bax ba bap buox buo buop bot box bo bop bex be bep but bux bu bup burx bur byt byx by byp byrx byr pit pix pi pip piex pie piep pat pax pa pap puox puo puop pot pox po pop put pux pu pup purx pur pyt pyx py pyp pyrx pyr bbit bbix bbi bbip bbiet bbiex bbie bbiep bbat bbax bba bbap bbuox bbuo bbuop bbot bbox bbo bbop bbex bbe bbep bbut bbux bbu bbup bburx bbur bbyt bbyx bby bbyp nbit nbix nbi nbip nbiex nbie nbiep nbat nbax nba nbap nbot nbox nbo nbop nbut nbux nbu nbup nburx nbur nbyt nbyx nby nbyp nbyrx nbyr hmit hmix hmi hmip hmiex hmie hmiep hmat hmax hma hmap hmuox hmuo hmuop hmot hmox hmo hmop hmut hmux hmu hmup hmurx hmur hmyx hmy hmyp hmyrx hmyr mit mix mi mip miex mie miep mat max ma map muot muox muo muop mot mox mo mop mex me mut mux mu mup murx mur myt myx my myp fit fix fi fip fat fax fa fap fox fo fop fut fux fu fup furx fur fyt fyx fy fyp vit vix vi vip viet viex vie viep vat vax va vap vot vox vo vop vex vep vut vux vu vup vurx vur vyt vyx vy vyp vyrx vyr'
| 1,091
| 1,091
| 0.76352
| 257
| 1,091
| 3.241245
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.233731
| 1,091
| 1
| 1,091
| 1,091
| 0.996411
| 0
| 0
| 0
| 0
| 1
| 0.995421
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6049cd4c52134a332eb2a35dd91720475bd650f5
| 51
|
py
|
Python
|
pettingzoo/classic/texas_holdem_v4.py
|
RedTachyon/PettingZoo
|
0c4be0ca0de5a11bf8eff3f7b87976edcacd093e
|
[
"Apache-2.0"
] | 846
|
2020-05-12T05:55:00.000Z
|
2021-10-08T19:38:40.000Z
|
pettingzoo/classic/texas_holdem_v4.py
|
RedTachyon/PettingZoo
|
0c4be0ca0de5a11bf8eff3f7b87976edcacd093e
|
[
"Apache-2.0"
] | 237
|
2020-04-27T06:01:39.000Z
|
2021-10-13T02:55:54.000Z
|
pettingzoo/classic/texas_holdem_v4.py
|
RedTachyon/PettingZoo
|
0c4be0ca0de5a11bf8eff3f7b87976edcacd093e
|
[
"Apache-2.0"
] | 126
|
2020-05-29T04:20:29.000Z
|
2021-10-13T05:31:12.000Z
|
from .rlcard_envs.texas_holdem import env, raw_env
| 25.5
| 50
| 0.843137
| 9
| 51
| 4.444444
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098039
| 51
| 1
| 51
| 51
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
606d3eb06a16c7e70c233ee9952f4543ac55ee4e
| 117
|
py
|
Python
|
keras_contrib/layers/noise.py
|
stante/keras-contrib
|
890d3e2e5729d6cc1d5e3d9d9310ed424c4cc92f
|
[
"MIT"
] | 9
|
2019-07-03T12:45:13.000Z
|
2022-02-17T09:18:30.000Z
|
keras_contrib/layers/noise.py
|
stante/keras-contrib
|
890d3e2e5729d6cc1d5e3d9d9310ed424c4cc92f
|
[
"MIT"
] | null | null | null |
keras_contrib/layers/noise.py
|
stante/keras-contrib
|
890d3e2e5729d6cc1d5e3d9d9310ed424c4cc92f
|
[
"MIT"
] | 7
|
2019-05-27T08:32:25.000Z
|
2021-05-28T11:46:27.000Z
|
from __future__ import absolute_import
from keras.layers import Layer
from .. import backend as K
import numpy as np
| 23.4
| 38
| 0.820513
| 19
| 117
| 4.789474
| 0.631579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 117
| 4
| 39
| 29.25
| 0.919192
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
60aadfb680327bd44fdae2dad91f3e72a1d4ae11
| 113
|
py
|
Python
|
LifeIsShort/b/lambda_.py
|
loopyme/Life-is-Short
|
bd37e8597971283aa35bc31e29543c071f03acba
|
[
"MIT"
] | 1
|
2020-04-02T02:03:21.000Z
|
2020-04-02T02:03:21.000Z
|
LifeIsShort/c/func_as_return.py
|
loopyme/Life-is-Short
|
bd37e8597971283aa35bc31e29543c071f03acba
|
[
"MIT"
] | null | null | null |
LifeIsShort/c/func_as_return.py
|
loopyme/Life-is-Short
|
bd37e8597971283aa35bc31e29543c071f03acba
|
[
"MIT"
] | null | null | null |
words = "Life is short"
def lazy_print(text):
    return lambda: print(text)
task = lazy_print(words)
task()
| 11.3
| 30
| 0.681416
| 17
| 113
| 4.411765
| 0.647059
| 0.24
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.19469
| 113
| 9
| 31
| 12.555556
| 0.824176
| 0
| 0
| 0
| 0
| 0
| 0.115044
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0
| 0.2
| 0.4
| 0.6
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
|
0
| 5
|
714f8e93dcd2aa29643f667e5931f0b471622b68
| 42
|
py
|
Python
|
tests/__init__.py
|
martinlarsalbert/hooks
|
a2f930030c93c83629b4f0d019f028f4446a07a3
|
[
"MIT"
] | 1
|
2021-05-21T06:05:20.000Z
|
2021-05-21T06:05:20.000Z
|
tests/__init__.py
|
martinlarsalbert/hooks
|
a2f930030c93c83629b4f0d019f028f4446a07a3
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
martinlarsalbert/hooks
|
a2f930030c93c83629b4f0d019f028f4446a07a3
|
[
"MIT"
] | null | null | null |
import os
path = os.path.dirname(__file__)
| 21
| 32
| 0.785714
| 7
| 42
| 4.142857
| 0.714286
| 0.413793
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 42
| 2
| 32
| 21
| 0.763158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
71d62f75e982180f7ff12fab45e91d1d513a5c9b
| 1,355
|
py
|
Python
|
tests/tests_keras/test_correctness_gpu.py
|
ThomasRot/rational_activations
|
1fa26d1ee5f3c916eda00c899afa96eccb960143
|
[
"MIT"
] | 26
|
2020-11-17T11:44:52.000Z
|
2022-02-25T22:14:49.000Z
|
tests/tests_keras/test_correctness_gpu.py
|
ThomasRot/rational_activations
|
1fa26d1ee5f3c916eda00c899afa96eccb960143
|
[
"MIT"
] | 8
|
2020-11-27T11:34:14.000Z
|
2021-11-11T08:23:13.000Z
|
tests/tests_keras/test_correctness_gpu.py
|
ThomasRot/rational_activations
|
1fa26d1ee5f3c916eda00c899afa96eccb960143
|
[
"MIT"
] | 8
|
2020-11-21T10:12:28.000Z
|
2022-01-15T16:54:48.000Z
|
"""
This file tests that cuda calculations produce correct results.
"""
from tensorflow.nn import leaky_relu
from tensorflow.math import tanh, sigmoid
from .helpers import _test_template
# test cuda execution
CUDA = True
def test_a_on_cuda_lrelu():
    _test_template(version='A', approx_func=leaky_relu, cuda=CUDA)
def test_a_on_cuda_tanh():
    _test_template(version='A', approx_func=tanh, cuda=CUDA)
def test_a_on_cuda_sigmoid():
    _test_template(version='A', approx_func=sigmoid, cuda=CUDA)
def test_b_on_cuda_lrelu():
    _test_template(version='B', approx_func=leaky_relu, cuda=CUDA)
def test_b_on_cuda_tanh():
    _test_template(version='B', approx_func=tanh, cuda=CUDA)
def test_b_on_cuda_sigmoid():
    _test_template(version='B', approx_func=sigmoid, cuda=CUDA)
def test_c_on_cuda_lrelu():
    _test_template(version='C', approx_func=leaky_relu, cuda=CUDA)
def test_c_on_cuda_tanh():
    _test_template(version='C', approx_func=tanh, cuda=CUDA)
def test_c_on_cuda_sigmoid():
    _test_template(version='C', approx_func=sigmoid, cuda=CUDA)
def test_d_on_cuda_lrelu():
    _test_template(version='D', approx_func=leaky_relu, cuda=CUDA)
def test_d_on_cuda_tanh():
    _test_template(version='D', approx_func=tanh, cuda=CUDA)
def test_d_on_cuda_sigmoid():
    _test_template(version='D', approx_func=sigmoid, cuda=CUDA)
| 22.966102
| 66
| 0.762362
| 215
| 1,355
| 4.381395
| 0.153488
| 0.165605
| 0.242038
| 0.175159
| 0.815287
| 0.79087
| 0.451168
| 0.144374
| 0
| 0
| 0
| 0
| 0.121771
| 1,355
| 58
| 67
| 23.362069
| 0.791597
| 0.061993
| 0
| 0
| 0
| 0
| 0.009501
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0.107143
| 0
| 0.535714
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
71e165b43c610bfe2d1562aef1e0c4fd2186f559
| 550
|
py
|
Python
|
examples/inheritance/inherit.py
|
irmen/Pyro3
|
5bd531088d9a11ec83556a0429f18df6cb5cd437
|
[
"MIT"
] | 3
|
2018-01-13T20:50:41.000Z
|
2020-02-24T13:35:08.000Z
|
examples/inheritance/inherit.py
|
irmen/Pyro3
|
5bd531088d9a11ec83556a0429f18df6cb5cd437
|
[
"MIT"
] | null | null | null |
examples/inheritance/inherit.py
|
irmen/Pyro3
|
5bd531088d9a11ec83556a0429f18df6cb5cd437
|
[
"MIT"
] | 6
|
2015-03-21T20:34:05.000Z
|
2021-06-08T04:04:33.000Z
|
from ftplib import FTP
import Pyro.core
class base1(object):
    def meth1(self):
        return 'base1.meth1'
    def meth2(self):
        return 'base1.meth2'
class base2(object):
    def meth2(self):
        return 'base2.meth2'
    def meth3(self):
        return 'base2.meth3'
class sub(base1,base2):
    def meth2(self):
        return 'sub.meth2 (overridden)'
    def meth4(self):
        return 'sub.meth4'
class Fsub(base1,base2,FTP):
    def meth2(self):
        return 'Fsub.meth2 (overridden)'
    def meth4(self):
        return 'Fsub.meth4'
class Gsub(base1, Pyro.core.ObjBase):
    def ding(self):
        pass
| 17.741935
| 37
| 0.703636
| 82
| 550
| 4.719512
| 0.292683
| 0.206718
| 0.124031
| 0.186047
| 0.170543
| 0.170543
| 0
| 0
| 0
| 0
| 0
| 0.05819
| 0.156364
| 550
| 30
| 38
| 18.333333
| 0.775862
| 0
| 0
| 0.24
| 0
| 0
| 0.196364
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.36
| false
| 0.04
| 0.08
| 0.32
| 0.96
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
e07d969fc652f21d0cc8deff806a7360fa07e85d
| 37
|
py
|
Python
|
mdd/__init__.py
|
mvcisback/py-mdd
|
9d5bdf297ab3bee86e939a2dae8d7fbe91146a44
|
[
"MIT"
] | 1
|
2021-11-28T03:47:11.000Z
|
2021-11-28T03:47:11.000Z
|
mdd/__init__.py
|
mvcisback/py-mdd
|
9d5bdf297ab3bee86e939a2dae8d7fbe91146a44
|
[
"MIT"
] | 1
|
2020-11-12T07:33:03.000Z
|
2020-11-13T09:38:22.000Z
|
mdd/__init__.py
|
mvcisback/py-mdd
|
9d5bdf297ab3bee86e939a2dae8d7fbe91146a44
|
[
"MIT"
] | null | null | null |
# flake8: noqa
from mdd.mdd import *
| 12.333333
| 21
| 0.702703
| 6
| 37
| 4.333333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033333
| 0.189189
| 37
| 2
| 22
| 18.5
| 0.833333
| 0.324324
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|