hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7d7bfde87e7be896e80bd91366d8ba499d2ef258
| 131
|
py
|
Python
|
InvenTree/common/test_views.py
|
carlos-riquelme/InvenTree
|
724dd2a9c82e4c10e14bd6aba8f48553b183fef9
|
[
"MIT"
] | 5
|
2020-08-14T06:18:57.000Z
|
2022-01-11T01:22:56.000Z
|
InvenTree/common/test_views.py
|
carlos-riquelme/InvenTree
|
724dd2a9c82e4c10e14bd6aba8f48553b183fef9
|
[
"MIT"
] | 27
|
2021-04-12T22:05:39.000Z
|
2022-03-13T20:33:54.000Z
|
InvenTree/common/test_views.py
|
carlos-riquelme/InvenTree
|
724dd2a9c82e4c10e14bd6aba8f48553b183fef9
|
[
"MIT"
] | 1
|
2021-12-10T04:47:10.000Z
|
2021-12-10T04:47:10.000Z
|
"""
Unit tests for the views associated with the 'common' app
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
| 18.714286
| 57
| 0.70229
| 18
| 131
| 4.833333
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009174
| 0.167939
| 131
| 6
| 58
| 21.833333
| 0.788991
| 0.610687
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7d8ceb95813a801f9859bd922c218e869bfcea10
| 315
|
py
|
Python
|
Python/libraries/recognizers-number/recognizers_number/resources/__init__.py
|
ahmedabuamra/Recognizers-Text
|
31193d89d3532839742992a2755c1d8539c68116
|
[
"MIT"
] | 2
|
2017-08-22T11:21:19.000Z
|
2017-09-17T20:06:00.000Z
|
Python/libraries/recognizers-number/recognizers_number/resources/__init__.py
|
ahmedabuamra/Recognizers-Text
|
31193d89d3532839742992a2755c1d8539c68116
|
[
"MIT"
] | 9
|
2020-07-21T11:36:38.000Z
|
2020-08-26T11:40:29.000Z
|
Python/libraries/recognizers-number/recognizers_number/resources/__init__.py
|
ahmedabuamra/Recognizers-Text
|
31193d89d3532839742992a2755c1d8539c68116
|
[
"MIT"
] | 1
|
2020-07-30T11:53:22.000Z
|
2020-07-30T11:53:22.000Z
|
from .base_numbers import BaseNumbers
from .chinese_numeric import ChineseNumeric
from .english_numeric import EnglishNumeric
from .french_numeric import FrenchNumeric
from .portuguese_numeric import PortugueseNumeric
from .spanish_numeric import SpanishNumeric
from .japanese_numeric import JapaneseNumeric
| 39.375
| 50
| 0.866667
| 35
| 315
| 7.6
| 0.514286
| 0.293233
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 315
| 7
| 51
| 45
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7d8d8de2e415abcd118468d209df0b738ec107d0
| 30,430
|
py
|
Python
|
tests/cupy_tests/test_cusparse.py
|
Pandinosaurus/cupy
|
c98064928c8242d0c6a07e2c714e6c811f684a4e
|
[
"MIT"
] | 1
|
2021-05-16T11:52:30.000Z
|
2021-05-16T11:52:30.000Z
|
tests/cupy_tests/test_cusparse.py
|
Pandinosaurus/cupy
|
c98064928c8242d0c6a07e2c714e6c811f684a4e
|
[
"MIT"
] | null | null | null |
tests/cupy_tests/test_cusparse.py
|
Pandinosaurus/cupy
|
c98064928c8242d0c6a07e2c714e6c811f684a4e
|
[
"MIT"
] | null | null | null |
import pickle
import unittest
import numpy
import pytest
try:
import scipy.sparse
except ImportError:
pass
import cupy
from cupy import testing
from cupy import cusparse
from cupyx.scipy import sparse
if cupy.cuda.runtime.is_hip:
pytest.skip('HIP sparse support is not yet ready',
allow_module_level=True)
class TestMatDescriptor(unittest.TestCase):
def test_create(self):
md = cusparse.MatDescriptor.create()
assert isinstance(md.descriptor, int)
def test_pickle(self):
md = cusparse.MatDescriptor.create()
md2 = pickle.loads(pickle.dumps(md))
assert isinstance(md2.descriptor, int)
assert md.descriptor != md2.descriptor
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64],
'transa': [True, False],
}))
@testing.with_requires('scipy')
class TestCsrmm(unittest.TestCase):
alpha = 0.5
beta = 0.25
def setUp(self):
self.op_a = scipy.sparse.random(2, 3, density=0.5, dtype=self.dtype)
if self.transa:
self.a = self.op_a.T
else:
self.a = self.op_a
self.b = numpy.random.uniform(-1, 1, (3, 4)).astype(self.dtype)
self.c = numpy.random.uniform(-1, 1, (2, 4)).astype(self.dtype)
def test_csrmm(self):
if not cusparse.check_availability('csrmm'):
pytest.skip('csrmm is not available')
a = sparse.csr_matrix(self.a)
b = cupy.array(self.b, order='f')
y = cupy.cusparse.csrmm(a, b, alpha=self.alpha, transa=self.transa)
expect = self.alpha * self.op_a.dot(self.b)
testing.assert_array_almost_equal(y, expect)
def test_csrmm_with_c(self):
if not cusparse.check_availability('csrmm'):
pytest.skip('csrmm is not available')
a = sparse.csr_matrix(self.a)
b = cupy.array(self.b, order='f')
c = cupy.array(self.c, order='f')
y = cupy.cusparse.csrmm(
a, b, c=c, alpha=self.alpha, beta=self.beta, transa=self.transa)
expect = self.alpha * self.op_a.dot(self.b) + self.beta * self.c
assert y is c
testing.assert_array_almost_equal(y, expect)
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64],
'trans': [(False, False), (True, False), (False, True)],
}))
@testing.with_requires('scipy')
class TestCsrmm2(unittest.TestCase):
alpha = 0.5
beta = 0.25
def setUp(self):
self.transa, self.transb = self.trans
self.op_a = scipy.sparse.random(2, 3, density=0.5, dtype=self.dtype)
if self.transa:
self.a = self.op_a.T
else:
self.a = self.op_a
self.op_b = numpy.random.uniform(-1, 1, (3, 4)).astype(self.dtype)
if self.transb:
self.b = self.op_b.T
else:
self.b = self.op_b
self.c = numpy.random.uniform(-1, 1, (2, 4)).astype(self.dtype)
def test_csrmm2(self):
if not cusparse.check_availability('csrmm2'):
pytest.skip('csrmm2 is not available')
a = sparse.csr_matrix(self.a)
b = cupy.array(self.b, order='f')
y = cupy.cusparse.csrmm2(
a, b, alpha=self.alpha, transa=self.transa, transb=self.transb)
expect = self.alpha * self.op_a.dot(self.op_b)
testing.assert_array_almost_equal(y, expect)
def test_csrmm2_with_c(self):
if not cusparse.check_availability('csrmm2'):
pytest.skip('csrmm2 is not available')
a = sparse.csr_matrix(self.a)
b = cupy.array(self.b, order='f')
c = cupy.array(self.c, order='f')
y = cupy.cusparse.csrmm2(
a, b, c=c, alpha=self.alpha, beta=self.beta,
transa=self.transa, transb=self.transb)
expect = self.alpha * self.op_a.dot(self.op_b) + self.beta * self.c
assert y is c
testing.assert_array_almost_equal(y, expect)
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
'shape': [(3, 4), (4, 3)]
}))
@testing.with_requires('scipy>=1.2.0')
class TestCsrgeam(unittest.TestCase):
alpha = 0.5
beta = 0.25
def setUp(self):
m, n = self.shape
self.a = scipy.sparse.random(m, n, density=0.3, dtype=self.dtype)
self.b = scipy.sparse.random(m, n, density=0.3, dtype=self.dtype)
def test_csrgeam(self):
if not cupy.cusparse.check_availability('csrgeam'):
pytest.skip('csrgeam is not available')
a = sparse.csr_matrix(self.a)
b = sparse.csr_matrix(self.b)
c = cupy.cusparse.csrgeam(a, b, alpha=self.alpha, beta=self.beta)
expect = self.alpha * self.a + self.beta * self.b
testing.assert_array_almost_equal(c.toarray(), expect.toarray())
def test_csrgeam2(self):
if not cupy.cusparse.check_availability('csrgeam2'):
pytest.skip('csrgeam2 is not available')
a = sparse.csr_matrix(self.a)
b = sparse.csr_matrix(self.b)
c = cupy.cusparse.csrgeam2(a, b, alpha=self.alpha, beta=self.beta)
expect = self.alpha * self.a + self.beta * self.b
testing.assert_array_almost_equal(c.toarray(), expect.toarray())
@testing.with_requires('scipy')
class TestCsrgeamInvalidCases(unittest.TestCase):
dtype = numpy.float32
shape = (4, 3)
def setUp(self):
m, n = self.shape
self.a = scipy.sparse.random(m, n, density=0.3, dtype=self.dtype)
self.b = scipy.sparse.random(m, n, density=0.3, dtype=self.dtype)
def test_csrgeam_invalid_format(self):
if not cupy.cusparse.check_availability('csrgeam'):
pytest.skip('csrgeam is not available')
a = sparse.csc_matrix(self.a)
b = sparse.csr_matrix(self.b)
with self.assertRaises(TypeError):
cupy.cusparse.csrgeam(a, b)
with self.assertRaises(TypeError):
cupy.cusparse.csrgeam(b, a)
def test_csrgeam_invalid_shape(self):
if not cupy.cusparse.check_availability('csrgeam'):
pytest.skip('csrgeam is not available')
a = sparse.csr_matrix(self.a.T)
b = sparse.csr_matrix(self.b)
with self.assertRaises(ValueError):
cupy.cusparse.csrgeam(a, b)
def test_csrgeam_availability(self):
if not cupy.cusparse.check_availability('csrgeam'):
a = sparse.csr_matrix(self.a)
b = sparse.csr_matrix(self.b)
with self.assertRaises(RuntimeError):
cupy.cusparse.csrgeam(a, b)
def test_csrgeam2_invalid_format(self):
if not cupy.cusparse.check_availability('csrgeam2'):
pytest.skip('csrgeam2 is not available')
a = sparse.csc_matrix(self.a)
b = sparse.csr_matrix(self.b)
with self.assertRaises(TypeError):
cupy.cusparse.csrgeam2(a, b)
with self.assertRaises(TypeError):
cupy.cusparse.csrgeam2(b, a)
def test_csrgeam2_invalid_shape(self):
if not cupy.cusparse.check_availability('csrgeam2'):
pytest.skip('csrgeam2 is not available')
a = sparse.csr_matrix(self.a.T)
b = sparse.csr_matrix(self.b)
with self.assertRaises(ValueError):
cupy.cusparse.csrgeam2(a, b)
def test_csrgeam2_availability(self):
if not cupy.cusparse.check_availability('csrgeam2'):
a = sparse.csr_matrix(self.a)
b = sparse.csr_matrix(self.b)
with self.assertRaises(RuntimeError):
cupy.cusparse.csrgeam2(a, b)
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64],
'transa': [False, True],
'transb': [False, True],
}))
@testing.with_requires('scipy')
class TestCsrgemm(unittest.TestCase):
def setUp(self):
self.op_a = scipy.sparse.random(2, 3, density=0.5, dtype=self.dtype)
if self.transa:
self.a = self.op_a.T
else:
self.a = self.op_a
self.op_b = scipy.sparse.random(3, 4, density=0.5, dtype=self.dtype)
if self.transb:
self.b = self.op_b.T
else:
self.b = self.op_b
def test_csrgemm(self):
if not cupy.cusparse.check_availability('csrgemm'):
pytest.skip('csrgemm is not available.')
a = sparse.csr_matrix(self.a)
b = sparse.csr_matrix(self.b)
y = cupy.cusparse.csrgemm(a, b, transa=self.transa, transb=self.transb)
expect = self.op_a.dot(self.op_b)
testing.assert_array_almost_equal(y.toarray(), expect.toarray())
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
'shape': [(2, 3, 4), (4, 3, 2)]
}))
@testing.with_requires('scipy>=1.2.0')
class TestCsrgemm2(unittest.TestCase):
alpha = 0.5
beta = 0.25
def setUp(self):
m, n, k = self.shape
self.a = scipy.sparse.random(m, k, density=0.5, dtype=self.dtype)
self.b = scipy.sparse.random(k, n, density=0.5, dtype=self.dtype)
self.d = scipy.sparse.random(m, n, density=0.5, dtype=self.dtype)
def test_csrgemm2_ab(self):
if not cupy.cusparse.check_availability('csrgemm2'):
pytest.skip('csrgemm2 is not available.')
a = sparse.csr_matrix(self.a)
b = sparse.csr_matrix(self.b)
c = cupy.cusparse.csrgemm2(a, b, alpha=self.alpha)
expect = self.alpha * self.a.dot(self.b)
testing.assert_array_almost_equal(c.toarray(), expect.toarray())
def test_csrgemm2_abpd(self):
if not cupy.cusparse.check_availability('csrgemm2'):
pytest.skip('csrgemm2 is not available.')
a = sparse.csr_matrix(self.a)
b = sparse.csr_matrix(self.b)
d = sparse.csr_matrix(self.d)
c = cupy.cusparse.csrgemm2(a, b, d=d, alpha=self.alpha, beta=self.beta)
expect = self.alpha * self.a.dot(self.b) + self.beta * self.d
testing.assert_array_almost_equal(c.toarray(), expect.toarray())
@testing.with_requires('scipy')
class TestCsrgemm2InvalidCases(unittest.TestCase):
dtype = numpy.float32
shape = (2, 3, 4)
def setUp(self):
m, n, k = self.shape
self.a = scipy.sparse.random(m, k, density=0.5, dtype=self.dtype)
self.b = scipy.sparse.random(k, n, density=0.5, dtype=self.dtype)
self.d = scipy.sparse.random(m, n, density=0.5, dtype=self.dtype)
def test_csrgemm2_invalid_format(self):
if not cupy.cusparse.check_availability('csrgemm2'):
pytest.skip('csrgemm2 is not available.')
a = sparse.csc_matrix(self.a)
b = sparse.csr_matrix(self.b)
with self.assertRaises(TypeError):
cupy.cusparse.csrgemm2(a, b)
a = sparse.csr_matrix(self.a)
b = sparse.csc_matrix(self.b)
with self.assertRaises(TypeError):
cupy.cusparse.csrgemm2(a, b)
a = sparse.csr_matrix(self.a)
b = sparse.csr_matrix(self.b)
d = sparse.csc_matrix(self.d)
with self.assertRaises(TypeError):
cupy.cusparse.csrgemm2(a, b, d=d)
def test_csrgemm2_invalid_shape(self):
if not cupy.cusparse.check_availability('csrgemm2'):
pytest.skip('csrgemm2 is not available.')
a = sparse.csc_matrix(self.a).T
b = sparse.csr_matrix(self.b)
with self.assertRaises(ValueError):
cupy.cusparse.csrgemm2(a, b)
a = sparse.csr_matrix(self.a)
b = sparse.csc_matrix(self.b).T
with self.assertRaises(ValueError):
cupy.cusparse.csrgemm2(a, b)
a = sparse.csr_matrix(self.a)
b = sparse.csr_matrix(self.b)
d = sparse.csc_matrix(self.d).T
with self.assertRaises(ValueError):
cupy.cusparse.csrgemm2(a, b, d=d)
def test_csrgemm2_availability(self):
if not cupy.cusparse.check_availability('csrgemm2'):
a = sparse.csr_matrix(self.a)
b = sparse.csr_matrix(self.b)
with self.assertRaises(RuntimeError):
cupy.cusparse.csrgemm2(a, b)
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64],
'transa': [False, True],
}))
@testing.with_requires('scipy')
class TestCsrmv(unittest.TestCase):
alpha = 0.5
beta = 0.25
def setUp(self):
self.op_a = scipy.sparse.random(2, 3, density=0.5, dtype=self.dtype)
if self.transa:
self.a = self.op_a.T
else:
self.a = self.op_a
self.x = numpy.random.uniform(-1, 1, 3).astype(self.dtype)
self.y = numpy.random.uniform(-1, 1, 2).astype(self.dtype)
def test_csrmv(self):
if not cusparse.check_availability('csrmv'):
pytest.skip('csrmv is not available')
a = sparse.csr_matrix(self.a)
x = cupy.array(self.x, order='f')
y = cupy.cusparse.csrmv(
a, x, alpha=self.alpha, transa=self.transa)
expect = self.alpha * self.op_a.dot(self.x)
testing.assert_array_almost_equal(y, expect)
def test_csrmv_with_y(self):
if not cusparse.check_availability('csrmv'):
pytest.skip('csrmv is not available')
a = sparse.csr_matrix(self.a)
x = cupy.array(self.x, order='f')
y = cupy.array(self.y, order='f')
z = cupy.cusparse.csrmv(
a, x, y=y, alpha=self.alpha, beta=self.beta, transa=self.transa)
expect = self.alpha * self.op_a.dot(self.x) + self.beta * self.y
assert y is z
testing.assert_array_almost_equal(y, expect)
def test_csrmvEx_aligned(self):
if not cusparse.check_availability('csrmvEx'):
pytest.skip('csrmvEx is not available')
a = sparse.csr_matrix(self.a)
x = cupy.array(self.x, order='f')
assert cupy.cusparse.csrmvExIsAligned(a, x)
def test_csrmvEx_not_aligned(self):
if not cusparse.check_availability('csrmvEx'):
pytest.skip('csrmvEx is not available')
a = sparse.csr_matrix(self.a)
tmp = cupy.array(numpy.hstack([self.x, self.y]), order='f')
x = tmp[0:len(self.x)]
y = tmp[len(self.x):]
assert not cupy.cusparse.csrmvExIsAligned(a, x, y)
def test_csrmvEx(self):
if not cusparse.check_availability('csrmvEx'):
pytest.skip('csrmvEx is not available')
if self.transa:
# no support for transa
return
a = sparse.csr_matrix(self.a)
x = cupy.array(self.x, order='f')
y = cupy.cusparse.csrmvEx(a, x, alpha=self.alpha)
expect = self.alpha * self.op_a.dot(self.x)
testing.assert_array_almost_equal(y, expect)
def test_csrmvEx_with_y(self):
if not cusparse.check_availability('csrmvEx'):
pytest.skip('csrmvEx is not available')
if self.transa:
# no support for transa
return
a = sparse.csr_matrix(self.a)
x = cupy.array(self.x, order='f')
y = cupy.array(self.y, order='f')
z = cupy.cusparse.csrmvEx(
a, x, y=y, alpha=self.alpha, beta=self.beta)
expect = self.alpha * self.op_a.dot(self.x) + self.beta * self.y
assert y is z
testing.assert_array_almost_equal(y, expect)
@testing.with_requires('scipy')
class TestCoosort(unittest.TestCase):
def setUp(self):
if not cusparse.check_availability('coosort'):
pytest.skip('coosort is not available')
self.a = scipy.sparse.random(
100, 100, density=0.9, dtype=numpy.float32, format='coo')
numpy.random.shuffle(self.a.row)
numpy.random.shuffle(self.a.col)
def test_coosort(self):
a = sparse.coo_matrix(self.a)
cupy.cusparse.coosort(a)
# lexsort by row first and col second
argsort = numpy.lexsort((self.a.col, self.a.row))
testing.assert_array_equal(self.a.row[argsort], a.row)
testing.assert_array_equal(self.a.col[argsort], a.col)
testing.assert_array_almost_equal(self.a.data[argsort], a.data)
def test_coosort_by_column(self):
a = sparse.coo_matrix(self.a)
cupy.cusparse.coosort(a, sort_by='c')
# lexsort by col first and row second
argsort = numpy.lexsort((self.a.row, self.a.col))
testing.assert_array_equal(self.a.row[argsort], a.row)
testing.assert_array_equal(self.a.col[argsort], a.col)
testing.assert_array_almost_equal(self.a.data[argsort], a.data)
@testing.with_requires('scipy')
class TestCsrsort(unittest.TestCase):
def setUp(self):
if not cusparse.check_availability('csrsort'):
pytest.skip('csrsort is not available')
self.a = scipy.sparse.random(
1, 1000, density=0.9, dtype=numpy.float32, format='csr')
numpy.random.shuffle(self.a.indices)
self.a.has_sorted_indices = False
def test_csrsort(self):
a = sparse.csr_matrix(self.a)
cupy.cusparse.csrsort(a)
self.a.sort_indices()
testing.assert_array_equal(self.a.indptr, a.indptr)
testing.assert_array_equal(self.a.indices, a.indices)
testing.assert_array_almost_equal(self.a.data, a.data)
@testing.with_requires('scipy')
class TestCscsort(unittest.TestCase):
def setUp(self):
if not cusparse.check_availability('cscsort'):
pytest.skip('cscsort is not available')
self.a = scipy.sparse.random(
1000, 1, density=0.9, dtype=numpy.float32, format='csc')
numpy.random.shuffle(self.a.indices)
self.a.has_sorted_indices = False
def test_cscsort(self):
a = sparse.csc_matrix(self.a)
cupy.cusparse.cscsort(a)
self.a.sort_indices()
testing.assert_array_equal(self.a.indptr, a.indptr)
testing.assert_array_equal(self.a.indices, a.indices)
testing.assert_array_almost_equal(self.a.data, a.data)
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
'transa': [False, True],
'shape': [(3, 2), (4, 3)],
'format': ['csr', 'csc', 'coo'],
}))
@testing.with_requires('scipy>=1.2.0')
class TestSpmv(unittest.TestCase):
alpha = 0.5
beta = 0.25
def setUp(self):
m, n = self.shape
self.op_a = scipy.sparse.random(m, n, density=0.5, format=self.format,
dtype=self.dtype)
if self.transa:
self.a = self.op_a.T
else:
self.a = self.op_a
self.x = numpy.random.uniform(-1, 1, n).astype(self.dtype)
self.y = numpy.random.uniform(-1, 1, m).astype(self.dtype)
if self.format == 'csr':
self.sparse_matrix = sparse.csr_matrix
elif self.format == 'csc':
self.sparse_matrix = sparse.csc_matrix
elif self.format == 'coo':
self.sparse_matrix = sparse.coo_matrix
def test_spmv(self):
if not cupy.cusparse.check_availability('spmv'):
pytest.skip('spmv is not available')
a = self.sparse_matrix(self.a)
if not a.has_canonical_format:
a.sum_duplicates()
x = cupy.array(self.x)
y = cupy.cusparse.spmv(a, x, alpha=self.alpha, transa=self.transa)
expect = self.alpha * self.op_a.dot(self.x)
testing.assert_array_almost_equal(y, expect)
def test_spmv_with_y(self):
if not cupy.cusparse.check_availability('spmv'):
pytest.skip('spmv is not available')
a = self.sparse_matrix(self.a)
if not a.has_canonical_format:
a.sum_duplicates()
x = cupy.array(self.x)
y = cupy.array(self.y)
z = cupy.cusparse.spmv(a, x, y=y, alpha=self.alpha, beta=self.beta,
transa=self.transa)
expect = self.alpha * self.op_a.dot(self.x) + self.beta * self.y
assert y is z
testing.assert_array_almost_equal(y, expect)
@testing.with_requires('scipy')
class TestErrorSpmv(unittest.TestCase):
dtype = numpy.float32
def setUp(self):
m, n = 2, 3
self.a = scipy.sparse.random(m, n, density=0.5,
dtype=self.dtype)
self.x = numpy.random.uniform(-1, 1, n).astype(self.dtype)
self.y = numpy.random.uniform(-1, 1, m).astype(self.dtype)
def test_error_shape(self):
if not cupy.cusparse.check_availability('spmv'):
pytest.skip('spmv is not available')
a = sparse.csr_matrix(self.a.T)
x = cupy.array(self.x)
with self.assertRaises(ValueError):
cupy.cusparse.spmv(a, x)
a = sparse.csr_matrix(self.a)
x = cupy.array(self.x)
with self.assertRaises(ValueError):
cupy.cusparse.spmv(a, x, transa=True)
a = sparse.csr_matrix(self.a)
x = cupy.array(self.y)
with self.assertRaises(ValueError):
cupy.cusparse.spmv(a, x)
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
'transa': [False, True],
'transb': [False, True],
'dims': [(2, 3, 4), (3, 4, 2)],
'format': ['csr', 'csc', 'coo'],
}))
@testing.with_requires('scipy>=1.2.0')
class TestSpmm(unittest.TestCase):
alpha = 0.5
beta = 0.25
def setUp(self):
m, n, k = self.dims
self.op_a = scipy.sparse.random(m, k, density=0.5, format=self.format,
dtype=self.dtype)
if self.transa:
self.a = self.op_a.T
else:
self.a = self.op_a
self.op_b = numpy.random.uniform(-1, 1, (k, n)).astype(self.dtype)
if self.transb:
self.b = self.op_b.T
else:
self.b = self.op_b
self.c = numpy.random.uniform(-1, 1, (m, n)).astype(self.dtype)
if self.format == 'csr':
self.sparse_matrix = sparse.csr_matrix
elif self.format == 'csc':
self.sparse_matrix = sparse.csc_matrix
elif self.format == 'coo':
self.sparse_matrix = sparse.coo_matrix
def test_spmm(self):
if not cupy.cusparse.check_availability('spmm'):
pytest.skip('spmm is not available')
a = self.sparse_matrix(self.a)
if not a.has_canonical_format:
a.sum_duplicates()
b = cupy.array(self.b, order='f')
c = cupy.cusparse.spmm(
a, b, alpha=self.alpha, transa=self.transa, transb=self.transb)
expect = self.alpha * self.op_a.dot(self.op_b)
testing.assert_array_almost_equal(c, expect)
def test_spmm_with_c(self):
if not cupy.cusparse.check_availability('spmm'):
pytest.skip('spmm is not available')
a = self.sparse_matrix(self.a)
if not a.has_canonical_format:
a.sum_duplicates()
b = cupy.array(self.b, order='f')
c = cupy.array(self.c, order='f')
y = cupy.cusparse.spmm(
a, b, c=c, alpha=self.alpha, beta=self.beta,
transa=self.transa, transb=self.transb)
expect = self.alpha * self.op_a.dot(self.op_b) + self.beta * self.c
assert y is c
testing.assert_array_almost_equal(y, expect)
@testing.with_requires('scipy')
class TestErrorSpmm(unittest.TestCase):
dtype = numpy.float32
def setUp(self):
m, n, k = 2, 3, 4
self.a = scipy.sparse.random(m, k, density=0.5,
dtype=self.dtype)
self.b = numpy.random.uniform(-1, 1, (k, n)).astype(self.dtype)
self.c = numpy.random.uniform(-1, 1, (m, n)).astype(self.dtype)
def test_error_shape(self):
if not cupy.cusparse.check_availability('spmm'):
pytest.skip('spmm is not available')
a = sparse.csr_matrix(self.a.T)
b = cupy.array(self.b, order='f')
with self.assertRaises(ValueError):
cupy.cusparse.spmm(a, b)
a = sparse.csr_matrix(self.a)
b = cupy.array(self.b, order='f')
with self.assertRaises(AssertionError):
cupy.cusparse.spmm(a, b.T)
a = sparse.csr_matrix(self.a)
b = cupy.array(self.b)
with self.assertRaises(AssertionError):
cupy.cusparse.spmm(a, b)
a = sparse.csr_matrix(self.a)
b = cupy.array(self.c, order='f')
with self.assertRaises(ValueError):
cupy.cusparse.spmm(a, b)
a = sparse.csr_matrix(self.a)
b = cupy.array(self.b, order='f')
c = cupy.array(self.b, order='f')
with self.assertRaises(ValueError):
cupy.cusparse.spmm(a, b, c=c)
@testing.parameterize(*testing.product({
'lower': [True, False],
'unit_diag': [True, False],
'transa': ['N', 'T', 'H'],
'blocking': [True, False],
'level_info': [True, False],
'format': ['csr', 'csc'],
'nrhs': [None, 1, 4],
'order': ['C', 'F']
}))
@testing.with_requires('scipy')
class TestCsrsm2(unittest.TestCase):
n = 6
alpha = 1.0
density = 0.75
_tol = {'f': 1e-5, 'd': 1e-12}
def _setup(self, dtype):
dtype = numpy.dtype(dtype)
self.tol = self._tol[dtype.char.lower()]
a_shape = (self.n, self.n)
a = testing.shaped_random(a_shape, numpy, dtype=dtype, scale=1)
a_mask = testing.shaped_random(a_shape, numpy, dtype='f', scale=1)
a[a_mask > self.density] = 0
a_diag = numpy.diag(numpy.ones((self.n,), dtype=dtype))
if self.unit_diag:
a[a_diag > 0] = 0
a = a + a_diag
cp_a = cupy.array(a)
if self.unit_diag:
cp_a[a_diag > 0] = 0.1 # any number except 0
if self.format == 'csr':
self.a = sparse.csr_matrix(cp_a)
elif self.format == 'csc':
self.a = sparse.csc_matrix(cp_a)
b_shape = (self.n,) if self.nrhs is None else (self.n, self.nrhs)
b = numpy.arange(1, numpy.prod(b_shape) + 1,
dtype=dtype).reshape(b_shape)
b = b.copy(order=self.order)
self.b = cupy.array(b, order=self.order)
if self.lower:
a = numpy.tril(a)
else:
a = numpy.triu(a)
if self.transa == 'T':
a = a.T
elif self.transa == 'H':
a = a.conj().T
self.ref_x = numpy.linalg.solve(a, self.alpha * b)
@testing.for_dtypes('fdFD')
def test_csrsm2(self, dtype):
if not cusparse.check_availability('csrsm2'):
raise unittest.SkipTest('csrsm2 is not available')
if (self.format == 'csc' and numpy.dtype(dtype).char in 'FD' and
self.transa == 'H'):
raise unittest.SkipTest('unsupported combination')
self._setup(dtype)
x = self.b.copy(order=self.order)
cusparse.csrsm2(self.a, x, alpha=self.alpha,
lower=self.lower, unit_diag=self.unit_diag,
transa=self.transa, blocking=self.blocking,
level_info=self.level_info)
testing.assert_allclose(x, self.ref_x, atol=self.tol, rtol=self.tol)
@testing.parameterize(*testing.product({
'n': [7, 10],
'level_info': [True, False],
}))
@testing.with_requires('scipy')
class TestCsrilu02(unittest.TestCase):
_tol = {'f': 1e-5, 'd': 1e-12}
def _make_matrix(self, dtype):
if not cusparse.check_availability('csrilu02'):
unittest.SkipTest('csrilu02 is not available')
a = testing.shaped_random((self.n, self.n), cupy, dtype=dtype,
scale=0.9) + 0.1
a = a + cupy.diag(cupy.ones((self.n,), dtype=dtype.char.lower()))
return a
@testing.for_dtypes('fdFD')
def test_csrilu02(self, dtype):
dtype = numpy.dtype(dtype)
a_ref = self._make_matrix(dtype)
a = sparse.csr_matrix(a_ref)
cusparse.csrilu02(a, level_info=self.level_info)
a = a.todense()
al = cupy.tril(a, k=-1)
al = al + cupy.diag(cupy.ones((self.n,), dtype=dtype.char.lower()))
au = cupy.triu(a)
a = al @ au
tol = self._tol[dtype.char.lower()]
cupy.testing.assert_allclose(a, a_ref, atol=tol, rtol=tol)
def test_invalid_cases(self):
dtype = numpy.dtype('d')
a_ref = self._make_matrix(dtype)
# invalid format
a = sparse.csc_matrix(a_ref)
with self.assertRaises(TypeError):
cusparse.csrilu02(a, level_info=self.level_info)
# invalid shape
a = cupy.ones((self.n, self.n + 1), dtype=dtype)
a = sparse.csr_matrix(a)
with self.assertRaises(ValueError):
cusparse.csrilu02(a, level_info=self.level_info)
# matrix with zero diagonal element
a = a_ref
a[-1, -1] = 0
a = sparse.csr_matrix(a)
with self.assertRaises(ValueError):
cusparse.csrilu02(a, level_info=self.level_info)
# singular matrix
a = a_ref
a[1:] = a[0]
a = sparse.csr_matrix(a)
with self.assertRaises(ValueError):
cusparse.csrilu02(a, level_info=self.level_info)
@testing.parameterize(*testing.product({
'shape': [(3, 4), (4, 4), (4, 3)],
'density': [0.0, 0.5, 1.0],
'format': ['csr', 'csc', 'coo']
}))
@testing.with_requires('scipy')
class TestSparseMatrixConversion(unittest.TestCase):
@testing.for_dtypes('fdFD')
def test_denseToSparse(self, dtype):
if not cusparse.check_availability('denseToSparse'):
pytest.skip('denseToSparse is not available')
x = cupy.random.uniform(0, 1, self.shape).astype(dtype)
x[x < self.density] = 0
y = cusparse.denseToSparse(x, format=self.format)
assert y.format == self.format
testing.assert_array_equal(x, y.todense())
@testing.for_dtypes('fdFD')
def test_sparseToDense(self, dtype):
if not cusparse.check_availability('sparseToDense'):
pytest.skip('sparseToDense is not available')
m, n = self.shape
x = scipy.sparse.random(m, n, density=self.density, format=self.format,
dtype=dtype)
if self.format == 'csr':
x = sparse.csr_matrix(x)
elif self.format == 'csc':
x = sparse.csc_matrix(x)
elif self.format == 'coo':
x = sparse.coo_matrix(x)
y = cusparse.sparseToDense(x)
testing.assert_array_equal(x.todense(), y)
| 35.342625
| 79
| 0.603319
| 4,196
| 30,430
| 4.271687
| 0.055291
| 0.027896
| 0.048538
| 0.053002
| 0.827159
| 0.786153
| 0.755021
| 0.7158
| 0.674794
| 0.659897
| 0
| 0.017146
| 0.260204
| 30,430
| 860
| 80
| 35.383721
| 0.779051
| 0.007033
| 0
| 0.642857
| 0
| 0
| 0.052076
| 0
| 0
| 0
| 0
| 0
| 0.10084
| 1
| 0.081232
| false
| 0.001401
| 0.014006
| 0
| 0.161064
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7dd99c9710524ffbea5b4c2d672ae245e2a8d295
| 139
|
py
|
Python
|
kkutil/type_util/str_util.py
|
kaka19ace/kkutils
|
1ac449488d85ba2c6b18c5dc9cf77a0bc36579b1
|
[
"MIT"
] | 1
|
2015-12-13T18:42:52.000Z
|
2015-12-13T18:42:52.000Z
|
kkutil/type_util/str_util.py
|
kaka19ace/kkutil
|
1ac449488d85ba2c6b18c5dc9cf77a0bc36579b1
|
[
"MIT"
] | null | null | null |
kkutil/type_util/str_util.py
|
kaka19ace/kkutil
|
1ac449488d85ba2c6b18c5dc9cf77a0bc36579b1
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
def is_whitespaces_str(s):
return True if len(s.strip(" \t\n\r\f\v")) == 0 else False
| 19.857143
| 62
| 0.597122
| 26
| 139
| 3.115385
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017391
| 0.172662
| 139
| 7
| 62
| 19.857143
| 0.686957
| 0.302158
| 0
| 0
| 0
| 0
| 0.115789
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
7de49f84adccf90e986eb2b6f01530707e24d499
| 408
|
py
|
Python
|
mythx_models/request/project_status.py
|
ConsenSys/mythx-models
|
e912c2fc6e7d18041310d3b9f0f95085db47ed9b
|
[
"MIT"
] | 2
|
2019-08-26T13:42:28.000Z
|
2019-11-13T15:44:16.000Z
|
mythx_models/request/project_status.py
|
ConsenSys/mythx-models
|
e912c2fc6e7d18041310d3b9f0f95085db47ed9b
|
[
"MIT"
] | 22
|
2019-08-26T13:14:55.000Z
|
2021-04-18T14:22:52.000Z
|
mythx_models/request/project_status.py
|
ConsenSys/mythx-models
|
e912c2fc6e7d18041310d3b9f0f95085db47ed9b
|
[
"MIT"
] | 6
|
2019-08-29T15:51:38.000Z
|
2021-04-05T11:41:34.000Z
|
from pydantic import BaseModel
class ProjectStatusRequest(BaseModel):
project_id: str
@property
def endpoint(self):
return f"v1/projects/{self.project_id}"
@property
def method(self):
return "GET"
@property
def payload(self):
return {}
@property
def headers(self):
return {}
@property
def parameters(self):
return {}
| 15.692308
| 47
| 0.60049
| 42
| 408
| 5.785714
| 0.52381
| 0.226337
| 0.148148
| 0.17284
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003521
| 0.303922
| 408
| 25
| 48
| 16.32
| 0.852113
| 0
| 0
| 0.444444
| 0
| 0
| 0.078431
| 0.071078
| 0
| 0
| 0
| 0
| 0
| 1
| 0.277778
| false
| 0
| 0.055556
| 0.277778
| 0.722222
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
81626ebc523afb0144bc98e3786b83cfd756165e
| 198
|
py
|
Python
|
tmp.py
|
adamamiller/NU_git_intro
|
dd775beedca82c7d24db23d143992cac678fea9b
|
[
"MIT"
] | null | null | null |
tmp.py
|
adamamiller/NU_git_intro
|
dd775beedca82c7d24db23d143992cac678fea9b
|
[
"MIT"
] | null | null | null |
tmp.py
|
adamamiller/NU_git_intro
|
dd775beedca82c7d24db23d143992cac678fea9b
|
[
"MIT"
] | null | null | null |
import numpy as np
print("Did you know 2 + 2 = {}".format(2+2))
print("Of course I knew that, I have 4 fingers")
print("Well, I knew you had 4 fingers. I didn't know that you knew how to count!")
| 28.285714
| 82
| 0.676768
| 41
| 198
| 3.268293
| 0.609756
| 0.029851
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037736
| 0.19697
| 198
| 7
| 82
| 28.285714
| 0.805031
| 0
| 0
| 0
| 0
| 0.25
| 0.678392
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0.75
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
817548d2cfd30d338110cab215d59d6c64b56a62
| 159
|
py
|
Python
|
uranus_middleware/endpoints/__init__.py
|
cigui/uranus-middleware
|
5e77cea7fd6a3b0e77e8a095820546ca5cf224eb
|
[
"MIT"
] | null | null | null |
uranus_middleware/endpoints/__init__.py
|
cigui/uranus-middleware
|
5e77cea7fd6a3b0e77e8a095820546ca5cf224eb
|
[
"MIT"
] | null | null | null |
uranus_middleware/endpoints/__init__.py
|
cigui/uranus-middleware
|
5e77cea7fd6a3b0e77e8a095820546ca5cf224eb
|
[
"MIT"
] | null | null | null |
from flask_socketio import SocketIO
socketio = SocketIO()
def init_app(app):
# from . import test
socketio.init_app(app, cors_allowed_origins='*')
| 15.9
| 52
| 0.72327
| 21
| 159
| 5.238095
| 0.52381
| 0.290909
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176101
| 159
| 9
| 53
| 17.666667
| 0.839695
| 0.113208
| 0
| 0
| 0
| 0
| 0.007194
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
81d6310965bc6aed90479884f156f01c70591fd8
| 3,061
|
py
|
Python
|
jdaviz/tests/test_subsets.py
|
orifox/jdaviz
|
49979137e54a3b8b3d9da996aff4d0575c60e998
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
jdaviz/tests/test_subsets.py
|
orifox/jdaviz
|
49979137e54a3b8b3d9da996aff4d0575c60e998
|
[
"MIT",
"BSD-3-Clause"
] | 6
|
2021-03-05T14:27:53.000Z
|
2021-09-30T14:02:54.000Z
|
jdaviz/tests/test_subsets.py
|
javerbukh/jdaviz
|
552ab90ab6ec3c44d53325670324c773aab20e54
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
import pytest
from glue.core import Data
from glue.core.roi import RectangularROI, XRangeROI
from glue.core.subset import RoiSubsetState
from numpy.testing import assert_allclose
from regions import RectanglePixelRegion
from jdaviz.app import Application
@pytest.fixture
def jdaviz_app():
return Application(configuration='cubeviz')
def test_region_from_subset_2d(jdaviz_app):
data = Data(flux=np.ones((128, 128)), label='Test 2D Flux')
jdaviz_app.data_collection.append(data)
subset_state = RoiSubsetState(data.pixel_component_ids[1],
data.pixel_component_ids[0],
RectangularROI(1, 3.5, -0.2, 3.3))
jdaviz_app.add_data_to_viewer('flux-viewer', 'Test 2D Flux')
jdaviz_app.data_collection.new_subset_group(
subset_state=subset_state, label='rectangular')
subsets = jdaviz_app.get_subsets_from_viewer('flux-viewer')
reg = subsets.get('rectangular')
assert len(subsets) == 1
assert isinstance(reg, RectanglePixelRegion)
assert_allclose(reg.center.x, 2.25)
assert_allclose(reg.center.x, 2.25)
assert_allclose(reg.center.y, 1.55)
assert_allclose(reg.width, 2.5)
assert_allclose(reg.height, 3.5)
def test_region_from_subset_3d(jdaviz_app):
data = Data(flux=np.ones((256, 128, 128)), label='Test 3D Flux')
jdaviz_app.data_collection.append(data)
subset_state = RoiSubsetState(data.pixel_component_ids[1],
data.pixel_component_ids[0],
RectangularROI(1, 3.5, -0.2, 3.3))
jdaviz_app.add_data_to_viewer('flux-viewer', 'Test 3D Flux')
jdaviz_app.data_collection.new_subset_group(
subset_state=subset_state, label='rectangular')
subsets = jdaviz_app.get_subsets_from_viewer('flux-viewer')
reg = subsets.get('rectangular')
assert len(subsets) == 1
assert isinstance(reg, RectanglePixelRegion)
assert_allclose(reg.center.x, 2.25)
assert_allclose(reg.center.x, 2.25)
assert_allclose(reg.center.y, 1.55)
assert_allclose(reg.width, 2.5)
assert_allclose(reg.height, 3.5)
def test_region_from_subset_profile(jdaviz_app, spectral_cube_wcs):
data = Data(flux=np.ones((256, 128, 128)), label='Test 1D Flux', coords=spectral_cube_wcs)
jdaviz_app.data_collection.append(data)
subset_state = RoiSubsetState(data.pixel_component_ids[1],
data.pixel_component_ids[0],
XRangeROI(1, 3.5))
jdaviz_app.add_data_to_viewer('spectrum-viewer', 'Test 1D Flux')
jdaviz_app.data_collection.new_subset_group(
subset_state=subset_state, label='rectangular')
subsets = jdaviz_app.get_subsets_from_viewer('spectrum-viewer')
reg = subsets.get('rectangular')
assert len(subsets) == 1
assert isinstance(reg, RectanglePixelRegion)
assert_allclose(reg.center.x, 2.25)
assert_allclose(reg.center.y, 128)
assert_allclose(reg.width, 2.5)
assert_allclose(reg.height, 256)
| 33.271739
| 94
| 0.694871
| 416
| 3,061
| 4.877404
| 0.173077
| 0.075407
| 0.117299
| 0.090685
| 0.795466
| 0.78413
| 0.772302
| 0.748645
| 0.748645
| 0.748645
| 0
| 0.038649
| 0.196994
| 3,061
| 91
| 95
| 33.637363
| 0.786819
| 0
| 0
| 0.625
| 0
| 0
| 0.071545
| 0
| 0
| 0
| 0
| 0
| 0.328125
| 1
| 0.0625
| false
| 0
| 0.125
| 0.015625
| 0.203125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c492af4efed7ccf3e3d347cf3679626646b2041d
| 239
|
py
|
Python
|
tools/PerformanceProfiler.pack/performance_profiler/command/__init__.py
|
usualoma/mt-plugin-PerformanceProfiler
|
3c37e5033f296057885e9ce2e86edb199877f3fc
|
[
"MIT"
] | 1
|
2021-03-28T02:05:53.000Z
|
2021-03-28T02:05:53.000Z
|
tools/PerformanceProfiler.pack/performance_profiler/command/__init__.py
|
usualoma/mt-plugin-PerformanceProfiler
|
3c37e5033f296057885e9ce2e86edb199877f3fc
|
[
"MIT"
] | 1
|
2021-04-15T11:47:09.000Z
|
2021-04-15T11:47:09.000Z
|
tools/PerformanceProfiler.pack/performance_profiler/command/__init__.py
|
usualoma/mt-plugin-PerformanceProfiler
|
3c37e5033f296057885e9ce2e86edb199877f3fc
|
[
"MIT"
] | null | null | null |
from .dump_command import DumpCommand
from .load_command import LoadCommand
from .prepare_command import PrepareCommand
from .tidyup_command import TidyupCommand
__all__ = ["DumpCommand", "LoadCommand", "PrepareCommand", "TidyupCommand"]
| 34.142857
| 75
| 0.828452
| 25
| 239
| 7.6
| 0.48
| 0.273684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096234
| 239
| 6
| 76
| 39.833333
| 0.87963
| 0
| 0
| 0
| 0
| 0
| 0.205021
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c4bcf598f0235822f75a35f7588b877731e03503
| 113
|
py
|
Python
|
Easy/Hamming_distance/hamming_distance.py
|
nitin3685/LeetCode_Solutions
|
ab920e96cd27e0b2c3c895ce20853edceef0cce8
|
[
"MIT"
] | null | null | null |
Easy/Hamming_distance/hamming_distance.py
|
nitin3685/LeetCode_Solutions
|
ab920e96cd27e0b2c3c895ce20853edceef0cce8
|
[
"MIT"
] | null | null | null |
Easy/Hamming_distance/hamming_distance.py
|
nitin3685/LeetCode_Solutions
|
ab920e96cd27e0b2c3c895ce20853edceef0cce8
|
[
"MIT"
] | null | null | null |
class Solution:
def hammingDistance(self, x: int, y: int) -> int:
return (list(bin(x^y))).count('1')
| 28.25
| 53
| 0.59292
| 17
| 113
| 3.941176
| 0.764706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011236
| 0.212389
| 113
| 3
| 54
| 37.666667
| 0.741573
| 0
| 0
| 0
| 0
| 0
| 0.00885
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
c4bf72901dbdd848e02683e8f28808e1b1e8af5b
| 29,695
|
py
|
Python
|
venv/lib/python3.9/site-packages/NetworkExtension/_metadata.py
|
naveenthammu/edith
|
80c91f7d4a8f5617fbc9d3586932c4b9264faebf
|
[
"MIT"
] | 1
|
2020-12-21T13:05:08.000Z
|
2020-12-21T13:05:08.000Z
|
venv/lib/python3.9/site-packages/NetworkExtension/_metadata.py
|
naveenthammu/edith
|
80c91f7d4a8f5617fbc9d3586932c4b9264faebf
|
[
"MIT"
] | null | null | null |
venv/lib/python3.9/site-packages/NetworkExtension/_metadata.py
|
naveenthammu/edith
|
80c91f7d4a8f5617fbc9d3586932c4b9264faebf
|
[
"MIT"
] | null | null | null |
# This file is generated by objective.metadata
#
# Last update: Thu Nov 12 22:02:13 2020
#
# flake8: noqa
import objc, sys
if sys.maxsize > 2 ** 32:
def sel32or64(a, b): return b
else:
def sel32or64(a, b): return a
misc = {
}
constants = '''$NEAppProxyErrorDomain$NEAppPushErrorDomain$NEDNSProxyConfigurationDidChangeNotification$NEDNSProxyErrorDomain$NEDNSSettingsConfigurationDidChangeNotification$NEDNSSettingsErrorDomain$NEFilterConfigurationDidChangeNotification$NEFilterErrorDomain$NEFilterProviderRemediationMapRemediationButtonTexts$NEFilterProviderRemediationMapRemediationURLs$NEHotspotConfigurationErrorDomain$NETunnelProviderErrorDomain$NEVPNConfigurationChangeNotification$NEVPNConnectionStartOptionPassword$NEVPNConnectionStartOptionUsername$NEVPNErrorDomain$NEVPNStatusDidChangeNotification$kNEHotspotHelperOptionDisplayName$'''
enums = '''$NEAppProxyFlowErrorAborted@5$NEAppProxyFlowErrorDatagramTooLarge@9$NEAppProxyFlowErrorHostUnreachable@3$NEAppProxyFlowErrorInternal@8$NEAppProxyFlowErrorInvalidArgument@4$NEAppProxyFlowErrorNotConnected@1$NEAppProxyFlowErrorPeerReset@2$NEAppProxyFlowErrorReadAlreadyPending@10$NEAppProxyFlowErrorRefused@6$NEAppProxyFlowErrorTimedOut@7$NEAppPushManagerErrorConfigurationInvalid@1$NEAppPushManagerErrorConfigurationNotLoaded@2$NEAppPushManagerErrorInactiveSession@4$NEAppPushManagerErrorInternalError@3$NEDNSProtocolCleartext@1$NEDNSProtocolHTTPS@3$NEDNSProtocolTLS@2$NEDNSProxyManagerErrorConfigurationCannotBeRemoved@4$NEDNSProxyManagerErrorConfigurationDisabled@2$NEDNSProxyManagerErrorConfigurationInvalid@1$NEDNSProxyManagerErrorConfigurationStale@3$NEDNSSettingsManagerErrorConfigurationCannotBeRemoved@4$NEDNSSettingsManagerErrorConfigurationDisabled@2$NEDNSSettingsManagerErrorConfigurationInvalid@1$NEDNSSettingsManagerErrorConfigurationStale@3$NEEvaluateConnectionRuleActionConnectIfNeeded@1$NEEvaluateConnectionRuleActionNeverConnect@2$NEFilterActionAllow@1$NEFilterActionDrop@2$NEFilterActionFilterData@4$NEFilterActionInvalid@0$NEFilterActionRemediate@3$NEFilterDataAttributeHasIPHeader@1$NEFilterFlowBytesMax@18446744073709551615$NEFilterManagerErrorConfigurationCannotBeRemoved@4$NEFilterManagerErrorConfigurationDisabled@2$NEFilterManagerErrorConfigurationInternalError@6$NEFilterManagerErrorConfigurationInvalid@1$NEFilterManagerErrorConfigurationPermissionDenied@5$NEFilterManagerErrorConfigurationStale@3$NEFilterManagerGradeFirewall@1$NEFilterManagerGradeInspector@2$NEFilterPacketProviderVerdictAllow@0$NEFilterPacketProviderVerdictDelay@2$NEFilterPacketProviderVerdictDrop@1$NEFilterReportEventDataDecision@2$NEFilterReportEventFlowClosed@3$NEFilterReportEventNewFlow@1$NEFilterReportEventStatistics@4$NEFilterReportFrequencyHigh@3$NEFilterReportFrequencyLow@1$NEFilterReportFrequencyMedium@2$NEFilterReportFrequencyNone@0$NEHotspotConfigurationEAPTLSVersion_1_0@0$NE
HotspotConfigurationEAPTLSVersion_1_1@1$NEHotspotConfigurationEAPTLSVersion_1_2@2$NEHotspotConfigurationEAPTTLSInnerAuthenticationCHAP@1$NEHotspotConfigurationEAPTTLSInnerAuthenticationEAP@4$NEHotspotConfigurationEAPTTLSInnerAuthenticationMSCHAP@2$NEHotspotConfigurationEAPTTLSInnerAuthenticationMSCHAPv2@3$NEHotspotConfigurationEAPTTLSInnerAuthenticationPAP@0$NEHotspotConfigurationEAPTypeEAPFAST@43$NEHotspotConfigurationEAPTypeEAPPEAP@25$NEHotspotConfigurationEAPTypeEAPTLS@13$NEHotspotConfigurationEAPTypeEAPTTLS@21$NEHotspotConfigurationErrorAlreadyAssociated@13$NEHotspotConfigurationErrorApplicationIsNotInForeground@14$NEHotspotConfigurationErrorInternal@8$NEHotspotConfigurationErrorInvalid@0$NEHotspotConfigurationErrorInvalidEAPSettings@4$NEHotspotConfigurationErrorInvalidHS20DomainName@6$NEHotspotConfigurationErrorInvalidHS20Settings@5$NEHotspotConfigurationErrorInvalidSSID@1$NEHotspotConfigurationErrorInvalidSSIDPrefix@15$NEHotspotConfigurationErrorInvalidWEPPassphrase@3$NEHotspotConfigurationErrorInvalidWPAPassphrase@2$NEHotspotConfigurationErrorJoinOnceNotSupported@12$NEHotspotConfigurationErrorPending@9$NEHotspotConfigurationErrorSystemConfiguration@10$NEHotspotConfigurationErrorUnknown@11$NEHotspotConfigurationErrorUserDenied@7$NENetworkRuleProtocolAny@0$NENetworkRuleProtocolTCP@1$NENetworkRuleProtocolUDP@2$NEOnDemandRuleActionConnect@1$NEOnDemandRuleActionDisconnect@2$NEOnDemandRuleActionEvaluateConnection@3$NEOnDemandRuleActionIgnore@4$NEOnDemandRuleInterfaceTypeAny@0$NEOnDemandRuleInterfaceTypeCellular@3$NEOnDemandRuleInterfaceTypeEthernet@1$NEOnDemandRuleInterfaceTypeWiFi@2$NEProviderStopReasonAppUpdate@16$NEProviderStopReasonAuthenticationCanceled@6$NEProviderStopReasonConfigurationDisabled@9$NEProviderStopReasonConfigurationFailed@7$NEProviderStopReasonConfigurationRemoved@10$NEProviderStopReasonConnectionFailed@14$NEProviderStopReasonIdleTimeout@8$NEProviderStopReasonNoNetworkAvailable@3$NEProviderStopReasonNone@0$NEProviderStopReasonProviderDisabled@5
$NEProviderStopReasonProviderFailed@2$NEProviderStopReasonSleep@15$NEProviderStopReasonSuperceded@11$NEProviderStopReasonUnrecoverableNetworkChange@4$NEProviderStopReasonUserInitiated@1$NEProviderStopReasonUserLogout@12$NEProviderStopReasonUserSwitch@13$NETrafficDirectionAny@0$NETrafficDirectionInbound@1$NETrafficDirectionOutbound@2$NETunnelProviderErrorNetworkSettingsCanceled@2$NETunnelProviderErrorNetworkSettingsFailed@3$NETunnelProviderErrorNetworkSettingsInvalid@1$NETunnelProviderRoutingMethodDestinationIP@1$NETunnelProviderRoutingMethodNetworkRule@3$NETunnelProviderRoutingMethodSourceApplication@2$NEVPNErrorConfigurationDisabled@2$NEVPNErrorConfigurationInvalid@1$NEVPNErrorConfigurationReadWriteFailed@5$NEVPNErrorConfigurationStale@4$NEVPNErrorConfigurationUnknown@6$NEVPNErrorConnectionFailed@3$NEVPNIKEAuthenticationMethodCertificate@1$NEVPNIKEAuthenticationMethodNone@0$NEVPNIKEAuthenticationMethodSharedSecret@2$NEVPNIKEv2CertificateTypeECDSA256@2$NEVPNIKEv2CertificateTypeECDSA384@3$NEVPNIKEv2CertificateTypeECDSA521@4$NEVPNIKEv2CertificateTypeEd25519@5$NEVPNIKEv2CertificateTypeRSA@1$NEVPNIKEv2DeadPeerDetectionRateHigh@3$NEVPNIKEv2DeadPeerDetectionRateLow@1$NEVPNIKEv2DeadPeerDetectionRateMedium@2$NEVPNIKEv2DeadPeerDetectionRateNone@0$NEVPNIKEv2DiffieHellmanGroup0@0$NEVPNIKEv2DiffieHellmanGroup1@1$NEVPNIKEv2DiffieHellmanGroup14@14$NEVPNIKEv2DiffieHellmanGroup15@15$NEVPNIKEv2DiffieHellmanGroup16@16$NEVPNIKEv2DiffieHellmanGroup17@17$NEVPNIKEv2DiffieHellmanGroup18@18$NEVPNIKEv2DiffieHellmanGroup19@19$NEVPNIKEv2DiffieHellmanGroup2@2$NEVPNIKEv2DiffieHellmanGroup20@20$NEVPNIKEv2DiffieHellmanGroup21@21$NEVPNIKEv2DiffieHellmanGroup31@31$NEVPNIKEv2DiffieHellmanGroup5@5$NEVPNIKEv2DiffieHellmanGroupInvalid@0$NEVPNIKEv2EncryptionAlgorithm3DES@2$NEVPNIKEv2EncryptionAlgorithmAES128@3$NEVPNIKEv2EncryptionAlgorithmAES128GCM@5$NEVPNIKEv2EncryptionAlgorithmAES256@4$NEVPNIKEv2EncryptionAlgorithmAES256GCM@6$NEVPNIKEv2EncryptionAlgorithmChaCha20Poly1305@7$NEVPNIKEv2EncryptionAlgorith
mDES@1$NEVPNIKEv2IntegrityAlgorithmSHA160@2$NEVPNIKEv2IntegrityAlgorithmSHA256@3$NEVPNIKEv2IntegrityAlgorithmSHA384@4$NEVPNIKEv2IntegrityAlgorithmSHA512@5$NEVPNIKEv2IntegrityAlgorithmSHA96@1$NEVPNIKEv2TLSVersion1_0@1$NEVPNIKEv2TLSVersion1_1@2$NEVPNIKEv2TLSVersion1_2@3$NEVPNIKEv2TLSVersionDefault@0$NEVPNStatusConnected@3$NEVPNStatusConnecting@2$NEVPNStatusDisconnected@1$NEVPNStatusDisconnecting@5$NEVPNStatusInvalid@0$NEVPNStatusReasserting@4$NWPathStatusInvalid@0$NWPathStatusSatisfiable@3$NWPathStatusSatisfied@1$NWPathStatusUnsatisfied@2$NWTCPConnectionStateCancelled@5$NWTCPConnectionStateConnected@3$NWTCPConnectionStateConnecting@1$NWTCPConnectionStateDisconnected@4$NWTCPConnectionStateInvalid@0$NWTCPConnectionStateWaiting@2$NWUDPSessionStateCancelled@5$NWUDPSessionStateFailed@4$NWUDPSessionStateInvalid@0$NWUDPSessionStatePreparing@2$NWUDPSessionStateReady@3$NWUDPSessionStateWaiting@1$kNEHotspotHelperCommandTypeAuthenticate@3$kNEHotspotHelperCommandTypeEvaluate@2$kNEHotspotHelperCommandTypeFilterScanList@1$kNEHotspotHelperCommandTypeLogoff@6$kNEHotspotHelperCommandTypeMaintain@5$kNEHotspotHelperCommandTypeNone@0$kNEHotspotHelperCommandTypePresentUI@4$kNEHotspotHelperConfidenceHigh@2$kNEHotspotHelperConfidenceLow@1$kNEHotspotHelperConfidenceNone@0$kNEHotspotHelperResultAuthenticationRequired@4$kNEHotspotHelperResultCommandNotRecognized@3$kNEHotspotHelperResultFailure@1$kNEHotspotHelperResultSuccess@0$kNEHotspotHelperResultTemporaryFailure@6$kNEHotspotHelperResultUIRequired@2$kNEHotspotHelperResultUnsupportedNetwork@5$'''
misc.update({'NEFilterProviderRemediationURLFlowURL': 'NE_FLOW_URL', 'NEFilterProviderRemediationURLFlowURLHostname': 'NE_FLOW_HOSTNAME', 'NEFilterProviderRemediationURLUsername': 'NE_USERNAME', 'NEFilterProviderRemediationURLOrganization': 'NE_ORGANIZATION'})
aliases = {'NEFilterFlowBytesMax': 'UINT64_MAX'}
r = objc.registerMetaDataForSelector
objc._updatingMetadata(True)
try:
r(b'NEAppProxyFlow', b'openWithLocalEndpoint:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NEAppProxyProvider', b'handleNewFlow:', {'retval': {'type': 'Z'}})
r(b'NEAppProxyProvider', b'handleNewUDPFlow:initialRemoteEndpoint:', {'retval': {'type': b'Z'}})
r(b'NEAppProxyProvider', b'startProxyWithOptions:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NEAppProxyProvider', b'stopProxyWithReason:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': '@?'}}})
r(b'NEAppProxyProviderManager', b'loadAllFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NEAppProxyTCPFlow', b'readDataWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NEAppProxyTCPFlow', b'writeData:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NEAppProxyUDPFlow', b'readDatagramsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NEAppProxyUDPFlow', b'writeDatagrams:sentByEndpoints:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NEAppPushManager', b'isActive', {'retval': {'type': b'Z'}})
r(b'NEAppPushManager', b'isEnabled', {'retval': {'type': b'Z'}})
r(b'NEAppPushManager', b'loadAllFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}})
r(b'NEAppPushManager', b'loadFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}})
r(b'NEAppPushManager', b'removeFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}})
r(b'NEAppPushManager', b'saveToPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}})
r(b'NEAppPushManager', b'setEnabled:', {'arguments': {2: {'type': b'Z'}}})
r(b'NEAppPushProvider', b'startWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}})
r(b'NEAppPushProvider', b'stopWithReason:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}})
r(b'NEDNSProxyManager', b'isEnabled', {'retval': {'type': b'Z'}})
r(b'NEDNSProxyManager', b'loadFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}})
r(b'NEDNSProxyManager', b'removeFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}})
r(b'NEDNSProxyManager', b'saveToPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}})
r(b'NEDNSProxyManager', b'setEnabled:', {'arguments': {2: {'type': b'Z'}}})
r(b'NEDNSProxyProvider', b'handleNewFlow:', {'retval': {'type': b'Z'}})
r(b'NEDNSProxyProvider', b'handleNewUDPFlow:initialRemoteEndpoint:', {'retval': {'type': b'Z'}})
r(b'NEDNSProxyProvider', b'startProxyWithOptions:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}})
r(b'NEDNSProxyProvider', b'stopProxyWithReason:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}})
r(b'NEDNSSettings', b'matchDomainsNoSearch', {'retval': {'type': 'Z'}})
r(b'NEDNSSettings', b'setMatchDomainsNoSearch:', {'arguments': {2: {'type': 'Z'}}})
r(b'NEDNSSettingsManager', b'isEnabled', {'retval': {'type': b'Z'}})
r(b'NEDNSSettingsManager', b'loadFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}})
r(b'NEDNSSettingsManager', b'removeFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}})
r(b'NEDNSSettingsManager', b'saveToPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}})
r(b'NEFilterControlProvider', b'handleNewFlow:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NEFilterControlProvider', b'handleRemediationForFlow:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NEFilterControlVerdict', b'allowVerdictWithUpdateRules:', {'arguments': {2: {'type': b'Z'}}})
r(b'NEFilterControlVerdict', b'dropVerdictWithUpdateRules:', {'arguments': {2: {'type': b'Z'}}})
r(b'NEFilterDataProvider', b'applySettings:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}})
r(b'NEFilterManager', b'isEnabled', {'retval': {'type': 'Z'}})
r(b'NEFilterManager', b'loadFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NEFilterManager', b'removeFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NEFilterManager', b'saveToPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NEFilterManager', b'setEnabled:', {'arguments': {2: {'type': 'Z'}}})
r(b'NEFilterNewFlowVerdict', b'filterDataVerdictWithFilterInbound:peekInboundBytes:filterOutbound:peekOutboundBytes:', {'arguments': {2: {'type': b'Z'}, 4: {'type': b'Z'}}})
r(b'NEFilterPacketProvider', b'packetHandler', {'retval': {'callable': {'retval': {'type': b'q'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'q'}, 4: {'type': b'n^v'}, 5: {'type': b'l'}}}}})
r(b'NEFilterPacketProvider', b'setPacketHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'q'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'q'}, 4: {'type': b'n^v'}, 5: {'type': b'l'}}}}}})
r(b'NEFilterProvider', b'startFilterWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}})
r(b'NEFilterProvider', b'stopFilterWithReason:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}})
r(b'NEFilterProviderConfiguration', b'filterBrowsers', {'retval': {'type': 'Z'}})
r(b'NEFilterProviderConfiguration', b'filterPackets', {'retval': {'type': b'Z'}})
r(b'NEFilterProviderConfiguration', b'filterSockets', {'retval': {'type': 'Z'}})
r(b'NEFilterProviderConfiguration', b'setFilterBrowsers:', {'arguments': {2: {'type': 'Z'}}})
r(b'NEFilterProviderConfiguration', b'setFilterPackets:', {'arguments': {2: {'type': b'Z'}}})
r(b'NEFilterProviderConfiguration', b'setFilterSockets:', {'arguments': {2: {'type': 'Z'}}})
r(b'NEFilterVerdict', b'setShouldReport:', {'arguments': {2: {'type': b'Z'}}})
r(b'NEFilterVerdict', b'shouldReport', {'retval': {'type': b'Z'}})
r(b'NEHotspotConfiguration', b'hidden', {'retval': {'type': b'Z'}})
r(b'NEHotspotConfiguration', b'initWithSSID:passphrase:isWEP:', {'arguments': {4: {'type': b'Z'}}})
r(b'NEHotspotConfiguration', b'initWithSSIDPrefix:passphrase:isWEP:', {'arguments': {4: {'type': b'Z'}}})
r(b'NEHotspotConfiguration', b'joinOnce', {'retval': {'type': b'Z'}})
r(b'NEHotspotConfiguration', b'setHidden:', {'arguments': {2: {'type': b'Z'}}})
r(b'NEHotspotConfiguration', b'setJoinOnce:', {'arguments': {2: {'type': b'Z'}}})
r(b'NEHotspotConfigurationManager', b'applyConfiguration:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}})
r(b'NEHotspotConfigurationManager', b'getConfiguredSSIDsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}})
r(b'NEHotspotEAPSettings', b'isTLSClientCertificateRequired', {'retval': {'type': b'Z'}})
r(b'NEHotspotEAPSettings', b'setIdentity:', {'retval': {'type': b'Z'}})
r(b'NEHotspotEAPSettings', b'setTlsClientCertificateRequired:', {'arguments': {2: {'type': b'Z'}}})
r(b'NEHotspotEAPSettings', b'setTrustedServerCertificates:', {'retval': {'type': b'Z'}})
r(b'NEHotspotHS20Settings', b'initWithDomainName:roamingEnabled:', {'arguments': {3: {'type': b'Z'}}})
r(b'NEHotspotHS20Settings', b'isRoamingEnabled', {'retval': {'type': b'Z'}})
r(b'NEHotspotHS20Settings', b'setRoamingEnabled:', {'arguments': {2: {'type': b'Z'}}})
r(b'NEHotspotHelper', b'logoff:', {'retval': {'type': b'Z'}})
r(b'NEHotspotHelper', b'registerWithOptions:queue:handler:', {'retval': {'type': b'Z'}, 'arguments': {4: {'callable': {'retval': {'type': b'@?'}, 'arguments': {0: {'type': b'^v'}}}}}})
r(b'NEHotspotNetwork', b'didAutoJoin', {'retval': {'type': b'Z'}})
r(b'NEHotspotNetwork', b'didJustJoin', {'retval': {'type': b'Z'}})
r(b'NEHotspotNetwork', b'fetchCurrentWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}})
r(b'NEHotspotNetwork', b'isChosenHelper', {'retval': {'type': b'Z'}})
r(b'NEHotspotNetwork', b'isSecure', {'retval': {'type': b'Z'}})
r(b'NEPacketTunnelFlow', b'readPacketObjectsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NEPacketTunnelFlow', b'readPacketsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NEPacketTunnelFlow', b'writePacketObjects:', {'retval': {'type': 'Z'}})
r(b'NEPacketTunnelFlow', b'writePackets:withProtocols:', {'retval': {'type': 'Z'}})
r(b'NEPacketTunnelProvider', b'createTCPConnectionThroughTunnelToEndpoint:enableTLS:TLSParameters:delegate:', {'arguments': {3: {'type': 'Z'}}})
r(b'NEPacketTunnelProvider', b'startTunnelWithOptions:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NEPacketTunnelProvider', b'stopTunnelWithReason:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': '@?'}}})
r(b'NEProvider', b'createTCPConnectionToEndpoint:enableTLS:TLSParameters:delegate:', {'arguments': {3: {'type': 'Z'}}})
r(b'NEProvider', b'displayMessage:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}}}}}})
r(b'NEProvider', b'sleepWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': '@?'}}})
r(b'NEProxyServer', b'authenticationRequired', {'retval': {'type': 'Z'}})
r(b'NEProxyServer', b'setAuthenticationRequired:', {'arguments': {2: {'type': 'Z'}}})
r(b'NEProxySettings', b'HTTPEnabled', {'retval': {'type': 'Z'}})
r(b'NEProxySettings', b'HTTPSEnabled', {'retval': {'type': 'Z'}})
r(b'NEProxySettings', b'autoProxyConfigurationEnabled', {'retval': {'type': 'Z'}})
r(b'NEProxySettings', b'excludeSimpleHostnames', {'retval': {'type': 'Z'}})
r(b'NEProxySettings', b'setAutoProxyConfigurationEnabled:', {'arguments': {2: {'type': 'Z'}}})
r(b'NEProxySettings', b'setExcludeSimpleHostnames:', {'arguments': {2: {'type': 'Z'}}})
r(b'NEProxySettings', b'setHTTPEnabled:', {'arguments': {2: {'type': 'Z'}}})
r(b'NEProxySettings', b'setHTTPSEnabled:', {'arguments': {2: {'type': 'Z'}}})
r(b'NETransparentProxyManager', b'loadAllFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}})
r(b'NETunnelProvider', b'handleAppMessage:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NETunnelProvider', b'reasserting', {'retval': {'type': 'Z'}})
r(b'NETunnelProvider', b'setReasserting:', {'arguments': {2: {'type': 'Z'}}})
r(b'NETunnelProvider', b'setTunnelNetworkSettings:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NETunnelProviderManager', b'loadAllFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NETunnelProviderSession', b'sendProviderMessage:returnError:responseHandler:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NETunnelProviderSession', b'startTunnelWithOptions:andReturnError:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}})
r(b'NEVPNConnection', b'startVPNTunnelAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}})
r(b'NEVPNConnection', b'startVPNTunnelWithOptions:andReturnError:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}})
r(b'NEVPNManager', b'isEnabled', {'retval': {'type': 'Z'}})
r(b'NEVPNManager', b'isOnDemandEnabled', {'retval': {'type': 'Z'}})
r(b'NEVPNManager', b'loadFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NEVPNManager', b'protocol', {'deprecated': 1011})
r(b'NEVPNManager', b'removeFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NEVPNManager', b'saveToPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NEVPNManager', b'setEnabled:', {'arguments': {2: {'type': 'Z'}}})
r(b'NEVPNManager', b'setOnDemandEnabled:', {'arguments': {2: {'type': 'Z'}}})
r(b'NEVPNManager', b'setProtocol:', {'deprecated': 1011})
r(b'NEVPNProtocol', b'disconnectOnSleep', {'retval': {'type': 'Z'}})
r(b'NEVPNProtocol', b'enforceRoutes', {'retval': {'type': b'Z'}})
r(b'NEVPNProtocol', b'excludeLocalNetworks', {'retval': {'type': b'Z'}})
r(b'NEVPNProtocol', b'includeAllNetworks', {'retval': {'type': b'Z'}})
r(b'NEVPNProtocol', b'setDisconnectOnSleep:', {'arguments': {2: {'type': 'Z'}}})
r(b'NEVPNProtocol', b'setEnforceRoutes:', {'arguments': {2: {'type': b'Z'}}})
r(b'NEVPNProtocol', b'setExcludeLocalNetworks:', {'arguments': {2: {'type': b'Z'}}})
r(b'NEVPNProtocol', b'setIncludeAllNetworks:', {'arguments': {2: {'type': b'Z'}}})
r(b'NEVPNProtocolIKEv2', b'disableMOBIKE', {'retval': {'type': 'Z'}})
r(b'NEVPNProtocolIKEv2', b'disableRedirect', {'retval': {'type': 'Z'}})
r(b'NEVPNProtocolIKEv2', b'enableFallback', {'retval': {'type': b'Z'}})
r(b'NEVPNProtocolIKEv2', b'enablePFS', {'retval': {'type': 'Z'}})
r(b'NEVPNProtocolIKEv2', b'enableRevocationCheck', {'retval': {'type': 'Z'}})
r(b'NEVPNProtocolIKEv2', b'setDisableMOBIKE:', {'arguments': {2: {'type': 'Z'}}})
r(b'NEVPNProtocolIKEv2', b'setDisableRedirect:', {'arguments': {2: {'type': 'Z'}}})
r(b'NEVPNProtocolIKEv2', b'setEnableFallback:', {'arguments': {2: {'type': b'Z'}}})
r(b'NEVPNProtocolIKEv2', b'setEnablePFS:', {'arguments': {2: {'type': 'Z'}}})
r(b'NEVPNProtocolIKEv2', b'setEnableRevocationCheck:', {'arguments': {2: {'type': 'Z'}}})
r(b'NEVPNProtocolIKEv2', b'setStrictRevocationCheck:', {'arguments': {2: {'type': 'Z'}}})
r(b'NEVPNProtocolIKEv2', b'setUseConfigurationAttributeInternalIPSubnet:', {'arguments': {2: {'type': 'Z'}}})
r(b'NEVPNProtocolIKEv2', b'strictRevocationCheck', {'retval': {'type': 'Z'}})
r(b'NEVPNProtocolIKEv2', b'useConfigurationAttributeInternalIPSubnet', {'retval': {'type': 'Z'}})
r(b'NEVPNProtocolIPSec', b'setUseExtendedAuthentication:', {'arguments': {2: {'type': 'Z'}}})
r(b'NEVPNProtocolIPSec', b'useExtendedAuthentication', {'retval': {'type': 'Z'}})
r(b'NSObject', b'appPushManager:didReceiveIncomingCallWithUserInfo:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}})
r(b'NSObject', b'evaluateTrustForConnection:peerCertificateChain:completionHandler:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NSObject', b'provideIdentityForConnection:completionHandler:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}})
r(b'NSObject', b'shouldEvaluateTrustForConnection:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}})
r(b'NSObject', b'shouldProvideIdentityForConnection:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}})
r(b'NWPath', b'isConstrained', {'retval': {'type': b'Z'}})
r(b'NWPath', b'isEqualToPath:', {'retval': {'type': 'Z'}})
r(b'NWPath', b'isExpensive', {'retval': {'type': 'Z'}})
r(b'NWTCPConnection', b'hasBetterPath', {'retval': {'type': 'Z'}})
r(b'NWTCPConnection', b'isViable', {'retval': {'type': 'Z'}})
r(b'NWTCPConnection', b'readLength:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}})
r(b'NWTCPConnection', b'readMinimumLength:maximumLength:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}})
r(b'NWTCPConnection', b'write:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}})
r(b'NWUDPSession', b'hasBetterPath', {'retval': {'type': 'Z'}})
r(b'NWUDPSession', b'isViable', {'retval': {'type': 'Z'}})
r(b'NWUDPSession', b'setReadHandler:maxDatagrams:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}})
r(b'NWUDPSession', b'writeDatagram:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}})
r(b'NWUDPSession', b'writeMultipleDatagrams:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}})
r(b'null', b'didAutoJoin', {'retval': {'type': b'Z'}})
r(b'null', b'didJustJoin', {'retval': {'type': b'Z'}})
r(b'null', b'isChosenHelper', {'retval': {'type': b'Z'}})
r(b'null', b'isSecure', {'retval': {'type': b'Z'}})
finally:
objc._updatingMetadata(False)
expressions = {}
# END OF FILE
| 154.661458
| 7,545
| 0.691194
| 2,810
| 29,695
| 7.296441
| 0.181851
| 0.059991
| 0.033946
| 0.053748
| 0.436424
| 0.419695
| 0.394576
| 0.337853
| 0.278203
| 0.257377
| 0
| 0.024421
| 0.078869
| 29,695
| 191
| 7,546
| 155.471204
| 0.725149
| 0.003603
| 0
| 0
| 1
| 0.01105
| 0.676076
| 0.423042
| 0
| 0
| 0
| 0
| 0.016575
| 1
| 0.01105
| false
| 0.022099
| 0.005525
| 0.01105
| 0.016575
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c4c5cb9be617a6585ef8b182b5b24b9ee0d7549a
| 66
|
py
|
Python
|
tests.py
|
zurgeg/esp32-micropython-installer
|
2367f4570000db7b3adcb7aa50a2e96b391d91a9
|
[
"MIT"
] | null | null | null |
tests.py
|
zurgeg/esp32-micropython-installer
|
2367f4570000db7b3adcb7aa50a2e96b391d91a9
|
[
"MIT"
] | 1
|
2020-03-14T15:29:32.000Z
|
2020-03-14T15:29:32.000Z
|
tests.py
|
zurgeg/esp32-micropython-installer
|
2367f4570000db7b3adcb7aa50a2e96b391d91a9
|
[
"MIT"
] | null | null | null |
import os
os.system('ampy --help')
os.system('esptool.py --help')
| 16.5
| 30
| 0.681818
| 11
| 66
| 4.090909
| 0.636364
| 0.355556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 66
| 3
| 31
| 22
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.424242
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c4c75f41f1e1fa1e5dc379633e37c4bcc5442f91
| 319
|
py
|
Python
|
Flask/flaskApp.py
|
golesuman/flask-webpage
|
a057c4241fd536559d6ab61f6213535ea3d7f6fa
|
[
"Apache-2.0"
] | 1
|
2020-11-18T01:20:15.000Z
|
2020-11-18T01:20:15.000Z
|
Flask/flaskApp.py
|
golesuman/flask-webpage
|
a057c4241fd536559d6ab61f6213535ea3d7f6fa
|
[
"Apache-2.0"
] | null | null | null |
Flask/flaskApp.py
|
golesuman/flask-webpage
|
a057c4241fd536559d6ab61f6213535ea3d7f6fa
|
[
"Apache-2.0"
] | null | null | null |
from flask import Flask, render_template


def hello_world():
    """Serve the landing page template."""
    return render_template("index.html")


def About():
    """Serve the AboutMe page template."""
    return render_template('AboutMe.html')


def bootstrap():
    """Serve the boots page template."""
    return render_template('boots.html')


app = Flask(__name__)
# add_url_rule is the decorator-free equivalent of @app.route; the endpoint
# defaults to the view function's name, exactly as with the decorator.
app.add_url_rule('/', view_func=hello_world)
app.add_url_rule('/About', view_func=About)
app.add_url_rule('/boots', view_func=bootstrap)
app.run()
| 22.785714
| 43
| 0.680251
| 41
| 319
| 5.073171
| 0.463415
| 0.269231
| 0.288462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15674
| 319
| 14
| 44
| 22.785714
| 0.773234
| 0
| 0
| 0
| 0
| 0
| 0.14658
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.083333
| 0.25
| 0.583333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
c4d8ad74ae930e704d9b23a379f37f5079a82ba9
| 402
|
py
|
Python
|
sachima/game.py
|
DessertsLab/Sachima
|
5ddf2c8afd493b593e36703dbb09b000b08eeede
|
[
"MIT"
] | 4
|
2019-01-25T01:44:36.000Z
|
2020-06-28T00:44:43.000Z
|
sachima/game.py
|
DessertsLab/Sachima
|
5ddf2c8afd493b593e36703dbb09b000b08eeede
|
[
"MIT"
] | 154
|
2019-01-28T03:35:34.000Z
|
2022-03-24T03:04:25.000Z
|
sachima/game.py
|
DessertsLab/Sachima
|
5ddf2c8afd493b593e36703dbb09b000b08eeede
|
[
"MIT"
] | 1
|
2019-02-18T06:10:55.000Z
|
2019-02-18T06:10:55.000Z
|
if __name__ == "__main__":
    # Static ASCII map printed for visual inspection.
    # NOTE(review): legend is undocumented — 'a', 'o' and '#' presumably mark
    # the ant, obstacles and a target; confirm against the rest of the package.
    world = """
------a------------------------------------
------------------o----------------------
-----------------o-----------------------
------------------------------------------
------------------------------------------
------------------------------------------
------------------------------------------#
"""
    print(world)
| 30.923077
| 47
| 0.099502
| 11
| 402
| 2.727273
| 0.727273
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141791
| 402
| 12
| 48
| 33.5
| 0.086957
| 0
| 0
| 0.272727
| 0
| 0
| 0.850746
| 0.731343
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.090909
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f20500b7e50945538710fb4f513f4a15558b1a69
| 16,614
|
py
|
Python
|
docker-app/qfieldcloud/core/tests/test_qfield_file.py
|
stcz/qfieldcloud
|
b128f8689268d73de052e67e0c09c6c0d5abdc05
|
[
"MIT"
] | null | null | null |
docker-app/qfieldcloud/core/tests/test_qfield_file.py
|
stcz/qfieldcloud
|
b128f8689268d73de052e67e0c09c6c0d5abdc05
|
[
"MIT"
] | null | null | null |
docker-app/qfieldcloud/core/tests/test_qfield_file.py
|
stcz/qfieldcloud
|
b128f8689268d73de052e67e0c09c6c0d5abdc05
|
[
"MIT"
] | null | null | null |
import json
import logging
import os
import tempfile
import time
import psycopg2
import requests
from django.http.response import HttpResponse, HttpResponseRedirect
from qfieldcloud.authentication.models import AuthToken
from qfieldcloud.core.geodb_utils import delete_db_and_role
from qfieldcloud.core.models import Geodb, Job, PackageJob, Project, User
from rest_framework import status
from rest_framework.test import APITransactionTestCase
from .utils import testdata_path
logging.disable(logging.CRITICAL)
class QfcTestCase(APITransactionTestCase):
    """Integration tests for the QField packaging/export API.

    Each test uploads project files via /api/v1/files/, triggers an export via
    /api/v1/qfield-files/export/, then polls the export status endpoint until
    the worker finishes (or the polling budget is exhausted).
    """
    def setUp(self):
        """Create two users, a token, a private project and a geodb connection."""
        # Create a user
        self.user1 = User.objects.create_user(username="user1", password="abc123")
        self.user2 = User.objects.create_user(username="user2", password="abc123")
        self.token1 = AuthToken.objects.get_or_create(user=self.user1)[0]
        # Create a project
        self.project1 = Project.objects.create(
            name="project1", is_public=False, owner=self.user1
        )
        try:
            # Best-effort cleanup of leftovers from a previous run; on a fresh
            # run there is nothing to delete, hence the broad swallow.
            delete_db_and_role("test", self.user1.username)
        except Exception:
            pass
        self.geodb = Geodb.objects.create(
            user=self.user1,
            dbname="test",
            hostname="geodb",
            port=5432,
        )
        # Direct DB connection used by tests that create postgis tables.
        self.conn = psycopg2.connect(
            dbname="test",
            user=os.environ.get("GEODB_USER"),
            password=os.environ.get("GEODB_PASSWORD"),
            host="geodb",
            port=5432,
        )
    def tearDown(self):
        """Close the geodb connection opened in setUp."""
        self.conn.close()
    def fail(self, msg: str, job: Job = None):
        """Fail the test, appending the job's output and feedback (if given) to msg."""
        if job:
            msg += f"\n\nOutput:\n================\n{job.output}\n================"
            if job.feedback:
                if "error_stack" in job.feedback:
                    msg += "\n\nError:\n================"
                    for single_error_stack in job.feedback["error_stack"]:
                        msg += "\n"
                        msg += single_error_stack
                    msg += f" {job.feedback['error']}\n================"
                feedback = json.dumps(job.feedback, indent=2, sort_keys=True)
                msg += f"\n\nFeedback:\n================\n{feedback}\n================"
            else:
                msg += "\n\nFeedback: None"
        super().fail(msg)
    def assertHttpOk(self, response: HttpResponse):
        """Assert a 2xx status; show the JSON body on failure, or raw content
        when the body is not JSON (or the first assertion itself fails)."""
        try:
            self.assertTrue(status.is_success(response.status_code), response.json())
        except Exception:
            self.assertTrue(status.is_success(response.status_code), response.content)
    def test_list_files_for_qfield(self):
        """Exporting a project with a postgis layer lists the packaged files."""
        self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key)
        cur = self.conn.cursor()
        cur.execute(
            """
            CREATE TABLE point (
                id integer,
                geometry geometry(point, 2056)
            );
            """
        )
        self.conn.commit()
        cur.execute(
            """
            INSERT INTO point(id, geometry)
            VALUES(1, ST_GeomFromText('POINT(2725505 1121435)', 2056));
            """
        )
        self.conn.commit()
        # Add the qgis project
        # NOTE(review): upload file handles opened here (and in the tests
        # below) are never closed — consider with-blocks.
        file = testdata_path("delta/project2.qgs")
        response = self.client.post(
            "/api/v1/files/{}/project.qgs/".format(self.project1.id),
            {"file": open(file, "rb")},
            format="multipart",
        )
        self.assertTrue(status.is_success(response.status_code))
        response = self.client.post(
            "/api/v1/qfield-files/export/{}/".format(self.project1.id)
        )
        self.assertTrue(status.is_success(response.status_code))
        # Wait for the worker to finish
        for _ in range(20):
            time.sleep(3)
            response = self.client.get(
                "/api/v1/qfield-files/export/{}/".format(self.project1.id),
            )
            payload = response.json()
            if payload["status"] == "STATUS_EXPORTED":
                response = self.client.get(
                    "/api/v1/qfield-files/{}/".format(self.project1.id),
                )
                json_resp = response.json()
                # Sort by name so the expected positions are deterministic.
                files = sorted(json_resp["files"], key=lambda k: k["name"])
                self.assertEqual(files[0]["name"], "data.gpkg")
                self.assertEqual(files[1]["name"], "project_qfield.qgs")
                return
            elif payload["status"] == "STATUS_ERROR":
                self.fail("Worker failed with error")
        self.fail("Worker didn't finish")
    def test_list_files_for_qfield_incomplete_project(self):
        """Export must return 400/no_qgis_project when the .qgs file is missing."""
        # the qgs file is missing
        self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key)
        # Add files to the project
        file = testdata_path("delta/points.geojson")
        response = self.client.post(
            "/api/v1/files/{}/points.geojson/".format(self.project1.id),
            {"file": open(file, "rb")},
            format="multipart",
        )
        self.assertTrue(status.is_success(response.status_code))
        response = self.client.post(
            "/api/v1/qfield-files/export/{}/".format(self.project1.id)
        )
        self.assertEqual(response.status_code, 400)
        self.assertEqual(response.json()["code"], "no_qgis_project")
    def test_download_file_for_qfield(self):
        """Export a complete project and download the packaged .qgs via redirect."""
        self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key)
        # Add files to the project
        file = testdata_path("delta/points.geojson")
        response = self.client.post(
            "/api/v1/files/{}/points.geojson/".format(self.project1.id),
            {"file": open(file, "rb")},
            format="multipart",
        )
        self.assertTrue(status.is_success(response.status_code))
        file = testdata_path("delta/polygons.geojson")
        response = self.client.post(
            "/api/v1/files/{}/polygons.geojson/".format(self.project1.id),
            {"file": open(file, "rb")},
            format="multipart",
        )
        self.assertTrue(status.is_success(response.status_code))
        file = testdata_path("delta/project.qgs")
        response = self.client.post(
            "/api/v1/files/{}/project.qgs/".format(self.project1.id),
            {"file": open(file, "rb")},
            format="multipart",
        )
        self.assertTrue(status.is_success(response.status_code))
        # Launch the export
        response = self.client.post(
            "/api/v1/qfield-files/export/{}/".format(self.project1.id),
        )
        self.assertTrue(status.is_success(response.status_code))
        # Wait for the worker to finish
        for _ in range(10):
            time.sleep(3)
            response = self.client.get(
                "/api/v1/qfield-files/export/{}/".format(self.project1.id),
            )
            self.assertHttpOk(response)
            payload = response.json()
            if payload["status"] == "STATUS_EXPORTED":
                response = self.client.get(
                    f"/api/v1/qfield-files/{self.project1.id}/project_qfield.qgs/"
                )
                self.assertIsInstance(response, HttpResponseRedirect)
                temp_dir = tempfile.mkdtemp()
                local_file = os.path.join(temp_dir, "project.qgs")
                # We cannot use the self.client HTTP client, since it does not support
                # requests outside the current Django App
                # Using the rest_api_framework.RequestsClient is not much better, so better
                # use the `requests` module
                with requests.get(response.url, stream=True) as r:
                    with open(local_file, "wb") as f:
                        for chunk in r.iter_content():
                            f.write(chunk)
                with open(local_file, "r") as f:
                    self.assertEqual(
                        f.readline().strip(),
                        "<!DOCTYPE qgis PUBLIC 'http://mrcc.com/qgis.dtd' 'SYSTEM'>",
                    )
                return
            elif payload["status"] == "STATUS_ERROR":
                self.fail(
                    "Worker failed with error",
                    job=PackageJob.objects.filter(project=self.project1).last(),
                )
        self.fail("Worker didn't finish")
    def test_list_files_for_qfield_broken_file(self):
        """A broken .qgs upload must drive the export job to STATUS_ERROR."""
        self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key)
        # Add files to the project
        file = testdata_path("delta/broken.qgs")
        response = self.client.post(
            "/api/v1/files/{}/broken.qgs/".format(self.project1.id),
            {"file": open(file, "rb")},
            format="multipart",
        )
        self.assertTrue(status.is_success(response.status_code))
        # Launch the export
        response = self.client.post(
            "/api/v1/qfield-files/export/{}/".format(self.project1.id),
        )
        self.assertTrue(status.is_success(response.status_code))
        # Wait for the worker to finish
        for _ in range(10):
            time.sleep(3)
            response = self.client.get(
                "/api/v1/qfield-files/export/{}/".format(self.project1.id),
            )
            self.assertHttpOk(response)
            if response.json()["status"] == "STATUS_ERROR":
                return
        self.fail(
            "Worker didn't finish", job=Job.objects.filter(project=self.project1).last()
        )
    def test_downloaded_file_has_canvas_name(self):
        """The packaged .qgs must contain the theMapCanvas name attribute."""
        self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key)
        # Add files to the project
        file = testdata_path("delta/points.geojson")
        response = self.client.post(
            "/api/v1/files/{}/points.geojson/".format(self.project1.id),
            {"file": open(file, "rb")},
            format="multipart",
        )
        self.assertTrue(status.is_success(response.status_code))
        file = testdata_path("delta/polygons.geojson")
        response = self.client.post(
            "/api/v1/files/{}/polygons.geojson/".format(self.project1.id),
            {"file": open(file, "rb")},
            format="multipart",
        )
        self.assertTrue(status.is_success(response.status_code))
        file = testdata_path("delta/project.qgs")
        response = self.client.post(
            "/api/v1/files/{}/project.qgs/".format(self.project1.id),
            {"file": open(file, "rb")},
            format="multipart",
        )
        self.assertTrue(status.is_success(response.status_code))
        # Launch the export
        response = self.client.post(
            "/api/v1/qfield-files/export/{}/".format(self.project1.id),
        )
        self.assertTrue(status.is_success(response.status_code))
        # Wait for the worker to finish
        for _ in range(10):
            time.sleep(3)
            response = self.client.get(
                "/api/v1/qfield-files/export/{}/".format(self.project1.id),
            )
            payload = response.json()
            if payload["status"] == "STATUS_EXPORTED":
                response = self.client.get(
                    f"/api/v1/qfield-files/{self.project1.id}/project_qfield.qgs/"
                )
                self.assertIsInstance(response, HttpResponseRedirect)
                temp_dir = tempfile.mkdtemp()
                local_file = os.path.join(temp_dir, "project.qgs")
                # We cannot use the self.client HTTP client, since it does not support
                # requests outside the current Django App
                # Using the rest_api_framework.RequestsClient is not much better, so better
                # use the `requests` module
                with requests.get(response.url, stream=True) as r:
                    with open(local_file, "wb") as f:
                        for chunk in r.iter_content():
                            f.write(chunk)
                with open(local_file, "r") as f:
                    for line in f:
                        if 'name="theMapCanvas"' in line:
                            return
                    self.fail(
                        'Worker failed, missing .qgs XML attribute: name="theMapCanvas"'
                    )
            elif payload["status"] == "STATUS_ERROR":
                self.fail("Worker failed with error")
        self.fail("Worker didn't finish or there was an error")
    def test_download_project_with_broken_layer_datasources(self):
        """Export must succeed and report per-layer validity for broken datasources."""
        self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key)
        # Add files to the project
        file = testdata_path("delta/points.geojson")
        response = self.client.post(
            "/api/v1/files/{}/points.geojson/".format(self.project1.id),
            {"file": open(file, "rb")},
            format="multipart",
        )
        self.assertTrue(status.is_success(response.status_code))
        file = testdata_path("delta/project_broken_datasource.qgs")
        response = self.client.post(
            "/api/v1/files/{}/project.qgs/".format(self.project1.id),
            {"file": open(file, "rb")},
            format="multipart",
        )
        self.assertTrue(status.is_success(response.status_code))
        # Launch the export
        response = self.client.post(
            "/api/v1/qfield-files/export/{}/".format(self.project1.id),
        )
        self.assertTrue(status.is_success(response.status_code))
        # Wait for the worker to finish
        for _ in range(10):
            time.sleep(3)
            response = self.client.get(
                "/api/v1/qfield-files/export/{}/".format(self.project1.id),
            )
            payload = response.json()
            if payload["status"] == "STATUS_EXPORTED":
                response = self.client.get(
                    "/api/v1/qfield-files/{}/".format(self.project1.id),
                )
                self.assertHttpOk(response)
                export_payload = response.json()
                # Layer ids below come from the test fixture .qgs file.
                layer_ok = export_payload["layers"][
                    "points_c2784cf9_c9c3_45f6_9ce5_98a6047e4d6c"
                ]
                layer_failed = export_payload["layers"][
                    "surfacestructure_35131bca_337c_483b_b09e_1cf77b1dfb16"
                ]
                self.assertTrue(layer_ok["valid"], layer_ok["status"])
                self.assertFalse(layer_failed["valid"], layer_failed["status"])
                return
            elif payload["status"] == "STATUS_ERROR":
                self.fail(
                    "Worker failed with error",
                    job=Job.objects.filter(project=self.project1).last(),
                )
        self.fail("Worker didn't finish")
    def test_filename_with_whitespace(self):
        """A project filename containing whitespace must still export cleanly."""
        self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key)
        # Add files to the project
        file = testdata_path("delta/points.geojson")
        response = self.client.post(
            "/api/v1/files/{}/points.geojson/".format(self.project1.id),
            {"file": open(file, "rb")},
            format="multipart",
        )
        self.assertTrue(status.is_success(response.status_code))
        file = testdata_path("delta/polygons.geojson")
        response = self.client.post(
            "/api/v1/files/{}/polygons.geojson/".format(self.project1.id),
            {"file": open(file, "rb")},
            format="multipart",
        )
        self.assertTrue(status.is_success(response.status_code))
        file = testdata_path("delta/project.qgs")
        response = self.client.post(
            "/api/v1/files/{}/whitespace project.qgs/".format(self.project1.id),
            {"file": open(file, "rb")},
            format="multipart",
        )
        self.assertTrue(status.is_success(response.status_code))
        # Launch the export
        response = self.client.post(
            "/api/v1/qfield-files/export/{}/".format(self.project1.id),
        )
        self.assertTrue(status.is_success(response.status_code))
        # Wait for the worker to finish
        for _ in range(10):
            time.sleep(3)
            response = self.client.get(
                "/api/v1/qfield-files/export/{}/".format(self.project1.id),
            )
            payload = response.json()
            if payload["status"] == "STATUS_EXPORTED":
                return
            elif payload["status"] == "STATUS_ERROR":
                self.fail("Worker failed with error")
        self.fail("Worker didn't finish or there was an error")
| 36.514286
| 91
| 0.555315
| 1,778
| 16,614
| 5.084364
| 0.140607
| 0.044248
| 0.061726
| 0.064159
| 0.738274
| 0.723673
| 0.719137
| 0.716482
| 0.707633
| 0.695465
| 0
| 0.016418
| 0.314434
| 16,614
| 454
| 92
| 36.594714
| 0.777261
| 0.054954
| 0
| 0.607784
| 0
| 0
| 0.176884
| 0.090359
| 0
| 0
| 0
| 0
| 0.10479
| 1
| 0.032934
| false
| 0.011976
| 0.041916
| 0
| 0.095808
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1eedb5ebbace35b279cfe2795f493c1721dea351
| 416
|
py
|
Python
|
src/Shared/InterfaceAdapters/IPaginator.py
|
DigiChanges/python-experience
|
3332d2c4d922a5eb302fa151582a4f63c668a570
|
[
"MIT"
] | null | null | null |
src/Shared/InterfaceAdapters/IPaginator.py
|
DigiChanges/python-experience
|
3332d2c4d922a5eb302fa151582a4f63c668a570
|
[
"MIT"
] | null | null | null |
src/Shared/InterfaceAdapters/IPaginator.py
|
DigiChanges/python-experience
|
3332d2c4d922a5eb302fa151582a4f63c668a570
|
[
"MIT"
] | null | null | null |
from abc import ABC, abstractmethod
from typing import Any
class IPaginator(ABC):
    """Abstract contract for paginator implementations.

    Concrete subclasses must provide the paginated results, the total item
    count, and the URLs used to navigate between pages.
    """

    @abstractmethod
    def paginate(self) -> Any:
        """Return the paginated result set."""
        ...

    @abstractmethod
    def getTotal(self) -> int:
        """Return the total number of items."""
        ...

    @abstractmethod
    def getCurrentUrl(self) -> str:
        """Return the URL of the current page."""
        ...

    @abstractmethod
    def getNextUrl(self) -> str:
        """Return the URL of the next page."""
        ...

    @abstractmethod
    def getExist(self) -> bool:
        """Return whether further results exist."""
        ...
| 16.64
| 35
| 0.612981
| 42
| 416
| 6.071429
| 0.452381
| 0.333333
| 0.329412
| 0.196078
| 0.219608
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.305288
| 416
| 25
| 36
| 16.64
| 0.882353
| 0
| 0
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.277778
| false
| 0.277778
| 0.111111
| 0
| 0.444444
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
48049eb279750085b104ecc67fed60d3cf6decb8
| 144
|
py
|
Python
|
tests/test_noise.py
|
patel-zeel/regdata
|
4b823368750a1afd24b1b8d63e3b2ba58d983c79
|
[
"MIT"
] | null | null | null |
tests/test_noise.py
|
patel-zeel/regdata
|
4b823368750a1afd24b1b8d63e3b2ba58d983c79
|
[
"MIT"
] | 3
|
2021-09-22T08:31:49.000Z
|
2021-10-13T14:23:14.000Z
|
tests/test_noise.py
|
patel-zeel/regdata
|
4b823368750a1afd24b1b8d63e3b2ba58d983c79
|
[
"MIT"
] | null | null | null |
import pytest
def test_noise():
    """Constructing Step with both s_to_n_ratio=2 and noise_variance=2 must raise ValueError."""
    import regdata as rd

    # Call-style form of pytest.raises; equivalent to the context-manager form.
    pytest.raises(ValueError, rd.Step, s_to_n_ratio=2, noise_variance=2)
| 20.571429
| 49
| 0.708333
| 23
| 144
| 4.217391
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017391
| 0.201389
| 144
| 6
| 50
| 24
| 0.826087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
481a63486886655f4d23b99ce43e8a415d03fcf7
| 5,881
|
py
|
Python
|
tests/components/wemo/test_binary_sensor.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 4
|
2021-07-11T09:11:00.000Z
|
2022-02-27T14:43:50.000Z
|
tests/components/wemo/test_binary_sensor.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 1,016
|
2019-06-18T21:27:47.000Z
|
2020-03-06T11:09:58.000Z
|
tests/components/wemo/test_binary_sensor.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 3
|
2022-01-02T18:49:54.000Z
|
2022-01-25T02:03:54.000Z
|
"""Tests for the Wemo binary_sensor entity."""
import pytest
from homeassistant.components.homeassistant import (
DOMAIN as HA_DOMAIN,
SERVICE_UPDATE_ENTITY,
)
from homeassistant.components.wemo.binary_sensor import (
InsightBinarySensor,
MakerBinarySensor,
)
from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON
from homeassistant.setup import async_setup_component
from .entity_test_helpers import EntityTestHelpers
class TestMotion(EntityTestHelpers):
    """Test for the pyWeMo Motion device."""
    @pytest.fixture
    def pywemo_model(self):
        """Pywemo Motion models use the binary_sensor platform."""
        return "Motion"
    async def test_binary_sensor_registry_state_callback(
        self, hass, pywemo_registry, pywemo_device, wemo_entity
    ):
        """Verify that the binary_sensor receives state updates from the registry."""
        # On state.
        pywemo_device.get_state.return_value = 1
        # Simulate the pywemo subscription callback firing for this device.
        pywemo_registry.callbacks[pywemo_device.name](pywemo_device, "", "")
        await hass.async_block_till_done()
        assert hass.states.get(wemo_entity.entity_id).state == STATE_ON
        # Off state.
        pywemo_device.get_state.return_value = 0
        pywemo_registry.callbacks[pywemo_device.name](pywemo_device, "", "")
        await hass.async_block_till_done()
        assert hass.states.get(wemo_entity.entity_id).state == STATE_OFF
    async def test_binary_sensor_update_entity(
        self, hass, pywemo_registry, pywemo_device, wemo_entity
    ):
        """Verify that the binary_sensor performs state updates."""
        # The update_entity service is provided by the HA core integration.
        await async_setup_component(hass, HA_DOMAIN, {})
        # On state.
        pywemo_device.get_state.return_value = 1
        await hass.services.async_call(
            HA_DOMAIN,
            SERVICE_UPDATE_ENTITY,
            {ATTR_ENTITY_ID: [wemo_entity.entity_id]},
            blocking=True,
        )
        assert hass.states.get(wemo_entity.entity_id).state == STATE_ON
        # Off state.
        pywemo_device.get_state.return_value = 0
        await hass.services.async_call(
            HA_DOMAIN,
            SERVICE_UPDATE_ENTITY,
            {ATTR_ENTITY_ID: [wemo_entity.entity_id]},
            blocking=True,
        )
        assert hass.states.get(wemo_entity.entity_id).state == STATE_OFF
class TestMaker(EntityTestHelpers):
    """Test for the pyWeMo Maker device."""
    @pytest.fixture
    def pywemo_model(self):
        """Pywemo Motion models use the binary_sensor platform."""
        return "Maker"
    @pytest.fixture
    def wemo_entity_suffix(self):
        """Select the MakerBinarySensor entity."""
        return MakerBinarySensor._name_suffix.lower()
    @pytest.fixture(name="pywemo_device")
    def pywemo_device_fixture(self, pywemo_device):
        """Fixture for WeMoDevice instances."""
        # Minimal maker_params so the entity sees a present sensor; has_sensor
        # and sensor_state mirror the corresponding dict entries.
        pywemo_device.maker_params = {
            "hassensor": 1,
            "sensorstate": 1,
            "switchmode": 1,
            "switchstate": 0,
        }
        pywemo_device.has_sensor = pywemo_device.maker_params["hassensor"]
        pywemo_device.sensor_state = pywemo_device.maker_params["sensorstate"]
        yield pywemo_device
    async def test_registry_state_callback(
        self, hass, pywemo_registry, pywemo_device, wemo_entity
    ):
        """Verify that the binary_sensor receives state updates from the registry."""
        # On state.
        # The Maker reads inverted: sensor_state 0 is expected to yield STATE_ON.
        pywemo_device.sensor_state = 0
        pywemo_registry.callbacks[pywemo_device.name](pywemo_device, "", "")
        await hass.async_block_till_done()
        assert hass.states.get(wemo_entity.entity_id).state == STATE_ON
        # Off state.
        pywemo_device.sensor_state = 1
        pywemo_registry.callbacks[pywemo_device.name](pywemo_device, "", "")
        await hass.async_block_till_done()
        assert hass.states.get(wemo_entity.entity_id).state == STATE_OFF
class TestInsight(EntityTestHelpers):
    """Test for the pyWeMo Insight device."""
    @pytest.fixture
    def pywemo_model(self):
        """Pywemo Motion models use the binary_sensor platform."""
        return "Insight"
    @pytest.fixture
    def wemo_entity_suffix(self):
        """Select the InsightBinarySensor entity."""
        return InsightBinarySensor._name_suffix.lower()
    @pytest.fixture(name="pywemo_device")
    def pywemo_device_fixture(self, pywemo_device):
        """Fixture for WeMoDevice instances."""
        # Minimal insight_params for the entity to read; the tests below
        # override "state" per scenario.
        pywemo_device.insight_params = {
            "currentpower": 1.0,
            "todaymw": 200000000.0,
            "state": "0",
            "onfor": 0,
            "ontoday": 0,
            "ontotal": 0,
            "powerthreshold": 0,
        }
        yield pywemo_device
    async def test_registry_state_callback(
        self, hass, pywemo_registry, pywemo_device, wemo_entity
    ):
        """Verify that the binary_sensor receives state updates from the registry."""
        # On state: device on and insight state "1".
        pywemo_device.get_state.return_value = 1
        pywemo_device.insight_params["state"] = "1"
        pywemo_registry.callbacks[pywemo_device.name](pywemo_device, "", "")
        await hass.async_block_till_done()
        assert hass.states.get(wemo_entity.entity_id).state == STATE_ON
        # Standby (Off) state: device on but insight state "8" reads as off.
        pywemo_device.get_state.return_value = 1
        pywemo_device.insight_params["state"] = "8"
        pywemo_registry.callbacks[pywemo_device.name](pywemo_device, "", "")
        await hass.async_block_till_done()
        assert hass.states.get(wemo_entity.entity_id).state == STATE_OFF
        # Off state.
        pywemo_device.get_state.return_value = 0
        pywemo_device.insight_params["state"] = "1"
        pywemo_registry.callbacks[pywemo_device.name](pywemo_device, "", "")
        await hass.async_block_till_done()
        assert hass.states.get(wemo_entity.entity_id).state == STATE_OFF
| 35.859756
| 85
| 0.666893
| 687
| 5,881
| 5.419214
| 0.135371
| 0.141821
| 0.047274
| 0.053183
| 0.792103
| 0.726833
| 0.726833
| 0.726833
| 0.726027
| 0.691378
| 0
| 0.007341
| 0.235674
| 5,881
| 163
| 86
| 36.079755
| 0.820912
| 0.094202
| 0
| 0.610619
| 0
| 0
| 0.036397
| 0
| 0
| 0
| 0
| 0
| 0.079646
| 1
| 0.061947
| false
| 0
| 0.053097
| 0
| 0.185841
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
48294613ec0b38a81f7e318d47c91775abdd37f2
| 416
|
py
|
Python
|
src/classifier/interface.py
|
William9923/IF4072-SentimentClassification
|
5e22a6da418056955243c310bab0382e4683b781
|
[
"MIT"
] | null | null | null |
src/classifier/interface.py
|
William9923/IF4072-SentimentClassification
|
5e22a6da418056955243c310bab0382e4683b781
|
[
"MIT"
] | null | null | null |
src/classifier/interface.py
|
William9923/IF4072-SentimentClassification
|
5e22a6da418056955243c310bab0382e4683b781
|
[
"MIT"
] | null | null | null |
from abc import ABC, abstractmethod
class IClassifier(ABC):
    """Abstract contract for classifier implementations.

    Subclasses must implement training, prediction, reporting, and
    persistence (save/load).
    """

    @abstractmethod
    def train(self):
        """Train the model."""
        ...

    @abstractmethod
    def predict_proba(self):
        """Return class probability estimates."""
        ...

    @abstractmethod
    def predict(self):
        """Return predicted labels."""
        ...

    @abstractmethod
    def summary(self):
        """Report a summary of the model."""
        ...

    @abstractmethod
    def save(self):
        """Persist the model."""
        ...

    @abstractmethod
    def load(self):
        """Restore a persisted model."""
        ...
| 14.344828
| 35
| 0.574519
| 39
| 416
| 6.102564
| 0.384615
| 0.428571
| 0.462185
| 0.52521
| 0.268908
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.358173
| 416
| 28
| 36
| 14.857143
| 0.891386
| 0
| 0
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3
| false
| 0.3
| 0.05
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
486be6799e220e0c5e1de56c3cb23902bacb1e85
| 116
|
py
|
Python
|
utils/__init__.py
|
willdevgh/dlbingwallpaper
|
8514399a9662febd3a7521bbdfaf746dd544d79b
|
[
"MIT"
] | 1
|
2017-07-12T08:00:52.000Z
|
2017-07-12T08:00:52.000Z
|
utils/__init__.py
|
willdevgh/dlbingwallpaper
|
8514399a9662febd3a7521bbdfaf746dd544d79b
|
[
"MIT"
] | 6
|
2021-03-18T20:29:55.000Z
|
2022-03-11T23:19:40.000Z
|
utils/__init__.py
|
willdevgh/dlbingwallpaper
|
8514399a9662febd3a7521bbdfaf746dd544d79b
|
[
"MIT"
] | null | null | null |
from utils.database import WallpaperDatabase
from utils.spin import Spin
# Public API of the utils package.
__all__ = ["WallpaperDatabase", "Spin"]
| 23.2
| 44
| 0.784483
| 13
| 116
| 6.692308
| 0.538462
| 0.206897
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12069
| 116
| 4
| 45
| 29
| 0.852941
| 0
| 0
| 0
| 0
| 0
| 0.181034
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6febc036822800b0bbe4f13857e0e9653620aa36
| 235
|
py
|
Python
|
tests/basics/builtin_issubclass.py
|
learnforpractice/micropython-cpp
|
004bc8382f74899e7b876cc29bfa6a9cc976ba10
|
[
"MIT"
] | 13,648
|
2015-01-01T01:34:51.000Z
|
2022-03-31T16:19:53.000Z
|
tests/basics/builtin_issubclass.py
|
learnforpractice/micropython-cpp
|
004bc8382f74899e7b876cc29bfa6a9cc976ba10
|
[
"MIT"
] | 7,092
|
2015-01-01T07:59:11.000Z
|
2022-03-31T23:52:18.000Z
|
tests/basics/builtin_issubclass.py
|
learnforpractice/micropython-cpp
|
004bc8382f74899e7b876cc29bfa6a9cc976ba10
|
[
"MIT"
] | 4,942
|
2015-01-02T11:48:50.000Z
|
2022-03-31T19:57:10.000Z
|
# test builtin issubclass
class A:
    pass


print(issubclass(A, A))
print(issubclass(A, (A,)))

# issubclass must reject non-class arguments in either position.
for bad_args in ((A, 1), ('a', 1)):
    try:
        issubclass(*bad_args)
    except TypeError:
        print('TypeError')
| 13.055556
| 26
| 0.651064
| 30
| 235
| 5.1
| 0.366667
| 0.287582
| 0.20915
| 0.222222
| 0.575163
| 0.575163
| 0.575163
| 0.575163
| 0
| 0
| 0
| 0.010638
| 0.2
| 235
| 17
| 27
| 13.823529
| 0.803191
| 0.097872
| 0
| 0.5
| 0
| 0
| 0.090476
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.083333
| 0
| 0
| 0.083333
| 0.333333
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
6ff120b369a70dd964f738ade7013dec14e0a541
| 22
|
py
|
Python
|
base/test-show-scope/class-2.py
|
jpolitz/lambda-py-paper
|
746ef63fc1123714b4adaf78119028afbea7bd76
|
[
"Apache-2.0"
] | 25
|
2015-04-16T04:31:49.000Z
|
2022-03-10T15:53:28.000Z
|
base/test-show-scope/class-2.py
|
jpolitz/lambda-py-paper
|
746ef63fc1123714b4adaf78119028afbea7bd76
|
[
"Apache-2.0"
] | 1
|
2018-11-21T22:40:02.000Z
|
2018-11-26T17:53:11.000Z
|
base/test-show-scope/class-2.py
|
jpolitz/lambda-py-paper
|
746ef63fc1123714b4adaf78119028afbea7bd76
|
[
"Apache-2.0"
] | 1
|
2021-03-26T03:36:19.000Z
|
2021-03-26T03:36:19.000Z
|
# Module-level binding.
x = 7


class C:
    # Class attribute; shadows nothing — independent of the module-level x.
    x = 9
| 5.5
| 8
| 0.454545
| 6
| 22
| 1.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 0.409091
| 22
| 3
| 9
| 7.333333
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
b51616851b731dd79e3580faab311b514d9588d6
| 9,064
|
py
|
Python
|
oplot/plot_audio.py
|
otosense/oplot
|
5b4b4b96ebfa5486501c02e7051d1c11b1c3b86c
|
[
"Apache-2.0"
] | 1
|
2021-12-21T05:43:01.000Z
|
2021-12-21T05:43:01.000Z
|
oplot/plot_audio.py
|
otosense/oplot
|
5b4b4b96ebfa5486501c02e7051d1c11b1c3b86c
|
[
"Apache-2.0"
] | 2
|
2021-01-09T01:13:48.000Z
|
2021-07-29T12:33:11.000Z
|
oplot/plot_audio.py
|
otosense/oplot
|
5b4b4b96ebfa5486501c02e7051d1c11b1c3b86c
|
[
"Apache-2.0"
] | 1
|
2021-12-14T14:33:07.000Z
|
2021-12-14T14:33:07.000Z
|
"""Functions intended to plot waveform and spectra plus some timestamps information as vertical lines on the plots"""
import matplotlib.pyplot as plt
from cycler import cycler
import numpy as np
from matplotlib.pyplot import cm
def plot_lines(
    ax,
    lines_loc,
    label=None,
    color='r',
    line_width=0.5,
    line_style='-',
    line_type='vert',
    alpha=1,
):
    """
    Draw vertical or horizontal lines on an ax.

    Args:
        ax: the matplotlib axis on which to draw
        lines_loc: iterable with the locations of the lines
        label: legend label, attached to the first line only (optional)
        color: color of the lines
        line_width: width of the lines
        line_style: style of the lines (e.g. '-', '--')
        line_type: 'vert' for vertical lines or 'horiz' for horizontal ones
        alpha: transparency of the lines

    Raises:
        ValueError: if line_type is neither 'vert' nor 'horiz'.

    Examples:
        An initial plot
        fig, ax = plt.subplots()
        ... ax.plot([1, 2, 3])
        Adding horizontal lines to the plot
        plot_lines(ax,
        ... lines_loc=[1, 2],
        ... line_type='horiz',
        ... color='b',
        ... line_width=2,
        ... label='wide blue')
    """
    if line_type == 'vert':
        line_ = ax.axvline
    elif line_type == 'horiz':
        line_ = ax.axhline
    else:
        # Previously an unknown line_type fell through and raised an
        # UnboundLocalError on line_; fail fast with a clear message instead.
        raise ValueError(f"line_type must be 'vert' or 'horiz', got {line_type!r}")
    for line in lines_loc:
        line_(
            line,
            c=color,
            linewidth=line_width,
            label=label,
            linestyle=line_style,
            alpha=alpha,
        )
        # only the first one produce a label
        label = None
def plot_spectro(ax, wf, chk_size=2048, noverlap=0, sr=44100):
    """Draw the spectrogram of a waveform on an axis.

    Args:
        ax: the matplotlib axis on which to draw
        wf: the waveform samples
        chk_size: FFT window size (passed as NFFT)
        noverlap: overlap between consecutive windows
        sr: sample rate in Hz (passed as Fs)
    """
    ax.specgram(x=wf, Fs=sr, NFFT=chk_size, noverlap=noverlap)
def plot_wf(ax, wf, wf_line_width=0.8, wf_color='b'):
    """Plot a waveform as a line on an axis.

    :param ax: the matplotlib axis on which to draw
    :param wf: the waveform samples
    :param wf_line_width: width of the plotted line
    :param wf_color: color of the plotted line
    """
    ax.plot(wf, c=wf_color, linewidth=wf_line_width)
def _line_param(seq, idx, default):
    """Return seq[idx], or default when no sequence was provided."""
    return default if seq is None else seq[idx]


def plot_wf_and_spectro(
    wf,
    figsize=(40, 8),
    chk_size=2048,
    noverlap=0,
    sr=44100,
    spectra_ylim=None,
    wf_y_lim=None,
    wf_x_lim=None,
    spectra_xlim=None,
    n_sec_per_tick=None,
    vert_lines_samp=None,
    vert_lines_sec=None,
    vert_lines_colors=None,
    vert_lines_labels=None,
    vert_lines_width=None,
    vert_lines_style=None,
    alpha_lines=None,
    n_tick_dec=None,
    wf_line_width=1,
    wf_color='b',
    title=None,
    title_font_size=10,
):
    """
    Plot a waveform (top) and its spectrogram (bottom), with optional
    groups of vertical marker lines on the waveform plot.

    :param wf: waveform samples
    :param figsize: size of the matplotlib figure
    :param chk_size: FFT window size for the spectrogram
    :param noverlap: overlap between consecutive spectrogram windows
    :param sr: sample rate in Hz
    :param spectra_ylim: y-limits of the spectrogram plot
    :param wf_y_lim: y-limits of the waveform plot
    :param wf_x_lim: x-limits of the waveform plot
    :param spectra_xlim: x-limits of the spectrogram plot
    :param n_sec_per_tick: seconds between x ticks (default ~10% of wf, min 1s)
    :param vert_lines_samp: groups of vertical line positions, in samples
    :param vert_lines_sec: groups of vertical line positions, in seconds
    :param vert_lines_colors: one color per group of lines
    :param vert_lines_labels: one legend label per group of lines
    :param vert_lines_width: one line width per group of lines
    :param vert_lines_style: one line style per group of lines
    :param alpha_lines: one alpha per group of lines
    :param n_tick_dec: decimals in tick labels (derived from n_sec_per_tick by default)
    :param wf_line_width: width of the waveform line
    :param wf_color: color of the waveform line
    :param title: figure title
    :param title_font_size: font size of the title
    """
    fig, ax = plt.subplots(2, 1, figsize=figsize)
    # Resolve n_sec_per_tick BEFORE deriving n_tick_dec from it; the original
    # order derived n_tick_dec from str(None) when both were None, which only
    # coincidentally produced the same default of 1.
    if n_sec_per_tick is None:
        # make a tick every 10% of the whole wf, roughly if possible, or every 1sec if 10% is less than 1sec
        n_sec_per_tick = max(int((len(wf) / sr) / 10), 1)
    if n_tick_dec is None:
        # count of decimals in n_sec_per_tick, at least 1
        n_tick_dec = max(str(n_sec_per_tick)[::-1].find('.'), 1)
    # getting the ticks where we want them
    ticks_pos = range(0, len(wf), int(sr * n_sec_per_tick))
    ticks_labels = [
        f'{round(n_sec_per_tick * i, n_tick_dec)}s' for i in range(len(ticks_pos))
    ]
    # (the original applied sca/xticks twice in a row; once is enough)
    plt.sca(ax[0])
    plt.xticks(ticks_pos, ticks_labels)
    # plot the wf
    plot_wf(ax[0], wf=wf, wf_line_width=wf_line_width, wf_color=wf_color)
    ax[0].set_xlim((0, len(wf)))
    # set some wf plot limits
    if wf_y_lim:
        ax[0].set_ylim(*wf_y_lim)
    if wf_x_lim:
        ax[0].set_xlim(*wf_x_lim)
    # plot the vertical lines: second-based groups are converted to samples
    # and appended after the sample-based groups
    if vert_lines_samp is None:
        vert_lines_samp = []
    if vert_lines_sec is None:
        vert_lines_sec = []
    vert_lines_samp = list(vert_lines_samp)
    vert_lines_samp += [
        [int(i * sr) for i in list_lines] for list_lines in vert_lines_sec
    ]
    for lines_idx, lines_loc in enumerate(vert_lines_samp):
        plot_lines(
            ax[0],
            lines_loc=lines_loc,
            label=_line_param(vert_lines_labels, lines_idx, None),
            color=_line_param(vert_lines_colors, lines_idx, 'r'),
            line_width=_line_param(vert_lines_width, lines_idx, 0.5),
            line_style=_line_param(vert_lines_style, lines_idx, '-'),
            line_type='vert',
            alpha=_line_param(alpha_lines, lines_idx, None),
        )
    # plotting the spectrogram and some limits
    plot_spectro(ax=ax[1], wf=wf, chk_size=chk_size, noverlap=noverlap, sr=sr)
    if spectra_ylim:
        ax[1].set_ylim(*spectra_ylim)
    if spectra_xlim:
        ax[1].set_xlim(*spectra_xlim)
    fig.suptitle(title, fontsize=title_font_size)
    plt.legend(loc=(1.04, 0.8))
    plt.show()
def plot_wf_with_lines(
    wf,
    figsize=(40, 10),
    sr=44100,
    wf_y_lim=None,
    wf_x_lim=None,
    n_sec_per_tick=None,
    vert_lines_samp=None,
    vert_lines_sec=None,
    vert_lines_colors=None,
    vert_lines_labels=None,
    vert_lines_width=None,
    vert_lines_style=None,
    alpha_lines=None,
    n_tick_dec=None,
    wf_line_width=1,
    wf_color='b',
    title=None,
    title_font_size=10,
):
    """
    Plot a waveform with optional labelled groups of vertical lines.

    :param wf: waveform samples (sequence of amplitudes)
    :param figsize: matplotlib figure size
    :param sr: sample rate in Hz
    :param wf_y_lim: optional (ymin, ymax) for the waveform axes
    :param wf_x_lim: optional (xmin, xmax) for the waveform axes, in samples
    :param n_sec_per_tick: seconds between x ticks; defaults to ~10% of the
        waveform's duration, and at least 1 second
    :param vert_lines_samp: iterable of groups of line positions, in samples
    :param vert_lines_sec: iterable of groups of line positions, in seconds
        (converted to samples and appended after ``vert_lines_samp``)
    :param vert_lines_colors: one color per line group (default 'r')
    :param vert_lines_labels: one legend label per line group
    :param vert_lines_width: one line width per line group (default 0.5)
    :param vert_lines_style: one line style per line group (default '-')
    :param alpha_lines: one alpha value per line group
    :param n_tick_dec: decimals used in the tick labels; derived from
        ``n_sec_per_tick`` when None
    :param wf_line_width: width of the waveform line
    :param wf_color: color of the waveform line
    :param title: figure title
    :param title_font_size: font size of the title
    :return: None (displays the figure)
    """
    fig, ax = plt.subplots(1, 1, figsize=figsize)
    # Resolve the tick spacing BEFORE deriving the decimal count from it: the
    # previous order computed n_tick_dec from n_sec_per_tick while it could
    # still be None, which only gave the right answer because str(None)
    # happens to contain no '.'.
    if n_sec_per_tick is None:
        # a tick every ~10% of the whole wf, or every 1s if 10% is under 1s
        n_sec_per_tick = max(int((len(wf) / sr) / 10), 1)
    if n_tick_dec is None:
        # number of digits after the decimal point in n_sec_per_tick, min 1
        n_tick_dec = max(str(n_sec_per_tick)[::-1].find('.'), 1)
    # place a tick every n_sec_per_tick seconds, labelled in seconds
    ticks_pos = range(0, len(wf), int(sr * n_sec_per_tick))
    ticks_labels = [
        f'{round(n_sec_per_tick * i, n_tick_dec)}s' for i in range(len(ticks_pos))
    ]
    # select the axes then apply the ticks (was done twice; once suffices)
    plt.sca(ax)
    plt.xticks(ticks_pos, ticks_labels)
    # plot the wf
    plot_wf(ax, wf=wf, wf_line_width=wf_line_width, wf_color=wf_color)
    ax.set_xlim((0, len(wf)))
    # optional user-supplied axis limits override the defaults
    if wf_y_lim:
        ax.set_ylim(*wf_y_lim)
    if wf_x_lim:
        ax.set_xlim(*wf_x_lim)
    # collect the vertical-line groups, converting second-based groups
    # to sample positions and appending them after the sample-based ones
    if vert_lines_samp is None:
        vert_lines_samp = []
    if vert_lines_sec is None:
        vert_lines_sec = []
    vert_lines_samp = list(vert_lines_samp)
    vert_lines_samp += [
        [int(i * sr) for i in list_lines] for list_lines in vert_lines_sec
    ]
    for lines_idx, lines_loc in enumerate(vert_lines_samp):
        # per-group styling, falling back to a default when not provided
        alpha_line = None if alpha_lines is None else alpha_lines[lines_idx]
        vert_line_label = (
            '' if vert_lines_labels is None else vert_lines_labels[lines_idx]
        )
        vert_lines_color = (
            'r' if vert_lines_colors is None else vert_lines_colors[lines_idx]
        )
        vert_line_width = (
            0.5 if vert_lines_width is None else vert_lines_width[lines_idx]
        )
        vert_line_style = (
            '-' if vert_lines_style is None else vert_lines_style[lines_idx]
        )
        plot_lines(
            ax,
            lines_loc=lines_loc,
            label=vert_line_label,
            color=vert_lines_color,
            line_width=vert_line_width,
            line_style=vert_line_style,
            line_type='vert',
            alpha=alpha_line,
        )
    fig.suptitle(title, fontsize=title_font_size)
    plt.legend(loc=(1.04, 0.8))
    plt.show()
| 26.196532
| 117
| 0.611209
| 1,357
| 9,064
| 3.775976
| 0.123803
| 0.112412
| 0.045667
| 0.030055
| 0.745902
| 0.733607
| 0.728532
| 0.712529
| 0.705504
| 0.688915
| 0
| 0.016386
| 0.293027
| 9,064
| 345
| 118
| 26.272464
| 0.78324
| 0.254744
| 0
| 0.696078
| 0
| 0
| 0.017538
| 0.006577
| 0
| 0
| 0
| 0.005797
| 0
| 1
| 0.02451
| false
| 0
| 0.019608
| 0
| 0.044118
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d204dd1bd0b4c970f44c3f61a5c340a34df73bc4
| 8,998
|
py
|
Python
|
script/SpiderGanji/SpiderGanji/spiders/startURL_cs.py
|
jiming-liu/overpick_Scrapy
|
0fd9995f6a4560285804371608216ec527c2a793
|
[
"MIT"
] | null | null | null |
script/SpiderGanji/SpiderGanji/spiders/startURL_cs.py
|
jiming-liu/overpick_Scrapy
|
0fd9995f6a4560285804371608216ec527c2a793
|
[
"MIT"
] | null | null | null |
script/SpiderGanji/SpiderGanji/spiders/startURL_cs.py
|
jiming-liu/overpick_Scrapy
|
0fd9995f6a4560285804371608216ec527c2a793
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding=utf-8
class startURL:
    """Seed URL lists for the Ganji Changsha (cs.ganji.com) housing spiders.

    Each attribute holds the first 70 paginated listing URLs for one
    housing category:

    - ``xinfangURL``:    fang12 (new homes)
    - ``ershoufangURL``: fang5  (second-hand homes)
    - ``zufangURL``:     fang1  (rentals)
    """

    # Pages run o1/ .. o70/ for every category; build the lists instead of
    # hand-writing 210 nearly identical URL literals.
    xinfangURL = [
        'http://cs.ganji.com/fang12/o{}/'.format(page) for page in range(1, 71)
    ]
    ershoufangURL = [
        'http://cs.ganji.com/fang5/o{}/'.format(page) for page in range(1, 71)
    ]
    zufangURL = [
        'http://cs.ganji.com/fang1/o{}/'.format(page) for page in range(1, 71)
    ]
| 40.169643
| 42
| 0.526006
| 1,272
| 8,998
| 3.720912
| 0.069969
| 0.266216
| 0.488063
| 0.621171
| 0.983309
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095494
| 0.215603
| 8,998
| 223
| 43
| 40.349776
| 0.575092
| 0.003667
| 0
| 0
| 0
| 0
| 0.707687
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0.018433
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d2062449803043e0c9f8ee74ffad0ada9af45ec0
| 127
|
py
|
Python
|
numpy/arange_&_randam.py
|
abhayanigam/Learn_Python_Programming
|
801e3fff2b1fe35e4c93f4ced649516c519eb8f9
|
[
"MIT"
] | 1
|
2021-08-28T15:10:26.000Z
|
2021-08-28T15:10:26.000Z
|
numpy/arange_&_randam.py
|
abhayanigam/Learn_Python_Programming
|
801e3fff2b1fe35e4c93f4ced649516c519eb8f9
|
[
"MIT"
] | null | null | null |
numpy/arange_&_randam.py
|
abhayanigam/Learn_Python_Programming
|
801e3fff2b1fe35e4c93f4ced649516c519eb8f9
|
[
"MIT"
] | null | null | null |
import numpy as np

# Integers 10..20 inclusive (arange's stop value is exclusive).
n1 = np.arange(10, 21)
# Every second integer in 10..20 -> 10, 12, ..., 20.
n2 = np.arange(10, 21, 2)
# Five random integers drawn uniformly from the half-open range [1, 10).
n3 = np.random.randint(1, 10, 5)

for sample in (n1, n2, n3):
    print(sample)
| 14.111111
| 30
| 0.677165
| 28
| 127
| 3.071429
| 0.571429
| 0.186047
| 0.232558
| 0.27907
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172727
| 0.133858
| 127
| 9
| 31
| 14.111111
| 0.609091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0.428571
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
d2091c89054b2a2a0cd1920bc938ecbb011050c7
| 5,896
|
py
|
Python
|
mongoengine_migrate/actions/index.py
|
Mohsen-Khodabakhshi/mongoengine-migrate
|
1a7a26a47a474f70743c04700ce2a42f1872f166
|
[
"Apache-2.0"
] | 15
|
2020-08-05T22:25:54.000Z
|
2022-02-08T20:50:35.000Z
|
mongoengine_migrate/actions/index.py
|
Mohsen-Khodabakhshi/mongoengine-migrate
|
1a7a26a47a474f70743c04700ce2a42f1872f166
|
[
"Apache-2.0"
] | 36
|
2020-10-22T09:05:01.000Z
|
2022-02-21T14:50:17.000Z
|
mongoengine_migrate/actions/index.py
|
Mohsen-Khodabakhshi/mongoengine-migrate
|
1a7a26a47a474f70743c04700ce2a42f1872f166
|
[
"Apache-2.0"
] | 5
|
2020-10-23T04:06:32.000Z
|
2022-02-21T14:35:33.000Z
|
__all__ = [
'CreateIndex',
'DropIndex',
'AlterIndex'
]
from copy import deepcopy
from typing import Optional, Sequence
from pymongo.database import Database
from mongoengine_migrate.flags import EMBEDDED_DOCUMENT_NAME_PREFIX
from mongoengine_migrate.graph import MigrationPolicy
from mongoengine_migrate.schema import Schema
from .base import BaseIndexAction
class CreateIndex(BaseIndexAction):
    """Create an index on a given document.

    Ordered after DropIndex (priority 130 vs 120) so that an index whose
    name was changed by the user -- but whose fields spec was not -- gets
    dropped before being recreated; MongoDB would otherwise raise a
    duplicate index error. Such an index can also have been created by
    hand.
    """
    priority = 130

    def __init__(self, document_type: str, index_name: str, *, fields: Sequence, **kwargs):
        super().__init__(document_type, index_name, fields=fields, **kwargs)

    @classmethod
    def build_object(cls,
                     document_type: str,
                     index_name: str,
                     left_schema: Schema,
                     right_schema: Schema) -> Optional['CreateIndex']:
        # Only non-embedded documents present on both sides qualify.
        if document_type.startswith(EMBEDDED_DOCUMENT_NAME_PREFIX):
            return None
        if document_type not in left_schema or document_type not in right_schema:
            return None
        # The index must appear on the right side only, i.e. be newly added.
        if index_name in left_schema[document_type].indexes:
            return None
        if index_name not in right_schema[document_type].indexes:
            return None
        params = right_schema[document_type].indexes[index_name]
        return cls(document_type=document_type, index_name=index_name, **params)

    def prepare(self, db: Database, left_schema: Schema, migration_policy: MigrationPolicy):
        self._prepare(db, left_schema, migration_policy, False)
        # Remember the pre-existing index spec (if any) for backward runs.
        old_indexes = left_schema[self.document_type].indexes
        self._run_ctx['left_index_schema'] = old_indexes.get(self.index_name, {})

    def to_schema_patch(self, left_schema: Schema):
        before = left_schema[self.document_type]
        after = deepcopy(before)
        after.indexes[self.index_name] = self.parameters
        # The document already exists at this point, so emit a 'change'.
        return [('change', self.document_type, (before, after))]

    def run_forward(self):
        self._create_index(self.parameters)

    def run_backward(self):
        self._drop_index(self.parameters)
class DropIndex(BaseIndexAction):
    """Drop an index from a given document.

    Ordered before CreateIndex (priority 120 vs 130) so that an index
    whose name was changed by the user -- but whose fields spec was
    not -- gets dropped before being recreated; MongoDB would otherwise
    raise a duplicate index error. Such an index can also have been
    created by hand.
    """
    priority = 120

    @classmethod
    def build_object(cls,
                     document_type: str,
                     index_name: str,
                     left_schema: Schema,
                     right_schema: Schema) -> Optional['DropIndex']:
        # Only non-embedded documents present on both sides qualify.
        if document_type.startswith(EMBEDDED_DOCUMENT_NAME_PREFIX):
            return None
        if document_type not in left_schema or document_type not in right_schema:
            return None
        # The index must appear on the left side only, i.e. be removed.
        if index_name not in left_schema[document_type].indexes:
            return None
        if index_name in right_schema[document_type].indexes:
            return None
        params = left_schema[document_type].indexes[index_name]
        return cls(document_type=document_type, index_name=index_name, **params)

    def to_schema_patch(self, left_schema: Schema):
        before = left_schema[self.document_type]
        after = deepcopy(before)
        after.indexes.pop(self.index_name, None)
        # The document already exists at this point, so emit a 'change'.
        return [('change', self.document_type, (before, after))]

    def run_forward(self):
        self._drop_index(self._run_ctx['left_index_schema'])

    def run_backward(self):
        self._create_index(self._run_ctx['left_index_schema'])
class AlterIndex(BaseIndexAction):
    """Change an index's parameters.

    There is no in-place alter in MongoDB: the existing index is dropped
    and a new one is created with the updated parameters.
    """
    priority = 110

    def __init__(self, document_type: str, index_name: str, *, fields: Sequence, **kwargs):
        super().__init__(document_type, index_name, fields=fields, **kwargs)

    @classmethod
    def build_object(cls,
                     document_type: str,
                     index_name: str,
                     left_schema: Schema,
                     right_schema: Schema) -> Optional['AlterIndex']:
        # Only non-embedded documents present on both sides qualify.
        if document_type.startswith(EMBEDDED_DOCUMENT_NAME_PREFIX):
            return None
        if document_type not in left_schema or document_type not in right_schema:
            return None
        # The index must exist on both sides, with differing parameters.
        left_indexes = left_schema[document_type].indexes
        right_indexes = right_schema[document_type].indexes
        if index_name not in left_indexes or index_name not in right_indexes:
            return None
        if right_indexes[index_name] == left_indexes[index_name]:
            return None
        return cls(document_type=document_type,
                   index_name=index_name,
                   **right_indexes[index_name])

    def to_schema_patch(self, left_schema: Schema):
        before = left_schema[self.document_type]
        after = deepcopy(before)
        after.indexes[self.index_name].clear()
        after.indexes[self.index_name].update(self.parameters)
        # The document already exists at this point, so emit a 'change'.
        return [('change', self.document_type, (before, after))]

    def run_forward(self):
        self._drop_index(self._run_ctx['left_index_schema'])
        self._create_index(self.parameters)

    def run_backward(self):
        self._drop_index(self.parameters)
        self._create_index(self._run_ctx['left_index_schema'])
| 39.046358
| 103
| 0.669098
| 721
| 5,896
| 5.188627
| 0.162275
| 0.128308
| 0.060946
| 0.027265
| 0.797648
| 0.7637
| 0.739107
| 0.739107
| 0.728949
| 0.660519
| 0
| 0.002036
| 0.250339
| 5,896
| 150
| 104
| 39.306667
| 0.844344
| 0.137381
| 0
| 0.58
| 0
| 0
| 0.032554
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.15
| false
| 0
| 0.07
| 0
| 0.34
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d2809440c6da11a5a7c0f3d53f2382a9d4364f4c
| 94
|
py
|
Python
|
aaem_summaries/components/biomass_wood/__init__.py
|
gina-alaska/alaska_affordable_energy_model
|
96fed0137152985ce280ea37e0affec131e3087f
|
[
"MIT-feh"
] | 1
|
2022-01-23T07:18:36.000Z
|
2022-01-23T07:18:36.000Z
|
aaem_summaries/components/biomass_wood/__init__.py
|
gina-alaska/alaska_affordable_energy_model
|
96fed0137152985ce280ea37e0affec131e3087f
|
[
"MIT-feh"
] | 5
|
2017-07-14T21:56:46.000Z
|
2017-07-14T21:59:15.000Z
|
aaem_summaries/components/biomass_wood/__init__.py
|
gina-alaska/alaska_affordable_energy_model
|
96fed0137152985ce280ea37e0affec131e3087f
|
[
"MIT-feh"
] | 2
|
2020-04-28T18:12:55.000Z
|
2021-01-13T01:56:57.000Z
|
"""
__init__.py
summary for
Biomass - Cordwood in a community
"""
from summary import *
| 11.75
| 33
| 0.680851
| 12
| 94
| 5
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.223404
| 94
| 7
| 34
| 13.428571
| 0.821918
| 0.670213
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
966f8cc647da0f7b75021cefc29faeb32ddad27d
| 156
|
py
|
Python
|
neuralnetwork/code/cells/__init__.py
|
realwsq/MuscleSpindleCircuitsModel
|
4418b180559ea1464ee6139161af7e6bf3762c50
|
[
"MIT"
] | 5
|
2018-11-13T15:22:14.000Z
|
2022-03-23T17:00:38.000Z
|
neuralnetwork/code/cells/__init__.py
|
realwsq/MuscleSpindleCircuitsModel
|
4418b180559ea1464ee6139161af7e6bf3762c50
|
[
"MIT"
] | null | null | null |
neuralnetwork/code/cells/__init__.py
|
realwsq/MuscleSpindleCircuitsModel
|
4418b180559ea1464ee6139161af7e6bf3762c50
|
[
"MIT"
] | 1
|
2020-05-28T15:40:11.000Z
|
2020-05-28T15:40:11.000Z
|
from Cell import Cell
from AfferentFiber import AfferentFiber
from IntFire import IntFire
from IntFireMn import IntFireMn
from Motoneuron import Motoneuron
| 26
| 39
| 0.871795
| 20
| 156
| 6.8
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 156
| 5
| 40
| 31.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7367d4d019ee8161e39bb213b44757e547a96f5f
| 147
|
py
|
Python
|
a/empresa.py
|
mariagarciau/AgregComposPOO
|
dc2671ec54d5678d79c003b070ecb4d233d7f35f
|
[
"Apache-2.0"
] | null | null | null |
a/empresa.py
|
mariagarciau/AgregComposPOO
|
dc2671ec54d5678d79c003b070ecb4d233d7f35f
|
[
"Apache-2.0"
] | null | null | null |
a/empresa.py
|
mariagarciau/AgregComposPOO
|
dc2671ec54d5678d79c003b070ecb4d233d7f35f
|
[
"Apache-2.0"
] | null | null | null |
import empleado
import edificio
class empresa:
    """A company (empresa), identified by its name."""

    def __init__(self, nombreEmpresa):
        # Name that identifies this company.
        self.nombreEmpresa = nombreEmpresa


# Bug fix: empresa() was instantiated with no argument although __init__
# requires nombreEmpresa, which raised TypeError at import time.
YooHoo = empresa('YooHoo')
| 24.5
| 42
| 0.755102
| 15
| 147
| 7.133333
| 0.666667
| 0.317757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176871
| 147
| 6
| 43
| 24.5
| 0.884298
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
73ad4350ace962ff533f8b90806fe838e01968ed
| 55
|
py
|
Python
|
src/ranking.py
|
Thoughtful-Automation/bots-ta2-python-hockey-challenge
|
5a0a213ba8db9f23ae47b21c6d5ab8524c349aa3
|
[
"Apache-2.0"
] | null | null | null |
src/ranking.py
|
Thoughtful-Automation/bots-ta2-python-hockey-challenge
|
5a0a213ba8db9f23ae47b21c6d5ab8524c349aa3
|
[
"Apache-2.0"
] | null | null | null |
src/ranking.py
|
Thoughtful-Automation/bots-ta2-python-hockey-challenge
|
5a0a213ba8db9f23ae47b21c6d5ab8524c349aa3
|
[
"Apache-2.0"
] | null | null | null |
class Ranking:
    """Placeholder for the hockey-challenge ranking logic; not implemented yet."""

    # TODO: Add your code here
    pass
| 13.75
| 30
| 0.636364
| 8
| 55
| 4.375
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.309091
| 55
| 4
| 31
| 13.75
| 0.921053
| 0.436364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
73be6c50f61d79c38fdc27e76a4283abd02d6bab
| 40
|
py
|
Python
|
carla/recourse_methods/catalog/clue/__init__.py
|
jayanthyetukuri/CARLA
|
c3f3aaf11a5a8499c4bec5065e0c17ec8e6f5950
|
[
"MIT"
] | 140
|
2021-08-03T21:53:32.000Z
|
2022-03-20T08:52:02.000Z
|
carla/recourse_methods/catalog/clue/__init__.py
|
jayanthyetukuri/CARLA
|
c3f3aaf11a5a8499c4bec5065e0c17ec8e6f5950
|
[
"MIT"
] | 54
|
2021-03-07T18:22:16.000Z
|
2021-08-03T12:06:31.000Z
|
carla/recourse_methods/catalog/clue/__init__.py
|
jayanthyetukuri/CARLA
|
c3f3aaf11a5a8499c4bec5065e0c17ec8e6f5950
|
[
"MIT"
] | 16
|
2021-08-23T12:14:58.000Z
|
2022-03-01T00:52:58.000Z
|
# flake8: noqa
from .model import Clue
| 10
| 23
| 0.725
| 6
| 40
| 4.833333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03125
| 0.2
| 40
| 3
| 24
| 13.333333
| 0.875
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
73fc04b570eb73b6ca58068fe6300d2076334be2
| 5,557
|
py
|
Python
|
stdplugins/getmusic.py
|
Colossalhavoc/PepeBot
|
238f246e84696970a93eba508c3b084704b44944
|
[
"Apache-2.0"
] | 20
|
2020-01-25T05:08:26.000Z
|
2022-01-18T07:37:53.000Z
|
stdplugins/getmusic.py
|
ishaizz/PepeBot
|
7440cadc8228106d221fc8e436a0809a86be5159
|
[
"Apache-2.0"
] | 15
|
2019-11-07T07:53:56.000Z
|
2022-01-23T09:21:17.000Z
|
stdplugins/getmusic.py
|
ishaizz/PepeBot
|
7440cadc8228106d221fc8e436a0809a86be5159
|
[
"Apache-2.0"
] | 62
|
2019-10-20T06:35:19.000Z
|
2021-01-23T17:26:05.000Z
|
"""
by @sandy1709 ( https://t.me/mrconfused )
"""
# songs finder for PepeBot
import os
from pathlib import Path
from validators.url import url
from uniborg import SYNTAX, name_dl, runcmd, song_dl, thumb_dl, video_dl, yt_search
from uniborg.util import admin_cmd, edit_or_reply
DEFAULTUSER = "𠘨 工 长 工 丅 卂"
@borg.on(admin_cmd(pattern="(song|song32)($| (.*))", allow_sudo=True))
async def _(event):
    """Search YouTube for the queried song and upload it as audio.

    ``.song <query>`` downloads at 128k, ``.song32 <query>`` at 320k; the
    query may also be taken from the replied-to message.
    """
    # Reply to the triggering/replied message unless we triggered ourselves.
    reply_to_id = None
    if event.from_id != bot.uid:
        reply_to_id = event.message.id
    if event.reply_to_msg_id:
        reply_to_id = event.reply_to_msg_id
    reply = await event.get_reply_message()
    if event.pattern_match.group(2):
        query = event.pattern_match.group(2)
    elif reply and reply.message:
        query = reply.message
    else:
        # Bug fix: with no query argument and no usable reply, `query` was
        # left undefined and a NameError followed further down.
        await edit_or_reply(event, "`What I am Supposed to find `")
        return
    catevent = await edit_or_reply(event, "`wi8..! I am finding your song....`")
    video_link = await yt_search(str(query))
    if not url(video_link):
        return await catevent.edit(
            f"Sorry!. I can't find any related video/audio for `{query}`"
        )
    # The command used decides the audio quality.
    cmd = event.pattern_match.group(1)
    if cmd == "song":
        q = "128k"
    elif cmd == "song32":
        q = "320k"
    song_cmd = song_dl.format(QUALITY=q, video_link=video_link)
    thumb_cmd = thumb_dl.format(video_link=video_link)
    name_cmd = name_dl.format(video_link=video_link)
    # Download the audio, resolve the output file name, fetch the thumbnail;
    # abort on the first shell error.
    stderr = (await runcmd(song_cmd))[1]
    if stderr:
        return await catevent.edit(f"**Error :** `{stderr}`")
    catname, stderr = (await runcmd(name_cmd))[:2]
    if stderr:
        return await catevent.edit(f"**Error :** `{stderr}`")
    stderr = (await runcmd(thumb_cmd))[1]
    catname = os.path.splitext(catname)[0]
    if stderr:
        return await catevent.edit(f"**Error :** `{stderr}`")
    song_file = Path(f"{catname}.mp3")
    if not os.path.exists(song_file):
        return await catevent.edit(
            f"Sorry!. I can't find any related video/audio for `{query}`"
        )
    await catevent.edit("`yeah..! i found something wi8..🥰`")
    catthumb = Path(f"{catname}.jpg")
    if not os.path.exists(catthumb):
        catthumb = Path(f"{catname}.webp")
    # Bug fix: this was an `elif`, so the .webp fallback was never re-checked
    # and a nonexistent thumbnail path could be passed to send_file.
    if not os.path.exists(catthumb):
        catthumb = None
    await borg.send_file(
        event.chat_id,
        song_file,
        force_document=False,
        caption=f"➥ __**Song :- {query}**__\n__**➥ Uploaded by :-**__ {DEFAULTUSER}",
        thumb=catthumb,
        supports_streaming=True,
        reply_to=reply_to_id,
    )
    await catevent.delete()
    # Clean up the downloaded artifacts.
    for files in (catthumb, song_file):
        if files and os.path.exists(files):
            os.remove(files)
@borg.on(admin_cmd(pattern="vsong( (.*)|$)", allow_sudo=True))
async def _(event):
    """Search YouTube for the queried song and upload it as a video file.

    ``.vsong <query>``; the query may also be taken from the replied-to
    message.
    """
    # Reply to the triggering/replied message unless we triggered ourselves.
    reply_to_id = None
    if event.from_id != bot.uid:
        reply_to_id = event.message.id
    if event.reply_to_msg_id:
        reply_to_id = event.reply_to_msg_id
    reply = await event.get_reply_message()
    if event.pattern_match.group(1):
        query = event.pattern_match.group(1)
    elif reply and reply.message:
        # Bug fix: this read `reply.messag` (typo) and raised AttributeError.
        query = reply.message
    else:
        # Also covers no-argument-and-no-reply, where `query` used to be
        # left undefined.
        await edit_or_reply(event, "What I am Supposed to find")
        return
    catevent = await edit_or_reply(event, "`wi8..! I am finding your song....`")
    video_link = await yt_search(str(query))
    if not url(video_link):
        return await catevent.edit(
            f"Sorry!. I can't find any related video/audio for `{query}`"
        )
    thumb_cmd = thumb_dl.format(video_link=video_link)
    name_cmd = name_dl.format(video_link=video_link)
    video_cmd = video_dl.format(video_link=video_link)
    # Download the video, resolve the output file name, fetch the thumbnail;
    # abort on the first shell error.
    stderr = (await runcmd(video_cmd))[1]
    if stderr:
        return await catevent.edit(f"**Error :** `{stderr}`")
    catname, stderr = (await runcmd(name_cmd))[:2]
    if stderr:
        return await catevent.edit(f"**Error :** `{stderr}`")
    stderr = (await runcmd(thumb_cmd))[1]
    if stderr:
        return await catevent.edit(f"**Error :** `{stderr}`")
    catname = os.path.splitext(catname)[0]
    vsong_file = Path(f"{catname}.mp4")
    if not os.path.exists(vsong_file):
        vsong_file = Path(f"{catname}.mkv")
    # Bug fix: the original used `elif`, so when the .mp4 was missing the
    # .mkv fallback was never verified and a nonexistent path was uploaded.
    if not os.path.exists(vsong_file):
        return await catevent.edit(
            f"Sorry!. I can't find any related video/audio for `{query}`"
        )
    await catevent.edit("`yeah..! i found something wi8..🥰`")
    catthumb = Path(f"{catname}.jpg")
    if not os.path.exists(catthumb):
        catthumb = Path(f"{catname}.webp")
    # Same dead-`elif` fix for the thumbnail fallback.
    if not os.path.exists(catthumb):
        catthumb = None
    await borg.send_file(
        event.chat_id,
        vsong_file,
        force_document=False,
        caption=query,
        thumb=catthumb,
        supports_streaming=True,
        reply_to=reply_to_id,
    )
    await catevent.delete()
    # Clean up the downloaded artifacts.
    for files in (catthumb, vsong_file):
        if files and os.path.exists(files):
            os.remove(files)
SYNTAX.update(
{
"getsongs": "**Plugin : **`getsongs`\
\n\n**Syntax : **`.song query` or `.song reply to song name`\
\n**Usage : **searches the song you entered in query and sends it quality of it is 128k\
\n\n**Syntax : **`.song32 query` or `.song32 reply to song name`\
\n**Usage : **searches the song you entered in query and sends it quality of it is 320k\
\n\n**Syntax : **`.vsong query` or `.vsong reply to song name`\
\n**Usage : **Searches the video song you entered in query and sends it"
}
)
| 35.170886
| 96
| 0.623718
| 788
| 5,557
| 4.244924
| 0.173858
| 0.035575
| 0.060987
| 0.068759
| 0.826607
| 0.753064
| 0.724664
| 0.704933
| 0.686099
| 0.655306
| 0
| 0.010233
| 0.243837
| 5,557
| 157
| 97
| 35.394904
| 0.784864
| 0.012417
| 0
| 0.619718
| 0
| 0.014085
| 0.143796
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.035211
| 0
| 0.119718
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fb77a8c7007783846524e88b0bf1a21fa0443484
| 197
|
py
|
Python
|
experiments/models/__init__.py
|
QUVA-Lab/e2cnn_experiments
|
0c8f275be0361367c52d2d268471ac32f39fe3f3
|
[
"BSD-3-Clause"
] | 13
|
2021-02-26T10:42:10.000Z
|
2022-02-09T05:08:07.000Z
|
experiments/models/__init__.py
|
QUVA-Lab/e2cnn_experiments
|
0c8f275be0361367c52d2d268471ac32f39fe3f3
|
[
"BSD-3-Clause"
] | 2
|
2021-11-23T12:04:17.000Z
|
2022-03-07T01:51:35.000Z
|
experiments/models/__init__.py
|
QUVA-Lab/e2cnn_experiments
|
0c8f275be0361367c52d2d268471ac32f39fe3f3
|
[
"BSD-3-Clause"
] | 2
|
2021-11-26T05:56:50.000Z
|
2022-01-25T13:21:42.000Z
|
from .e2sfcnn import E2SFCNN
from .e2sfcnn_quotient import E2SFCNN_QUOT
from .exp_e2sfcnn import ExpE2SFCNN
from .exp_cnn import ExpCNN
from .e2_wide_resnet import *
from .wide_resnet import *
| 17.909091
| 42
| 0.817259
| 29
| 197
| 5.310345
| 0.413793
| 0.142857
| 0.207792
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04142
| 0.142132
| 197
| 10
| 43
| 19.7
| 0.869822
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fb90cbf9de4df75107352597d50f047f44bac86c
| 525
|
py
|
Python
|
Python/treehopper/libraries/sensors/Proximity.py
|
ehailey1/treehopper-sdk
|
c242f939a93d93da11ff79577666130c15aecec7
|
[
"MIT"
] | 3
|
2018-03-16T07:00:42.000Z
|
2022-03-27T00:39:55.000Z
|
Python/treehopper/libraries/sensors/Proximity.py
|
ehailey1/treehopper-sdk
|
c242f939a93d93da11ff79577666130c15aecec7
|
[
"MIT"
] | 16
|
2016-08-12T18:51:04.000Z
|
2021-04-16T16:14:07.000Z
|
Python/treehopper/libraries/sensors/Proximity.py
|
ehailey1/treehopper-sdk
|
c242f939a93d93da11ff79577666130c15aecec7
|
[
"MIT"
] | 6
|
2015-11-04T15:53:49.000Z
|
2020-06-25T18:34:47.000Z
|
from abc import abstractmethod, ABC
from treehopper.libraries.sensors import Pollable
class Proximity(Pollable):
    """A pollable distance reading, stored internally in meters.

    The unit-conversion properties are all derived from ``meters`` on
    each access, so they reflect the latest reading.
    """

    def __init__(self):
        self._meters = 0

    @property
    def meters(self):
        """Distance in meters; re-polls the sensor first when
        auto-update-on-read is enabled."""
        if self.auto_update_when_property_read:
            self.update()
        return self._meters

    @property
    def centimeters(self):
        """Distance converted to centimeters."""
        return 100 * self.meters

    @property
    def inches(self):
        """Distance converted to inches."""
        return 39.3701 * self.meters

    @property
    def feet(self):
        """Distance converted to feet."""
        return 3.28085 * self.meters
| 19.444444
| 49
| 0.64
| 62
| 525
| 5.258065
| 0.483871
| 0.153374
| 0.196319
| 0.184049
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04244
| 0.281905
| 525
| 26
| 50
| 20.192308
| 0.822281
| 0
| 0
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.263158
| false
| 0
| 0.105263
| 0.157895
| 0.631579
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
fbe1c23b13bee85060daedfc744013aa2d84713b
| 91
|
py
|
Python
|
PythonClient/carla/driving_benchmark/experiment_suites/__init__.py
|
Fatiepie/SelfDrivingCarController
|
359f5416f9d001a989d87cc002727f0e14903aa7
|
[
"MIT"
] | 1
|
2021-04-22T16:15:51.000Z
|
2021-04-22T16:15:51.000Z
|
PythonClient/carla/driving_benchmark/experiment_suites/__init__.py
|
Fatiepie/SelfDrivingCarController
|
359f5416f9d001a989d87cc002727f0e14903aa7
|
[
"MIT"
] | null | null | null |
PythonClient/carla/driving_benchmark/experiment_suites/__init__.py
|
Fatiepie/SelfDrivingCarController
|
359f5416f9d001a989d87cc002727f0e14903aa7
|
[
"MIT"
] | null | null | null |
from .basic_experiment_suite import BasicExperimentSuite
from .corl_2017 import CoRL2017
| 30.333333
| 57
| 0.868132
| 11
| 91
| 6.909091
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098765
| 0.10989
| 91
| 2
| 58
| 45.5
| 0.839506
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8376a9ef9fc9afbcf6cafdf6801ffa2cf8c0775f
| 261
|
py
|
Python
|
tipping/sqlalchemy-fauna/sqlalchemy_fauna/fauna/fql/__init__.py
|
tipresias/tipresias
|
8945acb8276f22f2159c24e5a1bd411e7920a79e
|
[
"MIT"
] | 12
|
2019-05-27T10:28:59.000Z
|
2022-02-01T23:49:50.000Z
|
tipping/sqlalchemy-fauna/sqlalchemy_fauna/fauna/fql/__init__.py
|
tipresias/tipresias
|
8945acb8276f22f2159c24e5a1bd411e7920a79e
|
[
"MIT"
] | 275
|
2019-05-27T06:46:37.000Z
|
2022-03-18T03:58:33.000Z
|
tipping/sqlalchemy-fauna/sqlalchemy_fauna/fauna/fql/__init__.py
|
tipresias/tipresias
|
8945acb8276f22f2159c24e5a1bd411e7920a79e
|
[
"MIT"
] | 6
|
2019-08-27T08:49:50.000Z
|
2021-10-04T12:28:37.000Z
|
"""Module for building FQL queries based on SQL query structures."""
from .delete import translate_delete
from .insert import translate_insert
from .select import translate_select
from .common import update_documents, index_name, convert_to_ref_set, IndexType
| 37.285714
| 79
| 0.831418
| 37
| 261
| 5.648649
| 0.702703
| 0.215311
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114943
| 261
| 6
| 80
| 43.5
| 0.904762
| 0.237548
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8382d787fa36147f7475a831c267e3ae57e0e8f3
| 26,442
|
py
|
Python
|
firestore/tests/unit/gapic/v1beta1/test_firestore_client_v1beta1.py
|
bomboradata/bombora-google-cloud-python
|
255bbebe6c50490f40fcc3eed40bae1e77e03859
|
[
"Apache-2.0"
] | null | null | null |
firestore/tests/unit/gapic/v1beta1/test_firestore_client_v1beta1.py
|
bomboradata/bombora-google-cloud-python
|
255bbebe6c50490f40fcc3eed40bae1e77e03859
|
[
"Apache-2.0"
] | null | null | null |
firestore/tests/unit/gapic/v1beta1/test_firestore_client_v1beta1.py
|
bomboradata/bombora-google-cloud-python
|
255bbebe6c50490f40fcc3eed40bae1e77e03859
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017, Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests."""
import mock
import unittest
from google.gax import errors
from google.cloud.firestore_v1beta1.gapic import firestore_client
from google.cloud.firestore_v1beta1.proto import common_pb2
from google.cloud.firestore_v1beta1.proto import document_pb2
from google.cloud.firestore_v1beta1.proto import firestore_pb2
from google.protobuf import empty_pb2
class CustomException(Exception):
    """Stand-in error raised by the mocked gRPC stubs in these tests."""
    pass
class TestFirestoreClient(unittest.TestCase):
    """Unit tests for the generated FirestoreClient GAPIC surface.

    Each RPC gets a pair of tests: a success test that mocks the gRPC
    stub, calls the client method and verifies both the returned value
    and the request proto handed to the stub; and an exception test that
    makes the stub raise CustomException and verifies the client
    surfaces it as errors.GaxError.
    """

    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_get_document(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        name = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]',
                                    '[ANY_PATH]')
        # Mock response
        name_2 = 'name2-1052831874'
        expected_response = {'name': name_2}
        expected_response = document_pb2.Document(**expected_response)
        grpc_stub.GetDocument.return_value = expected_response
        response = client.get_document(name)
        self.assertEqual(expected_response, response)
        grpc_stub.GetDocument.assert_called_once()
        args, kwargs = grpc_stub.GetDocument.call_args
        self.assertEqual(len(args), 2)
        self.assertEqual(len(kwargs), 1)
        self.assertIn('metadata', kwargs)
        actual_request = args[0]
        expected_request = firestore_pb2.GetDocumentRequest(name=name)
        self.assertEqual(expected_request, actual_request)

    @mock.patch('google.gax.config.API_ERRORS', (CustomException, ))
    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_get_document_exception(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        name = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]',
                                    '[ANY_PATH]')
        # Mock exception response
        grpc_stub.GetDocument.side_effect = CustomException()
        self.assertRaises(errors.GaxError, client.get_document, name)

    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_list_documents(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]',
                                      '[ANY_PATH]')
        collection_id = 'collectionId-821242276'
        # Mock response
        next_page_token = ''
        documents_element = {}
        documents = [documents_element]
        expected_response = {
            'next_page_token': next_page_token,
            'documents': documents
        }
        expected_response = firestore_pb2.ListDocumentsResponse(
            **expected_response)
        grpc_stub.ListDocuments.return_value = expected_response
        paged_list_response = client.list_documents(parent, collection_id)
        resources = list(paged_list_response)
        self.assertEqual(1, len(resources))
        self.assertEqual(expected_response.documents[0], resources[0])
        grpc_stub.ListDocuments.assert_called_once()
        args, kwargs = grpc_stub.ListDocuments.call_args
        self.assertEqual(len(args), 2)
        self.assertEqual(len(kwargs), 1)
        self.assertIn('metadata', kwargs)
        actual_request = args[0]
        expected_request = firestore_pb2.ListDocumentsRequest(
            parent=parent, collection_id=collection_id)
        self.assertEqual(expected_request, actual_request)

    @mock.patch('google.gax.config.API_ERRORS', (CustomException, ))
    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_list_documents_exception(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]',
                                      '[ANY_PATH]')
        collection_id = 'collectionId-821242276'
        # Mock exception response
        grpc_stub.ListDocuments.side_effect = CustomException()
        paged_list_response = client.list_documents(parent, collection_id)
        self.assertRaises(errors.GaxError, list, paged_list_response)

    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_create_document(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]',
                                      '[ANY_PATH]')
        collection_id = 'collectionId-821242276'
        document_id = 'documentId506676927'
        document = {}
        # Mock response
        name = 'name3373707'
        expected_response = {'name': name}
        expected_response = document_pb2.Document(**expected_response)
        grpc_stub.CreateDocument.return_value = expected_response
        response = client.create_document(parent, collection_id, document_id,
                                          document)
        self.assertEqual(expected_response, response)
        grpc_stub.CreateDocument.assert_called_once()
        args, kwargs = grpc_stub.CreateDocument.call_args
        self.assertEqual(len(args), 2)
        self.assertEqual(len(kwargs), 1)
        self.assertIn('metadata', kwargs)
        actual_request = args[0]
        expected_request = firestore_pb2.CreateDocumentRequest(
            parent=parent,
            collection_id=collection_id,
            document_id=document_id,
            document=document)
        self.assertEqual(expected_request, actual_request)

    @mock.patch('google.gax.config.API_ERRORS', (CustomException, ))
    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_create_document_exception(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]',
                                      '[ANY_PATH]')
        collection_id = 'collectionId-821242276'
        document_id = 'documentId506676927'
        document = {}
        # Mock exception response
        grpc_stub.CreateDocument.side_effect = CustomException()
        self.assertRaises(errors.GaxError, client.create_document, parent,
                          collection_id, document_id, document)

    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_update_document(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        document = {}
        update_mask = {}
        # Mock response
        name = 'name3373707'
        expected_response = {'name': name}
        expected_response = document_pb2.Document(**expected_response)
        grpc_stub.UpdateDocument.return_value = expected_response
        response = client.update_document(document, update_mask)
        self.assertEqual(expected_response, response)
        grpc_stub.UpdateDocument.assert_called_once()
        args, kwargs = grpc_stub.UpdateDocument.call_args
        self.assertEqual(len(args), 2)
        self.assertEqual(len(kwargs), 1)
        self.assertIn('metadata', kwargs)
        actual_request = args[0]
        expected_request = firestore_pb2.UpdateDocumentRequest(
            document=document, update_mask=update_mask)
        self.assertEqual(expected_request, actual_request)

    @mock.patch('google.gax.config.API_ERRORS', (CustomException, ))
    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_update_document_exception(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        document = {}
        update_mask = {}
        # Mock exception response
        grpc_stub.UpdateDocument.side_effect = CustomException()
        self.assertRaises(errors.GaxError, client.update_document, document,
                          update_mask)

    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_delete_document(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        name = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]',
                                    '[ANY_PATH]')
        client.delete_document(name)
        grpc_stub.DeleteDocument.assert_called_once()
        args, kwargs = grpc_stub.DeleteDocument.call_args
        self.assertEqual(len(args), 2)
        self.assertEqual(len(kwargs), 1)
        self.assertIn('metadata', kwargs)
        actual_request = args[0]
        expected_request = firestore_pb2.DeleteDocumentRequest(name=name)
        self.assertEqual(expected_request, actual_request)

    @mock.patch('google.gax.config.API_ERRORS', (CustomException, ))
    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_delete_document_exception(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        name = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]',
                                    '[ANY_PATH]')
        # Mock exception response
        grpc_stub.DeleteDocument.side_effect = CustomException()
        self.assertRaises(errors.GaxError, client.delete_document, name)

    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_batch_get_documents(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        database = client.database_root_path('[PROJECT]', '[DATABASE]')
        documents = []
        # Mock response
        missing = 'missing1069449574'
        transaction = b'-34'
        expected_response = {'missing': missing, 'transaction': transaction}
        expected_response = firestore_pb2.BatchGetDocumentsResponse(
            **expected_response)
        grpc_stub.BatchGetDocuments.return_value = iter([expected_response])
        response = client.batch_get_documents(database, documents)
        resources = list(response)
        self.assertEqual(1, len(resources))
        self.assertEqual(expected_response, resources[0])
        grpc_stub.BatchGetDocuments.assert_called_once()
        args, kwargs = grpc_stub.BatchGetDocuments.call_args
        self.assertEqual(len(args), 2)
        self.assertEqual(len(kwargs), 1)
        self.assertIn('metadata', kwargs)
        actual_request = args[0]
        expected_request = firestore_pb2.BatchGetDocumentsRequest(
            database=database, documents=documents)
        self.assertEqual(expected_request, actual_request)

    @mock.patch('google.gax.config.API_ERRORS', (CustomException, ))
    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_batch_get_documents_exception(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        database = client.database_root_path('[PROJECT]', '[DATABASE]')
        documents = []
        # Mock exception response
        grpc_stub.BatchGetDocuments.side_effect = CustomException()
        self.assertRaises(errors.GaxError, client.batch_get_documents,
                          database, documents)

    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_begin_transaction(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        database = client.database_root_path('[PROJECT]', '[DATABASE]')
        # Mock response
        transaction = b'-34'
        expected_response = {'transaction': transaction}
        expected_response = firestore_pb2.BeginTransactionResponse(
            **expected_response)
        grpc_stub.BeginTransaction.return_value = expected_response
        response = client.begin_transaction(database)
        self.assertEqual(expected_response, response)
        grpc_stub.BeginTransaction.assert_called_once()
        args, kwargs = grpc_stub.BeginTransaction.call_args
        self.assertEqual(len(args), 2)
        self.assertEqual(len(kwargs), 1)
        self.assertIn('metadata', kwargs)
        actual_request = args[0]
        expected_request = firestore_pb2.BeginTransactionRequest(
            database=database)
        self.assertEqual(expected_request, actual_request)

    @mock.patch('google.gax.config.API_ERRORS', (CustomException, ))
    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_begin_transaction_exception(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        database = client.database_root_path('[PROJECT]', '[DATABASE]')
        # Mock exception response
        grpc_stub.BeginTransaction.side_effect = CustomException()
        self.assertRaises(errors.GaxError, client.begin_transaction, database)

    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_commit(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        database = client.database_root_path('[PROJECT]', '[DATABASE]')
        writes = []
        # Mock response
        expected_response = {}
        expected_response = firestore_pb2.CommitResponse(**expected_response)
        grpc_stub.Commit.return_value = expected_response
        response = client.commit(database, writes)
        self.assertEqual(expected_response, response)
        grpc_stub.Commit.assert_called_once()
        args, kwargs = grpc_stub.Commit.call_args
        self.assertEqual(len(args), 2)
        self.assertEqual(len(kwargs), 1)
        self.assertIn('metadata', kwargs)
        actual_request = args[0]
        expected_request = firestore_pb2.CommitRequest(
            database=database, writes=writes)
        self.assertEqual(expected_request, actual_request)

    @mock.patch('google.gax.config.API_ERRORS', (CustomException, ))
    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_commit_exception(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        database = client.database_root_path('[PROJECT]', '[DATABASE]')
        writes = []
        # Mock exception response
        grpc_stub.Commit.side_effect = CustomException()
        self.assertRaises(errors.GaxError, client.commit, database, writes)

    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_rollback(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        database = client.database_root_path('[PROJECT]', '[DATABASE]')
        transaction = b'-34'
        client.rollback(database, transaction)
        grpc_stub.Rollback.assert_called_once()
        args, kwargs = grpc_stub.Rollback.call_args
        self.assertEqual(len(args), 2)
        self.assertEqual(len(kwargs), 1)
        self.assertIn('metadata', kwargs)
        actual_request = args[0]
        expected_request = firestore_pb2.RollbackRequest(
            database=database, transaction=transaction)
        self.assertEqual(expected_request, actual_request)

    @mock.patch('google.gax.config.API_ERRORS', (CustomException, ))
    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_rollback_exception(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        database = client.database_root_path('[PROJECT]', '[DATABASE]')
        transaction = b'-34'
        # Mock exception response
        grpc_stub.Rollback.side_effect = CustomException()
        self.assertRaises(errors.GaxError, client.rollback, database,
                          transaction)

    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_run_query(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]',
                                      '[ANY_PATH]')
        # Mock response
        transaction = b'-34'
        skipped_results = 880286183
        expected_response = {
            'transaction': transaction,
            'skipped_results': skipped_results
        }
        expected_response = firestore_pb2.RunQueryResponse(**expected_response)
        grpc_stub.RunQuery.return_value = iter([expected_response])
        response = client.run_query(parent)
        resources = list(response)
        self.assertEqual(1, len(resources))
        self.assertEqual(expected_response, resources[0])
        grpc_stub.RunQuery.assert_called_once()
        args, kwargs = grpc_stub.RunQuery.call_args
        self.assertEqual(len(args), 2)
        self.assertEqual(len(kwargs), 1)
        self.assertIn('metadata', kwargs)
        actual_request = args[0]
        expected_request = firestore_pb2.RunQueryRequest(parent=parent)
        self.assertEqual(expected_request, actual_request)

    @mock.patch('google.gax.config.API_ERRORS', (CustomException, ))
    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_run_query_exception(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]',
                                      '[ANY_PATH]')
        # Mock exception response
        grpc_stub.RunQuery.side_effect = CustomException()
        self.assertRaises(errors.GaxError, client.run_query, parent)

    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_write(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        database = client.database_root_path('[PROJECT]', '[DATABASE]')
        request = {'database': database}
        requests = [request]
        # Mock response
        stream_id = 'streamId-315624902'
        stream_token = b'122'
        expected_response = {
            'stream_id': stream_id,
            'stream_token': stream_token
        }
        expected_response = firestore_pb2.WriteResponse(**expected_response)
        grpc_stub.Write.return_value = iter([expected_response])
        response = client.write(requests)
        resources = list(response)
        self.assertEqual(1, len(resources))
        self.assertEqual(expected_response, resources[0])
        grpc_stub.Write.assert_called_once()
        args, kwargs = grpc_stub.Write.call_args
        self.assertEqual(len(args), 2)
        self.assertEqual(len(kwargs), 1)
        self.assertIn('metadata', kwargs)
        actual_requests = args[0]
        self.assertEqual(1, len(actual_requests))
        actual_request = list(actual_requests)[0]
        self.assertEqual(request, actual_request)

    @mock.patch('google.gax.config.API_ERRORS', (CustomException, ))
    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_write_exception(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        database = client.database_root_path('[PROJECT]', '[DATABASE]')
        request = {'database': database}
        requests = [request]
        # Mock exception response
        grpc_stub.Write.side_effect = CustomException()
        self.assertRaises(errors.GaxError, client.write, requests)

    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_listen(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        database = client.database_root_path('[PROJECT]', '[DATABASE]')
        request = {'database': database}
        requests = [request]
        # Mock response
        expected_response = {}
        expected_response = firestore_pb2.ListenResponse(**expected_response)
        grpc_stub.Listen.return_value = iter([expected_response])
        response = client.listen(requests)
        resources = list(response)
        self.assertEqual(1, len(resources))
        self.assertEqual(expected_response, resources[0])
        grpc_stub.Listen.assert_called_once()
        args, kwargs = grpc_stub.Listen.call_args
        self.assertEqual(len(args), 2)
        self.assertEqual(len(kwargs), 1)
        self.assertIn('metadata', kwargs)
        actual_requests = args[0]
        self.assertEqual(1, len(actual_requests))
        actual_request = list(actual_requests)[0]
        self.assertEqual(request, actual_request)

    @mock.patch('google.gax.config.API_ERRORS', (CustomException, ))
    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_listen_exception(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        database = client.database_root_path('[PROJECT]', '[DATABASE]')
        request = {'database': database}
        requests = [request]
        # Mock exception response
        grpc_stub.Listen.side_effect = CustomException()
        self.assertRaises(errors.GaxError, client.listen, requests)

    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_list_collection_ids(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]',
                                      '[ANY_PATH]')
        # Mock response
        next_page_token = ''
        collection_ids_element = 'collectionIdsElement1368994900'
        collection_ids = [collection_ids_element]
        expected_response = {
            'next_page_token': next_page_token,
            'collection_ids': collection_ids
        }
        expected_response = firestore_pb2.ListCollectionIdsResponse(
            **expected_response)
        grpc_stub.ListCollectionIds.return_value = expected_response
        paged_list_response = client.list_collection_ids(parent)
        resources = list(paged_list_response)
        self.assertEqual(1, len(resources))
        self.assertEqual(expected_response.collection_ids[0], resources[0])
        grpc_stub.ListCollectionIds.assert_called_once()
        args, kwargs = grpc_stub.ListCollectionIds.call_args
        self.assertEqual(len(args), 2)
        self.assertEqual(len(kwargs), 1)
        self.assertIn('metadata', kwargs)
        actual_request = args[0]
        expected_request = firestore_pb2.ListCollectionIdsRequest(
            parent=parent)
        self.assertEqual(expected_request, actual_request)

    @mock.patch('google.gax.config.API_ERRORS', (CustomException, ))
    @mock.patch('google.gax.config.create_stub', spec=True)
    def test_list_collection_ids_exception(self, mock_create_stub):
        # Mock gRPC layer
        grpc_stub = mock.Mock()
        mock_create_stub.return_value = grpc_stub
        client = firestore_client.FirestoreClient()
        # Mock request
        parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]',
                                      '[ANY_PATH]')
        # Mock exception response
        grpc_stub.ListCollectionIds.side_effect = CustomException()
        paged_list_response = client.list_collection_ids(parent)
        self.assertRaises(errors.GaxError, list, paged_list_response)
| 36.930168
| 79
| 0.661448
| 2,834
| 26,442
| 5.918843
| 0.071983
| 0.048647
| 0.043401
| 0.04185
| 0.8354
| 0.810481
| 0.780494
| 0.726899
| 0.663229
| 0.635925
| 0
| 0.011497
| 0.24011
| 26,442
| 715
| 80
| 36.981818
| 0.823322
| 0.068187
| 0
| 0.640257
| 0
| 0
| 0.095254
| 0.050356
| 0
| 0
| 0
| 0
| 0.207709
| 1
| 0.055675
| false
| 0.002141
| 0.017131
| 0
| 0.077088
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
83c54bc9eaa4a1861dd3fa4ea4a880beb29a904e
| 630
|
py
|
Python
|
mock_ahrs.py
|
NAzT/ahrs-visualizer
|
98d69592af60cc98323680a2e8c442e7bffd123e
|
[
"MIT"
] | 39
|
2015-01-25T02:12:07.000Z
|
2021-09-12T12:56:26.000Z
|
mock_ahrs.py
|
NAzT/ahrs-visualizer
|
98d69592af60cc98323680a2e8c442e7bffd123e
|
[
"MIT"
] | 8
|
2015-05-21T08:27:25.000Z
|
2020-03-21T17:07:42.000Z
|
mock_ahrs.py
|
NAzT/ahrs-visualizer
|
98d69592af60cc98323680a2e8c442e7bffd123e
|
[
"MIT"
] | 10
|
2015-03-22T23:55:12.000Z
|
2021-03-16T11:08:33.000Z
|
#!/usr/bin/env python2
import time
import sys
def out(*args):
    """Print 15 space-separated %.6f-formatted numbers and flush stdout.

    Raises TypeError if *args does not contain exactly 15 values (the
    format string has 15 placeholders).
    """
    # Parenthesized print of a single string behaves identically under
    # Python 2 (where the original used the bare print statement) and
    # Python 3; flushing keeps the downstream visualizer pipe live.
    print("%.6f %.6f %.6f %.6f %.6f %.6f %.6f %.6f %.6f %.6f %.6f %.6f %.6f %.6f %.6f" % args)
    sys.stdout.flush()
# Origin
# Emit one mock AHRS reading per second, forever, cycling through four
# fixed 15-number frames (presumably orientation vectors for the
# visualizer — TODO confirm against ahrs-visualizer's expected input).
while True:
    out(0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, -1, 1, 1)
    time.sleep(1)
    out(0, 0, -1, 1, 0, 0, 0, -1, 0, 1, 1, 1, -1, 1, 1)
    time.sleep(1)
    out(-0.315, 0.946, -0.075, -0.783, -0.304, -0.542, -0.536, -0.112, 0.837,
        0.524, 0.099, -0.846, -0.526, 0.548, 0.481)
    time.sleep(1)
    out(0.188, 0.951, 0.243, 0.105, 0.227, -0.968, -0.976, 0.208, -0.057,
        0.953, -0.208, 0.079, -0.802, 0.012, -0.212)
    time.sleep(1)
| 27.391304
| 93
| 0.5
| 140
| 630
| 2.25
| 0.35
| 0.177778
| 0.247619
| 0.304762
| 0.330159
| 0.285714
| 0.285714
| 0.285714
| 0.285714
| 0.285714
| 0
| 0.348361
| 0.225397
| 630
| 22
| 94
| 28.636364
| 0.297131
| 0.044444
| 0
| 0.25
| 0
| 0.0625
| 0.126878
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.125
| null | null | 0.0625
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
83e0267cb0bec96a31b11a4704d2ad653baf866d
| 330
|
py
|
Python
|
setup.py
|
Lawliet19189/Dirichlet
|
08a72efc39b88d74d0790552f03332979cd6ca61
|
[
"MIT"
] | null | null | null |
setup.py
|
Lawliet19189/Dirichlet
|
08a72efc39b88d74d0790552f03332979cd6ca61
|
[
"MIT"
] | null | null | null |
setup.py
|
Lawliet19189/Dirichlet
|
08a72efc39b88d74d0790552f03332979cd6ca61
|
[
"MIT"
] | null | null | null |
# For Dataset Download
import torchaudio
import torch
def download_dataset():  # fixed: original was missing the ':' (SyntaxError)
    """Download the LibriSpeech train-clean-100 and dev-clean splits.

    Stores the data under "Datasets/LibriSpeech" and returns
    (train_samples, dev_samples). The dev split was previously assigned
    but never used; returning both makes the download results available
    to callers.
    """
    data_samples = torchaudio.datasets.LIBRISPEECH("Datasets/LibriSpeech", url="train-clean-100", download=True)
    dev_samples = torchaudio.datasets.LIBRISPEECH("Datasets/LibriSpeech", url="dev-clean", download=True)
    # NOTE: assert is stripped under `python -O`; kept for behavioral
    # compatibility with the original sanity check.
    assert len(data_samples) > 0
    return data_samples, dev_samples
| 30
| 112
| 0.775758
| 40
| 330
| 6.3
| 0.5
| 0.301587
| 0.198413
| 0.285714
| 0.460317
| 0.460317
| 0.460317
| 0
| 0
| 0
| 0
| 0.013652
| 0.112121
| 330
| 10
| 113
| 33
| 0.846416
| 0.060606
| 0
| 0
| 0
| 0
| 0.20915
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0
| null | null | 0
| 0.333333
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
f7ac58209ea3ac573a3aecf1c00a0065f24d2583
| 122
|
py
|
Python
|
cvxpy/reductions/dgp2dcp/atom_canonicalizers/power_canon.py
|
lumbric/cvxpy
|
77ee02aadba1bdcf995117ea98224054dba6406e
|
[
"ECL-2.0",
"Apache-2.0"
] | 3,285
|
2015-01-03T04:02:29.000Z
|
2021-04-19T14:51:29.000Z
|
cvxpy/reductions/dgp2dcp/atom_canonicalizers/power_canon.py
|
h-vetinari/cvxpy
|
86307f271819bb78fcdf64a9c3a424773e8269fa
|
[
"ECL-2.0",
"Apache-2.0"
] | 1,138
|
2015-01-01T19:40:14.000Z
|
2021-04-18T23:37:31.000Z
|
cvxpy/reductions/dgp2dcp/atom_canonicalizers/power_canon.py
|
h-vetinari/cvxpy
|
86307f271819bb78fcdf64a9c3a424773e8269fa
|
[
"ECL-2.0",
"Apache-2.0"
] | 765
|
2015-01-02T19:29:39.000Z
|
2021-04-20T00:50:43.000Z
|
def power_canon(expr, args):
    """Canonicalize a DGP power atom into its DCP (log-domain) form.

    With the substitution u = log x, the atom x**p maps to p * u, so the
    canonical expression is just the exponent times the log-transformed
    argument, with no extra constraints.
    """
    exponent = expr.p
    log_arg = args[0]
    return exponent * log_arg, []
| 30.5
| 60
| 0.491803
| 24
| 122
| 2.458333
| 0.5
| 0.135593
| 0.169492
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010989
| 0.254098
| 122
| 3
| 61
| 40.666667
| 0.637363
| 0.442623
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
f7c1a9a48fbd29d1dce14cb6d7a9d838bab9f312
| 202
|
py
|
Python
|
foe_pool.py
|
QwerTech/foe-automation
|
9978cd365097a2c9ebec9039642c4e5f6c361018
|
[
"MIT"
] | null | null | null |
foe_pool.py
|
QwerTech/foe-automation
|
9978cd365097a2c9ebec9039642c4e5f6c361018
|
[
"MIT"
] | 3
|
2021-09-08T02:13:20.000Z
|
2022-03-12T00:36:40.000Z
|
foe_pool.py
|
QwerTech/foe-automation
|
9978cd365097a2c9ebec9039642c4e5f6c361018
|
[
"MIT"
] | null | null | null |
import multiprocessing
# Module-level worker pool; stays None until initPool() is called.
pool = None
def initPool():
    """Create the module-level worker pool with one process per CPU core.

    Must be called before execInPool(). cpu_count() already returns an
    int, so the original's redundant int() wrapper is dropped; the pool
    size itself is unchanged (it is also Pool's default).
    """
    global pool
    pool = multiprocessing.Pool(multiprocessing.cpu_count())
def execInPool(func, params):
    """Apply *func* to every item of *params* on the shared pool.

    Blocks until all results are ready and returns them as a list, in
    input order. initPool() must have been called first.
    """
    results = pool.map(func, params)
    return results
| 15.538462
| 65
| 0.722772
| 24
| 202
| 6.041667
| 0.625
| 0.262069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173267
| 202
| 12
| 66
| 16.833333
| 0.868263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0.142857
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
f7ea7fe9f9912328c0fc442f2071a09cf4c299dc
| 182
|
py
|
Python
|
doc/training/test_02.py
|
sriiora/tcf
|
e607ce04f97dbb4910d94428c0600a6a7145a825
|
[
"Apache-2.0"
] | 24
|
2018-08-21T18:04:48.000Z
|
2022-02-07T22:50:06.000Z
|
doc/training/test_02.py
|
sriiora/tcf
|
e607ce04f97dbb4910d94428c0600a6a7145a825
|
[
"Apache-2.0"
] | 16
|
2018-08-21T18:03:52.000Z
|
2022-03-01T17:15:42.000Z
|
doc/training/test_02.py
|
sriiora/tcf
|
e607ce04f97dbb4910d94428c0600a6a7145a825
|
[
"Apache-2.0"
] | 29
|
2018-08-22T19:40:59.000Z
|
2021-12-21T11:13:23.000Z
|
#! /usr/bin/python3
import tcfl.tc
class _test(tcfl.tc.tc_c):
    """Minimal TCF testcase with two evaluation steps that log messages.

    NOTE(review): the eval_* methods are presumably discovered and run
    by the tcfl framework in lexical order — confirm against tcfl.tc.
    """
    def eval_00(self):
        self.report_info("Hello 1")

    def eval_01(self):
        self.report_info("Hello 2")
| 18.2
| 35
| 0.631868
| 30
| 182
| 3.633333
| 0.633333
| 0.110092
| 0.256881
| 0.330275
| 0.422018
| 0
| 0
| 0
| 0
| 0
| 0
| 0.049296
| 0.21978
| 182
| 9
| 36
| 20.222222
| 0.71831
| 0.098901
| 0
| 0
| 0
| 0
| 0.08589
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
f7f3244612927f459a9a92bc9aa249e3a62575cf
| 10,932
|
py
|
Python
|
Algorithm.Python/stubs/QuantConnect/Data/__Fundamental_59.py
|
gaoxiaojun/Lean
|
9dca43bccb720d0df91e4bfc1d363b71e3a36cb5
|
[
"Apache-2.0"
] | 2
|
2020-12-08T11:27:20.000Z
|
2021-04-06T13:21:15.000Z
|
Algorithm.Python/stubs/QuantConnect/Data/__Fundamental_59.py
|
gaoxiaojun/Lean
|
9dca43bccb720d0df91e4bfc1d363b71e3a36cb5
|
[
"Apache-2.0"
] | null | null | null |
Algorithm.Python/stubs/QuantConnect/Data/__Fundamental_59.py
|
gaoxiaojun/Lean
|
9dca43bccb720d0df91e4bfc1d363b71e3a36cb5
|
[
"Apache-2.0"
] | 1
|
2020-12-08T11:27:21.000Z
|
2020-12-08T11:27:21.000Z
|
from .__Fundamental_60 import *
import typing
import System.IO
import System.Collections.Generic
import System
import QuantConnect.Data.Fundamental.MultiPeriodField
import QuantConnect.Data.Fundamental
import QuantConnect.Data
import QuantConnect
import datetime
# Auto-generated .NET interop stubs for QuantConnect fundamental-data fields.
# Each class mirrors a C# MultiPeriodField: per-period float attributes
# (ThreeMonths, SixMonths, ...) plus GetPeriodValue/SetPeriodValue accessors.
# Bodies are `pass` placeholders -- the real implementations live in the
# QuantConnect .NET assemblies; do not hand-edit these stubs.
class RentExpenseSupplementalIncomeStatement(QuantConnect.Data.Fundamental.MultiPeriodField):
    """
    The sum of all rent expenses incurred by the company for operating leases during the year, it is a supplemental value which would
    be reported outside consolidated statements or consolidated statement's footnotes.
    RentExpenseSupplementalIncomeStatement(store: IDictionary[str, Decimal])
    """
    def GetPeriodValue(self, period: str) -> float:
        pass
    def SetPeriodValue(self, period: str, value: float) -> None:
        pass
    def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.RentExpenseSupplementalIncomeStatement:
        pass
    NineMonths: float
    SixMonths: float
    ThreeMonths: float
    TwelveMonths: float
    Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class ReorganizationOtherCostsCashFlowStatement(QuantConnect.Data.Fundamental.MultiPeriodField):
    """
    A non-cash adjustment relating to restructuring costs.
    ReorganizationOtherCostsCashFlowStatement(store: IDictionary[str, Decimal])
    """
    def GetPeriodValue(self, period: str) -> float:
        pass
    def SetPeriodValue(self, period: str, value: float) -> None:
        pass
    def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.ReorganizationOtherCostsCashFlowStatement:
        pass
    NineMonths: float
    SixMonths: float
    ThreeMonths: float
    TwelveMonths: float
    Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class RepaymentinLeaseFinancingCashFlowStatement(QuantConnect.Data.Fundamental.MultiPeriodField):
    """
    The cash outflow to repay lease financing during the PeriodAsByte.
    RepaymentinLeaseFinancingCashFlowStatement(store: IDictionary[str, Decimal])
    """
    def GetPeriodValue(self, period: str) -> float:
        pass
    def SetPeriodValue(self, period: str, value: float) -> None:
        pass
    def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.RepaymentinLeaseFinancingCashFlowStatement:
        pass
    NineMonths: float
    SixMonths: float
    ThreeMonths: float
    TwelveMonths: float
    Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class RepaymentOfDebtCashFlowStatement(QuantConnect.Data.Fundamental.MultiPeriodField):
    """
    Payments to Settle Long Term Debt plus Payments to Settle Short Term Debt.
    RepaymentOfDebtCashFlowStatement(store: IDictionary[str, Decimal])
    """
    def GetPeriodValue(self, period: str) -> float:
        pass
    def SetPeriodValue(self, period: str, value: float) -> None:
        pass
    def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.RepaymentOfDebtCashFlowStatement:
        pass
    NineMonths: float
    OneMonth: float
    SixMonths: float
    ThreeMonths: float
    TwelveMonths: float
    TwoMonths: float
    Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class ReportedNormalizedBasicEPS(QuantConnect.Data.Fundamental.MultiPeriodField):
    """
    Normalized Basic EPS as reported by the company in the financial statements.
    ReportedNormalizedBasicEPS(store: IDictionary[str, Decimal])
    """
    def GetPeriodValue(self, period: str) -> float:
        pass
    def SetPeriodValue(self, period: str, value: float) -> None:
        pass
    def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.ReportedNormalizedBasicEPS:
        pass
    NineMonths: float
    SixMonths: float
    ThreeMonths: float
    TwelveMonths: float
    Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class ReportedNormalizedDilutedEPS(QuantConnect.Data.Fundamental.MultiPeriodField):
    """
    Normalized Diluted EPS as reported by the company in the financial statements.
    ReportedNormalizedDilutedEPS(store: IDictionary[str, Decimal])
    """
    def GetPeriodValue(self, period: str) -> float:
        pass
    def SetPeriodValue(self, period: str, value: float) -> None:
        pass
    def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.ReportedNormalizedDilutedEPS:
        pass
    NineMonths: float
    SixMonths: float
    ThreeMonths: float
    TwelveMonths: float
    Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class RepurchaseOfCapitalStockCashFlowStatement(QuantConnect.Data.Fundamental.MultiPeriodField):
    """
    Payments for Common Stock plus Payments for Preferred Stock.
    RepurchaseOfCapitalStockCashFlowStatement(store: IDictionary[str, Decimal])
    """
    def GetPeriodValue(self, period: str) -> float:
        pass
    def SetPeriodValue(self, period: str, value: float) -> None:
        pass
    def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.RepurchaseOfCapitalStockCashFlowStatement:
        pass
    NineMonths: float
    OneMonth: float
    SixMonths: float
    ThreeMonths: float
    TwelveMonths: float
    TwoMonths: float
    Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class ResearchAndDevelopmentExpensesSupplementalIncomeStatement(QuantConnect.Data.Fundamental.MultiPeriodField):
    """
    The aggregate amount of research and development expenses during the year. It is a supplemental value which would be reported
    outside consolidated statements.
    ResearchAndDevelopmentExpensesSupplementalIncomeStatement(store: IDictionary[str, Decimal])
    """
    def GetPeriodValue(self, period: str) -> float:
        pass
    def SetPeriodValue(self, period: str, value: float) -> None:
        pass
    def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.ResearchAndDevelopmentExpensesSupplementalIncomeStatement:
        pass
    NineMonths: float
    SixMonths: float
    ThreeMonths: float
    TwelveMonths: float
    Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class ResearchAndDevelopmentIncomeStatement(QuantConnect.Data.Fundamental.MultiPeriodField):
    """
    The aggregate amount of research and development expenses during the year.
    ResearchAndDevelopmentIncomeStatement(store: IDictionary[str, Decimal])
    """
    def GetPeriodValue(self, period: str) -> float:
        pass
    def SetPeriodValue(self, period: str, value: float) -> None:
        pass
    def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.ResearchAndDevelopmentIncomeStatement:
        pass
    NineMonths: float
    OneMonth: float
    SixMonths: float
    ThreeMonths: float
    TwelveMonths: float
    TwoMonths: float
    Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class RestrictedCashAndCashEquivalentsBalanceSheet(QuantConnect.Data.Fundamental.MultiPeriodField):
    """
    The carrying amounts of cash and cash equivalent items which are restricted as to withdrawal or usage. This item is available for
    bank and insurance industries.
    RestrictedCashAndCashEquivalentsBalanceSheet(store: IDictionary[str, Decimal])
    """
    def GetPeriodValue(self, period: str) -> float:
        pass
    def SetPeriodValue(self, period: str, value: float) -> None:
        pass
    def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.RestrictedCashAndCashEquivalentsBalanceSheet:
        pass
    NineMonths: float
    SixMonths: float
    ThreeMonths: float
    TwelveMonths: float
    Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class RestrictedCashAndInvestmentsBalanceSheet(QuantConnect.Data.Fundamental.MultiPeriodField):
    """
    The cash and investments whose use in whole or in part is restricted for the long-term, generally by contractual agreements or
    regulatory requirements. This item is usually only available for bank industry.
    RestrictedCashAndInvestmentsBalanceSheet(store: IDictionary[str, Decimal])
    """
    def GetPeriodValue(self, period: str) -> float:
        pass
    def SetPeriodValue(self, period: str, value: float) -> None:
        pass
    def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.RestrictedCashAndInvestmentsBalanceSheet:
        pass
    NineMonths: float
    SixMonths: float
    ThreeMonths: float
    TwelveMonths: float
    Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class RestrictedCashBalanceSheet(QuantConnect.Data.Fundamental.MultiPeriodField):
    """
    The carrying amounts of cash and cash equivalent items, which are restricted as to withdrawal or usage. Restrictions may include
    legally restricted deposits held as compensating balances against short-term borrowing arrangements, contracts entered into with
    others, or entity statements of intention with regard to particular deposits; however, time deposits and short-term certificates of
    deposit are not generally included in legally restricted deposits. Excludes compensating balance arrangements that are not
    agreements, which legally restrict the use of cash amounts shown on the balance sheet. For a classified balance sheet, represents
    the current portion only (the non-current portion has a separate concept); for an unclassified balance sheet represents the entire
    amount. This item is usually not available for bank and insurance industries.
    RestrictedCashBalanceSheet(store: IDictionary[str, Decimal])
    """
    def GetPeriodValue(self, period: str) -> float:
        pass
    def SetPeriodValue(self, period: str, value: float) -> None:
        pass
    def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.RestrictedCashBalanceSheet:
        pass
    NineMonths: float
    OneMonth: float
    SixMonths: float
    ThreeMonths: float
    TwelveMonths: float
    TwoMonths: float
    Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
| 31.323782
| 173
| 0.737559
| 1,056
| 10,932
| 7.587121
| 0.171402
| 0.077883
| 0.128058
| 0.134174
| 0.664254
| 0.632551
| 0.610584
| 0.610584
| 0.610584
| 0.610584
| 0
| 0.000226
| 0.189261
| 10,932
| 348
| 174
| 31.413793
| 0.903757
| 0.282108
| 0
| 0.790123
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0.222222
| 0.061728
| 0
| 0.777778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
f7f46e2b3c6e42b33eb61ceb086623c097b17c00
| 21,617
|
py
|
Python
|
pvpn/crypto.py
|
qwj/python-vpn
|
b09e4ab180e3cb4c6b480e9693e8f19854c7b6ec
|
[
"MIT"
] | 125
|
2018-12-25T12:18:55.000Z
|
2022-03-26T17:56:31.000Z
|
pvpn/crypto.py
|
Open-ATS-Github/python-vpn
|
b09e4ab180e3cb4c6b480e9693e8f19854c7b6ec
|
[
"MIT"
] | 6
|
2019-01-08T09:12:28.000Z
|
2022-01-11T15:39:24.000Z
|
pvpn/crypto.py
|
Open-ATS-Github/python-vpn
|
b09e4ab180e3cb4c6b480e9693e8f19854c7b6ec
|
[
"MIT"
] | 36
|
2018-12-29T15:40:12.000Z
|
2022-03-25T21:37:04.000Z
|
import hashlib, os, random, hmac
from Crypto.Cipher import AES, ChaCha20_Poly1305
from . import enums
class Prf:
    """HMAC-based pseudo-random function for IKE key derivation.

    The transform id (IKEv1 HashId_1 or IKEv2 PrfId) selects the underlying
    hash and the PRF key size.
    """
    # (hash constructor, key size in bytes) keyed by IKEv1 hash transform id
    DIGESTS_1 = {
        enums.HashId_1.MD5: (hashlib.md5, 16),
        enums.HashId_1.SHA: (hashlib.sha1, 20),
        enums.HashId_1.SHA2_256: (hashlib.sha256, 32),
        enums.HashId_1.SHA2_384: (hashlib.sha384, 48),
        enums.HashId_1.SHA2_512: (hashlib.sha512, 64),
    }
    # (hash constructor, key size in bytes) keyed by IKEv2 PRF transform id
    DIGESTS = {
        enums.PrfId.PRF_HMAC_MD5: (hashlib.md5, 16),
        enums.PrfId.PRF_HMAC_SHA1: (hashlib.sha1, 20),
        enums.PrfId.PRF_HMAC_SHA2_256: (hashlib.sha256, 32),
        enums.PrfId.PRF_HMAC_SHA2_384: (hashlib.sha384, 48),
        enums.PrfId.PRF_HMAC_SHA2_512: (hashlib.sha512, 64),
    }
    def __init__(self, transform):
        # Pick the lookup table by transform family (IKEv2 PrfId vs IKEv1 id).
        table = self.DIGESTS if type(transform) is enums.PrfId else self.DIGESTS_1
        self.hasher, self.key_size = table[transform]
    def prf(self, key, data):
        """One PRF application: HMAC(key, data) with the selected hash."""
        mac = hmac.new(key, data, digestmod=self.hasher)
        return mac.digest()
    def prfplus(self, key, seed, count=True):
        """Yield an unbounded keystream per RFC 7296 prf+ (byte generator).

        When count is False the per-round counter byte is omitted
        (IKEv1-style chaining).
        """
        block = b''
        for round_no in range(1, 1024):
            suffix = bytes([round_no]) if count else b''
            block = self.prf(key, block + seed + suffix)
            yield from block
class Integrity:
    """Truncated-HMAC integrity transform for IKE/ESP.

    The transform id selects the hash, the HMAC key size, and the length the
    MAC is truncated to (hash_size).
    """
    # (hash constructor, key size, truncated MAC size) by IKEv1 transform id
    DIGESTS_1 = {
        enums.IntegId_1.AUTH_HMAC_MD5: (hashlib.md5, 16, 12),
        enums.IntegId_1.AUTH_HMAC_SHA1: (hashlib.sha1, 20, 12),
        enums.IntegId_1.AUTH_HMAC_SHA2_256: (hashlib.sha256, 32, 16),
        enums.IntegId_1.AUTH_HMAC_SHA2_384: (hashlib.sha384, 48, 24),
        enums.IntegId_1.AUTH_HMAC_SHA2_512: (hashlib.sha512, 64, 32),
    }
    # (hash constructor, key size, truncated MAC size) by IKEv2 transform id
    DIGESTS = {
        enums.IntegId.AUTH_HMAC_MD5_96: (hashlib.md5, 16, 12),
        enums.IntegId.AUTH_HMAC_SHA1_96: (hashlib.sha1, 20, 12),
        enums.IntegId.AUTH_HMAC_MD5_128: (hashlib.md5, 16, 16),
        enums.IntegId.AUTH_HMAC_SHA1_160: (hashlib.sha1, 20, 20),
        enums.IntegId.AUTH_HMAC_SHA2_256_128: (hashlib.sha256, 32, 16),
        enums.IntegId.AUTH_HMAC_SHA2_384_192: (hashlib.sha384, 48, 24),
        enums.IntegId.AUTH_HMAC_SHA2_512_256: (hashlib.sha512, 64, 32),
    }
    def __init__(self, transform):
        table = self.DIGESTS if type(transform) is enums.IntegId else self.DIGESTS_1
        self.hasher, self.key_size, self.hash_size = table[transform]
    def compute(self, key, data):
        """Return the HMAC of data under key, truncated to hash_size bytes."""
        mac = hmac.new(key, data, digestmod=self.hasher)
        return mac.digest()[:self.hash_size]
class Cipher:
    """AES-CBC encryption transform; the only cipher this stack supports."""
    def __init__(self, transform, keylen):
        # Accept either the IKEv2 or the IKEv1 AES-CBC transform id.
        is_v2_aes = type(transform) is enums.EncrId and transform == enums.EncrId.ENCR_AES_CBC
        is_v1_aes = type(transform) is enums.EncrId_1 and transform == enums.EncrId_1.AES_CBC
        assert is_v2_aes or is_v1_aes
        self.keylen = keylen  # negotiated key length in bits
    @property
    def block_size(self):
        # AES block size is fixed at 16 bytes.
        return 16
    @property
    def key_size(self):
        # Key length in bytes.
        return self.keylen // 8
    def encrypt(self, key, iv, data):
        """CBC-encrypt data (must be a whole number of blocks)."""
        aes = AES.new(key, AES.MODE_CBC, iv=iv)
        return aes.encrypt(data)
    def decrypt(self, key, iv, data):
        """CBC-decrypt data (must be a whole number of blocks)."""
        aes = AES.new(key, AES.MODE_CBC, iv=iv)
        return aes.decrypt(data)
    def generate_iv(self):
        """Return a fresh random IV of one block."""
        return os.urandom(self.block_size)
class Crypto:
    # Bundles the negotiated cipher/integrity/PRF transforms with their session
    # keys and implements payload encryption/decryption for ESP and for IKE
    # (both the IKEv2-style encrypt/decrypt and the IKEv1-style *_1 variants
    # with per-message-id chained IVs).
    def __init__(self, cipher, sk_e, integrity=None, sk_a=None, prf=None, sk_p=None, *, iv=None):
        """Store transforms and keys.

        cipher/sk_e: encryption transform and encryption key.
        integrity/sk_a: integrity transform and authentication key (optional).
        prf/sk_p: PRF transform and key (optional).
        iv: initial IV seed used for IKEv1 per-message-id IV derivation.
        """
        self.cipher = cipher
        self.sk_e = sk_e
        self.integrity = integrity
        self.sk_a = sk_a
        self.prf = prf
        self.sk_p = sk_p
        # IV table keyed by message id; slot 0 holds the phase-1 seed IV.
        self.iv = {0: iv}
        # NOTE(review): last_iv and m_id are not used inside this class --
        # presumably maintained by callers elsewhere in the package; confirm.
        self.last_iv = None
        self.m_id = set()
    def decrypt_esp(self, encrypted):
        # Layout: IV | ciphertext | integrity checksum (checksum not verified here).
        iv = encrypted[:self.cipher.block_size]
        ciphertext = encrypted[self.cipher.block_size:len(encrypted)-self.integrity.hash_size]
        plain = self.cipher.decrypt(self.sk_e, bytes(iv), bytes(ciphertext))
        # ESP trailer: ... padding | pad length | next-header byte
        next_header = plain[-1]
        padlen = plain[-2]
        return next_header, plain[:-2-padlen]
    def encrypt_esp(self, next_header, plain):
        iv = self.cipher.generate_iv()
        # Pad so that payload + padlen byte + next-header byte fills whole blocks.
        padlen = self.cipher.block_size - ((len(plain)+1) % self.cipher.block_size) - 1
        plain += b'\x00' * padlen + bytes([padlen, next_header])
        encrypted = self.cipher.encrypt(self.sk_e, bytes(iv), bytes(plain))
        # Checksum field emitted as zeros; filled in later by add_checksum().
        return iv + encrypted + bytes(self.integrity.hash_size)
    def encrypt_1(self, plain, m_id):
        # IKEv1: derive the per-message-id IV from the phase-1 IV on first use.
        if m_id not in self.iv:
            self.iv[m_id] = self.prf.hasher(self.iv[0]+m_id.to_bytes(4, 'big')).digest()[:self.cipher.block_size]
        padlen = self.cipher.block_size - ((len(plain)+1) % self.cipher.block_size)
        plain += b'\x00' * padlen + bytes([padlen])
        encrypted = self.cipher.encrypt(self.sk_e, self.iv[m_id], bytes(plain))
        # Chain: next IV for this message id is the last ciphertext block.
        self.iv[m_id] = encrypted[-self.cipher.block_size:]
        return encrypted
    def decrypt_1(self, encrypted, m_id):
        # IKEv1 counterpart of encrypt_1, with the same IV derivation/chaining.
        if m_id not in self.iv:
            self.iv[m_id] = self.prf.hasher(self.iv[0]+m_id.to_bytes(4, 'big')).digest()[:self.cipher.block_size]
        plain = self.cipher.decrypt(self.sk_e, self.iv[m_id], encrypted)
        self.iv[m_id] = encrypted[-self.cipher.block_size:]
        padlen = plain[-1]
        # do not remove padding according to ios cisco ipsec bug
        return plain
    def decrypt(self, encrypted):
        # IKEv2: IV | ciphertext | checksum; trailer is padding | pad length.
        iv = encrypted[:self.cipher.block_size]
        ciphertext = encrypted[self.cipher.block_size:len(encrypted)-self.integrity.hash_size]
        plain = self.cipher.decrypt(self.sk_e, bytes(iv), bytes(ciphertext))
        padlen = plain[-1]
        return plain[:-1-padlen]
    def encrypt(self, plain):
        iv = self.cipher.generate_iv()
        padlen = self.cipher.block_size - (len(plain) % self.cipher.block_size) - 1
        plain += b'\x00' * padlen + bytes([padlen])
        encrypted = self.cipher.encrypt(self.sk_e, bytes(iv), bytes(plain))
        # Checksum placeholder (zeros); see add_checksum().
        return iv + encrypted + bytes(self.integrity.hash_size)
    def verify_checksum(self, encrypted):
        # Recompute the truncated MAC over everything before the checksum field
        # and assert it matches the trailing bytes.
        checksum = self.integrity.compute(self.sk_a, encrypted[:len(encrypted)-self.integrity.hash_size])
        assert checksum == encrypted[len(encrypted)-self.integrity.hash_size:]
    def add_checksum(self, encrypted):
        # Overwrite the trailing checksum field in place (encrypted must be mutable).
        checksum = self.integrity.compute(self.sk_a, encrypted[:len(encrypted)-self.integrity.hash_size])
        encrypted[len(encrypted)-len(checksum):] = checksum
def aead_chacha20poly1305_encrypt(key, counter, plain_text, auth_text):
    """Encrypt with ChaCha20-Poly1305; returns ciphertext || 16-byte tag.

    The 12-byte nonce is 4 zero bytes followed by the counter, little-endian.
    """
    nonce = b'\x00\x00\x00\x00' + counter.to_bytes(8, 'little')
    aead = ChaCha20_Poly1305.new(key=key, nonce=nonce)
    aead.update(auth_text)
    body, tag = aead.encrypt_and_digest(plain_text)
    return body + tag
def aead_chacha20poly1305_decrypt(key, counter, cipher_text, auth_text):
    """Decrypt ChaCha20-Poly1305 output (ciphertext || 16-byte tag).

    Raises if the authentication tag does not verify.
    """
    nonce = b'\x00\x00\x00\x00' + counter.to_bytes(8, 'little')
    aead = ChaCha20_Poly1305.new(key=key, nonce=nonce)
    aead.update(auth_text)
    body, tag = cipher_text[:-16], cipher_text[-16:]
    return aead.decrypt_and_verify(body, tag)
# DH and ECDH algorithms
def ec_add(P, Q, l, p, a):
    """Add two points on a short-Weierstrass curve y^2 = x^3 + a*x + b (mod p).

    Points are packed integers: x-coordinate in the high bits (>> l) and
    y-coordinate in the low l bits; P == 0 encodes the point at infinity.
    NOTE: distinct points with equal x (P + (-P)) are not handled.
    """
    if P == 0:
        return Q
    mask = (1 << l) - 1
    px, py = P >> l, P & mask
    qx, qy = Q >> l, Q & mask
    if P == Q:
        # doubling: tangent slope (modular inverse via Fermat's little theorem)
        slope = (3 * px * px + a) * pow(2 * py, p - 2, p)
    else:
        # addition: chord slope
        slope = (qy - py) * pow(qx - px, p - 2, p)
    rx = (slope * slope - px - qx) % p
    ry = (slope * (px - rx) - py) % p
    return rx << l | ry
def ec_mul(P, l, i, p, a):
    """Scalar-multiply packed point P by i using double-and-add.

    l is the coordinate width in BYTES (ec_add takes bits, hence l << 3).
    """
    acc = 0  # point at infinity
    while i > 0:
        if i & 1:
            acc = ec_add(acc, P, l << 3, p, a)
        P = ec_add(P, P, l << 3, p, a)
        i >>= 1
    return acc
def ec_scalar(k, u, p, a24, bits):
    """Montgomery-ladder scalar multiplication (RFC 7748 style).

    Returns the x-coordinate of k*P given the x-coordinate u of P on a
    Montgomery curve with constant a24 = (A - 2) / 4.
    """
    # (xa, za): accumulator starting at infinity; (xb, zb): the base point.
    xa, za = 1, 0
    xb, zb = u, 1
    prev_bit = 0
    for t in reversed(range(bits)):
        bit = (k >> t) & 1
        # conditional swap so the ladder step always operates the same way
        if prev_bit ^ bit:
            xa, xb = xb, xa
            za, zb = zb, za
        prev_bit = bit
        sum_a, dif_a = xa + za, xa - za
        sum_b, dif_b = xb + zb, xb - zb
        sq_sum, sq_dif = sum_a * sum_a, dif_a * dif_a
        cross1, cross2 = dif_b * sum_a, sum_b * dif_a
        e = sq_sum - sq_dif
        xb = pow(cross1 + cross2, 2, p)
        zb = u * pow(cross1 - cross2, 2, p) % p
        xa = sq_sum * sq_dif % p
        za = e * (sq_sum + a24 * e) % p
    if prev_bit:
        xa, za = xb, zb
    # convert from projective (xa : za) back to affine
    return (xa * pow(za, p - 2, p) % p)
def X25519(k, u):
    """X25519 scalar multiplication: 32-byte little-endian shared value.

    k is the 32-byte scalar; u is the peer's u-coordinate (bytes or int).
    """
    if isinstance(u, bytes):
        u = int.from_bytes(u, 'little')
    k = int.from_bytes(k, 'little')
    # clamp: clear bits 0-2 and bit 255, force bit 254 set
    k = k & ((1 << 256) - (1 << 255) - 8) | (1 << 254)
    return ec_scalar(k, u, 2**255 - 19, 121665, 255).to_bytes(32, 'little')
def X448(k, u):
    """X448 scalar multiplication: 56-byte little-endian shared value.

    k is the 56-byte scalar; u is the peer's u-coordinate (bytes or int).
    """
    if isinstance(u, bytes):
        u = int.from_bytes(u, 'little')
    k = int.from_bytes(k, 'little')
    # clamp: clear the two low bits, force the top bit (447) set
    k = k & (-4) | (1 << 447)
    return ec_scalar(k, u, 2**448 - 2**224 - 1, 39081, 448).to_bytes(56, 'little')
# Diffie-Hellman group parameters keyed by IKE DH group id. Each entry is a
# 3-tuple (p, g, l):
#   - MODP groups (DH_1..DH_18, DH_22..DH_24): (prime modulus, generator,
#     shared-secret byte length)
#   - ECP groups (DH_19..DH_21, DH_25..DH_30): (field prime, (packed generator
#     point [and curve parameter a], curve a or generator y), coordinate bytes)
#   - DH_31/DH_32: (private-key bound, scalar-mult function, base u-coordinate)
# NOTE(review): the constants presumably come from RFC 3526 / RFC 5114 /
# RFC 5903 / RFC 7748 -- they are not re-verified here; treat as opaque.
PRIMES = {
    enums.DhId.DH_1: (0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A63A3620FFFFFFFFFFFFFFFF, 2, 96),
    enums.DhId.DH_2: (0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF, 2, 128),
    enums.DhId.DH_5: (0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA237327FFFFFFFFFFFFFFFF, 2, 192),
    enums.DhId.DH_14: (0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF, 2, 256),
    enums.DhId.DH_15: (0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A93AD2CAFFFFFFFFFFFFFFFF, 2, 384),
    enums.DhId.DH_16: (0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934063199FFFFFFFFFFFFFFFF, 2, 512),
    enums.DhId.DH_17: (0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C93402849236C3FAB4D27C7026C1D4DCB2602646DEC9751E763DBA37BDF8FF9406AD9E530EE5DB382F413001AEB06A53ED9027D831179727B0865A8918DA3EDBEBCF9B14ED44CE6CBACED4BB1BDB7F1447E6CC254B332051512BD7AF426FB8F401378CD2BF5983CA01C64B92ECF032EA15D1721D03F482D7CE6E74FEF6D55E702F46980C82B5A84031900B1C9E59E7C97FBEC7E8F323A97A7E36CC88BE0F1D45B7FF585AC54BD407B22B4154AACC8F6D7EBF48E1D814CC5ED20F8037E0A79715EEF29BE32806A1D58BB7C5DA76F550AA3D8A1FBFF0EB19CCB1A313D55CDA56C9EC2EF29632387FE8D76E3C0468043E8F663F4860EE12BF2D5B0B7474D6E694F91E6DCC4024FFFFFFFFFFFFFFFF, 2, 768),
    enums.DhId.DH_18: (0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C93402849236C3FAB4D27C7026C1D4DCB2602646DEC9751E763DBA37BDF8FF9406AD9E530EE5DB382F413001AEB06A53ED9027D831179727B0865A8918DA3EDBEBCF9B14ED44CE6CBACED4BB1BDB7F1447E6CC254B332051512BD7AF426FB8F401378CD2BF5983CA01C64B92ECF032EA15D1721D03F482D7CE6E74FEF6D55E702F46980C82B5A84031900B1C9E59E7C97FBEC7E8F323A97A7E36CC88BE0F1D45B7FF585AC54BD407B22B4154AACC8F6D7EBF48E1D814CC5ED20F8037E0A79715EEF29BE32806A1D58BB7C5DA76F550AA3D8A1FBFF0EB19CCB1A313D55CDA56C9EC2EF29632387FE8D76E3C0468043E8F663F4860EE12BF2D5B0B7474D6E694F91E6DBE115974A3926F12FEE5E438777CB6A932DF8CD8BEC4D073B931BA3BC832B68D9DD300741FA7BF8AFC47ED2576F6936BA424663AAB639C5AE4F5683423B4742BF1C978238F16CBE39D652DE3FDB8BEFC848AD922222E04A4037C0713EB57A81A23F0C73473FC646CEA306B4BCBC8862F8385DDFA9D4B7FA2C087E879683303ED5BDD3A062B3CF5B3A278A66D2A13F83F44F82DDF310EE074AB6A364597E899A0255DC164F31CC50846851DF9AB48195DED7EA1B1D510BD7EE74D73FAF36BC31ECFA268359046F4EB879F924009438B481C6CD7889A002ED5EE382BC9190DA6FC026E4795589447567E9AA9E3050E2765694DFC81F56E880B96E7160C980DD98EDD3DFFFFFFFFFFFFFFFFF, 2, 1024),
    enums.DhId.DH_19: (0xFFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFF, (0x6B17D1F2E12C4247F8BCE6E563A440F277037D812DEB33A0F4A13945D898C2964FE342E2FE1A7F9B8EE7EB4A7C0F9E162BCE33576B315ECECBB6406837BF51F5, -3), 32),
    enums.DhId.DH_20: (0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFF0000000000000000FFFFFFFF, (0xAA87CA22BE8B05378EB1C71EF320AD746E1D3B628BA79B9859F741E082542A385502F25DBF55296C3A545E3872760AB73617DE4A96262C6F5D9E98BF9292DC29F8F41DBD289A147CE9DA3113B5F0B8C00A60B1CE1D7E819D7A431D7C90EA0E5F, -3), 48),
    enums.DhId.DH_21: (0x01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF, (0x00C6858E06B70404E9CD9E3ECB662395B4429C648139053FB521F828AF606B4D3DBAA14B5E77EFE75928FE1DC127A2FFA8DE3348B3C1856A429BF97E7E31C2E5BD66011839296A789A3BC0045C8A5FB42C7D1BD998F54449579B446817AFBD17273E662C97EE72995EF42640C550B9013FAD0761353C7086A272C24088BE94769FD16650, -3), 66),
    enums.DhId.DH_22: (0xB10B8F96A080E01DDE92DE5EAE5D54EC52C99FBCFB06A3C69A6A9DCA52D23B616073E28675A23D189838EF1E2EE652C013ECB4AEA906112324975C3CD49B83BFACCBDD7D90C4BD7098488E9C219A73724EFFD6FAE5644738FAA31A4FF55BCCC0A151AF5F0DC8B4BD45BF37DF365C1A65E68CFDA76D4DA708DF1FB2BC2E4A4371, 0xA4D1CBD5C3FD34126765A442EFB99905F8104DD258AC507FD6406CFF14266D31266FEA1E5C41564B777E690F5504F213160217B4B01B886A5E91547F9E2749F4D7FBD7D3B9A92EE1909D0D2263F80A76A6A24C087A091F531DBF0A0169B6A28AD662A4D18E73AFA32D779D5918D08BC8858F4DCEF97C2A24855E6EEB22B3B2E5, 128),
    enums.DhId.DH_23: (0xAD107E1E9123A9D0D660FAA79559C51FA20D64E5683B9FD1B54B1597B61D0A75E6FA141DF95A56DBAF9A3C407BA1DF15EB3D688A309C180E1DE6B85A1274A0A66D3F8152AD6AC2129037C9EDEFDA4DF8D91E8FEF55B7394B7AD5B7D0B6C12207C9F98D11ED34DBF6C6BA0B2C8BBC27BE6A00E0A0B9C49708B3BF8A317091883681286130BC8985DB1602E714415D9330278273C7DE31EFDC7310F7121FD5A07415987D9ADC0A486DCDF93ACC44328387315D75E198C641A480CD86A1B9E587E8BE60E69CC928B2B9C52172E413042E9B23F10B0E16E79763C9B53DCF4BA80A29E3FB73C16B8E75B97EF363E2FFA31F71CF9DE5384E71B81C0AC4DFFE0C10E64F, 0xAC4032EF4F2D9AE39DF30B5C8FFDAC506CDEBE7B89998CAF74866A08CFE4FFE3A6824A4E10B9A6F0DD921F01A70C4AFAAB739D7700C29F52C57DB17C620A8652BE5E9001A8D66AD7C17669101999024AF4D027275AC1348BB8A762D0521BC98AE247150422EA1ED409939D54DA7460CDB5F6C6B250717CBEF180EB34118E98D119529A45D6F834566E3025E316A330EFBB77A86F0C1AB15B051AE3D428C8F8ACB70A8137150B8EEB10E183EDD19963DDD9E263E4770589EF6AA21E7F5F2FF381B539CCE3409D13CD566AFBB48D6C019181E1BCFE94B30269EDFE72FE9B6AA4BD7B5A0F1C71CFFF4C19C418E1F6EC017981BC087F2A7065B384B890D3191F2BFA, 256),
    enums.DhId.DH_24: (0x87A8E61DB4B6663CFFBBD19C651959998CEEF608660DD0F25D2CEED4435E3B00E00DF8F1D61957D4FAF7DF4561B2AA3016C3D91134096FAA3BF4296D830E9A7C209E0C6497517ABD5A8A9D306BCF67ED91F9E6725B4758C022E0B1EF4275BF7B6C5BFC11D45F9088B941F54EB1E59BB8BC39A0BF12307F5C4FDB70C581B23F76B63ACAE1CAA6B7902D52526735488A0EF13C6D9A51BFA4AB3AD8347796524D8EF6A167B5A41825D967E144E5140564251CCACB83E6B486F6B3CA3F7971506026C0B857F689962856DED4010ABD0BE621C3A3960A54E710C375F26375D7014103A4B54330C198AF126116D2276E11715F693877FAD7EF09CADB094AE91E1A1597, 0x3FB32C9B73134D0B2E77506660EDBD484CA7B18F21EF205407F4793A1A0BA12510DBC15077BE463FFF4FED4AAC0BB555BE3A6C1B0C6B47B1BC3773BF7E8C6F62901228F8C28CBB18A55AE31341000A650196F931C77A57F2DDF463E5E9EC144B777DE62AAAB8A8628AC376D282D6ED3864E67982428EBC831D14348F6F2F9193B5045AF2767164E1DFC967C1FB3F2E55A4BD1BFFE83B9C80D052B985D182EA0ADB2A3B7313D3FE14C8484B1E052588B9B7D2BBD2DF016199ECD06E1557CD0915B3353BBB64E0EC377FD028370DF92B52C7891428CDC67EB6184B523D1DB246C32F63078490F00EF8D647D148D47954515E2327CFEF98C582664B4C0F6CC41659, 256),
    enums.DhId.DH_25: (0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFF, (0x188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF101207192B95FFC8DA78631011ED6B24CDD573F977A11E794811, 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFC), 24),
    enums.DhId.DH_26: (0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF000000000000000000000001, (0xB70E0CBD6BB4BF7F321390B94A03C1D356C21122343280D6115C1D21BD376388B5F723FB4C22DFE6CD4375A05A07476444D5819985007E34, 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFFFFFFFFFE), 28),
    enums.DhId.DH_27: (0xD7C134AA264366862A18302575D1D787B09F075797DA89F57EC8C0FF, (0x0D9029AD2C7E5CF4340823B2A87DC68C9E4CE3174C1E6EFDEE12C07D58AA56F772C0726F24C6B89E4ECDAC24354B9E99CAA3F6D3761402CD, 0x68A5E62CA9CE6C1C299803A6C1530B514E182AD8B0042A59CAD29F43), 28),
    enums.DhId.DH_28: (0xA9FB57DBA1EEA9BC3E660A909D838D726E3BF623D52620282013481D1F6E5377, (0x8BD2AEB9CB7E57CB2C4B482FFC81B7AFB9DE27E1E3BD23C23A4453BD9ACE3262547EF835C3DAC4FD97F8461A14611DC9C27745132DED8E545C1D54C72F046997, 0x7D5A0975FC2C3057EEF67530417AFFE7FB8055C126DC5C6CE94A4B44F330B5D9), 32),
    enums.DhId.DH_29: (0x8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B412B1DA197FB71123ACD3A729901D1A71874700133107EC53, (0x1D1C64F068CF45FFA2A63A81B7C13F6B8847A3E77EF14FE3DB7FCAFE0CBD10E8E826E03436D646AAEF87B2E247D4AF1E8ABE1D7520F9C2A45CB1EB8E95CFD55262B70B29FEEC5864E19C054FF99129280E4646217791811142820341263C5315, 0x7BC382C63D8C150C3C72080ACE05AFA0C2BEA28E4FB22787139165EFBA91F90F8AA5814A503AD4EB04A8C7DD22CE2826), 48),
    enums.DhId.DH_30: (0xAADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA703308717D4D9B009BC66842AECDA12AE6A380E62881FF2F2D82C68528AA6056583A48F3, (0x81AEE4BDD82ED9645A21322E9C4C6A9385ED9F70B5D916C1B43B62EEF4D0098EFF3B1F78E2D0D48D50D1687B93B97D5F7C6D5047406A5E688B352209BCB9F8227DDE385D566332ECC0EABFA9CF7822FDF209F70024A57B1AA000C55B881F8111B2DCDE494A5F485E5BCA4BD88A2763AED1CA2B2FA8F0540678CD1E0F3AD80892, 0x7830A3318B603B89E2327145AC234CC594CBDD8D3DF91610A83441CAEA9863BC2DED5D5AA8253AA10A2EF1C98B9AC8B57F1117A72BF2C7B9E7C1AC4D77FC94CA), 64),
    enums.DhId.DH_31: (1<<32, X25519, 9),  # Curve25519: function-based entry
    enums.DhId.DH_32: (1<<56, X448, 5),  # Curve448: function-based entry
}
def DiffieHellman(group, peer):
    """Perform one Diffie-Hellman key agreement for *group* against *peer*.

    Looks the group parameters up in the module-level ``PRIMES`` table and
    returns a pair ``(own_public_value, shared_secret)`` as big-endian bytes.
    The table entry is ``(p, g, l)`` where ``g`` may be a MODP generator int,
    an EC ``(Gx, b)``-style tuple, or a callable (e.g. X25519/X448 handlers
    registered earlier in the file) — TODO(review): confirm the callable and
    tuple conventions against the full PRIMES definition above this view.

    :param group: a DH group identifier (``enums.DhId`` member) keying PRIMES.
    :param peer: the peer's public value as bytes (ignored shape for callables).
    :raises Exception: if *group* has no entry in PRIMES.
    """
    if group not in PRIMES:
        raise Exception(f'Unsupported DH Group DH_{group}')
    p, g, l = PRIMES[group]
    # Private exponent; the p>>8 lower bound excludes very small exponents.
    # NOTE(review): `random` is not cryptographically secure — presumably
    # acceptable in this codebase's context, but worth confirming.
    a = random.randrange(p>>8, p)
    if callable(g):
        # Curve handled by a dedicated function (X25519/X448-style):
        # g(a, basepoint) -> public, g(a, peer) -> shared secret.
        return g(a, l), g(a, peer)
    elif type(g) is tuple:
        # Weierstrass EC group: scalar-multiply the base point for our public
        # value, and the peer's point for the shared secret (x-coord, l bytes).
        return ec_mul(g[0], l, a, p, g[1]).to_bytes(l*2, 'big'), ec_mul(int.from_bytes(peer, 'big'), l, a, p, g[1]).to_bytes(l*2, 'big')[:l]
    else:
        # Classic finite-field (MODP) group: g^a mod p both ways.
        return pow(g, a, p).to_bytes(l, 'big'), pow(int.from_bytes(peer, 'big'), a, p).to_bytes(l, 'big')
| 96.075556
| 2,084
| 0.825554
| 1,581
| 21,617
| 11.117647
| 0.151803
| 0.013654
| 0.013768
| 0.015133
| 0.178358
| 0.155032
| 0.123115
| 0.119702
| 0.117597
| 0.108608
| 0
| 0.393716
| 0.101864
| 21,617
| 224
| 2,085
| 96.504464
| 0.511615
| 0.003562
| 0
| 0.202899
| 0
| 0
| 0.006826
| 0
| 0
| 1
| 0.558321
| 0
| 0.009662
| 1
| 0.135266
| false
| 0
| 0.014493
| 0.033816
| 0.304348
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
790148dd4299989881099779cd6bebf759687808
| 133
|
py
|
Python
|
basics/math.py
|
augustoscher/python-excercises
|
502fb3c15597033ba19e32f871be12d347a9aa2a
|
[
"MIT"
] | null | null | null |
basics/math.py
|
augustoscher/python-excercises
|
502fb3c15597033ba19e32f871be12d347a9aa2a
|
[
"MIT"
] | null | null | null |
basics/math.py
|
augustoscher/python-excercises
|
502fb3c15597033ba19e32f871be12d347a9aa2a
|
[
"MIT"
] | null | null | null |
# Quick demonstration of Python's arithmetic operators and numeric types.
print()
print("--- Math ---")
print(1+1)        # addition -> 2
print(1*3)        # multiplication -> 3
print(1/2)        # true division always yields a float -> 0.5
print(3**2)       # exponentiation -> 9
print(4%2)        # modulo (remainder) -> 0
print(4%2 == 0)   # evenness check -> True
print(type(1))    # <class 'int'>
print(type(1.0))  # <class 'float'>
| 13.3
| 21
| 0.586466
| 29
| 133
| 2.689655
| 0.275862
| 0.230769
| 0.179487
| 0.205128
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 0.097744
| 133
| 10
| 22
| 13.3
| 0.516667
| 0
| 0
| 0
| 0
| 0
| 0.089552
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
792c6117ab0836c1f92f4b975a15d00d72d0ed4a
| 243
|
py
|
Python
|
launcher/drkcraft_launcher/installer.py
|
D3r3k23/DrkCraft
|
5eaae66f558ce84f18de702b4227ca8d2cfe534f
|
[
"MIT"
] | 1
|
2022-02-10T04:41:57.000Z
|
2022-02-10T04:41:57.000Z
|
launcher/drkcraft_launcher/installer.py
|
D3r3k23/DrkCraft
|
5eaae66f558ce84f18de702b4227ca8d2cfe534f
|
[
"MIT"
] | null | null | null |
launcher/drkcraft_launcher/installer.py
|
D3r3k23/DrkCraft
|
5eaae66f558ce84f18de702b4227ca8d2cfe534f
|
[
"MIT"
] | null | null | null |
from typing import *
import os.path
from . import *
def run():
    """Installer entry point — currently an unimplemented stub (no-op)."""
    pass
def get_window_size() -> tuple[int, int]:
    """Return the installer window size as ``(width, height)`` in pixels."""
    width, height = 600, 480
    return (width, height)
def get_base_installation_dir() -> str:
    """Return the default installation root: ``~/.drkcraft`` (home-expanded)."""
    home = os.path.expanduser('~')
    return os.path.join(home, '.drkcraft')
| 17.357143
| 61
| 0.641975
| 34
| 243
| 4.441176
| 0.647059
| 0.119205
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030457
| 0.1893
| 243
| 13
| 62
| 18.692308
| 0.736041
| 0
| 0
| 0
| 0
| 0
| 0.041152
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.222222
| 0.888889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
f70c9e3b50c4675c04919b64426f03ccededdeba
| 118
|
py
|
Python
|
zdevelop/tests/test_example.py
|
illuscio-dev/islelib-py
|
6f4dd27233a7c38f112954673bb683c6790956fd
|
[
"MIT"
] | 1
|
2020-04-16T00:58:39.000Z
|
2020-04-16T00:58:39.000Z
|
zdevelop/tests/test_example.py
|
illuscio-dev/islelib-py
|
6f4dd27233a7c38f112954673bb683c6790956fd
|
[
"MIT"
] | null | null | null |
zdevelop/tests/test_example.py
|
illuscio-dev/islelib-py
|
6f4dd27233a7c38f112954673bb683c6790956fd
|
[
"MIT"
] | 1
|
2021-04-28T22:25:21.000Z
|
2021-04-28T22:25:21.000Z
|
from islelib import __version__
def test_example(example_fixture: int) -> None:
    """Smoke test: the package imports and exposes a ``__version__`` attribute.

    ``example_fixture`` is presumably a pytest fixture defined elsewhere in the
    test suite — its value is not used here.
    """
    assert __version__ is not False
| 19.666667
| 47
| 0.779661
| 16
| 118
| 5.125
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169492
| 118
| 5
| 48
| 23.6
| 0.836735
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f756e5b7f5b1a9393484397ceb2e9e27d7532533
| 129
|
py
|
Python
|
python-sdk/nuscenes/prediction/tests/test_mtp_loss.py
|
tanjiangyuan/Classification_nuScence
|
b94c4b0b6257fc1c048a676e3fd9e71183108d53
|
[
"Apache-2.0"
] | null | null | null |
python-sdk/nuscenes/prediction/tests/test_mtp_loss.py
|
tanjiangyuan/Classification_nuScence
|
b94c4b0b6257fc1c048a676e3fd9e71183108d53
|
[
"Apache-2.0"
] | null | null | null |
python-sdk/nuscenes/prediction/tests/test_mtp_loss.py
|
tanjiangyuan/Classification_nuScence
|
b94c4b0b6257fc1c048a676e3fd9e71183108d53
|
[
"Apache-2.0"
] | null | null | null |
version https://git-lfs.github.com/spec/v1
oid sha256:3348198148bbe54d8b7ff5d3e8ed868e45a258d2922323ff5915ff307ab28bf0
size 7907
| 32.25
| 75
| 0.883721
| 13
| 129
| 8.769231
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.406504
| 0.046512
| 129
| 3
| 76
| 43
| 0.520325
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f77065a3c82b76ba704d7535d3bf1066d1716c36
| 58
|
py
|
Python
|
ts/model_service/__init__.py
|
vvekic/serve
|
f02a56bf1f0de1705fd9f399c1115d36e343c90c
|
[
"Apache-2.0"
] | 2
|
2022-03-26T05:17:45.000Z
|
2022-03-26T05:44:53.000Z
|
ts/model_service/__init__.py
|
vvekic/serve
|
f02a56bf1f0de1705fd9f399c1115d36e343c90c
|
[
"Apache-2.0"
] | 3
|
2022-03-12T01:08:09.000Z
|
2022-03-15T10:56:14.000Z
|
ts/model_service/__init__.py
|
vvekic/serve
|
f02a56bf1f0de1705fd9f399c1115d36e343c90c
|
[
"Apache-2.0"
] | 4
|
2020-04-23T17:55:41.000Z
|
2020-04-27T17:06:33.000Z
|
"""
Model services code
"""
from . import model_service
| 8.285714
| 27
| 0.689655
| 7
| 58
| 5.571429
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189655
| 58
| 6
| 28
| 9.666667
| 0.829787
| 0.327586
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f78ae60441cd1bf47214da397a97921946a3dbbd
| 351
|
py
|
Python
|
mailbot/tests/__init__.py
|
alseageo/mailbot
|
642200dce8b34cfcce6276d76952b9454155f8b3
|
[
"BSD-3-Clause"
] | 13
|
2015-07-15T19:28:09.000Z
|
2021-11-04T23:50:13.000Z
|
mailbot/tests/__init__.py
|
alseageo/mailbot
|
642200dce8b34cfcce6276d76952b9454155f8b3
|
[
"BSD-3-Clause"
] | 7
|
2015-04-26T07:11:29.000Z
|
2019-11-19T12:54:12.000Z
|
mailbot/tests/__init__.py
|
alseageo/mailbot
|
642200dce8b34cfcce6276d76952b9454155f8b3
|
[
"BSD-3-Clause"
] | 12
|
2015-04-25T17:48:57.000Z
|
2021-07-11T14:28:34.000Z
|
# -*- coding: utf-8 -*-
from unittest2 import TestCase
from .. import CALLBACKS_MAP
class MailBotTestCase(TestCase):
    """TestCase that restores the CALLBACKS_MAP after each test run."""

    def setUp(self):
        # Snapshot the shared registry so tests may mutate it freely.
        self.callbacks_map_save = CALLBACKS_MAP.copy()

    def tearDown(self):
        # Bug fix: the original did ``CALLBACKS_MAP = self.callbacks_map_save``,
        # which only rebinds a method-local name (hence its ``# noqa``) and
        # never restores the module-level registry.  Mutate the shared dict
        # in place instead so every module sees the restored state.
        CALLBACKS_MAP.clear()
        CALLBACKS_MAP.update(self.callbacks_map_save)
| 21.9375
| 71
| 0.698006
| 44
| 351
| 5.386364
| 0.568182
| 0.303797
| 0.202532
| 0.168776
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007143
| 0.202279
| 351
| 15
| 72
| 23.4
| 0.839286
| 0.253561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.285714
| 0
| 0.714286
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
e3ba619f35b89a226ec0d4771dbbf96d3265ed33
| 109
|
py
|
Python
|
src/test/ocr_test.py
|
WaterHyacinthInNANHU/ArkOS
|
1919b7a2f22bc407d0a5503a9c1db8e30bbbc092
|
[
"MIT"
] | null | null | null |
src/test/ocr_test.py
|
WaterHyacinthInNANHU/ArkOS
|
1919b7a2f22bc407d0a5503a9c1db8e30bbbc092
|
[
"MIT"
] | null | null | null |
src/test/ocr_test.py
|
WaterHyacinthInNANHU/ArkOS
|
1919b7a2f22bc407d0a5503a9c1db8e30bbbc092
|
[
"MIT"
] | null | null | null |
from arknights.player import Player
# Smoke-test the stage OCR: connect to a device and run recognition once.
player = Player()
player.connect_device()
res = player.stage_ocr()  # NOTE(review): result kept but unused — presumably inspected in a debugger
pass  # convenient breakpoint anchor for the line above
| 15.571429
| 35
| 0.779817
| 15
| 109
| 5.533333
| 0.666667
| 0.433735
| 0.433735
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119266
| 109
| 6
| 36
| 18.166667
| 0.864583
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.2
| 0.2
| 0
| 0.2
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
e3c7872df24ce5d54bd6f1a67b76f54b19d05659
| 158
|
py
|
Python
|
src/src/globaloptim/__init__.py
|
ychnlgy/LipoWithGradients
|
4fe5228a3dae8bf5d457eef6191ba29314421f6b
|
[
"MIT"
] | null | null | null |
src/src/globaloptim/__init__.py
|
ychnlgy/LipoWithGradients
|
4fe5228a3dae8bf5d457eef6191ba29314421f6b
|
[
"MIT"
] | null | null | null |
src/src/globaloptim/__init__.py
|
ychnlgy/LipoWithGradients
|
4fe5228a3dae8bf5d457eef6191ba29314421f6b
|
[
"MIT"
] | null | null | null |
from .Lipo import Lipo
from .AdaptiveDataTable import AdaptiveDataTable
from .GlobalOptim import GlobalOptim
from .NeuralGlobalOptim import NeuralGlobalOptim
| 31.6
| 48
| 0.873418
| 16
| 158
| 8.625
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101266
| 158
| 4
| 49
| 39.5
| 0.971831
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e3e1e421a9bfd409df81eff1a34f88e6c7c10f0b
| 8,668
|
py
|
Python
|
CUR_D.py
|
xadityax/Recommender-Systems-MovieLens1M
|
80298dc74d5458c325f946dd1d5e4f05760c39f8
|
[
"MIT"
] | 1
|
2020-11-23T05:23:09.000Z
|
2020-11-23T05:23:09.000Z
|
CUR_D.py
|
xadityax/Recommender-Systems-MovieLens1M
|
80298dc74d5458c325f946dd1d5e4f05760c39f8
|
[
"MIT"
] | null | null | null |
CUR_D.py
|
xadityax/Recommender-Systems-MovieLens1M
|
80298dc74d5458c325f946dd1d5e4f05760c39f8
|
[
"MIT"
] | 1
|
2021-05-04T03:06:08.000Z
|
2021-05-04T03:06:08.000Z
|
# -*- coding: utf-8 -*-
"""
Created on sat Nov 21 18:09:37 2020
@author: Jalaj
"""
import numpy as np
from math import sqrt
from SV_D import building_matrix_svd_for
from SV_D import energy_90_top
from Stats import for_getting_metrics
import timeit
from sklearn.metrics import mean_squared_error
def C_1_U_1_R(k):
    """Compute a CUR decomposition of the user-movie matrix and save it.

    Loads ``train.npy``, samples ``k`` rows and ``k`` columns with probability
    proportional to their squared mass, builds C, U, R, and writes
    ``cur_ct.npy``, ``cur_r.npy`` and ``cur_u.npy`` to disk.

    Parameters
    ----------
    k :
        number of rows and columns taken

    Returns
    -------
    None.
    """
    matrix_of_user_movie = np.load('train.npy')
    ssq_sum = 0  # sum of squares of all elements (total squared mass)
    users_of_numbers = matrix_of_user_movie.shape[0]
    movies_of_numbers = matrix_of_user_movie[0].size
    for i in range(users_of_numbers):
        for j in range(movies_of_numbers):
            ssq_sum = ssq_sum + matrix_of_user_movie[i][j]*matrix_of_user_movie[i][j]
    users_probabilities = []
    movies_probabilities = []
    # Row sampling probability: this row's squared mass over the total.
    for i in range(users_of_numbers):
        ssq_of_row = 0
        for j in range(movies_of_numbers):
            ssq_of_row = ssq_of_row + matrix_of_user_movie[i][j]*matrix_of_user_movie[i][j]
        users_probabilities.append(ssq_of_row/ssq_sum)  # computing user probabilities
    # Column sampling probability, analogously.
    for j in range(movies_of_numbers):
        ssq_of_column = 0
        for i in range(users_of_numbers):
            ssq_of_column = ssq_of_column + matrix_of_user_movie[i][j]*matrix_of_user_movie[i][j]
        movies_probabilities.append(ssq_of_column/ssq_sum)  # computing movie probabilties
    users_that_are_top = np.random.choice(len(users_probabilities),k, replace=False, p=users_probabilities)  # sampling rows
    movies_that_are_top = np.random.choice(len(movies_probabilities),k, replace=False, p=movies_probabilities)  # sampling columns
    movies_that_are_top.sort()
    users_that_are_top.sort()
    C = []
    R = []
    # Scale each sampled row/column by 1/sqrt(k * probability), per CUR.
    for i in users_that_are_top:
        R.append(list(matrix_of_user_movie[i]/sqrt(k*users_probabilities[i])))
    for j in movies_that_are_top:
        C.append(list(matrix_of_user_movie[:,j]/sqrt(k*movies_probabilities[j])))
    Ct = np.transpose(C)
    W = []
    for i in users_that_are_top:
        X=[]
        for j in movies_that_are_top:
            X.append(matrix_of_user_movie[i][j])  # intersection of sampled rows and columns
        W.append(np.array(X))
    W = np.array(W)
    x,yt,sigma = building_matrix_svd_for(W)  # SVD of intersection W
    sigm_pinv = np.linalg.pinv(sigma)  # Moore-Penrose pseudo-inverse
    sig_sq = np.linalg.matrix_power(sigm_pinv, 2)  # square of pseudo-inverse
    y = np.transpose(yt)
    xt = np.transpose(x)
    U = np.matmul(y, sig_sq)
    U = np.matmul(U, xt)  # reconstructing U = Y (Sigma^+)^2 X^T
    np.save('cur_ct.npy', Ct)
    np.save('cur_r.npy', R)
    # Rebuild U keeping only the top-90%-energy part of the SVD, and
    # overwrite U before saving — so cur_u.npy holds the 90%-energy U.
    new_x, new_yt, new_sigma = energy_90_top(x,yt,sigma)
    pinv_new_sigma = np.linalg.pinv(new_sigma)
    new_sig_sq = np.linalg.matrix_power(pinv_new_sigma, 2)
    y = np.transpose(new_yt)
    xt = np.transpose(new_x)
    U = np.matmul(y, new_sig_sq)
    U = np.matmul(U, xt)
    np.save('cur_u.npy', U)
def C_1_U_90_1_R(k):
    """CUR decomposition with 90% retained energy; saves ``*_90.npy`` files.

    Same pipeline as :func:`C_1_U_1_R` but writes ``cur_ct_90.npy``,
    ``cur_r_90.npy`` and ``cur_u_90.npy`` instead.

    Parameters
    ----------
    k :
        number of rows and columns taken

    Returns
    -------
    None.
    """
    matrix_of_user_movie = np.load('train.npy')
    #[[1,1,1,0,0],[3,3,3,0,0],[4,4,4,0,0],[5,5,5,0,0],[0,0,0,4,4],[0,0,0,5,5],[0,0,0,2,2]]
    ssq_sum = 0  # sum of squares of all elements (total squared mass)
    users_of_numbers = matrix_of_user_movie.shape[0]
    movies_of_numbers = matrix_of_user_movie[0].size
    for i in range(users_of_numbers):
        for j in range(movies_of_numbers):
            ssq_sum = ssq_sum + matrix_of_user_movie[i][j]*matrix_of_user_movie[i][j]
    users_probabilities = []
    movies_probabilities = []
    # Row sampling probability: this row's squared mass over the total.
    for i in range(users_of_numbers):
        ssq_of_row = 0
        for j in range(movies_of_numbers):
            ssq_of_row = ssq_of_row + matrix_of_user_movie[i][j]*matrix_of_user_movie[i][j]
        users_probabilities.append(ssq_of_row/ssq_sum)  # computing user probabilities
    # Column sampling probability, analogously.
    for j in range(movies_of_numbers):
        ssq_of_column = 0
        for i in range(users_of_numbers):
            ssq_of_column = ssq_of_column + matrix_of_user_movie[i][j]*matrix_of_user_movie[i][j]
        movies_probabilities.append(ssq_of_column/ssq_sum)  # computing movie probabilties
    users_that_are_top = np.random.choice(len(users_probabilities),k, replace=False, p=users_probabilities)  # sampling rows
    movies_that_are_top = np.random.choice(len(movies_probabilities),k, replace=False, p=movies_probabilities)  # sampling columns
    movies_that_are_top.sort()
    users_that_are_top.sort()
    C = []
    R = []
    # Scale each sampled row/column by 1/sqrt(k * probability), per CUR.
    for i in users_that_are_top:
        R.append(list(matrix_of_user_movie[i]/sqrt(k*users_probabilities[i])))
    for j in movies_that_are_top:
        C.append(list(matrix_of_user_movie[:,j]/sqrt(k*movies_probabilities[j])))
    Ct = np.transpose(C)
    W = []
    for i in users_that_are_top:
        X=[]
        for j in movies_that_are_top:
            X.append(matrix_of_user_movie[i][j])  # intersection of sampled rows and columns
        W.append(np.array(X))
    W = np.array(W)
    x,yt,sigma = building_matrix_svd_for(W)  # SVD of intersection W
    sigm_pinv = np.linalg.pinv(sigma)  # Moore-Penrose pseudo-inverse
    sig_sq = np.linalg.matrix_power(sigm_pinv, 2)  # square of pseudo-inverse
    y = np.transpose(yt)
    xt = np.transpose(x)
    U = np.matmul(y, sig_sq)
    U = np.matmul(U, xt)  # reconstructing U = Y (Sigma^+)^2 X^T
    np.save('cur_ct_90.npy', Ct)
    np.save('cur_r_90.npy', R)
    new_x, new_yt, new_sigma = energy_90_top(x,yt,sigma)  # SVD with top 90% energy
    pinv_new_sigma = np.linalg.pinv(new_sigma)
    new_sig_sq = np.linalg.matrix_power(pinv_new_sigma, 2)
    y = np.transpose(new_yt)
    xt = np.transpose(new_x)
    U = np.matmul(y, new_sig_sq)
    U = np.matmul(U, xt)
    np.save('cur_u_90.npy', U)
def srcr(matrix, final):
    """Spearman-style rank correlation between two equal-shape matrices.

    Applies the Spearman formula ``1 - 6*sum(d^2) / (n^3 - n)`` directly to the
    element-wise differences of *matrix* and *final* (the raw values are used
    as the "ranks", matching the original implementation).

    Parameters
    ----------
    matrix :
        train values matrix (2-D, indexable rows of numbers)
    final :
        reconstructed C*U*R matrix of the same shape

    Returns
    -------
    values :
        Spearman Rank Correlation.

    Note
    ----
    With fewer than two elements the denominator ``n^3 - n`` is zero and this
    raises ``ZeroDivisionError`` — unchanged from the original behavior.
    """
    # Renamed locals: the original used ``sum``/``freq``, shadowing the
    # builtin ``sum`` inside this function.
    count = 0
    sq_diff_total = 0
    for i in range(0, len(matrix)):
        for j in range(0, len(matrix[i])):
            sq_diff_total = sq_diff_total + (matrix[i][j] - final[i][j]) ** 2
            count = count + 1
    numerator = 6 * sq_diff_total
    denominator = (count ** 3) - count
    values = 1 - (numerator / denominator)
    return values
def cur_ponk_precision(mat, final):
    """Fraction of entries whose rounded prediction matches the rounded truth.

    Parameters
    ----------
    mat :
        train values matrix (2-D, indexable rows of numbers).
    final :
        reconstructed C*U*R matrix; a NumPy array (original behavior) or —
        generalization — any nested sequence of numbers.

    Returns
    -------
    float
        precision on top K, scaled to [0, 1].
    """
    # Generalized: the original unconditionally called ``final.tolist()``,
    # so only NumPy arrays were accepted.  Plain nested lists now work too.
    k_mat = final.tolist() if hasattr(final, 'tolist') else final
    total = 0.00
    matches = 0.00
    for i in range(0, len(mat)):
        for j in range(0, len(mat[i])):
            total = total + 1
            # Compare entries after rounding to the nearest integer rating.
            a = int(round(mat[i][j]))
            b = int(round(k_mat[i][j]))
            if a == b:
                matches = matches + 1
    precision = (matches * 100) / total
    return precision / 100
def main():
    """Run CUR at k=600 (full and 90%-energy) and report RMSE, precision and
    Spearman correlation against the training matrix.

    Side effects: reads/writes the ``*.npy`` files produced by
    :func:`C_1_U_1_R` and :func:`C_1_U_90_1_R`, and prints all metrics.
    """
    start=timeit.default_timer()
    C_1_U_1_R(600)
    print("Time taken")
    stop=timeit.default_timer()
    print("%s seconds" %(stop-start))
    C_1_U_90_1_R(600)
    # NOTE(review): ``start`` is not reset, so this second timing includes
    # the first decomposition as well.
    print("Time taken for 90%")
    stop=timeit.default_timer()
    print("%s seconds" %(stop-start))
    # Reassemble the full-energy approximation: Ct * U * R.
    Ct = np.load('cur_ct.npy')
    A = np.load('train.npy')
    #[[1,1,1,0,0],[3,3,3,0,0],[4,4,4,0,0],[5,5,5,0,0],[0,0,0,4,4],[0,0,0,5,5],[0,0,0,2,2]]
    R = np.load('cur_r.npy')
    U = np.load('cur_u.npy')
    final = np.matmul(Ct, U)
    final = np.matmul(final, R)
    rmse_err=sqrt(mean_squared_error(A, final))
    print("RMSE error is :")
    print(rmse_err)
    print("Precision on top k is :")
    ans=cur_ponk_precision(A, final)
    print(ans)
    answer = srcr(A, final)
    print("Spearman Rank Correlation is ", answer)
    # Same metrics for the 90%-energy variant.
    Ct_90 = np.load('cur_ct_90.npy')
    R_90 = np.load('cur_r_90.npy')
    U_90 = np.load('cur_u_90.npy')
    final_90 = np.matmul(Ct_90, U_90)
    final_90 = np.matmul(final_90, R_90)
    rmse_err_90=sqrt(mean_squared_error(A, final_90))
    print("RMSE error for 90% is :")
    print(rmse_err_90)
    print("Precision on top k for 90% is :")
    ans_90=cur_ponk_precision(A, final_90)
    print(ans_90)
    answer_90 = srcr(A, final_90)
    print("Spearman Rank Correlation for 90% is ", answer_90)
    # Release the large training matrix explicitly.
    del A
# Script entry point: run the full evaluation when executed directly.
if __name__ == '__main__':
    main()
| 33.596899
| 129
| 0.631518
| 1,405
| 8,668
| 3.642705
| 0.121708
| 0.037515
| 0.056272
| 0.079719
| 0.776866
| 0.751465
| 0.714342
| 0.714342
| 0.701837
| 0.684642
| 0
| 0.031777
| 0.241232
| 8,668
| 257
| 130
| 33.727626
| 0.746389
| 0.170282
| 0
| 0.596685
| 0
| 0
| 0.055572
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027624
| false
| 0
| 0.038674
| 0
| 0.077348
| 0.077348
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e3e3e47cfcfcccb5d74b4d16ddd929ae31cbb62f
| 213
|
py
|
Python
|
tests/neural_net/__init__.py
|
X-rayLaser/DeepNetLib
|
abe64d2c0d87082aae1b2063a06a4b778ea285bd
|
[
"MIT",
"BSD-2-Clause",
"Apache-2.0"
] | null | null | null |
tests/neural_net/__init__.py
|
X-rayLaser/DeepNetLib
|
abe64d2c0d87082aae1b2063a06a4b778ea285bd
|
[
"MIT",
"BSD-2-Clause",
"Apache-2.0"
] | null | null | null |
tests/neural_net/__init__.py
|
X-rayLaser/DeepNetLib
|
abe64d2c0d87082aae1b2063a06a4b778ea285bd
|
[
"MIT",
"BSD-2-Clause",
"Apache-2.0"
] | null | null | null |
from .init_tests import *
from .feed_tests import *
from .layer_sizes_tests import *
from .create_from_file_tests import *
from .save_tests import *
from .add_layer_tests import *
from .net_factory_tests import *
| 26.625
| 37
| 0.802817
| 33
| 213
| 4.818182
| 0.393939
| 0.484277
| 0.566038
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131455
| 213
| 7
| 38
| 30.428571
| 0.859459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
5415e92698f3131feac1feb736325bbfc4de2d50
| 97
|
py
|
Python
|
duty/callback_signals/ping.py
|
MirEEtoN/IrCA-Duty
|
d06993071fa30d7da61c5429e9a92a3a60922d28
|
[
"MIT"
] | null | null | null |
duty/callback_signals/ping.py
|
MirEEtoN/IrCA-Duty
|
d06993071fa30d7da61c5429e9a92a3a60922d28
|
[
"MIT"
] | null | null | null |
duty/callback_signals/ping.py
|
MirEEtoN/IrCA-Duty
|
d06993071fa30d7da61c5429e9a92a3a60922d28
|
[
"MIT"
] | null | null | null |
from duty.objects import dp
@dp.event_register('ping')
def ping(event) -> str:
    """Health-check callback for the 'ping' event: always answers "ok".

    ``event`` is the dispatched event payload — unused here.
    """
    return "ok"
| 13.857143
| 27
| 0.680412
| 15
| 97
| 4.333333
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175258
| 97
| 6
| 28
| 16.166667
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0.061856
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
54167ebb8383c51524b281d417d695a64acc7b0c
| 5,560
|
py
|
Python
|
pymnn/pip_package/MNN/tools/mnn_fb/IDSTQuan.py
|
xhuan28/MNN
|
81df3a48d79cbc0b75251d12934345948866f7be
|
[
"Apache-2.0"
] | 3
|
2019-12-27T01:10:32.000Z
|
2021-05-14T08:10:40.000Z
|
pymnn/pip_package/MNN/tools/mnn_fb/IDSTQuan.py
|
xhuan28/MNN
|
81df3a48d79cbc0b75251d12934345948866f7be
|
[
"Apache-2.0"
] | 10
|
2019-07-04T01:40:13.000Z
|
2019-10-30T02:38:42.000Z
|
pymnn/pip_package/MNN/tools/mnn_fb/IDSTQuan.py
|
xhuan28/MNN
|
81df3a48d79cbc0b75251d12934345948866f7be
|
[
"Apache-2.0"
] | 1
|
2020-03-10T02:17:47.000Z
|
2020-03-10T02:17:47.000Z
|
# automatically generated by the FlatBuffers compiler, do not modify
# namespace: MNN
import flatbuffers
class IDSTQuan(object):
    """FlatBuffers accessor for the MNN ``IDSTQuan`` quantization table.

    Generated by the FlatBuffers compiler — do not hand-edit the logic.
    Each accessor probes its vtable slot and returns the schema default
    (0 / 0.0 / False) when the field is absent.
    """
    __slots__ = ['_tab']

    @classmethod
    def GetRootAsIDSTQuan(cls, buf, offset):
        # Read the root table offset from ``buf`` and wrap it in an accessor.
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = IDSTQuan()
        x.Init(buf, n + offset)
        return x

    # IDSTQuan
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

    # IDSTQuan: `buffer` int8 vector (vtable offset 4), element accessor.
    def Buffer(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Int8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
        return 0

    # IDSTQuan: `buffer` as a NumPy int8 array (0 if the field is absent).
    def BufferAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int8Flags, o)
        return 0

    # IDSTQuan: length of the `buffer` vector.
    def BufferLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # IDSTQuan: `alpha` float32 vector (vtable offset 6), element accessor.
    def Alpha(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
        return 0

    # IDSTQuan: `alpha` as a NumPy float32 array (0 if the field is absent).
    def AlphaAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o)
        return 0

    # IDSTQuan: length of the `alpha` vector.
    def AlphaLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # IDSTQuan: int32 `type` scalar (vtable offset 8), default 0.
    def Type(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0

    # IDSTQuan: bool `useInt32` scalar (vtable offset 10), default False.
    def UseInt32(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return False

    # IDSTQuan: float32 `quantScale` scalar (vtable offset 12), default 0.0.
    def QuantScale(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
        return 0.0

    # IDSTQuan: float32 `scaleIn` scalar (vtable offset 14), default 0.0.
    def ScaleIn(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
        return 0.0

    # IDSTQuan: float32 `scaleOut` scalar (vtable offset 16), default 0.0.
    def ScaleOut(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
        return 0.0

    # IDSTQuan: int32 `aMax` scalar (vtable offset 18), default 0.
    def AMax(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0

    # IDSTQuan: int32 `aMin` scalar (vtable offset 20), default 0.
    def AMin(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0

    # IDSTQuan: int32 `readType` scalar (vtable offset 22), default 0.
    def ReadType(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0

    # IDSTQuan: bool `hasScaleInt` scalar (vtable offset 24), default False.
    def HasScaleInt(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return False
# Builder helpers (generated): write an IDSTQuan table field-by-field.
# Call IDSTQuanStart, the IDSTQuanAdd* setters for present fields, then
# IDSTQuanEnd to obtain the finished table offset (11 fields total).
def IDSTQuanStart(builder): builder.StartObject(11)
def IDSTQuanAddBuffer(builder, buffer): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(buffer), 0)
def IDSTQuanStartBufferVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def IDSTQuanAddAlpha(builder, alpha): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(alpha), 0)
def IDSTQuanStartAlphaVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def IDSTQuanAddType(builder, type): builder.PrependInt32Slot(2, type, 0)
def IDSTQuanAddUseInt32(builder, useInt32): builder.PrependBoolSlot(3, useInt32, 0)
def IDSTQuanAddQuantScale(builder, quantScale): builder.PrependFloat32Slot(4, quantScale, 0.0)
def IDSTQuanAddScaleIn(builder, scaleIn): builder.PrependFloat32Slot(5, scaleIn, 0.0)
def IDSTQuanAddScaleOut(builder, scaleOut): builder.PrependFloat32Slot(6, scaleOut, 0.0)
def IDSTQuanAddAMax(builder, aMax): builder.PrependInt32Slot(7, aMax, 0)
def IDSTQuanAddAMin(builder, aMin): builder.PrependInt32Slot(8, aMin, 0)
def IDSTQuanAddReadType(builder, readType): builder.PrependInt32Slot(9, readType, 0)
def IDSTQuanAddHasScaleInt(builder, hasScaleInt): builder.PrependBoolSlot(10, hasScaleInt, 0)
def IDSTQuanEnd(builder): return builder.EndObject()
| 38.881119
| 137
| 0.680396
| 688
| 5,560
| 5.356105
| 0.15843
| 0.079783
| 0.191045
| 0.180461
| 0.609227
| 0.574491
| 0.552239
| 0.552239
| 0.525645
| 0.525645
| 0
| 0.029633
| 0.210971
| 5,560
| 142
| 138
| 39.15493
| 0.810349
| 0.040468
| 0
| 0.475728
| 1
| 0
| 0.000752
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.31068
| false
| 0
| 0.009709
| 0.029126
| 0.640777
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
580c961256ef6e760c883aba29553ed149773328
| 572
|
py
|
Python
|
stackoversight/pipeline/tokenizer.py
|
walker76/stackoversight
|
c33e1b98910d054da0f9698bc086d7736c2c5656
|
[
"BSD-2-Clause"
] | 3
|
2019-07-15T14:59:09.000Z
|
2019-07-25T04:02:56.000Z
|
stackoversight/pipeline/tokenizer.py
|
walker76-research/stackoversight
|
c33e1b98910d054da0f9698bc086d7736c2c5656
|
[
"BSD-2-Clause"
] | 27
|
2019-07-16T07:57:27.000Z
|
2019-09-10T12:40:55.000Z
|
stackoversight/pipeline/tokenizer.py
|
walker76-research/stackoversight
|
c33e1b98910d054da0f9698bc086d7736c2c5656
|
[
"BSD-2-Clause"
] | 2
|
2019-07-21T17:45:11.000Z
|
2019-08-07T11:13:36.000Z
|
from io import StringIO
from tokenize import generate_tokens
from pipeline.processing_step import ProcessingStep
class Tokenizer(ProcessingStep):
    """
    Pipeline stage that tokenizes Python source snippets.

    Input for Pipeline - An array of strings, each string is a code snippet
    Output for Pipeline - An array of arrays of tokens
    """

    def operation(self, item):
        """
        Returns the (token_type, token_string) pairs for a string of code.
        """
        stream = StringIO(item)
        token_stream = generate_tokens(stream.readline)
        # A TokenInfo sliced to its first two members is (type, string).
        return [(tok[0], tok[1]) for tok in token_stream]

    @property
    def name(self):
        return "tokenizer"
| 27.238095
| 91
| 0.681818
| 73
| 572
| 5.30137
| 0.575342
| 0.072351
| 0.067183
| 0.093023
| 0.103359
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004619
| 0.243007
| 572
| 20
| 92
| 28.6
| 0.889146
| 0.316434
| 0
| 0
| 1
| 0
| 0.025862
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.333333
| 0.111111
| 0.888889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
580ff7adf98ed584840e6a089c9001370965d0e0
| 69
|
py
|
Python
|
thoughtrnn/__init__.py
|
tehZevo/thoughtrnn
|
f59e16b1f59f386cb645579173c567500f10fc37
|
[
"MIT"
] | 2
|
2019-10-22T15:29:57.000Z
|
2019-10-31T02:48:09.000Z
|
thoughtrnn/__init__.py
|
tehZevo/thoughtrnn
|
f59e16b1f59f386cb645579173c567500f10fc37
|
[
"MIT"
] | null | null | null |
thoughtrnn/__init__.py
|
tehZevo/thoughtrnn
|
f59e16b1f59f386cb645579173c567500f10fc37
|
[
"MIT"
] | null | null | null |
from .thoughtrnn import ThoughtRNN
from .helper import ThoughtHelper
| 23
| 34
| 0.855072
| 8
| 69
| 7.375
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115942
| 69
| 2
| 35
| 34.5
| 0.967213
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
58141645d916e48b973a40f720590e89b2672280
| 16,448
|
py
|
Python
|
Tools/resultsdbpy/resultsdbpy/model/commit_context_unittest.py
|
jacadcaps/webkitty
|
9aebd2081349f9a7b5d168673c6f676a1450a66d
|
[
"BSD-2-Clause"
] | 6
|
2021-07-05T16:09:39.000Z
|
2022-03-06T22:44:42.000Z
|
Tools/resultsdbpy/resultsdbpy/model/commit_context_unittest.py
|
jacadcaps/webkitty
|
9aebd2081349f9a7b5d168673c6f676a1450a66d
|
[
"BSD-2-Clause"
] | 7
|
2022-03-15T13:25:39.000Z
|
2022-03-15T13:25:44.000Z
|
Tools/resultsdbpy/resultsdbpy/model/commit_context_unittest.py
|
jacadcaps/webkitty
|
9aebd2081349f9a7b5d168673c6f676a1450a66d
|
[
"BSD-2-Clause"
] | null | null | null |
# Copyright (C) 2019 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from redis import StrictRedis
from fakeredis import FakeStrictRedis
from resultsdbpy.model.cassandra_context import CassandraContext
from resultsdbpy.model.commit_context import CommitContext
from resultsdbpy.model.mock_cassandra_context import MockCassandraContext
from resultsdbpy.model.mock_repository import MockStashRepository, MockSVNRepository
from resultsdbpy.model.wait_for_docker_test_case import WaitForDockerTestCase
class CommitContextTest(WaitForDockerTestCase):
KEYSPACE = 'commit_mapping_test_keyspace'
def init_database(self, redis=StrictRedis, cassandra=CassandraContext):
    """Build mock repositories and a fresh CommitContext on a clean keyspace.

    ``redis`` and ``cassandra`` are factory classes (real or mock) so the same
    setup serves both docker-backed and fully-mocked test runs.
    """
    redis_instance = redis()
    self.stash_repository = MockStashRepository.safari(redis_instance)
    self.svn_repository = MockSVNRepository.webkit(redis_instance)
    # Drop any leftover keyspace from a previous run before re-creating it.
    cassandra.drop_keyspace(keyspace=self.KEYSPACE)
    self.database = CommitContext(
        redis=redis_instance,
        cassandra=cassandra(keyspace=self.KEYSPACE, create_keyspace=True),
    )
    self.database.register_repository(self.stash_repository)
    self.database.register_repository(self.svn_repository)
def add_all_commits_to_database(self):
for mock_repository in [self.stash_repository, self.svn_repository]:
for commit_list in mock_repository.commits.values():
for commit in commit_list:
self.database.register_commit(commit)
@WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
def test_verify_table(self, redis=StrictRedis, cassandra=CassandraContext):
self.init_database(redis=redis, cassandra=cassandra)
CommitContext(redis=redis(), cassandra=cassandra(keyspace=self.KEYSPACE))
@WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
def test_commit_by_id(self, redis=StrictRedis, cassandra=CassandraContext):
self.init_database(redis=redis, cassandra=cassandra)
self.add_all_commits_to_database()
self.assertEqual(
[self.stash_repository.commit_for_id(id='bb6bda5f44', branch='master')],
self.database.find_commits_by_id(repository_id='safari', branch='master', commit_id='bb6bda5f44'),
)
self.assertEqual(2, len(self.database.find_commits_by_id(repository_id='safari', branch='master', commit_id='336610a')))
self.assertEqual(
[self.svn_repository.commit_for_id(id=236544, branch='trunk')],
self.database.find_commits_by_id(repository_id='webkit', branch='trunk', commit_id=236544),
)
self.assertEqual(0, len(self.database.find_commits_by_id(repository_id='webkit', branch='trunk', commit_id='23654')))
@WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
def test_commit_by_uuid(self, redis=StrictRedis, cassandra=CassandraContext):
self.init_database(redis=redis, cassandra=cassandra)
self.add_all_commits_to_database()
self.assertEqual(
[self.stash_repository.commit_for_id(id='7be4084258', branch='master')],
self.database.find_commits_by_uuid(repository_id='safari', branch='master', uuid=153755068501),
)
self.assertEqual(
[self.svn_repository.commit_for_id(id=236540, branch='trunk')],
self.database.find_commits_by_uuid(repository_id='webkit', branch='trunk', uuid=153802947900),
)
@WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
def test_commit_by_timestamp(self, redis=StrictRedis, cassandra=CassandraContext):
self.init_database(redis=redis, cassandra=cassandra)
self.add_all_commits_to_database()
self.assertEqual(
[self.stash_repository.commit_for_id(id='336610a84f', branch='master')],
self.database.find_commits_by_timestamp(repository_id='safari', branch='master', timestamp=1537809818),
)
self.assertEqual(
[self.svn_repository.commit_for_id(id=236540, branch='trunk')],
self.database.find_commits_by_timestamp(repository_id='webkit', branch='trunk', timestamp=1538029479),
)
self.assertEqual(2, len(self.database.find_commits_by_timestamp(repository_id='safari', branch='master', timestamp=1537550685)))
@WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
def test_all_commits_stash(self, redis=StrictRedis, cassandra=CassandraContext):
self.init_database(redis=redis, cassandra=cassandra)
self.add_all_commits_to_database()
self.assertEqual(5, len(self.database.find_commits_in_range(repository_id='safari', branch='master')))
self.assertEqual(
[self.stash_repository.commit_for_id(id='bb6bda5f44', branch='master'), self.stash_repository.commit_for_id(id='336610a84f', branch='master')],
self.database.find_commits_in_range(repository_id='safari', branch='master', limit=2),
)
@WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
def test_all_commits_svn(self, redis=StrictRedis, cassandra=CassandraContext):
self.init_database(redis=redis, cassandra=cassandra)
self.add_all_commits_to_database()
self.assertEqual(5, len(self.database.find_commits_in_range(repository_id='webkit', branch='trunk')))
self.assertEqual(
[self.svn_repository.commit_for_id(id=236544, branch='trunk'), self.svn_repository.commit_for_id(id=236543, branch='trunk')],
self.database.find_commits_in_range(repository_id='webkit', branch='trunk', limit=2),
)
@WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
def test_stash_commits_in_range(self, redis=StrictRedis, cassandra=CassandraContext):
self.init_database(redis=redis, cassandra=cassandra)
self.add_all_commits_to_database()
self.assertEqual(
[self.stash_repository.commit_for_id(id='bb6bda5f44', branch='master'), self.stash_repository.commit_for_id(id='336610a84f', branch='master')],
self.database.find_commits_in_range(repository_id='safari', branch='master', begin=1537809818, end=1537810281),
)
@WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
def test_svn_commits_in_range(self, redis=StrictRedis, cassandra=CassandraContext):
self.init_database(redis=redis, cassandra=cassandra)
self.add_all_commits_to_database()
self.assertEqual(
[self.svn_repository.commit_for_id(id=236544, branch='trunk'), self.svn_repository.commit_for_id(id=236543, branch='trunk')],
self.database.find_commits_in_range(repository_id='webkit', branch='trunk', begin=1538050458, end=1538052408),
)
@WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
def test_stash_commits_between(self, redis=StrictRedis, cassandra=CassandraContext):
self.init_database(redis=redis, cassandra=cassandra)
self.add_all_commits_to_database()
commits = [
self.stash_repository.commit_for_id(id='bb6bda5f', branch='master'),
self.stash_repository.commit_for_id(id='336610a8', branch='master'),
self.stash_repository.commit_for_id(id='336610a4', branch='master'),
]
self.assertEqual(commits, self.database.find_commits_in_range(repository_id='safari', branch='master', begin=commits[-1], end=commits[0]))
@WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
def test_svn_commits_between(self, redis=StrictRedis, cassandra=CassandraContext):
self.init_database(redis=redis, cassandra=cassandra)
self.add_all_commits_to_database()
commits = [
self.svn_repository.commit_for_id(id=236544, branch='trunk'),
self.svn_repository.commit_for_id(id=236543, branch='trunk'),
self.svn_repository.commit_for_id(id=236542, branch='trunk'),
]
self.assertEqual(commits, self.database.find_commits_in_range(repository_id='webkit', branch='trunk', begin=commits[-1], end=commits[0]))
@WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
def test_commit_from_stash_repo(self, redis=StrictRedis, cassandra=CassandraContext):
self.init_database(redis=redis, cassandra=cassandra)
self.database.register_commit_with_repo_and_id('safari', 'master', 'bb6bda5f44')
self.assertEqual(
[self.stash_repository.commit_for_id(id='bb6bda5f44', branch='master')],
self.database.find_commits_by_id(repository_id='safari', branch='master', commit_id='bb6bda5f44'),
)
@WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
def test_commit_from_svn_repo(self, redis=StrictRedis, cassandra=CassandraContext):
self.init_database(redis=redis, cassandra=cassandra)
self.database.register_commit_with_repo_and_id('webkit', 'trunk', 236544)
self.assertEqual(
[self.svn_repository.commit_for_id(id=236544, branch='trunk')],
self.database.find_commits_by_id(repository_id='webkit', branch='trunk', commit_id=236544),
)
@WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
def test_branches(self, redis=StrictRedis, cassandra=CassandraContext):
self.init_database(redis=redis, cassandra=cassandra)
self.add_all_commits_to_database()
self.assertEqual(['master', 'safari-606-branch'], self.database.branches(repository_id='safari'))
self.assertEqual(['safari-606-branch', 'trunk'], self.database.branches(repository_id='webkit'))
self.assertEqual(['safari-606-branch'], self.database.branches(repository_id='safari', branch='safari'))
self.assertEqual(['safari-606-branch'], self.database.branches(repository_id='webkit', branch='safari-606-branch'))
@WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
def test_next_commit(self, redis=StrictRedis, cassandra=CassandraContext):
self.init_database(redis=redis, cassandra=cassandra)
self.add_all_commits_to_database()
self.assertEqual(
self.database.next_commit(self.svn_repository.commit_for_id(id=236542)),
self.svn_repository.commit_for_id(id=236543),
)
self.assertEqual(
self.database.next_commit(self.stash_repository.commit_for_id(id='336610a40c3fecb728871e12ca31482ca715b383')),
self.stash_repository.commit_for_id(id='336610a84fdcf14ddcf1db65075af95480516fda'),
)
@WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
def test_previous_commit(self, redis=StrictRedis, cassandra=CassandraContext):
self.init_database(redis=redis, cassandra=cassandra)
self.add_all_commits_to_database()
self.assertEqual(
self.svn_repository.commit_for_id(id=236542),
self.database.previous_commit(self.svn_repository.commit_for_id(id=236543)),
)
self.assertEqual(
self.stash_repository.commit_for_id(id='336610a40c3fecb728871e12ca31482ca715b383'),
self.database.previous_commit(self.stash_repository.commit_for_id(id='336610a84fdcf14ddcf1db65075af95480516fda')),
)
@WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
def test_sibling_commits(self, redis=StrictRedis, cassandra=CassandraContext):
self.init_database(redis=redis, cassandra=cassandra)
self.add_all_commits_to_database()
self.assertEqual(
self.database.sibling_commits(self.svn_repository.commit_for_id(id=236542), ['safari']),
{'safari': [self.stash_repository.commit_for_id(id='bb6bda5f44dd24d0b54539b8ff6e8c17f519249a')]},
)
self.assertEqual(
self.database.sibling_commits(self.stash_repository.commit_for_id(id='bb6bda5f44dd24d0b54539b8ff6e8c17f519249a'), ['webkit']),
{'webkit': [
self.svn_repository.commit_for_id(id=236544),
self.svn_repository.commit_for_id(id=236543),
self.svn_repository.commit_for_id(id=236542),
self.svn_repository.commit_for_id(id=236541),
self.svn_repository.commit_for_id(id=236540),
]},
)
self.assertEqual(
self.database.sibling_commits(self.stash_repository.commit_for_id(id='336610a84fdcf14ddcf1db65075af95480516fda'), ['webkit']),
{'webkit': []},
)
def test_uuid_for_commits(self):
uuid = CommitContext.uuid_for_commits([MockStashRepository.safari().commit_for_id(id='bb6bda5f'), MockSVNRepository.webkit().commit_for_id(id=236544)])
self.assertEqual(uuid, 153805240800)
@WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
def test_branch_keys_for_commits(self, redis=StrictRedis, cassandra=CassandraContext):
self.init_database(redis=redis, cassandra=cassandra)
branches = self.database.branch_keys_for_commits([
MockStashRepository.safari().commit_for_id(id='bb6bda5f'),
MockSVNRepository.webkit().commit_for_id(id=236544),
])
self.assertEqual(branches, ['default'])
branches = self.database.branch_keys_for_commits([
MockStashRepository.safari().commit_for_id(id='79256c32', branch='safari-606-branch'),
MockSVNRepository.webkit().commit_for_id(id=236544),
])
self.assertEqual(branches, ['safari-606-branch'])
branches = self.database.branch_keys_for_commits([
MockStashRepository.safari().commit_for_id(id='79256c32', branch='safari-606-branch'),
MockSVNRepository.webkit().commit_for_id(id=236335, branch='safari-606-branch'),
])
self.assertEqual(branches, ['safari-606-branch'])
@WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
def test_commit_url(self, redis=StrictRedis, cassandra=CassandraContext):
self.init_database(redis=redis, cassandra=cassandra)
self.assertEqual(
'https://fake-stash-instance.apple.com/projects/BROWSER/repos/safari/commits/bb6bda5f44dd24d0b54539b8ff6e8c17f519249a',
self.database.url(MockStashRepository.safari().commit_for_id(id='bb6bda5f')),
)
self.assertEqual(
'https://trac.webkit.org/changeset/236544/webkit',
self.database.url(MockSVNRepository.webkit().commit_for_id(id=236544)),
)
| 58.120141
| 159
| 0.738266
| 1,886
| 16,448
| 6.172322
| 0.113468
| 0.037883
| 0.046302
| 0.05472
| 0.790911
| 0.762649
| 0.750193
| 0.739627
| 0.705008
| 0.670303
| 0
| 0.046003
| 0.158135
| 16,448
| 282
| 160
| 58.326241
| 0.794685
| 0.078064
| 0
| 0.455357
| 0
| 0.004464
| 0.084137
| 0.020341
| 0
| 0
| 0
| 0
| 0.160714
| 1
| 0.09375
| false
| 0
| 0.03125
| 0
| 0.133929
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
586d3da93d189fd94120f0a3a2d50c1835b522f8
| 26,599
|
py
|
Python
|
spherical/spherical_Jfactors.py
|
jls713/jfactors
|
2427f4bad052b37fb7eefffaa4ee46d398f33504
|
[
"MIT"
] | null | null | null |
spherical/spherical_Jfactors.py
|
jls713/jfactors
|
2427f4bad052b37fb7eefffaa4ee46d398f33504
|
[
"MIT"
] | null | null | null |
spherical/spherical_Jfactors.py
|
jls713/jfactors
|
2427f4bad052b37fb7eefffaa4ee46d398f33504
|
[
"MIT"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
from scipy.special import gamma as Gamma
from scipy.integrate import quad
# Newton's constant in the unit system used throughout this module:
# masses in solar masses, velocities in km/s, lengths in kpc.
G = 4.300918e-6 ## in units solar mass, km/s kpc
# Unit-conversion factors (inferred from names/usage — TODO confirm values):
# quantities computed in Msol^2 kpc^-5 are divided by this to quote J in GeV^2 cm^-5.
GEV2cm5toMsol2kpc5 = 2.2482330e-07
# As above for D factors: Msol kpc^-2 quantities divided by this give GeV cm^-2.
GEVcm2toMsolkpc2 = 8.5358230e-15
def integrate_J_spherical_alphabetagamma(thetamax, D, rho0, rs, alpha, beta, gamma, rt):
    """log10 J-factor of a spherical (alpha, beta, gamma) profile.

    Integrates rho^2 over the line of sight and over angle theta up to
    thetamax, for a halo at distance D with scale density rho0, scale
    radius rs and an exponential-like truncation at rt, then converts
    via GEV2cm5toMsol2kpc5.
    """
    def density(radius):
        # (alpha, beta, gamma) double power law with sech(r/rt) truncation.
        s = radius / rs
        truncation = np.sqrt(1 - np.tanh(radius / rt) ** 2)
        return np.power(s, -gamma) * np.power(1 + np.power(s, alpha), ((gamma - beta) / alpha)) * truncation

    def integrand(los, theta):
        impact = np.tan(theta) * D
        radius = np.sqrt(impact * impact + los * los)
        return np.sin(theta) * (density(radius) ** 2)

    # Outer integral: line of sight over (-inf, inf); inner: theta in [0, thetamax].
    total = quad(lambda los: quad(lambda th: integrand(los, th), 0., thetamax)[0], -np.inf, np.inf)[0]
    return np.log10(rho0 * rho0 * 2. * np.pi * total / GEV2cm5toMsol2kpc5)
def integrate_J_farfield_spherical_alphabetagamma(thetamax, D, rho0, rs, alpha, beta, gamma, rt):
    """log10 J-factor of a spherical (alpha, beta, gamma) profile, far-field limit.

    Same profile as integrate_J_spherical_alphabetagamma, but the angular
    integral is replaced by an integral over impact parameter b up to
    thetamax*D, with an overall 1/D^2 factor.
    """
    def density(radius):
        s = radius / rs
        truncation = np.sqrt(1 - np.tanh(radius / rt) ** 2)
        return np.power(s, -gamma) * np.power(1 + np.power(s, alpha), ((gamma - beta) / alpha)) * truncation

    def integrand(los, impact):
        radius = np.sqrt(impact * impact + los * los)
        return impact * (density(radius) ** 2)

    total = quad(lambda los: quad(lambda b: integrand(los, b), 0., thetamax * D)[0], -np.inf, np.inf)[0]
    return np.log10(rho0 * rho0 * 2. * np.pi * total / D / D / GEV2cm5toMsol2kpc5)
def integrate_D_spherical_alphabetagamma(thetamax, D, rho0, rs, alpha, beta, gamma, rt):
    """log10 D-factor of a spherical (alpha, beta, gamma) profile.

    Like the J-factor version but integrates rho (not rho^2) and converts
    via GEVcm2toMsolkpc2.
    """
    def density(radius):
        s = radius / rs
        truncation = np.sqrt(1 - np.tanh(radius / rt) ** 2)
        return np.power(s, -gamma) * np.power(1 + np.power(s, alpha), ((gamma - beta) / alpha)) * truncation

    def integrand(los, theta):
        impact = np.tan(theta) * D
        radius = np.sqrt(impact * impact + los * los)
        return np.sin(theta) * density(radius)

    total = quad(lambda los: quad(lambda th: integrand(los, th), 0., thetamax)[0], -np.inf, np.inf)[0]
    return np.log10(rho0 * 2. * np.pi * total / GEVcm2toMsolkpc2)
def integrate_D_farfield_spherical_alphabetagamma(thetamax, D, rho0, rs, alpha, beta, gamma, rt):
    """log10 D-factor of a spherical (alpha, beta, gamma) profile, far-field limit.

    Integrates rho over the line of sight and over impact parameter up to
    thetamax*D, with an overall 1/D^2 factor.
    """
    def density(radius):
        s = radius / rs
        truncation = np.sqrt(1 - np.tanh(radius / rt) ** 2)
        return np.power(s, -gamma) * np.power(1 + np.power(s, alpha), ((gamma - beta) / alpha)) * truncation

    def integrand(los, impact):
        radius = np.sqrt(impact * impact + los * los)
        return impact * density(radius)

    total = quad(lambda los: quad(lambda b: integrand(los, b), 0., thetamax * D)[0], -np.inf, np.inf)[0]
    return np.log10(rho0 * 2. * np.pi * total / D / D / GEVcm2toMsolkpc2)
def integrate_rho_spherical_alphabetagamma(R, rho0, rs, alpha, beta, gamma, rt):
    """Mass enclosed within radius R for the truncated (alpha, beta, gamma) profile:
    4 pi rho0 * integral of r^2 rho(r) dr from 0 to R."""
    def density(radius):
        s = radius / rs
        truncation = np.sqrt(1 - np.tanh(radius / rt) ** 2)
        return np.power(s, -gamma) * np.power(1 + np.power(s, alpha), ((gamma - beta) / alpha)) * truncation

    def shell(radius):
        return radius * radius * density(radius)

    return 4. * np.pi * rho0 * quad(shell, 0., R)[0]
def asymmetric_gaussian_samples(mean, sigma, N=1):
    """Draw N samples from a dimidated (two-sided) Gaussian about mean.

    sigma = [lower error bar, upper error bar]: each draw picks a side with
    probability 1/2 and uses that side's width for a half-Gaussian offset.
    """
    # NOTE: the two np.random calls below are kept in this exact order so the
    # random stream matches the original implementation.
    side_is_upper = np.random.uniform(size=N) > 0.5
    widths = [sigma[1] if up else sigma[0] for up in side_is_upper]
    offsets = np.fabs(np.random.normal(loc=0., scale=widths))
    return mean + (2 * side_is_upper - 1.) * offsets
def barlow_asymmetric_gaussian_samples(mean, sigma, N=1):
    """Draw N samples from Barlow's (2003) asymmetric-Gaussian parameterisation.

    sigma = [lower error bar, upper error bar]. This produces very asymmetric
    distributions with a sharp cut-off on the smaller-error side; the
    dimidated Gaussian (asymmetric_gaussian_samples) is arguably better.
    """
    skew = .5 * (sigma[1] - sigma[0])
    width = .5 * (sigma[1] + sigma[0])
    draws = np.random.normal(loc=0., scale=1., size=N)
    # Quadratic transform of a unit normal, as in Barlow (2003).
    return width * draws + mean + skew * draws * draws
def HernquistX(s):
    """X(s) from equations (33) & (34) of Hernquist (1990).

    Piecewise in s: log form for 0 <= s < 1, exactly 1 at s = 1, arccos form
    for s > 1. Raises ValueError for negative s.
    """
    if s < 0.:
        raise ValueError("s must be positive in Hernquist X function")
    if s == 1.:
        return 1.
    if s < 1.:
        root = np.sqrt(1 - s * s)
        return np.log((1 + root) / s) / root
    root = np.sqrt(s * s - 1)
    return np.arccos(1. / s) / root
def wyns_formulaJ_NFW(rho0, r_s, distance, angle):
    """Analytic log10 J-factor for an NFW profile.

    rho0: scale density; r_s: scale radius (kpc); distance (kpc); angle (rad).
    All arguments may be numpy arrays of equal length (the HernquistX term is
    evaluated elementwise); the result is converted via GEV2cm5toMsol2kpc5.
    """
    Delta2 = r_s**2 - distance**2 * angle**2
    X = distance * angle / r_s
    # BUG FIX: the original used np.array(map(...)), which on Python 3 wraps
    # the map *iterator* in a useless 0-d object array. A list comprehension
    # behaves identically on Python 2 and correctly on Python 3.
    hern = np.array([HernquistX(s) for s in X])
    J = 2. * distance * angle * (7. * distance * r_s**3 * angle - 4. * distance**3 * r_s * angle**3 + 3. * np.pi * Delta2**2) \
        + 6. / r_s * (2 * Delta2**3 - 2 * r_s**4 * Delta2 - distance**4 * r_s**2 * angle**4) * hern
    J *= np.pi * rho0**2 * r_s**2 / (3. * distance**2 * Delta2**2)
    return np.log10(J / GEV2cm5toMsol2kpc5)
def wyns_formulaJ_NFW_data(sigma_los, r_half, distance, angle, r_s, walker_or_wolf="wolf"):
    '''
    J factor from M_half for NFW profile
    sigma_los in km/s, r_half in pc, distance in kpc, angle in deg, r_s in kpc

    The half-light mass estimator is Wolf et al. (walker_or_wolf="wolf") or
    Walker et al. (anything else); rho0 is then fixed by matching the NFW
    mass inside r_half.
    '''
    r_half = 0.001 * r_half  # pc -> kpc
    angle = np.deg2rad(angle)
    if walker_or_wolf == "wolf":
        Mhalf = 4. * sigma_los**2 * r_half / G
        r_half = 4. / 3. * r_half  # Wolf estimator uses the 3D half-light radius
    else:
        Mhalf = 2.5 * sigma_los**2 * r_half / G
    rho0 = Mhalf / 4. / np.pi / r_s**3 / (np.log((r_s + r_half) / r_s) - r_half / (r_s + r_half))
    return wyns_formulaJ_NFW(rho0, r_s, distance, angle)
def wyns_formulaD_NFW(rho0, r_s, distance, angle):
    """Analytic log10 D-factor for an NFW profile.

    (The original docstring said "J factor" — this is the D factor.)
    rho0: scale density; r_s: scale radius (kpc); distance (kpc); angle (rad).
    Arguments may be numpy arrays of equal length; converted via GEVcm2toMsolkpc2.
    """
    X = distance * angle / r_s
    # BUG FIX: np.array(map(...)) breaks on Python 3 (map returns an iterator,
    # yielding a 0-d object array); a list comprehension works on 2 and 3.
    D = np.log(X / 2.) + np.array([HernquistX(s) for s in X])
    D *= 4. * np.pi * rho0 * r_s**3 / distance**2
    return np.log10(D / GEVcm2toMsolkpc2)
def wyns_formulaD_NFW_data(sigma_los, r_half, distance, angle, r_s, walker_or_wolf="wolf"):
    '''
    D factor from M_half for NFW profile
    sigma_los in km/s, r_half in pc, distance in kpc, angle in deg, r_s in kpc

    Mirror of wyns_formulaJ_NFW_data, but feeding the derived rho0 into the
    D-factor formula instead of the J-factor one.
    '''
    r_half = 0.001 * r_half  # pc -> kpc
    angle = np.deg2rad(angle)
    if walker_or_wolf == "wolf":
        Mhalf = 4. * sigma_los**2 * r_half / G
        r_half = 4. / 3. * r_half  # Wolf estimator uses the 3D half-light radius
    else:
        Mhalf = 2.5 * sigma_los**2 * r_half / G
    rho0 = Mhalf / 4. / np.pi / r_s**3 / (np.log((r_s + r_half) / r_s) - r_half / (r_s + r_half))
    return wyns_formulaD_NFW(rho0, r_s, distance, angle)
def wyns_formulaJ(sigma_los, r_half, distance, angle, gamma=1., walker_or_wolf="wolf"):
    '''
    J factor from M_half for power-law profile (slope = gamma)
    sigma_los in km/s, r_half in pc, distance in kpc, angle in deg

    For 0.5 < gamma < 1.5 (gamma != 1) the general power-law expression is
    used; otherwise the gamma = 1 limiting form.
    '''
    r_half = 0.001 * r_half  # pc -> kpc
    angle = np.deg2rad(angle)
    delta_Omega = 2. * np.pi * (1 - np.cos(angle))
    # Squared half-light-mass normalisation: Walker vs Wolf estimator.
    if walker_or_wolf == "wolf":
        mass_fac = (0.25 * (27. / 16.) * (4. / 3.) ** gamma) ** 2
    else:
        mass_fac = (2.5 / 4.) ** 2
    if gamma != 1. and .5 < gamma < 1.5:
        pref = 2. * (3. - gamma) ** 2 * Gamma(gamma - 0.5) / (np.pi ** (2 - gamma) * (3. - 2 * gamma) * Gamma(gamma))
        core = pref * sigma_los ** 4 * delta_Omega ** (1.5 - gamma) / G ** 2 * distance ** (1 - 2. * gamma) * r_half ** (2 * gamma - 4.)
    else:
        core = 8. / np.sqrt(np.pi) * sigma_los ** 4 * np.sqrt(delta_Omega) / G ** 2 / distance / (r_half ** 2)
    return np.log10(core / GEV2cm5toMsol2kpc5) + np.log10(mass_fac)
def wyns_formulaD(sigma_los, r_half, distance, angle, gamma=1., walker_or_wolf="wolf"):
    '''
    D factor from M_half for power-law profile (slope = gamma)
    sigma_los in km/s, r_half in pc, distance in kpc, angle in deg

    NOTE(review): for gamma outside (1, 3) — including the default gamma=1 —
    this function silently returns None, matching the original behaviour;
    callers must pass 1 < gamma < 3.
    '''
    r_half = 0.001 * r_half  # pc -> kpc
    angle = np.deg2rad(angle)
    delta_Omega = 2. * np.pi * (1 - np.cos(angle))
    # Half-light-mass normalisation: Walker vs Wolf estimator.
    if walker_or_wolf == "wolf":
        mass_fac = 0.25 * (27. / 16.) * (4. / 3.) ** gamma
    else:
        mass_fac = 2.5 / 4.
    if 1. < gamma < 3.:
        pref = 2. * Gamma(gamma * 0.5 - 0.5) / (np.pi ** (1 - 0.5 * gamma) * Gamma(gamma * 0.5))
        core = pref * sigma_los ** 2 * delta_Omega ** (1.5 - gamma * 0.5) / G * distance ** (1. - gamma) * r_half ** (gamma - 2.)
        return np.log10(core / GEVcm2toMsolkpc2) + np.log10(mass_fac)
    return None
def sample_errorsJ(sigma_los, esigma_los, r_half, er_half, distance, edistance, angle, eangle, gamma=1., N=1000, nfw=-1., walker_or_wolf="wolf"):
    ''' Samples from sigma_los (km/s), r_half (pc), distance (kpc) and angle (deg) pdfs (gaussians) and returns median J value and pm 1 sigma '''
    # NOTE: the np.random calls below are kept in the original order so the
    # random stream is unchanged.
    if esigma_los[0] == 0.:
        # sigma_los is then a 95% upper limit: sample uniformly from
        # 0.1 km/s up to sigma_los/0.95.
        sig = np.random.uniform(0.1, sigma_los / 0.95, N)
    else:
        # Log-normal scatter (asymmetric errors applied in log space).
        sig = np.exp(asymmetric_gaussian_samples(np.log(sigma_los), esigma_los / sigma_los, N))
    rh = np.exp(asymmetric_gaussian_samples(np.log(r_half), er_half / r_half, N))
    ang = np.exp(asymmetric_gaussian_samples(np.log(angle), eangle / angle, N))
    dist = np.random.normal(loc=distance, scale=edistance, size=N)
    # nfw > 0 is interpreted as an NFW scale radius; otherwise power-law slope gamma.
    if nfw > 0.:
        samples = wyns_formulaJ_NFW_data(sig, rh, dist, ang, nfw, walker_or_wolf)
    else:
        samples = wyns_formulaJ(sig, rh, dist, ang, gamma, walker_or_wolf)
    med = np.nanmedian(samples)
    # Median plus lower/upper 1-sigma offsets (15.87th / 84.13th percentiles).
    return np.array([med, med - np.nanpercentile(samples, 15.87), np.nanpercentile(samples, 84.13) - med])
def wyns_formulaJ_error_sample(data, gamma=1., gammaarray=None, angle='Max', nfw=[0.], N=1000, geo_factor=True, walker_or_wolf="wolf"):
    ''' Performs J sampling for a set of data

    data is a table with per-object columns (sigma_los, R_half, D, theta_*,
    ellip, and their errors). Returns an (n, 3) array of
    [median, -1 sigma, +1 sigma] log10 J per object.
    '''
    n_obj = len(data)
    # A too-short nfw list (e.g. the default [0.]) disables the NFW branch per object.
    if len(nfw) < n_obj:
        nfw = -1. * np.ones(n_obj)
    # Choose the aperture: theta_max (default), theta_half, or a fixed 0.5 deg.
    angles = data.theta_max
    angerrs = [[1e-15, 1e-15] for _ in range(n_obj)]
    if angle == 'Half':
        angles = data.theta_half
        angerrs = [[data.etheta_half2[i], data.etheta_half1[i]] for i in range(n_obj)]
    if angle == 'Half_05':
        angles = 0.5 * np.ones(n_obj)
        angerrs = [[1e-15, 1e-15] for _ in range(n_obj)]
    # Optional sqrt(1 - ellipticity) geometric correction to R_half.
    geof = np.sqrt(1. - data.ellip) if geo_factor else np.ones(n_obj)
    # Per-object slope array, or the same scalar slope for every object.
    slopes = gammaarray if isinstance(gammaarray, np.ndarray) else [gamma] * n_obj
    return np.array([
        sample_errorsJ(data.sigma_los[i],
                       [data.esigma_los2[i], data.esigma_los1[i]],
                       data.R_half[i] * geof[i],
                       [data.eR_half2[i] * geof[i],
                        data.eR_half1[i] * geof[i]],
                       data.D[i],
                       data.eD[i],
                       angles[i],
                       angerrs[i],
                       slopes[i],
                       N=N,
                       nfw=nfw[i],
                       walker_or_wolf=walker_or_wolf) for i in range(n_obj)])
def sample_errorsD(sigma_los, esigma_los, r_half, er_half, distance, edistance, angle, eangle, gamma=1., N=1000, nfw=-1., walker_or_wolf="wolf"):
    ''' Samples from sigma_los (km/s), r_half (pc), distance (kpc) and angle (deg) pdfs (gaussians) and returns median D value and pm 1 sigma '''
    # NOTE: the np.random calls below are kept in the original order so the
    # random stream is unchanged.
    if esigma_los[0] == 0.:
        # sigma_los is then a 95% upper limit: sample uniformly from
        # 0.1 km/s up to sigma_los (unlike the J version, no /0.95 here).
        sig = np.random.uniform(0.1, sigma_los, N)
    else:
        # Log-normal scatter (asymmetric errors applied in log space).
        sig = np.exp(asymmetric_gaussian_samples(np.log(sigma_los), esigma_los / sigma_los, N))
    rh = np.exp(asymmetric_gaussian_samples(np.log(r_half), er_half / r_half, N))
    ang = np.exp(asymmetric_gaussian_samples(np.log(angle), eangle / angle, N))
    dist = np.random.normal(loc=distance, scale=edistance, size=N)
    # nfw > 0 is interpreted as an NFW scale radius; otherwise power-law slope gamma.
    if nfw > 0.:
        samples = wyns_formulaD_NFW_data(sig, rh, dist, ang, nfw, walker_or_wolf)
    else:
        samples = wyns_formulaD(sig, rh, dist, ang, gamma, walker_or_wolf)
    med = np.nanmedian(samples)
    # Median plus lower/upper 1-sigma offsets (15.87th / 84.13th percentiles).
    return np.array([med, med - np.nanpercentile(samples, 15.87), np.nanpercentile(samples, 84.13) - med])
def wyns_formulaD_error_sample(data,gamma=1.,gammaarray=None,angle='Max',nfw=[0.],N=1000,geo_factor=True,walker_or_wolf="wolf"):
    ''' Performs D sampling for a set of data

        data          : table with columns sigma_los, R_half, D, their errors,
                        theta_max / dtheta_half and ellip
        gamma         : single inner slope used for every row
        gammaarray    : per-row slopes (numpy array) overriding gamma
        angle         : 'Max' (theta_max), 'Half' (dtheta_half) or 'Half_05' (fixed 0.5 deg)
        nfw           : per-row NFW scale radii; too-short input (incl. the
                        default) disables NFW (-1 for every row).  NOTE: the
                        mutable default is only ever rebound, never mutated.
        geo_factor    : multiply R_half by sqrt(1-ellipticity)
        Returns an (len(data), 3) array of [median, -1sigma, +1sigma] per row.
    '''
    n = len(data)
    if(len(nfw)<n):
        nfw=-1.*np.ones(n)
    # Choose the aperture angle and its (lower, upper) errors per row.
    angles=data.theta_max
    angerrs=[[1e-15,1e-15] for i in range(n)]
    if(angle=='Half'):
        angles=data.dtheta_half
        angerrs=[[data.edtheta_half2[i],data.edtheta_half1[i]] for i in range(n)]
    if(angle=='Half_05'):
        angles=0.5*np.ones(n)
        angerrs=[[1e-15,1e-15] for i in range(n)]
    geof=np.ones(n)
    if geo_factor:
        geof = np.sqrt(1.-data.ellip)
    # Unify the two previously-duplicated branches: build one slope per row.
    if(isinstance(gammaarray,np.ndarray)):
        gammas = gammaarray
    else:
        gammas = np.full(n, gamma)
    return np.array([sample_errorsD(data.sigma_los[i],
                                    [data.esigma_los2[i],data.esigma_los1[i]],
                                    data.R_half[i]*geof[i],
                                    [data.eR_half2[i]*geof[i],
                                     data.eR_half1[i]*geof[i]],
                                    data.D[i],
                                    data.eD[i],
                                    angles[i],
                                    angerrs[i],
                                    gammas[i],
                                    N=N,
                                    nfw=nfw[i],
                                    walker_or_wolf=walker_or_wolf) for i in range(n)])
# def add_thetas(ax,xrang,thetalist):
# ylim=ax.get_ylim()
# ax.set_ylim(ylim[0]-0.5,ylim[1])
# for x,t in zip(xrang,thetalist):
# ax.annotate(str(t)+r'$^\circ$',xy=(x,ylim[0]),horizontalalignment='center',verticalalignment='bottom',rotation=90)
# def make_table(data):
# WEJ2 = wyns_formulaJ_error_sample(data,gamma=1.,angle='Half_05',N=10000,nfw=5.*data['R_half']/1000.)
# WED2 = wyns_formulaD_error_sample(data,gamma=1.,angle='Half_05',N=10000,nfw=5.*data['R_half']/1000.)
# WEJ3 = wyns_formulaJ_error_sample(data,gamma=1.,angle='Max',N=10000,nfw=5.*data['R_half']/1000.)
# WED3 = wyns_formulaD_error_sample(data,gamma=1.,angle='Max',N=10000,nfw=5.*data['R_half']/1000.)
# # outfile=open('dwarfs_Jfactors.dat','w')
# # outfile.write('\\begin{tabular}{lccccccc}\n')
# # outfile.write('\\hline\n\\hline\n')
# # outfile.write('Name & $\\theta_\mathrm{max}$ & $\\theta_{0.5}$ & $\\theta_{0.5, \mathrm{decay}}$ & $\log_{10} J(\\theta_\mathrm{max})$ & $\log_{10} J(0.5^\circ)$ & $\log_{10} D(\\theta_\mathrm{max})$ & $\log_{10} D(0.5^\circ)$\\\\ \n')
# # outfile.write('& [$^\circ$] & [$^\circ$] & [$^\circ$] & [$\mathrm{GeV^2\,cm}^{-5}$] & [$\mathrm{GeV^2\,cm}^{-5}$] & [$\mathrm{GeV\,cm}^{-2}$] & [$\mathrm{GeV\,cm}^{-2}$]\\\\\n')
# # outfile.write('\\hline\n')
# # for i in range(len(WEJ)):
# # string= str(data['Name'][i])+" & $"+\
# # str(data['theta_max'][i])+"$&$"+\
# # str(data['theta_half'][i])+"$&$"+\
# # str(data['dtheta_half'][i])+"$&"+\
# # "$%0.2f_{-%0.2f}^{+%0.2f}$&"%(WEJ3[i][0],WEJ3[i][1],WEJ3[i][2])
# # if(i>21):
# # string+="-&"
# # else:
# # string+="$%0.2f_{-%0.2f}^{+%0.2f}$&"%(WEJ2[i][0],WEJ2[i][1],WEJ2[i][2])
# # string+="$%0.2f_{-%0.2f}^{+%0.2f}$&"%(WED3[i][0],WED3[i][1],WED3[i][2])
# # if(i>21):
# # string+="-&"
# # else:
# # string+="$%0.2f_{-%0.2f}^{+%0.2f}$"%(WED2[i][0],WED2[i][1],WED2[i][2])+"\\\\\n"
# # if(i==7 or i==22):
# # outfile.write('\\hline\n')
# # outfile.write(string)
# # outfile.write('\\hline\n')
# # outfile.write('\end{tabular}\n')
# # outfile.close()
# outfile=open('dwarfs_Jfactors.dat','w')
# outfile.write('\\begin{tabular}{lccccc}\n')
# outfile.write('\\hline\n\\hline\n')
# outfile.write('Name & $\\theta_\mathrm{max}$ & $\log_{10} J(\\theta_\mathrm{max})$ & $\log_{10} J(0.5^\circ)$ & $\log_{10} D(\\theta_\mathrm{max})$ & $\log_{10} D(0.5^\circ)$\\\\ \n')
# outfile.write('& [$^\circ$] & [$\mathrm{GeV^2\,cm}^{-5}$] & [$\mathrm{GeV^2\,cm}^{-5}$] & [$\mathrm{GeV\,cm}^{-2}$] & [$\mathrm{GeV\,cm}^{-2}$]\\\\\n')
# outfile.write('\\hline\n')
# for i in range(len(WEJ2)):
# string= str(data['Name'][i])+" & $"+\
# str(data['theta_max'][i])+"$&"+\
# "$%0.2f_{-%0.2f}^{+%0.2f}$&"%(WEJ3[i][0],WEJ3[i][1],WEJ3[i][2])
# if(i>21):
# string+="-&"
# else:
# string+="$%0.2f_{-%0.2f}^{+%0.2f}$&"%(WEJ2[i][0],WEJ2[i][1],WEJ2[i][2])
# string+="$%0.2f_{-%0.2f}^{+%0.2f}$&"%(WED3[i][0],WED3[i][1],WED3[i][2])
# if(i>21):
# string+="-"+"\\\\\n"
# else:
# string+="$%0.2f_{-%0.2f}^{+%0.2f}$"%(WED2[i][0],WED2[i][1],WED2[i][2])+"\\\\\n"
# if(i==7 or i==22):
# outfile.write('\\hline\n')
# outfile.write(string)
# outfile.write('\\hline\n')
# outfile.write('\end{tabular}\n')
# outfile.close()
# if __name__ == '__main__':
# data = pd.read_csv('data.dat',sep=' ')
# make_table(data)
# gs_gammas=np.genfromtxt('geringer_sameth_gamma.dat',skip_header=49)
# # for i in range(len(gs_gammas)):
# # if(gs_gammas[i][23]<0.5):
# # gs_gammas[i][23]=0.50005
# cd=data[data.Class=='CD']
# uf=data[data.Class=='UF']
# labelrange=np.linspace(0.,len(data),len(data))
# labelscd=labelrange[:len(cd)]
# labelsuf=labelrange[len(cd):]
# f,a=plt.subplots(2,4,figsize=(16,8))
# plt.subplots_adjust(hspace=0.5)
# for ai in a:
# for aj in ai:
# aj.set_xticks(labelrange)
# aj.set_xticklabels(data.Name.values,rotation=90)
# aj.set_xlim(labelrange[0]-1,labelrange[-1]+1)
# for i in a[1]:
# ls=i.axvline(labelscd[-1]+.5,c='k',ls='dashed')
# ls.set_dashes((2,1))
# ls=i.axvline(labelsuf[13]+.5,c='k',ls='dashed')
# ls.set_dashes((2,1))
# a[0][0].errorbar(labelscd,cd.D,yerr=cd.eD,fmt='.')
# a[0][0].errorbar(labelsuf,uf.D.values,yerr=uf.eD.values,fmt='.')
# a[0][0].set_ylabel(r'Distance/kpc')
# a[0][1].errorbar(labelscd,cd.R_half,yerr=[cd.eR_half2,cd.eR_half1],fmt='.')
# a[0][1].errorbar(labelsuf,uf.R_half,yerr=[uf.eR_half2,uf.eR_half1],fmt='.')
# a[0][1].set_ylabel(r'$R_{\mathrm{half}}/\mathrm{pc}$')
# a[0][2].errorbar(labelscd,cd.sigma_los,yerr=[cd.esigma_los2,cd.esigma_los1],fmt='.')
# a[0][2].errorbar(labelsuf,uf.sigma_los,yerr=[uf.esigma_los2,uf.esigma_los1],fmt='.')
# a[0][2].arrow(labelsuf[9],uf.sigma_los.values[9],0.,-0.5,fc=sns.color_palette()[1],ec=sns.color_palette()[1],head_length=0.2,head_width=0.3)
# a[0][2].arrow(labelsuf[15],uf.sigma_los.values[15],0.,-0.5,fc=sns.color_palette()[1],ec=sns.color_palette()[1],head_length=0.2,head_width=0.3)
# a[0][2].arrow(labelsuf[17],uf.sigma_los.values[17],0.,-0.5,fc=sns.color_palette()[1],ec=sns.color_palette()[1],head_length=0.2,head_width=0.3)
# a[0][2].set_ylabel(r'$\sigma_{\mathrm{los}}/\mathrm{km\,s}^{-1}$')
# a[1][0].errorbar(labelscd,cd.Jmax,yerr=[cd.eJmax2,cd.eJmax1],fmt='.',color='k')
# a[1][0].errorbar(labelsuf,uf.Jmax,yerr=[uf.eJmax2,uf.eJmax1],fmt='.',color='k')
# WE = wyns_formulaJ_error_sample(data,gamma=1.)
# for i in range(len(data)):
# a[1][0].fill_between([labelrange[i]-0.2,labelrange[i]+0.2], [WE[i][0]-WE[i][1],WE[i][0]-WE[i][1]], [WE[i][0]+WE[i][2],WE[i][0]+WE[i][2]],alpha=0.5,edgecolor="None",color=sns.color_palette()[2])
# # WE = wyns_formulaJ_error_sample(data,gamma=0.75)
# # for i in range(len(data)):
# # a[1][0].fill_between([labelrange[i]-0.2,labelrange[i]+0.2], [WE[i][0]-WE[i][1],WE[i][0]-WE[i][1]], [WE[i][0]+WE[i][2],WE[i][0]+WE[i][2]],alpha=0.5,edgecolor="None",color=sns.color_palette()[3])
# WE = wyns_formulaJ_error_sample(data,gamma=0.51)
# for i in range(len(data)):
# a[1][0].fill_between([labelrange[i]-0.2,labelrange[i]+0.2], [WE[i][0]-WE[i][1],WE[i][0]-WE[i][1]], [WE[i][0]+WE[i][2],WE[i][0]+WE[i][2]],alpha=0.5,edgecolor="None",color=sns.color_palette()[4])
# WE = wyns_formulaJ_error_sample(data,gamma=1.,nfw=5.*data['R_half']/1000.)
# for i in range(len(data)):
# a[1][0].fill_between([labelrange[i]-0.2,labelrange[i]+0.2], [WE[i][0]-WE[i][1],WE[i][0]-WE[i][1]], [WE[i][0]+WE[i][2],WE[i][0]+WE[i][2]],alpha=0.5,edgecolor="None",color=sns.color_palette()[0])
# add_thetas(a[1][0],labelrange,data.theta_max)
# a[1][0].set_ylabel(r'$\log_{10}(J_\mathrm{max}/\,\mathrm{GeV^2\,cm}^{-5})$')
# a[1][1].errorbar(labelscd,cd.Jmax.values-np.log10(2.),yerr=[cd.eJmax2,cd.eJmax1],fmt='.',label="",color='k')
# a[1][1].errorbar(labelsuf,uf.Jmax.values-np.log10(2.),yerr=[uf.eJmax2,uf.eJmax1],fmt='.',label="",color='k')
# WE = wyns_formulaJ_error_sample(data,gamma=1.,angle='Half')
# for i in range(len(data)):
# label=None
# if(i==0):
# label=r'$\gamma=1$'
# a[1][1].fill_between([labelrange[i]-0.2,labelrange[i]+0.2], [WE[i][0]-WE[i][1],WE[i][0]-WE[i][1]], [WE[i][0]+WE[i][2],WE[i][0]+WE[i][2]],alpha=0.5,edgecolor="None",color=sns.color_palette()[2],label=label)
# # WE = wyns_formulaJ_error_sample(data,gamma=0.75,angle='Half')
# # for i in range(len(data)):
# # label=None
# # if(i==0):
# # label=r'$\gamma=0.75$'
# # a[1][1].fill_between([labelrange[i]-0.2,labelrange[i]+0.2], [WE[i][0]-WE[i][1],WE[i][0]-WE[i][1]], [WE[i][0]+WE[i][2],WE[i][0]+WE[i][2]],alpha=0.5,edgecolor="None",color=sns.color_palette()[3],label=label)
# WE = wyns_formulaJ_error_sample(data,gamma=0.51,angle='Half')
# for i in range(len(data)):
# label=None
# if(i==0):
# label=r'$\gamma=0.51$'
# a[1][1].fill_between([labelrange[i]-0.2,labelrange[i]+0.2], [WE[i][0]-WE[i][1],WE[i][0]-WE[i][1]], [WE[i][0]+WE[i][2],WE[i][0]+WE[i][2]],alpha=0.5,edgecolor="None",color=sns.color_palette()[4],label=label)
# WE = wyns_formulaJ_error_sample(data,gamma=1.,angle='Half',nfw=5.*data['R_half']/1000.)
# for i in range(len(data)):
# label=None
# if(i==0):
# label=r'NFW'
# a[1][1].fill_between([labelrange[i]-0.2,labelrange[i]+0.2], [WE[i][0]-WE[i][1],WE[i][0]-WE[i][1]], [WE[i][0]+WE[i][2],WE[i][0]+WE[i][2]],alpha=0.5,edgecolor="None",color=sns.color_palette()[0],label=label)
# gammas = gs_gammas.T[23]
# while(len(gammas)<len(data)):
# gammas = np.append(gammas,0.8)
# WE = wyns_formulaJ_error_sample(data,gammaarray=gammas,angle='Half')
# for i in range(len(data)):
# label=None
# if(i==0):
# label=r'$\gamma_\mathrm{GS}$'
# a[1][1].fill_between([labelrange[i]-0.3,labelrange[i]+0.3], [WE[i][0]-WE[i][1],WE[i][0]-WE[i][1]], [WE[i][0]+WE[i][2],WE[i][0]+WE[i][2]],alpha=1.,facecolor="None",label=label)
# add_thetas(a[1][1],labelrange,data.theta_half)
# a[1][1].legend(loc="lower center",ncol=2, bbox_to_anchor=(0.5, 1.0))
# a[1][1].set_ylabel(r'$\log_{10}(J_\mathrm{half}/\,\mathrm{GeV^2\,cm}^{-5})$')
# a[1][2].errorbar(labelscd,cd.dJmax.values-np.log10(2.),yerr=[cd.eJmax2,cd.edJmax1],fmt='.',color='k')
# a[1][2].errorbar(labelsuf,uf.dJmax.values-np.log10(2.),yerr=[uf.edJmax2,uf.edJmax1],fmt='.',color='k')
# WE = wyns_formulaD_error_sample(data,gamma=1.)
# for i in range(len(data)):
# label=None
# if(i==0):
# label=r'$\gamma=1.$'
# a[1][2].fill_between([labelrange[i]-0.2,labelrange[i]+0.2], [WE[i][0]-WE[i][1],WE[i][0]-WE[i][1]], [WE[i][0]+WE[i][2],WE[i][0]+WE[i][2]],alpha=0.5,edgecolor="None",color=sns.color_palette()[2],label=label)
# # WE = wyns_formulaD_error_sample(data,gamma=1.25)
# # for i in range(len(data)):
# # label=None
# # if(i==0):
# # label=r'$\gamma=1.25$'
# # a[1][2].fill_between([labelrange[i]-0.2,labelrange[i]+0.2], [WE[i][0]-WE[i][1],WE[i][0]-WE[i][1]], [WE[i][0]+WE[i][2],WE[i][0]+WE[i][2]],alpha=0.5,edgecolor="None",color=sns.color_palette()[3],label=label)
# WE = wyns_formulaD_error_sample(data,gamma=1.49)
# for i in range(len(data)):
# label=None
# if(i==0):
# label=r'$\gamma=1.49$'
# a[1][2].fill_between([labelrange[i]-0.2,labelrange[i]+0.2], [WE[i][0]-WE[i][1],WE[i][0]-WE[i][1]], [WE[i][0]+WE[i][2],WE[i][0]+WE[i][2]],alpha=0.5,edgecolor="None",color=sns.color_palette()[4],label=label)
# WE = wyns_formulaD_error_sample(data,gamma=1.,nfw=5.*data['R_half']/1000.)
# for i in range(len(data)):
# label=None
# if(i==0):
# label=r'NFW'
# a[1][2].fill_between([labelrange[i]-0.2,labelrange[i]+0.2], [WE[i][0]-WE[i][1],WE[i][0]-WE[i][1]], [WE[i][0]+WE[i][2],WE[i][0]+WE[i][2]],alpha=0.5,edgecolor="None",color=sns.color_palette()[0],label=label)
# WE = wyns_formulaD_error_sample(data,gammaarray=gammas,angle='Half')
# for i in range(len(data)):
# label=None
# if(i==0):
# label=r'$\gamma_\mathrm{GS}$'
# a[1][2].fill_between([labelrange[i]-0.2,labelrange[i]+0.2], [WE[i][0]-WE[i][1],WE[i][0]-WE[i][1]], [WE[i][0]+WE[i][2],WE[i][0]+WE[i][2]],alpha=1.,facecolor="None",label=label)
# add_thetas(a[1][2],labelrange,data.dtheta_half)
# a[1][2].legend(loc="lower center",ncol=2, bbox_to_anchor=(0.5, 1.0))
# a[1][2].set_ylabel(r'$\log_{10}(D_\mathrm{half}/\,\mathrm{GeV\,cm}^{-2})$')
# a[1][3].errorbar(labelscd,cd.Jhalf.values,yerr=[cd.eJhalf2,cd.eJhalf1],fmt='.',label="",color='k')
# a[1][3].errorbar(labelsuf,uf.Jhalf.values,yerr=[uf.eJhalf2,uf.eJhalf1],fmt='.',label="",color='k')
# WE = wyns_formulaJ_error_sample(data,gamma=1.,angle='Half' )
# for i in range(len(data)):
# label=None
# if(i==0):
# label=r'$\gamma=1$'
# a[1][3].fill_between([labelrange[i]-0.2,labelrange[i]+0.2], [WE[i][0]-WE[i][1],WE[i][0]-WE[i][1]], [WE[i][0]+WE[i][2],WE[i][0]+WE[i][2]],alpha=0.5,edgecolor="None",color=sns.color_palette()[2],label=label)
# # WE = wyns_formulaJ_error_sample(data,gamma=0.75,angle='Half_05')
# # for i in range(len(data)):
# # label=None
# # if(i==0):
# # label=r'$\gamma=0.75$'
# # a[1][3].fill_between([labelrange[i]-0.2,labelrange[i]+0.2], [WE[i][0]-WE[i][1],WE[i][0]-WE[i][1]], [WE[i][0]+WE[i][2],WE[i][0]+WE[i][2]],alpha=0.5,edgecolor="None",color=sns.color_palette()[3],label=label)
# WE = wyns_formulaJ_error_sample(data,gamma=0.51,angle='Half_05')
# for i in range(len(data)):
# label=None
# if(i==0):
# label=r'$\gamma=0.51$'
# a[1][3].fill_between([labelrange[i]-0.2,labelrange[i]+0.2], [WE[i][0]-WE[i][1],WE[i][0]-WE[i][1]], [WE[i][0]+WE[i][2],WE[i][0]+WE[i][2]],alpha=0.5,edgecolor="None",color=sns.color_palette()[4],label=label)
# WE = wyns_formulaJ_error_sample(data,gamma=1.,angle='Half_05',nfw=5.*data['R_half']/1000.)
# for i in range(len(data)):
# label=None
# if(i==0):
# label=r'NFW'
# a[1][3].fill_between([labelrange[i]-0.2,labelrange[i]+0.2], [WE[i][0]-WE[i][1],WE[i][0]-WE[i][1]], [WE[i][0]+WE[i][2],WE[i][0]+WE[i][2]],alpha=0.5,edgecolor="None",color=sns.color_palette()[0],label=label)
# gammas = gs_gammas.T[23]
# while(len(gammas)<len(data)):
# gammas = np.append(gammas,0.8)
# WE = wyns_formulaJ_error_sample(data,gammaarray=gammas,angle='Half_05')
# for i in range(len(data)):
# label=None
# if(i==0):
# label=r'$\gamma_\mathrm{GS}$'
# a[1][3].fill_between([labelrange[i]-0.3,labelrange[i]+0.3], [WE[i][0]-WE[i][1],WE[i][0]-WE[i][1]], [WE[i][0]+WE[i][2],WE[i][0]+WE[i][2]],alpha=1.,facecolor="None",label=label)
# add_thetas(a[1][3],labelrange,0.5*np.ones(len(data)))
# a[1][3].legend(loc="lower center",ncol=2, bbox_to_anchor=(0.5, 1.0))
# a[1][3].set_ylabel(r'$\log_{10}(J(0.5^\circ)/\,\mathrm{GeV^2\,cm}^{-5})$')
# plt.savefig('dwarfs_data.pdf',bbox_inches='tight')
| 47.329181
| 240
| 0.618106
| 4,925
| 26,599
| 3.226802
| 0.073503
| 0.028694
| 0.019129
| 0.028694
| 0.795432
| 0.778945
| 0.754468
| 0.744274
| 0.729927
| 0.725271
| 0
| 0.056419
| 0.133727
| 26,599
| 561
| 241
| 47.413547
| 0.633278
| 0.561562
| 0
| 0.647059
| 0
| 0
| 0.010496
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117647
| false
| 0
| 0.02521
| 0.02521
| 0.289916
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
58877904bfdf9fc9566d69c4af636854fdeeb4e5
| 56,760
|
py
|
Python
|
src/azure-cli/azure/cli/command_modules/rdbms/tests/latest/test_rdbms_flexible_commands.py
|
major/azure-cli
|
13c46f6e1d94a2eddf31539a0bbf1d5f2f28f63a
|
[
"MIT"
] | 2
|
2021-06-21T14:15:32.000Z
|
2021-06-21T14:15:38.000Z
|
src/azure-cli/azure/cli/command_modules/rdbms/tests/latest/test_rdbms_flexible_commands.py
|
major/azure-cli
|
13c46f6e1d94a2eddf31539a0bbf1d5f2f28f63a
|
[
"MIT"
] | 2
|
2017-02-11T21:16:40.000Z
|
2017-02-11T21:30:54.000Z
|
src/azure-cli/azure/cli/command_modules/rdbms/tests/latest/test_rdbms_flexible_commands.py
|
major/azure-cli
|
13c46f6e1d94a2eddf31539a0bbf1d5f2f28f63a
|
[
"MIT"
] | 2
|
2020-07-16T09:56:27.000Z
|
2021-07-09T00:52:51.000Z
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import time
from datetime import datetime, timedelta, tzinfo
from time import sleep
from dateutil.tz import tzutc
import pytest
from azure_devtools.scenario_tests import AllowLargeResponse
from msrestazure.azure_exceptions import CloudError
from azure.cli.core.local_context import AzCLILocalContext, ALL, LOCAL_CONTEXT_FILE
from azure.cli.core.util import CLIError
from azure.cli.core.util import parse_proxy_resource_id
from azure.cli.testsdk.base import execute
from azure.cli.testsdk.exceptions import CliTestError
from azure.cli.testsdk import (
JMESPathCheck,
NoneCheck,
ResourceGroupPreparer,
ScenarioTest,
StringContainCheck,
VirtualNetworkPreparer,
LocalContextScenarioTest,
live_only)
from azure.cli.testsdk.preparers import (
AbstractPreparer,
SingleValueReplacer)
from .conftest import resource_random_name
# Constants
SERVER_NAME_PREFIX = 'azuredbclitest-'  # prefix for generated flexible-server names
SERVER_NAME_MAX_LENGTH = 20  # max length passed to the AbstractPreparer name generator
class RdbmsScenarioTest(ScenarioTest):
    """Scenario-test base that generates stable, replayable resource names."""

    def create_random_name(self, prefix, length):
        """Return a resource name for this test run.

        Playback always sees the deterministic counter-based moniker; during a
        live recording a human-readable name (prefix + shared random token +
        trimmed test-class name) is used instead and registered so recordings
        replace it with the moniker.
        """
        self.test_resources_count += 1
        moniker = '{}{:06}'.format(prefix, self.test_resources_count)
        if not self.in_recording:
            return moniker
        # __name__[22:] strips the long common class-name prefix; cap at 40
        # chars and lower-case to satisfy server-name rules.
        live_name = (prefix + resource_random_name + '-' + type(self).__name__[22:])[:40].lower()
        self.name_replacer.register_name_pair(live_name, moniker)
        return live_name
class ServerPreparer(AbstractPreparer, SingleValueReplacer):
    """Preparer that provisions a flexible server and hands its name to the test."""

    def __init__(self, engine_type, location, engine_parameter_name='database_engine',
                 name_prefix=SERVER_NAME_PREFIX, parameter_name='server',
                 resource_group_parameter_name='resource_group'):
        super(ServerPreparer, self).__init__(name_prefix, SERVER_NAME_MAX_LENGTH)
        from azure.cli.core.mock import DummyCli
        self.cli_ctx = DummyCli()
        # Which engine ('mysql' / 'postgres') to create and under which kwarg
        # names the test receives the server and engine.
        self.engine_type = engine_type
        self.engine_parameter_name = engine_parameter_name
        self.location = location
        self.parameter_name = parameter_name
        self.resource_group_parameter_name = resource_group_parameter_name

    def create_resource(self, name, **kwargs):
        """Create the server (no public access) and expose it as test kwargs."""
        group = self._get_resource_group(**kwargs)
        command = 'az {} flexible-server create -l {} -g {} -n {} --public-access none'.format(
            self.engine_type, self.location, group, name)
        execute(self.cli_ctx, command)
        return {self.parameter_name: name,
                self.engine_parameter_name: self.engine_type}

    # def remove_resource(self, name, **kwargs):
    #     group = self._get_resource_group(**kwargs)
    #     execute(self.cli_ctx, 'az {} flexible-server delete -g {} -n {} --yes'.format(self.engine_type, group, name))

    def _get_resource_group(self, **kwargs):
        """Return the resource-group name supplied by the group preparer."""
        return kwargs.get(self.resource_group_parameter_name)
class FlexibleServerMgmtScenarioTest(RdbmsScenarioTest):
    """Shared test bodies for flexible-server management commands (create,
    update, restore, start/stop/restart, list, connection strings)."""

    def _test_flexible_server_create(self, database_engine, resource_group, server):
        """Verify a freshly created server shows the per-engine default SKU,
        version and storage settings.

        NOTE(review): assumes database_engine is 'postgres' or 'mysql' —
        any other value leaves tier/sku_name/version/storage_size unbound.
        """
        if self.cli_ctx.local_context.is_on:
            self.cmd('local-context off')
        if database_engine == 'postgres':
            tier = 'GeneralPurpose'
            sku_name = 'Standard_D2s_v3'
            version = '12'
            storage_size = 128
        elif database_engine == 'mysql':
            tier = 'Burstable'
            sku_name = 'Standard_B1ms'
            storage_size = 10
            version = '5.7'
        storage_size_mb = storage_size * 1024  # service reports storage in MB
        backup_retention = 7
        list_checks = [JMESPathCheck('name', server),
                       JMESPathCheck('resourceGroup', resource_group),
                       JMESPathCheck('sku.name', sku_name),
                       JMESPathCheck('sku.tier', tier),
                       JMESPathCheck('version', version),
                       JMESPathCheck('storageProfile.storageMb', storage_size_mb),
                       JMESPathCheck('storageProfile.backupRetentionDays', backup_retention)]
        self.cmd('{} flexible-server show -g {} -n {}'
                 .format(database_engine, resource_group, server), checks=list_checks)
        if database_engine == 'mysql':
            # MySQL creates a default database alongside the server.
            self.cmd('{} flexible-server db show -g {} -s {} -d flexibleserverdb'
                     .format(database_engine, resource_group, server), checks=[JMESPathCheck('name', 'flexibleserverdb')])

    def _test_flexible_server_create_non_default_tiers(self, database_engine, resource_group):
        """Create servers with explicitly chosen non-default tiers/SKUs and
        verify the choice sticks."""
        if database_engine == 'postgres':
            self.cmd('postgres flexible-server create -g {} -l {} -n {} --tier Burstable --sku-name Standard_B1ms --public-access none'
                     .format(resource_group, self.location, self.random_name_1))
            self.cmd('postgres flexible-server show -g {} -n {}'
                     .format(resource_group, self.random_name_1),
                     checks=[JMESPathCheck('sku.tier', 'Burstable'),
                             JMESPathCheck('sku.name', 'Standard_B1ms')])
            self.cmd('postgres flexible-server create -g {} -l {} -n {} --tier MemoryOptimized --sku-name Standard_E2s_v3 --public-access none'
                     .format(resource_group, self.location, self.random_name_2))
            self.cmd('postgres flexible-server show -g {} -n {}'
                     .format(resource_group, self.random_name_2),
                     checks=[JMESPathCheck('sku.tier', 'MemoryOptimized'),
                             JMESPathCheck('sku.name', 'Standard_E2s_v3')])
        elif database_engine == 'mysql':
            self.cmd('mysql flexible-server create -g {} -l {} -n {} --tier GeneralPurpose --sku-name Standard_D2s_v3 --public-access none'
                     .format(resource_group, self.location, self.random_name_1))
            self.cmd('mysql flexible-server show -g {} -n {}'
                     .format(resource_group, self.random_name_1),
                     checks=[JMESPathCheck('sku.tier', 'GeneralPurpose'),
                             JMESPathCheck('sku.name', 'Standard_D2s_v3')])
            self.cmd('mysql flexible-server create -g {} -l {} -n {} --tier MemoryOptimized --sku-name Standard_E2s_v3 --public-access none'
                     .format(resource_group, self.location, self.random_name_2))
            self.cmd('mysql flexible-server show -g {} -n {}'
                     .format(resource_group, self.random_name_2),
                     checks=[JMESPathCheck('sku.tier', 'MemoryOptimized'),
                             JMESPathCheck('sku.name', 'Standard_E2s_v3')])

    def _test_flexible_server_create_different_version(self, database_engine, resource_group):
        """Create a Postgres server pinned to version 11 (no-op for MySQL)."""
        if database_engine == 'postgres':
            self.cmd('postgres flexible-server create -g {} -n {} -l {} --version 11 --public-access none'
                     .format(resource_group, self.random_name_3, self.location))
            self.cmd('postgres flexible-server show -g {} -n {}'
                     .format(resource_group, self.random_name_3),
                     checks=[JMESPathCheck('version', 11)])

    def _test_flexible_server_create_select_zone(self, database_engine, resource_group):
        """Create a Postgres server pinned to availability zone 1 (no-op for MySQL)."""
        if database_engine == 'postgres':
            self.cmd('postgres flexible-server create -g {} -l {} -n {} --zone 1 --public-access none'
                     .format(resource_group, self.location, self.random_name_4))
            self.cmd('postgres flexible-server show -g {} -n {}'
                     .format(resource_group, self.random_name_4),
                     checks=[JMESPathCheck('availabilityZone', 1)])

    def _test_flexible_server_update_password(self, database_engine, resource_group, server):
        """Update the admin password (only checks the command succeeds)."""
        self.cmd('{} flexible-server update -g {} -n {} -p randompw321##@!'
                 .format(database_engine, resource_group, server))

    def _test_flexible_server_update_storage(self, database_engine, resource_group, server):
        """Grow storage to 256 GB and verify the reported MB value."""
        self.cmd('{} flexible-server update -g {} -n {} --storage-size 256'
                 .format(database_engine, resource_group, server),
                 checks=[JMESPathCheck('storageProfile.storageMb', 256 * 1024)])

    def _test_flexible_server_update_backup_retention(self, database_engine, resource_group, server):
        """Set backup retention to 17 days and verify."""
        self.cmd('{} flexible-server update -g {} -n {} --backup-retention {}'
                 .format(database_engine, resource_group, server, 17),
                 checks=[JMESPathCheck('storageProfile.backupRetentionDays', 17)])

    def _test_flexible_server_update_scale_up(self, database_engine, resource_group, server):
        # Scale up to a larger, engine-appropriate tier/SKU and verify.
        if database_engine == 'postgres':
            tier = 'MemoryOptimized'
            sku_name = 'Standard_E16s_v3'
        elif database_engine == 'mysql':
            tier = 'GeneralPurpose'
            sku_name = 'Standard_D16s_v3'
        self.cmd('{} flexible-server update -g {} -n {} --tier {} --sku-name {}'
                 .format(database_engine, resource_group, server, tier, sku_name),
                 checks=[JMESPathCheck('sku.tier', tier),
                         JMESPathCheck('sku.name', sku_name)])

    def _test_flexible_server_update_scale_down(self, database_engine, resource_group, server):
        # Scale down to a smaller SKU in the same tier family and verify.
        if database_engine == 'postgres':
            tier = 'MemoryOptimized'
            sku_name = 'Standard_E2s_v3'
        elif database_engine == 'mysql':
            tier = 'GeneralPurpose'
            sku_name = 'Standard_D2s_v3'
        self.cmd('{} flexible-server update -g {} -n {} --tier {} --sku-name {}'
                 .format(database_engine, resource_group, server, tier, sku_name),
                 checks=[JMESPathCheck('sku.tier', tier),
                         JMESPathCheck('sku.name', sku_name)])

    def _test_flexible_server_update_mmw(self, database_engine, resource_group, server):
        """Set the maintenance window to Monday 01:30 and verify each field."""
        self.cmd('{} flexible-server update -g {} -n {} --maintenance-window Mon:1:30'
                 .format(database_engine, resource_group, server),
                 checks=[JMESPathCheck('maintenanceWindow.dayOfWeek', 1),
                         JMESPathCheck('maintenanceWindow.startHour', 1),
                         JMESPathCheck('maintenanceWindow.startMinute', 30)])

    def _test_flexible_server_update_tag(self, database_engine, resource_group, server):
        """Apply a tag via update and verify it round-trips."""
        self.cmd('{} flexible-server update -g {} -n {} --tags key=3'
                 .format(database_engine, resource_group, server),
                 checks=[JMESPathCheck('tags.key', '3')])

    def _test_flexible_server_restore(self, database_engine, resource_group, server):
        """Point-in-time restore to a new server 40 minutes in the past.

        Sleeps 20 minutes first — presumably to let the source server
        accumulate restorable backups (live runs only).
        """
        time.sleep(20 * 60)
        restore_server = 'restore-' + server[:50]
        restore_time = (datetime.utcnow() - timedelta(minutes=40)).replace(tzinfo=tzutc()).isoformat()
        if database_engine == 'postgres':
            # Postgres also supports choosing the target availability zone.
            self.cmd('{} flexible-server restore -g {} --name {} --source-server {} --restore-time {} --zone 2'
                     .format(database_engine, resource_group, restore_server, server, restore_time),
                     checks=[JMESPathCheck('name', restore_server),
                             JMESPathCheck('resourceGroup', resource_group),
                             JMESPathCheck('availabilityZone', 2)])
        else:
            self.cmd('{} flexible-server restore -g {} --name {} --source-server {} --restore-time {}'
                     .format(database_engine, resource_group, restore_server, server, restore_time),
                     checks=[JMESPathCheck('name', restore_server),
                             JMESPathCheck('resourceGroup', resource_group)])

    def _test_flexible_server_restart(self, database_engine, resource_group, server):
        """Restart the server; command returns no payload."""
        self.cmd('{} flexible-server restart -g {} -n {}'
                 .format(database_engine, resource_group, server), checks=NoneCheck())

    def _test_flexible_server_stop(self, database_engine, resource_group, server):
        """Stop the server; command returns no payload."""
        self.cmd('{} flexible-server stop -g {} -n {}'
                 .format(database_engine, resource_group, server), checks=NoneCheck())

    def _test_flexible_server_start(self, database_engine, resource_group, server):
        """Start the server; command returns no payload."""
        self.cmd('{} flexible-server start -g {} -n {}'
                 .format(database_engine, resource_group, server), checks=NoneCheck())

    def _test_flexible_server_list(self, database_engine, resource_group):
        """List servers in the group; only the result type is asserted."""
        self.cmd('{} flexible-server list -g {}'.format(database_engine, resource_group),
                 checks=[JMESPathCheck('type(@)', 'array')])

    def _test_flexible_server_connection_string(self, database_engine, server):
        """Verify show-connection-string returns entries for every client stack."""
        connection_string = self.cmd('{} flexible-server show-connection-string -s {}'
                                     .format(database_engine, server)).get_output_in_json()
        self.assertIn('jdbc', connection_string['connectionStrings'])
        self.assertIn('node.js', connection_string['connectionStrings'])
        self.assertIn('php', connection_string['connectionStrings'])
        self.assertIn('python', connection_string['connectionStrings'])
        self.assertIn('ado.net', connection_string['connectionStrings'])

    def _test_flexible_server_list_skus(self, database_engine, location):
        """List available SKUs in a location; only the result type is asserted."""
        self.cmd('{} flexible-server list-skus -l {}'.format(database_engine, location),
                 checks=[JMESPathCheck('type(@)', 'array')])
class FlexibleServerIopsMgmtScenarioTest(RdbmsScenarioTest):
    """Test bodies for IOPS clamping behaviour on create and SKU scale up/down.

    The asserted IOPS numbers (640, 390, 600, 900, 6400, ...) encode the
    service's clamping rules for the given SKU/storage combinations —
    presumably min(requested, sku max) with a free-IOPS floor; verify against
    current service limits if they drift.
    """

    def _test_flexible_server_iops_create(self, database_engine, resource_group, server_1, server_2, server_3):
        """Create three servers with IOPS requests around the SKU limits and
        check the value the service actually grants."""
        if self.cli_ctx.local_context.is_on:
            self.cmd('local-context off')
        # IOPS passed is beyond limit of max allowed by SKU and free storage
        self.cmd('{} flexible-server create --public-access none -g {} -n {} -l {} --iops 350 --storage-size 200 --tier Burstable --sku-name Standard_B1ms'
                 .format(database_engine, resource_group, server_1, self.location))
        self.cmd('{} flexible-server show -g {} -n {}'.format(database_engine, resource_group, server_1),
                 checks=[JMESPathCheck('storageProfile.storageIops', 640)])
        # IOPS passed is within limit of max allowed by SKU but smaller than default
        self.cmd('{} flexible-server create --public-access none -g {} -n {} -l {} --iops 50 --storage-size 30 --tier Burstable --sku-name Standard_B1ms'
                 .format(database_engine, resource_group, server_2, self.location))
        self.cmd('{} flexible-server show -g {} -n {}'.format(database_engine, resource_group, server_2),
                 checks=[JMESPathCheck('storageProfile.storageIops', 390)])
        # IOPS passed is within limit of max allowed by SKU and bigger than default
        self.cmd('{} flexible-server create --public-access none -g {} -n {} -l {} --iops 600 --storage-size 50 --tier Burstable --sku-name Standard_B1ms'
                 .format(database_engine, resource_group, server_3, self.location))
        self.cmd('{} flexible-server show -g {} -n {}'.format(database_engine, resource_group, server_3),
                 checks=[JMESPathCheck('storageProfile.storageIops', 600)])

    def _test_flexible_server_iops_scale_up(self, database_engine, resource_group, server_1, server_2, server_3):
        """Upgrade SKUs and check IOPS re-clamping against the new limits."""
        # SKU upgraded and IOPS value set smaller than free iops, max iops for the sku
        self.cmd('{} flexible-server update -g {} -n {} --tier GeneralPurpose --sku-name Standard_D8s_v3 --iops 400'
                 .format(database_engine, resource_group, server_1),
                 checks=[JMESPathCheck('storageProfile.storageIops', 900)])
        # SKU upgraded and IOPS value set bigger than max iops for the sku
        self.cmd('{} flexible-server update -g {} -n {} --tier GeneralPurpose --sku-name Standard_D4s_v3 --iops 7000'
                 .format(database_engine, resource_group, server_2),
                 checks=[JMESPathCheck('storageProfile.storageIops', 6400)])
        # SKU upgraded and IOPS value set lower than max iops for the sku but bigger than free iops
        self.cmd('{} flexible-server update -g {} -n {} --tier GeneralPurpose --sku-name Standard_D8s_v3 --storage-size 200 --iops 1000'
                 .format(database_engine, resource_group, server_3),
                 checks=[JMESPathCheck('storageProfile.storageIops', 1000)])

    def _test_flexible_server_iops_scale_down(self, database_engine, resource_group, server_1, server_2, server_3):
        """Downgrade SKUs and check IOPS re-clamping against the new limits."""
        # SKU downgraded and free iops is bigger than free iops
        self.cmd('{} flexible-server update -g {} -n {} --tier GeneralPurpose --sku-name Standard_D2s_v3 --storage-size 300'
                 .format(database_engine, resource_group, server_1),
                 checks=[JMESPathCheck('storageProfile.storageIops', 1200)])
        # SKU downgraded and IOPS not specified but bigger than new tier's max IOPS
        self.cmd('{} flexible-server update -g {} -n {} --tier GeneralPurpose --sku-name Standard_D2s_v3'
                 .format(database_engine, resource_group, server_2),
                 checks=[JMESPathCheck('storageProfile.storageIops', 3200)])
        # SKU downgraded and IOPS specified no exception case.
        self.cmd('{} flexible-server update -g {} -n {} --tier GeneralPurpose --sku-name Standard_D2s_v3 --iops 1100'
                 .format(database_engine, resource_group, server_3),
                 checks=[JMESPathCheck('storageProfile.storageIops', 1100)])
class FlexibleServerHighAvailabilityMgmt(RdbmsScenarioTest):
    def _test_flexible_server_high_availability_create(self, database_engine, resource_group, server):
        """Create a server with HA enabled and verify haEnabled reports Enabled."""
        self.cmd('{} flexible-server create -g {} -l {} -n {} --high-availability Enabled --tier GeneralPurpose --sku-name Standard_D2s_v3 --public-access none'
                 .format(database_engine, resource_group, self.location, server))
        self.cmd('{} flexible-server show -g {} -n {}'
                 .format(database_engine, resource_group, server),
                 checks=[JMESPathCheck('haEnabled', 'Enabled')])
    def _test_flexible_server_high_availability_disable(self, database_engine, resource_group, server):
        """Disable HA, verify, then wait — presumably for standby teardown to finish."""
        self.cmd('{} flexible-server update -g {} -n {} --high-availability Disabled'
                 .format(database_engine, resource_group, server),
                 checks=[JMESPathCheck('haEnabled', 'Disabled')])
        time.sleep(3 * 60)
    def _test_flexible_server_high_availability_enable(self, database_engine, resource_group, server):
        """Re-enable HA via update and verify haEnabled reports Enabled."""
        self.cmd('{} flexible-server update -g {} -n {} --high-availability Enabled'
                 .format(database_engine, resource_group, server),
                 checks=[JMESPathCheck('haEnabled', 'Enabled')])
    def _test_flexible_server_high_availability_update_scale_up(self, database_engine, resource_group, server):
        """Scale an HA server up to Standard_D4s_v3 (result not asserted)."""
        # if database_engine == 'postgres':
        #     sku_name = 'Standard_D4s_v3'
        # else:
        #     sku_name = 'Standard_D4ds_v4'
        # self.cmd('{} flexible-server update -g {} -n {} --tier GeneralPurpose --sku-name {}'
        #          .format(database_engine, resource_group, server, sku_name),
        #          checks=[JMESPathCheck('sku.name', sku_name),
        #                  JMESPathCheck('sku.tier', 'GeneralPurpose')])
        self.cmd('{} flexible-server update -g {} -n {} --tier GeneralPurpose --sku-name Standard_D4s_v3'
                 .format(database_engine, resource_group, server))
def _test_flexible_server_high_availability_update_parameter(self, database_engine, resource_group, server):
if database_engine == 'mysql':
parameter_name = 'wait_timeout'
value = '30000'
elif database_engine == 'postgres':
parameter_name = 'lock_timeout'
value = '2000'
source = 'user-override'
self.cmd('{} flexible-server parameter set --name {} -v {} --source {} -s {} -g {}'.format(database_engine, parameter_name, value, source, server, resource_group),
checks=[JMESPathCheck('value', value),
JMESPathCheck('source', source)])
def _test_flexible_server_high_availability_restart(self, database_engine, resource_group, server):
self.cmd('{} flexible-server restart -g {} -n {}'
.format(database_engine, resource_group, server), checks=NoneCheck())
def _test_flexible_server_high_availability_stop(self, database_engine, resource_group, server):
self.cmd('{} flexible-server stop -g {} -n {}'
.format(database_engine, resource_group, server), checks=NoneCheck())
def _test_flexible_server_high_availability_start(self, database_engine, resource_group, server):
self.cmd('{} flexible-server start -g {} -n {}'
.format(database_engine, resource_group, server), checks=NoneCheck())
def _test_flexible_server_high_availability_delete(self, database_engine, resource_group, server):
self.cmd('{} flexible-server delete -g {} -n {} --yes'
.format(database_engine, resource_group, server), checks=NoneCheck())
def _test_flexible_server_high_availability_restore(self, database_engine, resource_group, server):
time.sleep(40 * 60)
restore_server = 'restore-' + server[:55]
restore_time = (datetime.utcnow() - timedelta(minutes=40)).replace(tzinfo=tzutc()).isoformat()
if database_engine == 'postgres':
self.cmd('{} flexible-server restore -g {} --name {} --source-server {} --restore-time {} --zone 2'
.format(database_engine, resource_group, restore_server, server, restore_time),
checks=[JMESPathCheck('name', restore_server),
JMESPathCheck('resourceGroup', resource_group),
JMESPathCheck('availabilityZone', 2)])
else:
self.cmd('{} flexible-server restore -g {} --name {} --source-server {} --restore-time {}'
.format(database_engine, resource_group, restore_server, server, restore_time),
checks=[JMESPathCheck('name', restore_server),
JMESPathCheck('resourceGroup', resource_group)])
def _test_flexible_server_high_availability_delete(self, resource_group):
self.cmd('az group delete --name {} --yes --no-wait'.format(resource_group), checks=NoneCheck())
class FlexibleServerVnetServerMgmtScenarioTest(RdbmsScenarioTest):
    """Scenario steps for flexible servers provisioned inside a virtual network."""

    def _test_flexible_server_vnet_server_create(self, database_engine, resource_group, server):
        # Postgres additionally needs a private DNS zone at create time.
        if database_engine == 'postgres':
            self.cmd(f'{database_engine} flexible-server create -g {resource_group} -n {server} '
                     f'-l {self.location} --private-dns-zone testdnsname.private.postgres.database.azure.com')
        elif database_engine == 'mysql':
            self.cmd(f'{database_engine} flexible-server create -g {resource_group} -n {server} -l {self.location}')

        shown = self.cmd(f'{database_engine} flexible-server show -g {resource_group} -n {server}').get_output_in_json()
        expected_subnet_id = (f'/subscriptions/{self.get_subscription_id()}/resourceGroups/{resource_group}'
                              f'/providers/Microsoft.Network/virtualNetworks/Vnet{server}/subnets/Subnet{server}')
        self.assertEqual(shown['delegatedSubnetArguments']['subnetArmResourceId'], expected_subnet_id)
        if database_engine == 'postgres':
            self.assertIn('testdnsname.private.postgres.database.azure.com',
                          shown['privateDnsZoneArguments']['privateDnsZoneArmResourceId'])

    def _test_flexible_server_vnet_ha_server_create(self, database_engine, resource_group, server):
        # HA server inside a vnet: the delegated subnet must still be wired up.
        self.cmd(f'{database_engine} flexible-server create -g {resource_group} -n {server} '
                 f'-l {self.location} --tier GeneralPurpose --sku-name Standard_D2s_v3 --high-availability Enabled')
        shown = self.cmd(f'{database_engine} flexible-server show -g {resource_group} -n {server}',
                         checks=[JMESPathCheck('haEnabled', 'Enabled')]).get_output_in_json()
        expected_subnet_id = (f'/subscriptions/{self.get_subscription_id()}/resourceGroups/{resource_group}'
                              f'/providers/Microsoft.Network/virtualNetworks/Vnet{server}/subnets/Subnet{server}')
        self.assertEqual(shown['delegatedSubnetArguments']['subnetArmResourceId'], expected_subnet_id)

    def _test_flexible_server_vnet_server_update_scale_up(self, database_engine, resource_group, server):
        self.cmd(f'{database_engine} flexible-server update -g {resource_group} -n {server} '
                 f'--tier GeneralPurpose --sku-name Standard_D8s_v3')

    def _test_flexible_server_vnet_server_restore(self, database_engine, resource_group, server, restore_server):
        # Point-in-time restore 40 minutes back; postgres pins the zone.
        restore_time = (datetime.utcnow() - timedelta(minutes=40)).replace(tzinfo=tzutc()).isoformat()
        if database_engine == 'postgres':
            self.cmd(f'{database_engine} flexible-server restore -g {resource_group} --name {restore_server} '
                     f'--source-server {server} --restore-time {restore_time} --zone 1',
                     checks=[JMESPathCheck('name', restore_server),
                             JMESPathCheck('resourceGroup', resource_group),
                             JMESPathCheck('availabilityZone', 1)])
        elif database_engine == 'mysql':
            self.cmd(f'{database_engine} flexible-server restore -g {resource_group} --name {restore_server} '
                     f'--source-server {server} --restore-time {restore_time}',
                     checks=[JMESPathCheck('name', restore_server),
                             JMESPathCheck('resourceGroup', resource_group)])

    def _test_flexible_server_vnet_server_delete(self, database_engine, resource_group, server, restore_server=None):
        self.cmd(f'{database_engine} flexible-server delete -g {resource_group} -n {server} --yes',
                 checks=NoneCheck())
        if restore_server is not None:
            self.cmd(f'{database_engine} flexible-server delete -g {resource_group} -n {restore_server} --yes',
                     checks=NoneCheck())
        # Wait until the vnet can be detached from the deleted server.
        time.sleep(20 * 60)

    def _test_flexible_server_vnet_server_mgmt_delete(self, resource_group):
        self.cmd(f'az group delete --name {resource_group} --yes --no-wait', checks=NoneCheck())
class FlexibleServerProxyResourceMgmtScenarioTest(RdbmsScenarioTest):
    """Scenario steps for server sub-resources: firewall rules, parameters and databases."""

    def _test_firewall_rule_mgmt(self, database_engine, resource_group, server):
        rule_name = 'firewall_test_rule'
        start_ip = '10.10.10.10'
        end_ip = '12.12.12.12'
        rule_checks = [JMESPathCheck('name', rule_name),
                       JMESPathCheck('endIpAddress', end_ip),
                       JMESPathCheck('startIpAddress', start_ip)]
        # Create and show the first rule.
        self.cmd(f'{database_engine} flexible-server firewall-rule create -g {resource_group} --name {server} '
                 f'--rule-name {rule_name} --start-ip-address {start_ip} --end-ip-address {end_ip} ',
                 checks=rule_checks)
        self.cmd(f'{database_engine} flexible-server firewall-rule show -g {resource_group} --name {server} '
                 f'--rule-name {rule_name} ',
                 checks=rule_checks)
        # Update start, then end, of the address range.
        new_start_ip = '9.9.9.9'
        self.cmd(f'{database_engine} flexible-server firewall-rule update -g {resource_group} --name {server} '
                 f'--rule-name {rule_name} --start-ip-address {new_start_ip}',
                 checks=[JMESPathCheck('startIpAddress', new_start_ip)])
        new_end_ip = '13.13.13.13'
        self.cmd(f'{database_engine} flexible-server firewall-rule update -g {resource_group} --name {server} '
                 f'--rule-name {rule_name} --end-ip-address {new_end_ip}')
        # Second rule with the original range.
        second_rule_name = 'firewall_test_rule2'
        second_rule_checks = [JMESPathCheck('name', second_rule_name),
                              JMESPathCheck('endIpAddress', end_ip),
                              JMESPathCheck('startIpAddress', start_ip)]
        self.cmd(f'{database_engine} flexible-server firewall-rule create -g {resource_group} -n {server} '
                 f'--rule-name {second_rule_name} --start-ip-address {start_ip} --end-ip-address {end_ip} ',
                 checks=second_rule_checks)
        self.cmd(f'{database_engine} flexible-server firewall-rule list -g {resource_group} -n {server}',
                 checks=[JMESPathCheck('length(@)', 2)])
        # Delete rules one at a time, verifying the remaining count.
        self.cmd(f'{database_engine} flexible-server firewall-rule delete --rule-name {rule_name} '
                 f'-g {resource_group} --name {server} --yes', checks=NoneCheck())
        self.cmd(f'{database_engine} flexible-server firewall-rule list -g {resource_group} --name {server}',
                 checks=[JMESPathCheck('length(@)', 1)])
        self.cmd(f'{database_engine} flexible-server firewall-rule delete -g {resource_group} -n {server} '
                 f'--rule-name {second_rule_name} --yes')
        self.cmd(f'{database_engine} flexible-server firewall-rule list -g {resource_group} -n {server}',
                 checks=NoneCheck())

    def _test_parameter_mgmt(self, database_engine, resource_group, server):
        self.cmd(f'{database_engine} flexible-server parameter list -g {resource_group} -s {server}',
                 checks=[JMESPathCheck('type(@)', 'array')])
        # Engine-specific parameter; only 'mysql'/'postgres' are expected here.
        if database_engine == 'mysql':
            parameter_name, default_value, value = 'wait_timeout', '28800', '30000'
        elif database_engine == 'postgres':
            parameter_name, default_value, value = 'lock_timeout', '0', '2000'
        # A fresh server reports the system default.
        source = 'system-default'
        self.cmd(f'{database_engine} flexible-server parameter show --name {parameter_name} '
                 f'-g {resource_group} -s {server}',
                 checks=[JMESPathCheck('defaultValue', default_value),
                         JMESPathCheck('source', source)])
        # After an explicit set, the source flips to user-override.
        source = 'user-override'
        self.cmd(f'{database_engine} flexible-server parameter set --name {parameter_name} -v {value} '
                 f'--source {source} -s {server} -g {resource_group}',
                 checks=[JMESPathCheck('value', value),
                         JMESPathCheck('source', source)])

    def _test_database_mgmt(self, database_engine, resource_group, server):
        database_name = 'flexibleserverdbtest'
        self.cmd(f'{database_engine} flexible-server db create -g {resource_group} -s {server} -d {database_name}',
                 checks=[JMESPathCheck('name', database_name)])
        self.cmd(f'{database_engine} flexible-server db show -g {resource_group} -s {server} -d {database_name}',
                 checks=[
                     JMESPathCheck('name', database_name),
                     JMESPathCheck('resourceGroup', resource_group)])
        self.cmd(f'{database_engine} flexible-server db list -g {resource_group} -s {server} ',
                 checks=[JMESPathCheck('type(@)', 'array')])
        self.cmd(f'{database_engine} flexible-server db delete -g {resource_group} -s {server} -d {database_name} --yes',
                 checks=NoneCheck())

    def _test_flexible_server_proxy_resource_mgmt_delete(self, resource_group):
        self.cmd(f'az group delete --name {resource_group} --yes --no-wait', checks=NoneCheck())
class FlexibleServerValidatorScenarioTest(ScenarioTest):
    """Client-side validation: invalid arguments must fail, valid ones must provision."""

    def _test_mgmt_validator(self, database_engine, resource_group):
        RANDOM_VARIABLE_MAX_LENGTH = 30
        if database_engine == 'postgres':
            location = self.postgres_location
        elif database_engine == 'mysql':
            location = self.mysql_location
        invalid_version = self.create_random_name('version', RANDOM_VARIABLE_MAX_LENGTH)
        invalid_sku_name = self.create_random_name('sku_name', RANDOM_VARIABLE_MAX_LENGTH)
        invalid_tier = self.create_random_name('tier', RANDOM_VARIABLE_MAX_LENGTH)
        valid_tier = 'GeneralPurpose'
        invalid_backup_retention = 1

        # Create-time validation failures.
        self.cmd(f'{database_engine} flexible-server create -g {resource_group} -l {location} '
                 f'--tier {invalid_tier} --public-access none', expect_failure=True)
        self.cmd(f'{database_engine} flexible-server create -g {resource_group} -l {location} '
                 f'--version {invalid_version} --public-access none', expect_failure=True)
        self.cmd(f'{database_engine} flexible-server create -g {resource_group} -l {location} '
                 f'--tier {valid_tier} --sku-name {invalid_sku_name} --public-access none', expect_failure=True)
        self.cmd(f'{database_engine} flexible-server create -g {resource_group} -l {location} '
                 f'--backup-retention {invalid_backup_retention} --public-access none', expect_failure=True)
        if database_engine == 'postgres':
            invalid_storage_size = 60
        elif database_engine == 'mysql':
            invalid_storage_size = 999999
        self.cmd(f'{database_engine} flexible-server create -g {resource_group} -l {location} '
                 f'--storage-size {invalid_storage_size} --public-access none', expect_failure=True)

        # A fully-specified valid create must succeed and round-trip through show.
        server = self.create_random_name(SERVER_NAME_PREFIX, RANDOM_VARIABLE_MAX_LENGTH)
        if database_engine == 'postgres':
            tier = 'MemoryOptimized'
            version = 12
            sku_name = 'Standard_E2s_v3'
            storage_size = 64
        elif database_engine == 'mysql':
            tier = 'GeneralPurpose'
            version = 5.7
            sku_name = 'Standard_D2s_v3' if location == 'eastus2euap' else 'Standard_D2ds_v4'
            storage_size = 20
        storage_size_mb = storage_size * 1024
        backup_retention = 10
        expected_checks = [JMESPathCheck('name', server),
                           JMESPathCheck('resourceGroup', resource_group),
                           JMESPathCheck('sku.name', sku_name),
                           JMESPathCheck('sku.tier', tier),
                           JMESPathCheck('version', version),
                           JMESPathCheck('storageProfile.storageMb', storage_size_mb),
                           JMESPathCheck('storageProfile.backupRetentionDays', backup_retention)]
        self.cmd(f'{database_engine} flexible-server create -g {resource_group} -n {server} -l {location} '
                 f'--tier {tier} --version {version} --sku-name {sku_name} '
                 f'--storage-size {storage_size} --backup-retention {backup_retention} --public-access none')
        self.cmd(f'{database_engine} flexible-server show -g {resource_group} -n {server}', checks=expected_checks)

        # Update-time validation failures, then clean up.
        invalid_storage_size_small = storage_size - 1
        self.cmd(f'{database_engine} flexible-server update -g {resource_group} -n {server} '
                 f'--tier {invalid_tier}', expect_failure=True)
        self.cmd(f'{database_engine} flexible-server update -g {resource_group} -n {server} '
                 f'--tier {valid_tier} --sku-name {invalid_sku_name}', expect_failure=True)
        self.cmd(f'{database_engine} flexible-server update -g {resource_group} -n {server} '
                 f'--storage-size {invalid_storage_size_small}', expect_failure=True)
        self.cmd(f'{database_engine} flexible-server update -g {resource_group} -n {server} '
                 f'--backup-retention {invalid_backup_retention}', expect_failure=True)
        self.cmd(f'{database_engine} flexible-server delete -g {resource_group} -n {server} --yes',
                 checks=NoneCheck())
class FlexibleServerReplicationMgmtScenarioTest(RdbmsScenarioTest):  # pylint: disable=too-few-public-methods
    """Scenario steps for read-replica lifecycle management."""

    def _test_flexible_server_replica_create(self, database_engine, resource_group, master_server, replicas):
        # The master must not already be part of a replication relationship.
        master = self.cmd(f'{database_engine} flexible-server show -g {resource_group} --name {master_server} ',
                          checks=[JMESPathCheck('replicationRole', 'None')]).get_output_in_json()
        self.cmd(f'{database_engine} flexible-server replica create -g {resource_group} '
                 f'--replica-name {replicas[0]} --source-server {master_server}',
                 checks=[
                     JMESPathCheck('name', replicas[0]),
                     JMESPathCheck('resourceGroup', resource_group),
                     JMESPathCheck('sku.tier', master['sku']['tier']),
                     JMESPathCheck('sku.name', master['sku']['name']),
                     JMESPathCheck('replicationRole', 'Replica'),
                     JMESPathCheck('sourceServerId', master['id']),
                     JMESPathCheck('replicaCapacity', '0')])
        # Replica provisioning continues after the command returns.
        time.sleep(20 * 60)

    def _test_flexible_server_replica_list(self, database_engine, resource_group, master_server):
        self.cmd(f'{database_engine} flexible-server replica list -g {resource_group} --name {master_server}',
                 checks=[JMESPathCheck('length(@)', 1)])

    def _test_flexible_server_replica_stop(self, database_engine, resource_group, master_server, replicas):
        master = self.cmd(f'{database_engine} flexible-server show -g {resource_group} --name {master_server} ',
                          checks=[JMESPathCheck('replicationRole', 'Source')]).get_output_in_json()
        self.cmd(f'{database_engine} flexible-server replica stop-replication -g {resource_group} '
                 f'--name {replicas[0]} --yes',
                 checks=[
                     JMESPathCheck('name', replicas[0]),
                     JMESPathCheck('resourceGroup', resource_group),
                     JMESPathCheck('replicationRole', 'None'),
                     JMESPathCheck('sourceServerId', ''),
                     JMESPathCheck('replicaCapacity', master['replicaCapacity'])])
        # After stopping replication the master reverts to a normal server.
        self.cmd(f'{database_engine} flexible-server show -g {resource_group} --name {master_server}',
                 checks=[
                     JMESPathCheck('replicationRole', 'None'),
                     JMESPathCheck('sourceServerId', ''),
                     JMESPathCheck('replicaCapacity', master['replicaCapacity'])])

    def _test_flexible_server_replica_delete_source(self, database_engine, resource_group, master_server, replicas):
        master = self.cmd(f'{database_engine} flexible-server show -g {resource_group} --name {master_server} ',
                          checks=[JMESPathCheck('replicationRole', 'None')]).get_output_in_json()
        self.cmd(f'{database_engine} flexible-server replica create -g {resource_group} '
                 f'--replica-name {replicas[1]} --source-server {master_server}',
                 checks=[
                     JMESPathCheck('name', replicas[1]),
                     JMESPathCheck('resourceGroup', resource_group),
                     JMESPathCheck('sku.name', master['sku']['name']),
                     JMESPathCheck('replicationRole', 'Replica'),
                     JMESPathCheck('sourceServerId', master['id']),
                     JMESPathCheck('replicaCapacity', '0')])
        # Deleting the source detaches the replica, which becomes standalone.
        self.cmd(f'{database_engine} flexible-server delete -g {resource_group} --name {master_server} --yes',
                 checks=NoneCheck())
        self.cmd(f'{database_engine} flexible-server show -g {resource_group} --name {replicas[1]}',
                 checks=[
                     JMESPathCheck('replicationRole', 'None'),
                     JMESPathCheck('sourceServerId', ''),
                     JMESPathCheck('replicaCapacity', master['replicaCapacity'])])

    def _test_flexible_server_replica_delete(self, database_engine, resource_group, replicas):
        self.cmd(f'{database_engine} flexible-server delete -g {resource_group} --name {replicas[0]} --yes',
                 checks=NoneCheck())
        self.cmd(f'{database_engine} flexible-server delete -g {resource_group} --name {replicas[1]} --yes',
                 checks=NoneCheck())
        self.cmd(f'az group delete --name {resource_group} --yes --no-wait', checks=NoneCheck())
class FlexibleServerVnetMgmtScenarioTest(ScenarioTest):
# Covers every supported way of supplying vnet/subnet arguments to
# `flexible-server create`: existing/non-existing subnet IDs, vnet names,
# vnet+subnet names, and cross-resource-group subnet IDs.
def _test_flexible_server_vnet_mgmt_existing_supplied_subnetid(self, database_engine, resource_group):
# flexible-server create
if self.cli_ctx.local_context.is_on:
self.cmd('local-context off')
if database_engine == 'postgres':
location = self.postgres_location
elif database_engine == 'mysql':
location = self.mysql_location
server = 'testvnetserver10' + database_engine
# Scenario : Provision a server with supplied Subnet ID that exists, where the subnet is not delegated
# NOTE(review): '{rg}' and '{vnet}' rely on self.kwargs being populated by the
# test preparer/decorator, which is not visible in this chunk — confirm.
subnet_id = self.cmd('network vnet subnet show -g {rg} -n default --vnet-name {vnet}').get_output_in_json()['id']
# create server - Delegation should be added.
self.cmd('{} flexible-server create -g {} -n {} --subnet {} -l {}'
.format(database_engine, resource_group, server, subnet_id, location))
# flexible-server show to validate delegation is added to both the created server
show_result_1 = self.cmd('{} flexible-server show -g {} -n {}'
.format(database_engine, resource_group, server)).get_output_in_json()
self.assertEqual(show_result_1['delegatedSubnetArguments']['subnetArmResourceId'], subnet_id)
# delete server
self.cmd('{} flexible-server delete -g {} -n {} --yes'.format(database_engine, resource_group, server),
checks=NoneCheck())
# This is required because the delegations cannot be removed until the server is completely deleted. In the current implementation, there is a delay. Hence, the wait
time.sleep(20 * 60)
# Supplying a subnet ID whose subnet does not exist: the CLI should create it.
def _test_flexible_server_vnet_mgmt_non_existing_supplied_subnetid(self, database_engine, resource_group):
# flexible-server create
if self.cli_ctx.local_context.is_on:
self.cmd('local-context off')
if database_engine == 'postgres':
location = self.postgres_location
elif database_engine == 'mysql':
location = self.mysql_location
vnet_name_2 = 'clitestvnet1'
subnet_name_2 = 'clitestsubnet1'
server = 'testvnetserver2' + database_engine
# Scenario : Provision a server with supplied Subnet ID whose vnet exists, but subnet does not exist and the vnet does not contain any other subnet
# The subnet name is the default created one, not the one in subnet ID
self.cmd('{} flexible-server create -g {} -n {} -l {} --subnet {}'
.format(database_engine, resource_group, server, location, '/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Network/virtualNetworks/{}/subnets/{}'.format(self.get_subscription_id(), resource_group, vnet_name_2, subnet_name_2)))
# flexible-server show to validate delegation is added to both the created server
show_result = self.cmd('{} flexible-server show -g {} -n {}'.format(database_engine, resource_group, server)).get_output_in_json()
self.assertEqual(show_result['delegatedSubnetArguments']['subnetArmResourceId'],
'/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Network/virtualNetworks/{}/subnets/{}'.format(
self.get_subscription_id(), resource_group, vnet_name_2, subnet_name_2))
# Cleanup
self.cmd('{} flexible-server delete -g {} -n {} --yes'.format(database_engine, resource_group, server), checks=NoneCheck())
# This is required because the delegations cannot be removed until the server is completely deleted. In the current implementation, there is a delay. Hence, the wait
time.sleep(20 * 60)
# Supplying only a vnet name (existing in case 1, new in case 2).
def _test_flexible_server_vnet_mgmt_supplied_vnet(self, database_engine, resource_group):
# flexible-server create
if self.cli_ctx.local_context.is_on:
self.cmd('local-context off')
if database_engine == 'postgres':
location = self.postgres_location
elif database_engine == 'mysql':
location = self.mysql_location
vnet_name = 'clitestvnet2'
address_prefix = '10.0.0.0/16'
subnet_prefix_1 = '10.0.0.0/24'
vnet_name_2 = 'clitestvnet3'
# flexible-servers
servers = ['testvnetserver3' + database_engine, 'testvnetserver4' + database_engine]
# Case 1 : Provision a server with supplied Vname that exists.
# create vnet and subnet. When vnet name is supplied, the subnet created will be given the default name.
vnet_result = self.cmd('network vnet create -n {} -g {} -l {} --address-prefix {} --subnet-name {} --subnet-prefix {}'
.format(vnet_name, resource_group, location, address_prefix, 'Subnet' + servers[0], subnet_prefix_1)).get_output_in_json()
# create server - Delegation should be added.
self.cmd('{} flexible-server create -g {} -n {} --vnet {} -l {}'
.format(database_engine, resource_group, servers[0], vnet_result['newVNet']['name'], location))
# Case 2 : Provision a server with a supplied Vname that does not exist.
self.cmd('{} flexible-server create -g {} -n {} --vnet {} -l {}'
.format(database_engine, resource_group, servers[1], vnet_name_2, location))
# flexible-server show to validate delegation is added to both the created server
show_result_1 = self.cmd('{} flexible-server show -g {} -n {}'
.format(database_engine, resource_group, servers[0])).get_output_in_json()
show_result_2 = self.cmd('{} flexible-server show -g {} -n {}'
.format(database_engine, resource_group, servers[1])).get_output_in_json()
self.assertEqual(show_result_1['delegatedSubnetArguments']['subnetArmResourceId'],
'/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Network/virtualNetworks/{}/subnets/{}'.format(
self.get_subscription_id(), resource_group, vnet_name, 'Subnet' + servers[0]))
self.assertEqual(show_result_2['delegatedSubnetArguments']['subnetArmResourceId'],
'/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Network/virtualNetworks/{}/subnets/{}'.format(
self.get_subscription_id(), resource_group, vnet_name_2, 'Subnet' + servers[1]))
# delete all servers
self.cmd('{} flexible-server delete -g {} -n {} --yes'.format(database_engine, resource_group, servers[0]),
checks=NoneCheck())
self.cmd('{} flexible-server delete -g {} -n {} --yes'.format(database_engine, resource_group, servers[1]),
checks=NoneCheck())
# Wait for delegation cleanup after delete (see note in the first scenario).
time.sleep(20 * 60)
# Supplying both vnet name and subnet name (existing in case 1, new in case 2).
def _test_flexible_server_vnet_mgmt_supplied_vname_and_subnetname(self, database_engine, resource_group, virtual_network):
# flexible-server create
if self.cli_ctx.local_context.is_on:
self.cmd('local-context off')
vnet_name_2 = 'clitestvnet6'
if database_engine == 'postgres':
location = self.postgres_location
elif database_engine == 'mysql':
location = self.mysql_location
# flexible-servers
servers = ['testvnetserver5' + database_engine, 'testvnetserver6' + database_engine]
# Case 1 : Provision a server with supplied Vname and subnet name that exists.
# create vnet and subnet. When vnet name is supplied, the subnet created will be given the default name.
# NOTE(review): as above, '{rg}'/'{vnet}' depend on self.kwargs set elsewhere.
subnet_id = self.cmd('network vnet subnet show -g {rg} -n default --vnet-name {vnet}').get_output_in_json()[
'id']
# create server - Delegation should be added.
self.cmd('{} flexible-server create -g {} -n {} --vnet {} -l {} --subnet default'
.format(database_engine, resource_group, servers[0], virtual_network, location))
# Case 2 : Provision a server with a supplied Vname and subnet name that does not exist.
self.cmd('{} flexible-server create -g {} -n {} -l {} --vnet {}'
.format(database_engine, resource_group, servers[1], location, vnet_name_2))
# flexible-server show to validate delegation is added to both the created server
show_result_1 = self.cmd('{} flexible-server show -g {} -n {}'
.format(database_engine, resource_group, servers[0])).get_output_in_json()
show_result_2 = self.cmd('{} flexible-server show -g {} -n {}'
.format(database_engine, resource_group, servers[1])).get_output_in_json()
self.assertEqual(show_result_1['delegatedSubnetArguments']['subnetArmResourceId'], subnet_id)
self.assertEqual(show_result_2['delegatedSubnetArguments']['subnetArmResourceId'],
'/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Network/virtualNetworks/{}/subnets/{}'.format(
self.get_subscription_id(), resource_group, vnet_name_2, 'Subnet' + servers[1]))
# delete all servers
self.cmd('{} flexible-server delete -g {} -n {} --yes'.format(database_engine, resource_group, servers[0]),
checks=NoneCheck())
self.cmd('{} flexible-server delete -g {} -n {} --yes'.format(database_engine, resource_group, servers[1]),
checks=NoneCheck())
# Wait for delegation cleanup after delete (see note in the first scenario).
time.sleep(20 * 60)
# Subnet ID pointing at a different resource group than the server's.
def _test_flexible_server_vnet_mgmt_supplied_subnet_id_in_different_rg(self, database_engine, resource_group_1, resource_group_2):
# flexible-server create
if self.cli_ctx.local_context.is_on:
self.cmd('local-context off')
if database_engine == 'postgres':
location = self.postgres_location
elif database_engine == 'mysql':
location = self.mysql_location
vnet_name = 'clitestvnet7'
subnet_name = 'clitestsubnet7'
address_prefix = '172.0.0.0/16'
subnet_prefix_1 = '172.0.0.0/24'
vnet_name_2 = 'clitestvnet8'
subnet_name_2 = 'clitestsubnet8'
# flexible-servers
servers = ['testvnetserver7' + database_engine, 'testvnetserver8' + database_engine]
# Case 1 : Provision a server with supplied subnetid that exists in a different RG
# create vnet and subnet.
vnet_result = self.cmd(
'network vnet create -n {} -g {} -l {} --address-prefix {} --subnet-name {} --subnet-prefix {}'
.format(vnet_name, resource_group_1, location, address_prefix, subnet_name,
subnet_prefix_1)).get_output_in_json()
# create server - Delegation should be added.
self.cmd('{} flexible-server create -g {} -n {} --subnet {} -l {}'
.format(database_engine, resource_group_2, servers[0], vnet_result['newVNet']['subnets'][0]['id'], location))
# Case 2 : Provision a server with supplied subnetid that has a different RG in the ID but does not exist. The vnet and subnet is then created in the RG of the server
self.cmd('{} flexible-server create -g {} -n {} -l {} --subnet {}'
.format(database_engine, resource_group_2, servers[1], location, '/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Network/virtualNetworks/{}/subnets/{}'.format(
self.get_subscription_id(), resource_group_1, vnet_name_2, subnet_name_2)))
# flexible-server show to validate delegation is added to both the created server
show_result_1 = self.cmd('{} flexible-server show -g {} -n {}'
.format(database_engine, resource_group_2, servers[0])).get_output_in_json()
show_result_2 = self.cmd('{} flexible-server show -g {} -n {}'
.format(database_engine, resource_group_2, servers[1])).get_output_in_json()
self.assertEqual(show_result_1['delegatedSubnetArguments']['subnetArmResourceId'],
'/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Network/virtualNetworks/{}/subnets/{}'.format(
self.get_subscription_id(), resource_group_1, vnet_name, subnet_name))
# NOTE(review): case 2's assertion expects the subnet under resource_group_1
# even though the comment above says it is created in the server's RG — confirm
# which is intended.
self.assertEqual(show_result_2['delegatedSubnetArguments']['subnetArmResourceId'],
'/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Network/virtualNetworks/{}/subnets/{}'.format(
self.get_subscription_id(), resource_group_1, vnet_name_2, subnet_name_2))
# delete all servers
self.cmd('{} flexible-server delete -g {} -n {} --yes'.format(database_engine, resource_group_2, servers[0]),
checks=NoneCheck())
self.cmd('{} flexible-server delete -g {} -n {} --yes'.format(database_engine, resource_group_2, servers[1]),
checks=NoneCheck())
# Wait for delegation cleanup after delete (see note in the first scenario).
time.sleep(20 * 60)
class FlexibleServerPublicAccessMgmtScenarioTest(ScenarioTest):
    """Scenario steps for the --public-access shorthand values ('all', '0.0.0.0')."""

    def _test_flexible_server_public_access_mgmt(self, database_engine, resource_group):
        # flexible-server create
        if self.cli_ctx.local_context.is_on:
            self.cmd('local-context off')
        if database_engine == 'postgres':
            sku_name = 'Standard_D2s_v3'
            location = self.postgres_location
        elif database_engine == 'mysql':
            sku_name = 'Standard_B1ms'
            location = self.mysql_location
        # Two servers: one opened to all IPs, one to Azure services only.
        servers = [self.create_random_name('azuredbpaccess', SERVER_NAME_MAX_LENGTH) for _ in range(2)]
        # NOTE(review): the 'skuname' JMESPath looks like it may need to be
        # 'sku.name' — confirm against the create-command output schema.
        scenarios = ((servers[0], 'all', 'AllowAll_'),
                     (servers[1], '0.0.0.0', 'AllowAllAzureServicesAndResourcesWithinAzureIps_'))
        for server_name, access_value, expected_rule_prefix in scenarios:
            self.cmd(f'{database_engine} flexible-server create -g {resource_group} -n {server_name} '
                     f'--public-access {access_value} -l {location}',
                     checks=[JMESPathCheck('resourceGroup', resource_group),
                             JMESPathCheck('skuname', sku_name),
                             StringContainCheck(expected_rule_prefix),
                             StringContainCheck(server_name)])
        # delete all servers
        for server_name in servers:
            self.cmd(f'{database_engine} flexible-server delete -g {resource_group} -n {server_name} --yes',
                     checks=NoneCheck())
| 55.16035
| 252
| 0.634056
| 6,087
| 56,760
| 5.675374
| 0.071135
| 0.089967
| 0.10444
| 0.128177
| 0.828084
| 0.78559
| 0.750825
| 0.719562
| 0.682887
| 0.649077
| 0
| 0.011828
| 0.241825
| 56,760
| 1,028
| 253
| 55.214008
| 0.790933
| 0.074207
| 0
| 0.5
| 0
| 0.049716
| 0.256899
| 0.035733
| 0
| 0
| 0
| 0
| 0.022727
| 1
| 0.082386
| false
| 0.00142
| 0.022727
| 0.00142
| 0.12642
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5434a6767fdadeec33a6d0867db561c1a660c360
| 409
|
py
|
Python
|
lightly/api/routes/users/datasets/tags/__init__.py
|
laurenmoos/lightly
|
2e9ae8bbf433b09c89d666eee0358935d7f9eb9d
|
[
"MIT"
] | null | null | null |
lightly/api/routes/users/datasets/tags/__init__.py
|
laurenmoos/lightly
|
2e9ae8bbf433b09c89d666eee0358935d7f9eb9d
|
[
"MIT"
] | null | null | null |
lightly/api/routes/users/datasets/tags/__init__.py
|
laurenmoos/lightly
|
2e9ae8bbf433b09c89d666eee0358935d7f9eb9d
|
[
"MIT"
] | null | null | null |
""" Tags Routes """
# Copyright (c) 2020. Lightly AG and its affiliates.
# All Rights Reserved
# provided functions
from lightly.api.routes.users.datasets.tags.service import post # noqa: F401, E402, E501
from lightly.api.routes.users.datasets.tags.service import get # noqa: F401, E402, E501
from lightly.api.routes.users.datasets.tags.service import get_samples # noqa: F401, E402, E501
| 40.9
| 96
| 0.731051
| 58
| 409
| 5.137931
| 0.482759
| 0.110738
| 0.14094
| 0.201342
| 0.630872
| 0.630872
| 0.630872
| 0.630872
| 0.630872
| 0.463087
| 0
| 0.090643
| 0.163814
| 409
| 9
| 97
| 45.444444
| 0.780702
| 0.420538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
54b5c24fdcb3e68cc3136baef3e6beada3a9cff5
| 38
|
py
|
Python
|
tests/__init__.py
|
jorgenwh/bionumpy
|
18c1adbc38be403ceb5d79e411f1562c6afe21c9
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
jorgenwh/bionumpy
|
18c1adbc38be403ceb5d79e411f1562c6afe21c9
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
jorgenwh/bionumpy
|
18c1adbc38be403ceb5d79e411f1562c6afe21c9
|
[
"MIT"
] | 1
|
2022-03-07T21:58:03.000Z
|
2022-03-07T21:58:03.000Z
|
"""Unit test package for bionumpy."""
| 19
| 37
| 0.684211
| 5
| 38
| 5.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 38
| 1
| 38
| 38
| 0.787879
| 0.815789
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
54bdb11c7698e84f31b185964ce24c59ae797ca7
| 193
|
py
|
Python
|
Morphology/max_tree.py
|
Joevaen/Scikit-image_On_CT
|
e3bf0eeadc50691041b4b7c44a19d07546a85001
|
[
"Apache-2.0"
] | null | null | null |
Morphology/max_tree.py
|
Joevaen/Scikit-image_On_CT
|
e3bf0eeadc50691041b4b7c44a19d07546a85001
|
[
"Apache-2.0"
] | null | null | null |
Morphology/max_tree.py
|
Joevaen/Scikit-image_On_CT
|
e3bf0eeadc50691041b4b7c44a19d07546a85001
|
[
"Apache-2.0"
] | null | null | null |
# 从图像构建最大树。
#
# 组件树表示连接的组件的层次结构,这些组件是由应用于图像的顺序阈值操作产生的。 如果更高级别的组件包含在第一层中,则该级别上的已连接组件是该更高级别组件的父级。 最大树是组件树的有效表示。 一个级别上的连接组件由该级别上的一个参考像素表示,该像素是该级别上所有其他像素以及该级别上的参考像素的父级。 max-tree是许多形态运算符(即连通运算符)的基础。
| 64.333333
| 179
| 0.880829
| 12
| 193
| 14.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041451
| 193
| 3
| 179
| 64.333333
| 0.918919
| 0.968912
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
49ce684f5f3684e8cd6b498cb6e1ec858f4bd5ca
| 197
|
py
|
Python
|
website/ttkom/admin.py
|
CharlesRigal/projet_website
|
ba55e3b65d1ab587d9d896d76b6a54d4c3e0b4b6
|
[
"MIT"
] | null | null | null |
website/ttkom/admin.py
|
CharlesRigal/projet_website
|
ba55e3b65d1ab587d9d896d76b6a54d4c3e0b4b6
|
[
"MIT"
] | null | null | null |
website/ttkom/admin.py
|
CharlesRigal/projet_website
|
ba55e3b65d1ab587d9d896d76b6a54d4c3e0b4b6
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Post, User, Comment, Profile
# Register your models here.
# Expose the app's content models in the Django admin, in a single pass.
for model in (Post, Comment, Profile):
    admin.site.register(model)
| 21.888889
| 48
| 0.796954
| 28
| 197
| 5.607143
| 0.5
| 0.171975
| 0.324841
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106599
| 197
| 8
| 49
| 24.625
| 0.892045
| 0.13198
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
49e979e4feee73f3942f051ccb85d354b78365aa
| 69
|
py
|
Python
|
emgen/core/__init__.py
|
etedor/emgen
|
2b891431887f1d4b3f28ec64408e75ee7baa0d66
|
[
"MIT"
] | null | null | null |
emgen/core/__init__.py
|
etedor/emgen
|
2b891431887f1d4b3f28ec64408e75ee7baa0d66
|
[
"MIT"
] | null | null | null |
emgen/core/__init__.py
|
etedor/emgen
|
2b891431887f1d4b3f28ec64408e75ee7baa0d66
|
[
"MIT"
] | null | null | null |
# -*- coding:utf-8 -*-
from emgen.core.local_part import local_part
| 17.25
| 44
| 0.710145
| 11
| 69
| 4.272727
| 0.818182
| 0.382979
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016667
| 0.130435
| 69
| 3
| 45
| 23
| 0.766667
| 0.289855
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
49f002d16a79c214190642452d3102345c93924c
| 2,392
|
py
|
Python
|
app/egresos/routes.py
|
abazbaz/project_1
|
a0ac32b6aa86df3c920cd59718fe63b935a79dcc
|
[
"MIT"
] | null | null | null |
app/egresos/routes.py
|
abazbaz/project_1
|
a0ac32b6aa86df3c920cd59718fe63b935a79dcc
|
[
"MIT"
] | null | null | null |
app/egresos/routes.py
|
abazbaz/project_1
|
a0ac32b6aa86df3c920cd59718fe63b935a79dcc
|
[
"MIT"
] | null | null | null |
from app.egresos import blueprint
from flask import render_template, request
from flask_login import login_required
from bcrypt import checkpw
from app import db, login_manager
@blueprint.route('/<template>')
@login_required
def route_template(template):
    """Serve the HTML template named by the URL path segment (login required)."""
    page_name = f"{template}.html"
    return render_template(page_name)
@blueprint.route('/capturar_cuenta', methods=['GET', 'POST'])
def captura_cuenta():
    """Render the "capturar cuenta" (account capture) page.

    Handles GET (initial page load) and POST (form submission). The submitted
    form is currently only printed for debugging; the dropdown option lists
    are hard-coded placeholders.
    """
    # FIX: the original rule was 'capturar_cuenta' without a leading slash,
    # which Flask rejects at registration time ("urls must start with a
    # leading slash").
    formaPago = ['Banamex', 'Santander', 'BBVA']  # list() wrappers were redundant
    vendor = ['categoria_1', 'categoria_2', 'categoria_3', 'categoria_4', 'categoria_5']
    proveedor = ['sub_categoria_1', 'sub_categoria_2', 'sub_categoria_3',
                 'sub_categoria_4', 'sub_categoria_5']
    categoria = ['categoria_1', 'categoria_2', 'categoria_3', 'categoria_4', 'categoria_5']
    concepto = ['categoria_1', 'categoria_2', 'categoria_3', 'categoria_4', 'categoria_5']
    user_inputs = dict(request.form)
    print(user_inputs)  # TODO: replace debug print with validation/persistence
    return render_template(
        "capturar_cuenta.html",
        navbar_data_capture='active',
        title="Registro de Egresos",
        formaPago=formaPago,
        vendor=vendor,
        proveedor=proveedor,
        categoria=categoria,
        concepto=concepto,
        velocity_max=1,
    )
@blueprint.route('/capturar_gasto', methods=['GET', 'POST'])
def captura_gasto():
    """Render the "capturar gasto" (expense capture) page.

    Handles GET (initial page load) and POST (form submission). The submitted
    form is currently only printed for debugging; the dropdown option lists
    are hard-coded placeholders.
    """
    # FIX: the original rule was 'capturar_gasto' without a leading slash,
    # which Flask rejects at registration time ("urls must start with a
    # leading slash").
    formaPago = ['Banamex', 'Santander', 'BBVA']  # list() wrappers were redundant
    vendor = ['categoria_1', 'categoria_2', 'categoria_3', 'categoria_4', 'categoria_5']
    proveedor = ['sub_categoria_1', 'sub_categoria_2', 'sub_categoria_3',
                 'sub_categoria_4', 'sub_categoria_5']
    categoria = ['categoria_1', 'categoria_2', 'categoria_3', 'categoria_4', 'categoria_5']
    concepto = ['categoria_1', 'categoria_2', 'categoria_3', 'categoria_4', 'categoria_5']
    user_inputs = dict(request.form)
    print(user_inputs)  # TODO: replace debug print with validation/persistence
    return render_template(
        "capturar_gasto.html",
        navbar_data_capture='active',
        title="Registro de Egresos",
        formaPago=formaPago,
        vendor=vendor,
        proveedor=proveedor,
        categoria=categoria,
        concepto=concepto,
        velocity_max=1,
    )
| 43.490909
| 113
| 0.607441
| 247
| 2,392
| 5.578947
| 0.222672
| 0.087083
| 0.060958
| 0.100145
| 0.770682
| 0.735849
| 0.735849
| 0.735849
| 0.735849
| 0.735849
| 0
| 0.023986
| 0.267977
| 2,392
| 54
| 114
| 44.296296
| 0.762993
| 0
| 0
| 0.666667
| 0
| 0
| 0.279264
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.111111
| 0.022222
| 0.244444
| 0.133333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b70a3e47b8cf41082d976f96360a5ac68601ef87
| 305
|
py
|
Python
|
src/api/routers/room/schemas.py
|
b1team/trada
|
22ceaf4d50fe3a38ff402315c029e574773ca9e0
|
[
"MIT"
] | null | null | null |
src/api/routers/room/schemas.py
|
b1team/trada
|
22ceaf4d50fe3a38ff402315c029e574773ca9e0
|
[
"MIT"
] | 1
|
2021-03-12T15:16:03.000Z
|
2021-03-12T15:16:03.000Z
|
src/api/routers/room/schemas.py
|
b1team/trada
|
22ceaf4d50fe3a38ff402315c029e574773ca9e0
|
[
"MIT"
] | null | null | null |
from typing import Optional
from pydantic import BaseModel
class BasicSchemas(BaseModel):
    """Fields shared by room requests — presumably the base room/member payload; confirm against the room router."""
    # Identifier of the target room; None when not supplied by the caller.
    room_id: Optional[str] = None
    # Display name of the member; None when not supplied by the caller.
    member_name: Optional[str] = None
class UpdateRoomSchemas(BaseModel):
    """Payload for updating a room; every field is optional so callers send only what changes."""
    # Identifier of the room to update; None when not supplied.
    room_id: Optional[str] = None
    # New room name; None leaves the name unchanged (presumably — confirm in the handler).
    room_name: Optional[str] = None
    # New avatar value; None leaves the avatar unchanged (presumably — confirm in the handler).
    avatar: Optional[str] = None
| 21.785714
| 37
| 0.731148
| 38
| 305
| 5.763158
| 0.421053
| 0.251142
| 0.342466
| 0.210046
| 0.273973
| 0.273973
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183607
| 305
| 14
| 38
| 21.785714
| 0.879518
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.222222
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
b724bc77fade7ac5eddeffc55b06570d1414a8c8
| 51
|
py
|
Python
|
Beta/How many rectangles can you find.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | 6
|
2020-09-03T09:32:25.000Z
|
2020-12-07T04:10:01.000Z
|
Beta/How many rectangles can you find.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | 1
|
2021-12-13T15:30:21.000Z
|
2021-12-13T15:30:21.000Z
|
Beta/How many rectangles can you find.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | null | null | null |
def rectangle_number(n):
    """Return the number of axis-aligned rectangles in an n x n grid.

    A rectangle is fixed by choosing 2 of the n+1 horizontal lines and
    2 of the n+1 vertical lines, i.e. C(n+1, 2) ** 2.
    """
    line_pairs = n * (n + 1) // 2
    return line_pairs * line_pairs
| 25.5
| 26
| 0.607843
| 10
| 51
| 3
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068182
| 0.137255
| 51
| 2
| 26
| 25.5
| 0.613636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
b73347f29fb932c9fe963917ac6e1bf22c2e3512
| 64
|
py
|
Python
|
Applications/SlicerApp/Testing/Python/SlicerTestingExitSuccessTest.py
|
forfullstack/slicersources-src
|
91bcecf037a27f3fad4c0ab57e8286fc258bb0f5
|
[
"Apache-2.0"
] | null | null | null |
Applications/SlicerApp/Testing/Python/SlicerTestingExitSuccessTest.py
|
forfullstack/slicersources-src
|
91bcecf037a27f3fad4c0ab57e8286fc258bb0f5
|
[
"Apache-2.0"
] | null | null | null |
Applications/SlicerApp/Testing/Python/SlicerTestingExitSuccessTest.py
|
forfullstack/slicersources-src
|
91bcecf037a27f3fad4c0ab57e8286fc258bb0f5
|
[
"Apache-2.0"
] | null | null | null |
# Minimal script: prints a greeting and exits 0 — presumably a smoke test of
# the embedded Python environment (confirm against the test harness).
from __future__ import print_function  # PY2/PY3-compatible print function
print("Hello Slicer !")
| 12.8
| 37
| 0.78125
| 8
| 64
| 5.625
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140625
| 64
| 4
| 38
| 16
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
3f8bb166964310d4f3e5fbba042aadb7d03aab43
| 1,831
|
py
|
Python
|
tests/test_models/test_inventory_item_image.py
|
stcstores/linnapi
|
88cc1d73444f44b7d901dc1086790a300066ea6e
|
[
"MIT"
] | null | null | null |
tests/test_models/test_inventory_item_image.py
|
stcstores/linnapi
|
88cc1d73444f44b7d901dc1086790a300066ea6e
|
[
"MIT"
] | null | null | null |
tests/test_models/test_inventory_item_image.py
|
stcstores/linnapi
|
88cc1d73444f44b7d901dc1086790a300066ea6e
|
[
"MIT"
] | null | null | null |
import pytest
from linnapi import models
@pytest.fixture
def add_image_to_inventory_item_response():
    """Return a canned "add image to inventory item" API response payload."""
    return {
        "StockItemId": "972af264-d768-4c6c-9152-0ad9d9d5b352",
        "ImageId": "eea21827-491d-4022-996a-d068dd6b25ea",
        "ImageUrl": "https://image.jpg",
        "ImageThumbnailUrl": "https://image_thumbanil.jpg",
    }
@pytest.fixture
def inventory_item_image_with_response(add_image_to_inventory_item_response):
    """Return an InventoryItemImage model built from the canned response fixture."""
    return models.InventoryItemImage(add_image_to_inventory_item_response)
def test_inventory_item_image_sets_raw(
    inventory_item_image_with_response, add_image_to_inventory_item_response
):
    """The model keeps the unmodified response payload on ``raw``."""
    assert (
        inventory_item_image_with_response.raw == add_image_to_inventory_item_response
    )


def test_inventory_item_image_sets_stock_item_id(
    inventory_item_image_with_response, add_image_to_inventory_item_response
):
    """``stock_item_id`` is taken from the response's ``StockItemId`` key."""
    assert (
        inventory_item_image_with_response.stock_item_id
        == add_image_to_inventory_item_response["StockItemId"]
    )


def test_inventory_item_image_sets_image_id(
    inventory_item_image_with_response, add_image_to_inventory_item_response
):
    """``image_id`` is taken from the response's ``ImageId`` key."""
    assert (
        inventory_item_image_with_response.image_id
        == add_image_to_inventory_item_response["ImageId"]
    )


def test_inventory_item_image_sets_image_url(
    inventory_item_image_with_response, add_image_to_inventory_item_response
):
    """``image_url`` is taken from the response's ``ImageUrl`` key."""
    assert (
        inventory_item_image_with_response.image_url
        == add_image_to_inventory_item_response["ImageUrl"]
    )


def test_inventory_item_image_sets_in_order_image_thumbnail_url(
    inventory_item_image_with_response, add_image_to_inventory_item_response
):
    """``image_thumbnail_url`` is taken from the response's ``ImageThumbnailUrl`` key."""
    assert (
        inventory_item_image_with_response.image_thumbnail_url
        == add_image_to_inventory_item_response["ImageThumbnailUrl"]
    )
| 29.063492
| 86
| 0.78864
| 233
| 1,831
| 5.55794
| 0.184549
| 0.29112
| 0.222394
| 0.190734
| 0.745174
| 0.745174
| 0.72278
| 0.533591
| 0.533591
| 0.533591
| 0
| 0.027564
| 0.148007
| 1,831
| 62
| 87
| 29.532258
| 0.802564
| 0
| 0
| 0.361702
| 0
| 0
| 0.110322
| 0.039323
| 0
| 0
| 0
| 0
| 0.106383
| 1
| 0.148936
| false
| 0
| 0.042553
| 0.042553
| 0.234043
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3f9f124d21f37be7bf1ec8abb39969d7b64ce9aa
| 123
|
py
|
Python
|
pyolite/__init__.py
|
PressLabs/pyolite
|
e8e5a76fc3a714bd71b66d4c0ba3ff6357595ff0
|
[
"BSD-2-Clause"
] | 23
|
2015-02-23T10:53:38.000Z
|
2017-08-22T11:58:06.000Z
|
pyolite/__init__.py
|
sona1111/pyolite
|
e8e5a76fc3a714bd71b66d4c0ba3ff6357595ff0
|
[
"BSD-2-Clause"
] | 11
|
2015-03-20T10:45:52.000Z
|
2017-07-03T16:26:57.000Z
|
pyolite/__init__.py
|
sona1111/pyolite
|
e8e5a76fc3a714bd71b66d4c0ba3ff6357595ff0
|
[
"BSD-2-Clause"
] | 11
|
2015-03-05T20:11:42.000Z
|
2017-01-31T13:16:02.000Z
|
# Re-export Pyolite at package level; tolerate import failure so versioneer
# can still load this module during a build.
try:
    from .pyolite import Pyolite
except ImportError:  # FIX: dropped the unused 'as exp' binding (flake8 F841)
    # Deliberate best-effort: swallow the failure so version machinery works.
    pass
| 20.5
| 48
| 0.715447
| 18
| 123
| 4.888889
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.252033
| 123
| 5
| 49
| 24.6
| 0.956522
| 0.341463
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
3fa03152ff8ea66b671b5845fef20126052fe274
| 192
|
py
|
Python
|
backend/user/views.py
|
dzhfrv/dzhfrv-starn
|
cae3261ed85653643032218c4a2f2cd273ff47ae
|
[
"MIT"
] | 1
|
2021-01-19T14:04:53.000Z
|
2021-01-19T14:04:53.000Z
|
apps/user/views.py
|
dzhfrv/django-base-app
|
9dcc27a2e75e7aa3c6d31b468f089e0b70f52df9
|
[
"MIT"
] | 5
|
2021-03-19T01:23:20.000Z
|
2021-09-22T18:48:54.000Z
|
apps/user/views.py
|
dzhfrv/django-base-app
|
9dcc27a2e75e7aa3c6d31b468f089e0b70f52df9
|
[
"MIT"
] | 1
|
2020-04-01T12:37:00.000Z
|
2020-04-01T12:37:00.000Z
|
from rest_framework import generics
from .serializers import UserRegistrationSerializer
class RegistrationEndpoint(generics.CreateAPIView):
    """Create-only API endpoint for user registration.

    DRF's CreateAPIView supplies the POST/create behavior; only the
    serializer used to validate and persist the payload is configured here.
    """
    # Serializer that validates and creates the new user account.
    serializer_class = UserRegistrationSerializer
| 27.428571
| 51
| 0.869792
| 16
| 192
| 10.3125
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098958
| 192
| 6
| 52
| 32
| 0.953757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3fccb24b2c597cbc26016f1270eef396a70b2d1f
| 196
|
py
|
Python
|
1201-1300/1243-Number of Segments in a String/1243-Number of Segments in a String.py
|
jiadaizhao/LintCode
|
a8aecc65c47a944e9debad1971a7bc6b8776e48b
|
[
"MIT"
] | 77
|
2017-12-30T13:33:37.000Z
|
2022-01-16T23:47:08.000Z
|
1201-1300/1243-Number of Segments in a String/1243-Number of Segments in a String.py
|
jxhangithub/LintCode-1
|
a8aecc65c47a944e9debad1971a7bc6b8776e48b
|
[
"MIT"
] | 1
|
2018-05-14T14:15:40.000Z
|
2018-05-14T14:15:40.000Z
|
1201-1300/1243-Number of Segments in a String/1243-Number of Segments in a String.py
|
jxhangithub/LintCode-1
|
a8aecc65c47a944e9debad1971a7bc6b8776e48b
|
[
"MIT"
] | 39
|
2017-12-07T14:36:25.000Z
|
2022-03-10T23:05:37.000Z
|
class Solution:
    """
    @param s: a string
    @return: the number of segments in a string
    """

    def countSegments(self, s):
        # str.split() with no arguments collapses runs of whitespace and drops
        # leading/trailing whitespace, so each resulting piece is one segment.
        segments = s.split()
        return len(segments)
| 21.777778
| 47
| 0.576531
| 26
| 196
| 4.346154
| 0.807692
| 0.123894
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.316327
| 196
| 8
| 48
| 24.5
| 0.843284
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
3fe01a6fedd1c2868f6a55f48644a981f87d94ac
| 255
|
py
|
Python
|
src/mission/task/task.py
|
AuraUAS/aura-core
|
4711521074db72ba9089213e14455d89dc5306c0
|
[
"MIT",
"BSD-2-Clause-FreeBSD"
] | 8
|
2016-08-03T19:35:03.000Z
|
2019-12-15T06:25:05.000Z
|
src/mission/task/task.py
|
AuraUAS/aura-core
|
4711521074db72ba9089213e14455d89dc5306c0
|
[
"MIT",
"BSD-2-Clause-FreeBSD"
] | 4
|
2018-09-27T15:48:56.000Z
|
2018-11-05T12:38:10.000Z
|
src/mission/task/task.py
|
AuraUAS/aura-core
|
4711521074db72ba9089213e14455d89dc5306c0
|
[
"MIT",
"BSD-2-Clause-FreeBSD"
] | 5
|
2017-06-28T19:15:36.000Z
|
2020-02-19T19:31:24.000Z
|
class Task:
    """Minimal base class for mission tasks.

    Subclasses override the lifecycle hooks below; the base implementations
    are deliberate no-ops. (FIX: removed stray C-style semicolons.)
    """

    def __init__(self):
        # Human-readable task name; empty until assigned.
        self.name = ""
        # Whether the task is currently running.
        self.active = False

    def activate(self):
        """Called when the task is started; override in subclasses."""
        pass

    def update(self, dt):
        """Advance the task by dt (presumably seconds — confirm with callers)."""
        pass

    def is_complete(self):
        """Return completion status; base returns None (falsy).

        NOTE(review): an explicit bool would be clearer, but the None return
        is preserved for backward compatibility.
        """
        pass

    def close(self):
        """Called when the task is shut down; override in subclasses."""
        pass
| 15
| 28
| 0.482353
| 28
| 255
| 4.214286
| 0.535714
| 0.20339
| 0.186441
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.419608
| 255
| 16
| 29
| 15.9375
| 0.797297
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.416667
| false
| 0.333333
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
3ffad49a6fa8ced3611bd3f71d9aa3baf6492cf4
| 64
|
py
|
Python
|
backend/lucas/main/scrapyd/__init__.py
|
ralucaportase/lucas
|
035196fc7c42d80ec1b6c0f7d9ce7e1eff200e92
|
[
"MIT"
] | null | null | null |
backend/lucas/main/scrapyd/__init__.py
|
ralucaportase/lucas
|
035196fc7c42d80ec1b6c0f7d9ce7e1eff200e92
|
[
"MIT"
] | 9
|
2021-03-30T14:15:54.000Z
|
2022-03-02T14:54:12.000Z
|
backend/lucas/main/scrapyd/__init__.py
|
ralucaportase/lucas
|
035196fc7c42d80ec1b6c0f7d9ce7e1eff200e92
|
[
"MIT"
] | null | null | null |
from .api import get_scrapyd_api
__all__ = ["get_scrapyd_api"]
| 16
| 32
| 0.78125
| 10
| 64
| 4.2
| 0.6
| 0.47619
| 0.619048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 64
| 3
| 33
| 21.333333
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.234375
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
b748ff6308cd70326a3678580b7cb40d6f0652dd
| 59
|
py
|
Python
|
modelmanager/plugins/__init__.py
|
mwort/modelmanager
|
f079d5aa09165fcd43a2bef3cb1cfc2ab36cb47a
|
[
"BSD-3-Clause"
] | 1
|
2020-06-11T17:29:49.000Z
|
2020-06-11T17:29:49.000Z
|
modelmanager/plugins/__init__.py
|
mwort/modelmanager
|
f079d5aa09165fcd43a2bef3cb1cfc2ab36cb47a
|
[
"BSD-3-Clause"
] | 1
|
2020-02-12T01:15:46.000Z
|
2020-02-12T01:15:46.000Z
|
modelmanager/plugins/__init__.py
|
mwort/modelmanager
|
f079d5aa09165fcd43a2bef3cb1cfc2ab36cb47a
|
[
"BSD-3-Clause"
] | 1
|
2021-04-05T13:33:31.000Z
|
2021-04-05T13:33:31.000Z
|
from .templates import templates
from .clones import clone
| 19.666667
| 32
| 0.830508
| 8
| 59
| 6.125
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135593
| 59
| 2
| 33
| 29.5
| 0.960784
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b7496e96dc4bb9a7c3c93ec0d2ad282854a5e1a6
| 144
|
py
|
Python
|
mmseg/runner/__init__.py
|
yoyoyoohh/spacenet6
|
57829afb6a642d96c30c42434929080b6d927fb4
|
[
"Apache-2.0"
] | null | null | null |
mmseg/runner/__init__.py
|
yoyoyoohh/spacenet6
|
57829afb6a642d96c30c42434929080b6d927fb4
|
[
"Apache-2.0"
] | null | null | null |
mmseg/runner/__init__.py
|
yoyoyoohh/spacenet6
|
57829afb6a642d96c30c42434929080b6d927fb4
|
[
"Apache-2.0"
] | null | null | null |
'''
Author: Shuailin Chen
Created Date: 2021-11-15
Last Modified: 2021-11-15
content:
'''
from .my_iter_based_runner import MyIterBasedRunner
| 18
| 51
| 0.770833
| 21
| 144
| 5.142857
| 0.857143
| 0.111111
| 0.148148
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126984
| 0.125
| 144
| 8
| 51
| 18
| 0.730159
| 0.618056
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b7879651168e291663f80e2ce730fe20e214177a
| 23
|
py
|
Python
|
tests/runtime-trace-tests/cases/TODO_poly_print.py
|
jaydeetay/pxt
|
aad1beaf15edc46e1327806367298cbc942dcbc1
|
[
"MIT"
] | 977
|
2019-05-06T23:12:55.000Z
|
2022-03-29T19:11:44.000Z
|
tests/runtime-trace-tests/cases/TODO_poly_print.py
|
jaydeetay/pxt
|
aad1beaf15edc46e1327806367298cbc942dcbc1
|
[
"MIT"
] | 3,980
|
2019-05-09T20:48:14.000Z
|
2022-03-28T20:33:07.000Z
|
tests/runtime-trace-tests/cases/TODO_poly_print.py
|
jaydeetay/pxt
|
aad1beaf15edc46e1327806367298cbc942dcbc1
|
[
"MIT"
] | 306
|
2016-04-09T05:28:07.000Z
|
2019-05-02T14:23:29.000Z
|
print("baz")
print(123)
| 11.5
| 12
| 0.695652
| 4
| 23
| 4
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 0.043478
| 23
| 2
| 13
| 11.5
| 0.590909
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
b7a30f16910112e68e6f8421110bf4f2ee718945
| 11,032
|
py
|
Python
|
filter_plugins/oc_output/interfaces/interface/routed_vlan/ipv6/router_advertisement/config/__init__.py
|
lnde/ansible-ncyang
|
214d001564a4c2a27d25a20f4f095b5a0b69b378
|
[
"MIT"
] | null | null | null |
filter_plugins/oc_output/interfaces/interface/routed_vlan/ipv6/router_advertisement/config/__init__.py
|
lnde/ansible-ncyang
|
214d001564a4c2a27d25a20f4f095b5a0b69b378
|
[
"MIT"
] | null | null | null |
filter_plugins/oc_output/interfaces/interface/routed_vlan/ipv6/router_advertisement/config/__init__.py
|
lnde/ansible-ncyang
|
214d001564a4c2a27d25a20f4f095b5a0b69b378
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
    # On PY3 the builtins module is named 'builtins'; alias it to the PY2 name
    # so generated code can reference __builtin__ on either interpreter.
    import builtins as __builtin__
    long = int  # PY3 has no 'long'; alias so 'base_type=long' keeps working
elif six.PY2:
    import __builtin__
class config(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-interfaces - based on the path /interfaces/interface/routed-vlan/ipv6/router-advertisement/config. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configuration parameters relating to router advertisements
for IPv6.
"""
__slots__ = ('_path_helper', '_extmethods', '__interval','__lifetime','__suppress',)
_yang_name = 'config'
_yang_namespace = 'http://openconfig.net/yang/interfaces'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__interval = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='uint32', is_config=True)
self.__lifetime = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lifetime", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='uint32', is_config=True)
self.__suppress = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="suppress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='boolean', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['interfaces', 'interface', 'routed-vlan', 'ipv6', 'router-advertisement', 'config']
def _get_interval(self):
"""
Getter method for interval, mapped from YANG variable /interfaces/interface/routed_vlan/ipv6/router_advertisement/config/interval (uint32)
YANG Description: The interval between periodic router advertisement neighbor
discovery messages sent on this interface expressed in
seconds.
"""
return self.__interval
def _set_interval(self, v, load=False):
"""
Setter method for interval, mapped from YANG variable /interfaces/interface/routed_vlan/ipv6/router_advertisement/config/interval (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_interval is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_interval() directly.
YANG Description: The interval between periodic router advertisement neighbor
discovery messages sent on this interface expressed in
seconds.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """interval must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='uint32', is_config=True)""",
})
self.__interval = t
if hasattr(self, '_set'):
self._set()
def _unset_interval(self):
self.__interval = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='uint32', is_config=True)
def _get_lifetime(self):
"""
Getter method for lifetime, mapped from YANG variable /interfaces/interface/routed_vlan/ipv6/router_advertisement/config/lifetime (uint32)
YANG Description: The lifetime advertised in the router advertisement neighbor
discovery message on this interface.
"""
return self.__lifetime
def _set_lifetime(self, v, load=False):
"""
Setter method for lifetime, mapped from YANG variable /interfaces/interface/routed_vlan/ipv6/router_advertisement/config/lifetime (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_lifetime is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lifetime() directly.
YANG Description: The lifetime advertised in the router advertisement neighbor
discovery message on this interface.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lifetime", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lifetime must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lifetime", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='uint32', is_config=True)""",
})
self.__lifetime = t
if hasattr(self, '_set'):
self._set()
def _unset_lifetime(self):
self.__lifetime = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lifetime", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='uint32', is_config=True)
def _get_suppress(self):
"""
Getter method for suppress, mapped from YANG variable /interfaces/interface/routed_vlan/ipv6/router_advertisement/config/suppress (boolean)
YANG Description: When set to true, router advertisement neighbor discovery
messages are not transmitted on this interface.
"""
return self.__suppress
def _set_suppress(self, v, load=False):
"""
Setter method for suppress, mapped from YANG variable /interfaces/interface/routed_vlan/ipv6/router_advertisement/config/suppress (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_suppress is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_suppress() directly.
YANG Description: When set to true, router advertisement neighbor discovery
messages are not transmitted on this interface.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="suppress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """suppress must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="suppress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='boolean', is_config=True)""",
})
self.__suppress = t
if hasattr(self, '_set'):
self._set()
def _unset_suppress(self):
    # Restore the leaf to its schema default ("false").
    self.__suppress = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="suppress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='boolean', is_config=True)
interval = __builtin__.property(_get_interval, _set_interval)
lifetime = __builtin__.property(_get_lifetime, _set_lifetime)
suppress = __builtin__.property(_get_suppress, _set_suppress)
_pyangbind_elements = OrderedDict([('interval', interval), ('lifetime', lifetime), ('suppress', suppress), ])
| 54.613861
| 405
| 0.741207
| 1,418
| 11,032
| 5.555712
| 0.138928
| 0.033003
| 0.044428
| 0.042904
| 0.783828
| 0.746636
| 0.734704
| 0.723661
| 0.708936
| 0.69675
| 0
| 0.016468
| 0.141316
| 11,032
| 201
| 406
| 54.885572
| 0.815159
| 0.260877
| 0
| 0.321429
| 0
| 0.026786
| 0.298914
| 0.068221
| 0
| 0
| 0
| 0
| 0
| 1
| 0.098214
| false
| 0
| 0.133929
| 0
| 0.357143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4d3cd8b1df3b76e0dec0d6a98cfec3ece87f00bb
| 40
|
py
|
Python
|
hello.py
|
DanHusariu/session1
|
2efe735079f897274a3ca4308032e6f118e57319
|
[
"MIT"
] | null | null | null |
hello.py
|
DanHusariu/session1
|
2efe735079f897274a3ca4308032e6f118e57319
|
[
"MIT"
] | null | null | null |
hello.py
|
DanHusariu/session1
|
2efe735079f897274a3ca4308032e6f118e57319
|
[
"MIT"
] | null | null | null |
# Emit the two demo messages in order.
for message in ('Hello World!', ':)pitonul'):
    print(message)
| 20
| 21
| 0.675
| 5
| 40
| 5.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05
| 40
| 2
| 22
| 20
| 0.710526
| 0
| 0
| 0
| 0
| 0
| 0.512195
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
4d4ca1d0b5bb7f62274c820a7b7248e4df671dc3
| 439
|
py
|
Python
|
abstractfactory/tablefactory/table_factory.py
|
kotaaaa/design_pattern_py
|
4276a5d564b91360c956ea33bf741c40aa6b1abb
|
[
"Apache-2.0"
] | null | null | null |
abstractfactory/tablefactory/table_factory.py
|
kotaaaa/design_pattern_py
|
4276a5d564b91360c956ea33bf741c40aa6b1abb
|
[
"Apache-2.0"
] | null | null | null |
abstractfactory/tablefactory/table_factory.py
|
kotaaaa/design_pattern_py
|
4276a5d564b91360c956ea33bf741c40aa6b1abb
|
[
"Apache-2.0"
] | null | null | null |
from factory.factory import Factory
from tablefactory.table_link import TableLink
from tablefactory.table_tray import TableTray
from tablefactory.table_page import TablePage
class TableFactory(Factory):
    """Concrete factory producing the HTML-table flavoured product parts."""

    def create_link(self, caption, url):
        """Build a table-styled link part."""
        link = TableLink(caption, url)
        return link

    def create_tray(self, caption):
        """Build a table-styled tray part."""
        tray = TableTray(caption)
        return tray

    def create_page(self, title, auther):
        """Build a table-styled page part (note: 'auther' kept for API compat)."""
        page = TablePage(title, auther)
        return page
| 27.4375
| 45
| 0.758542
| 54
| 439
| 6.055556
| 0.37037
| 0.146789
| 0.192661
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175399
| 439
| 15
| 46
| 29.266667
| 0.903315
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0
| 0.363636
| 0.272727
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
4d813ecab449ed90516d66a0446a8399be40be22
| 2,136
|
py
|
Python
|
tests/test_http_nsqd.py
|
aohan237/asyncnsq
|
aa60dad1360c7c4f1eeb1119243555526c86635c
|
[
"MIT"
] | 47
|
2017-10-26T16:11:12.000Z
|
2021-12-27T03:34:02.000Z
|
tests/test_http_nsqd.py
|
aohan237/asyncnsq
|
aa60dad1360c7c4f1eeb1119243555526c86635c
|
[
"MIT"
] | 18
|
2018-02-28T10:33:19.000Z
|
2021-01-11T15:43:47.000Z
|
tests/test_http_nsqd.py
|
aohan237/asyncnsq
|
aa60dad1360c7c4f1eeb1119243555526c86635c
|
[
"MIT"
] | 19
|
2017-12-15T06:43:32.000Z
|
2020-11-06T21:51:14.000Z
|
from ._testutils import run_until_complete, BaseTest
from asyncnsq.http.writer import NsqdHttpWriter
class NsqdHttpWriterTest(BaseTest):
    """
    :see: http://nsq.io/components/NsqdHttpWriter.html
    """

    def _writer(self):
        # Build a writer pointed at the local nsqd HTTP endpoint.
        return NsqdHttpWriter('127.0.0.1', 4151, loop=self.loop)

    @run_until_complete
    async def test_ok(self):
        """ping() replies 'OK' when nsqd is reachable."""
        writer = self._writer()
        reply = await writer.ping()
        self.assertEqual(reply, 'OK')
        await writer.close()

    @run_until_complete
    async def test_info(self):
        """info() payload carries a 'version' key."""
        writer = self._writer()
        reply = await writer.info()
        self.assertIn('version', reply)
        await writer.close()

    @run_until_complete
    async def test_stats(self):
        """stats() payload carries a 'version' key."""
        writer = self._writer()
        reply = await writer.stats()
        self.assertIn('version', reply)
        await writer.close()

    @run_until_complete
    async def test_pub(self):
        """pub() of a single message is acknowledged with 'OK'."""
        writer = self._writer()
        reply = await writer.pub('baz', 'baz_msg')
        self.assertEqual('OK', reply)
        await writer.close()

    @run_until_complete
    async def test_mpub(self):
        """mpub() of multiple messages is acknowledged with 'OK'."""
        writer = self._writer()
        reply = await writer.mpub('baz', 'baz_msg:1', 'baz_msg:1')
        self.assertEqual('OK', reply)
        await writer.close()

    @run_until_complete
    async def test_create_topic(self):
        """create_topic() replies with an empty body on success."""
        writer = self._writer()
        reply = await writer.create_topic('foo2')
        self.assertEqual('', reply)
        await writer.close()

    @run_until_complete
    async def test_delete_topic(self):
        """delete_topic() replies with an empty body on success."""
        writer = self._writer()
        reply = await writer.delete_topic('foo2')
        self.assertEqual('', reply)
        await writer.close()

    @run_until_complete
    async def test_create_channel(self):
        """create_channel() on a fresh topic replies with an empty body."""
        writer = self._writer()
        reply = await writer.create_topic('zap')
        self.assertEqual('', reply)
        reply = await writer.create_channel('zap', 'bar')
        self.assertEqual('', reply)
        await writer.close()
| 31.880597
| 64
| 0.621255
| 281
| 2,136
| 4.590747
| 0.174377
| 0.118605
| 0.148837
| 0.130233
| 0.758915
| 0.758915
| 0.712403
| 0.712403
| 0.712403
| 0.679845
| 0
| 0.051948
| 0.242978
| 2,136
| 66
| 65
| 32.363636
| 0.745826
| 0.023408
| 0
| 0.603774
| 0
| 0
| 0.067633
| 0
| 0
| 0
| 0
| 0
| 0.169811
| 1
| 0
| false
| 0
| 0.037736
| 0
| 0.056604
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4d9c1215826fcab716ad6cb95fd89167cdd7d64c
| 130
|
py
|
Python
|
docs/pge-sdk/OAuth2/fetch_and_print.py
|
ianwinsemius/open-energy-view
|
ea4084a63ab7e75572f0234a311f2c041cbbb562
|
[
"MIT"
] | 16
|
2020-10-20T05:51:10.000Z
|
2022-02-27T05:43:40.000Z
|
docs/pge-sdk/OAuth2/fetch_and_print.py
|
ianwinsemius/open-energy-view
|
ea4084a63ab7e75572f0234a311f2c041cbbb562
|
[
"MIT"
] | 13
|
2021-08-19T23:38:00.000Z
|
2022-03-05T20:32:06.000Z
|
docs/pge-sdk/OAuth2/fetch_and_print.py
|
ianwinsemius/open-energy-view
|
ea4084a63ab7e75572f0234a311f2c041cbbb562
|
[
"MIT"
] | 2
|
2022-01-16T12:39:20.000Z
|
2022-02-25T05:45:07.000Z
|
"""
Get token, make request, print results.
"""
from OAuth2.ClientCredentials import ClientCredentials
from OAuth2.Api import Api
| 21.666667
| 54
| 0.792308
| 16
| 130
| 6.4375
| 0.6875
| 0.194175
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017544
| 0.123077
| 130
| 5
| 55
| 26
| 0.885965
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4dc556e18d5ff8f71ec82c360d37302b329b0544
| 117
|
py
|
Python
|
Module Sample/MyModule/MyModuleClass.py
|
SauceChord/learning-python
|
eef70d809a3ed82fc2c9e8ac64253d7e372b7440
|
[
"MIT"
] | null | null | null |
Module Sample/MyModule/MyModuleClass.py
|
SauceChord/learning-python
|
eef70d809a3ed82fc2c9e8ac64253d7e372b7440
|
[
"MIT"
] | null | null | null |
Module Sample/MyModule/MyModuleClass.py
|
SauceChord/learning-python
|
eef70d809a3ed82fc2c9e8ac64253d7e372b7440
|
[
"MIT"
] | null | null | null |
class MyModuleClass(object):
    """Demo class living inside the MyModule package."""

    def sayHi(self):
        """Announce this class and its module on stdout."""
        greeting = 'class "MyModuleClass" in module "MyModule" says hi'
        print(greeting)
| 39
| 67
| 0.692308
| 14
| 117
| 5.785714
| 0.857143
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.188034
| 117
| 3
| 67
| 39
| 0.852632
| 0
| 0
| 0
| 0
| 0
| 0.423729
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
4dd04813a848ff83f960c31485e6158838e272a9
| 52
|
py
|
Python
|
interface/controller/testdata/i_am_not_go.py
|
Syuparn/chapa
|
bc78e33ca5a141264457118baca5d17be3ffa876
|
[
"MIT"
] | null | null | null |
interface/controller/testdata/i_am_not_go.py
|
Syuparn/chapa
|
bc78e33ca5a141264457118baca5d17be3ffa876
|
[
"MIT"
] | 1
|
2021-04-17T08:56:41.000Z
|
2021-04-17T08:56:41.000Z
|
interface/controller/testdata/i_am_not_go.py
|
Syuparn/chapati
|
bc78e33ca5a141264457118baca5d17be3ffa876
|
[
"MIT"
] | null | null | null |
def greet():
    """Print a greeting asserting this file is Python, not Go."""
    message = "I am Python! not Golang!!"
    print(message)
| 17.333333
| 38
| 0.596154
| 8
| 52
| 3.875
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.211538
| 52
| 2
| 39
| 26
| 0.756098
| 0
| 0
| 0
| 0
| 0
| 0.480769
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
4dd35800a4721581c35c28114dd5078a23c3de08
| 182
|
py
|
Python
|
django_minifier/filesystem.py
|
realnoobs/django_minifier
|
4750ddacd8ab2efd66680abfa3e267d0363e1a8d
|
[
"MIT"
] | null | null | null |
django_minifier/filesystem.py
|
realnoobs/django_minifier
|
4750ddacd8ab2efd66680abfa3e267d0363e1a8d
|
[
"MIT"
] | null | null | null |
django_minifier/filesystem.py
|
realnoobs/django_minifier
|
4750ddacd8ab2efd66680abfa3e267d0363e1a8d
|
[
"MIT"
] | null | null | null |
from django.template.loaders.filesystem import Loader as FilesystemLoader
from .mixins import TemplateMinifierMixin
class Loader(TemplateMinifierMixin, FilesystemLoader):
    """Filesystem template loader that minifies templates via the mixin."""
| 26
| 73
| 0.846154
| 18
| 182
| 8.555556
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10989
| 182
| 6
| 74
| 30.333333
| 0.950617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
4ddae184fcb0bd9a93ae0df5e1de3a3f21c91e8d
| 257
|
py
|
Python
|
src/wagtail_live/publishers/piesocket/__init__.py
|
wagtail/wagtail-live
|
dd769be089d457cf36db2506520028bc5f506ac3
|
[
"BSD-3-Clause"
] | 22
|
2021-06-07T20:36:18.000Z
|
2022-03-29T01:48:58.000Z
|
src/wagtail_live/publishers/piesocket/__init__.py
|
wagtail/wagtail-live
|
dd769be089d457cf36db2506520028bc5f506ac3
|
[
"BSD-3-Clause"
] | 73
|
2021-05-21T16:08:44.000Z
|
2022-03-20T23:59:59.000Z
|
src/wagtail_live/publishers/piesocket/__init__.py
|
wagtail/wagtail-live
|
dd769be089d457cf36db2506520028bc5f506ac3
|
[
"BSD-3-Clause"
] | 11
|
2021-06-10T10:05:13.000Z
|
2022-02-12T13:31:34.000Z
|
from .publisher import PieSocketPublisher
from .utils import get_piesocket_api_key, get_piesocket_endpoint, get_piesocket_secret
# Public names re-exported by this package (order preserved).
__all__ = ["PieSocketPublisher", "get_piesocket_api_key", "get_piesocket_endpoint", "get_piesocket_secret"]
| 25.7
| 86
| 0.789883
| 29
| 257
| 6.37931
| 0.413793
| 0.389189
| 0.162162
| 0.194595
| 0.605405
| 0.605405
| 0.605405
| 0.605405
| 0.605405
| 0.605405
| 0
| 0
| 0.136187
| 257
| 9
| 87
| 28.555556
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.315175
| 0.167315
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4ddcf323d0d9b51a3e2a2c026a45ffba5ddcbb97
| 4,550
|
py
|
Python
|
TCP_Models/Linux_4.8/Script/enumaration_generate.py
|
seclab-ucr/SCENT
|
f2fbfc5902b2dbd7dc72c8dc23ff28e24c31e50a
|
[
"MIT"
] | 3
|
2019-10-24T00:49:51.000Z
|
2020-12-16T13:34:49.000Z
|
TCP_Models/Linux_4.8/Script/enumaration_generate.py
|
seclab-ucr/SCENT
|
f2fbfc5902b2dbd7dc72c8dc23ff28e24c31e50a
|
[
"MIT"
] | null | null | null |
TCP_Models/Linux_4.8/Script/enumaration_generate.py
|
seclab-ucr/SCENT
|
f2fbfc5902b2dbd7dc72c8dc23ff28e24c31e50a
|
[
"MIT"
] | 1
|
2020-11-14T03:08:53.000Z
|
2020-11-14T03:08:53.000Z
|
#!/usr/bin/env python
# -------------------------
# Filename: enumeration_generation.py
# Revision: 1.0
# Data: long ago
# Author: Yue Cao
# Description: Generate all interations for model used in model checking
# -------------------------
# Fixes vs. the original revision:
#   * shebang was "#!/bin/bash" although this is a Python script;
#   * the Python-2-only builtin file() is replaced by open();
#   * file handles are now closed deterministically via "with";
#   * the nine nested for-loops are flattened with itertools.product
#     (iteration order is identical: rightmost factor varies fastest);
#   * dead commented-out variants were removed.
import os
from itertools import product

with open("../new_model_head", "r") as f_head, \
     open("../new_model_tail", "r") as f_tail, \
     open("../new_model", "w") as f:
    # Copy the fixed model header verbatim.
    for line in f_head:
        f.write(line)

    # Enumerate every injected-packet variant: 5 seq offsets x 5 ack offsets
    # x 5 binary TCP flags x spoofed yes/no x 3 payload modes.  `count` tags
    # each generated stanza so Execution() can identify it.
    count = 0
    for seq, ack_seq, ack, rst, syn, urg, psh, spoofed, has_payload in product(
            range(5), range(5), range(2), range(2), range(2),
            range(2), range(2), range(2), range(3)):
        # with data
        f.write("\t\t::c_code{Model0_th = (struct Model0_tcphdr *)(&Model0_Skb_In[packet_dep])->Model0_data; Model0_th->Model0_seq = Model0_htonl(seq_val-2+"+str(seq)+"); Model0_th->Model0_ack_seq = Model0_htonl(ack_val-2+"+str(ack_seq)+"); Model0_th->Model0_ack = " + str(ack) + "; Model0_th->Model0_rst = " + str(rst) + "; Model0_th->Model0_syn = " + str(syn) + "; Model0_th->Model0_urg = " + str(urg) + "; Model0_th->Model0_psh = " + str(psh) + "; Spoofed(" + str(spoofed)+", "+str(has_payload)+", packet_dep); Execution(packet_dep, " + str(count) + "); }; assert( c_expr { Verify() }) \n")
        count = count + 1

    # Copy the fixed model tail verbatim.
    for line in f_tail:
        f.write(line)
| 78.448276
| 622
| 0.552527
| 635
| 4,550
| 3.707087
| 0.124409
| 0.071368
| 0.112999
| 0.042056
| 0.799915
| 0.792268
| 0.742991
| 0.708581
| 0.697536
| 0.654206
| 0
| 0.028229
| 0.229231
| 4,550
| 57
| 623
| 79.824561
| 0.643
| 0.09978
| 0
| 0.1
| 1
| 0.05
| 0.356921
| 0.22084
| 0
| 0
| 0
| 0
| 0.05
| 1
| 0
| false
| 0
| 0.05
| 0
| 0.05
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4de9f36adf25680ae83045f1d135520c47b39437
| 36
|
py
|
Python
|
turbustat/statistics/scf/__init__.py
|
CFD-UTSA/Turbulence-stars
|
354d02e38d15e3b0d1f751b43f430dbd3a14c250
|
[
"MIT"
] | 42
|
2016-04-07T20:49:59.000Z
|
2022-03-28T12:54:13.000Z
|
turbustat/statistics/scf/__init__.py
|
CFD-UTSA/Turbulence-stars
|
354d02e38d15e3b0d1f751b43f430dbd3a14c250
|
[
"MIT"
] | 131
|
2015-03-05T21:42:27.000Z
|
2021-07-22T14:59:04.000Z
|
turbustat/statistics/scf/__init__.py
|
CFD-UTSA/Turbulence-stars
|
354d02e38d15e3b0d1f751b43f430dbd3a14c250
|
[
"MIT"
] | 21
|
2015-06-10T17:10:06.000Z
|
2022-02-28T15:59:42.000Z
|
from .scf import SCF, SCF_Distance
| 18
| 35
| 0.777778
| 6
| 36
| 4.5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 36
| 1
| 36
| 36
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4dec515c45705c49b9b21ea1058a88464142194d
| 30
|
py
|
Python
|
goto_cloud/enums/public.py
|
jdepoix/goto_cloud
|
59bb9923026e1b1dc6e8e08fb6b21300c8e8854a
|
[
"MIT"
] | 2
|
2018-02-04T23:22:17.000Z
|
2019-04-15T12:06:04.000Z
|
goto_cloud/enums/public.py
|
jdepoix/goto_cloud
|
59bb9923026e1b1dc6e8e08fb6b21300c8e8854a
|
[
"MIT"
] | null | null | null |
goto_cloud/enums/public.py
|
jdepoix/goto_cloud
|
59bb9923026e1b1dc6e8e08fb6b21300c8e8854a
|
[
"MIT"
] | null | null | null |
from .enums import StringEnum
| 15
| 29
| 0.833333
| 4
| 30
| 6.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 30
| 1
| 30
| 30
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
1290575784421decb7b5b4000fb15e40b45cf2f7
| 186
|
py
|
Python
|
wagtail_hallo/test/apps.py
|
lb-/wagtail-hallo
|
470c5dbb52abecc1ef303dd0f42ba5f20f423ece
|
[
"MIT"
] | 2
|
2022-03-01T21:24:31.000Z
|
2022-03-03T05:57:31.000Z
|
wagtail_hallo/test/apps.py
|
lb-/wagtail-hallo
|
470c5dbb52abecc1ef303dd0f42ba5f20f423ece
|
[
"MIT"
] | 11
|
2022-02-13T02:02:01.000Z
|
2022-03-27T21:56:33.000Z
|
wagtail_hallo/test/apps.py
|
wagtail/wagtail-hallo
|
470c5dbb52abecc1ef303dd0f42ba5f20f423ece
|
[
"MIT"
] | 1
|
2022-02-05T10:14:48.000Z
|
2022-02-05T10:14:48.000Z
|
from django.apps import AppConfig
class WagtailHalloTestAppConfig(AppConfig):
    """Django app configuration for the wagtail_hallo test app."""

    name = "wagtail_hallo.test"
    label = "wagtail_hallo_test"
    verbose_name = "Wagtail Hallo tests"
| 23.25
| 43
| 0.758065
| 21
| 186
| 6.52381
| 0.666667
| 0.262774
| 0.233577
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 186
| 7
| 44
| 26.571429
| 0.883871
| 0
| 0
| 0
| 0
| 0
| 0.295699
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
12d41679d093e91e5897ca62b8a59a53f8cba7d0
| 76
|
py
|
Python
|
gmail_api_wrapper/version.py
|
qqgg231/gmail-api-wrapper
|
cd7a9b0d3e9ef5f2f059f4ba74377190edf7ebaa
|
[
"MIT"
] | null | null | null |
gmail_api_wrapper/version.py
|
qqgg231/gmail-api-wrapper
|
cd7a9b0d3e9ef5f2f059f4ba74377190edf7ebaa
|
[
"MIT"
] | null | null | null |
gmail_api_wrapper/version.py
|
qqgg231/gmail-api-wrapper
|
cd7a9b0d3e9ef5f2f059f4ba74377190edf7ebaa
|
[
"MIT"
] | null | null | null |
"""Version file."""
__version__ = '0.1.1a2'
version = (0, 1, 1, 'alpha', 2)
| 19
| 31
| 0.565789
| 12
| 76
| 3.25
| 0.583333
| 0.410256
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0.157895
| 76
| 3
| 32
| 25.333333
| 0.484375
| 0.171053
| 0
| 0
| 0
| 0
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
421c050dff5e15c2fd4ce32223b5d90243def375
| 158
|
py
|
Python
|
tests/impl/string_test.py
|
YAmikep/datasource
|
6c8d72bd299aa0a9e2880228f0f39d2b8721b146
|
[
"MIT"
] | 1
|
2018-06-16T11:33:56.000Z
|
2018-06-16T11:33:56.000Z
|
tests/impl/string_test.py
|
YAmikep/datasource
|
6c8d72bd299aa0a9e2880228f0f39d2b8721b146
|
[
"MIT"
] | 1
|
2020-03-24T17:32:45.000Z
|
2020-03-24T17:32:45.000Z
|
tests/impl/string_test.py
|
YAmikep/datasource
|
6c8d72bd299aa0a9e2880228f0f39d2b8721b146
|
[
"MIT"
] | 2
|
2018-06-16T11:37:34.000Z
|
2020-07-30T17:56:54.000Z
|
import unittest
from datasource.impl.string import StringDataSource
class StringDataSourceTests(unittest.TestCase):
    """Placeholder test case for StringDataSource."""

    def test_(self):
        # TODO: add real assertions covering StringDataSource behaviour.
        pass
| 17.555556
| 51
| 0.746835
| 16
| 158
| 7.3125
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.196203
| 158
| 8
| 52
| 19.75
| 0.92126
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.2
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
423b4b14d52a5ca5735070b839a793feb29dbd5d
| 169
|
py
|
Python
|
src/posts/admin.py
|
Jordyvm/Django_portfolio
|
da0c49275b0c3b1ea362e5cf402bef8e86c193e4
|
[
"MIT"
] | null | null | null |
src/posts/admin.py
|
Jordyvm/Django_portfolio
|
da0c49275b0c3b1ea362e5cf402bef8e86c193e4
|
[
"MIT"
] | null | null | null |
src/posts/admin.py
|
Jordyvm/Django_portfolio
|
da0c49275b0c3b1ea362e5cf402bef8e86c193e4
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from .models import Post
from .models import Skill
# Expose the blog models in the Django admin site.
for model in (Post, Skill):
    admin.site.register(model)
| 16.9
| 32
| 0.792899
| 25
| 169
| 5.36
| 0.48
| 0.149254
| 0.238806
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130178
| 169
| 10
| 33
| 16.9
| 0.911565
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
42455b85b4a3d710089bf17f1cec843567fa9b98
| 263
|
py
|
Python
|
fdint/tests/__init__.py
|
jgukelberger/fdint
|
0237323d6fd5d4161190ff7982811d8ae290f89e
|
[
"BSD-3-Clause"
] | 11
|
2015-10-25T18:51:55.000Z
|
2021-02-26T13:05:07.000Z
|
fdint/tests/__init__.py
|
jgukelberger/fdint
|
0237323d6fd5d4161190ff7982811d8ae290f89e
|
[
"BSD-3-Clause"
] | 19
|
2015-04-23T19:41:20.000Z
|
2017-08-01T02:04:04.000Z
|
fdint/tests/__init__.py
|
jgukelberger/fdint
|
0237323d6fd5d4161190ff7982811d8ae290f89e
|
[
"BSD-3-Clause"
] | 10
|
2017-05-31T07:27:16.000Z
|
2021-08-28T15:34:09.000Z
|
# Copyright (c) 2015, Scott J Maddox. All rights reserved.
# Use of this source code is governed by the BSD-3-Clause
# license that can be found in the LICENSE file.
from .test_fd import *
from .test_ifd import *
from .test_gfd import *
from .test_scfd import *
| 29.222222
| 58
| 0.745247
| 46
| 263
| 4.173913
| 0.76087
| 0.166667
| 0.21875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023256
| 0.18251
| 263
| 8
| 59
| 32.875
| 0.869767
| 0.604563
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
427269aeef771d1c3ec5ed6b52d2f6e833332cf2
| 252
|
py
|
Python
|
app/es_gestione_ambulanze/database/tables/ambulanze.py
|
nyxgear/PSD-e-service-pronto-soccorso
|
92eb0586c2cfb12a844a106b71911c80e8e3e57b
|
[
"MIT"
] | null | null | null |
app/es_gestione_ambulanze/database/tables/ambulanze.py
|
nyxgear/PSD-e-service-pronto-soccorso
|
92eb0586c2cfb12a844a106b71911c80e8e3e57b
|
[
"MIT"
] | null | null | null |
app/es_gestione_ambulanze/database/tables/ambulanze.py
|
nyxgear/PSD-e-service-pronto-soccorso
|
92eb0586c2cfb12a844a106b71911c80e8e3e57b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-

# Seed rows for the `ambulanze` table: each entry holds an ambulance id and
# its parked position (latitude/longitude stored as strings).
table = [
    {'id': 500, 'posizione_lat': '45.467269', 'posizione_long': '9.214908'},
    {'id': 501, 'posizione_lat': '45.481383', 'posizione_long': '9.169981'},
]
| 16.8
| 37
| 0.43254
| 24
| 252
| 4.375
| 0.666667
| 0.228571
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.228395
| 0.357143
| 252
| 14
| 38
| 18
| 0.419753
| 0.083333
| 0
| 0
| 0
| 0
| 0.401747
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
35f4c9270c15adc7343b59efea74470f9699511e
| 185
|
py
|
Python
|
Code/0.ejercicios_extra/dic_comprehension.py
|
davidMartinVergues/PYTHON
|
dd39d3aabfc43b3cb09aadb2919e51d03364117d
|
[
"DOC"
] | null | null | null |
Code/0.ejercicios_extra/dic_comprehension.py
|
davidMartinVergues/PYTHON
|
dd39d3aabfc43b3cb09aadb2919e51d03364117d
|
[
"DOC"
] | null | null | null |
Code/0.ejercicios_extra/dic_comprehension.py
|
davidMartinVergues/PYTHON
|
dd39d3aabfc43b3cb09aadb2919e51d03364117d
|
[
"DOC"
] | null | null | null |
import math
def run():
    """Print the square-root dictionary for the integers below 10."""
    squares = dic_generator(10)
    print(squares)
def dic_generator(num):
    """Map each integer in [1, num) to its square root rounded to 5 places.

    Note: `num` itself is excluded (range upper bound is exclusive).
    """
    return {value: round(math.sqrt(value), 5) for value in range(1, num)}
if __name__ == "__main__":
run()
| 13.214286
| 64
| 0.643243
| 29
| 185
| 3.758621
| 0.689655
| 0.220183
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027027
| 0.2
| 185
| 14
| 65
| 13.214286
| 0.709459
| 0
| 0
| 0
| 0
| 0
| 0.043243
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0.142857
| 0.571429
| 0.142857
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
c41730c6c78129a57c94a511d64b0105145e33cf
| 104
|
py
|
Python
|
installer/hooks/hook-wx.py
|
chrisidefix/devide
|
99bfe156e710fa47ba7ae88b0ce1eef592a3a439
|
[
"BSD-3-Clause"
] | 25
|
2015-08-24T16:05:14.000Z
|
2020-12-09T20:07:14.000Z
|
installer/hooks/hook-wx.py
|
chrisidefix/devide
|
99bfe156e710fa47ba7ae88b0ce1eef592a3a439
|
[
"BSD-3-Clause"
] | 1
|
2016-02-16T21:18:10.000Z
|
2016-02-16T21:18:10.000Z
|
installer/hooks/hook-wx.py
|
chrisidefix/devide
|
99bfe156e710fa47ba7ae88b0ce1eef592a3a439
|
[
"BSD-3-Clause"
] | 5
|
2016-02-16T20:05:37.000Z
|
2020-01-31T11:27:39.000Z
|
# PyInstaller hook: force-include wx submodules that are imported dynamically.
hiddenimports = ['wx.aui', 'wx.lib.mixins']
# Fix: the original used Python-2 `print` statements, a SyntaxError under
# Python 3; the call form below prints the same output on both versions.
print("[*] hook-wx.py - HIDDENIMPORTS")
print(hiddenimports)
| 20.8
| 43
| 0.701923
| 13
| 104
| 5.615385
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 104
| 4
| 44
| 26
| 0.793478
| 0
| 0
| 0
| 0
| 0
| 0.471154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 1
| null | null | 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
c42547337f7451d45d3b17a0f9c5790a8bad3038
| 95
|
py
|
Python
|
workspace/testdata/tf-test-python.py
|
jarobey/terraform-provider-databricks
|
04e7061ffe3b9728fba6bb6027ba4d888460ad64
|
[
"Apache-2.0"
] | 183
|
2020-06-17T23:18:36.000Z
|
2022-03-23T08:08:26.000Z
|
workspace/testdata/tf-test-python.py
|
jarobey/terraform-provider-databricks
|
04e7061ffe3b9728fba6bb6027ba4d888460ad64
|
[
"Apache-2.0"
] | 1,098
|
2020-06-15T21:17:42.000Z
|
2022-03-31T11:55:59.000Z
|
workspace/testdata/tf-test-python.py
|
jarobey/terraform-provider-databricks
|
04e7061ffe3b9728fba6bb6027ba4d888460ad64
|
[
"Apache-2.0"
] | 140
|
2020-06-18T10:20:19.000Z
|
2022-03-25T01:09:16.000Z
|
# Databricks notebook source
print("hello world")
# COMMAND ----------
# NOTE(review): the marker above appears to be a Databricks notebook cell
# separator (consistent with the notebook-source header) — keep it intact.
print("hello world2")
| 13.571429
| 28
| 0.652632
| 10
| 95
| 6.2
| 0.8
| 0.322581
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012195
| 0.136842
| 95
| 6
| 29
| 15.833333
| 0.743902
| 0.473684
| 0
| 0
| 0
| 0
| 0.489362
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.