Dataset schema (column name and dtype):

| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
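The fraction-style `qsc_*_quality_signal` columns read naturally from their names. The sketch below is a plain-reading Python implementation of three of them, not the pipeline's reference code: the exact tokenization and normalization behind the recorded values are not shown in this preview, so every detail here (whitespace word splitting, how characters are counted) is an assumption.

```python
# Illustrative sketch only: plain-reading implementations of a few of the
# fraction-style quality signals named in the schema above. The dataset's
# actual tokenization rules are assumptions here.
from collections import Counter

def frac_words_unique(text: str) -> float:
    # Share of distinct whitespace-delimited words among all words.
    words = text.split()
    return len(set(words)) / len(words) if words else 0.0

def frac_chars_top_ngram(text: str, n: int) -> float:
    # Fraction of all characters covered by the single most frequent
    # word n-gram (occurrences * n-gram length / total characters).
    words = text.split()
    grams = Counter(" ".join(words[i:i + n]) for i in range(len(words) - n + 1))
    if not grams or not text:
        return 0.0
    gram, count = grams.most_common(1)[0]
    return count * len(gram) / len(text)

def frac_chars_whitespace(text: str) -> float:
    # Fraction of characters that are whitespace.
    return sum(ch.isspace() for ch in text) / len(text) if text else 0.0
```

The recorded `qsc_code_num_words_quality_signal` values suggest the pipeline's tokenizer differs somewhat from a bare `split()`, which is why this is labeled a sketch rather than the reference implementation.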
Sample rows:

Row 1:

| field | value |
|---|---|
| hexsha | 46319fe06fdcde470c38c4ab3bc771fc7eb81445 |
| size | 328 |
| ext | py |
| lang | Python |
| max_stars_repo_path | Validation/EcalDigis/python/ecalUnsuppressedMixingModuleValidation_cff.py |
| max_stars_repo_name | ckamtsikis/cmssw |
| max_stars_repo_head_hexsha | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | 852 |
| max_stars_repo_stars_event_min_datetime | 2015-01-11T21:03:51.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-03-25T21:14:00.000Z |
| max_issues_repo_path | Validation/EcalDigis/python/ecalUnsuppressedMixingModuleValidation_cff.py |
| max_issues_repo_name | ckamtsikis/cmssw |
| max_issues_repo_head_hexsha | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | 30,371 |
| max_issues_repo_issues_event_min_datetime | 2015-01-02T00:14:40.000Z |
| max_issues_repo_issues_event_max_datetime | 2022-03-31T23:26:05.000Z |
| max_forks_repo_path | Validation/EcalDigis/python/ecalUnsuppressedMixingModuleValidation_cff.py |
| max_forks_repo_name | ckamtsikis/cmssw |
| max_forks_repo_head_hexsha | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | 3,240 |
| max_forks_repo_forks_event_min_datetime | 2015-01-02T05:53:18.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-03-31T17:24:21.000Z |

content:

```python
import FWCore.ParameterSet.Config as cms
from Validation.EcalDigis.ecalMixingModuleValidation_cfi import *
ecalMixingModuleValidation.EBdigiCollection = 'simEcalUnsuppressedDigis'
ecalMixingModuleValidation.EEdigiCollection = 'simEcalUnsuppressedDigis'
ecalMixingModuleValidation.ESdigiCollection = 'simEcalUnsuppressedDigis'
```

| field | value |
|---|---|
| avg_line_length | 41 |
| max_line_length | 72 |
| alphanum_fraction | 0.890244 |
| qsc_code_num_words_quality_signal | 21 |
| qsc_code_num_chars_quality_signal | 328 |
| qsc_code_mean_word_length_quality_signal | 13.857143 |
| qsc_code_frac_words_unique_quality_signal | 0.714286 |
| qsc_code_frac_chars_top_2grams_quality_signal | 0.343643 |
| qsc_code_frac_chars_top_3grams_quality_signal | 0 |
| qsc_code_frac_chars_top_4grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | 0 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | 0 |
| qsc_code_frac_chars_digital_quality_signal | 0 |
| qsc_code_frac_chars_whitespace_quality_signal | 0.057927 |
| qsc_code_size_file_byte_quality_signal | 328 |
| qsc_code_num_lines_quality_signal | 7 |
| qsc_code_num_chars_line_max_quality_signal | 73 |
| qsc_code_num_chars_line_mean_quality_signal | 46.857143 |
| qsc_code_frac_chars_alphabet_quality_signal | 0.941748 |
| qsc_code_frac_chars_comments_quality_signal | 0 |
| qsc_code_cate_xml_start_quality_signal | 0 |
| qsc_code_frac_lines_dupe_lines_quality_signal | 0 |
| qsc_code_cate_autogen_quality_signal | 0 |
| qsc_code_frac_lines_long_string_quality_signal | 0 |
| qsc_code_frac_chars_string_length_quality_signal | 0.220183 |
| qsc_code_frac_chars_long_word_length_quality_signal | 0.220183 |
| qsc_code_frac_lines_string_concat_quality_signal | 0 |
| qsc_code_cate_encoded_data_quality_signal | 0 |
| qsc_code_frac_chars_hex_words_quality_signal | 0 |
| qsc_code_frac_lines_prompt_comments_quality_signal | 0 |
| qsc_code_frac_lines_assert_quality_signal | 0 |
| qsc_codepython_cate_ast_quality_signal | 1 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | 0 |
| qsc_codepython_cate_var_zero_quality_signal | true |
| qsc_codepython_frac_lines_pass_quality_signal | 0 |
| qsc_codepython_frac_lines_import_quality_signal | 0.4 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | 0 |
| qsc_codepython_score_lines_no_logic_quality_signal | 0.4 |
| qsc_codepython_frac_lines_print_quality_signal | 0 |
| qsc_code_num_words | 1 |
| qsc_code_num_chars | 0 |
| qsc_code_mean_word_length | 1 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | 1 |
| qsc_code_frac_chars_top_3grams | 0 |
| qsc_code_frac_chars_top_4grams | 0 |
| qsc_code_frac_chars_dupe_5grams | 0 |
| qsc_code_frac_chars_dupe_6grams | 0 |
| qsc_code_frac_chars_dupe_7grams | 0 |
| qsc_code_frac_chars_dupe_8grams | 0 |
| qsc_code_frac_chars_dupe_9grams | 0 |
| qsc_code_frac_chars_dupe_10grams | 0 |
| qsc_code_frac_chars_replacement_symbols | 0 |
| qsc_code_frac_chars_digital | 0 |
| qsc_code_frac_chars_whitespace | 0 |
| qsc_code_size_file_byte | 0 |
| qsc_code_num_lines | 1 |
| qsc_code_num_chars_line_max | 0 |
| qsc_code_num_chars_line_mean | 0 |
| qsc_code_frac_chars_alphabet | 0 |
| qsc_code_frac_chars_comments | 0 |
| qsc_code_cate_xml_start | 0 |
| qsc_code_frac_lines_dupe_lines | 0 |
| qsc_code_cate_autogen | 0 |
| qsc_code_frac_lines_long_string | 0 |
| qsc_code_frac_chars_string_length | 0 |
| qsc_code_frac_chars_long_word_length | 0 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | 0 |
| qsc_code_frac_chars_hex_words | 0 |
| qsc_code_frac_lines_prompt_comments | 0 |
| qsc_code_frac_lines_assert | 0 |
| qsc_codepython_cate_ast | 0 |
| qsc_codepython_frac_lines_func_ratio | 0 |
| qsc_codepython_cate_var_zero | 1 |
| qsc_codepython_frac_lines_pass | 0 |
| qsc_codepython_frac_lines_import | 1 |
| qsc_codepython_frac_lines_simplefunc | 0 |
| qsc_codepython_score_lines_no_logic | 0 |
| qsc_codepython_frac_lines_print | 0 |
| effective | 0 |
| hits | 6 |
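Read together, the two blocks of `qsc_*` columns suggest a two-stage layout: the `*_quality_signal` columns carry raw measurements, while their suffix-free twins act as 0/1 filter flags. In every complete row of this preview, `hits` equals the number of flags set to 1. The pandas sketch below works from that reading; the Parquet path is hypothetical and the flag interpretation is inferred from these rows, not documented.

```python
# Sketch under assumptions: the flag reading below is inferred from the
# rows in this preview, and the file path is hypothetical.
import pandas as pd

df = pd.read_parquet("code_quality_sample.parquet")

# Suffix-free qsc_* columns look like nullable 0/1 filter flags, while the
# *_quality_signal columns hold the raw measurements they derive from.
flag_cols = [c for c in df.columns
             if c.startswith("qsc_") and not c.endswith("_quality_signal")]

# In the rows shown here, `hits` equals the number of tripped flags.
assert (df[flag_cols].fillna(0).sum(axis=1) == df["hits"].astype(int)).all()

# Keep only files with few tripped heuristics.
clean = df[df["hits"] <= 6]
print(f"{len(clean)} of {len(df)} files pass the filter")
```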
Row 2:

| field | value |
|---|---|
| hexsha | 463397c3271e9fb2ca65bd63b7e12d66fb684309 |
| size | 304 |
| ext | py |
| lang | Python |
| max_stars_repo_path | phidl/__init__.py |
| max_stars_repo_name | giumc/phidl |
| max_stars_repo_head_hexsha | 396030ec78c8e9d12634d2fec5ad7bf21bacdb89 |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | 102 |
| max_stars_repo_stars_event_min_datetime | 2017-06-05T11:42:12.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-03-23T17:39:27.000Z |
| max_issues_repo_path | phidl/__init__.py |
| max_issues_repo_name | giumc/phidl |
| max_issues_repo_head_hexsha | 396030ec78c8e9d12634d2fec5ad7bf21bacdb89 |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | 66 |
| max_issues_repo_issues_event_min_datetime | 2018-07-19T13:59:59.000Z |
| max_issues_repo_issues_event_max_datetime | 2022-03-16T02:23:13.000Z |
| max_forks_repo_path | phidl/__init__.py |
| max_forks_repo_name | giumc/phidl |
| max_forks_repo_head_hexsha | 396030ec78c8e9d12634d2fec5ad7bf21bacdb89 |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | 45 |
| max_forks_repo_forks_event_min_datetime | 2018-03-27T23:19:50.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-03-29T08:00:39.000Z |

content:

```python
from phidl.device_layout import Device, Group, Path, CrossSection, Port, Layer, LayerSet
from phidl.device_layout import make_device
from phidl.quickplotter import quickplot, quickplot2, set_quickplot_options
from phidl.device_layout import __version__, reset
from phidl.geometry import device_lru_cache
```

| field | value |
|---|---|
| avg_line_length | 50.666667 |
| max_line_length | 88 |
| alphanum_fraction | 0.858553 |
| qsc_code_num_words_quality_signal | 42 |
| qsc_code_num_chars_quality_signal | 304 |
| qsc_code_mean_word_length_quality_signal | 5.928571 |
| qsc_code_frac_words_unique_quality_signal | 0.52381 |
| qsc_code_frac_chars_top_2grams_quality_signal | 0.180723 |
| qsc_code_frac_chars_top_3grams_quality_signal | 0.180723 |
| qsc_code_frac_chars_top_4grams_quality_signal | 0.253012 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | 0.325301 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | 0 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | 0 |
| qsc_code_frac_chars_digital_quality_signal | 0.003636 |
| qsc_code_frac_chars_whitespace_quality_signal | 0.095395 |
| qsc_code_size_file_byte_quality_signal | 304 |
| qsc_code_num_lines_quality_signal | 5 |
| qsc_code_num_chars_line_max_quality_signal | 89 |
| qsc_code_num_chars_line_mean_quality_signal | 60.8 |
| qsc_code_frac_chars_alphabet_quality_signal | 0.901818 |
| qsc_code_frac_chars_comments_quality_signal | 0 |
| qsc_code_cate_xml_start_quality_signal | 0 |
| qsc_code_frac_lines_dupe_lines_quality_signal | 0 |
| qsc_code_cate_autogen_quality_signal | 0 |
| qsc_code_frac_lines_long_string_quality_signal | 0 |
| qsc_code_frac_chars_string_length_quality_signal | 0 |
| qsc_code_frac_chars_long_word_length_quality_signal | 0 |
| qsc_code_frac_lines_string_concat_quality_signal | 0 |
| qsc_code_cate_encoded_data_quality_signal | 0 |
| qsc_code_frac_chars_hex_words_quality_signal | 0 |
| qsc_code_frac_lines_prompt_comments_quality_signal | 0 |
| qsc_code_frac_lines_assert_quality_signal | 0 |
| qsc_codepython_cate_ast_quality_signal | 1 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | 0 |
| qsc_codepython_cate_var_zero_quality_signal | true |
| qsc_codepython_frac_lines_pass_quality_signal | 0 |
| qsc_codepython_frac_lines_import_quality_signal | 1 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | 0 |
| qsc_codepython_score_lines_no_logic_quality_signal | 1 |
| qsc_codepython_frac_lines_print_quality_signal | 0 |
| qsc_code_num_words | 0 |
| qsc_code_num_chars | 0 |
| qsc_code_mean_word_length | 0 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | 0 |
| qsc_code_frac_chars_top_3grams | 1 |
| qsc_code_frac_chars_top_4grams | 1 |
| qsc_code_frac_chars_dupe_5grams | 0 |
| qsc_code_frac_chars_dupe_6grams | 0 |
| qsc_code_frac_chars_dupe_7grams | 0 |
| qsc_code_frac_chars_dupe_8grams | 0 |
| qsc_code_frac_chars_dupe_9grams | 0 |
| qsc_code_frac_chars_dupe_10grams | 0 |
| qsc_code_frac_chars_replacement_symbols | 0 |
| qsc_code_frac_chars_digital | 0 |
| qsc_code_frac_chars_whitespace | 0 |
| qsc_code_size_file_byte | 0 |
| qsc_code_num_lines | 1 |
| qsc_code_num_chars_line_max | 0 |
| qsc_code_num_chars_line_mean | 0 |
| qsc_code_frac_chars_alphabet | 0 |
| qsc_code_frac_chars_comments | 0 |
| qsc_code_cate_xml_start | 0 |
| qsc_code_frac_lines_dupe_lines | 0 |
| qsc_code_cate_autogen | 0 |
| qsc_code_frac_lines_long_string | 0 |
| qsc_code_frac_chars_string_length | 0 |
| qsc_code_frac_chars_long_word_length | 0 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | 0 |
| qsc_code_frac_chars_hex_words | 0 |
| qsc_code_frac_lines_prompt_comments | 0 |
| qsc_code_frac_lines_assert | 0 |
| qsc_codepython_cate_ast | 0 |
| qsc_codepython_frac_lines_func_ratio | 0 |
| qsc_codepython_cate_var_zero | 1 |
| qsc_codepython_frac_lines_pass | 0 |
| qsc_codepython_frac_lines_import | 1 |
| qsc_codepython_frac_lines_simplefunc | 0 |
| qsc_codepython_score_lines_no_logic | 1 |
| qsc_codepython_frac_lines_print | 0 |
| effective | 0 |
| hits | 6 |
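The line-fraction signals check out against the two contents shown so far: row 1's file has 2 import lines out of 5 (`qsc_codepython_frac_lines_import_quality_signal` = 0.4), and row 2's `phidl/__init__.py` is imports only (= 1). A plain-reading sketch of such a line classifier follows, with the caveat that the pipeline's real rules (blank-line handling, multi-line statements) are assumptions here:

```python
# Plain-reading sketch of the line-fraction signals. It reproduces the 0.4
# and 1.0 values of the two rows above, but the pipeline's exact line
# classification rules are assumed, not documented in this preview.
def frac_lines_import(source: str) -> float:
    # Fraction of non-empty lines that are import statements.
    lines = [ln.strip() for ln in source.splitlines() if ln.strip()]
    if not lines:
        return 0.0
    return sum(ln.startswith(("import ", "from ")) for ln in lines) / len(lines)

def frac_lines_print(source: str) -> float:
    # Fraction of non-empty lines that are bare print() calls.
    lines = [ln.strip() for ln in source.splitlines() if ln.strip()]
    if not lines:
        return 0.0
    return sum(ln.startswith("print(") for ln in lines) / len(lines)
```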
Row 3:

| field | value |
|---|---|
| hexsha | 464563d008ed73070e6bbe17ebf32079f8948051 |
| size | 4,931 |
| ext | py |
| lang | Python |
| max_stars_repo_path | tests/core/templatetags/test_navigation.py |
| max_stars_repo_name | tyrasd/osmaxx |
| max_stars_repo_head_hexsha | da4454083d17b2ef8b0623cad62e39992b6bd52a |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | 27 |
| max_stars_repo_stars_event_min_datetime | 2015-03-30T14:17:26.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-02-19T17:30:44.000Z |
| max_issues_repo_path | tests/core/templatetags/test_navigation.py |
| max_issues_repo_name | tyrasd/osmaxx |
| max_issues_repo_head_hexsha | da4454083d17b2ef8b0623cad62e39992b6bd52a |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | 483 |
| max_issues_repo_issues_event_min_datetime | 2015-03-09T16:58:03.000Z |
| max_issues_repo_issues_event_max_datetime | 2022-03-14T09:29:06.000Z |
| max_forks_repo_path | tests/core/templatetags/test_navigation.py |
| max_forks_repo_name | tyrasd/osmaxx |
| max_forks_repo_head_hexsha | da4454083d17b2ef8b0623cad62e39992b6bd52a |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | 6 |
| max_forks_repo_forks_event_min_datetime | 2015-04-07T07:38:30.000Z |
| max_forks_repo_forks_event_max_datetime | 2020-04-01T12:45:53.000Z |

content:

```python
import logging
import pytest
from django.test import override_settings
from osmaxx.core.templatetags.navigation import siteabsoluteurl, logger
@override_settings(
OSMAXX=dict(
)
)
def test_siteabsoluteurl_without_secured_proxy_adds_scheme_and_netloc_and_path_prefix(rf, log_warning_mock):
relative_url = 'foo/bar'
request = rf.get('/another/path')
assert siteabsoluteurl(relative_url, request) == 'http://testserver/another/foo/bar'
log_warning_mock.assert_not_called()
@override_settings(
OSMAXX=dict(
)
)
def test_siteabsoluteurl_without_secured_proxy_adds_scheme_and_netloc(rf, log_warning_mock):
netloc_relative_url = '/foo/bar'
request = rf.get('/another/path')
assert siteabsoluteurl(netloc_relative_url, request) == 'http://testserver/foo/bar'
log_warning_mock.assert_not_called()
@override_settings(
OSMAXX=dict(
)
)
def test_siteabsoluteurl_without_secured_proxy_adds_scheme(rf, log_warning_mock):
scheme_relative_url = '//example.com/foo/bar'
request = rf.get('/another/path')
assert siteabsoluteurl(scheme_relative_url, request) == 'http://example.com/foo/bar'
log_warning_mock.assert_not_called()
@override_settings(
OSMAXX=dict(
)
)
def test_siteabsoluteurl_without_secured_proxy_returns_absolute_http_urls_unchanged(rf, log_warning_mock):
absolute_url = 'http://example.com/foo/bar'
request = rf.get('/another/path')
assert siteabsoluteurl(absolute_url, request) == 'http://example.com/foo/bar'
log_warning_mock.assert_not_called()
@override_settings(
OSMAXX=dict(
)
)
def test_siteabsoluteurl_without_secured_proxy_returns_absolute_https_urls_unchanged(rf, log_warning_mock):
absolute_url = 'https://example.com/foo/bar'
request = rf.get('/another/path')
assert siteabsoluteurl(absolute_url, request) == 'https://example.com/foo/bar'
log_warning_mock.assert_not_called()
@override_settings(
OSMAXX=dict(
)
)
def test_siteabsoluteurl_without_secured_proxy_returns_absolute_nonhttp_urls_unchanged(rf, log_warning_mock):
absolute_url = 'ftp://example.com/foo/bar'
request = rf.get('/another/path')
assert siteabsoluteurl(absolute_url, request) == 'ftp://example.com/foo/bar'
log_warning_mock.assert_not_called()
@override_settings(
OSMAXX=dict(
SECURED_PROXY=True,
)
)
def test_siteabsoluteurl_when_secured_proxy_in_use_adds_https_and_netloc_and_path_prefix(rf, log_warning_mock):
relative_url = 'foo/bar'
request = rf.get('/another/path')
assert siteabsoluteurl(relative_url, request) == 'https://testserver/another/foo/bar'
log_warning_mock.assert_not_called()
@override_settings(
OSMAXX=dict(
SECURED_PROXY=True,
)
)
def test_siteabsoluteurl_when_secured_proxy_in_use_adds_https_and_netloc(rf, log_warning_mock):
netloc_relative_url = '/foo/bar'
request = rf.get('/another/path')
assert siteabsoluteurl(netloc_relative_url, request) == 'https://testserver/foo/bar'
log_warning_mock.assert_not_called()
@override_settings(
OSMAXX=dict(
SECURED_PROXY=True,
)
)
def test_siteabsoluteurl_when_secured_proxy_in_use_adds_https(rf, log_warning_mock):
scheme_relative_url = '//example.com/foo/bar'
request = rf.get('/another/path')
assert siteabsoluteurl(scheme_relative_url, request) == 'https://example.com/foo/bar'
log_warning_mock.assert_not_called()
@override_settings(
OSMAXX=dict(
SECURED_PROXY=True,
)
)
def test_siteabsoluteurl_when_secured_proxy_in_use_returns_absolute_http_urls_converted_to_https(rf, log_warning_mock):
absolute_url = 'http://example.com/foo/bar'
request = rf.get('/another/path')
assert siteabsoluteurl(absolute_url, request) == 'https://example.com/foo/bar'
log_warning_mock.assert_not_called()
@override_settings(
OSMAXX=dict(
SECURED_PROXY=True,
)
)
def test_siteabsoluteurl_when_secured_proxy_in_use_returns_absolute_https_urls_unchanged(rf, log_warning_mock):
absolute_url = 'https://example.com/foo/bar'
request = rf.get('/another/path')
assert siteabsoluteurl(absolute_url, request) == 'https://example.com/foo/bar'
log_warning_mock.assert_not_called()
@override_settings(
OSMAXX=dict(
SECURED_PROXY=True,
)
)
def test_siteabsoluteurl_when_secured_proxy_in_use_returns_absolute_nonhttp_urls_unchanged(rf, log_warning_mock):
absolute_url = 'ftp://example.com/foo/bar'
request = rf.get('/another/path')
assert siteabsoluteurl(absolute_url, request) == 'ftp://example.com/foo/bar'
log_warning_mock.assert_called_with(
"ftp://example.com/foo/bar has not been converted to HTTPS, because it isn't an HTTP URL.")
@pytest.yield_fixture
def log_warning_mock(mocker):
original_level = logger.level
logger.setLevel(logging.WARNING)
yield mocker.patch.object(logger, 'warning')
logger.setLevel(original_level)
```

| field | value |
|---|---|
| avg_line_length | 32.019481 |
| max_line_length | 119 |
| alphanum_fraction | 0.756236 |
| qsc_code_num_words_quality_signal | 646 |
| qsc_code_num_chars_quality_signal | 4,931 |
| qsc_code_mean_word_length_quality_signal | 5.391641 |
| qsc_code_frac_words_unique_quality_signal | 0.111455 |
| qsc_code_frac_chars_top_2grams_quality_signal | 0.071777 |
| qsc_code_frac_chars_top_3grams_quality_signal | 0.100488 |
| qsc_code_frac_chars_top_4grams_quality_signal | 0.078094 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | 0.896641 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | 0.884582 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | 0.884582 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | 0.884582 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | 0.88085 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | 0.88085 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | 0 |
| qsc_code_frac_chars_digital_quality_signal | 0 |
| qsc_code_frac_chars_whitespace_quality_signal | 0.133644 |
| qsc_code_size_file_byte_quality_signal | 4,931 |
| qsc_code_num_lines_quality_signal | 153 |
| qsc_code_num_chars_line_max_quality_signal | 120 |
| qsc_code_num_chars_line_mean_quality_signal | 32.228758 |
| qsc_code_frac_chars_alphabet_quality_signal | 0.815309 |
| qsc_code_frac_chars_comments_quality_signal | 0 |
| qsc_code_cate_xml_start_quality_signal | 0 |
| qsc_code_frac_lines_dupe_lines_quality_signal | 0.56 |
| qsc_code_cate_autogen_quality_signal | 0 |
| qsc_code_frac_lines_long_string_quality_signal | 0.008 |
| qsc_code_frac_chars_string_length_quality_signal | 0.163658 |
| qsc_code_frac_chars_long_word_length_quality_signal | 0.033867 |
| qsc_code_frac_lines_string_concat_quality_signal | 0 |
| qsc_code_cate_encoded_data_quality_signal | 0 |
| qsc_code_frac_chars_hex_words_quality_signal | 0 |
| qsc_code_frac_lines_prompt_comments_quality_signal | 0 |
| qsc_code_frac_lines_assert_quality_signal | 0.192 |
| qsc_codepython_cate_ast_quality_signal | 1 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | 0.104 |
| qsc_codepython_cate_var_zero_quality_signal | false |
| qsc_codepython_frac_lines_pass_quality_signal | 0 |
| qsc_codepython_frac_lines_import_quality_signal | 0.032 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | 0 |
| qsc_codepython_score_lines_no_logic_quality_signal | 0.136 |
| qsc_codepython_frac_lines_print_quality_signal | 0 |
| qsc_code_num_words | 0 |
| qsc_code_num_chars | 0 |
| qsc_code_mean_word_length | 0 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | 0 |
| qsc_code_frac_chars_top_3grams | 0 |
| qsc_code_frac_chars_top_4grams | 0 |
| qsc_code_frac_chars_dupe_5grams | 1 |
| qsc_code_frac_chars_dupe_6grams | 1 |
| qsc_code_frac_chars_dupe_7grams | 1 |
| qsc_code_frac_chars_dupe_8grams | 1 |
| qsc_code_frac_chars_dupe_9grams | 1 |
| qsc_code_frac_chars_dupe_10grams | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 |
| qsc_code_frac_chars_digital | 0 |
| qsc_code_frac_chars_whitespace | 0 |
| qsc_code_size_file_byte | 0 |
| qsc_code_num_lines | 0 |
| qsc_code_num_chars_line_max | 0 |
| qsc_code_num_chars_line_mean | 0 |
| qsc_code_frac_chars_alphabet | 0 |
| qsc_code_frac_chars_comments | 0 |
| qsc_code_cate_xml_start | 0 |
| qsc_code_frac_lines_dupe_lines | 0 |
| qsc_code_cate_autogen | 0 |
| qsc_code_frac_lines_long_string | 0 |
| qsc_code_frac_chars_string_length | 0 |
| qsc_code_frac_chars_long_word_length | 0 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | 0 |
| qsc_code_frac_chars_hex_words | 0 |
| qsc_code_frac_lines_prompt_comments | 0 |
| qsc_code_frac_lines_assert | 0 |
| qsc_codepython_cate_ast | 0 |
| qsc_codepython_frac_lines_func_ratio | 0 |
| qsc_codepython_cate_var_zero | 0 |
| qsc_codepython_frac_lines_pass | 0 |
| qsc_codepython_frac_lines_import | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 |
| qsc_codepython_score_lines_no_logic | 0 |
| qsc_codepython_frac_lines_print | 0 |
| effective | 0 |
| hits | 6 |
Row 4:

| field | value |
|---|---|
| hexsha | 46558c2071d018f8bafafc4b81999ab6efada20a |
| size | 2,218 |
| ext | py |
| lang | Python |
| max_stars_repo_path | quarkc/test/ffi/expected/py/org_example_foo/org_example_foo_md/__init__.py |
| max_stars_repo_name | datawire/quark |
| max_stars_repo_head_hexsha | df0058a148b077c0aff535eb6ee382605c556273 |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | 112 |
| max_stars_repo_stars_event_min_datetime | 2015-10-02T19:51:51.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-03-07T06:29:44.000Z |
| max_issues_repo_path | quarkc/test/ffi/expected/py/org_example_foo/org_example_foo_md/__init__.py |
| max_issues_repo_name | datawire/quark |
| max_issues_repo_head_hexsha | df0058a148b077c0aff535eb6ee382605c556273 |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | 181 |
| max_issues_repo_issues_event_min_datetime | 2015-10-01T20:23:38.000Z |
| max_issues_repo_issues_event_max_datetime | 2016-12-07T17:21:26.000Z |
| max_forks_repo_path | quarkc/test/ffi/expected/py/org_example_foo/org_example_foo_md/__init__.py |
| max_forks_repo_name | datawire/quark |
| max_forks_repo_head_hexsha | df0058a148b077c0aff535eb6ee382605c556273 |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | 31 |
| max_forks_repo_forks_event_min_datetime | 2015-10-13T22:10:00.000Z |
| max_forks_repo_forks_event_max_datetime | 2020-08-03T02:50:12.000Z |

content:

```python
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from builtins import str as unicode
from quark_runtime import *
_lazyImport.plug("org_example_foo_md.org_example_foo_Foo_test_Method")
import quark.reflect
class org_example_foo_Foo_test_Method(quark.reflect.Method):
def _init(self):
quark.reflect.Method._init(self)
def __init__(self):
super(org_example_foo_Foo_test_Method, self).__init__(u"quark.void", u"test", _List([]));
def invoke(self, object, args):
obj = _cast(object, lambda: org.example.foo.Foo);
(obj).test();
return None
def _getClass(self):
return _cast(None, lambda: unicode)
def _getField(self, name):
return None
def _setField(self, name, value):
pass
class org_example_foo_Foo(quark.reflect.Class):
def _init(self):
quark.reflect.Class._init(self)
def __init__(self):
super(org_example_foo_Foo, self).__init__(u"org.example.foo.Foo");
(self).name = u"org.example.foo.Foo"
(self).parameters = _List([])
(self).fields = _List([])
(self).methods = _List([org_example_foo_Foo_test_Method()])
(self).parents = _List([u"quark.Object"])
def construct(self, args):
return org.example.foo.Foo()
def isAbstract(self):
return False
def _getClass(self):
return _cast(None, lambda: unicode)
def _getField(self, name):
return None
def _setField(self, name, value):
pass
org_example_foo_Foo.singleton = org_example_foo_Foo()
class Root(_QObject):
def _init(self):
pass
def __init__(self): self._init()
def _getClass(self):
return _cast(None, lambda: unicode)
def _getField(self, name):
return None
def _setField(self, name, value):
pass
Root.org_example_foo_Foo_md = org_example_foo_Foo.singleton
def _lazy_import_org_example_foo():
import org.example.foo
globals().update(locals())
_lazyImport("import org.example.foo", _lazy_import_org_example_foo)
_lazyImport.pump("org_example_foo_md.org_example_foo_Foo_test_Method")
```

| field | value |
|---|---|
| avg_line_length | 27.04878 |
| max_line_length | 97 |
| alphanum_fraction | 0.68936 |
| qsc_code_num_words_quality_signal | 297 |
| qsc_code_num_chars_quality_signal | 2,218 |
| qsc_code_mean_word_length_quality_signal | 4.717172 |
| qsc_code_frac_words_unique_quality_signal | 0.20202 |
| qsc_code_frac_chars_top_2grams_quality_signal | 0.149893 |
| qsc_code_frac_chars_top_3grams_quality_signal | 0.194861 |
| qsc_code_frac_chars_top_4grams_quality_signal | 0.171306 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | 0.533904 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | 0.422555 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | 0.359029 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | 0.327623 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | 0.327623 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | 0.327623 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | 0 |
| qsc_code_frac_chars_digital_quality_signal | 0 |
| qsc_code_frac_chars_whitespace_quality_signal | 0.202435 |
| qsc_code_size_file_byte_quality_signal | 2,218 |
| qsc_code_num_lines_quality_signal | 81 |
| qsc_code_num_chars_line_max_quality_signal | 98 |
| qsc_code_num_chars_line_mean_quality_signal | 27.382716 |
| qsc_code_frac_chars_alphabet_quality_signal | 0.791973 |
| qsc_code_frac_chars_comments_quality_signal | 0 |
| qsc_code_cate_xml_start_quality_signal | 0 |
| qsc_code_frac_lines_dupe_lines_quality_signal | 0.416667 |
| qsc_code_cate_autogen_quality_signal | 0 |
| qsc_code_frac_lines_long_string_quality_signal | 0 |
| qsc_code_frac_chars_string_length_quality_signal | 0.083859 |
| qsc_code_frac_chars_long_word_length_quality_signal | 0.045086 |
| qsc_code_frac_lines_string_concat_quality_signal | 0 |
| qsc_code_cate_encoded_data_quality_signal | 0 |
| qsc_code_frac_chars_hex_words_quality_signal | 0 |
| qsc_code_frac_lines_prompt_comments_quality_signal | 0 |
| qsc_code_frac_lines_assert_quality_signal | 0 |
| qsc_codepython_cate_ast_quality_signal | 1 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | 0.316667 |
| qsc_codepython_cate_var_zero_quality_signal | false |
| qsc_codepython_frac_lines_pass_quality_signal | 0.066667 |
| qsc_codepython_frac_lines_import_quality_signal | 0.2 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | 0.133333 |
| qsc_codepython_score_lines_no_logic_quality_signal | 0.716667 |
| qsc_codepython_frac_lines_print_quality_signal | 0.016667 |
| qsc_code_num_words | 0 |
| qsc_code_num_chars | 0 |
| qsc_code_mean_word_length | 0 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | 0 |
| qsc_code_frac_chars_top_3grams | 1 |
| qsc_code_frac_chars_top_4grams | 1 |
| qsc_code_frac_chars_dupe_5grams | 0 |
| qsc_code_frac_chars_dupe_6grams | 0 |
| qsc_code_frac_chars_dupe_7grams | 0 |
| qsc_code_frac_chars_dupe_8grams | 0 |
| qsc_code_frac_chars_dupe_9grams | 0 |
| qsc_code_frac_chars_dupe_10grams | 0 |
| qsc_code_frac_chars_replacement_symbols | 0 |
| qsc_code_frac_chars_digital | 0 |
| qsc_code_frac_chars_whitespace | 0 |
| qsc_code_size_file_byte | 0 |
| qsc_code_num_lines | 0 |
| qsc_code_num_chars_line_max | 0 |
| qsc_code_num_chars_line_mean | 0 |
| qsc_code_frac_chars_alphabet | 0 |
| qsc_code_frac_chars_comments | 0 |
| qsc_code_cate_xml_start | 0 |
| qsc_code_frac_lines_dupe_lines | 0 |
| qsc_code_cate_autogen | 0 |
| qsc_code_frac_lines_long_string | 0 |
| qsc_code_frac_chars_string_length | 0 |
| qsc_code_frac_chars_long_word_length | 0 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | 0 |
| qsc_code_frac_chars_hex_words | 0 |
| qsc_code_frac_lines_prompt_comments | 0 |
| qsc_code_frac_lines_assert | 0 |
| qsc_codepython_cate_ast | 0 |
| qsc_codepython_frac_lines_func_ratio | 1 |
| qsc_codepython_cate_var_zero | 0 |
| qsc_codepython_frac_lines_pass | 1 |
| qsc_codepython_frac_lines_import | 0 |
| qsc_codepython_frac_lines_simplefunc | 1 |
| qsc_codepython_score_lines_no_logic | 1 |
| qsc_codepython_frac_lines_print | 0 |
| effective | 0 |
| hits | 6 |
Row 5:

| field | value |
|---|---|
| hexsha | 46ac0a724e653ea5dea5269b5eb03811f92574db |
| size | 3,715 |
| ext | py |
| lang | Python |
| max_stars_repo_path | tests/python_frontend/default_argument_test.py |
| max_stars_repo_name | Walon1998/dace |
| max_stars_repo_head_hexsha | 95ddfd3e9a5c654f0f0d66d026e0b64ec0f028a0 |
| max_stars_repo_licenses | ["BSD-3-Clause"] |
| max_stars_count | 1 |
| max_stars_repo_stars_event_min_datetime | 2022-03-11T13:36:34.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-03-11T13:36:34.000Z |
| max_issues_repo_path | tests/python_frontend/default_argument_test.py |
| max_issues_repo_name | Walon1998/dace |
| max_issues_repo_head_hexsha | 95ddfd3e9a5c654f0f0d66d026e0b64ec0f028a0 |
| max_issues_repo_licenses | ["BSD-3-Clause"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | tests/python_frontend/default_argument_test.py |
| max_forks_repo_name | Walon1998/dace |
| max_forks_repo_head_hexsha | 95ddfd3e9a5c654f0f0d66d026e0b64ec0f028a0 |
| max_forks_repo_licenses | ["BSD-3-Clause"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content:

```python
# Copyright 2019-2022 ETH Zurich and the DaCe authors. All rights reserved.
""" Tests for using default arguments (mutable or not) in different contexts. """
import dace
import numpy as np
def test_default_arg():
@dace.program
def tester(arr: dace.float64[20], qmin: float = 0.0):
arr[:] = qmin
myarr = np.random.rand(20)
tester(myarr)
assert np.allclose(myarr, 0.0)
tester(myarr, 2.0)
assert np.allclose(myarr, 2.0)
def test_single_nested_default_arg_jit():
class MyClass:
def __call__(self, arr, qmin=0.0):
arr[:] = qmin
a = MyClass()
@dace.program
def tester(arr, qmin2):
a(arr)
myarr = np.random.rand(20)
tester(myarr, 2.0)
assert np.allclose(myarr, 0.0)
def test_nested_default_arg_jit():
class MyClass:
def __call__(self, arr, qmin=0.0):
self.nested(arr, qmin)
def nested(self, arr, qmin):
arr[:] = qmin
a = MyClass()
@dace.program
def tester(arr, qmin2):
a(arr)
myarr = np.random.rand(20)
tester(myarr, 2.0)
assert np.allclose(myarr, 0.0)
def test_nested_default_arg():
class MyClass:
def __call__(self, arr: dace.float64[20], qmin: float = 0.0):
self.nested(arr, qmin)
def nested(self, arr: dace.float64[20], qmin: float):
arr[:] = qmin
a = MyClass()
@dace.program
def tester(arr: dace.float64[20], qmin2: float):
a(arr)
myarr = np.random.rand(20)
tester(myarr, 2.0)
assert np.allclose(myarr, 0.0)
def test_nested_default_arg_reuse():
class MyClass:
def __call__(self, arr: dace.float64[20], qmin: float = 0.0):
self.nested(arr, qmin)
def nested(self, arr: dace.float64[20], qmin: float):
arr[:] = qmin
a = MyClass()
@dace.program
def tester(arr: dace.float64[20], qmin: float):
a(arr)
myarr = np.random.rand(20)
tester(myarr, 2.0)
assert np.allclose(myarr, 0.0)
def test_nested_default_arg_reuse_2():
class MyClass:
def __call__(self, arr: dace.float64[20], qmin: float = 0.0):
self.nested(arr, qmin)
def nested(self, arr: dace.float64[20], qmin: float):
arr[:] = qmin
a = MyClass()
@dace.program
def tester(arr: dace.float64[20], arr2: dace.float64[20], qmin: float):
a(arr, qmin=1.0)
a(arr2)
myarr = np.random.rand(20)
myarr2 = np.random.rand(20)
tester(myarr, myarr2, 2.0)
assert np.allclose(myarr, 1.0)
assert np.allclose(myarr2, 0.0)
def test_default_arg_object():
@dace.program
def tester(arr: dace.float64[20], defarg: dace.float64[20] = np.ones(20)):
defarg += 1
arr[:] = defarg
myarr = np.random.rand(20)
b = np.full(20, 5.0)
tester(myarr)
assert np.allclose(myarr, 2.0)
tester(myarr, b)
assert np.allclose(myarr, 6.0)
tester(myarr)
assert np.allclose(myarr, 3.0)
def test_nested_default_arg_object():
class MyClass:
def __call__(self, arr: dace.float64[20], defarg: dace.float64[20] = np.ones(20)):
defarg += 1
arr[:] = defarg
a = MyClass()
@dace.program
def tester(arr: dace.float64[20]):
a(arr)
myarr = np.random.rand(20)
tester(myarr)
assert np.allclose(myarr, 2.0)
tester(myarr)
assert np.allclose(myarr, 3.0)
if __name__ == '__main__':
test_default_arg()
test_single_nested_default_arg_jit()
test_nested_default_arg_jit()
test_nested_default_arg()
test_nested_default_arg_reuse()
test_nested_default_arg_reuse_2()
test_default_arg_object()
test_nested_default_arg_object()
```

| field | value |
|---|---|
| avg_line_length | 23.512658 |
| max_line_length | 90 |
| alphanum_fraction | 0.60969 |
| qsc_code_num_words_quality_signal | 534 |
| qsc_code_num_chars_quality_signal | 3,715 |
| qsc_code_mean_word_length_quality_signal | 4.069288 |
| qsc_code_frac_words_unique_quality_signal | 0.127341 |
| qsc_code_frac_chars_top_2grams_quality_signal | 0.073631 |
| qsc_code_frac_chars_top_3grams_quality_signal | 0.09572 |
| qsc_code_frac_chars_top_4grams_quality_signal | 0.09572 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | 0.846295 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | 0.802577 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | 0.746434 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | 0.734008 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | 0.679705 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | 0.640589 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | 0 |
| qsc_code_frac_chars_digital_quality_signal | 0.058653 |
| qsc_code_frac_chars_whitespace_quality_signal | 0.256528 |
| qsc_code_size_file_byte_quality_signal | 3,715 |
| qsc_code_num_lines_quality_signal | 157 |
| qsc_code_num_chars_line_max_quality_signal | 91 |
| qsc_code_num_chars_line_mean_quality_signal | 23.66242 |
| qsc_code_frac_chars_alphabet_quality_signal | 0.728096 |
| qsc_code_frac_chars_comments_quality_signal | 0.040108 |
| qsc_code_cate_xml_start_quality_signal | 0 |
| qsc_code_frac_lines_dupe_lines_quality_signal | 0.681416 |
| qsc_code_cate_autogen_quality_signal | 0 |
| qsc_code_frac_lines_long_string_quality_signal | 0 |
| qsc_code_frac_chars_string_length_quality_signal | 0.002248 |
| qsc_code_frac_chars_long_word_length_quality_signal | 0 |
| qsc_code_frac_lines_string_concat_quality_signal | 0 |
| qsc_code_cate_encoded_data_quality_signal | 0 |
| qsc_code_frac_chars_hex_words_quality_signal | 0 |
| qsc_code_frac_lines_prompt_comments_quality_signal | 0 |
| qsc_code_frac_lines_assert_quality_signal | 0.115044 |
| qsc_codepython_cate_ast_quality_signal | 1 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | 0.230089 |
| qsc_codepython_cate_var_zero_quality_signal | false |
| qsc_codepython_frac_lines_pass_quality_signal | 0 |
| qsc_codepython_frac_lines_import_quality_signal | 0.017699 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | 0 |
| qsc_codepython_score_lines_no_logic_quality_signal | 0.300885 |
| qsc_codepython_frac_lines_print_quality_signal | 0 |
| qsc_code_num_words | 0 |
| qsc_code_num_chars | 0 |
| qsc_code_mean_word_length | 0 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | 0 |
| qsc_code_frac_chars_top_3grams | 0 |
| qsc_code_frac_chars_top_4grams | 0 |
| qsc_code_frac_chars_dupe_5grams | 1 |
| qsc_code_frac_chars_dupe_6grams | 1 |
| qsc_code_frac_chars_dupe_7grams | 1 |
| qsc_code_frac_chars_dupe_8grams | 1 |
| qsc_code_frac_chars_dupe_9grams | 0 |
| qsc_code_frac_chars_dupe_10grams | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 |
| qsc_code_frac_chars_digital | 0 |
| qsc_code_frac_chars_whitespace | 0 |
| qsc_code_size_file_byte | 0 |
| qsc_code_num_lines | 0 |
| qsc_code_num_chars_line_max | 0 |
| qsc_code_num_chars_line_mean | 0 |
| qsc_code_frac_chars_alphabet | 0 |
| qsc_code_frac_chars_comments | 0 |
| qsc_code_cate_xml_start | 0 |
| qsc_code_frac_lines_dupe_lines | 0 |
| qsc_code_cate_autogen | 0 |
| qsc_code_frac_lines_long_string | 0 |
| qsc_code_frac_chars_string_length | 0 |
| qsc_code_frac_chars_long_word_length | 0 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | 0 |
| qsc_code_frac_chars_hex_words | 0 |
| qsc_code_frac_lines_prompt_comments | 0 |
| qsc_code_frac_lines_assert | 0 |
| qsc_codepython_cate_ast | 0 |
| qsc_codepython_frac_lines_func_ratio | 1 |
| qsc_codepython_cate_var_zero | 0 |
| qsc_codepython_frac_lines_pass | 0 |
| qsc_codepython_frac_lines_import | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 |
| qsc_codepython_score_lines_no_logic | 0 |
| qsc_codepython_frac_lines_print | 0 |
| effective | 0 |
| hits | 6 |
Row 6:

| field | value |
|---|---|
| hexsha | d3d2bf78b5cf9c424431ea5bfbc3e6e6c5152b4f |
| size | 125,949 |
| ext | py |
| lang | Python |
| max_stars_repo_path | tests/python_client/testcases/test_collection.py |
| max_stars_repo_name | BearerPipelineTest/milvus |
| max_stars_repo_head_hexsha | 0a953948afe3d491f0b3b2d54de21885194f7136 |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | tests/python_client/testcases/test_collection.py |
| max_issues_repo_name | BearerPipelineTest/milvus |
| max_issues_repo_head_hexsha | 0a953948afe3d491f0b3b2d54de21885194f7136 |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | 38 |
| max_issues_repo_issues_event_min_datetime | 2021-11-22T11:15:27.000Z |
| max_issues_repo_issues_event_max_datetime | 2022-03-30T08:14:12.000Z |
| max_forks_repo_path | tests/python_client/testcases/test_collection.py |
| max_forks_repo_name | BearerPipelineTest/milvus |
| max_forks_repo_head_hexsha | 0a953948afe3d491f0b3b2d54de21885194f7136 |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | 2 |
| max_forks_repo_forks_event_min_datetime | 2021-11-22T11:09:33.000Z |
| max_forks_repo_forks_event_max_datetime | 2021-11-25T07:08:18.000Z |

content:

```python
from functools import reduce
import numpy
import pandas as pd
import pytest
from base.client_base import TestcaseBase
from common import common_func as cf
from common import common_type as ct
from common.common_type import CaseLabel, CheckTasks
from utils.util_pymilvus import *
from utils.util_log import test_log as log
prefix = "collection"
exp_name = "name"
exp_schema = "schema"
exp_num = "num_entities"
exp_primary = "primary"
exp_shards_num = "shards_num"
default_schema = cf.gen_default_collection_schema()
default_binary_schema = cf.gen_default_binary_collection_schema()
default_shards_num = 2
uid_count = "collection_count"
tag = "collection_count_tag"
uid_stats = "get_collection_stats"
uid_create = "create_collection"
uid_describe = "describe_collection"
uid_drop = "drop_collection"
uid_has = "has_collection"
uid_list = "list_collections"
uid_load = "load_collection"
field_name = default_float_vec_field_name
default_single_query = {
"data": gen_vectors(1, default_dim),
"anns_field": default_float_vec_field_name,
"param": {"metric_type": "L2", "params": {"nprobe": 10}},
"limit": default_top_k,
}
default_index_params = {"index_type": "IVF_SQ8", "metric_type": "L2", "params": {"nlist": 64}}
default_binary_index_params = {"index_type": "BIN_IVF_FLAT", "metric_type": "JACCARD", "params": {"nlist": 64}}
default_nq = ct.default_nq
default_search_exp = "int64 >= 0"
default_limit = ct.default_limit
vectors = [[random.random() for _ in range(default_dim)] for _ in range(default_nq)]
default_search_field = ct.default_float_vec_field_name
default_search_params = {"metric_type": "L2", "params": {"nprobe": 10}}
class TestCollectionParams(TestcaseBase):
""" Test case of collection interface """
@pytest.fixture(scope="function", params=ct.get_invalid_strs)
def get_none_removed_invalid_strings(self, request):
if request.param is None:
pytest.skip("None schema is valid")
yield request.param
@pytest.fixture(scope="function", params=ct.get_invalid_strs)
def get_invalid_type_fields(self, request):
if isinstance(request.param, list):
pytest.skip("list is valid fields")
yield request.param
@pytest.fixture(scope="function", params=cf.gen_all_type_fields())
def get_unsupported_primary_field(self, request):
if request.param.dtype == DataType.INT64:
pytest.skip("int64 type is valid primary key")
yield request.param
@pytest.fixture(scope="function", params=ct.get_invalid_strs)
def get_invalid_dim(self, request):
if request.param == 1:
pytest.skip("1 is valid dim")
yield request.param
@pytest.mark.tags(CaseLabel.L0)
def test_collection(self):
"""
target: test collection with default schema
method: create collection with default schema
expected: assert collection property
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
self.collection_wrap.init_collection(c_name, schema=default_schema,
check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: default_schema, exp_num: 0,
exp_primary: ct.default_int64_field_name})
assert c_name in self.utility_wrap.list_collections()[0]
@pytest.mark.tags(CaseLabel.L2)
def test_collection_empty_name(self):
"""
target: test collection with empty name
method: create collection with an empty name
expected: raise exception
"""
self._connect()
c_name = ""
error = {ct.err_code: -1, ct.err_msg: f'`collection_name` value is illegal'}
self.collection_wrap.init_collection(c_name, schema=default_schema, check_task=CheckTasks.err_res,
check_items=error)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("name", [[], 1, [1, "2", 3], (1,), {1: 1}, None])
def test_collection_illegal_name(self, name):
"""
target: test collection with illegal name
method: create collection with illegal name
expected: raise exception
"""
self._connect()
error = {ct.err_code: -1, ct.err_msg: "`collection_name` value {} is illegal".format(name)}
self.collection_wrap.init_collection(name, schema=default_schema, check_task=CheckTasks.err_res,
check_items=error)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("name", ["12-s", "12 s", "(mn)", "中文", "%$#", "a".join("a" for i in range(256))])
def test_collection_invalid_name(self, name):
"""
target: test collection with invalid name
method: create collection with invalid name
expected: raise exception
"""
self._connect()
error = {ct.err_code: 1, ct.err_msg: "Invalid collection name: {}".format(name)}
self.collection_wrap.init_collection(name, schema=default_schema, check_task=CheckTasks.err_res,
check_items=error)
@pytest.mark.tags(CaseLabel.L0)
def test_collection_dup_name(self):
"""
target: test collection with dup name
method: create collection with dup name and none schema and data
expected: collection properties consistent
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name, check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: default_schema})
self.collection_wrap.init_collection(collection_w.name)
assert collection_w.name == self.collection_wrap.name
assert collection_w.schema == self.collection_wrap.schema
assert collection_w.num_entities == self.collection_wrap.num_entities
assert collection_w.name in self.utility_wrap.list_collections()[0]
@pytest.mark.tags(CaseLabel.L2)
def test_collection_dup_name_with_desc(self):
"""
target: test collection with dup name
method: 1. default schema with desc 2. dup name collection
expected: desc consistent
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
schema = cf.gen_default_collection_schema(description=ct.collection_desc)
collection_w = self.init_collection_wrap(name=c_name, schema=schema,
check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: schema})
self.collection_wrap.init_collection(c_name,
check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: schema})
assert collection_w.description == self.collection_wrap.description
@pytest.mark.tags(CaseLabel.L1)
def test_collection_dup_name_new_schema(self):
"""
target: test collection with dup name and new schema
method: 1.create collection with default schema
2. collection with dup name and new schema
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
self.init_collection_wrap(name=c_name, check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: default_schema})
fields = [cf.gen_int64_field(is_primary=True)]
schema = cf.gen_collection_schema(fields=fields)
error = {ct.err_code: 0, ct.err_msg: "The collection already exist, but the schema is not the same as the "
"schema passed in."}
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_collection_dup_name_new_primary(self):
"""
target: test collection with dup name and new primary_field schema
method: 1.collection with default schema
2. collection with same fields and new primary_field schema
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
int_field_one = cf.gen_int64_field()
int_field_two = cf.gen_int64_field(name="int2")
fields = [int_field_one, int_field_two, cf.gen_float_vec_field()]
schema = cf.gen_collection_schema(fields, primary_field=int_field_one.name)
collection_w = self.init_collection_wrap(name=c_name, schema=schema,
check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: schema,
exp_primary: int_field_one.name})
new_schema = cf.gen_collection_schema(fields, primary_field=int_field_two.name)
error = {ct.err_code: 0, ct.err_msg: "The collection already exist, but the schema is not the same as the "
"schema passed in."}
self.collection_wrap.init_collection(c_name, schema=new_schema, check_task=CheckTasks.err_res,
check_items=error)
assert collection_w.primary_field.name == int_field_one.name
@pytest.mark.tags(CaseLabel.L1)
def test_collection_dup_name_new_dim(self):
"""
target: test collection with dup name and new dim schema
method: 1. default schema 2. schema with new dim
expected: raise exception
"""
self._connect()
new_dim = 120
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name, check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: default_schema})
schema = cf.gen_default_collection_schema()
new_fields = cf.gen_float_vec_field(dim=new_dim)
schema.fields[-1] = new_fields
error = {ct.err_code: 0, ct.err_msg: "The collection already exist, but the schema is not the same as the "
"schema passed in."}
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.err_res, check_items=error)
dim = collection_w.schema.fields[-1].params['dim']
assert dim == ct.default_dim
@pytest.mark.tags(CaseLabel.L2)
def test_collection_dup_name_invalid_schema_type(self, get_none_removed_invalid_strings):
"""
target: test collection with dup name and invalid schema
method: 1. default schema 2. invalid schema
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name, check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: default_schema})
error = {ct.err_code: 0, ct.err_msg: "Schema type must be schema.CollectionSchema"}
schema = get_none_removed_invalid_strings
self.collection_wrap.init_collection(collection_w.name, schema=schema,
check_task=CheckTasks.err_res, check_items=error)
assert collection_w.name == c_name
@pytest.mark.tags(CaseLabel.L1)
def test_collection_dup_name_same_schema(self):
"""
target: test collection with dup name and same schema
method: dup name and same schema
expected: two collection object is available
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name, schema=default_schema,
check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: default_schema})
self.collection_wrap.init_collection(name=c_name, schema=default_schema,
check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: default_schema})
assert collection_w.name == self.collection_wrap.name
@pytest.mark.tags(CaseLabel.L2)
def test_collection_none_schema(self):
"""
target: test collection with none schema
method: create collection with none schema
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
error = {ct.err_code: 0, ct.err_msg: "Should be passed into the schema"}
self.collection_wrap.init_collection(c_name, schema=None, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_collection_invalid_type_schema(self, get_none_removed_invalid_strings):
"""
target: test collection with invalid schema
method: create collection with non-CollectionSchema type schema
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
error = {ct.err_code: 0, ct.err_msg: "Schema type must be schema.CollectionSchema"}
self.collection_wrap.init_collection(c_name, schema=get_none_removed_invalid_strings,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_collection_invalid_type_fields(self, get_invalid_type_fields):
"""
target: test collection with invalid fields type, non-list
method: create collection schema with non-list invalid fields
expected: exception
"""
self._connect()
fields = get_invalid_type_fields
error = {ct.err_code: 0, ct.err_msg: "The fields of schema must be type list"}
self.collection_schema_wrap.init_collection_schema(fields=fields,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_collection_with_unknown_type(self):
"""
target: test collection with unknown type
method: create with DataType.UNKNOWN
expected: raise exception
"""
self._connect()
error = {ct.err_code: 0, ct.err_msg: "Field dtype must be of DataType"}
self.field_schema_wrap.init_field_schema(name="unknown", dtype=DataType.UNKNOWN,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.xfail(reason="exception not Milvus Exception")
@pytest.mark.parametrize("name", [[], 1, (1,), {1: 1}, "12-s"])
def test_collection_invalid_type_field(self, name):
"""
target: test collection with invalid field name
method: invalid string name
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
field, _ = self.field_schema_wrap.init_field_schema(name=name, dtype=5, is_primary=True)
vec_field = cf.gen_float_vec_field()
schema = cf.gen_collection_schema(fields=[field, vec_field])
error = {ct.err_code: 1, ct.err_msg: "expected one of: bytes, unicode"}
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("name", ["12-s", "12 s", "(mn)", "中文", "%$#", "a".join("a" for i in range(256))])
def test_collection_invalid_field_name(self, name):
"""
target: test collection with invalid field name
method: invalid string name
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
field, _ = self.field_schema_wrap.init_field_schema(name=name, dtype=DataType.INT64, is_primary=True)
vec_field = cf.gen_float_vec_field()
schema = cf.gen_collection_schema(fields=[field, vec_field])
error = {ct.err_code: 1, ct.err_msg: "Invalid field name"}
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_collection_none_field_name(self):
"""
target: test field schema with None name
method: None field name
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
field, _ = self.field_schema_wrap.init_field_schema(name=None, dtype=DataType.INT64, is_primary=True)
schema = cf.gen_collection_schema(fields=[field, cf.gen_float_vec_field()])
error = {ct.err_code: 1, ct.err_msg: "You should specify the name of field"}
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("dtype", [6, [[]], {}, (), "", "a"])
def test_collection_invalid_field_type(self, dtype):
"""
target: test collection with invalid field type
method: invalid DataType
expected: raise exception
"""
self._connect()
error = {ct.err_code: 0, ct.err_msg: "Field dtype must be of DataType"}
self.field_schema_wrap.init_field_schema(name="test", dtype=dtype, is_primary=True,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_collection_field_dtype_float_value(self):
"""
target: test collection with float type
method: create field with float type
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
field, _ = self.field_schema_wrap.init_field_schema(name=ct.default_int64_field_name, dtype=5.0,
is_primary=True)
schema = cf.gen_collection_schema(fields=[field, cf.gen_float_vec_field()])
error = {ct.err_code: 0, ct.err_msg: "Field type must be of DataType!"}
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_collection_empty_fields(self):
"""
target: test collection with empty fields
method: create collection with fields = []
expected: exception
"""
self._connect()
error = {ct.err_code: 0, ct.err_msg: "Primary field must in dataframe."}
self.collection_schema_wrap.init_collection_schema(fields=[], primary_field=ct.default_int64_field_name,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_collection_dup_field(self):
"""
target: test collection with dup field name
method: Two FieldSchema have same name
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
field_one = cf.gen_int64_field(is_primary=True)
field_two = cf.gen_int64_field()
schema = cf.gen_collection_schema(fields=[field_one, field_two, cf.gen_float_vec_field()])
error = {ct.err_code: 1, ct.err_msg: "duplicated field name"}
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.err_res, check_items=error)
assert not self.utility_wrap.has_collection(c_name)[0]
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.parametrize("field", [cf.gen_float_vec_field(), cf.gen_binary_vec_field()])
def test_collection_only_vector_field(self, field):
"""
target: test collection just with vec field
method: create with float-vec fields
expected: raise exception
"""
self._connect()
error = {ct.err_code: 0, ct.err_msg: "Primary field must in dataframe"}
self.collection_schema_wrap.init_collection_schema([field], check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_collection_multi_float_vectors(self):
"""
target: test collection with multi float vectors
method: create collection with two float-vec fields
expected: raise exception (not supported yet)
"""
# 1. connect
self._connect()
# 2. create collection with multiple vectors
c_name = cf.gen_unique_str(prefix)
fields = [cf.gen_int64_field(is_primary=True), cf.gen_float_field(),
cf.gen_float_vec_field(dim=default_dim), cf.gen_float_vec_field(name="tmp", dim=default_dim)]
schema = cf.gen_collection_schema(fields=fields)
err_msg = "multiple vector fields is not supported"
self.collection_wrap.init_collection(c_name, schema=schema,
check_task=CheckTasks.err_res,
check_items={"err_code": 1, "err_msg": err_msg})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip("https://github.com/milvus-io/milvus/issues/12680")
def test_collection_mix_vectors(self):
"""
target: test collection with mix vectors
method: create with float and binary vec
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
fields = [cf.gen_int64_field(is_primary=True), cf.gen_float_vec_field(), cf.gen_binary_vec_field()]
schema = cf.gen_collection_schema(fields=fields, auto_id=True)
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: schema})
@pytest.mark.tags(CaseLabel.L0)
def test_collection_without_vectors(self):
"""
target: test collection without vectors
method: create collection only with int field
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
schema = cf.gen_collection_schema([cf.gen_int64_field(is_primary=True)])
error = {ct.err_code: 0, ct.err_msg: "No vector field is found."}
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_collection_without_primary_field(self):
"""
target: test collection without primary field
method: no primary field specified in collection schema and fields
expected: raise exception
"""
self._connect()
int_fields, _ = self.field_schema_wrap.init_field_schema(name=ct.default_int64_field_name, dtype=DataType.INT64)
vec_fields, _ = self.field_schema_wrap.init_field_schema(name=ct.default_float_vec_field_name,
dtype=DataType.FLOAT_VECTOR, dim=ct.default_dim)
error = {ct.err_code: 0, ct.err_msg: "Primary field must in dataframe."}
self.collection_schema_wrap.init_collection_schema([int_fields, vec_fields],
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_collection_is_primary_false(self):
"""
target: test collection with all is_primary false
method: set all fields if_primary false
expected: raise exception
"""
self._connect()
fields = [cf.gen_int64_field(is_primary=False), cf.gen_float_field(is_primary=False),
cf.gen_float_vec_field(is_primary=False)]
error = {ct.err_code: 0, ct.err_msg: "Primary field must in dataframe."}
self.collection_schema_wrap.init_collection_schema(fields, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("is_primary", ct.get_invalid_strs)
def test_collection_invalid_is_primary(self, is_primary):
"""
target: test collection with invalid primary
method: define field with is_primary=non-bool
expected: raise exception
"""
self._connect()
name = cf.gen_unique_str(prefix)
error = {ct.err_code: 0, ct.err_msg: "Param is_primary must be bool type"}
self.field_schema_wrap.init_field_schema(name=name, dtype=DataType.INT64, is_primary=is_primary,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("primary_field", ["12-s", "12 s", "(mn)", "中文", "%$#", "a".join("a" for i in range(256))])
def test_collection_invalid_primary_field(self, primary_field):
"""
target: test collection with invalid primary_field
method: specify invalid string primary_field in collection schema
expected: raise exception
"""
self._connect()
fields = [cf.gen_int64_field(), cf.gen_float_vec_field()]
error = {ct.err_code: 0, ct.err_msg: "Primary field must in dataframe."}
self.collection_schema_wrap.init_collection_schema(fields=fields, primary_field=primary_field,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("primary_field", [[], 1, [1, "2", 3], (1,), {1: 1}, None])
def test_collection_non_string_primary_field(self, primary_field):
"""
target: test collection with non-string primary_field
method: primary_field type is not string
expected: raise exception
"""
self._connect()
fields = [cf.gen_int64_field(), cf.gen_float_vec_field()]
error = {ct.err_code: 0, ct.err_msg: "Primary field must in dataframe."}
self.collection_schema_wrap.init_collection_schema(fields, primary_field=primary_field,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_collection_not_existed_primary_field(self):
"""
target: test collection with not exist primary field
method: specify not existed field as primary_field
expected: raise exception
"""
self._connect()
fake_field = cf.gen_unique_str()
fields = [cf.gen_int64_field(), cf.gen_float_vec_field()]
error = {ct.err_code: 0, ct.err_msg: "Primary field must in dataframe."}
self.collection_schema_wrap.init_collection_schema(fields, primary_field=fake_field,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L0)
def test_collection_primary_in_schema(self):
"""
target: test collection with primary field
method: specify primary field in CollectionSchema
expected: collection.primary_field
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
schema = cf.gen_default_collection_schema(primary_field=ct.default_int64_field_name)
self.collection_wrap.init_collection(c_name, schema=schema)
assert self.collection_wrap.primary_field.name == ct.default_int64_field_name
@pytest.mark.tags(CaseLabel.L0)
def test_collection_primary_in_field(self):
"""
target: test collection with primary field
method: specify primary field in FieldSchema
expected: collection.primary_field
"""
self._connect()
fields = [cf.gen_int64_field(is_primary=True), cf.gen_float_field(), cf.gen_float_vec_field()]
schema, _ = self.collection_schema_wrap.init_collection_schema(fields)
self.collection_wrap.init_collection(cf.gen_unique_str(prefix), schema=schema)
assert self.collection_wrap.primary_field.name == ct.default_int64_field_name
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.xfail(reason="exception not Milvus Exception")
def test_collection_unsupported_primary_field(self, get_unsupported_primary_field):
"""
target: test collection with unsupported primary field type
method: specify non-int64 as primary field
expected: raise exception
"""
self._connect()
field = get_unsupported_primary_field
vec_field = cf.gen_float_vec_field(name="vec")
error = {ct.err_code: 1, ct.err_msg: "Primary key type must be DataType.INT64."}
self.collection_schema_wrap.init_collection_schema(fields=[field, vec_field], primary_field=field.name,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_collection_multi_primary_fields(self):
"""
target: test collection with multi primary
method: collection with two primary fields
expected: raise exception
"""
self._connect()
int_field_one = cf.gen_int64_field(is_primary=True)
int_field_two = cf.gen_int64_field(name="int2", is_primary=True)
error = {ct.err_code: 0, ct.err_msg: "Primary key field can only be one."}
self.collection_schema_wrap.init_collection_schema(
fields=[int_field_one, int_field_two, cf.gen_float_vec_field()],
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_collection_primary_inconsistent(self):
"""
target: test collection with different primary field setting
method: 1. set A field is_primary 2. set primary_field is B
expected: raise exception
"""
self._connect()
int_field_one = cf.gen_int64_field(is_primary=True)
int_field_two = cf.gen_int64_field(name="int2")
fields = [int_field_one, int_field_two, cf.gen_float_vec_field()]
error = {ct.err_code: 0, ct.err_msg: "Primary key field can only be one"}
self.collection_schema_wrap.init_collection_schema(fields, primary_field=int_field_two.name,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_collection_primary_consistent(self):
"""
target: test collection with both collection schema and field schema
method: 1. set A field is_primary 2.set primary_field is A
expected: verify primary field
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
int_field_one = cf.gen_int64_field(is_primary=True)
schema = cf.gen_collection_schema(fields=[int_field_one, cf.gen_float_vec_field()],
primary_field=int_field_one.name)
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: schema})
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.parametrize("auto_id", [True, False])
def test_collection_auto_id_in_field_schema(self, auto_id):
"""
target: test collection with auto_id in field schema
method: specify auto_id True in field schema
expected: verify schema's auto_id
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
int_field = cf.gen_int64_field(is_primary=True, auto_id=auto_id)
vec_field = cf.gen_float_vec_field(name='vec')
schema, _ = self.collection_schema_wrap.init_collection_schema([int_field, vec_field])
assert schema.auto_id == auto_id
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: schema})
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.parametrize("auto_id", [True, False])
def test_collection_auto_id_in_collection_schema(self, auto_id):
"""
target: test collection with auto_id in collection schema
method: specify auto_id True in collection schema
expected: verify schema auto_id and collection schema
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
int_field = cf.gen_int64_field(is_primary=True)
vec_field = cf.gen_float_vec_field(name='vec')
schema, _ = self.collection_schema_wrap.init_collection_schema([int_field, vec_field], auto_id=auto_id)
assert schema.auto_id == auto_id
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: schema})
@pytest.mark.tags(CaseLabel.L2)
def test_collection_auto_id_non_primary_field(self):
"""
target: test collection set auto_id in non-primary field
method: set auto_id=True in non-primary field
expected: raise exception
"""
self._connect()
error = {ct.err_code: 0, ct.err_msg: "auto_id can only be specified on the primary key field"}
self.field_schema_wrap.init_field_schema(name=ct.default_int64_field_name, dtype=DataType.INT64, auto_id=True,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_collection_auto_id_false_non_primary(self):
"""
target: test collection set auto_id in non-primary field
method: set auto_id=True in non-primary field
expected: verify schema auto_id is False
"""
self._connect()
int_field_one = cf.gen_int64_field(is_primary=True)
int_field_two = cf.gen_int64_field(name='int2', auto_id=False)
fields = [int_field_one, int_field_two, cf.gen_float_vec_field()]
schema, _ = self.collection_schema_wrap.init_collection_schema(fields)
assert not schema.auto_id
@pytest.mark.tags(CaseLabel.L2)
def test_collection_auto_id_inconsistent(self):
"""
target: test collection auto_id with both collection schema and field schema
method: 1.set primary field auto_id=True in field schema 2.set auto_id=False in collection schema
expected: raise exception
"""
self._connect()
int_field = cf.gen_int64_field(is_primary=True, auto_id=True)
vec_field = cf.gen_float_vec_field(name='vec')
error = {ct.err_code: 0, ct.err_msg: "The auto_id of the collection is inconsistent with "
"the auto_id of the primary key field"}
self.collection_schema_wrap.init_collection_schema([int_field, vec_field], auto_id=False,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("auto_id", [True, False])
def test_collection_auto_id_consistent(self, auto_id):
"""
target: test collection auto_id with both collection schema and field schema
method: set auto_id=True/False both field and schema
expected: verify auto_id
"""
self._connect()
int_field = cf.gen_int64_field(is_primary=True, auto_id=auto_id)
vec_field = cf.gen_float_vec_field(name='vec')
schema, _ = self.collection_schema_wrap.init_collection_schema([int_field, vec_field], auto_id=auto_id)
assert schema.auto_id == auto_id
@pytest.mark.tags(CaseLabel.L2)
def test_collection_auto_id_none_in_field(self):
"""
target: test collection with auto_id is None
method: set auto_id=None
expected: raise exception
"""
self._connect()
error = {ct.err_code: 0, ct.err_msg: "Param auto_id must be bool type"}
self.field_schema_wrap.init_field_schema(name=ct.default_int64_field_name, dtype=DataType.INT64,
is_primary=True,
auto_id=None, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("auto_id", ct.get_invalid_strs)
def test_collection_invalid_auto_id(self, auto_id):
"""
target: test collection with invalid auto_id
method: define field with auto_id=non-bool
expected: raise exception
"""
self._connect()
int_field = cf.gen_int64_field(is_primary=True)
vec_field = cf.gen_float_vec_field(name='vec')
error = {ct.err_code: 0, ct.err_msg: "Param auto_id must be bool type"}
self.collection_schema_wrap.init_collection_schema([int_field, vec_field], auto_id=auto_id,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_collection_multi_fields_auto_id(self):
"""
target: test collection auto_id with multi fields
method: specify auto_id=True for multi int64 fields
expected: todo raise exception
"""
self._connect()
error = {ct.err_code: 0, ct.err_msg: "auto_id can only be specified on the primary key field"}
cf.gen_int64_field(is_primary=True, auto_id=True)
self.field_schema_wrap.init_field_schema(name="int", dtype=DataType.INT64, auto_id=True,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("dtype", [DataType.FLOAT_VECTOR, DataType.BINARY_VECTOR])
def test_collection_vector_without_dim(self, dtype):
"""
target: test collection without dimension
method: define vector field without dim
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
float_vec_field, _ = self.field_schema_wrap.init_field_schema(name="vec", dtype=dtype)
schema = cf.gen_collection_schema(fields=[cf.gen_int64_field(is_primary=True), float_vec_field])
error = {ct.err_code: 1, ct.err_msg: "dimension is not defined in field type params"}
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.xfail(reason="exception not Milvus Exception")
def test_collection_vector_invalid_dim(self, get_invalid_dim):
"""
target: test collection with invalid dimension
method: define float-vec field with invalid dimension
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
float_vec_field = cf.gen_float_vec_field(dim=get_invalid_dim)
schema = cf.gen_collection_schema(fields=[cf.gen_int64_field(is_primary=True), float_vec_field])
error = {ct.err_code: 1, ct.err_msg: f'invalid dim: {get_invalid_dim}'}
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("dim", [-1, 0, 32769])
def test_collection_vector_out_bounds_dim(self, dim):
"""
target: test collection with out of bounds dim
method: invalid dim -1 and 32759
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
float_vec_field = cf.gen_float_vec_field(dim=dim)
schema = cf.gen_collection_schema(fields=[cf.gen_int64_field(is_primary=True), float_vec_field])
error = {ct.err_code: 1, ct.err_msg: "invalid dimension: {}. should be in range 1 ~ 32768".format(dim)}
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_collection_non_vector_field_dim(self):
"""
target: test collection with dim for non-vector field
method: define int64 field with dim
expected: no exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
int_field, _ = self.field_schema_wrap.init_field_schema(name=ct.default_int64_field_name, dtype=DataType.INT64,
dim=ct.default_dim)
float_vec_field = cf.gen_float_vec_field()
schema = cf.gen_collection_schema(fields=[int_field, float_vec_field],
primary_field=ct.default_int64_field_name)
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: schema})
@pytest.mark.tags(CaseLabel.L1)
def test_collection_desc(self):
"""
target: test collection with description
method: create with description
expected: assert default description
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
schema = cf.gen_default_collection_schema(description=ct.collection_desc)
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: schema})
@pytest.mark.tags(CaseLabel.L2)
def test_collection_none_desc(self):
"""
target: test collection with none description
method: create with none description
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
schema = cf.gen_default_collection_schema(description=None)
error = {ct.err_code: 1, ct.err_msg: "None has type NoneType, but expected one of: bytes, unicode"}
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_collection_long_desc(self):
"""
target: test collection with long desc
method: create with long desc
expected:
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
desc = "a".join("a" for _ in range(256))
schema = cf.gen_default_collection_schema(description=desc)
self.collection_wrap.init_collection(c_name, schema=schema,
check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: schema})
@pytest.mark.tags(CaseLabel.L0)
def test_collection_binary(self):
"""
target: test collection with binary-vec
method: create collection with binary field
expected: assert binary field
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
self.collection_wrap.init_collection(c_name, schema=default_binary_schema,
check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: default_binary_schema})
assert c_name in self.utility_wrap.list_collections()[0]
@pytest.mark.tags(CaseLabel.L0)
def test_collection_shards_num_with_default_value(self):
"""
target:test collection with shards_num
method:create collection with shards_num
expected: no exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
self.collection_wrap.init_collection(c_name, schema=default_schema, shards_num=default_shards_num,
check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_shards_num: default_shards_num})
assert c_name in self.utility_wrap.list_collections()[0]
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.parametrize("shards_num", [-256, 0, 10, 256])
def test_collection_shards_num_with_not_default_value(self, shards_num):
"""
target:test collection with shards_num
method:create collection with not default shards_num
expected: no exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
self.collection_wrap.init_collection(c_name, schema=default_schema, shards_num=shards_num,
check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_shards_num: shards_num})
assert c_name in self.utility_wrap.list_collections()[0]
@pytest.mark.tags(CaseLabel.L2)
def test_collection_shards_num_with_error_type(self):
"""
target:test collection with error type shards_num
method:create collection with error type shards_num
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
error_type_shards_num = "2" # suppose to be int rather than str
error = {ct.err_code: -1, ct.err_msg: f"expected one of: int, long"}
self.collection_wrap.init_collection(c_name, schema=default_schema, shards_num=error_type_shards_num,
check_task=CheckTasks.err_res,
check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_create_collection_maximum_fields(self):
"""
target: test create collection with maximum fields
method: create collection with maximum field number
expected: no exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
int_fields = []
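# reserve two of the ct.max_field_num slots for the vector and primary key
# fields appended after the loop, so the total lands exactly on the limit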
limit_num = ct.max_field_num - 2
for i in range(limit_num):
int_field_name = cf.gen_unique_str("field_name")
field = cf.gen_int64_field(name=int_field_name)
int_fields.append(field)
int_fields.append(cf.gen_float_vec_field())
int_fields.append(cf.gen_int64_field(is_primary=True))
schema = cf.gen_collection_schema(fields=int_fields)
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: schema})
@pytest.mark.tags(CaseLabel.L2)
def test_create_collection_over_maximum_fields(self):
"""
target: Test create collection with more than the maximum fields
method: create collection with more than the maximum field number
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
int_fields = []
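# fill all ct.max_field_num slots with scalar fields, so the vector and
# primary key fields appended below push the total two past the limit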
limit_num = ct.max_field_num
for i in range(limit_num):
int_field_name = cf.gen_unique_str("field_name")
field = cf.gen_int64_field(name=int_field_name)
int_fields.append(field)
int_fields.append(cf.gen_float_vec_field())
int_fields.append(cf.gen_int64_field(is_primary=True))
schema = cf.gen_collection_schema(fields=int_fields)
error = {ct.err_code: 1, ct.err_msg: "maximum field's number should be limited to 256"}
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.err_res, check_items=error)
class TestCollectionOperation(TestcaseBase):
"""
******************************************************************
The following cases are used to test collection interface operations
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L2)
def test_collection_without_connection(self):
"""
target: test collection without connection
method: 1.create collection after connection removed
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
self.connection_wrap.remove_connection(ct.default_alias)
res_list, _ = self.connection_wrap.list_connections()
assert ct.default_alias not in res_list
error = {ct.err_code: 0, ct.err_msg: 'should create connect first'}
self.collection_wrap.init_collection(c_name, schema=default_schema,
check_task=CheckTasks.err_res, check_items=error)
assert self.collection_wrap.collection is None
@pytest.mark.tags(CaseLabel.L1)
def test_collection_multi_create_drop(self):
"""
target: test cycle creation and deletion of multiple collections
method: in a loop, collections are created and deleted sequentially
expected: no exception
"""
self._connect()
c_num = 20
for _ in range(c_num):
c_name = cf.gen_unique_str(prefix)
self.collection_wrap.init_collection(c_name, schema=default_schema,
check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: default_schema})
self.collection_wrap.drop()
assert c_name not in self.utility_wrap.list_collections()[0]
@pytest.mark.tags(CaseLabel.L1)
def test_collection_dup_name_drop(self):
"""
target: test collection with dup name, and drop
method: 1. create two collection objects with the same name
2. drop the collection via one object
expected: collection dropped
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name, check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: default_schema})
self.collection_wrap.init_collection(c_name, check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: default_schema})
self.collection_wrap.drop()
assert not self.utility_wrap.has_collection(c_name)[0]
error = {ct.err_code: 1, ct.err_msg: f'HasPartition failed: can\'t find collection: {c_name}'}
collection_w.has_partition("p", check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_collection_after_drop(self):
"""
target: test create collection after create and drop
method: 1. create a collection 2. drop it 3. re-create it with the same name
expected: no exception
"""
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name, check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: default_schema})
collection_w.drop()
assert not self.utility_wrap.has_collection(collection_w.name)[0]
self.init_collection_wrap(name=c_name, check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: default_schema})
assert self.utility_wrap.has_collection(c_name)[0]
@pytest.mark.tags(CaseLabel.L1)
def test_collection_all_datatype_fields(self):
"""
target: test create collection with all dataType fields
method: create collection with all dataType schema
expected: create successfully
"""
self._connect()
fields = []
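# add one scalar field per DataType member, skipping UNKNOWN, STRING (not
# supported yet) and the vector types; a float vector field is appended separately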
for k, v in DataType.__members__.items():
if v and v not in (DataType.UNKNOWN, DataType.STRING, DataType.FLOAT_VECTOR, DataType.BINARY_VECTOR):
field, _ = self.field_schema_wrap.init_field_schema(name=k.lower(), dtype=v)
fields.append(field)
fields.append(cf.gen_float_vec_field())
schema, _ = self.collection_schema_wrap.init_collection_schema(fields,
primary_field=ct.default_int64_field_name)
c_name = cf.gen_unique_str(prefix)
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: schema})
@pytest.mark.tags(CaseLabel.L1)
def test_collection_string_field(self):
"""
target: test create with string field
method: create collection with string field
expected: Raise exception
"""
self._connect()
string_field = self.field_schema_wrap.init_field_schema(name="string", dtype=DataType.STRING)[0]
int_field = cf.gen_int64_field(is_primary=True)
vec_field = cf.gen_float_vec_field()
schema = cf.gen_collection_schema(fields=[int_field, string_field, vec_field])
error = {ct.err_code: 0, ct.err_msg: "string data type not supported yet"}
self.collection_wrap.init_collection(name=cf.gen_unique_str(prefix), schema=schema,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_load_collection_after_load_partition(self):
"""
target: test load the partition after load collection
method: load collection and then load the partition
expected: raise exception
"""
self._connect()
collection_w = self.init_collection_wrap()
partition_w1 = self.init_partition_wrap(collection_w)
partition_w1.insert(cf.gen_default_list_data())
collection_w.load()
error = {ct.err_code: 1, ct.err_msg: f'load the partition after load collection is not supported'}
partition_w1.load(check_task=CheckTasks.err_res,
check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_load_collection_release_partition(self):
"""
target: test release the partition after load collection
method: load collection and release the partition
expected: raise exception
"""
self._connect()
collection_w = self.init_collection_wrap()
partition_w1 = self.init_partition_wrap(collection_w)
partition_w1.insert(cf.gen_default_list_data())
collection_w.load()
error = {ct.err_code: 1, ct.err_msg: f'releasing the partition after load collection is not supported'}
partition_w1.release(check_task=CheckTasks.err_res,
check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_load_collection_after_release_collection(self):
"""
target: test release the collection after load collection
method: load collection and release the collection
expected: no exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name, check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: default_schema})
collection_w.insert(cf.gen_default_list_data())
collection_w.load()
collection_w.release()
class TestCollectionDataframe(TestcaseBase):
"""
******************************************************************
The following cases are used to test construct_from_dataframe
******************************************************************
"""
@pytest.fixture(scope="function", params=ct.get_invalid_strs)
def get_non_df(self, request):
if request.param is None:
pytest.skip("skip None")
yield request.param
@pytest.mark.tags(CaseLabel.L0)
def test_construct_from_dataframe(self):
"""
target: test collection with dataframe data
method: create collection and insert with dataframe
expected: collection num entities equal to nb
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
df = cf.gen_default_dataframe_data(ct.default_nb)
self.collection_wrap.construct_from_dataframe(c_name, df, primary_field=ct.default_int64_field_name,
check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: default_schema})
# flush
assert self.collection_wrap.num_entities == ct.default_nb
@pytest.mark.tags(CaseLabel.L0)
def test_construct_from_binary_dataframe(self):
"""
target: test binary collection with dataframe
method: create binary collection with dataframe
expected: collection num entities equal to nb
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
df, _ = cf.gen_default_binary_dataframe_data(nb=ct.default_nb)
self.collection_wrap.construct_from_dataframe(c_name, df, primary_field=ct.default_int64_field_name,
check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: default_binary_schema})
assert self.collection_wrap.num_entities == ct.default_nb
@pytest.mark.tags(CaseLabel.L2)
def test_construct_from_none_dataframe(self):
"""
target: test create collection from a None dataframe
method: pass None as the dataframe when constructing the collection
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
error = {ct.err_code: 0, ct.err_msg: "Dataframe can not be None."}
self.collection_wrap.construct_from_dataframe(c_name, None, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_construct_from_dataframe_only_column(self):
"""
target: test collection with dataframe only columns
method: dataframe only has columns
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
df = pd.DataFrame(columns=[ct.default_int64_field_name, ct.default_float_vec_field_name])
error = {ct.err_code: 0, ct.err_msg: "Cannot infer schema from empty dataframe"}
self.collection_wrap.construct_from_dataframe(c_name, df, primary_field=ct.default_int64_field_name,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_construct_from_inconsistent_dataframe(self):
"""
target: test collection with data inconsistent
method: create and insert with inconsistent data
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
# one field different type df
mix_data = [(1, 2., [0.1, 0.2]), (2, 3., 4)]
df = pd.DataFrame(data=mix_data, columns=list("ABC"))
error = {ct.err_code: 0, ct.err_msg: "The data in the same column must be of the same type"}
self.collection_wrap.construct_from_dataframe(c_name, df, primary_field='A', check_task=CheckTasks.err_res,
check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_construct_from_non_dataframe(self, get_non_df):
"""
target: test create collection by invalid dataframe
method: create collection with a non-dataframe object
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
error = {ct.err_code: 0, ct.err_msg: "Data type must be pandas.DataFrame."}
df = get_non_df
self.collection_wrap.construct_from_dataframe(c_name, df, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_construct_from_data_type_dataframe(self):
"""
target: test collection with invalid dataframe
method: create with invalid dataframe
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
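# a datetime64 column has no corresponding Milvus DataType, so schema
# inference over this dataframe is expected to fail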
df = pd.DataFrame({"date": pd.date_range('20210101', periods=3), ct.default_int64_field_name: [1, 2, 3]})
error = {ct.err_code: 0, ct.err_msg: "Cannot infer schema from empty dataframe."}
self.collection_wrap.construct_from_dataframe(c_name, df, primary_field=ct.default_int64_field_name,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_construct_from_invalid_field_name(self):
"""
target: test collection with invalid field name
method: create with invalid field name dataframe
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
df = pd.DataFrame({'%$#': cf.gen_vectors(3, 2), ct.default_int64_field_name: [1, 2, 3]})
error = {ct.err_code: 1, ct.err_msg: "Invalid field name"}
self.collection_wrap.construct_from_dataframe(c_name, df, primary_field=ct.default_int64_field_name,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_construct_none_primary_field(self):
"""
target: test collection with none primary field
method: primary_field is none
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
df = cf.gen_default_dataframe_data(ct.default_nb)
error = {ct.err_code: 0, ct.err_msg: "Schema must have a primary key field."}
self.collection_wrap.construct_from_dataframe(c_name, df, primary_field=None,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_construct_not_existed_primary_field(self):
"""
target: test collection with a non-existent primary field
method: pass a primary field name that is not a dataframe column
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
df = cf.gen_default_dataframe_data(ct.default_nb)
error = {ct.err_code: 0, ct.err_msg: "Primary field must in dataframe."}
self.collection_wrap.construct_from_dataframe(c_name, df, primary_field=c_name,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_construct_with_none_auto_id(self):
"""
target: test construct with None auto_id
method: pass auto_id=None when constructing from dataframe
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
df = cf.gen_default_dataframe_data(ct.default_nb)
error = {ct.err_code: 0, ct.err_msg: "Param auto_id must be bool type"}
self.collection_wrap.construct_from_dataframe(c_name, df, primary_field=ct.default_int64_field_name,
auto_id=None, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_construct_auto_id_true_insert(self):
"""
target: test construct with true auto_id
method: auto_id=True and insert values
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
df = cf.gen_default_dataframe_data(nb=100)
error = {ct.err_code: 0, ct.err_msg: "Auto_id is True, primary field should not have data."}
self.collection_wrap.construct_from_dataframe(c_name, df, primary_field=ct.default_int64_field_name,
auto_id=True, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_construct_auto_id_true_no_insert(self):
"""
target: test construct with true auto_id
method: auto_id=True and do not insert ids (primary field values are all None)
expected: verify num entities
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
df = cf.gen_default_dataframe_data()
# df.drop(ct.default_int64_field_name, axis=1, inplace=True)
df[ct.default_int64_field_name] = None
self.collection_wrap.construct_from_dataframe(c_name, df, primary_field=ct.default_int64_field_name,
auto_id=True)
assert self.collection_wrap.num_entities == ct.default_nb
@pytest.mark.tags(CaseLabel.L2)
def test_construct_none_value_auto_id_true(self):
"""
target: test construct with none value, auto_id
method: df primary field with none value, auto_id=true
expected: verify num entities and the auto-generated primary keys
"""
self._connect()
nb = 100
df = cf.gen_default_dataframe_data(nb)
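# assigning NaN coerces the primary key column to float and marks every
# value missing; with auto_id=True the server-generated ids are expected
# to take over (assumption, confirmed by the assertions below)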
df.iloc[:, 0] = numpy.NaN
res, _ = self.collection_wrap.construct_from_dataframe(cf.gen_unique_str(prefix), df,
primary_field=ct.default_int64_field_name, auto_id=True)
mutation_res = res[1]
assert cf._check_primary_keys(mutation_res.primary_keys, 100)
assert self.collection_wrap.num_entities == nb
@pytest.mark.tags(CaseLabel.L1)
def test_construct_auto_id_false(self):
"""
target: test construct with false auto_id
method: auto_id=False, primary_field correct
expected: verify auto_id
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
df = cf.gen_default_dataframe_data(ct.default_nb)
self.collection_wrap.construct_from_dataframe(c_name, df, primary_field=ct.default_int64_field_name,
auto_id=False)
assert not self.collection_wrap.schema.auto_id
assert self.collection_wrap.num_entities == ct.default_nb
@pytest.mark.tags(CaseLabel.L2)
def test_construct_none_value_auto_id_false(self):
"""
target: test construct with none value, auto_id
method: df primary field with none value, auto_id=false
expected: raise exception
"""
self._connect()
nb = 100
df = cf.gen_default_dataframe_data(nb)
df.iloc[:, 0] = numpy.NaN
error = {ct.err_code: 0, ct.err_msg: "Primary key type must be DataType.INT64"}
self.collection_wrap.construct_from_dataframe(cf.gen_unique_str(prefix), df,
primary_field=ct.default_int64_field_name, auto_id=False,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_construct_auto_id_false_same_values(self):
"""
target: test construct with false auto_id and same value
method: auto_id=False, primary field same values
expected: verify num entities
"""
self._connect()
nb = 100
df = cf.gen_default_dataframe_data(nb)
df.iloc[1:, 0] = 1
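# every row except the first now shares primary key 1; insert is expected
# to accept duplicate primary keys (deduplication, if any, happens at read time)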
res, _ = self.collection_wrap.construct_from_dataframe(cf.gen_unique_str(prefix), df,
primary_field=ct.default_int64_field_name, auto_id=False)
collection_w = res[0]
assert collection_w.num_entities == nb
mutation_res = res[1]
assert mutation_res.primary_keys == df[ct.default_int64_field_name].values.tolist()
@pytest.mark.tags(CaseLabel.L1)
def test_construct_auto_id_false_negative_values(self):
"""
target: test construct with negative values
method: auto_id=False, primary field values are negative
expected: verify num entities
"""
self._connect()
nb = 100
df = cf.gen_default_dataframe_data(nb)
new_values = pd.Series(data=[i for i in range(0, -nb, -1)])
df[ct.default_int64_field_name] = new_values
self.collection_wrap.construct_from_dataframe(cf.gen_unique_str(prefix), df,
primary_field=ct.default_int64_field_name, auto_id=False)
assert self.collection_wrap.num_entities == nb
@pytest.mark.tags(CaseLabel.L1)
def test_construct_from_dataframe_dup_name(self):
"""
target: test collection with dup name and insert dataframe
method: create collection with dup name, none schema, dataframe
expected: both collection objects are correct
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name, primary_field=ct.default_int64_field_name,
check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: default_schema})
df = cf.gen_default_dataframe_data(ct.default_nb)
self.collection_wrap.construct_from_dataframe(c_name, df, primary_field=ct.default_int64_field_name,
check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: default_schema})
# flush
assert collection_w.num_entities == ct.default_nb
assert collection_w.num_entities == self.collection_wrap.num_entities
class TestCollectionCount(TestcaseBase):
@pytest.mark.tags(CaseLabel.L2)
def test_collection_count_no_vectors(self):
"""
target: test that num_entities is correct for an empty collection
method: create a collection with no vectors in it,
assert the value returned by the num_entities attribute is equal to 0
expected: the count is equal to 0
"""
self._connect()
collection_w = self.init_collection_wrap()
assert collection_w.num_entities == 0
class TestCollectionCountIP(TestcaseBase):
"""
params are different insert counts (nb); depending on nb, a segment merge may or may not be triggered
"""
@pytest.fixture(
scope="function",
params=[
1,
1000,
2001
],
)
def insert_count(self, request):
yield request.param
@pytest.mark.tags(CaseLabel.L1)
def test_collection_count_after_index_created(self, insert_count):
"""
target: test num_entities after index has been created
method: insert vectors, create index, then call num_entities with correct params
expected: num_entities equals the insert count
"""
self._connect()
collection_w = self.init_collection_wrap()
data = cf.gen_default_list_data(insert_count, ct.default_dim)
collection_w.insert(data)
collection_w.create_index(ct.default_float_vec_field_name, default_index_params,
index_name=ct.default_index_name)
assert collection_w.num_entities == insert_count
class TestCollectionCountBinary(TestcaseBase):
"""
params are different insert counts (nb); depending on nb, a segment merge may or may not be triggered
"""
@pytest.fixture(
scope="function",
params=[
1,
1000,
2001
],
)
def insert_count(self, request):
yield request.param
# TODO: need to update and enable
@pytest.mark.tags(CaseLabel.L1)
def test_collection_count_after_index_created_binary(self, insert_count):
"""
target: test num_entities after a binary index has been created
method: insert vectors, create a binary index, then call num_entities with correct params
expected: num_entities equals the entities count just inserted
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name, schema=default_binary_schema)
df, _ = cf.gen_default_binary_dataframe_data(insert_count)
mutation_res, _ = collection_w.insert(data=df)
collection_w.create_index(ct.default_binary_vec_field_name, default_binary_index_params)
assert collection_w.num_entities == insert_count
@pytest.mark.tags(CaseLabel.L2)
def test_collection_count_no_entities(self):
"""
target: test that num_entities is correct for an empty binary collection
method: create a binary collection with no vectors in it,
assert the value returned by the num_entities attribute is equal to 0
expected: the count is equal to 0
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name, schema=default_binary_schema)
assert collection_w.num_entities == 0
class TestCollectionMultiCollections(TestcaseBase):
"""
params are different insert counts (nb); depending on nb, a segment merge may or may not be triggered
"""
@pytest.fixture(
scope="function",
params=[
1,
1000,
2001
],
)
def insert_count(self, request):
yield request.param
@pytest.mark.tags(CaseLabel.L0)
def test_collection_count_multi_collections_l2(self, insert_count):
"""
target: test that num_entities is correct across multiple collections (L2 metric)
method: create collection and add entities in it,
assert the value returned by num_entities is equal to length of entities
expected: the count is equal to the length of entities
"""
self._connect()
data = cf.gen_default_list_data(insert_count)
collection_list = []
collection_num = 20
for i in range(collection_num):
collection_name = gen_unique_str(uid_count)
collection_w = self.init_collection_wrap(name=collection_name)
collection_w.insert(data)
collection_list.append(collection_name)
for i in range(collection_num):
res, _ = self.collection_wrap.init_collection(collection_list[i])
assert self.collection_wrap.num_entities == insert_count
@pytest.mark.tags(CaseLabel.L2)
def test_collection_count_multi_collections_binary(self, insert_count):
"""
target: test that num_entities is correct across multiple collections (JACCARD metric)
method: create collection and add entities in it,
assert the value returned by count_entities method is equal to length of entities
expected: the count is equal to the length of entities
"""
self._connect()
df, _ = cf.gen_default_binary_dataframe_data(insert_count)
collection_list = []
collection_num = 20
for i in range(collection_num):
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name, schema=default_binary_schema)
mutation_res, _ = collection_w.insert(data=df)
collection_list.append(c_name)
for i in range(collection_num):
res, _ = self.collection_wrap.init_collection(collection_list[i])
assert self.collection_wrap.num_entities == insert_count
@pytest.mark.tags(CaseLabel.L2)
def test_collection_count_multi_collections_mix(self):
"""
target: test that num_entities is correct across a mix of float (L2) and binary (JACCARD) collections
method: create collection and add entities in it,
assert the value returned by count_entities method is equal to length of entities
expected: the count is equal to the length of entities
"""
self._connect()
collection_list = []
collection_num = 20
data = cf.gen_default_list_data()
df, _ = cf.gen_default_binary_dataframe_data(ct.default_nb)
for i in range(0, int(collection_num / 2)):
collection_name = gen_unique_str(uid_count)
collection_w = self.init_collection_wrap(name=collection_name)
collection_w.insert(data)
collection_list.append(collection_name)
for i in range(int(collection_num / 2), collection_num):
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name, schema=default_binary_schema)
mutation_res, _ = collection_w.insert(data=df)
collection_list.append(c_name)
for i in range(collection_num):
res, _ = self.collection_wrap.init_collection(collection_list[i])
assert self.collection_wrap.num_entities == ct.default_nb
class TestCreateCollection(TestcaseBase):
@pytest.mark.tags(CaseLabel.L1)
def test_create_collection_multithread(self):
"""
target: test create collection with multi-thread
method: create collection using multi-thread,
expected: collections are created
"""
self._connect()
threads_num = 8
threads = []
collection_names = []
def create():
collection_name = gen_unique_str(uid_create)
collection_names.append(collection_name)
self.init_collection_wrap(name=collection_name)
for i in range(threads_num):
t = MyThread(target=create, args=())
threads.append(t)
t.start()
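# brief pause between thread starts, presumably to stagger the create requests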
time.sleep(0.2)
for t in threads:
t.join()
for item in collection_names:
assert item in self.utility_wrap.list_collections()[0]
class TestCreateCollectionInvalid(TestcaseBase):
"""
Test creating collections with invalid params
"""
@pytest.mark.tags(CaseLabel.L2)
def test_create_collection_limit_fields(self):
"""
target: test create collection with maximum fields
method: create collection with maximum field number
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
limit_num = ct.max_field_num
field_schema_list = []
field_pr = cf.gen_int64_field(ct.default_int64_field_name, is_primary=True)
field_v = cf.gen_float_vec_field(ct.default_float_vec_field_name)
field_schema_list.append(field_pr)
field_schema_list.append(field_v)
for i in range(limit_num):
field_name_tmp = gen_unique_str("field_name")
field_schema_temp = cf.gen_int64_field(field_name_tmp)
field_schema_list.append(field_schema_temp)
error = {ct.err_code: 1, ct.err_msg: "'maximum field\'s number should be limited to 256'"}
schema, _ = self.collection_schema_wrap.init_collection_schema(fields=field_schema_list)
self.init_collection_wrap(name=c_name, schema=schema, check_task=CheckTasks.err_res, check_items=error)
class TestDropCollection(TestcaseBase):
"""
******************************************************************
The following cases are used to test `drop_collection` function
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L0)
def test_drop_collection_A(self):
"""
target: test drop collection created with correct params
method: create collection and then drop it,
assert the value returned by the drop method
expected: status ok, and no collection in collections
"""
self._connect()
c_name = cf.gen_unique_str()
collection_wr = self.init_collection_wrap(name=c_name)
collection_wr.drop()
assert not self.utility_wrap.has_collection(c_name)[0]
@pytest.mark.tags(CaseLabel.L2)
def test_drop_collection_without_connection(self):
"""
target: test drop collection, without connection
method: drop collection with correct params, with a disconnected instance
expected: drop raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
collection_wr = self.init_collection_wrap(c_name)
self.connection_wrap.remove_connection(ct.default_alias)
res_list, _ = self.connection_wrap.list_connections()
assert ct.default_alias not in res_list
error = {ct.err_code: 0, ct.err_msg: 'should create connect first'}
collection_wr.drop(check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_drop_collection_not_existed(self):
"""
target: test drop collection which was never created
method: generate a random collection name which does not exist in db,
assert the exception raised by the drop_collection method
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str()
self.init_collection_wrap(name=c_name)
c_name_2 = cf.gen_unique_str()
error = {ct.err_code: 0, ct.err_msg: 'DescribeCollection failed: can\'t find collection: %s' % c_name_2}
self.utility_wrap.drop_collection(c_name_2, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_create_drop_collection_multithread(self):
"""
target: test create and drop collection with multi-thread
method: create and drop collection using multi-thread,
expected: collections are created, and dropped
"""
self._connect()
threads_num = 8
threads = []
collection_names = []
def create():
c_name = cf.gen_unique_str()
collection_names.append(c_name)
collection_wr = self.init_collection_wrap(name=c_name)
collection_wr.drop()
for i in range(threads_num):
t = MyThread(target=create, args=())
threads.append(t)
t.start()
time.sleep(0.2)
for t in threads:
t.join()
for item in collection_names:
assert not self.utility_wrap.has_collection(item)[0]
class TestDropCollectionInvalid(TestcaseBase):
"""
Test drop collection with invalid params
"""
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("name", ["12-s", "12 s", "(mn)", "中文", "%$#", "a".join("a" for i in range(256))])
def test_drop_collection_with_invalid_collection_name(self, name):
"""
target: test drop invalid collection
method: drop collection with invalid collection name
expected: raise exception
"""
self._connect()
error = {ct.err_code: 1, ct.err_msg: "Invalid collection name: {}".format(name)}
self.utility_wrap.drop_collection(name, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_drop_collection_with_empty_or_None_collection_name(self):
"""
target: test drop invalid collection
method: drop collection with empty or None collection name
expected: raise exception
"""
self._connect()
error = {ct.err_code: -1, ct.err_msg: '`collection_name` value is illegal'}
self.utility_wrap.drop_collection('', check_task=CheckTasks.err_res, check_items=error)
error_none = {ct.err_code: -1, ct.err_msg: '`collection_name` value None is illegal'}
self.utility_wrap.drop_collection(None, check_task=CheckTasks.err_res, check_items=error_none)
class TestHasCollection(TestcaseBase):
"""
******************************************************************
The following cases are used to test `has_collection` function
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L2)
def test_has_collection_without_connection(self):
"""
target: test has collection, without connection
method: calling has collection with correct params, with a disconnected instance
expected: has collection raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
self.init_collection_wrap(c_name)
self.connection_wrap.remove_connection(ct.default_alias)
res_list, _ = self.connection_wrap.list_connections()
assert ct.default_alias not in res_list
error = {ct.err_code: 0, ct.err_msg: 'should create connect first'}
self.utility_wrap.has_collection(c_name, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_has_collection_not_existed(self):
"""
target: test has_collection on a dropped collection
method: generate a random collection name, create the collection, then drop it,
assert the value returned by has_collection method
expected: False
"""
self._connect()
c_name = cf.gen_unique_str()
collection_wr = self.init_collection_wrap(name=c_name)
collection_wr.drop()
assert not self.utility_wrap.has_collection(c_name)[0]
@pytest.mark.tags(CaseLabel.L2)
def test_has_collection_multithread(self):
"""
target: test has_collection with multi-threads
method: check an existing collection from multiple threads concurrently
expected: has_collection returns True in every thread
"""
self._connect()
threads_num = 4
threads = []
c_name = cf.gen_unique_str()
self.init_collection_wrap(name=c_name)
def has():
assert self.utility_wrap.has_collection(c_name)
# assert not assert_collection(connect, collection_name)
for i in range(threads_num):
t = MyThread(target=has, args=())
threads.append(t)
t.start()
time.sleep(0.2)
for t in threads:
t.join()
class TestHasCollectionInvalid(TestcaseBase):
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("name", ["12-s", "12 s", "(mn)", "中文", "%$#", "a".join("a" for i in range(256))])
def test_has_collection_with_invalid_collection_name(self, name):
"""
target: test has_collection with an invalid collection name
method: call has_collection with an invalid collection name
expected: raise exception
"""
self._connect()
error = {ct.err_code: 1, ct.err_msg: "Invalid collection name: {}".format(name)}
self.utility_wrap.has_collection(name, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_has_collection_with_empty_collection_name(self):
"""
target: test has_collection with an empty collection name
method: call has_collection with an empty collection name
expected: raise exception
"""
self._connect()
error = {ct.err_code: -1, ct.err_msg: '`collection_name` value is illegal'}
self.utility_wrap.has_collection('', check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_has_collection_with_none_collection_name(self):
"""
target: test has_collection with None collection name
method: call has_collection with None as the collection name
expected: raise exception
"""
self._connect()
error = {ct.err_code: -1, ct.err_msg: '`collection_name` value None is illegal'}
self.utility_wrap.has_collection(None, check_task=CheckTasks.err_res, check_items=error)
class TestListCollections(TestcaseBase):
"""
******************************************************************
The following cases are used to test `utility.list_collections()` function
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L0)
def test_list_collections_multi_collections(self):
"""
target: test list collections
method: create collection, assert the value returned by list_collections method
expected: True
"""
self._connect()
collection_num = 50
collection_names = []
for i in range(collection_num):
collection_name = cf.gen_unique_str()
collection_names.append(collection_name)
self.init_collection_wrap(name=collection_name)
for i in range(collection_num):
assert collection_names[i] in self.utility_wrap.list_collections()[0]
self.utility_wrap.drop_collection(collection_names[i])
@pytest.mark.tags(CaseLabel.L2)
def test_list_collections_without_connection(self):
"""
target: test list collections, without connection
method: calling list collections with correct params, with a disconnected instance
expected: list collections raise exception
"""
self._connect()
self.connection_wrap.remove_connection(ct.default_alias)
res_list, _ = self.connection_wrap.list_connections()
assert ct.default_alias not in res_list
error = {ct.err_code: 0, ct.err_msg: 'should create connect first'}
self.utility_wrap.list_collections(check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_list_collections_multithread(self):
"""
target: test list collection with multi-threads
method: list collection using multi-threads
expected: list collections correctly
"""
self._connect()
threads_num = 10
threads = []
collection_name = cf.gen_unique_str()
self.init_collection_wrap(name=collection_name)
def _list():
assert collection_name in self.utility_wrap.list_collections()[0]
for i in range(threads_num):
t = MyThread(target=_list)
threads.append(t)
t.start()
time.sleep(0.2)
for t in threads:
t.join()
class TestLoadCollection(TestcaseBase):
"""
******************************************************************
The following cases are used to test `collection.load()` function
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L0)
def test_load_collection_after_index(self):
"""
target: test load collection, after index created
method: insert and create index, load collection with correct params
expected: no error raised
"""
self._connect()
collection_w = self.init_collection_wrap()
data = cf.gen_default_list_data()
collection_w.insert(data)
collection_w.create_index(ct.default_float_vec_field_name, default_index_params,
index_name=ct.default_index_name)
collection_w.load()
collection_w.release()
@pytest.mark.tags(CaseLabel.L1)
def test_load_collection_after_index_binary(self):
"""
target: test load binary_collection, after index created
method: insert and create index, load binary_collection with correct params
expected: no error raised
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name, schema=default_binary_schema)
df, _ = cf.gen_default_binary_dataframe_data(ct.default_nb)
mutation_res, _ = collection_w.insert(data=df)
collection_w.create_index(ct.default_binary_vec_field_name, default_binary_index_params)
collection_w.load()
collection_w.release()
@pytest.mark.tags(CaseLabel.L2)
def test_load_empty_collection(self):
"""
target: test load an empty collection with no data inserted
method: no entities in collection, load and release the collection
expected: load and release successfully
"""
self._connect()
collection_w = self.init_collection_wrap()
collection_w.load()
collection_w.release()
@pytest.mark.tags(CaseLabel.L2)
def test_load_collection_dis_connect(self):
"""
target: test load collection, without connection
method: load collection with correct params, with a disconnected instance
expected: load raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
collection_wr = self.init_collection_wrap(c_name)
self.connection_wrap.remove_connection(ct.default_alias)
res_list, _ = self.connection_wrap.list_connections()
assert ct.default_alias not in res_list
error = {ct.err_code: 0, ct.err_msg: 'should create connect first'}
collection_wr.load(check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_release_collection_dis_connect(self):
"""
target: test release collection, without connection
method: release collection with correct params, with a disconnected instance
expected: release raise exception
"""
self._connect()
c_name = cf.gen_unique_str(prefix)
collection_wr = self.init_collection_wrap(c_name)
self.connection_wrap.remove_connection(ct.default_alias)
res_list, _ = self.connection_wrap.list_connections()
assert ct.default_alias not in res_list
error = {ct.err_code: 0, ct.err_msg: 'should create connect first'}
collection_wr.release(check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_load_collection_not_existed(self):
"""
target: test load invalid collection
method: load a dropped (not existed) collection
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str()
collection_wr = self.init_collection_wrap(name=c_name)
collection_wr.drop()
error = {ct.err_code: 0,
ct.err_msg: "DescribeCollection failed: can't find collection: %s" % c_name}
collection_wr.load(check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_release_collection_not_existed(self):
"""
target: test release a not existed collection
method: release a dropped (not existed) collection
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str()
collection_wr = self.init_collection_wrap(name=c_name)
collection_wr.drop()
error = {ct.err_code: 0,
ct.err_msg: "DescribeCollection failed: can't find collection: %s" % c_name}
collection_wr.release(check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_release_collection_not_load(self):
"""
target: test release collection without load
method: release collection without load
expected: release successfully
"""
self._connect()
c_name = cf.gen_unique_str()
collection_wr = self.init_collection_wrap(name=c_name)
collection_wr.release()
@pytest.mark.tags(CaseLabel.L0)
def test_load_collection_after_load_release(self):
"""
target: test load collection after load and release
method: 1.load and release collection after entities flushed
2.re-load collection
expected: No exception
"""
self._connect()
collection_w = self.init_collection_wrap()
insert_data = cf.gen_default_list_data()
collection_w.insert(data=insert_data)
assert collection_w.num_entities == ct.default_nb
collection_w.load()
collection_w.release()
collection_w.load()
@pytest.mark.tags(CaseLabel.L2)
def test_load_collection_repeatedly(self):
"""
target: test load collection repeatedly
method: load collection twice
expected: No exception
"""
self._connect()
collection_w = self.init_collection_wrap()
insert_data = cf.gen_default_list_data()
collection_w.insert(data=insert_data)
assert collection_w.num_entities == ct.default_nb
collection_w.load()
collection_w.load()
@pytest.mark.tags(CaseLabel.L2)
def test_load_release_collection(self):
"""
target: test load and release a dropped collection
method: 1. load, release and drop collection
2. load and release dropped collection
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str()
collection_wr = self.init_collection_wrap(name=c_name)
collection_wr.load()
collection_wr.release()
collection_wr.drop()
error = {ct.err_code: 0,
ct.err_msg: "DescribeCollection failed: can't find collection: %s" % c_name}
collection_wr.load(check_task=CheckTasks.err_res, check_items=error)
collection_wr.release(check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L0)
def test_release_collection_after_drop(self):
"""
target: test release collection after drop
method: load the collection, drop it, then release it
expected: raise exception
"""
self._connect()
c_name = cf.gen_unique_str()
collection_wr = self.init_collection_wrap(name=c_name)
collection_wr.load()
collection_wr.drop()
error = {ct.err_code: 0,
ct.err_msg: "DescribeCollection failed: can't find collection: %s" % c_name}
collection_wr.release(check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L0)
def test_load_partitions_release_collection(self):
"""
target: test release collection after load partitions
method: insert entities into partitions, load partitions and release collection
expected: release collection successfully
"""
self._connect()
collection_w = self.init_collection_wrap()
partition_w = self.init_partition_wrap(collection_wrap=collection_w, name=ct.default_tag)
data = cf.gen_default_list_data()
collection_w.insert(data=data, partition_name=ct.default_tag)
assert collection_w.num_entities == ct.default_nb
partition_w.load()
collection_w.release()
@pytest.fixture(scope="function", params=ct.get_invalid_strs)
def get_non_number_replicas(self, request):
if request.param == 1:
pytest.skip("1 is valid replica number")
if request.param is None:
pytest.skip("None is valid replica number")
yield request.param
@pytest.mark.tags(CaseLabel.L2)
def test_load_replica_non_number(self, get_non_number_replicas):
"""
target: test load collection with non-number replicas
method: load with non-number replicas
expected: raise exceptions
"""
# create, insert
collection_w = self.init_collection_wrap(cf.gen_unique_str(prefix))
df = cf.gen_default_dataframe_data()
insert_res, _ = collection_w.insert(df)
assert collection_w.num_entities == ct.default_nb
# load with non-number replicas
error = {ct.err_code: 0, ct.err_msg: f"but expected one of: int, long"}
collection_w.load(replica_number=get_non_number_replicas, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("replicas", [-1, 0, None])
def test_load_replica_invalid_number(self, replicas):
"""
target: test load collection with invalid replica number
method: load with invalid replica number (-1, 0 or None)
expected: load succeeds, falling back to the default of one replica
"""
# create, insert
collection_w = self.init_collection_wrap(cf.gen_unique_str(prefix))
df = cf.gen_default_dataframe_data()
insert_res, _ = collection_w.insert(df)
assert collection_w.num_entities == ct.default_nb
collection_w.load(replica_number=replicas)
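# invalid or None replica numbers are expected to fall back to the default
# of a single replica group (assumption, checked by the asserts below)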
replicas = collection_w.get_replicas()[0]
groups = replicas.groups
assert len(groups) == 1
assert len(groups[0].shards) == 2
@pytest.mark.tags(CaseLabel.L2)
def test_load_replica_greater_than_querynodes(self):
"""
target: test load with more replicas than querynodes
method: load with 3 replicas (only 2 querynodes deployed)
expected: Raise exception
"""
# create, insert
collection_w = self.init_collection_wrap(cf.gen_unique_str(prefix))
df = cf.gen_default_dataframe_data()
insert_res, _ = collection_w.insert(df)
assert collection_w.num_entities == ct.default_nb
error = {ct.err_code: 1, ct.err_msg: f"no enough nodes to create replicas"}
collection_w.load(replica_number=3, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.xfail(reason="https://github.com/milvus-io/milvus/issues/16562")
@pytest.mark.tags(CaseLabel.L3)
def test_load_replica_change(self):
"""
target: test load replica change
method: 1.load with replica 1
2.load with a new replica number
3.release collection
4.load with a new replica
expected: The second time successfully loaded with a new replica number
"""
# create, insert
collection_w = self.init_collection_wrap(cf.gen_unique_str(prefix))
df = cf.gen_default_dataframe_data()
insert_res, _ = collection_w.insert(df)
assert collection_w.num_entities == ct.default_nb
collection_w.load(replica_number=1)
collection_w.query(expr=f"{ct.default_int64_field_name} in [0]")
# loading again with a different replica number has no effect while loaded: still one replica group
collection_w.load(replica_number=2)
one_replica, _ = collection_w.get_replicas()
assert len(one_replica.groups) == 1
collection_w.release()
collection_w.load(replica_number=2)
two_replicas, _ = collection_w.get_replicas()
log.debug(two_replicas)
assert len(two_replicas.groups) == 2
collection_w.query(expr=f"{ct.default_int64_field_name} in [0]")
# verify loaded segments include 2 replicas and twice the num entities
seg_info, _ = self.utility_wrap.get_query_segment_info(collection_w.name)
seg_ids = list(map(lambda seg: seg.segmentID, seg_info))
num_entities = list(map(lambda seg: seg.num_rows, seg_info))
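# XOR over all segment ids cancels ids that occur an even number of times;
# a result of 0 therefore means every segment was loaded by both replicas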
assert reduce(lambda x, y: x ^ y, seg_ids) == 0
assert reduce(lambda x, y: x + y, num_entities) == ct.default_nb * 2
@pytest.mark.tags(CaseLabel.L3)
def test_load_replica_multi(self):
"""
target: test load with multiple replicas
method: 1.create collection with one shard
2.insert multiple segments
3.load with multiple replicas
4.query and search
expected: Query and search successfully
"""
# create, insert
collection_w = self.init_collection_wrap(cf.gen_unique_str(prefix), shards_num=1)
tmp_nb = 1000
replica_number = 5
for i in range(replica_number):
df = cf.gen_default_dataframe_data(nb=tmp_nb, start=i * tmp_nb)
insert_res, _ = collection_w.insert(df)
assert collection_w.num_entities == (i + 1) * tmp_nb
collection_w.load(replica_number=replica_number)
replicas = collection_w.get_replicas()[0]
assert len(replicas.groups) == replica_number
query_res, _ = collection_w.query(expr=f"{ct.default_int64_field_name} in [0, {tmp_nb}]")
assert len(query_res) == 2
search_res, _ = collection_w.search(vectors, default_search_field, default_search_params, default_limit)
assert len(search_res[0]) == ct.default_limit
@pytest.mark.tags(CaseLabel.L3)
def test_load_replica_partitions(self):
"""
target: test load replica with partitions
method: 1.Create collection and one partition
2.Insert data into collection and partition
3.Load multi replicas with partition
4.Query
expected: Verify query result
"""
collection_w = self.init_collection_wrap(cf.gen_unique_str(prefix))
df_1 = cf.gen_default_dataframe_data(nb=default_nb)
df_2 = cf.gen_default_dataframe_data(nb=default_nb, start=default_nb)
collection_w.insert(df_1)
partition_w = self.init_partition_wrap(collection_w, ct.default_tag)
partition_w.insert(df_2)
assert collection_w.num_entities == ct.default_nb * 2
collection_w.load([partition_w.name], replica_number=2)
# id 0 was inserted into the default partition, so querying only default_tag returns empty
collection_w.query(expr=f"{ct.default_int64_field_name} in [0]", partition_names=[ct.default_tag],
check_task=CheckTasks.check_query_empty)
# id 3000 is the first row of df_2, which was inserted into the loaded partition
collection_w.query(expr=f"{ct.default_int64_field_name} in [3000]",
check_task=CheckTasks.check_query_results,
check_items={'exp_res': df_2.iloc[:1, :1].to_dict('records')})
error = {ct.err_code: 1, ct.err_msg: f"not loaded into memory"}
collection_w.query(expr=f"{ct.default_int64_field_name} in [0]",
partition_names=[ct.default_partition_name, ct.default_tag],
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L3)
def test_load_replica_non_shard_leader(self):
"""
target: test replica groups where one of the QNs is not a shard leader
method: 1.deploy cluster with 5 QNs
2.create collection with 2 shards
3.insert and flush
4.load with 2 replica number
5.insert growing data
6.search and query
expected: Verify search and query results
"""
# create and insert entities
collection_w = self.init_collection_wrap(cf.gen_unique_str(prefix), shards_num=2)
df = cf.gen_default_dataframe_data()
collection_w.insert(df)
assert collection_w.num_entities == ct.default_nb
# load with multi replica and insert growing data
collection_w.load(replica_number=2)
df_growing = cf.gen_default_dataframe_data(100, start=ct.default_nb)
collection_w.insert(df_growing)
replicas = collection_w.get_replicas()[0]
# verify there are 2 groups (2 replicas)
assert len(replicas.groups) == 2
log.debug(replicas)
for group in replicas.groups:
# verify each group has 2 shards
assert len(group.shards) == 2
shard_leaders = []
# verify one group has 3 querynodes, and one of the querynodes isn't a shard leader
if len(group.group_nodes) == 3:
for shard in group.shards:
shard_leaders.append(shard.shard_leader)
assert len(shard_leaders) == 2
# Verify 2 replicas segments loaded
# https://github.com/milvus-io/milvus/issues/16598
seg_info, _ = self.utility_wrap.get_query_segment_info(collection_w.name)
seg_ids = list(map(lambda seg: seg.segmentID, seg_info))
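# XOR == 0: every segment id occurs an even number of times, i.e. each
# segment is loaded once per replica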
assert reduce(lambda x, y: x ^ y, seg_ids) == 0
# verify search successfully
res, _ = collection_w.search(vectors, default_search_field, default_search_params, default_limit)
assert len(res[0]) == ct.default_limit
# verify query sealed and growing data successfully
exp_res = [{'int64': 0}, {'int64': 3000}]
collection_w.query(expr=f"{ct.default_int64_field_name} in [0, {ct.default_nb}]",
check_task=CheckTasks.check_query_results,
check_items={'exp_res': exp_res})
@pytest.mark.tags(CaseLabel.L3)
def test_load_replica_multiple_shard_leader(self):
"""
target: test replica groups where one QN is the shard leader of multiple shards
method: 1.deploy cluster with 5 QNs
2.create collection with 3 shards
3.insert and flush
4.load with 2 replica number
5.insert growing data
6.search and query
expected: Verify search and query results
"""
# create and insert
collection_w = self.init_collection_wrap(cf.gen_unique_str(prefix), shards_num=3)
df = cf.gen_default_dataframe_data()
collection_w.insert(df)
assert collection_w.num_entities == ct.default_nb
# load with multi replicas and insert growing data
collection_w.load(replica_number=2)
df_growing = cf.gen_default_dataframe_data(100, start=ct.default_nb)
collection_w.insert(df_growing)
# verify replica infos
replicas, _ = collection_w.get_replicas()
log.debug(replicas)
assert len(replicas.groups) == 2
for group in replicas.groups:
# verify each group has 3 shards
assert len(group.shards) == 3
# verify one group has 2 querynodes, and one of the querynodes subscribes to 2 DML channels
shard_leaders = []
if len(group.group_nodes) == 2:
for shard in group.shards:
shard_leaders.append(shard.shard_leader)
assert len(shard_leaders) == 3 and len(set(shard_leaders)) == 2
# Verify 2 replicas segments loaded
seg_info, _ = self.utility_wrap.get_query_segment_info(collection_w.name)
seg_ids = list(map(lambda seg: seg.segmentID, seg_info))
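# XOR == 0 implies each segment id appears twice, once per replica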
assert reduce(lambda x, y: x ^ y, seg_ids) == 0
# Verify search successfully
res, _ = collection_w.search(vectors, default_search_field, default_search_params, default_limit)
assert len(res[0]) == ct.default_limit
# Verify query sealed and growing entities successfully
collection_w.query(expr=f"{ct.default_int64_field_name} in [0, {ct.default_nb}]",
check_task=CheckTasks.check_query_results,
check_items={'exp_res': [{'int64': 0}, {'int64': 3000}]})
class TestReleaseAdvanced(TestcaseBase):
@pytest.mark.tags(CaseLabel.L0)
def test_release_collection_during_searching(self):
"""
target: test release collection during searching
method: insert entities into collection, flush and load collection, release collection during searching
expected: raise exception
"""
self._connect()
data = cf.gen_default_list_data()
c_name = cf.gen_unique_str()
collection_wr = self.init_collection_wrap(name=c_name)
collection_wr.insert(data=data)
assert collection_wr.num_entities == ct.default_nb
collection_wr.load()
search_res, _ = collection_wr.search(vectors, default_search_field, default_search_params,
default_limit, _async=True)
collection_wr.release()
error = {ct.err_code: 1, ct.err_msg: 'collection %s was not loaded into memory' % c_name}
collection_wr.search(vectors, default_search_field, default_search_params, default_limit,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_release_partition_during_searching(self):
"""
target: test release partition during searching
method: insert entities into partition, flush and load partition, release partition during searching
expected: raise exception
"""
self._connect()
partition_num = 1
collection_w = self.init_collection_general(prefix, True, 10, partition_num, is_index=True)[0]
par = collection_w.partitions
par_name = par[partition_num].name
par[partition_num].load()
limit = 10
collection_w.search(vectors, default_search_field,
default_search_params, limit, default_search_exp,
[par_name])
par[partition_num].release()
collection_w.search(vectors, default_search_field,
default_search_params, limit, default_search_exp,
[par_name],
check_task=CheckTasks.err_res,
check_items={"err_code": 1,
"err_msg": "partition has been released"})
@pytest.mark.tags(CaseLabel.L0)
def test_release_indexed_collection_during_searching(self):
"""
target: test release indexed collection during searching
method: insert entities into partition, flush and load partition, release collection during searching
expected: raise exception
"""
self._connect()
partition_num = 1
collection_w = self.init_collection_general(prefix, True, 10, partition_num, is_index=True)[0]
par = collection_w.partitions
par_name = par[partition_num].name
par[partition_num].load()
limit = 10
collection_w.search(vectors, default_search_field,
default_search_params, limit, default_search_exp,
[par_name], _async=True)
collection_w.release()
error = {ct.err_code: 1, ct.err_msg: 'collection %s was not loaded into memory' % collection_w.name}
collection_w.search(vectors, default_search_field,
default_search_params, limit, default_search_exp,
[par_name],
check_task=CheckTasks.err_res,
check_items=error)
class TestLoadPartition(TestcaseBase):
"""
******************************************************************
The following cases are used to test `load_partition` function
******************************************************************
"""
@pytest.fixture(
scope="function",
params=gen_simple_index()
)
def get_simple_index(self, request, connect):
# if str(connect._cmd("mode")) == "CPU":
# if request.param["index_type"] in index_cpu_not_support():
# pytest.skip("sq8h not support in cpu mode")
return request.param
@pytest.fixture(
scope="function",
params=gen_binary_index()
)
def get_binary_index(self, request):
log.info(request.param)
if request.param["index_type"] in ct.binary_support:
return request.param
else:
pytest.skip("Skip index Temporary")
@pytest.mark.tags(CaseLabel.L0)
def test_load_partition_after_index_binary(self, get_binary_index):
"""
target: test load binary_collection, after index created
method: insert and create index, load binary_collection with correct params
expected: no error raised
"""
self._connect()
partition_num = 1
collection_w = self.init_collection_general(prefix, True, ct.default_nb, partition_num,
is_binary=True, is_index=True)[0]
for metric_type in ct.binary_metrics:
log.info(metric_type)
get_binary_index["metric_type"] = metric_type
if get_binary_index["index_type"] == "BIN_IVF_FLAT" and metric_type in ct.structure_metrics:
error = {ct.err_code: -1, ct.err_msg: 'Invalid metric_type: %s, which does not '
'match the index type: %s' % (metric_type, get_binary_index["index_type"])}
collection_w.create_index(ct.default_binary_vec_field_name, get_binary_index,
check_task=CheckTasks.err_res, check_items=error)
else:
collection_w.create_index(ct.default_binary_vec_field_name, get_binary_index)
par = collection_w.partitions
par[partition_num].load()
@pytest.mark.tags(CaseLabel.L2)
def test_load_partition_dis_connect(self):
"""
target: test load partition, without connection
method: load partition with correct params, with a disconnected instance
expected: load raise exception
"""
self._connect()
collection_w = self.init_collection_wrap()
partition_name = cf.gen_unique_str(prefix)
description = cf.gen_unique_str("desc_")
partition_w = self.init_partition_wrap(collection_w, partition_name,
description=description,
check_task=CheckTasks.check_partition_property,
check_items={"name": partition_name, "description": description,
"is_empty": True, "num_entities": 0}
)
partition_w.load()
self.connection_wrap.remove_connection(ct.default_alias)
res_list, _ = self.connection_wrap.list_connections()
assert ct.default_alias not in res_list
error = {ct.err_code: 0, ct.err_msg: 'should create connect first.'}
partition_w.load(check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_release_partition_dis_connect(self, connect, dis_connect, collection):
"""
target: test release partition, without connection
method: release partition with correct params, with a disconnected instance
expected: release raise exception
"""
self._connect()
collection_w = self.init_collection_wrap()
partition_name = cf.gen_unique_str(prefix)
description = cf.gen_unique_str("desc_")
partition_w = self.init_partition_wrap(collection_w, partition_name,
description=description,
check_task=CheckTasks.check_partition_property,
check_items={"name": partition_name, "description": description,
"is_empty": True, "num_entities": 0}
)
partition_w.load()
self.connection_wrap.remove_connection(ct.default_alias)
res_list, _ = self.connection_wrap.list_connections()
assert ct.default_alias not in res_list
error = {ct.err_code: 0, ct.err_msg: 'should create connect first.'}
partition_w.release(check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_load_partition_not_existed(self, connect, collection):
"""
target: test load partition for invalid scenario
method: load a dropped (non-existent) partition
expected: raise exception and report the error
"""
self._connect()
collection_w = self.init_collection_wrap()
partition_name = cf.gen_unique_str(prefix)
description = cf.gen_unique_str("desc_")
partition_w = self.init_partition_wrap(collection_w, partition_name,
description=description,
check_task=CheckTasks.check_partition_property,
check_items={"name": partition_name, "description": description,
"is_empty": True, "num_entities": 0}
)
partition_w.drop()
error = {ct.err_code: 0, ct.err_msg: 'partitionID of partitionName:%s can not be find' % partition_name}
partition_w.load(check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L0)
def test_release_partition_not_load(self):
"""
target: test release partition without load
method: release partition without load
expected: release success
"""
self._connect()
collection_w = self.init_collection_wrap()
partition_name = cf.gen_unique_str(prefix)
description = cf.gen_unique_str("desc_")
partition_w = self.init_partition_wrap(collection_w, partition_name,
description=description,
check_task=CheckTasks.check_partition_property,
check_items={"name": partition_name, "description": description,
"is_empty": True, "num_entities": 0}
)
partition_w.release()
@pytest.mark.tags(CaseLabel.L2)
def test_load_release_after_drop(self, connect, collection):
"""
target: test load and release partition after drop
method: drop partition and then load and release it
expected: raise exception
"""
self._connect()
collection_w = self.init_collection_wrap()
partition_name = cf.gen_unique_str(prefix)
description = cf.gen_unique_str("desc_")
partition_w = self.init_partition_wrap(collection_w, partition_name,
description=description,
check_task=CheckTasks.check_partition_property,
check_items={"name": partition_name, "description": description,
"is_empty": True, "num_entities": 0}
)
partition_w.drop()
error = {ct.err_code: 0, ct.err_msg: 'partitionID of partitionName:%s can not be find' % partition_name}
partition_w.load(check_task=CheckTasks.err_res, check_items=error)
partition_w.release(check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L0)
def test_release_partition_after_drop(self, connect, collection):
"""
target: test release partition after drop
method: drop partition, then release it
expected: raise exception
"""
self._connect()
collection_w = self.init_collection_wrap()
partition_name = cf.gen_unique_str(prefix)
description = cf.gen_unique_str("desc_")
partition_w = self.init_partition_wrap(collection_w, partition_name,
description=description,
check_task=CheckTasks.check_partition_property,
check_items={"name": partition_name, "description": description,
"is_empty": True, "num_entities": 0}
)
partition_w.drop()
error = {ct.err_code: 0, ct.err_msg: 'partitionID of partitionName:%s can not be find' % partition_name}
partition_w.release(check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L0)
def test_load_release_after_collection_drop(self, connect, collection):
"""
target: test load and release partition after collection drop
method: drop collection, then load and release its partition
expected: raise exception
"""
self._connect()
collection_w = self.init_collection_wrap()
name = collection_w.name
partition_name = cf.gen_unique_str(prefix)
description = cf.gen_unique_str("desc_")
partition_w = self.init_partition_wrap(collection_w, partition_name,
description=description,
check_task=CheckTasks.check_partition_property,
check_items={"name": partition_name, "description": description,
"is_empty": True, "num_entities": 0}
)
collection_w.drop()
error = {ct.err_code: 0, ct.err_msg: "HasPartition failed: can\'t find collection: %s" % name}
partition_w.load(check_task=CheckTasks.err_res, check_items=error)
partition_w.release(check_task=CheckTasks.err_res, check_items=error)
# === mtools/mlogmerge/mlogmerge.py · gianpaj/mtools · Apache-2.0 ===
#!/usr/bin/python
print "deprecated since version 1.1.0 of mtools. Use 'mlogfilter <logfile> <logfile> ...' instead."
# === spkrepo/tests/test_admin.py · publicarray/spkrepo · MIT ===
# -*- coding: utf-8 -*-
import os
from unittest import TestLoader, TestSuite
from flask import current_app, url_for
from spkrepo.ext import db
from spkrepo.models import Package, Version
from spkrepo.tests.common import (
BaseTestCase,
BuildFactory,
PackageFactory,
VersionFactory,
create_image,
)
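# The cases below form an access-control matrix over the Flask-Admin views:
# anonymous requests are redirected or rejected, and each role (user, developer,
# package_admin, admin) must receive the expected 200/403 per view.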
class IndexTestCase(BaseTestCase):
def test_anonymous(self):
self.assert302(self.client.get(url_for("admin.index"), follow_redirects=False))
def test_anonymous_redirects_to_login(self):
self.assertRedirectsTo(
self.client.get(url_for("admin.index")), url_for("security.login")
)
def test_user(self):
with self.logged_user():
self.assert403(self.client.get(url_for("admin.index")))
def test_developer(self):
with self.logged_user("developer"):
self.assert200(self.client.get(url_for("admin.index")))
def test_package_admin(self):
with self.logged_user("package_admin"):
self.assert200(self.client.get(url_for("admin.index")))
def test_admin(self):
with self.logged_user("admin"):
self.assert200(self.client.get(url_for("admin.index")))
class UserTestCase(BaseTestCase):
def test_anonymous(self):
self.assert403(self.client.get(url_for("user.index_view")))
def test_user(self):
with self.logged_user():
self.assert403(self.client.get(url_for("user.index_view")))
def test_developer(self):
with self.logged_user("developer"):
self.assert403(self.client.get(url_for("user.index_view")))
def test_package_admin(self):
with self.logged_user("package_admin"):
self.assert403(self.client.get(url_for("user.index_view")))
def test_admin(self):
with self.logged_user("admin"):
self.assert200(self.client.get(url_for("user.index_view")))
def test_action_activate_one(self):
with self.logged_user("admin"):
user = self.create_user()
user.active = False
db.session.commit()
response = self.client.post(
url_for("user.action_view"),
follow_redirects=True,
data=dict(action="activate", rowid=[user.id]),
)
self.assert200(response)
self.assertIn(
"User was successfully activated.",
response.data.decode(response.charset),
)
self.assertTrue(user.active)
def test_action_activate_multi(self):
with self.logged_user("admin"):
user1 = self.create_user()
user1.active = False
user2 = self.create_user()
user2.active = False
db.session.commit()
response = self.client.post(
url_for("user.action_view"),
follow_redirects=True,
data=dict(action="activate", rowid=[user1.id, user2.id]),
)
self.assert200(response)
self.assertIn(
"2 users were successfully activated.",
response.data.decode(response.charset),
)
self.assertTrue(user1.active)
self.assertTrue(user2.active)
def test_action_deactivate(self):
with self.logged_user("admin"):
user = self.create_user()
user.active = True
db.session.commit()
response = self.client.post(
url_for("user.action_view"),
follow_redirects=True,
data=dict(action="deactivate", rowid=[user.id]),
)
self.assert200(response)
self.assertIn(
"User was successfully deactivated.",
response.data.decode(response.charset),
)
self.assertFalse(user.active)
def test_action_deactivate_multi(self):
with self.logged_user("admin"):
user1 = self.create_user()
user1.active = True
user2 = self.create_user()
user2.active = True
db.session.commit()
response = self.client.post(
url_for("user.action_view"),
follow_redirects=True,
data=dict(action="deactivate", rowid=[user1.id, user2.id]),
)
self.assert200(response)
self.assertIn(
"2 users were successfully deactivated.",
response.data.decode(response.charset),
)
self.assertFalse(user1.active)
self.assertFalse(user2.active)
class PackageTestCase(BaseTestCase):
def test_anonymous(self):
self.assert403(self.client.get(url_for("package.index_view")))
def test_user(self):
with self.logged_user():
self.assert403(self.client.get(url_for("package.index_view")))
def test_developer(self):
with self.logged_user("developer"):
self.assert403(self.client.get(url_for("package.index_view")))
def test_package_admin(self):
with self.logged_user("package_admin"):
self.assert200(self.client.get(url_for("package.index_view")))
def test_admin(self):
with self.logged_user("admin"):
self.assert403(self.client.get(url_for("package.index_view")))
def test_on_model_create(self):
self.assertEqual(len(Package.query.all()), 0)
with self.logged_user("package_admin"):
self.client.post(url_for("package.create_view"), data=dict(name="test"))
self.assertEqual(len(Package.query.all()), 1)
package = Package.query.one()
package_path = os.path.join(current_app.config["DATA_PATH"], package.name)
self.assertTrue(os.path.exists(package_path))
def test_on_model_delete(self):
package = PackageFactory()
db.session.commit()
self.assertEqual(len(Package.query.all()), 1)
package_path = os.path.join(current_app.config["DATA_PATH"], package.name)
self.assertTrue(os.path.exists(package_path))
with self.logged_user("package_admin", "admin"):
self.client.post(url_for("package.delete_view", id=str(package.id)))
self.assertEqual(len(Package.query.all()), 0)
self.assertTrue(not os.path.exists(package_path))
class VersionTestCase(BaseTestCase):
def test_anonymous(self):
self.assert403(self.client.get(url_for("version.index_view")))
def test_user(self):
with self.logged_user():
self.assert403(self.client.get(url_for("version.index_view")))
def test_developer(self):
with self.logged_user("developer"):
self.assert200(self.client.get(url_for("version.index_view")))
def test_package_admin(self):
with self.logged_user("package_admin"):
self.assert200(self.client.get(url_for("version.index_view")))
def test_admin(self):
with self.logged_user("admin"):
self.assert403(self.client.get(url_for("version.index_view")))
def test_on_model_delete(self):
version = VersionFactory()
db.session.commit()
self.assertEqual(len(Version.query.all()), 1)
version_path = os.path.join(
current_app.config["DATA_PATH"], version.package.name, str(version.version)
)
self.assertTrue(os.path.exists(version_path))
with self.logged_user("package_admin", "admin"):
self.client.post(url_for("version.delete_view", id=str(version.id)))
self.assertEqual(len(Version.query.all()), 0)
self.assertTrue(not os.path.exists(version_path))
class BuildTestCase(BaseTestCase):
def test_anonymous(self):
self.assert403(self.client.get(url_for("build.index_view")))
def test_user(self):
with self.logged_user():
self.assert403(self.client.get(url_for("build.index_view")))
def test_developer(self):
with self.logged_user("developer"):
self.assert200(self.client.get(url_for("build.index_view")))
def test_package_admin(self):
with self.logged_user("package_admin"):
self.assert200(self.client.get(url_for("build.index_view")))
def test_admin(self):
with self.logged_user("admin"):
self.assert403(self.client.get(url_for("build.index_view")))
def test_action_activate_one(self):
with self.logged_user("package_admin"):
build = BuildFactory(active=False)
db.session.commit()
response = self.client.post(
url_for("build.action_view"),
follow_redirects=True,
data=dict(action="activate", rowid=[build.id]),
)
self.assert200(response)
self.assertIn(
"Build was successfully activated.",
response.data.decode(response.charset),
)
self.assertTrue(build.active)
def test_action_activate_multi(self):
with self.logged_user("package_admin"):
build1 = BuildFactory(active=False)
build2 = BuildFactory(active=False)
db.session.commit()
response = self.client.post(
url_for("build.action_view"),
follow_redirects=True,
data=dict(action="activate", rowid=[build1.id, build2.id]),
)
self.assert200(response)
self.assertIn(
"2 builds were successfully activated.",
response.data.decode(response.charset),
)
self.assertTrue(build1.active)
self.assertTrue(build2.active)
def test_action_deactivate(self):
with self.logged_user("package_admin"):
build = BuildFactory(active=True)
db.session.commit()
response = self.client.post(
url_for("build.action_view"),
follow_redirects=True,
data=dict(action="deactivate", rowid=[build.id]),
)
self.assert200(response)
self.assertIn(
"Build was successfully deactivated.",
response.data.decode(response.charset),
)
self.assertFalse(build.active)
def test_action_deactivate_multi(self):
with self.logged_user("package_admin"):
build1 = BuildFactory(active=True)
build2 = BuildFactory(active=True)
db.session.commit()
response = self.client.post(
url_for("build.action_view"),
follow_redirects=True,
data=dict(action="deactivate", rowid=[build1.id, build2.id]),
)
self.assert200(response)
self.assertIn(
"2 builds were successfully deactivated.",
response.data.decode(response.charset),
)
self.assertFalse(build1.active)
self.assertFalse(build2.active)
class ScreenshotTestCase(BaseTestCase):
def test_anonymous(self):
self.assert403(self.client.get(url_for("screenshot.index_view")))
def test_user(self):
with self.logged_user():
self.assert403(self.client.get(url_for("screenshot.index_view")))
def test_developer(self):
with self.logged_user("developer"):
self.assert403(self.client.get(url_for("screenshot.index_view")))
def test_package_admin(self):
with self.logged_user("package_admin"):
self.assert200(self.client.get(url_for("screenshot.index_view")))
def test_admin(self):
with self.logged_user("admin"):
self.assert403(self.client.get(url_for("screenshot.index_view")))
def test_create(self):
package = PackageFactory(add_screenshot=False)
db.session.commit()
self.assertEqual(len(package.screenshots), 0)
with self.logged_user("package_admin"):
self.client.post(
url_for("screenshot.create_view"),
data=dict(
package=str(package.id),
path=(create_image("Test", 1280, 1024), "test.png"),
),
)
self.assertEqual(len(package.screenshots), 1)
self.assertTrue(package.screenshots[0].path.endswith("screenshot_1.png"))
def suite():
suite = TestSuite()
suite.addTest(TestLoader().loadTestsFromTestCase(IndexTestCase))
suite.addTest(TestLoader().loadTestsFromTestCase(UserTestCase))
suite.addTest(TestLoader().loadTestsFromTestCase(PackageTestCase))
suite.addTest(TestLoader().loadTestsFromTestCase(VersionTestCase))
suite.addTest(TestLoader().loadTestsFromTestCase(BuildTestCase))
suite.addTest(TestLoader().loadTestsFromTestCase(ScreenshotTestCase))
return suite
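# The aggregated suite can be run with the standard library runner, e.g.:
#   import unittest; unittest.TextTestRunner().run(suite())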
# === datasets/__init__.py · rootadminWalker/autoencoder-siamese-experiments · MIT ===
from .MNISTSiamese import SiameseMNISTLoader
from .MNISTTriplet import TripletMNISTLoader
from .Market1501TripletDataset import TripletMarket1501Dataset, SiameseMarket1501Dataset
# === Data Structure/Binary Tree/437. Path Sum III.py · smsubham/Data-Structure-Algorithms-Questions · Apache-2.0 ===
# https://leetcode.com/problems/path-sum-iii/
# https://leetcode.com/problems/path-sum-iii/discuss/141424/Python-step-by-step-walk-through.-Easy-to-understand.-Two-solutions-comparison.-%3A-)
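# Below is a minimal sketch (not taken verbatim from the links above) of the
# prefix-sum DFS approach the linked discussion describes: count downward paths
# whose node values sum to `target` in O(n) by tracking running-sum frequencies.
from collections import defaultdict

class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val, self.left, self.right = val, left, right

def path_sum(root, target):
    counts = defaultdict(int)
    counts[0] = 1  # the empty prefix, so root-anchored paths are counted

    def dfs(node, running):
        if node is None:
            return 0
        running += node.val
        # any earlier prefix equal to running - target closes a valid path here
        total = counts[running - target]
        counts[running] += 1
        total += dfs(node.left, running) + dfs(node.right, running)
        counts[running] -= 1  # backtrack: this prefix leaves scope
        return total

    return dfs(root, 0)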
# === core/contexts/__init__.py · ChrisLR/BasicDungeonRL · MIT ===
from core.contexts.action import Action, TwoTargetAction, MultipleTargetAction
from core.contexts.combat import Combat, RangedCombat, WeaponCombat
# === main/admin.py · WorkShoft/IkeaScraper · Apache-2.0 ===
from django.contrib import admin
from .models import SofaType, Sofa, Color
admin.site.register(SofaType)
admin.site.register(Sofa)
admin.site.register(Color)
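# Registering a model without a ModelAdmin subclass uses Django's default admin
# options; list columns/filters could be customized by passing a second argument,
# e.g. admin.site.register(Sofa, SofaAdmin) with a hypothetical SofaAdmin class.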
# === rlgym_compat/__init__.py · VirxEC/rlgym-compat · Apache-2.0 ===
from .obs_builders import ObsBuilder
# === pong/__init__.py · abhishekchandra2522k/PongPong · MIT ===
from . import rectangle, ball, load, paddle, util
# === db/validators/__init__.py · matchd-ch/matchd-backend · Apache-2.0 ===
from .password import PasswordValidator
from .nickname import NicknameValidator
from .student_profile import ProfileFormStepValidator
from .student_type import StudentTypeValidator
from .company_type import CompanyTypeValidator
from .attachment import AttachmentKeyValidator, AttachmentKeyNumFilesValidator, AttachmentFileValidator
from .job_posting import JobPostingFormStepValidator
from .project_posting import ProjectPostingFormStepValidator
# === insights/tests/client/apps/test_compliance.py · akshay196/insights-core · Apache-2.0 ===
# -*- coding: UTF-8 -*-
from insights.client.apps.compliance import ComplianceClient, COMPLIANCE_CONTENT_TYPE
from mock.mock import patch, Mock
from pytest import raises
import os
PATH = '/usr/share/xml/scap/ref_id.xml'
@patch("insights.client.apps.compliance.ComplianceClient._assert_oscap_rpms_exist")
@patch("insights.client.config.InsightsConfig", base_url='localhost/app', systemid='', proxy=None)
def test_oscap_scan(config, assert_rpms):
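# The network- and process-heavy collaborators are stubbed with lambdas so only
# the archive and content-type plumbing of oscap_scan() is exercised here.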
compliance_client = ComplianceClient(config)
compliance_client.get_policies = lambda: [{'ref_id': 'foo'}]
compliance_client.find_scap_policy = lambda ref_id: '/usr/share/xml/scap/foo.xml'
compliance_client.run_scan = lambda ref_id, policy_xml, output_path: None
compliance_client.archive.archive_tmp_dir = '/tmp'
compliance_client.archive.archive_name = 'insights-compliance-test'
archive, content_type = compliance_client.oscap_scan()
assert archive == '/tmp/insights-compliance-test.tar.gz'
assert content_type == COMPLIANCE_CONTENT_TYPE
@patch("insights.client.apps.compliance.call", return_value=(0, ''))
@patch("insights.client.config.InsightsConfig", base_url='localhost/app', systemid='', proxy=None)
def test_missing_packages(config, call):
compliance_client = ComplianceClient(config)
compliance_client.get_policies = lambda: [{'ref_id': 'foo'}]
compliance_client.find_scap_policy = lambda ref_id: '/usr/share/xml/scap/foo.xml'
compliance_client.run_scan = lambda ref_id, policy_xml: None
with raises(SystemExit):
compliance_client.oscap_scan()
@patch("insights.client.apps.compliance.call", return_value=(1, ''))
@patch("insights.client.config.InsightsConfig", base_url='localhost/app', systemid='', proxy=None)
def test_errored_rpm_call(config, call):
compliance_client = ComplianceClient(config)
compliance_client.get_policies = lambda: [{'ref_id': 'foo'}]
compliance_client.find_scap_policy = lambda ref_id: '/usr/share/xml/scap/foo.xml'
compliance_client.run_scan = lambda ref_id, policy_xml: None
with raises(SystemExit):
compliance_client.oscap_scan()
@patch("insights.client.config.InsightsConfig", base_url='localhost/app', systemid='', proxy=None)
def test_get_policies(config):
compliance_client = ComplianceClient(config)
compliance_client.hostname = 'foo'
compliance_client.conn.session.get = Mock(return_value=Mock(status_code=200, json=Mock(return_value={'data': [{'attributes': {'profiles': 'data'}}]})))
assert compliance_client.get_policies() == 'data'
compliance_client.conn.session.get.assert_called_with('https://localhost/app/compliance/systems', params={'search': 'name=foo'})
@patch("insights.client.config.InsightsConfig", base_url='localhost/app', systemid='', proxy=None)
def test_get_policies_no_policies(config):
compliance_client = ComplianceClient(config)
compliance_client.hostname = 'foo'
compliance_client.conn.session.get = Mock(return_value=Mock(status_code=200, json=Mock(return_value={'data': []})))
assert compliance_client.get_policies() == []
compliance_client.conn.session.get.assert_called_with('https://localhost/app/compliance/systems', params={'search': 'name=foo'})
@patch("insights.client.config.InsightsConfig", base_url='localhost/app', systemid='', proxy=None)
def test_get_policies_error(config):
compliance_client = ComplianceClient(config)
compliance_client.hostname = 'foo'
compliance_client.conn.session.get = Mock(return_value=Mock(status_code=500))
assert compliance_client.get_policies() == []
compliance_client.conn.session.get.assert_called_with('https://localhost/app/compliance/systems', params={'search': 'name=foo'})
@patch("insights.client.apps.compliance.linux_distribution", return_value=(None, '6.5', None))
@patch("insights.client.config.InsightsConfig")
def test_os_release(config, linux_distro_mock):
compliance_client = ComplianceClient(config)
assert compliance_client.os_release() == '6'
@patch("insights.client.config.InsightsConfig")
def test_profile_files(config):
compliance_client = ComplianceClient(config)
compliance_client.os_release = lambda: '7'
assert compliance_client.profile_files() == []
@patch("insights.client.apps.compliance.call", return_value=(0, PATH))
@patch("insights.client.config.InsightsConfig")
def test_find_scap_policy(config, call):
compliance_client = ComplianceClient(config)
compliance_client.profile_files = lambda: ['/something']
assert compliance_client.find_scap_policy('ref_id') == PATH
@patch("insights.client.apps.compliance.call", return_value=(1, 'bad things happened'.encode('utf-8')))
@patch("insights.client.config.InsightsConfig")
def test_find_scap_policy_not_found(config, call):
compliance_client = ComplianceClient(config)
compliance_client.profile_files = lambda: ['/something']
with raises(SystemExit):
compliance_client.find_scap_policy('ref_id')
@patch("insights.client.apps.compliance.call", return_value=(0, ''.encode('utf-8')))
@patch("insights.client.config.InsightsConfig")
def test_run_scan(config, call):
compliance_client = ComplianceClient(config)
output_path = '/tmp/oscap_results-ref_id.xml'
env = os.environ
env.update({'TZ': 'UTC'})
compliance_client.run_scan('ref_id', '/nonexistent', output_path)
call.assert_called_with("oscap xccdf eval --profile ref_id --results " + output_path + ' /nonexistent', keep_rc=True, env=env)
@patch("insights.client.apps.compliance.call", return_value=(1, 'bad things happened'.encode('utf-8')))
@patch("insights.client.config.InsightsConfig")
def test_run_scan_fail(config, call):
compliance_client = ComplianceClient(config)
output_path = '/tmp/oscap_results-ref_id.xml'
env = os.environ
env.update({'TZ': 'UTC'})
with raises(SystemExit):
compliance_client.run_scan('ref_id', '/nonexistent', output_path)
call.assert_called_with("oscap xccdf eval --profile ref_id --results " + output_path + ' /nonexistent', keep_rc=True, env=env)
# === tests/SphereVoxelization_fft.py · SyedZiaul/freud · BSD-3-Clause ===
import numpy as np
def compute_3d(box_size, width, points, r_max, periodic=True):
"""
Does voxelization by doing an aperiodic fft of the sphere over the
points on the grid in 3 dimensions
Args:
box_size (float):
Length of the (assumed cubic) box for the calculation.
width (int):
Number of grid spaces in each direction of the box
points (:np.ndarray: (N, 3)):
Points within the box to compute the voxelization of
r_max (float):
Radius of the spheres centered at each point
periodic (bool):
True if the box should be considered periodic
"""
eff_rad = r_max / box_size * width
# enlarge the box for the fft by adding more segments of the same length
# we will cut the extra off later so the fft will be aperiodic.
buf_size = 0 if periodic else int(round(eff_rad + 1))
new_width = 2 * buf_size + width
# make the grid with the points on it
arr = _put_points_on_grid(points, new_width, box_size, width, buf_size, ndim=3)
# make the sphere
sphere = _make_sphere_3d(new_width, eff_rad)
# do the ffts
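# By the convolution theorem, multiplying the transforms and inverting performs a
# circular convolution of the point grid with the sphere kernel, stamping a
# sphere of radius eff_rad at every occupied voxel.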
fft_arr = np.fft.fftn(arr) * np.fft.fftn(sphere)
image = np.rint(np.real(np.fft.ifftn(fft_arr))).astype(np.uint32)
# get rid of the buffer
if not periodic:
image = image[buf_size:-buf_size, buf_size:-buf_size, buf_size:-buf_size]
# set the overlaps to 1, instead of larger integers
np.clip(image, 0, 1, out=image)
return image
def compute_2d(box_size, width, points, r_max, periodic=True):
"""
Does voxelization by doing an aperiodic fft of the sphere over the
points on the grid in 2 dimensions
Args:
box_size (float):
Length of the (assumed cubic) box for the calculation.
width (int):
Number of grid spaces in each direction of the box
points (:np.ndarray: (N, 3)):
Points within the box to compute the voxelization of
r_max (float):
Radius of the spheres centered at each point
periodic (bool):
True if the box should be considered periodic
"""
eff_rad = r_max / box_size * width
# enlarge the box for the fft by adding more segments of the same length
# we will cut the extra off later so the fft will be aperiodic.
buf_size = 0 if periodic else int(round(eff_rad + 1))
new_width = 2 * buf_size + width
# make the grid with the points on it
arr = _put_points_on_grid(points, new_width, box_size, width, buf_size, ndim=2)
# make the sphere
sphere = _make_sphere_2d(new_width, eff_rad)
# do the ffts
fft_arr = np.fft.fft2(arr) * np.fft.fft2(sphere)
image = np.rint(np.real(np.fft.ifft2(fft_arr))).astype(np.uint32)
# get rid of the buffer
if not periodic:
image = image[buf_size:-buf_size, buf_size:-buf_size]
# set the overlaps to 1, instead of larger integers
np.clip(image, 0, 1, out=image)
return image
def _put_points_on_grid(points, new_width, box_size, width, buf_size, ndim):
"""
Creates a grid where the voxels are 1 if there is a point there and 0 if
not.
"""
d = (new_width,) * ndim
arr = np.zeros(d)
img_points = points / (box_size / width) # points in units of grid spacing
for pt in img_points:
shifted_pt = tuple(int(round(pt[i])) for i in range(ndim))
arr[shifted_pt] = 1
return arr
def _make_sphere_3d(new_width, eff_rad):
"""Makes a grid in 3D with voxels that are within ``eff_rad`` of the
center having value 1 and other voxels having value 0."""
r_rad = int(round(eff_rad))
ctr = new_width // 2
arr = np.zeros((new_width, new_width, new_width))
for i in range(-r_rad, r_rad):
for j in range(-r_rad, r_rad):
for k in range(-r_rad, r_rad):
if np.linalg.norm([i, j, k]) < eff_rad:
arr[ctr + i, ctr + j, ctr + k] = 1
return arr
def _make_sphere_2d(new_width, eff_rad):
"""makes a grid in 2D with voxels that are within eff_rad of the center
having value 1 (else 0)"""
r_rad = round(eff_rad)
ctr = new_width // 2
arr = np.zeros((new_width, new_width))
for i in range(-r_rad, r_rad):
for j in range(-r_rad, r_rad):
if np.linalg.norm([i, j]) <= eff_rad:
arr[ctr + i, ctr + j] = 1
return arr
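# Minimal usage sketch (assumed parameters): voxelize 10 random points in a
# periodic cubic box of side 10 on a 64^3 grid with sphere radius 1.
#   import numpy as np
#   points = np.random.rand(10, 3) * 9.0   # keep rounded indices inside the grid
#   image = compute_3d(box_size=10.0, width=64, points=points, r_max=1.0)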
# === src/optimizers/__init__.py · data-sachez-2511/pl_classification · MIT ===
from optimizers.wrapper import Optimizer
# === spec/example/quux.py · joe81/rak · MIT/Unlicense ===
quux quux quux quux Virgon quux quux
# === extra_tests/snippets/os_info.py · dbrgn/RustPython · CC-BY-4.0/MIT ===
import os
assert os.name == 'posix' or os.name == 'nt'
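# os.name identifies the OS family the interpreter targets: 'posix' on
# Unix-like systems and 'nt' on Windows; the assertion accepts either.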
# === app/tests/client_tests/test_handle_compliance_event.py · AirWalk-Digital/airview-api · Apache-2.0 ===
from airview_api.models import MonitoredResourceState
from tests.client_tests.common import *
from airview_api import app
from airview_api.database import db
from airview_api import models as api_models
from tests.common import client, instance
from tests.factories import *
import requests_mock
from requests_flask_adapter import Session
import pytest
from client.airviewclient import models
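# Test flow below: factories seed backend rows, handle_compliance_event() is
# driven with the fixture event, and the persisted TechnicalControl /
# MonitoredResource state is asserted; the adapter-based tests instead register
# mock HTTP responses and expect BackendFailureException on 5xx replies.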
@pytest.fixture()
def compliance_event():
application = models.Application(name="app one", reference="app-ref-1")
technical_control = models.TechnicalControl(
name="ctrl a",
reference="tc-ref-1",
quality_model=models.QualityModel.SECURITY,
type=models.TechnicalControlType.LOG,
can_delete_resources=False,
is_blocking=True,
ttl=20,
)
evt = models.ComplianceEvent(
application=application,
technical_control=technical_control,
resource_reference="res-ref-1",
status=models.MonitoredResourceState.FLAGGED,
type=models.MonitoredResourceType.VIRTUAL_MACHINE,
additional_data="Additional",
)
yield evt
def setup():
setup_factories()
def test_monitored_resource_creates_missing_system(handler, compliance_event):
"""
Given: A missing system
When: When a call is made to set a monitored resource
Then: The monitored resource is persisted against a new system
"""
print(compliance_event)
# Arrange
EnvironmentFactory(id=1, name="Env One", abbreviation="ONE")
ApplicationFactory(id=2, environment_id=1)
ApplicationReferenceFactory(
application_id=2, type="aws_account_id", reference="app-ref-1"
)
# Act
handler.handle_compliance_event(compliance_event)
# Assert
monitored = MonitoredResource.query.all()
assert len(monitored) == 1
assert monitored[0].state == MonitoredResourceState.FLAGGED
assert monitored[0].type == MonitoredResourceType.VIRTUAL_MACHINE
assert monitored[0].reference == "res-ref-1"
assert (
monitored[0].application_technical_control.technical_control.system.name
== "one"
)
assert (
monitored[0].application_technical_control.technical_control.system.stage.name
== "BUILD"
)
def test_monitored_resource_persisted_for_linked(handler, compliance_event):
"""
Given: An existing linked application
When: When a call is made to set a monitored resource
Then: The monitored resource is persisted
"""
# Arrange
EnvironmentFactory(id=1, name="Env One", abbreviation="ONE")
ApplicationFactory(id=2, environment_id=1)
ApplicationReferenceFactory(
application_id=2, type="aws_account_id", reference="app-ref-1"
)
SystemFactory(id=111, name="one", stage=api_models.SystemStage.BUILD)
TechnicalControlFactory(id=4, system_id=111, reference="tc-ref-1", severity="HIGH")
ApplicationTechnicalControlFactory(id=5, application_id=2, technical_control_id=4)
# Act
handler.handle_compliance_event(compliance_event)
# Assert
monitored = MonitoredResource.query.all()
assert len(monitored) == 1
assert monitored[0].state == MonitoredResourceState.FLAGGED
assert monitored[0].type == MonitoredResourceType.VIRTUAL_MACHINE
assert monitored[0].reference == "res-ref-1"
assert monitored[0].application_technical_control_id == 5
def test_triggered_resource_creates_new_control(handler, compliance_event):
"""
Given: An existing linked application, no known control
When: When a call is made to set a triggered resource
Then: New control created and linked, the triggered resource is sent to the backend
"""
# Arrange
EnvironmentFactory(id=1, name="Env One", abbreviation="ONE")
ApplicationFactory(id=2, environment_id=1)
ApplicationReferenceFactory(
application_id=2, type="aws_account_id", reference="app-ref-1"
)
SystemFactory(id=111, name="one", stage=api_models.SystemStage.BUILD)
TechnicalControlFactory(
id=4, system_id=111, reference="tc-ref-other", severity="HIGH"
)
ApplicationTechnicalControlFactory(id=5, application_id=2, technical_control_id=4)
# Act
handler.handle_compliance_event(compliance_event)
# Assert
tc = TechnicalControl.query.all()
assert len(tc) == 2
assert tc[1].name == "ctrl a"
assert tc[1].reference == "tc-ref-1"
assert tc[1].is_blocking == True
assert tc[1].can_delete_resources == False
assert tc[1].ttl == 20
monitored = MonitoredResource.query.all()
assert len(monitored) == 1
assert monitored[0].state == MonitoredResourceState.FLAGGED
assert monitored[0].type == MonitoredResourceType.VIRTUAL_MACHINE
assert monitored[0].reference == "res-ref-1"
assert monitored[0].application_technical_control_id == 6
def test_triggered_resource_creates_new_control_with_parent(handler, compliance_event):
"""
Given: An existing linked application, no known control, control has a parent
When: When a call is made to set a triggered resource
Then: New control created and linked to parent, the triggered resource is sent to the backend
"""
# Arrange
EnvironmentFactory(id=1, name="Env One", abbreviation="ONE")
ApplicationFactory(id=2, environment_id=1)
ApplicationReferenceFactory(
application_id=2, type="aws_account_id", reference="app-ref-1"
)
SystemFactory(id=111, name="one", stage=api_models.SystemStage.BUILD)
TechnicalControlFactory(
id=4, system_id=111, reference="tc-ref-other", severity="HIGH"
)
ApplicationTechnicalControlFactory(id=5, application_id=2, technical_control_id=4)
# Act
compliance_event.technical_control.parent_id = 4
handler.handle_compliance_event(compliance_event)
compliance_event.technical_control.parent_id = None
# Assert
tc = TechnicalControl.query.all()
assert len(tc) == 2
assert tc[1].name == "ctrl a"
assert tc[1].reference == "tc-ref-1"
assert tc[1].is_blocking == True
assert tc[1].can_delete_resources == False
assert tc[1].ttl == 20
assert tc[1].parent_id == 4
monitored = MonitoredResource.query.all()
assert len(monitored) == 1
assert monitored[0].state == MonitoredResourceState.FLAGGED
assert monitored[0].type == MonitoredResourceType.VIRTUAL_MACHINE
assert monitored[0].reference == "res-ref-1"
assert monitored[0].application_technical_control_id == 6
def test_triggered_resource_creates_new_control_with_defaults(
handler, compliance_event
):
"""
Given: An existing linked application, no known control
When: When a call is made to set a triggered resource with missing optional fields
Then: New control created with defaults and linked, the triggered resource is sent to the backend
"""
# Arrange
EnvironmentFactory(id=1, name="Env One", abbreviation="ONE")
ApplicationFactory(id=2, environment_id=1)
ApplicationReferenceFactory(
application_id=2, type="aws_account_id", reference="app-ref-1"
)
SystemFactory(id=111, name="one", stage=api_models.SystemStage.BUILD)
TechnicalControlFactory(
id=4, system_id=111, reference="tc-ref-other", severity="HIGH"
)
ApplicationTechnicalControlFactory(id=5, application_id=2, technical_control_id=4)
# Act
compliance_event.technical_control.ttl = None
compliance_event.technical_control.can_delete_resources = None
compliance_event.technical_control.is_blocking = None
handler.handle_compliance_event(compliance_event)
# Assert
tc = TechnicalControl.query.all()
assert len(tc) == 2
assert tc[1].name == "ctrl a"
assert tc[1].reference == "tc-ref-1"
assert tc[1].is_blocking == False
assert tc[1].can_delete_resources == True
assert tc[1].ttl is None
monitored = MonitoredResource.query.all()
assert len(monitored) == 1
assert monitored[0].state == MonitoredResourceState.FLAGGED
assert monitored[0].type == MonitoredResourceType.VIRTUAL_MACHINE
assert monitored[0].reference == "res-ref-1"
assert monitored[0].application_technical_control_id == 6
def test_triggered_resource_creates_new_app(handler, compliance_event):
"""
Given: No existing linked application
When: When a call is made to set a triggered resource
Then: A new application is created, linked and triggered
"""
# Arrange
SystemFactory(id=111, name="one", stage=api_models.SystemStage.BUILD)
# Act
handler.handle_compliance_event(compliance_event)
# Assert
monitored = MonitoredResource.query.all()
assert len(monitored) == 1
app = Application.query.first()
assert app.name == "app one"
assert app.application_type == api_models.ApplicationType.BUSINESS_APPLICATION
assert app.environment_id is None
refs = app.references.all()
assert len(refs) == 2
assert refs[1].type == "aws_account_id"
assert refs[1].reference == "app-ref-1"
def test_account_cache_handle_unexpected_code_for_get_control(
handler, compliance_event, adapter
):
"""
Given: Status code 500 returned by GET technical control
When: When a call is made to set a triggered resource
Then: An exception is raised
"""
# Arrange
adapter.register_uri(
"GET",
f"{base_url}/systems/?name=one",
status_code=200,
json={
"id": 111,
"name": "name",
},
)
adapter.register_uri(
"GET",
f"{base_url}/referenced-applications/?type=aws_account_id&reference=app-ref-1",
status_code=200,
json={
"id": 111,
"name": "app-name",
"reference": "app-ref",
"applicationTypeId": 222,
"environmentId": 333,
"parentId": 444,
},
)
adapter.register_uri(
"GET",
f"{base_url}/technical-controls/?systemId=111&reference=tc-ref-1",
status_code=500,
)
# Act
with pytest.raises(models.BackendFailureException) as excinfo:
handler.handle_compliance_event(compliance_event)
def test_triggered_resource_handle_unexpected_code_for_get_app_technical_control(
handler, adapter, compliance_event
):
"""
Given: Status code 500 returned by GET application technical control
When: When a call is made to set a triggered resource
Then: An exception is raised
"""
# Arrange
adapter.register_uri(
"GET",
f"{base_url}/systems/?name=one",
status_code=200,
json={
"id": 111,
"name": "name",
},
)
adapter.register_uri(
"GET",
f"{base_url}/referenced-applications/?type=aws_account_id&reference=app-ref-1",
status_code=200,
json={
"id": 111,
"name": "app-name",
"reference": "app-ref",
"applicationTypeId": 222,
"environmentId": 333,
"parentId": 444,
},
)
adapter.register_uri(
"GET",
f"{base_url}/technical-controls/?systemId=111&reference=tc-ref-1",
status_code=200,
json=[
{
"id": 222,
"name": "tc1",
"reference": "ref1",
"qualityModel": "SECURITY",
"controlAction": "LOG",
}
],
)
adapter.register_uri(
"GET",
f"{base_url}/application-technical-controls/?applicationId=111&technicalControlId=222",
status_code=500,
)
# Act
with pytest.raises(models.BackendFailureException) as excinfo:
handler.handle_compliance_event(compliance_event)
def test_triggered_resource_handle_unexpected_code_for_create_technical_control(
handler, adapter, compliance_event
):
"""
Given: Status code 500 returned by POST technical-control
When: When a call is made to set a triggered resource
Then: An exception is raised
"""
adapter.register_uri(
"GET",
f"{base_url}/systems/?name=one",
status_code=200,
json={
"id": 111,
"name": "name",
},
)
adapter.register_uri(
"GET",
f"{base_url}/referenced-applications/?type=aws_account_id&reference=app-ref-1",
status_code=200,
json={
"id": 111,
"name": "app-name",
"reference": "app-ref",
"applicationTypeId": 222,
"environmentId": 333,
"parentId": 444,
},
)
adapter.register_uri(
"GET",
f"{base_url}/technical-controls/?systemId=111&reference=tc-ref-1",
status_code=200,
json=[],
)
adapter.register_uri(
"POST",
f"{base_url}/technical-controls/",
status_code=500,
)
# Act
with pytest.raises(models.BackendFailureException) as excinfo:
handler.handle_compliance_event(compliance_event)
def test_triggered_resource_handle_unexpected_code_for_link_technical_control(
handler, adapter, compliance_event
):
"""
Given: Status code 500 returned by POST application-technical-control
When: When a call is made to set a triggered resource
Then: An exception is raised
"""
adapter.register_uri(
"GET",
f"{base_url}/systems/?name=one",
status_code=200,
json={
"id": 111,
"name": "name",
},
)
adapter.register_uri(
"GET",
f"{base_url}/referenced-applications/?type=aws_account_id&reference=app-ref-1",
status_code=200,
json={
"id": 111,
"name": "app-name",
"reference": "app-ref",
"applicationTypeId": 222,
"environmentId": 333,
"parentId": 444,
},
)
adapter.register_uri(
"GET",
f"{base_url}/technical-controls/?systemId=111&reference=tc-ref-1",
status_code=200,
json=[
{
"id": 222,
"name": "tc1",
"reference": "ref1",
"qualityModel": "SECURITY",
"controlAction": "LOG",
}
],
)
adapter.register_uri(
"GET",
f"{base_url}/application-technical-controls/?applicationId=111&technicalControlId=222",
status_code=404,
)
adapter.register_uri(
"POST",
f"{base_url}/application-technical-controls/",
status_code=500,
)
# Act
with pytest.raises(models.BackendFailureException) as excinfo:
handler.handle_compliance_event(compliance_event)
def test_triggered_resource_handle_unexpected_code_for_monitored_resource(
handler, adapter, compliance_event
):
"""
Given: Status code 500 returned by PUT monitored resource
When: When a call is made to set a triggered resource
Then: An exception is raised
"""
adapter.register_uri(
"GET",
f"{base_url}/systems/?name=one",
status_code=200,
json={
"id": 111,
"name": "name",
},
)
adapter.register_uri(
"GET",
f"{base_url}/referenced-applications/?type=aws_account_id&reference=app-ref-1",
status_code=200,
json={
"id": 111,
"name": "app-name",
"reference": "app-ref",
"applicationTypeId": 222,
"environmentId": 333,
"parentId": 444,
},
)
adapter.register_uri(
"GET",
f"{base_url}/technical-controls/?systemId=111&reference=tc-ref-1",
status_code=200,
json=[
{
"id": 222,
"name": "tc1",
"reference": "ref1",
"qualityModel": "SECURITY",
"controlAction": "LOG",
}
],
)
adapter.register_uri(
"GET",
f"{base_url}/application-technical-controls/?applicationId=111&technicalControlId=222",
status_code=200,
json={"id": 444},
)
adapter.register_uri(
"PUT",
f"{base_url}/monitored-resources/?applicationTechnicalControlId=444&reference=res-ref-1",
status_code=500,
)
# Act
with pytest.raises(models.BackendFailureException):
handler.handle_compliance_event(compliance_event)
| 31.702128
| 102
| 0.648566
| 1,867
| 16,390
| 5.526513
| 0.098018
| 0.058151
| 0.032564
| 0.036635
| 0.842121
| 0.824094
| 0.811688
| 0.798604
| 0.795794
| 0.787362
| 0
| 0.029
| 0.242587
| 16,390
| 516
| 103
| 31.763566
| 0.802159
| 0.119646
| 0
| 0.676166
| 0
| 0
| 0.160837
| 0.08699
| 0
| 0
| 0
| 0
| 0.126943
| 1
| 0.033679
| false
| 0
| 0.028497
| 0
| 0.062176
| 0.002591
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c352a0b78b3a600ea736c0d8ee024c88918c850e
| 99
|
py
|
Python
|
app/Todo.py
|
GuMarYae/penguin_masonite_todos_backend
|
870dc8e619cdd4364ea4e10cdec2023c7296c533
|
[
"MIT"
] | null | null | null |
app/Todo.py
|
GuMarYae/penguin_masonite_todos_backend
|
870dc8e619cdd4364ea4e10cdec2023c7296c533
|
[
"MIT"
] | null | null | null |
app/Todo.py
|
GuMarYae/penguin_masonite_todos_backend
|
870dc8e619cdd4364ea4e10cdec2023c7296c533
|
[
"MIT"
] | null | null | null |
"""Todo Model."""
from masoniteorm.models import Model
class Todo(Model):
__table__ = "todos"
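# Usage sketch (hypothetical columns; the "todos" schema is not defined in this
# file, and masoniteorm Model subclasses expose helpers such as all() and create()):
# Todo.create({"title": "write docs", "done": False})
# for todo in Todo.all():
#     print(todo.title)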
| 12.375
| 36
| 0.69697
| 12
| 99
| 5.416667
| 0.75
| 0.276923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161616
| 99
| 7
| 37
| 14.142857
| 0.783133
| 0.111111
| 0
| 0
| 0
| 0
| 0.060976
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c35df5968d3aa5e891533bfa8290dac86be6891a
| 124
|
py
|
Python
|
utilities/prev_projects/DATAFIT/TESTS/__init__.py
|
Saldenisov/pyconlyse
|
1de301b4a4c15ee0bd19034aa8d5da1beacfd124
|
[
"MIT"
] | null | null | null |
utilities/prev_projects/DATAFIT/TESTS/__init__.py
|
Saldenisov/pyconlyse
|
1de301b4a4c15ee0bd19034aa8d5da1beacfd124
|
[
"MIT"
] | null | null | null |
utilities/prev_projects/DATAFIT/TESTS/__init__.py
|
Saldenisov/pyconlyse
|
1de301b4a4c15ee0bd19034aa8d5da1beacfd124
|
[
"MIT"
] | null | null | null |
from .TESTS_models import *
from .TESTS_controllers import *
from .TESTS_configuration import *
from .TESTS_openers import *
| 31
| 34
| 0.814516
| 16
| 124
| 6.0625
| 0.4375
| 0.371134
| 0.463918
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120968
| 124
| 4
| 35
| 31
| 0.889908
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
c37bb922f60efafaccb31e706c9bb0b6c6f74162
| 1,185
|
py
|
Python
|
expenses_trasker/expenses_tracker/main_app/urls.py
|
ivailoiliev84/Web_basic_exams_preparation_and_exam
|
411cc7b737fc5044029a52bd14a5d75ce79cd39e
|
[
"MIT"
] | null | null | null |
expenses_trasker/expenses_tracker/main_app/urls.py
|
ivailoiliev84/Web_basic_exams_preparation_and_exam
|
411cc7b737fc5044029a52bd14a5d75ce79cd39e
|
[
"MIT"
] | null | null | null |
expenses_trasker/expenses_tracker/main_app/urls.py
|
ivailoiliev84/Web_basic_exams_preparation_and_exam
|
411cc7b737fc5044029a52bd14a5d75ce79cd39e
|
[
"MIT"
] | null | null | null |
from django.urls import path
from expenses_tracker.main_app.views import *
"""urlpatterns = (
path('', home_page, name='home page'),
path('create/', create_expense_page, name='create expense page'),
path('edit/<int:pk>', edit_expense_page, name='edit expense page'),
path('delete<int:pk>', delete_expense_page, name='delete expense page'),
path('profile/', profile_page, name='profile page'),
path('profile/create/', create_profile, name='create profile'),
path('profile/edit/', profile_edit_page, name='profile page'),
path('profile/delete/', delete_profile_page, name='profile edit page'),
)"""
urlpatterns = (
path('', home_page, name='home page'),
path('create/', create_expense_page, name='create expense page'),
path('edit/<int:pk>', edit_expense_page, name='edit expense page'),
path('delete/<int:pk>', delete_expense_page, name='delete expense page'),
path('profile/', profile_page, name='profile page'),
path('profile/create/', create_profile, name='create profile'),
path('profile/edit/', profile_edit_page, name='edit profile'),
path('profile/delete/', delete_profile_page, name='delete profile'),
)
| 35.909091
| 77
| 0.689451
| 155
| 1,185
| 5.090323
| 0.135484
| 0.141952
| 0.114068
| 0.08365
| 0.873257
| 0.873257
| 0.859316
| 0.762991
| 0.762991
| 0.762991
| 0
| 0
| 0.135865
| 1,185
| 32
| 78
| 37.03125
| 0.770508
| 0
| 0
| 0
| 0
| 0
| 0.322169
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5ef0b8096618d25a3a0e3d52e440dc6f76f5ab34
| 11,969
|
py
|
Python
|
test/test_kripke.py
|
ocnkr/pycubexr
|
ca17f0afcb01a0688a08b90949183089433112df
|
[
"BSD-3-Clause"
] | 5
|
2020-07-11T22:45:41.000Z
|
2020-11-20T16:06:26.000Z
|
test/test_kripke.py
|
ocnkr/pycubexr
|
ca17f0afcb01a0688a08b90949183089433112df
|
[
"BSD-3-Clause"
] | 2
|
2020-07-16T08:54:16.000Z
|
2022-03-19T05:24:58.000Z
|
test/test_kripke.py
|
ocnkr/pycubexr
|
ca17f0afcb01a0688a08b90949183089433112df
|
[
"BSD-3-Clause"
] | 3
|
2020-07-14T14:39:38.000Z
|
2022-01-05T07:02:51.000Z
|
import unittest
from pathlib import Path
from pycubexr import CubexParser
from pycubexr.utils.exceptions import MissingMetricError
class TestMetricValuesKripke(unittest.TestCase):
cubex: CubexParser = None
@classmethod
def setUpClass(cls) -> None:
cubex_file_path = Path("../data/kripke.p8.d2.g32.r1/profile.cubex").resolve()
cls.cubex = CubexParser(cubex_file_path).__enter__()
@classmethod
def tearDownClass(cls) -> None:
cls.cubex.__exit__(None, None, None)
def get_values_for_metric(self, name, convert_to_exclusive=False, convert_to_inclusive=False):
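# Look up the metric by name, then gather one value per callpath (cnode),
# optionally converting between exclusive and inclusive representations.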
metric = self.cubex.get_metric_by_name(name)
metric_values = self.cubex.get_metric_values(metric)
# iterate over all callpaths in cubex file
cnode_values = []
for cnode in self.cubex.all_cnodes():
# return the measurement values for all mpi processes for the current metric and callpath
cnode_values.append(metric_values.value(cnode, convert_to_exclusive, convert_to_inclusive))
return cnode_values
def test_visits_exclusive(self):
correct_values = [8, 8, 33, 16, 8, 8000, 8000, 8000, 8000, 96000, 169025, 96000, 8000, 8, ]
metric_values = self.get_values_for_metric('visits', convert_to_exclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_visits_inclusive(self):
correct_values = [401106, 8, 33, 16, 401033, 8000, 8000, 385025, 8000, 96000, 169025, 96000, 8000, 8, ]
metric_values = self.get_values_for_metric('visits', convert_to_inclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_time_exclusive(self):
correct_values = [0.049351889999996956, 0.41779200749999995, 4.7357375E-4, 1.7047999999999998E-4,
0.4765991312500262, 59.994445453124996, 59.901360724375, 18.81082184375, 0.0791653,
1.9761004281249999, 3.677575010625, 2.70347292125, 0.5036856562500001, 0.04049549125, ]
metric_values = self.get_values_for_metric('time', convert_to_exclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_time_inclusive(self):
correct_values = [148.63150991125, 0.41779200749999995, 4.7357375E-4, 1.7047999999999998E-4, 148.12322646875,
59.994445453124996, 59.901360724375, 27.750821159999997, 0.0791653, 1.9761004281249999,
3.677575010625, 2.70347292125, 0.5036856562500001, 0.04049549125, ]
metric_values = self.get_values_for_metric('time', convert_to_inclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_min_time_exclusive(self):
correct_values = [18.57139089625, 0.0446771925, 5.263125E-6, 5.80125E-6, 18.5020277025, 0.00741907125,
0.00703084, 0.00325334125, 7.605E-6, 1.7570625E-5, 7.595E-6, 2.031375E-5, 5.414375E-5,
4.776875E-5, ]
metric_values = self.get_values_for_metric('min_time', convert_to_exclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_min_time_inclusive(self):
correct_values = [5.263125E-6, 0.0446771925, 5.263125E-6, 5.80125E-6, 7.595E-6, 0.00741907125, 0.00703084,
7.595E-6, 7.605E-6, 1.7570625E-5, 7.595E-6, 2.031375E-5, 5.414375E-5, 4.776875E-5, ]
metric_values = self.get_values_for_metric('min_time', convert_to_inclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_max_time_exclusive(self):
correct_values = [18.60063626375, 0.073913345, 6.36525E-5, 3.40825E-5, 18.51830332, 0.008342615, 0.00768267875,
0.0177915775, 1.236375E-5, 6.542625E-5, 1.2951E-4, 7.841E-5, 8.488E-5, 0.00588251125, ]
metric_values = self.get_values_for_metric('max_time', convert_to_exclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_max_time_inclusive(self):
correct_values = [18.60063626375, 0.073913345, 6.36525E-5, 3.40825E-5, 18.51830332, 0.008342615, 0.00768267875,
0.0177915775, 1.236375E-5, 6.542625E-5, 1.2951E-4, 7.841E-5, 8.488E-5, 0.00588251125, ]
metric_values = self.get_values_for_metric('max_time', convert_to_inclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_task_migration_loss(self):
self.assertRaises(MissingMetricError, self.get_values_for_metric, 'task_migration_loss',
convert_to_exclusive=True)
self.assertRaises(MissingMetricError, self.get_values_for_metric, 'task_migration_loss',
convert_to_inclusive=True)
def test_task_migration_win(self):
self.assertRaises(MissingMetricError, self.get_values_for_metric, 'task_migration_win',
convert_to_exclusive=True)
self.assertRaises(MissingMetricError, self.get_values_for_metric, 'task_migration_win',
convert_to_inclusive=True)
def test_bytes_put(self):
self.assertRaises(MissingMetricError, self.get_values_for_metric, 'bytes_put', convert_to_exclusive=True)
self.assertRaises(MissingMetricError, self.get_values_for_metric, 'bytes_put', convert_to_inclusive=True)
def test_bytes_get(self):
self.assertRaises(MissingMetricError, self.get_values_for_metric, 'bytes_get', convert_to_exclusive=True)
self.assertRaises(MissingMetricError, self.get_values_for_metric, 'bytes_get', convert_to_inclusive=True)
def test_PAPI_TOT_INS_exclusive(self):
correct_values = [16380495, 218090062, 82702, 41913, 92374590, 18428764176, 18440603145, 4575020158, 20131404,
550715334, 919093540, 580021611, 131607814, 9039208, ]
metric_values = self.get_values_for_metric('PAPI_TOT_INS', convert_to_exclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_PAPI_TOT_INS_inclusive(self):
correct_values = [43981966152, 218090062, 82702, 41913, 43738331772, 18428764176, 18440603145, 6776589861,
20131404, 550715334, 919093540, 580021611, 131607814, 9039208, ]
metric_values = self.get_values_for_metric('PAPI_TOT_INS', convert_to_inclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_PAPI_FP_INS_exclusive(self):
correct_values = [2135, 496, 0, 0, 144000, 2764800000, 2764800000, 1109152000, 0, 0, 0, 0, 0, 0, ]
metric_values = self.get_values_for_metric('PAPI_FP_INS', convert_to_exclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_PAPI_FP_INS_inclusive(self):
correct_values = [6638898631, 496, 0, 0, 6638896000, 2764800000, 2764800000, 1109152000, 0, 0, 0, 0, 0, 0, ]
metric_values = self.get_values_for_metric('PAPI_FP_INS', convert_to_inclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_PAPI_FP_OPS_exclusive(self):
correct_values = [2183, 480, 0, 0, 144000, 5529600000, 5529600000, 2697987161, 0, 0, 0, 0, 0, 0, ]
metric_values = self.get_values_for_metric('PAPI_FP_OPS', convert_to_exclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_PAPI_FP_OPS_inclusive(self):
correct_values = [13757333824, 480, 0, 0, 13757331161, 5529600000, 5529600000, 2697987161, 0, 0, 0, 0, 0, 0, ]
metric_values = self.get_values_for_metric('PAPI_FP_OPS', convert_to_inclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_PEVT_L2_FETCH_LINE_exclusive(self):
correct_values = [236294, 301627, 2475, 466, 9951320, 63665366, 621100937, 72281973, 484451, 14599752, 16286754,
7812501, 1204831, 8663, ]
metric_values = self.get_values_for_metric('PEVT_L2_FETCH_LINE', convert_to_exclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_PEVT_L2_FETCH_LINE_inclusive(self):
correct_values = [807937410, 301627, 2475, 466, 807387885, 63665366, 621100937, 112670262, 484451, 14599752,
16286754, 7812501, 1204831, 8663, ]
metric_values = self.get_values_for_metric('PEVT_L2_FETCH_LINE', convert_to_inclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_PEVT_L2_STORE_LINE_exclusive(self):
correct_values = [2279935, 2075179, 6347, 509, 12419687, 131884217, 761931928, 456598356, 980669, 47246817,
101259237, 55485219, 22286633, 6776, ]
metric_values = self.get_values_for_metric('PEVT_L2_STORE_LINE', convert_to_exclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_PEVT_L2_STORE_LINE_inclusive(self):
correct_values = [1594461509, 2075179, 6347, 509, 1590092763, 131884217, 761931928, 683856931, 980669, 47246817,
101259237, 55485219, 22286633, 6776, ]
metric_values = self.get_values_for_metric('PEVT_L2_STORE_LINE', convert_to_inclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_bytes_sent_exclusive(self):
correct_values = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1770240000, 0, 0, ]
metric_values = self.get_values_for_metric('bytes_sent', convert_to_exclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_bytes_sent_inclusive(self):
correct_values = [1770240000, 0, 0, 0, 1770240000, 0, 0, 1770240000, 0, 0, 0, 1770240000, 0, 0, ]
metric_values = self.get_values_for_metric('bytes_sent', convert_to_inclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_bytes_received_exclusive(self):
correct_values = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1770240000, 0, 0, 0, ]
metric_values = self.get_values_for_metric('bytes_received', convert_to_exclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
def test_bytes_received_inclusive(self):
correct_values = [1770240000, 0, 0, 0, 1770240000, 0, 0, 1770240000, 0, 0, 1770240000, 0, 0, 0, ]
metric_values = self.get_values_for_metric('bytes_received', convert_to_inclusive=True)
for correct, cnode_values in zip(correct_values, metric_values):
self.assertAlmostEqual(correct, cnode_values)
if __name__ == '__main__':
unittest.main()
| 59.845
| 120
| 0.703735
| 1,520
| 11,969
| 5.242763
| 0.145395
| 0.014556
| 0.09035
| 0.070021
| 0.792571
| 0.749655
| 0.739992
| 0.733718
| 0.714895
| 0.714895
| 0
| 0.210058
| 0.202523
| 11,969
| 199
| 121
| 60.145729
| 0.62483
| 0.010694
| 0
| 0.368098
| 0
| 0
| 0.033705
| 0.003463
| 0
| 0
| 0
| 0
| 0.184049
| 1
| 0.177914
| false
| 0
| 0.02454
| 0
| 0.220859
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5ef6578f378caba4b83e04c9cbf1c86e3258587b
| 101
|
py
|
Python
|
CodeHS/Creating and Altering Data Structures/DivingContest.py
|
Kev-in123/ICS2O7
|
425c59975d4ce6aa0937fd8715b51d04487e4fa9
|
[
"MIT"
] | 2
|
2021-08-10T18:16:08.000Z
|
2021-09-26T19:49:26.000Z
|
CodeHS/Creating and Altering Data Structures/DivingContest.py
|
Kev-in123/ICS2O7
|
425c59975d4ce6aa0937fd8715b51d04487e4fa9
|
[
"MIT"
] | null | null | null |
CodeHS/Creating and Altering Data Structures/DivingContest.py
|
Kev-in123/ICS2O7
|
425c59975d4ce6aa0937fd8715b51d04487e4fa9
|
[
"MIT"
] | null | null | null |
def calculate_score(judges_scores):
return judges_scores[0] + judges_scores[1] + judges_scores[2]
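# Equivalent sketch using sum(); unlike the indexed version it also accepts
# more or fewer than three scores:
# def calculate_score(judges_scores):
#     return sum(judges_scores)
if __name__ == "__main__":
# Hypothetical example scores; the CodeHS exercise input is not shown here.
print(calculate_score([7.5, 8.0, 9.0]))  # prints 24.5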
| 33.666667
| 61
| 0.782178
| 15
| 101
| 4.933333
| 0.6
| 0.648649
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033333
| 0.108911
| 101
| 3
| 61
| 33.666667
| 0.788889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
6f0f35d579c9066f25dd5b242cd614ac8ac84562
| 19
|
py
|
Python
|
examples/pow/ex2.py
|
mcorne/python-by-example
|
15339c0909c84b51075587a6a66391100971c033
|
[
"MIT"
] | null | null | null |
examples/pow/ex2.py
|
mcorne/python-by-example
|
15339c0909c84b51075587a6a66391100971c033
|
[
"MIT"
] | null | null | null |
examples/pow/ex2.py
|
mcorne/python-by-example
|
15339c0909c84b51075587a6a66391100971c033
|
[
"MIT"
] | null | null | null |
print(pow(10, -2))
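# pow(10, -2) is 10**-2, i.e. 0.01; a negative exponent makes pow() return a float.
# Three-argument pow(base, exp, mod) performs modular exponentiation:
print(pow(10, 3, 7))  # 1000 % 7 == 6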
| 9.5
| 18
| 0.578947
| 4
| 19
| 2.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 0.105263
| 19
| 1
| 19
| 19
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
6f459588d169f229475293054371fc4404d9ebc9
| 53
|
py
|
Python
|
config_script.py
|
ethanabrooks/oyster
|
08b758b15ca19c50c43a137cba733b79be55654a
|
[
"MIT"
] | null | null | null |
config_script.py
|
ethanabrooks/oyster
|
08b758b15ca19c50c43a137cba733b79be55654a
|
[
"MIT"
] | null | null | null |
config_script.py
|
ethanabrooks/oyster
|
08b758b15ca19c50c43a137cba733b79be55654a
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python3.8
print("--log-dir=LOG_DIR")
| 17.666667
| 26
| 0.660377
| 10
| 53
| 3.4
| 0.8
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040816
| 0.075472
| 53
| 2
| 27
| 26.5
| 0.653061
| 0.45283
| 0
| 0
| 0
| 0
| 0.607143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
d2d0fe49db2b34180fa25c6e1b45c2a875f14c07
| 8,353
|
py
|
Python
|
pccm/targets/cuda.py
|
FindDefinition/PCCM
|
fa0cc4e41f886f288bbacf92cea1625d927a54ad
|
[
"MIT"
] | 3
|
2021-10-21T06:26:46.000Z
|
2022-03-10T11:14:40.000Z
|
pccm/targets/cuda.py
|
FindDefinition/PCCM
|
fa0cc4e41f886f288bbacf92cea1625d927a54ad
|
[
"MIT"
] | 1
|
2021-09-13T02:25:05.000Z
|
2021-09-13T02:27:50.000Z
|
pccm/targets/cuda.py
|
FindDefinition/PCCM
|
fa0cc4e41f886f288bbacf92cea1625d927a54ad
|
[
"MIT"
] | null | null | null |
import contextlib
import enum
from typing import Dict, List, Optional, Set, Tuple, Union
from ccimport import compat
from pccm.core import (Argument, ConstructorMeta, DestructorMeta,
ExternalFunctionMeta, FunctionCode, MemberFunctionMeta,
StaticMemberFunctionMeta, markers)
if compat.Python3_6AndLater:
from .cuda_ptx import CacheOpLd, CacheOpSt, PTXCode, PTXContext, RegDType
class CudaMemberFunctionMeta(MemberFunctionMeta):
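# "__forceinline__" requires the definition to be visible at every call site,
# hence such functions are treated as header-only.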
def is_header_only(self):
return super().is_header_only() or "__forceinline__" in self.attrs
class CudaStaticMemberFunctionMeta(StaticMemberFunctionMeta):
def is_header_only(self):
return super().is_header_only() or "__forceinline__" in self.attrs
class CudaConstructorMeta(ConstructorMeta):
def is_header_only(self):
return super().is_header_only() or "__forceinline__" in self.attrs
class CudaDestructorMeta(DestructorMeta):
def is_header_only(self):
return super().is_header_only() or "__forceinline__" in self.attrs
class CudaExternalFunctionMeta(ExternalFunctionMeta):
def is_header_only(self):
return self.inline or "__forceinline__" in self.attrs
def cuda_global_function(func=None,
inline: bool = False,
attrs: Optional[List[str]] = None,
macro_guard: Optional[str] = None,
impl_loc: str = "",
impl_file_suffix: str = ".cu",
launch_bounds: Optional[Tuple[int, int]] = None,
header_only: Optional[bool] = None,
name=None):
if attrs is None:
attrs = []
cuda_global_attrs = attrs + ["__global__"]
if launch_bounds is not None:
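# __launch_bounds__(maxThreadsPerBlock, minBlocksPerMultiprocessor) asks the
# CUDA compiler to limit register usage so the kernel can meet this occupancy.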
cuda_global_attrs.append("__launch_bounds__({}, {})".format(
launch_bounds[0], launch_bounds[1]))
return markers.external_function(func,
name=name,
inline=inline,
constexpr=False,
macro_guard=macro_guard,
impl_loc=impl_loc,
impl_file_suffix=impl_file_suffix,
attrs=cuda_global_attrs,
header_only=header_only)
def member_function(func=None,
host: bool = False,
device: bool = False,
inline: bool = False,
forceinline: bool = False,
constexpr: bool = False,
const: bool = False,
attrs: Optional[List[str]] = None,
macro_guard: Optional[str] = None,
impl_loc: str = "",
impl_file_suffix: str = ".cu",
header_only: Optional[bool] = None,
name=None):
if forceinline or inline:
assert forceinline is not inline, "can't set both inline and forceinline"
cuda_global_attrs = []
if forceinline:
cuda_global_attrs.append("__forceinline__")
if host:
cuda_global_attrs.append("__host__")
if device:
cuda_global_attrs.append("__device__")
if attrs is None:
attrs = []
attrs.extend(cuda_global_attrs)
meta = CudaMemberFunctionMeta(name=name,
inline=inline,
constexpr=constexpr,
virtual=False,
override=False,
final=False,
const=const,
macro_guard=macro_guard,
impl_loc=impl_loc,
impl_file_suffix=impl_file_suffix,
header_only=header_only,
attrs=attrs)
return markers.meta_decorator(func, meta)
def static_function(func=None,
host: bool = False,
device: bool = False,
inline: bool = False,
forceinline: bool = False,
constexpr: bool = False,
attrs: Optional[List[str]] = None,
macro_guard: Optional[str] = None,
impl_loc: str = "",
impl_file_suffix: str = ".cu",
header_only: Optional[bool] = None,
name=None):
if forceinline or inline:
assert forceinline is not inline, "can't set both inline and forceinline"
cuda_global_attrs = []
if forceinline:
cuda_global_attrs.append("__forceinline__")
if host:
cuda_global_attrs.append("__host__")
if device:
cuda_global_attrs.append("__device__")
if attrs is None:
attrs = []
attrs.extend(cuda_global_attrs)
meta = CudaStaticMemberFunctionMeta(name=name,
inline=inline,
constexpr=constexpr,
attrs=attrs,
macro_guard=macro_guard,
impl_loc=impl_loc,
impl_file_suffix=impl_file_suffix,
header_only=header_only)
return markers.meta_decorator(func, meta)
def external_function(func=None,
host: bool = False,
device: bool = False,
inline: bool = False,
forceinline: bool = False,
constexpr: bool = False,
attrs: Optional[List[str]] = None,
macro_guard: Optional[str] = None,
impl_loc: str = "",
impl_file_suffix: str = ".cu",
header_only: Optional[bool] = None,
name=None):
if forceinline or inline:
assert forceinline is not inline, "can't set both inline and forceinline"
cuda_global_attrs = []
if forceinline:
cuda_global_attrs.append("__forceinline__")
if host:
cuda_global_attrs.append("__host__")
if device:
cuda_global_attrs.append("__device__")
if attrs is None:
attrs = []
attrs.extend(cuda_global_attrs)
meta = CudaExternalFunctionMeta(name=name,
inline=inline,
constexpr=constexpr,
attrs=attrs,
macro_guard=macro_guard,
impl_loc=impl_loc,
impl_file_suffix=impl_file_suffix,
header_only=header_only)
return markers.meta_decorator(func, meta)
def constructor(func=None,
host: bool = False,
device: bool = False,
inline: bool = False,
forceinline: bool = False,
constexpr: bool = False,
attrs: Optional[List[str]] = None,
macro_guard: Optional[str] = None,
impl_loc: str = "",
impl_file_suffix: str = ".cu",
header_only: Optional[bool] = None,
name=None):
if forceinline or inline:
assert forceinline is not inline, "can't set both inline and forceinline"
cuda_global_attrs = []
if forceinline:
cuda_global_attrs.append("__forceinline__")
if host:
cuda_global_attrs.append("__host__")
if device:
cuda_global_attrs.append("__device__")
if attrs is None:
attrs = []
attrs.extend(cuda_global_attrs)
meta = CudaConstructorMeta(inline=inline,
constexpr=constexpr,
attrs=attrs,
macro_guard=macro_guard,
impl_loc=impl_loc,
impl_file_suffix=impl_file_suffix,
header_only=header_only)
return markers.meta_decorator(func, meta)
| 40.158654
| 81
| 0.511912
| 757
| 8,353
| 5.34214
| 0.122853
| 0.059347
| 0.085312
| 0.067507
| 0.757418
| 0.740356
| 0.724777
| 0.715628
| 0.706726
| 0.706726
| 0
| 0.000819
| 0.4153
| 8,353
| 207
| 82
| 40.352657
| 0.827191
| 0
| 0
| 0.782609
| 0
| 0
| 0.048486
| 0.002514
| 0
| 0
| 0
| 0
| 0.021739
| 1
| 0.054348
| false
| 0
| 0.032609
| 0.027174
| 0.168478
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d2daae0de32e8806ef9e4076b7077caef6c07fac
| 299
|
py
|
Python
|
unittest/person.py
|
MahanBi/python-tests
|
b7d8b8a1b6176bd991bd81fdc357203abc6c5288
|
[
"Apache-2.0"
] | null | null | null |
unittest/person.py
|
MahanBi/python-tests
|
b7d8b8a1b6176bd991bd81fdc357203abc6c5288
|
[
"Apache-2.0"
] | null | null | null |
unittest/person.py
|
MahanBi/python-tests
|
b7d8b8a1b6176bd991bd81fdc357203abc6c5288
|
[
"Apache-2.0"
] | null | null | null |
class person:
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name
def fullname(self):
return f"{self.first_name} {self.last_name}"
def email(self):
return f"{self.first_name}{self.last_name}@gmail.com"
| 27.181818
| 61
| 0.648829
| 43
| 299
| 4.186047
| 0.325581
| 0.25
| 0.288889
| 0.283333
| 0.516667
| 0.4
| 0.4
| 0.4
| 0.4
| 0
| 0
| 0
| 0.237458
| 299
| 10
| 62
| 29.9
| 0.789474
| 0
| 0
| 0
| 0
| 0
| 0.257525
| 0.143813
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0
| 0
| 0.25
| 0.75
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
d2f36e2e3afc35b1fc3093ca3b2fefb4d7039d72
| 62
|
py
|
Python
|
datasets/__init__.py
|
msr-fiddle/folded-cnns
|
c87ac633b0b700f6085018f1aeb2c3ca615d73bf
|
[
"MIT"
] | 2
|
2021-08-30T05:55:39.000Z
|
2021-10-04T17:31:06.000Z
|
datasets/__init__.py
|
msr-fiddle/folded-cnns
|
c87ac633b0b700f6085018f1aeb2c3ca615d73bf
|
[
"MIT"
] | null | null | null |
datasets/__init__.py
|
msr-fiddle/folded-cnns
|
c87ac633b0b700f6085018f1aeb2c3ca615d73bf
|
[
"MIT"
] | null | null | null |
import datasets.distilled
import datasets.subset_image_folder
| 20.666667
| 35
| 0.903226
| 8
| 62
| 6.75
| 0.75
| 0.518519
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 62
| 2
| 36
| 31
| 0.931034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d2fc504a61ce966b545fd4c3bd0be40acf497261
| 6,299
|
py
|
Python
|
src/tests/TestNoteName.py
|
ytyaru/Python.MusicTheory.Pitch.Key.i18n.201709181544
|
606030e3c86ccf4302b5fcef1b1ba36207ded999
|
[
"CC0-1.0"
] | null | null | null |
src/tests/TestNoteName.py
|
ytyaru/Python.MusicTheory.Pitch.Key.i18n.201709181544
|
606030e3c86ccf4302b5fcef1b1ba36207ded999
|
[
"CC0-1.0"
] | null | null | null |
src/tests/TestNoteName.py
|
ytyaru/Python.MusicTheory.Pitch.Key.i18n.201709181544
|
606030e3c86ccf4302b5fcef1b1ba36207ded999
|
[
"CC0-1.0"
] | null | null | null |
#!python3.6
import unittest
from MusicTheory.pitch.NoteName import NoteName
from MusicTheory.pitch.NoteNumber import NoteNumber
from MusicTheory.pitch.Key import Key
from MusicTheory.pitch.PitchClass import PitchClass
import Framework.ConstMeta
"""
Tests for NoteName.
"""
class TestNoteName(unittest.TestCase):
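# lowerLimit selects the octave-numbering convention: -1 for SPN (so C-1 is
# note number 0), -2 for the YAMAHA convention, and 0 for zero-based numbering.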
def test_Get_SPN(self):
self.assertEqual(0, NoteName.Get('C-1'))
self.assertEqual(69, NoteName.Get('A4'))
self.assertEqual(127, NoteName.Get('G9'))
lowerLimit = -1
for k in Key.Keys.keys():
for o in range(lowerLimit, lowerLimit+10, 1):
with self.subTest(key=k, octave=o):
octave = o + abs(lowerLimit) if lowerLimit < 0 else o - abs(lowerLimit)
expected = PitchClass.Get(Key.Get(k))[0] + (octave * (PitchClass.Max+1))
self.assertEqual(expected, NoteName.Get(k + str(o)))
expected = PitchClass.Get(Key.Get(k)+1)[0] + (octave * (PitchClass.Max+1))
self.assertEqual(expected, NoteName.Get(k + '#' + str(o)))
expected = PitchClass.Get(Key.Get(k)-1)[0] + (octave * (PitchClass.Max+1))
self.assertEqual(expected, NoteName.Get(k + 'b' + str(o)))
def test_Get_YAMAHA(self):
lowerLimit = -2
self.assertEqual(0, NoteName.Get('C-2', lowerLimit))
self.assertEqual(69, NoteName.Get('A3', lowerLimit))
self.assertEqual(127, NoteName.Get('G8', lowerLimit))
for k in Key.Keys.keys():
for o in range(lowerLimit, lowerLimit+10, 1):
with self.subTest(key=k, octave=o):
octave = o + abs(lowerLimit) if lowerLimit < 0 else o - abs(lowerLimit)
expected = PitchClass.Get(Key.Get(k))[0] + (octave * (PitchClass.Max+1))
self.assertEqual(expected, NoteName.Get(k + str(o), lowerLimit))
expected = PitchClass.Get(Key.Get(k)+1)[0] + (octave * (PitchClass.Max+1))
self.assertEqual(expected, NoteName.Get(k + '#' + str(o), lowerLimit))
expected = PitchClass.Get(Key.Get(k)-1)[0] + (octave * (PitchClass.Max+1))
self.assertEqual(expected, NoteName.Get(k + 'b' + str(o), lowerLimit))
def test_Get_ZERO(self):
lowerLimit = 0
self.assertEqual(0, NoteName.Get('C0', lowerLimit))
self.assertEqual(69, NoteName.Get('A5', lowerLimit))
self.assertEqual(127, NoteName.Get('G10', lowerLimit))
for k in Key.Keys.keys():
for o in range(lowerLimit, lowerLimit+10, 1):
with self.subTest(key=k, octave=o):
octave = o + abs(lowerLimit) if lowerLimit < 0 else o - abs(lowerLimit)
expected = PitchClass.Get(Key.Get(k))[0] + (octave * (PitchClass.Max+1))
self.assertEqual(expected, NoteName.Get(k + str(o), lowerLimit))
expected = PitchClass.Get(Key.Get(k)+1)[0] + (octave * (PitchClass.Max+1))
self.assertEqual(expected, NoteName.Get(k + '#' + str(o), lowerLimit))
expected = PitchClass.Get(Key.Get(k)-1)[0] + (octave * (PitchClass.Max+1))
self.assertEqual(expected, NoteName.Get(k + 'b' + str(o), lowerLimit))
def test_Get_Invalid_Type(self):
with self.assertRaises(TypeError) as e:
NoteName.Get(100)
self.assertIn('引数nameはstr型にしてください。', str(e.exception))
def test_Get_Invalid_Format(self):
with self.assertRaises(ValueError) as e:
NoteName.Get('無効値')
self.assertIn('引数nameが有効な書式ではありません。', str(e.exception))
with self.assertRaises(ValueError) as e:
NoteName.Get('C')
self.assertIn('引数nameが有効な書式ではありません。', str(e.exception))
with self.assertRaises(ValueError) as e:
NoteName.Get('c1')
self.assertIn('引数nameが有効な書式ではありません。', str(e.exception))
with self.assertRaises(ValueError) as e:
NoteName.Get('C--1')
self.assertIn('引数nameが有効な書式ではありません。', str(e.exception))
with self.assertRaises(ValueError) as e:
NoteName.Get('C+-1')
self.assertIn('引数nameが有効な書式ではありません。', str(e.exception))
def test_GetOctave_Invalid_Type(self):
with self.assertRaises(TypeError) as e:
NoteName.Get('C0', '無効値')
self.assertIn(f"引数lowerLimitはint型にしてください。", str(e.exception))
def test_GetOctave_SPN_OutOfRange_LOW(self):
lowerLimit = -1
with self.assertRaises(ValueError) as e:
NoteName.Get('C-2', lowerLimit)
self.assertIn(f"lowerLimit={lowerLimit}のときoctaveは{lowerLimit}〜{lowerLimit+10}までです。", str(e.exception))
def test_GetOctave_SPN_OutOfRange_UP(self):
lowerLimit = -1
with self.assertRaises(ValueError) as e:
NoteName.Get('C10', lowerLimit)
self.assertIn(f"lowerLimit={lowerLimit}のときoctaveは{lowerLimit}〜{lowerLimit+10}までです。", str(e.exception))
def test_GetOctave_YAMAHA_OutOfRange_LOW(self):
lowerLimit = -2
with self.assertRaises(ValueError) as e:
NoteName.Get('C-3', lowerLimit)
self.assertIn(f"lowerLimit={lowerLimit}のときoctaveは{lowerLimit}〜{lowerLimit+10}までです。", str(e.exception))
def test_GetOctave_YAMAHA_OutOfRange_UP(self):
lowerLimit = -2
with self.assertRaises(ValueError) as e:
NoteName.Get('C9', lowerLimit)
self.assertIn(f"lowerLimit={lowerLimit}のときoctaveは{lowerLimit}〜{lowerLimit+10}までです。", str(e.exception))
def test_GetOctave_ZERO_OutOfRange_LOW(self):
lowerLimit = 0
with self.assertRaises(ValueError) as e:
NoteName.Get('C-1', lowerLimit)
self.assertIn(f"lowerLimit={lowerLimit}のときoctaveは{lowerLimit}〜{lowerLimit+10}までです。", str(e.exception))
def test_GetOctave_ZERO_OutOfRange_UP(self):
lowerLimit = 0
with self.assertRaises(ValueError) as e:
NoteName.Get('C11', lowerLimit)
self.assertIn(f"lowerLimit={lowerLimit}のときoctaveは{lowerLimit}〜{lowerLimit+10}までです。", str(e.exception))
if __name__ == '__main__':
unittest.main()
| 48.829457
| 110
| 0.610414
| 740
| 6,299
| 5.140541
| 0.117568
| 0.089642
| 0.068349
| 0.047844
| 0.869611
| 0.841483
| 0.782072
| 0.782072
| 0.756046
| 0.755521
| 0
| 0.02173
| 0.254802
| 6,299
| 128
| 111
| 49.210938
| 0.787388
| 0.001588
| 0
| 0.567568
| 0
| 0
| 0.098931
| 0.067177
| 0
| 0
| 0
| 0
| 0.396396
| 1
| 0.108108
| false
| 0
| 0.054054
| 0
| 0.171171
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8248e116b3c71914668178132869ccf3d3599f33
| 31,154
|
py
|
Python
|
tests/licensedcode/test_index.py
|
s4-2/scancode-toolkit
|
8931b42e2630b94d0cabc834dfb3c16f01f82321
|
[
"Apache-2.0",
"CC-BY-4.0"
] | 2
|
2021-04-08T07:04:55.000Z
|
2021-05-14T04:20:33.000Z
|
tests/licensedcode/test_index.py
|
s4-2/scancode-toolkit
|
8931b42e2630b94d0cabc834dfb3c16f01f82321
|
[
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null |
tests/licensedcode/test_index.py
|
s4-2/scancode-toolkit
|
8931b42e2630b94d0cabc834dfb3c16f01f82321
|
[
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null |
#
# Copyright (c) nexB Inc. and others. All rights reserved.
# ScanCode is a trademark of nexB Inc.
# SPDX-License-Identifier: Apache-2.0
# See http://www.apache.org/licenses/LICENSE-2.0 for the license text.
# See https://github.com/nexB/scancode-toolkit for support or download.
# See https://aboutcode.org for more information about nexB OSS projects.
#
import os
import pytest
from commoncode.testcase import FileBasedTesting
from licensedcode import index
from licensedcode import match_seq
from licensedcode import models
from licensedcode.query import Query
from licensedcode.spans import Span
from licensedcode.tracing import get_texts
from licensedcode_test_utils import mini_legalese # NOQA
def MiniLicenseIndex(*args, **kwargs):
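# Bind the small test vocabulary so tests index a fixed, minimal legalese set.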
return index.LicenseIndex(*args, _legalese=mini_legalese, **kwargs)
TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
class IndexTesting(FileBasedTesting):
test_data_dir = TEST_DATA_DIR
def get_test_rules(self, base, subset=None):
base = self.get_test_loc(base)
test_files = sorted(os.listdir(base))
if subset:
test_files = [t for t in test_files if t in subset]
return [models.Rule(text_file=os.path.join(base, license_key), license_expression=license_key) for license_key in test_files]
class TestIndexing(IndexTesting):
def test_index_structures(self):
# rule text, unique low/high len, low/high len
test_rules = [
(u'a one a two a three licensed.', (4, 1, 4, 1)),
(u'a four a five a six licensed.', (4, 1, 4, 1)),
(u'one two three four five gpl', (6, 0, 6, 0)),
(u'The rose is a rose mit', (4, 0, 5, 0)),
(u'The license is GPL', (4, 1, 4, 1)),
(u'The license is this GPL', (5, 1, 5, 1)),
(u'a license is a rose', (3, 1, 3, 1)),
(u'the gpl', (2, 0, 2, 0)),
(u'the mit', (2, 0, 2, 0)),
(u'the bsd', (2, 0, 2, 0)),
(u'the lgpl', (2, 0, 2, 0)),
]
idx = MiniLicenseIndex()
rules = [models.Rule(stored_text=t[0]) for t in test_rules]
idx._add_rules(rules, _legalese=mini_legalese,)
assert idx.len_legalese == 40
expected_lengths = [r[1] for r in test_rules]
results = [
(rule.length_unique, rule.high_length_unique,
rule.length, rule.high_length) for rule in rules]
assert results == expected_lengths
expected = set([
'bsd',
'five',
'four',
'gpl',
'is',
'lgpl',
'mit',
'one',
'rose',
'six',
'the',
'this',
'three',
'two'])
xdict = {key for key, val in idx.dictionary.items() if val >= idx.len_legalese}
assert xdict == expected
xtbi = sorted([
'one',
'two',
'three',
'four',
'five',
'six',
'gpl',
'the',
'rose',
'is',
'mit',
'this',
'bsd',
'lgpl'])
assert sorted([t for i, t in enumerate(idx.tokens_by_tid) if i >= idx.len_legalese]) == xtbi
def test_index_structures_with__add_rules(self):
base = self.get_test_loc('index/tokens_count')
keys = sorted(os.listdir(base))
idx = MiniLicenseIndex()
rules = []
for key in keys:
rules.append(models.Rule(
text_file=os.path.join(base, key), license_expression='gpl-2.0'))
idx._add_rules(rules, _legalese=mini_legalese)
assert idx.len_legalese == 40
expected = set([
'all',
'allowed',
'and',
'any',
'for',
'is',
'redistribution',
'thing',
'yes'])
xdict = {key for key, val in idx.dictionary.items() if val >= idx.len_legalese}
assert xdict == expected
xtbi = sorted([
'all',
'allowed',
'and',
'any',
'for',
'is',
'redistribution',
'thing',
'yes'
])
assert sorted([t for i, t in enumerate(idx.tokens_by_tid) if i >= idx.len_legalese]) == xtbi
expected_msets_by_rid = [
{u'redistribution': 1},
{u'is': 1, u'redistribution': 1, u'yes': 1},
{u'allowed': 1, u'is': 1, u'redistribution': 1, u'yes': 1},
{u'allowed': 1, u'for': 1, u'is': 1, u'redistribution': 1, u'yes': 1},
{u'all': 1, u'allowed': 1, u'for': 1, u'is': 1, u'redistribution': 1},
{u'all': 1,
u'allowed': 1,
u'and': 1,
u'any': 1,
u'is': 1,
u'redistribution': 1,
u'thing': 1},
{u'is': 1, u'redistribution': 1},
{u'allowed': 1, u'is': 1, u'redistribution': 1},
{u'allowed': 1, u'for': 1, u'is': 1, u'redistribution': 1},
{u'all': 1, u'allowed': 1, u'is': 1, u'redistribution': 1, u'yes': 1},
{u'all': 1, u'allowed': 1, u'and': 1, u'is': 1, u'redistribution': 1},
{u'all': 1, u'allowed': 1, u'is': 1, u'redistribution': 1},
{u'all': 1,
u'allowed': 1,
u'and': 1,
u'any': 1,
u'is': 1,
u'redistribution': 1}]
htmset = [{idx.tokens_by_tid[tok]: freq for (tok, freq) in tids_mset.items()}
for tids_mset in idx.msets_by_rid]
assert htmset == expected_msets_by_rid
def test_index_fails_on_duplicated_rules(self):
rule_dir = self.get_test_loc('index/no_duplicated_rule')
try:
MiniLicenseIndex(models.load_rules(rule_dir))
self.fail('Exception on dupes not raised')
except AssertionError as e:
assert u'Duplicate rules' in str(e)
@pytest.mark.scanslow
def test_index_does_not_fail_on_rules_with_similar_normalized_names(self):
rule_dir = self.get_test_loc('index/similar_names/rules')
lics_dir = self.get_test_loc('index/similar_names/licenses')
index.LicenseIndex(models.get_rules(lics_dir, rule_dir))
class TestMatchNoTemplates(IndexTesting):
test_data_dir = TEST_DATA_DIR
def test_match_exact_from_string_once(self):
rule_text = 'Redistribution and use in source and binary forms, with or without modification, are permitted'
idx = MiniLicenseIndex([models.Rule(stored_text=rule_text, license_expression='bsd')])
querys = '''
The
Redistribution and use in source and binary forms, with or without modification, are permitted.
Always'''
result = idx.match(query_string=querys)
assert len(result) == 1
match = result[0]
qtext, itext = get_texts(match)
assert qtext == 'Redistribution and use in source and binary forms, with or without modification,\nare permitted.'
assert itext == 'redistribution and use in source and binary forms with or without modification\nare permitted'
assert match.qspan == Span(0, 13)
assert match.ispan == Span(0, 13)
def test_match_exact_from_string_twice_with_repeated_text(self):
_stored_text = u'licensed under the GPL, licensed under the GPL'
# 0 1 2 3 4 5 6 7
license_expression = 'tst'
rule = models.Rule(license_expression=license_expression, stored_text=_stored_text)
idx = MiniLicenseIndex([rule])
querys = u'Hi licensed under the GPL, licensed under the GPL yes.'
# 0 1 2 3 4 5 6 7 8 9
result = idx.match(query_string=querys)
assert len(result) == 1
match = result[0]
qtext, itext = get_texts(match)
assert qtext == 'licensed under the GPL, licensed under the GPL'
assert itext == 'licensed under the gpl licensed under the gpl'
assert match.qspan == Span(0, 7)
assert match.ispan == Span(0, 7)
# match again to ensure that there are no state side effects
result = idx.match(query_string=querys)
assert len(result) == 1
match = result[0]
assert match.qspan == Span(0, 7)
assert match.ispan == Span(0, 7)
qtext, itext = get_texts(match)
assert qtext == u'licensed under the GPL, licensed under the GPL'
assert itext == u'licensed under the gpl licensed under the gpl'
def test_match_exact_with_junk_in_between_good_tokens(self):
_stored_text = u'licensed under the GPL, licensed under the GPL'
license_expression = 'tst'
rule = models.Rule(license_expression=license_expression, stored_text=_stored_text)
idx = MiniLicenseIndex([rule])
querys = u'Hi licensed that under is the that GPL, licensed or under not the GPL by yes.'
result = idx.match(query_string=querys)
assert len(result) == 1
match = result[0]
qtext, itext = get_texts(match)
assert qtext == u'licensed [that] under [is] the [that] GPL, licensed [or] under [not] the GPL'
assert itext == u'licensed under the gpl licensed under the gpl'
def test_match_exact_from_file(self):
idx = MiniLicenseIndex(self.get_test_rules('index/mini'))
query_loc = self.get_test_loc('index/queryperfect-mini')
result = idx.match(location=query_loc)
assert len(result) == 1
match = result[0]
qtext, itext = get_texts(match)
assert qtext == 'Redistribution and use in source and binary forms, with or without modification,\nare permitted.'
assert itext == 'redistribution and use in source and binary forms with or without modification\nare permitted'
assert match.qspan == Span(0, 13)
assert match.ispan == Span(0, 13)
def test_match_multiple(self):
test_rules = self.get_test_rules('index/bsd')
idx = MiniLicenseIndex(test_rules)
query = self.get_test_loc('index/querysimple')
result = idx.match(location=query)
assert len(result) == 1
match = result[0]
assert match.qspan == Span(0, 211)
assert match.ispan == Span(0, 211)
def test_match_return_correct_offsets(self):
# notes: A is a stopword. This and that are not
_stored_text = u'This GPL. A MIT. That LGPL.'
# 0 1 2 3 4 5
license_expression = 'tst'
rule = models.Rule(license_expression=license_expression, stored_text=_stored_text)
idx = MiniLicenseIndex([rule])
querys = u'some junk. this GPL. A MIT. that LGPL.'
# 0 1 2 3 4 5 6 7
result = idx.match(query_string=querys)
assert len(result) == 1
match = result[0]
qtext, itext = get_texts(match)
assert qtext == 'this GPL. A MIT. that LGPL.'
assert itext == 'this gpl mit that lgpl'
assert match.qspan == Span(0, 4)
assert match.ispan == Span(0, 4)
class TestMatchWithTemplates(IndexTesting):
test_data_dir = TEST_DATA_DIR
def test_match_with_template_and_multiple_rules(self):
test_rules = self.get_test_rules('index/bsd_templates',)
idx = MiniLicenseIndex(test_rules)
querys = u'''
Hello, what about this
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of nexB Inc. nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Goodbye
No part of match '''
result = idx.match(query_string=querys)
assert len(result) == 1
match = result[0]
assert match.matcher == match_seq.MATCH_SEQ
exp_qtext = u"""
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of [nexB] [Inc]. nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
""".split()
exp_itext = u"""
Redistribution and use in source and binary forms with or without
modification are permitted provided that the following conditions
are met
Redistributions of source code must retain the above copyright
notice this list of conditions and the following disclaimer
Redistributions in binary form must reproduce the above copyright
notice this list of conditions and the following disclaimer in the
documentation and or other materials provided with the distribution
Neither the name of nor the names of its contributors may be
used to endorse or promote products derived from this software
without specific prior written permission
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES INCLUDING BUT NOT
LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
PARTICULAR PURPOSE ARE DISCLAIMED IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT INDIRECT INCIDENTAL
SPECIAL EXEMPLARY OR CONSEQUENTIAL DAMAGES INCLUDING BUT NOT LIMITED
TO PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES LOSS OF USE DATA OR
PROFITS OR BUSINESS INTERRUPTION HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY WHETHER IN CONTRACT STRICT LIABILITY OR TORT INCLUDING
NEGLIGENCE OR OTHERWISE ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
""".lower().split()
qtext, itext = get_texts(match)
assert qtext.split() == exp_qtext
assert itext.split() == exp_itext
assert match.qspan == (Span(1, 72) | Span(74, 211))
assert match.ispan == Span(0, 209)
assert match.coverage() == 100
def test_match_to_indexed_template_with_few_tokens_around_gaps(self):
# Was failing when a gap in a template starts very close to the start of
# a rule tokens seq. We may still skip that, but we capture a large
# match anyway.
rule = models.Rule(text_file=self.get_test_loc('index/templates/idx.txt'),
license_expression='test')
legalese = (
mini_legalese
| set(['permission', 'written', 'registered', 'derived', 'damage', 'due']))
idx = index.LicenseIndex([rule], _legalese=legalese)
query_loc = self.get_test_loc('index/templates/query.txt')
result = idx.match(location=query_loc)
assert len(result) == 1
match = result[0]
exp_qtext = u"""
All Rights Reserved.
Redistribution and use of this software and associated documentation
("Software"), with or without modification, are permitted provided
that the following conditions are met:
1. Redistributions of source code must retain copyright
statements and notices. Redistributions must also contain a
copy of this document.
2. Redistributions in binary form must reproduce the
above copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other
materials provided with the distribution.
3. The name "[groovy]" must not be used to endorse or promote
products derived from this Software without prior written
permission of [The] [Codehaus]. For written permission,
please contact [info]@[codehaus].[org].
4. Products derived from this Software may not be called "[groovy]"
nor may "[groovy]" appear in their names without prior written
permission of [The] [Codehaus]. "[groovy]" is a registered
trademark of [The] [Codehaus].
5. Due credit should be given to [The] [Codehaus] -
[http]://[groovy].[codehaus].[org]/
[THIS] [SOFTWARE] [IS] [PROVIDED] [BY] [THE] [CODEHAUS] AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT
NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
[THE] [CODEHAUS] OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
OF THE [POSSIBILITY] [OF] [SUCH] DAMAGE.
""".split()
exp_itext = u"""
All Rights Reserved
Redistribution and use of this software and associated documentation
Software with or without modification are permitted provided that
the following conditions are met
1 Redistributions of source code must retain copyright statements
and notices Redistributions must also contain copy of this
document
2 Redistributions in binary form must reproduce the above copyright
notice this list of conditions and the following disclaimer in the
documentation and or other materials provided with the distribution
3 The name must not be used to endorse or promote products
derived from this Software without prior written permission of
For written permission please contact
4 Products derived from this Software may not be called nor
may appear in their names without prior written permission of
is registered trademark of
5 Due credit should be given to
<THIS> <SOFTWARE> <IS> <PROVIDED> <BY>
AND CONTRIBUTORS AS IS AND ANY
EXPRESSED OR IMPLIED WARRANTIES INCLUDING BUT NOT LIMITED TO THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR PARTICULAR
PURPOSE ARE DISCLAIMED IN NO EVENT SHALL OR ITS CONTRIBUTORS
BE LIABLE FOR ANY DIRECT INDIRECT INCIDENTAL SPECIAL EXEMPLARY OR
CONSEQUENTIAL DAMAGES INCLUDING BUT NOT LIMITED TO PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES LOSS OF USE DATA OR PROFITS OR BUSINESS
INTERRUPTION HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY WHETHER
IN CONTRACT STRICT LIABILITY OR TORT INCLUDING NEGLIGENCE OR
OTHERWISE ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE EVEN IF
ADVISED OF THE DAMAGE
""".lower().split()
qtext, itext = get_texts(match)
assert qtext.split() == exp_qtext
assert itext.split() == exp_itext
assert match.coverage() > 97
assert match.matcher == match_seq.MATCH_SEQ
def test_match_with_templates_with_redundant_tokens_yield_single_exact_match(self):
_stored_text = u'copyright reserved mit is license, {{}} copyright reserved mit is license'
# 0 1 2 3 4 5 6 7 8 9
license_expression = 'tst'
rule = models.Rule(license_expression=license_expression, stored_text=_stored_text)
idx = MiniLicenseIndex([rule])
querys = u'Hi my copyright reserved mit is license is the copyright reserved mit is license yes.'
# 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14
qry = Query(query_string=querys, idx=idx)
# convert tid to actual token strings
tks_as_str = lambda tks: [None if tid is None else idx.tokens_by_tid[tid] for tid in tks]
expected = [None, None, u'copyright', u'reserved', u'mit', u'is', u'license', u'is', None, u'copyright', u'reserved', u'mit', u'is', u'license', None]
# 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14
assert tks_as_str(qry.tokens_with_unknowns()) == expected
result = idx.match(query_string=querys)
assert len(result) == 1
match = result[0]
assert match.qspan == Span(0, 4) | Span(6, 10)
assert match.ispan == Span(0, 9)
assert match.coverage() == 100
qtext, itext = get_texts(match)
assert qtext == 'copyright reserved mit is license [is] [the] copyright reserved mit is license'
assert itext == 'copyright reserved mit is license copyright reserved mit is license'
class TestIndexDumpLoad(IndexTesting):
test_data_dir = TEST_DATA_DIR
def test_dumps_loads_default(self):
test_rules = self.get_test_rules('index/dump_load')
idx = MiniLicenseIndex(test_rules)
dumps = idx.dumps()
idx2 = index.LicenseIndex.loads(dumps)
expected = [
u'and', u'are', u'as', u'binary', u'by', u'conditions',
u'copyright', u'following', u'forms', u'holder', u'in', u'is',
u'met', u'permitted', u'provided', u'redistribution', u'software',
u'source', u'that', u'the', u'this', u'use']
assert sorted([k for k, v in idx2.dictionary.items() if v >= idx2.len_legalese]) == expected
def test_dump_load_default(self):
test_rules = self.get_test_rules('index/dump_load')
idx = MiniLicenseIndex(test_rules)
test_dump = self.get_temp_file()
with open(test_dump, 'wb') as td:
idx.dump(td)
with open(test_dump, 'rb') as td:
idx2 = index.LicenseIndex.load(td)
expected = [
u'and', u'are', u'as', u'binary', u'by', u'conditions',
u'copyright', u'following', u'forms', u'holder', u'in', u'is',
u'met', u'permitted', u'provided', u'redistribution', u'software',
u'source', u'that', u'the', u'this', u'use']
assert sorted([k for k, v in idx2.dictionary.items() if v >= idx2.len_legalese]) == expected
with open(test_dump, 'rb') as td:
idx3 = index.LicenseIndex.loads(td.read())
assert sorted([k for k, v in idx3.dictionary.items() if v >= idx3.len_legalese]) == expected
def test_dumps_fast_loads_fast(self):
test_rules = self.get_test_rules('index/dump_load')
idx = MiniLicenseIndex(test_rules)
dumps = idx.dumps(fast=True)
idx2 = index.LicenseIndex.loads(dumps, fast=True)
expected = [
u'and', u'are', u'as', u'binary', u'by', u'conditions',
u'copyright', u'following', u'forms', u'holder', u'in', u'is',
u'met', u'permitted', u'provided', u'redistribution', u'software',
u'source', u'that', u'the', u'this', u'use']
assert sorted([k for k, v in idx2.dictionary.items() if v >= idx2.len_legalese]) == expected
def test_dumps_slow_loads_slow(self):
test_rules = self.get_test_rules('index/dump_load')
idx = MiniLicenseIndex(test_rules)
dumps = idx.dumps(fast=False)
idx2 = index.LicenseIndex.loads(dumps, fast=False)
expected = [
u'and', u'are', u'as', u'binary', u'by', u'conditions',
u'copyright', u'following', u'forms', u'holder', u'in', u'is',
u'met', u'permitted', u'provided', u'redistribution', u'software',
u'source', u'that', u'the', u'this', u'use']
assert sorted([k for k, v in idx2.dictionary.items() if v >= idx2.len_legalese]) == expected

    def test_dumps_fast_loads_slow(self):
test_rules = self.get_test_rules('index/dump_load')
idx = MiniLicenseIndex(test_rules)
dumps = idx.dumps(fast=True)
idx2 = index.LicenseIndex.loads(dumps, fast=False)
expected = [
u'and', u'are', u'as', u'binary', u'by', u'conditions',
u'copyright', u'following', u'forms', u'holder', u'in', u'is',
u'met', u'permitted', u'provided', u'redistribution', u'software',
u'source', u'that', u'the', u'this', u'use']
assert sorted([k for k, v in idx2.dictionary.items() if v >= idx2.len_legalese]) == expected

    def test_dumps_slow_loads_fast(self):
test_rules = self.get_test_rules('index/dump_load')
idx = MiniLicenseIndex(test_rules)
dumps = idx.dumps(fast=False)
idx2 = index.LicenseIndex.loads(dumps, fast=True)
expected = [
u'and', u'are', u'as', u'binary', u'by', u'conditions',
u'copyright', u'following', u'forms', u'holder', u'in', u'is',
u'met', u'permitted', u'provided', u'redistribution', u'software',
u'source', u'that', u'the', u'this', u'use']
assert sorted([k for k, v in idx2.dictionary.items() if v >= idx2.len_legalese]) == expected

    def test_dump_fast_load_fast(self):
test_rules = self.get_test_rules('index/dump_load')
idx = MiniLicenseIndex(test_rules)
test_dump = self.get_temp_file()
with open(test_dump, 'wb') as td:
idx.dump(td, fast=True)
with open(test_dump, 'rb') as td:
idx2 = index.LicenseIndex.load(td, fast=True)
expected = [
u'and', u'are', u'as', u'binary', u'by', u'conditions',
u'copyright', u'following', u'forms', u'holder', u'in', u'is',
u'met', u'permitted', u'provided', u'redistribution', u'software',
u'source', u'that', u'the', u'this', u'use']
assert sorted([k for k, v in idx2.dictionary.items() if v >= idx2.len_legalese]) == expected

    def test_dump_fast_load_slow(self):
test_rules = self.get_test_rules('index/dump_load')
idx = MiniLicenseIndex(test_rules)
test_dump = self.get_temp_file()
with open(test_dump, 'wb') as td:
idx.dump(td, fast=True)
with open(test_dump, 'rb') as td:
idx2 = index.LicenseIndex.load(td, fast=False)
expected = [
u'and', u'are', u'as', u'binary', u'by', u'conditions',
u'copyright', u'following', u'forms', u'holder', u'in', u'is',
u'met', u'permitted', u'provided', u'redistribution', u'software',
u'source', u'that', u'the', u'this', u'use']
assert sorted([k for k, v in idx2.dictionary.items() if v >= idx2.len_legalese]) == expected

    def test_dump_slow_load_slow(self):
test_rules = self.get_test_rules('index/dump_load')
idx = MiniLicenseIndex(test_rules)
test_dump = self.get_temp_file()
with open(test_dump, 'wb') as td:
idx.dump(td, fast=False)
with open(test_dump, 'rb') as td:
idx2 = index.LicenseIndex.load(td, fast=False)
expected = [
u'and', u'are', u'as', u'binary', u'by', u'conditions',
u'copyright', u'following', u'forms', u'holder', u'in', u'is',
u'met', u'permitted', u'provided', u'redistribution', u'software',
u'source', u'that', u'the', u'this', u'use']
assert sorted([k for k, v in idx2.dictionary.items() if v >= idx2.len_legalese]) == expected

    def test_dump_slow_load_fast(self):
test_rules = self.get_test_rules('index/dump_load')
idx = MiniLicenseIndex(test_rules)
test_dump = self.get_temp_file()
with open(test_dump, 'wb') as td:
idx.dump(td, fast=False)
with open(test_dump, 'rb') as td:
idx2 = index.LicenseIndex.load(td, fast=True)
expected = [
u'and', u'are', u'as', u'binary', u'by', u'conditions',
u'copyright', u'following', u'forms', u'holder', u'in', u'is',
u'met', u'permitted', u'provided', u'redistribution', u'software',
u'source', u'that', u'the', u'this', u'use']
assert sorted([k for k, v in idx2.dictionary.items() if v >= idx2.len_legalese]) == expected

hexsha: 82a9a6beea9ff2c4324cd8f23199253668d656c9 | size: 194 | ext: py | lang: Python
path: backtracking/bits_clean.py | repo: massimo-nocentini/PhD-thesis @ f30ec2cb9cdf1e93532935448c3438700b9fcbba
licenses: ["MIT"] | stars: null | issues: null | forks: null

def is_on(S, j):
    return (S & (1 << j)) >> j


def set_all(n):
    return (1 << n) - 1


def low_bit(S):
    return (S & (-S)).bit_length() - 1


def clear_bit(S, j):
    return S & ~(1 << j)
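
# Illustrative usage of the bitmask helpers above (not part of the original
# file): sets are encoded as integer bitmasks.
if __name__ == '__main__':
    S = set_all(4)               # 0b1111
    assert is_on(S, 2) == 1
    S = clear_bit(S, 2)          # 0b1011
    assert is_on(S, 2) == 0
    assert low_bit(S) == 0       # index of the lowest set bit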

hexsha: 82ddaf16dcdb53c2818baf1b0b9729f88771b6ee | size: 846 | ext: py | lang: Python
path: src/tf_transformers/utils/__init__.py | repo: ashishpatel26/tf-transformers @ 2a1128247898f6222544ecda0f1bcea9b0576ce2
licenses: ["Apache-2.0"] | stars: 2 (2021-03-31T17:48:16.000Z to 2021-08-22T11:52:19.000Z) | issues: null | forks: null

from tf_transformers.utils.convert.convert_albert import \
convert_albert_hf_to_tf_transformers
from tf_transformers.utils.convert.convert_bert import \
convert_bert_hf_to_tf_transformers
from tf_transformers.utils.convert.convert_gpt2 import \
convert_gpt2_hf_to_tf_transformers
from tf_transformers.utils.convert.convert_roberta import \
convert_roberta_hf_to_tf_transformers
from tf_transformers.utils.convert.convert_t5 import \
convert_t5_hf_to_tf_transformers
from tf_transformers.utils.convert.convert_mt5 import \
convert_mt5_hf_to_tf_transformers
from tf_transformers.utils.fast_sp_alignment import fast_sp_alignment
from tf_transformers.utils.tokenization import BasicTokenizer
from tf_transformers.utils.utils import (get_config, get_model_wrapper,
validate_model_name)

hexsha: 7d8fc1f000b070c202b53e0fdcc61bb494451e61 | size: 138 | ext: py | lang: Python
path: index_based/metrics.py | repo: paulsbrookes/subcipher @ 80a8ffe1753af542e76b62080d891a0a982d8d99
licenses: ["Apache-2.0"] | stars: null | issues: null | forks: null

import numpy as np


def pair_metric(rates, natural_rates):
    # Negative sum of the elementwise product of observed and reference
    # ("natural") rates; the small constant keeps exactly-zero reference
    # rates from silencing a term entirely.
    metric = -np.sum(rates.rates * (natural_rates.rates + 1e-8))
    return metric
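
# Minimal, hypothetical driver for pair_metric (not in the original file):
# it assumes only that both arguments expose a `.rates` ndarray, which is
# all the function body above requires.
if __name__ == '__main__':
    from types import SimpleNamespace
    observed = SimpleNamespace(rates=np.array([0.1, 0.3, 0.6]))
    natural = SimpleNamespace(rates=np.array([0.2, 0.3, 0.5]))
    # More negative output means greater overlap between the two profiles.
    print(pair_metric(observed, natural))  # ~ -0.41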

hexsha: 81a36d115a4d5e5854f0257244afe4e9509a9e13 | size: 183 | ext: py | lang: Python
path: tests/test_module.py | repo: willrayeo/sentinel3-olci @ c4fa8a831c1b229ea38c8067a2bca6d0dadc47e3
licenses: ["Apache-2.0"] | stars: null | issues: null | forks: null

import unittest

import stactools.sentinel3_olci


class TestModule(unittest.TestCase):
    def test_version(self):
        self.assertIsNotNone(stactools.sentinel3_olci.__version__)

hexsha: 81b43fba13edd53c7e45ee1c6394cf32849bed3f | size: 86 | ext: py | lang: Python
path: tests/test_expr.py | repo: eltonn/toki @ 22efd9ce84414380904e3a5ac84e84de9bdb5bce
licenses: ["Apache-2.0"] | stars: 1 (2020-04-27T11:55:25.000Z) | issues: 7 (2020-05-29T23:22:21.000Z to 2020-11-30T20:49:37.000Z) | forks: 1 (2020-04-29T21:59:25.000Z)

from .backend_example import TokiExample


def test_expr_compile():
    TokiExample()

hexsha: c4932b6221e1508d169ca8049c405ae85a3280b2 | size: 26 | ext: py | lang: Python
path: nama/ayya1.py | repo: Ghani280706/introducing-yourself @ 64a0931e147d115f08e0a50c882d7af3fc9cfe61
licenses: ["Unlicense"] | stars: 6 (2021-10-03T14:17:05.000Z to 2021-11-22T07:12:39.000Z) | issues: null | forks: 34 (2021-10-03T00:47:02.000Z to 2021-11-22T07:07:32.000Z)

print('Vitria Widiasari')

hexsha: c4aab8e5cf3034e049d2ea7c10cc361517a58fe8 | size: 18 | ext: py | lang: Python
path: odoo-13.0/odoo/addons/base/controllers/__init__.py | repo: VaibhavBhujade/Blockchain-ERP-interoperability @ b5190a037fb6615386f7cbad024d51b0abd4ba03
licenses: ["MIT"] | stars: 12 (2021-03-26T08:39:40.000Z to 2022-03-16T02:20:10.000Z) | issues: 13 (2020-12-20T16:00:21.000Z to 2022-03-14T14:55:30.000Z) | forks: 17 (2020-08-31T11:18:49.000Z to 2022-02-09T05:57:31.000Z)

from . import rpc

hexsha: c4afee0cda7608a7e157098fe9e7b9028a07846d | size: 15,264 | ext: py | lang: Python
path: library/sources/core/yepCore_Add_Haswell.py | repo: rguthrie3/Yeppp-Mirror @ 23cc725a7489d376558bef3e92e31fda014b6c47
licenses: ["BSD-3-Clause"] | stars: 1 (2020-11-07T13:33:14.000Z) | issues: null | forks: null

from peachpy.x86_64 import *
from peachpy import *
from kernels.binop.binop_VV_V import binop_VV_V
from kernels.binop.binop_VS_V import binop_VS_V
from kernels.binop.binop_IVV_IV import binop_IVV_IV
from kernels.add.add_IVS_IV_generic import add_IVS_IV_generic
from common.YepStatus import *
# =======================================================================
# =======================================================================
# ADD VECTOR TO VECTOR
# =======================================================================
# =======================================================================
arg_x = Argument(ptr(const_Yep8s), name="xPointer")
arg_y = Argument(ptr(const_Yep8s), name="yPointer")
arg_z = Argument(ptr(Yep8s), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V8sV8s_V8s",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V8sV8s_V8s:
binop_VV_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep8s), name="xPointer")
arg_y = Argument(ptr(const_Yep8s), name="yPointer")
arg_z = Argument(ptr(Yep16s), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V8sV8s_V16s",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V8sV8s_V16s:
binop_VV_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep8u), name="xPointer")
arg_y = Argument(ptr(const_Yep8u), name="yPointer")
arg_z = Argument(ptr(Yep16u), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V8uV8u_V16u",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V8uV8u_V16u:
binop_VV_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep16s), name="xPointer")
arg_y = Argument(ptr(const_Yep16s), name="yPointer")
arg_z = Argument(ptr(Yep16s), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V16sV16s_V16s",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V16sV16s_V16s:
binop_VV_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep16s), name="xPointer")
arg_y = Argument(ptr(const_Yep16s), name="yPointer")
arg_z = Argument(ptr(Yep32s), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V16sV16s_V32s",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V16sV16s_V32s:
binop_VV_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep16u), name="xPointer")
arg_y = Argument(ptr(const_Yep16u), name="yPointer")
arg_z = Argument(ptr(Yep32u), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V16uV16u_V32u",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V16uV16u_V32u:
binop_VV_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep32s), name="xPointer")
arg_y = Argument(ptr(const_Yep32s), name="yPointer")
arg_z = Argument(ptr(Yep32s), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V32sV32s_V32s",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V32sV32s_V32s:
binop_VV_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep32s), name="xPointer")
arg_y = Argument(ptr(const_Yep32s), name="yPointer")
arg_z = Argument(ptr(Yep64s), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V32sV32s_V64s",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V32sV32s_V64s:
binop_VV_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep32u), name="xPointer")
arg_y = Argument(ptr(const_Yep32u), name="yPointer")
arg_z = Argument(ptr(Yep64u), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V32uV32u_V64u",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V32uV32u_V64u:
binop_VV_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep64s), name="xPointer")
arg_y = Argument(ptr(const_Yep64s), name="yPointer")
arg_z = Argument(ptr(Yep64s), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V64sV64s_V64s",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V64sV64s_V64s:
binop_VV_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep32f), name="xPointer")
arg_y = Argument(ptr(const_Yep32f), name="yPointer")
arg_z = Argument(ptr(Yep32f), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V32fV32f_V32f",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V32fV32f_V32f:
binop_VV_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep64f), name="xPointer")
arg_y = Argument(ptr(const_Yep64f), name="yPointer")
arg_z = Argument(ptr(Yep64f), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V64fV64f_V64f",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V64fV64f_V64f:
binop_VV_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
# =======================================================================
# =======================================================================
# ADD SCALAR TO VECTOR
# =======================================================================
# =======================================================================
arg_x = Argument(ptr(const_Yep8s), name="xPointer")
arg_y = Argument(Yep8s, name="y")
arg_z = Argument(ptr(Yep8s), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V8sS8s_V8s",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V8sS8s_V8s:
binop_VS_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep8s), name="xPointer")
arg_y = Argument(Yep8s, name="y")
arg_z = Argument(ptr(Yep16s), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V8sS8s_V16s",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V8sS8s_V16s:
binop_VS_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep8u), name="xPointer")
arg_y = Argument(Yep8u, name="y")
arg_z = Argument(ptr(Yep16u), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V8uS8u_V16u",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V8uS8u_V16u:
binop_VS_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep16s), name="xPointer")
arg_y = Argument(Yep16s, name="y")
arg_z = Argument(ptr(Yep16s), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V16sS16s_V16s",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V16sS16s_V16s:
binop_VS_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep16s), name="xPointer")
arg_y = Argument(Yep16s, name="y")
arg_z = Argument(ptr(Yep32s), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V16sS16s_V32s",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V16sS16s_V32s:
binop_VS_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep16u), name="xPointer")
arg_y = Argument(Yep16u, name="y")
arg_z = Argument(ptr(Yep32u), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V16uS16u_V32u",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V16uS16u_V32u:
binop_VS_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep32s), name="xPointer")
arg_y = Argument(Yep32s, name="y")
arg_z = Argument(ptr(Yep32s), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V32sS32s_V32s",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V32sS32s_V32s:
binop_VS_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep32s), name="xPointer")
arg_y = Argument(Yep32s, name="y")
arg_z = Argument(ptr(Yep64s), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V32sS32s_V64s",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V32sS32s_V64s:
binop_VS_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep32u), name="xPointer")
arg_y = Argument(Yep32u, name="y")
arg_z = Argument(ptr(Yep64u), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V32uS32u_V64u",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V32uS32u_V64u:
binop_VS_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep64s), name="xPointer")
arg_y = Argument(Yep64s, name="y")
arg_z = Argument(ptr(Yep64s), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V64sS64s_V64s",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V64sS64s_V64s:
binop_VS_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep32f), name="xPointer")
arg_y = Argument(Yep32f, name="y")
arg_z = Argument(ptr(Yep32f), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V32fS32f_V32f",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V32fS32f_V32f:
binop_VS_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
arg_x = Argument(ptr(const_Yep64f), name="xPointer")
arg_y = Argument(Yep64f, name="y")
arg_z = Argument(ptr(Yep64f), name="zPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_V64fS64f_V64f",
(arg_x, arg_y, arg_z, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_V64fS64f_V64f:
binop_VS_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")
# =======================================================================
# =======================================================================
# ADD VECTOR TO IMMEDIATE VECTOR
# =======================================================================
# =======================================================================
arg_x = Argument(ptr(Yep8s), name="xPointer")
arg_y = Argument(ptr(const_Yep8s), name="yPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_IV8sV8s_IV8s",
(arg_x, arg_y, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_IV8sV8s_IV8s:
binop_IVV_IV(arg_x, arg_y, arg_n, "add", "AVX2")
arg_x = Argument(ptr(Yep16s), name="xPointer")
arg_y = Argument(ptr(const_Yep16s), name="yPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_IV16sV16s_IV16s",
(arg_x, arg_y, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_IV16sV16s_IV16s:
binop_IVV_IV(arg_x, arg_y, arg_n, "add", "AVX2")
arg_x = Argument(ptr(Yep32s), name="xPointer")
arg_y = Argument(ptr(const_Yep32s), name="yPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_IV32sV32s_IV32s",
(arg_x, arg_y, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_IV32sV32s_IV32s:
binop_IVV_IV(arg_x, arg_y, arg_n, "add", "AVX2")
arg_x = Argument(ptr(Yep64s), name="xPointer")
arg_y = Argument(ptr(const_Yep64s), name="yPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_IV64sV64s_IV64s",
(arg_x, arg_y, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_IV64sV64s_IV64s:
binop_IVV_IV(arg_x, arg_y, arg_n, "add", "AVX2")
arg_x = Argument(ptr(Yep32f), name="xPointer")
arg_y = Argument(ptr(const_Yep32f), name="yPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_IV32fV32f_IV32f",
(arg_x, arg_y, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_IV32fV32f_IV32f:
binop_IVV_IV(arg_x, arg_y, arg_n, "add", "AVX2")
arg_x = Argument(ptr(Yep64f), name="xPointer")
arg_y = Argument(ptr(const_Yep64f), name="yPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_IV64fV64f_IV64f",
(arg_x, arg_y, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_IV64fV64f_IV64f:
binop_IVV_IV(arg_x, arg_y, arg_n, "add", "AVX2")
# =======================================================================
# =======================================================================
# ADD SCALAR TO IMMEDIATE VECTOR
# =======================================================================
# =======================================================================
arg_x = Argument(ptr(Yep8s), name="xPointer")
arg_y = Argument(Yep8s, name="yPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_IV8sS8s_IV8s",
(arg_x, arg_y, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_IV8sS8s_IV8s:
add_IVS_IV_generic(arg_x, arg_y, arg_n, "avx")
arg_x = Argument(ptr(Yep16s), name="xPointer")
arg_y = Argument(Yep16s, name="yPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_IV16sS16s_IV16s",
(arg_x, arg_y, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_IV16sS16s_IV16s:
add_IVS_IV_generic(arg_x, arg_y, arg_n, "avx")
arg_x = Argument(ptr(Yep32s), name="xPointer")
arg_y = Argument(Yep32s, name="yPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_IV32sS32s_IV32s",
(arg_x, arg_y, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_IV32sS32s_IV32s:
add_IVS_IV_generic(arg_x, arg_y, arg_n, "avx")
arg_x = Argument(ptr(Yep64s), name="xPointer")
arg_y = Argument(Yep64s, name="yPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_IV64sS64s_IV64s",
(arg_x, arg_y, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_IV64sS64s_IV64s:
add_IVS_IV_generic(arg_x, arg_y, arg_n, "avx")
arg_x = Argument(ptr(Yep32f), name="xPointer")
arg_y = Argument(Yep32f, name="yPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_IV32fS32f_IV32f",
(arg_x, arg_y, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_IV32fS32f_IV32f:
add_IVS_IV_generic(arg_x, arg_y, arg_n, "avx")
arg_x = Argument(ptr(Yep64f), name="xPointer")
arg_y = Argument(Yep64f, name="yPointer")
arg_n = Argument(YepSize, name="length")
with Function("yepCore_Add_IV64fS64f_IV64f",
(arg_x, arg_y, arg_n),
YepStatus, target=uarch.haswell + isa.avx2) as Add_IV64fS64f_IV64f:
add_IVS_IV_generic(arg_x, arg_y, arg_n, "avx")
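
# The blocks above differ only in their type signatures, so the same kernels
# could be emitted from a table instead of by hand. A hedged sketch of that
# idea, reusing only names already imported in this file (the `vv_specs`
# table itself is hypothetical and abbreviated):
#
#     vv_specs = [
#         (const_Yep8s, Yep8s, "V8sV8s_V8s"),
#         (const_Yep8s, Yep16s, "V8sV8s_V16s"),
#         # ... one row per input/output type pair ...
#     ]
#     for in_type, out_type, suffix in vv_specs:
#         arg_x = Argument(ptr(in_type), name="xPointer")
#         arg_y = Argument(ptr(in_type), name="yPointer")
#         arg_z = Argument(ptr(out_type), name="zPointer")
#         arg_n = Argument(YepSize, name="length")
#         with Function("yepCore_Add_" + suffix,
#                       (arg_x, arg_y, arg_z, arg_n),
#                       YepStatus, target=uarch.haswell + isa.avx2):
#             binop_VV_V(arg_x, arg_y, arg_z, arg_n, "add", "AVX2")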

hexsha: c4c33f7af14428514aa054ee5db133f96cdac371 | size: 217 | ext: py | lang: Python
path: blog/admin.py | repo: FarhadurFahim/HomeLand @ 5236f46fd99bcf97cddf7cf222836dff9ed9c305
licenses: ["MIT"] | stars: null | issues: null | forks: null

from django.contrib import admin

from .models import Post
# from blog.models import UserProfile
from .models import UserProfile

# Register your models here.
admin.site.register(UserProfile)
admin.site.register(Post)
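
# Equivalent decorator-based registration (illustrative alternative, not in
# the original file; it would replace the bare admin.site.register(Post)
# call above, since a model may only be registered once):
#
#     @admin.register(Post)
#     class PostAdmin(admin.ModelAdmin):
#         list_display = ('id',)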

hexsha: c4c8a02c8583b668071cf0f1e311b28879d853cc | size: 37 | ext: py | lang: Python
path: m4/trainers/__init__.py | repo: autoliuweijie/m3py @ a4c6209d7266cf226699f73bb6ebb52146856ac1 (the forks record lists the repo as autoliuweijie/m4)
licenses: ["MIT"] | stars: 1 (2021-07-22T04:58:53.000Z) | issues: 2 (2022-01-30T07:26:48.000Z to 2022-03-16T10:04:06.000Z) | forks: null

from .trainer import FineTuneTrainer

hexsha: f2096ca17d2f8c906065ecc571b68d51a6478477 | size: 204 | ext: py | lang: Python
path: brew/selection/pruning/base.py | repo: va26/brew @ 3531560df785fa44b39094f3ffad83d3b795b15b
licenses: ["MIT"] | stars: 344 (2015-10-02T19:35:15.000Z to 2022-03-23T07:18:50.000Z) | issues: 19 (2015-03-24T01:26:51.000Z to 2018-07-26T20:11:32.000Z) | forks: 93 (2015-03-13T18:23:51.000Z to 2021-12-11T11:14:38.000Z)

class Prunner(object):
    def __init__(self):
        pass

    def fit(self, ensemble, X, y):
        # Store the ensemble so that get() below has something to slice;
        # the original body returned self without keeping a reference,
        # which made get() fail with an AttributeError.
        self.ensemble = ensemble
        return self

    def get(self, p=0.1):
        # Keep the first fraction p of the ensemble's classifiers.
        return self.ensemble[:int(p * len(self.ensemble))]
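
# Minimal usage sketch (not from the original file): any indexable pool of
# classifiers works, since Prunner only slices it; X and y are accepted but
# unused by this base class.
if __name__ == '__main__':
    pool = ['clf%d' % i for i in range(10)]
    prunner = Prunner().fit(pool, X=None, y=None)
    print(prunner.get(p=0.3))  # first 30% of the pool: ['clf0', 'clf1', 'clf2']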

hexsha: 1ee95eb48d53e49c3ff1f805687705cc394a170e | size: 29,006 | ext: py | lang: Python
path: slac_tf/slac/agents/slac/ablation_model_distribution_network.py | repo: alfaevc/vlRLstack @ 6413c9bbed188b1b2365c2d96e76c3764755b9aa
licenses: ["MIT"] | stars: 147 (2019-07-02T14:26:04.000Z to 2022-03-12T11:37:51.000Z)
issues/forks record: slac/agents/slac/ablation_model_distribution_network.py | repo: xinleipan/slac @ 58fb526057afb69be626552932fbddae09ae71f8
issues: 3 (2019-09-18T02:02:20.000Z to 2020-02-14T02:35:56.000Z) | forks: 35 (2019-08-20T12:39:55.000Z to 2022-03-28T02:52:44.000Z)

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import gin
import numpy as np
from slac.agents.slac.model_distribution_network import Bernoulli
from slac.agents.slac.model_distribution_network import Compressor
from slac.agents.slac.model_distribution_network import ConstantMultivariateNormalDiag
from slac.agents.slac.model_distribution_network import Decoder
from slac.agents.slac.model_distribution_network import MultivariateNormalDiag
from slac.agents.slac.model_distribution_network import Normal
from slac.utils import nest_utils
import tensorflow as tf
import tensorflow_probability as tfp
from tf_agents.trajectories import time_step as ts
tfd = tfp.distributions


@gin.configurable
class SlacModelDistributionNetwork(tf.Module):
"""Equivalent to model_distribution_network.ModelDistributionNetwork.
We keep the implementations separate to minimize cluttering the implementation
of the main method.
"""
def __init__(self,
observation_spec,
action_spec,
latent1_first_prior_distribution_ctor=ConstantMultivariateNormalDiag,
latent1_prior_distribution_ctor=MultivariateNormalDiag,
latent1_posterior_distribution_ctor=MultivariateNormalDiag,
latent2_prior_distribution_ctor=MultivariateNormalDiag,
latent2_posterior_distribution_ctor=MultivariateNormalDiag,
base_depth=32,
latent1_size=32,
latent2_size=256,
kl_analytic=True,
skip_first_kl=False,
sequential_latent1_prior=True,
sequential_latent2_prior=True,
sequential_latent1_posterior=True,
sequential_latent2_posterior=True,
model_reward=False,
model_discount=False,
decoder_stddev=np.sqrt(0.1, dtype=np.float32),
reward_stddev=None,
name=None):
super(SlacModelDistributionNetwork, self).__init__(name=name)
self.observation_spec = observation_spec
self.action_spec = action_spec
self.base_depth = base_depth
self.latent1_size = latent1_size
self.latent2_size = latent2_size
self.kl_analytic = kl_analytic
self.skip_first_kl = skip_first_kl
self.model_reward = model_reward
self.model_discount = model_discount
# p(z_1^1)
self.latent1_first_prior = latent1_first_prior_distribution_ctor(latent1_size)
# p(z_1^2 | z_1^1)
self.latent2_first_prior = latent2_prior_distribution_ctor(8 * base_depth, latent2_size)
if sequential_latent1_prior:
# p(z_{t+1}^1 | z_t^2, a_t)
self.latent1_prior = latent1_prior_distribution_ctor(8 * base_depth, latent1_size)
else:
# p(z_{t+1}^1)
self.latent1_prior = lambda prev_latent, prev_action: self.latent1_first_prior(prev_latent[..., 0]) # prev_latent is only used to determine the batch shape
if sequential_latent2_prior:
# p(z_{t+1}^2 | z_{t+1}^1, z_t^2, a_t)
self.latent2_prior = latent2_prior_distribution_ctor(8 * base_depth, latent2_size)
else:
# p(z_{t+1}^2 | z_{t+1}^1)
self.latent2_prior = lambda latent1, prev_latent2, prev_action: self.latent2_first_prior(latent1)
# q(z_1^1 | x_1)
self.latent1_first_posterior = latent1_posterior_distribution_ctor(8 * base_depth, latent1_size)
# q(z_1^2 | z_1^1) = p(z_1^2 | z_1^1)
if latent2_posterior_distribution_ctor == latent2_prior_distribution_ctor:
self.latent2_first_posterior = self.latent2_first_prior # share
else:
self.latent2_first_posterior = latent2_posterior_distribution_ctor(8 * base_depth, latent2_size)
if sequential_latent1_posterior:
# q(z_{t+1}^1 | x_{t+1}, z_t^2, a_t)
self.latent1_posterior = latent1_posterior_distribution_ctor(8 * base_depth, latent1_size)
else:
# q(z_{t+1}^1 | x_{t+1})
self.latent1_posterior = lambda feature, prev_latent2, prev_action: self.latent1_first_posterior(feature)
if sequential_latent2_posterior:
# q(z_{t+1}^2 | z_{t+1}^1, z_t^2, a_t) = p(z_{t+1}^2 | z_{t+1}^1, z_t^2, a_t)
if latent2_posterior_distribution_ctor == latent2_prior_distribution_ctor:
self.latent2_posterior = self.latent2_prior
else:
self.latent2_posterior = latent2_posterior_distribution_ctor(8 * base_depth, latent2_size)
else:
# q(z_{t+1}^2 | z_{t+1}^1) = p(z_{t+1}^2 | z_{t+1}^1)
self.latent2_posterior = lambda latent1, prev_latent2, prev_action: self.latent2_first_posterior(latent1)
# compresses x_t into a vector
self.compressor = Compressor(base_depth, 8 * base_depth)
# p(x_t | z_t^1, z_t^2)
self.decoder = Decoder(base_depth, scale=decoder_stddev)
if self.model_reward:
# p(r_t | z_t^1, z_t^2, a_t, z_{t+1}^1, z_{t+1}^2)
self.reward_predictor = Normal(8 * base_depth, scale=reward_stddev)
else:
self.reward_predictor = None
if self.model_discount:
# p(d_t | z_{t+1}^1, z_{t+1}^2)
self.discount_predictor = Bernoulli(8 * base_depth)
else:
self.discount_predictor = None
@property
def state_size(self):
return self.latent1_size + self.latent2_size
def compute_loss(self, images, actions, step_types, rewards=None, discounts=None, latent_posterior_samples_and_dists=None):
sequence_length = step_types.shape[1].value - 1
if latent_posterior_samples_and_dists is None:
latent_posterior_samples_and_dists = self.sample_posterior(images, actions, step_types)
(latent1_posterior_samples, latent2_posterior_samples), (latent1_posterior_dists, latent2_posterior_dists) = (
latent_posterior_samples_and_dists)
(latent1_prior_samples, latent2_prior_samples), _ = self.sample_prior_or_posterior(actions, step_types) # for visualization
(latent1_conditional_prior_samples, latent2_conditional_prior_samples), _ = self.sample_prior_or_posterior(
actions, step_types, images=images[:, :1]) # for visualization. condition on first image only
def where_and_concat(reset_masks, first_prior_tensors, after_first_prior_tensors):
after_first_prior_tensors = tf.where(reset_masks[:, 1:], first_prior_tensors[:, 1:], after_first_prior_tensors)
prior_tensors = tf.concat([first_prior_tensors[:, 0:1], after_first_prior_tensors], axis=1)
return prior_tensors
reset_masks = tf.concat([tf.ones_like(step_types[:, 0:1], dtype=tf.bool),
tf.equal(step_types[:, 1:], ts.StepType.FIRST)], axis=1)
latent1_reset_masks = tf.tile(reset_masks[:, :, None], [1, 1, self.latent1_size])
latent1_first_prior_dists = self.latent1_first_prior(step_types)
# these distributions start at t=1 and the inputs are from t-1
latent1_after_first_prior_dists = self.latent1_prior(
latent2_posterior_samples[:, :sequence_length],
actions[:, :sequence_length])
latent1_prior_dists = nest_utils.map_distribution_structure(
functools.partial(where_and_concat, latent1_reset_masks),
latent1_first_prior_dists,
latent1_after_first_prior_dists)
latent2_reset_masks = tf.tile(reset_masks[:, :, None], [1, 1, self.latent2_size])
latent2_first_prior_dists = self.latent2_first_prior(latent1_posterior_samples)
# these distributions start at t=1 and the last 2 inputs are from t-1
latent2_after_first_prior_dists = self.latent2_prior(
latent1_posterior_samples[:, 1:sequence_length+1],
latent2_posterior_samples[:, :sequence_length],
actions[:, :sequence_length])
latent2_prior_dists = nest_utils.map_distribution_structure(
functools.partial(where_and_concat, latent2_reset_masks),
latent2_first_prior_dists,
latent2_after_first_prior_dists)
outputs = {}
if self.kl_analytic:
latent1_kl_divergences = tfd.kl_divergence(latent1_posterior_dists, latent1_prior_dists)
else:
latent1_kl_divergences = (latent1_posterior_dists.log_prob(latent1_posterior_samples)
- latent1_prior_dists.log_prob(latent1_posterior_samples))
if self.skip_first_kl:
latent1_kl_divergences = latent1_kl_divergences[:, 1:]
latent1_kl_divergences = tf.reduce_sum(latent1_kl_divergences, axis=1)
outputs.update({
'latent1_kl_divergence': tf.reduce_mean(latent1_kl_divergences),
})
if self.latent2_posterior == self.latent2_prior:
latent2_kl_divergences = 0.0
else:
if self.kl_analytic:
latent2_kl_divergences = tfd.kl_divergence(latent2_posterior_dists, latent2_prior_dists)
else:
latent2_kl_divergences = (latent2_posterior_dists.log_prob(latent2_posterior_samples)
- latent2_prior_dists.log_prob(latent2_posterior_samples))
if self.skip_first_kl:
latent2_kl_divergences = latent2_kl_divergences[:, 1:]
latent2_kl_divergences = tf.reduce_sum(latent2_kl_divergences, axis=1)
outputs.update({
'latent2_kl_divergence': tf.reduce_mean(latent2_kl_divergences),
})
outputs.update({
'kl_divergence': tf.reduce_mean(latent1_kl_divergences + latent2_kl_divergences),
})
likelihood_dists = self.decoder(latent1_posterior_samples, latent2_posterior_samples)
likelihood_log_probs = likelihood_dists.log_prob(images)
likelihood_log_probs = tf.reduce_sum(likelihood_log_probs, axis=1)
reconstruction_error = tf.reduce_sum(tf.square(images - likelihood_dists.distribution.loc),
axis=list(range(-len(likelihood_dists.event_shape), 0)))
reconstruction_error = tf.reduce_sum(reconstruction_error, axis=1)
outputs.update({
'log_likelihood': tf.reduce_mean(likelihood_log_probs),
'reconstruction_error': tf.reduce_mean(reconstruction_error),
})
# summed over the time dimension
elbo = likelihood_log_probs - latent1_kl_divergences - latent2_kl_divergences
if self.model_reward:
reward_dists = self.reward_predictor(
latent1_posterior_samples[:, :sequence_length],
latent2_posterior_samples[:, :sequence_length],
actions[:, :sequence_length],
latent1_posterior_samples[:, 1:sequence_length + 1],
latent2_posterior_samples[:, 1:sequence_length + 1])
reward_valid_mask = tf.cast(tf.not_equal(step_types[:, :sequence_length], ts.StepType.LAST), tf.float32)
reward_log_probs = reward_dists.log_prob(rewards[:, :sequence_length])
reward_log_probs = tf.reduce_sum(reward_log_probs * reward_valid_mask, axis=1)
reward_reconstruction_error = tf.square(rewards[:, :sequence_length] - reward_dists.loc)
reward_reconstruction_error = tf.reduce_sum(reward_reconstruction_error * reward_valid_mask, axis=1)
outputs.update({
'reward_log_likelihood': tf.reduce_mean(reward_log_probs),
'reward_reconstruction_error': tf.reduce_mean(reward_reconstruction_error),
})
elbo += reward_log_probs
if self.model_discount:
discount_dists = self.discount_predictor(
latent1_posterior_samples[:, 1:sequence_length + 1],
latent2_posterior_samples[:, 1:sequence_length + 1])
discount_log_probs = discount_dists.log_prob(discounts[:, :sequence_length])
discount_log_probs = tf.reduce_sum(discount_log_probs, axis=1)
discount_accuracy = tf.cast(
tf.equal(tf.cast(discount_dists.mode(), tf.float32), discounts[:, :sequence_length]), tf.float32)
discount_accuracy = tf.reduce_sum(discount_accuracy, axis=1)
outputs.update({
'discount_log_likelihood': tf.reduce_mean(discount_log_probs),
'discount_accuracy': tf.reduce_mean(discount_accuracy),
})
elbo += discount_log_probs
# average over the batch dimension
loss = -tf.reduce_mean(elbo)
posterior_images = likelihood_dists.mean()
prior_images = self.decoder(latent1_prior_samples, latent2_prior_samples).mean()
conditional_prior_images = self.decoder(latent1_conditional_prior_samples, latent2_conditional_prior_samples).mean()
outputs.update({
'elbo': tf.reduce_mean(elbo),
'images': images,
'posterior_images': posterior_images,
'prior_images': prior_images,
'conditional_prior_images': conditional_prior_images,
})
return loss, outputs
def sample_prior_or_posterior(self, actions, step_types=None, images=None):
"""Samples from the prior, except for the first time steps in which conditioning images are given."""
if step_types is None:
batch_size = tf.shape(actions)[0]
sequence_length = actions.shape[1].value # should be statically defined
step_types = tf.fill(
[batch_size, sequence_length + 1], ts.StepType.MID)
else:
sequence_length = step_types.shape[1].value - 1
actions = actions[:, :sequence_length]
if images is not None:
features = self.compressor(images)
# swap batch and time axes
actions = tf.transpose(actions, [1, 0, 2])
step_types = tf.transpose(step_types, [1, 0])
if images is not None:
features = tf.transpose(features, [1, 0, 2])
latent1_dists = []
latent1_samples = []
latent2_dists = []
latent2_samples = []
for t in range(sequence_length + 1):
is_conditional = images is not None and (t < images.shape[1].value)
if t == 0:
if is_conditional:
latent1_dist = self.latent1_first_posterior(features[t])
else:
latent1_dist = self.latent1_first_prior(step_types[t]) # step_types is only used to infer batch_size
latent1_sample = latent1_dist.sample()
if is_conditional:
latent2_dist = self.latent2_first_posterior(latent1_sample)
else:
latent2_dist = self.latent2_first_prior(latent1_sample)
latent2_sample = latent2_dist.sample()
else:
reset_mask = tf.equal(step_types[t], ts.StepType.FIRST)
if is_conditional:
latent1_first_dist = self.latent1_first_posterior(features[t])
latent1_dist = self.latent1_posterior(features[t], latent2_samples[t-1], actions[t-1])
else:
latent1_first_dist = self.latent1_first_prior(step_types[t])
latent1_dist = self.latent1_prior(latent2_samples[t-1], actions[t-1])
latent1_dist = nest_utils.map_distribution_structure(
functools.partial(tf.where, reset_mask), latent1_first_dist, latent1_dist)
latent1_sample = latent1_dist.sample()
if is_conditional:
latent2_first_dist = self.latent2_first_posterior(latent1_sample)
latent2_dist = self.latent2_posterior(latent1_sample, latent2_samples[t-1], actions[t-1])
else:
latent2_first_dist = self.latent2_first_prior(latent1_sample)
latent2_dist = self.latent2_prior(latent1_sample, latent2_samples[t-1], actions[t-1])
latent2_dist = nest_utils.map_distribution_structure(
functools.partial(tf.where, reset_mask), latent2_first_dist, latent2_dist)
latent2_sample = latent2_dist.sample()
latent1_dists.append(latent1_dist)
latent1_samples.append(latent1_sample)
latent2_dists.append(latent2_dist)
latent2_samples.append(latent2_sample)
    try:
      latent1_dists = nest_utils.map_distribution_structure(lambda *x: tf.stack(x, axis=1), *latent1_dists)
    except Exception:  # a bare except here would also swallow KeyboardInterrupt/SystemExit
      latent1_dists = None
    latent1_samples = tf.stack(latent1_samples, axis=1)
    try:
      latent2_dists = nest_utils.map_distribution_structure(lambda *x: tf.stack(x, axis=1), *latent2_dists)
    except Exception:
      latent2_dists = None
latent2_samples = tf.stack(latent2_samples, axis=1)
return (latent1_samples, latent2_samples), (latent1_dists, latent2_dists)
def sample_posterior(self, images, actions, step_types, features=None):
sequence_length = step_types.shape[1].value - 1
actions = actions[:, :sequence_length]
if features is None:
features = self.compressor(images)
# swap batch and time axes
features = tf.transpose(features, [1, 0, 2])
actions = tf.transpose(actions, [1, 0, 2])
step_types = tf.transpose(step_types, [1, 0])
latent1_dists = []
latent1_samples = []
latent2_dists = []
latent2_samples = []
for t in range(sequence_length + 1):
if t == 0:
latent1_dist = self.latent1_first_posterior(features[t])
latent1_sample = latent1_dist.sample()
latent2_dist = self.latent2_first_posterior(latent1_sample)
latent2_sample = latent2_dist.sample()
else:
prev_latent2_sample = latent2_samples[t-1]
reset_mask = tf.equal(step_types[t], ts.StepType.FIRST)
latent1_first_dist = self.latent1_first_posterior(features[t])
latent1_dist = self.latent1_posterior(features[t], prev_latent2_sample, actions[t-1])
latent1_dist = nest_utils.map_distribution_structure(
functools.partial(tf.where, reset_mask), latent1_first_dist, latent1_dist)
latent1_sample = latent1_dist.sample()
latent2_first_dist = self.latent2_first_posterior(latent1_sample)
latent2_dist = self.latent2_posterior(latent1_sample, prev_latent2_sample, actions[t-1])
latent2_dist = nest_utils.map_distribution_structure(
functools.partial(tf.where, reset_mask), latent2_first_dist, latent2_dist)
latent2_sample = latent2_dist.sample()
latent1_dists.append(latent1_dist)
latent1_samples.append(latent1_sample)
latent2_dists.append(latent2_dist)
latent2_samples.append(latent2_sample)
latent1_dists = nest_utils.map_distribution_structure(lambda *x: tf.stack(x, axis=1), *latent1_dists)
latent1_samples = tf.stack(latent1_samples, axis=1)
latent2_dists = nest_utils.map_distribution_structure(lambda *x: tf.stack(x, axis=1), *latent2_dists)
latent2_samples = tf.stack(latent2_samples, axis=1)
return (latent1_samples, latent2_samples), (latent1_dists, latent2_dists)


@gin.configurable
class SimpleModelDistributionNetwork(tf.Module):
def __init__(self,
observation_spec,
action_spec,
base_depth=32,
latent_size=256,
kl_analytic=True,
sequential_latent_prior=True,
sequential_latent_posterior=True,
model_reward=False,
model_discount=False,
decoder_stddev=np.sqrt(0.1, dtype=np.float32),
reward_stddev=None,
name=None):
super(SimpleModelDistributionNetwork, self).__init__(name=name)
self.observation_spec = observation_spec
self.action_spec = action_spec
self.base_depth = base_depth
self.latent_size = latent_size
self.kl_analytic = kl_analytic
self.model_reward = model_reward
self.model_discount = model_discount
# p(z_1)
self.latent_first_prior = ConstantMultivariateNormalDiag(latent_size)
if sequential_latent_prior:
# p(z_{t+1} | z_t, a_t)
self.latent_prior = MultivariateNormalDiag(8 * base_depth, latent_size)
else:
# p(z_{t+1})
self.latent_prior = lambda prev_latent, prev_action: self.latent_first_prior(prev_latent[..., 0]) # prev_latent is only used to determine the batch shape
# q(z_1 | x_1)
self.latent_first_posterior = MultivariateNormalDiag(8 * base_depth, latent_size)
if sequential_latent_posterior:
# q(z_{t+1} | x_{t+1}, z_t, a_t)
self.latent_posterior = MultivariateNormalDiag(8 * base_depth, latent_size)
else:
# q(z_{t+1} | x_{t+1})
self.latent_posterior = lambda feature, prev_latent, prev_action: self.latent_first_posterior(feature)
# compresses x_t into a vector
self.compressor = Compressor(base_depth, 8 * base_depth)
# p(x_t | z_t)
self.decoder = Decoder(base_depth, scale=decoder_stddev)
if self.model_reward:
# p(r_t | z_t, a_t, z_{t+1})
self.reward_predictor = Normal(8 * base_depth, scale=reward_stddev)
else:
self.reward_predictor = None
if self.model_discount:
# p(d_t | z_{t+1})
self.discount_predictor = Bernoulli(8 * base_depth)
else:
self.discount_predictor = None
@property
def state_size(self):
return self.latent_size
def compute_loss(self, images, actions, step_types, rewards=None, discounts=None, latent_posterior_samples_and_dists=None):
sequence_length = step_types.shape[1].value - 1
if latent_posterior_samples_and_dists is None:
latent_posterior_samples_and_dists = self.sample_posterior(images, actions, step_types)
latent_posterior_samples, latent_posterior_dists = latent_posterior_samples_and_dists
latent_prior_samples, _ = self.sample_prior_or_posterior(actions, step_types) # for visualization
latent_conditional_prior_samples, _ = self.sample_prior_or_posterior(
actions, step_types, images=images[:, :1]) # for visualization. condition on first image only
def where_and_concat(reset_masks, first_prior_tensors, after_first_prior_tensors):
after_first_prior_tensors = tf.where(reset_masks[:, 1:], first_prior_tensors[:, 1:], after_first_prior_tensors)
prior_tensors = tf.concat([first_prior_tensors[:, 0:1], after_first_prior_tensors], axis=1)
return prior_tensors
reset_masks = tf.concat([tf.ones_like(step_types[:, 0:1], dtype=tf.bool),
tf.equal(step_types[:, 1:], ts.StepType.FIRST)], axis=1)
latent_reset_masks = tf.tile(reset_masks[:, :, None], [1, 1, self.latent_size])
latent_first_prior_dists = self.latent_first_prior(step_types)
# these distributions start at t=1 and the inputs are from t-1
latent_after_first_prior_dists = self.latent_prior(
latent_posterior_samples[:, :sequence_length], actions[:, :sequence_length])
latent_prior_dists = nest_utils.map_distribution_structure(
functools.partial(where_and_concat, latent_reset_masks),
latent_first_prior_dists,
latent_after_first_prior_dists)
outputs = {}
if self.kl_analytic:
latent_kl_divergences = tfd.kl_divergence(latent_posterior_dists, latent_prior_dists)
else:
latent_kl_divergences = (latent_posterior_dists.log_prob(latent_posterior_samples)
- latent_prior_dists.log_prob(latent_posterior_samples))
latent_kl_divergences = tf.reduce_sum(latent_kl_divergences, axis=1)
outputs.update({
'latent_kl_divergence': tf.reduce_mean(latent_kl_divergences),
})
outputs.update({
'kl_divergence': tf.reduce_mean(latent_kl_divergences),
})
likelihood_dists = self.decoder(latent_posterior_samples)
likelihood_log_probs = likelihood_dists.log_prob(images)
likelihood_log_probs = tf.reduce_sum(likelihood_log_probs, axis=1)
reconstruction_error = tf.reduce_sum(tf.square(images - likelihood_dists.distribution.loc),
axis=list(range(-len(likelihood_dists.event_shape), 0)))
reconstruction_error = tf.reduce_sum(reconstruction_error, axis=1)
outputs.update({
'log_likelihood': tf.reduce_mean(likelihood_log_probs),
'reconstruction_error': tf.reduce_mean(reconstruction_error),
})
# summed over the time dimension
elbo = likelihood_log_probs - latent_kl_divergences
if self.model_reward:
reward_dists = self.reward_predictor(
latent_posterior_samples[:, :sequence_length],
actions[:, :sequence_length],
latent_posterior_samples[:, 1:sequence_length + 1])
reward_valid_mask = tf.cast(tf.not_equal(step_types[:, :sequence_length], ts.StepType.LAST), tf.float32)
reward_log_probs = reward_dists.log_prob(rewards[:, :sequence_length])
reward_log_probs = tf.reduce_sum(reward_log_probs * reward_valid_mask, axis=1)
reward_reconstruction_error = tf.square(rewards[:, :sequence_length] - reward_dists.loc)
reward_reconstruction_error = tf.reduce_sum(reward_reconstruction_error * reward_valid_mask, axis=1)
outputs.update({
'reward_log_likelihood': tf.reduce_mean(reward_log_probs),
'reward_reconstruction_error': tf.reduce_mean(reward_reconstruction_error),
})
elbo += reward_log_probs
if self.model_discount:
discount_dists = self.discount_predictor(
latent_posterior_samples[:, 1:sequence_length + 1])
discount_log_probs = discount_dists.log_prob(discounts[:, :sequence_length])
discount_log_probs = tf.reduce_sum(discount_log_probs, axis=1)
discount_accuracy = tf.cast(
tf.equal(tf.cast(discount_dists.mode(), tf.float32), discounts[:, :sequence_length]), tf.float32)
discount_accuracy = tf.reduce_sum(discount_accuracy, axis=1)
outputs.update({
'discount_log_likelihood': tf.reduce_mean(discount_log_probs),
'discount_accuracy': tf.reduce_mean(discount_accuracy),
})
elbo += discount_log_probs
# average over the batch dimension
loss = -tf.reduce_mean(elbo)
posterior_images = likelihood_dists.mean()
prior_images = self.decoder(latent_prior_samples).mean()
conditional_prior_images = self.decoder(latent_conditional_prior_samples).mean()
outputs.update({
'elbo': tf.reduce_mean(elbo),
'images': images,
'posterior_images': posterior_images,
'prior_images': prior_images,
'conditional_prior_images': conditional_prior_images,
})
return loss, outputs
def sample_prior_or_posterior(self, actions, step_types=None, images=None):
"""Samples from the prior, except for the first time steps in which conditioning images are given."""
if step_types is None:
batch_size = tf.shape(actions)[0]
sequence_length = actions.shape[1].value # should be statically defined
step_types = tf.fill(
[batch_size, sequence_length + 1], ts.StepType.MID)
else:
sequence_length = step_types.shape[1].value - 1
actions = actions[:, :sequence_length]
if images is not None:
features = self.compressor(images)
# swap batch and time axes
actions = tf.transpose(actions, [1, 0, 2])
step_types = tf.transpose(step_types, [1, 0])
if images is not None:
features = tf.transpose(features, [1, 0, 2])
latent_dists = []
latent_samples = []
for t in range(sequence_length + 1):
is_conditional = images is not None and (t < images.shape[1].value)
if t == 0:
if is_conditional:
latent_dist = self.latent_first_posterior(features[t])
else:
latent_dist = self.latent_first_prior(step_types[t]) # step_types is only used to infer batch_size
latent_sample = latent_dist.sample()
else:
reset_mask = tf.equal(step_types[t], ts.StepType.FIRST)
if is_conditional:
latent_first_dist = self.latent_first_posterior(features[t])
latent_dist = self.latent_posterior(features[t], latent_samples[t-1], actions[t-1])
else:
latent_first_dist = self.latent_first_prior(step_types[t])
latent_dist = self.latent_prior(latent_samples[t-1], actions[t-1])
latent_dist = nest_utils.map_distribution_structure(
functools.partial(tf.where, reset_mask), latent_first_dist, latent_dist)
latent_sample = latent_dist.sample()
latent_dists.append(latent_dist)
latent_samples.append(latent_sample)
latent_dists = nest_utils.map_distribution_structure(lambda *x: tf.stack(x, axis=1), *latent_dists)
latent_samples = tf.stack(latent_samples, axis=1)
return latent_samples, latent_dists
def sample_posterior(self, images, actions, step_types, features=None):
sequence_length = step_types.shape[1].value - 1
actions = actions[:, :sequence_length]
if features is None:
features = self.compressor(images)
# swap batch and time axes
features = tf.transpose(features, [1, 0, 2])
actions = tf.transpose(actions, [1, 0, 2])
step_types = tf.transpose(step_types, [1, 0])
latent_dists = []
latent_samples = []
for t in range(sequence_length + 1):
if t == 0:
latent_dist = self.latent_first_posterior(features[t])
latent_sample = latent_dist.sample()
else:
reset_mask = tf.equal(step_types[t], ts.StepType.FIRST)
latent_first_dist = self.latent_first_posterior(features[t])
latent_dist = self.latent_posterior(features[t], latent_samples[t-1], actions[t-1])
latent_dist = nest_utils.map_distribution_structure(
functools.partial(tf.where, reset_mask), latent_first_dist, latent_dist)
latent_sample = latent_dist.sample()
latent_dists.append(latent_dist)
latent_samples.append(latent_sample)
latent_dists = nest_utils.map_distribution_structure(lambda *x: tf.stack(x, axis=1), *latent_dists)
latent_samples = tf.stack(latent_samples, axis=1)
return latent_samples, latent_dists
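
# A minimal usage sketch (hypothetical variable names, assuming a constructed
# model instance and TF1-style tensors shaped [batch, time, ...]):
#   latent_samples, latent_dists = model.sample_posterior(images, actions, step_types)
# latent_samples stacks one latent per time step along axis 1, giving
# [batch, sequence_length + 1, latent_size].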
hexsha 4806f2fb8ee152fb015ec64ee13378de9a018814 | size 41 | ext py | lang Python
src/tests/test_sanity.py @ DmytroKaminskiy/ltt (head d08df4d102e678651cd42928e2343733c3308d71) | licenses ["Apache-2.0"] | stars null | issues null | forks null
def test_sanity():
assert 200 == 200
hexsha 4809f10c72907ec78d4de831915f882936027b39 | size 41,959 | ext py | lang Python
tests/test_codebase/test_mmdet/test_mmdet_models.py @ zhiqwang/mmdeploy (head 997d111a6f4ca9624ab3b36717748e6ce002037d) | licenses ["Apache-2.0"] | stars null | issues null | forks null
# Copyright (c) OpenMMLab. All rights reserved.
import copy
import os
import random
from typing import Dict, List
import mmcv
import numpy as np
import pytest
import torch
from mmdeploy.codebase import import_codebase
from mmdeploy.utils import Backend, Codebase
from mmdeploy.utils.test import (WrapModel, check_backend, get_model_outputs,
get_rewrite_outputs)
import_codebase(Codebase.MMDET)
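# Seed every RNG source (Python, NumPy, torch CPU/CUDA) and pin cuDNN to its
# deterministic, non-benchmarking mode so randomly generated test inputs are
# reproducible across runs.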
def seed_everything(seed=1029):
random.seed(seed)
os.environ['PYTHONHASHSEED'] = str(seed)
np.random.seed(seed)
torch.manual_seed(seed)
if torch.cuda.is_available():
torch.cuda.manual_seed(seed)
torch.cuda.manual_seed_all(seed) # if you are using multi-GPU.
torch.backends.cudnn.benchmark = False
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.enabled = False
def convert_to_list(rewrite_output: Dict, output_names: List[str]) -> List:
"""Converts output from a dictionary to a list.
The new list will contain only those output values, whose names are in list
'output_names'.
"""
outputs = [
value for name, value in rewrite_output.items() if name in output_names
]
return outputs
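# For example, with hypothetical tensors d, l and a:
#   convert_to_list({'dets': d, 'labels': l, 'aux': a}, ['dets', 'labels'])
# returns [d, l], dropping outputs whose names are not requested.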
def get_anchor_head_model():
"""AnchorHead Config."""
test_cfg = mmcv.Config(
dict(
deploy_nms_pre=0,
min_bbox_size=0,
score_thr=0.05,
nms=dict(type='nms', iou_threshold=0.5),
max_per_img=100))
from mmdet.models.dense_heads import AnchorHead
model = AnchorHead(num_classes=4, in_channels=1, test_cfg=test_cfg)
model.requires_grad_(False)
return model
def get_ssd_head_model():
"""SSDHead Config."""
test_cfg = mmcv.Config(
dict(
nms_pre=1000,
nms=dict(type='nms', iou_threshold=0.45),
min_bbox_size=0,
score_thr=0.02,
max_per_img=200))
from mmdet.models import SSDHead
model = SSDHead(
in_channels=(96, 1280, 512, 256, 256, 128),
num_classes=4,
use_depthwise=True,
norm_cfg=dict(type='BN', eps=0.001, momentum=0.03),
act_cfg=dict(type='ReLU6'),
init_cfg=dict(type='Normal', layer='Conv2d', std=0.001),
anchor_generator=dict(
type='SSDAnchorGenerator',
scale_major=False,
strides=[16, 32, 64, 107, 160, 320],
ratios=[[2, 3], [2, 3], [2, 3], [2, 3], [2, 3], [2, 3]],
min_sizes=[48, 100, 150, 202, 253, 304],
max_sizes=[100, 150, 202, 253, 304, 320]),
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[.0, .0, .0, .0],
target_stds=[0.1, 0.1, 0.2, 0.2]),
test_cfg=test_cfg)
model.requires_grad_(False)
return model
def get_fcos_head_model():
"""FCOS Head Config."""
test_cfg = mmcv.Config(
dict(
deploy_nms_pre=0,
min_bbox_size=0,
score_thr=0.05,
nms=dict(type='nms', iou_threshold=0.5),
max_per_img=100))
from mmdet.models.dense_heads import FCOSHead
model = FCOSHead(num_classes=4, in_channels=1, test_cfg=test_cfg)
model.requires_grad_(False)
return model
def get_l2norm_forward_model():
"""L2Norm Neck Config."""
from mmdet.models.necks.ssd_neck import L2Norm
model = L2Norm(16)
model.requires_grad_(False)
return model
def get_rpn_head_model():
"""RPN Head Config."""
test_cfg = mmcv.Config(
dict(
deploy_nms_pre=0,
nms_pre=0,
max_per_img=100,
nms=dict(type='nms', iou_threshold=0.7),
min_bbox_size=0))
from mmdet.models.dense_heads import RPNHead
model = RPNHead(in_channels=1, test_cfg=test_cfg)
model.requires_grad_(False)
return model
def get_single_roi_extractor():
"""SingleRoIExtractor Config."""
from mmdet.models.roi_heads import SingleRoIExtractor
roi_layer = dict(type='RoIAlign', output_size=7, sampling_ratio=2)
out_channels = 1
featmap_strides = [4, 8, 16, 32]
model = SingleRoIExtractor(roi_layer, out_channels, featmap_strides).eval()
return model
@pytest.mark.parametrize('backend_type', [Backend.ONNXRUNTIME])
def test_l2norm_forward(backend_type):
check_backend(backend_type)
l2norm_neck = get_l2norm_forward_model()
l2norm_neck.cpu().eval()
s = 128
deploy_cfg = mmcv.Config(
dict(
backend_config=dict(type=backend_type.value),
onnx_config=dict(input_shape=None)))
feat = torch.rand(1, 16, s, s)
model_outputs = [l2norm_neck.forward(feat)]
wrapped_model = WrapModel(l2norm_neck, 'forward')
rewrite_inputs = {
'x': feat,
}
rewrite_outputs, is_backend_output = get_rewrite_outputs(
wrapped_model=wrapped_model,
model_inputs=rewrite_inputs,
deploy_cfg=deploy_cfg)
if is_backend_output:
for model_output, rewrite_output in zip(model_outputs[0],
rewrite_outputs[0]):
model_output = model_output.squeeze().cpu().numpy()
rewrite_output = rewrite_output.squeeze()
assert np.allclose(
model_output, rewrite_output, rtol=1e-03, atol=1e-05)
else:
for model_output, rewrite_output in zip(model_outputs[0],
rewrite_outputs[0]):
model_output = model_output.squeeze().cpu().numpy()
rewrite_output = rewrite_output.squeeze()
assert np.allclose(
model_output[0], rewrite_output, rtol=1e-03, atol=1e-05)
def test_get_bboxes_of_fcos_head_ncnn():
backend_type = Backend.NCNN
check_backend(backend_type)
fcos_head = get_fcos_head_model()
fcos_head.cpu().eval()
s = 128
img_metas = [{
'scale_factor': np.ones(4),
'pad_shape': (s, s, 3),
'img_shape': (s, s, 3)
}]
output_names = ['detection_output']
deploy_cfg = mmcv.Config(
dict(
backend_config=dict(type=backend_type.value),
onnx_config=dict(output_names=output_names, input_shape=None),
codebase_config=dict(
type='mmdet',
task='ObjectDetection',
model_type='ncnn_end2end',
post_processing=dict(
score_threshold=0.05,
iou_threshold=0.5,
max_output_boxes_per_class=200,
pre_top_k=5000,
keep_top_k=100,
background_label_id=-1,
))))
# the cls_score's size: (1, 36, 32, 32), (1, 36, 16, 16),
# (1, 36, 8, 8), (1, 36, 4, 4), (1, 36, 2, 2).
# the bboxes's size: (1, 36, 32, 32), (1, 36, 16, 16),
# (1, 36, 8, 8), (1, 36, 4, 4), (1, 36, 2, 2)
seed_everything(1234)
cls_score = [
torch.rand(1, fcos_head.num_classes, pow(2, i), pow(2, i))
for i in range(5, 0, -1)
]
seed_everything(5678)
bboxes = [torch.rand(1, 4, pow(2, i), pow(2, i)) for i in range(5, 0, -1)]
seed_everything(9101)
centernesses = [
torch.rand(1, 1, pow(2, i), pow(2, i)) for i in range(5, 0, -1)
]
# to get outputs of onnx model after rewrite
img_metas[0]['img_shape'] = torch.Tensor([s, s])
wrapped_model = WrapModel(
fcos_head, 'get_bboxes', img_metas=img_metas, with_nms=True)
rewrite_inputs = {
'cls_scores': cls_score,
'bbox_preds': bboxes,
'centernesses': centernesses
}
rewrite_outputs, is_backend_output = get_rewrite_outputs(
wrapped_model=wrapped_model,
model_inputs=rewrite_inputs,
deploy_cfg=deploy_cfg)
# output should be of shape [1, N, 6]
if is_backend_output:
assert rewrite_outputs[0].shape[-1] == 6
else:
assert rewrite_outputs.shape[-1] == 6
@pytest.mark.parametrize('backend_type', [Backend.ONNXRUNTIME, Backend.NCNN])
def test_get_bboxes_of_rpn_head(backend_type: Backend):
check_backend(backend_type)
head = get_rpn_head_model()
head.cpu().eval()
s = 4
img_metas = [{
'scale_factor': np.ones(4),
'pad_shape': (s, s, 3),
'img_shape': (s, s, 3)
}]
output_names = ['dets']
deploy_cfg = mmcv.Config(
dict(
backend_config=dict(type=backend_type.value),
onnx_config=dict(output_names=output_names, input_shape=None),
codebase_config=dict(
type='mmdet',
task='ObjectDetection',
post_processing=dict(
score_threshold=0.05,
iou_threshold=0.5,
max_output_boxes_per_class=200,
pre_top_k=5000,
keep_top_k=100,
background_label_id=-1,
))))
# the cls_score's size: (1, 36, 32, 32), (1, 36, 16, 16),
# (1, 36, 8, 8), (1, 36, 4, 4), (1, 36, 2, 2).
# the bboxes's size: (1, 36, 32, 32), (1, 36, 16, 16),
# (1, 36, 8, 8), (1, 36, 4, 4), (1, 36, 2, 2)
seed_everything(1234)
cls_score = [
torch.rand(1, 9, pow(2, i), pow(2, i)) for i in range(5, 0, -1)
]
seed_everything(5678)
bboxes = [torch.rand(1, 36, pow(2, i), pow(2, i)) for i in range(5, 0, -1)]
# to get outputs of onnx model after rewrite
img_metas[0]['img_shape'] = torch.Tensor([s, s])
wrapped_model = WrapModel(
head, 'get_bboxes', img_metas=img_metas, with_nms=True)
rewrite_inputs = {
'cls_scores': cls_score,
'bbox_preds': bboxes,
}
# do not run with ncnn backend
run_with_backend = False if backend_type in [Backend.NCNN] else True
rewrite_outputs, is_backend_output = get_rewrite_outputs(
wrapped_model=wrapped_model,
model_inputs=rewrite_inputs,
deploy_cfg=deploy_cfg,
run_with_backend=run_with_backend)
assert rewrite_outputs is not None
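# The helper below shrinks the detector config so forward tests run quickly on
# CPU: the ResNet-50 backbone becomes a ResNet-18 with tiny channel widths,
# which still exercises the rewrite path without pretrained weights.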
def _replace_r50_with_r18(model):
"""Replace ResNet50 with ResNet18 in config."""
model = copy.deepcopy(model)
if model.backbone.type == 'ResNet':
model.backbone.depth = 18
model.backbone.base_channels = 2
model.neck.in_channels = [2, 4, 8, 16]
return model
@pytest.mark.parametrize('backend', [Backend.ONNXRUNTIME])
@pytest.mark.parametrize('model_cfg_path', [
'tests/test_codebase/test_mmdet/data/single_stage_model.json',
'tests/test_codebase/test_mmdet/data/mask_model.json'
])
def test_forward_of_base_detector(model_cfg_path, backend):
check_backend(backend)
deploy_cfg = mmcv.Config(
dict(
backend_config=dict(type=backend.value),
onnx_config=dict(
output_names=['dets', 'labels'], input_shape=None),
codebase_config=dict(
type='mmdet',
task='ObjectDetection',
post_processing=dict(
score_threshold=0.05,
iou_threshold=0.5,
max_output_boxes_per_class=200,
pre_top_k=-1,
keep_top_k=100,
background_label_id=-1,
))))
model_cfg = mmcv.Config(dict(model=mmcv.load(model_cfg_path)))
model_cfg.model = _replace_r50_with_r18(model_cfg.model)
from mmdet.apis import init_detector
model = init_detector(model_cfg, None, 'cpu')
img = torch.randn(1, 3, 64, 64)
rewrite_inputs = {'img': img}
rewrite_outputs, _ = get_rewrite_outputs(
wrapped_model=model,
model_inputs=rewrite_inputs,
deploy_cfg=deploy_cfg)
assert rewrite_outputs is not None
@pytest.mark.parametrize('backend_type',
[Backend.ONNXRUNTIME, Backend.OPENVINO])
def test_single_roi_extractor(backend_type: Backend):
check_backend(backend_type)
single_roi_extractor = get_single_roi_extractor()
output_names = ['roi_feat']
deploy_cfg = mmcv.Config(
dict(
backend_config=dict(type=backend_type.value),
onnx_config=dict(output_names=output_names, input_shape=None),
codebase_config=dict(
type='mmdet',
task='ObjectDetection',
)))
seed_everything(1234)
out_channels = single_roi_extractor.out_channels
feats = [
torch.rand((1, out_channels, 200, 336)),
torch.rand((1, out_channels, 100, 168)),
torch.rand((1, out_channels, 50, 84)),
torch.rand((1, out_channels, 25, 42)),
]
seed_everything(5678)
rois = torch.tensor([[0.0000, 587.8285, 52.1405, 886.2484, 341.5644]])
model_inputs = {
'feats': feats,
'rois': rois,
}
model_outputs = get_model_outputs(single_roi_extractor, 'forward',
model_inputs)
backend_outputs, _ = get_rewrite_outputs(
wrapped_model=single_roi_extractor,
model_inputs=model_inputs,
deploy_cfg=deploy_cfg)
if isinstance(backend_outputs, dict):
backend_outputs = backend_outputs.values()
for model_output, backend_output in zip(model_outputs[0], backend_outputs):
model_output = model_output.squeeze().cpu().numpy()
backend_output = backend_output.squeeze()
assert np.allclose(
model_output, backend_output, rtol=1e-03, atol=1e-05)
def get_cascade_roi_head(is_instance_seg=False):
"""CascadeRoIHead Config."""
num_stages = 3
stage_loss_weights = [1, 0.5, 0.25]
bbox_roi_extractor = {
'type': 'SingleRoIExtractor',
'roi_layer': {
'type': 'RoIAlign',
'output_size': 7,
'sampling_ratio': 0
},
'out_channels': 64,
'featmap_strides': [4, 8, 16, 32]
}
all_target_stds = [[0.1, 0.1, 0.2, 0.2], [0.05, 0.05, 0.1, 0.1],
[0.033, 0.033, 0.067, 0.067]]
bbox_head = [{
'type': 'Shared2FCBBoxHead',
'in_channels': 64,
'fc_out_channels': 1024,
'roi_feat_size': 7,
'num_classes': 80,
'bbox_coder': {
'type': 'DeltaXYWHBBoxCoder',
'target_means': [0.0, 0.0, 0.0, 0.0],
'target_stds': target_stds
},
'reg_class_agnostic': True,
'loss_cls': {
'type': 'CrossEntropyLoss',
'use_sigmoid': False,
'loss_weight': 1.0
},
'loss_bbox': {
'type': 'SmoothL1Loss',
'beta': 1.0,
'loss_weight': 1.0
}
} for target_stds in all_target_stds]
mask_roi_extractor = {
'type': 'SingleRoIExtractor',
'roi_layer': {
'type': 'RoIAlign',
'output_size': 14,
'sampling_ratio': 0
},
'out_channels': 64,
'featmap_strides': [4, 8, 16, 32]
}
mask_head = {
'type': 'FCNMaskHead',
'num_convs': 4,
'in_channels': 64,
'conv_out_channels': 64,
'num_classes': 80,
'loss_mask': {
'type': 'CrossEntropyLoss',
'use_mask': True,
'loss_weight': 1.0
}
}
test_cfg = mmcv.Config(
dict(
score_thr=0.05,
nms=mmcv.Config(dict(type='nms', iou_threshold=0.5)),
max_per_img=100,
mask_thr_binary=0.5))
args = [num_stages, stage_loss_weights, bbox_roi_extractor, bbox_head]
kwargs = {'test_cfg': test_cfg}
if is_instance_seg:
args += [mask_roi_extractor, mask_head]
from mmdet.models.roi_heads import CascadeRoIHead
model = CascadeRoIHead(*args, **kwargs).eval()
return model
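# Note on the call above: CascadeRoIHead takes (num_stages, stage_loss_weights,
# bbox_roi_extractor, bbox_head) positionally, and the mask extractor/head are
# appended only when is_instance_seg=True, yielding the mask-capable variant.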
@pytest.mark.parametrize('backend_type',
[Backend.ONNXRUNTIME, Backend.OPENVINO])
def test_cascade_roi_head(backend_type: Backend):
check_backend(backend_type)
cascade_roi_head = get_cascade_roi_head()
seed_everything(1234)
x = [
torch.rand((1, 64, 200, 304)),
torch.rand((1, 64, 100, 152)),
torch.rand((1, 64, 50, 76)),
torch.rand((1, 64, 25, 38)),
]
proposals = torch.tensor([[587.8285, 52.1405, 886.2484, 341.5644, 0.5]])
img_metas = {
'img_shape': torch.tensor([800, 1216]),
'ori_shape': torch.tensor([800, 1216]),
'scale_factor': torch.tensor([1, 1, 1, 1])
}
output_names = ['results']
deploy_cfg = mmcv.Config(
dict(
backend_config=dict(type=backend_type.value),
onnx_config=dict(output_names=output_names, input_shape=None),
codebase_config=dict(
type='mmdet',
task='ObjectDetection',
post_processing=dict(
score_threshold=0.05,
iou_threshold=0.5,
max_output_boxes_per_class=200,
pre_top_k=-1,
keep_top_k=100,
background_label_id=-1))))
model_inputs = {'x': x, 'proposals': proposals.unsqueeze(0)}
wrapped_model = WrapModel(
cascade_roi_head, 'simple_test', img_metas=[img_metas])
backend_outputs, _ = get_rewrite_outputs(
wrapped_model=wrapped_model,
model_inputs=model_inputs,
deploy_cfg=deploy_cfg)
assert backend_outputs is not None
def get_fovea_head_model():
"""FoveaHead Config."""
test_cfg = mmcv.Config(
dict(
deploy_nms_pre=0,
min_bbox_size=0,
score_thr=0.05,
nms=dict(type='nms', iou_threshold=0.5),
max_per_img=100))
from mmdet.models.dense_heads import FoveaHead
model = FoveaHead(num_classes=4, in_channels=1, test_cfg=test_cfg)
model.requires_grad_(False)
return model
@pytest.mark.parametrize('backend_type',
[Backend.ONNXRUNTIME, Backend.OPENVINO])
def test_get_bboxes_of_fovea_head(backend_type: Backend):
check_backend(backend_type)
fovea_head = get_fovea_head_model()
fovea_head.cpu().eval()
s = 128
img_metas = [{
'scale_factor': np.ones(4),
'pad_shape': (s, s, 3),
'img_shape': (s, s, 3)
}]
output_names = ['dets', 'labels']
deploy_cfg = mmcv.Config(
dict(
backend_config=dict(type=backend_type.value),
onnx_config=dict(output_names=output_names, input_shape=None),
codebase_config=dict(
type='mmdet',
task='ObjectDetection',
post_processing=dict(
score_threshold=0.05,
iou_threshold=0.5,
max_output_boxes_per_class=200,
pre_top_k=-1,
keep_top_k=100,
background_label_id=-1,
))))
# the cls_score's size: (1, 36, 32, 32), (1, 36, 16, 16),
# (1, 36, 8, 8), (1, 36, 4, 4), (1, 36, 2, 2).
# the bboxes's size: (1, 36, 32, 32), (1, 36, 16, 16),
# (1, 36, 8, 8), (1, 36, 4, 4), (1, 36, 2, 2)
seed_everything(1234)
cls_score = [
torch.rand(1, fovea_head.num_classes, pow(2, i), pow(2, i))
for i in range(5, 0, -1)
]
seed_everything(5678)
bboxes = [torch.rand(1, 4, pow(2, i), pow(2, i)) for i in range(5, 0, -1)]
model_inputs = {
'cls_scores': cls_score,
'bbox_preds': bboxes,
'img_metas': img_metas
}
model_outputs = get_model_outputs(fovea_head, 'get_bboxes', model_inputs)
# to get outputs of onnx model after rewrite
img_metas[0]['img_shape'] = torch.Tensor([s, s])
wrapped_model = WrapModel(fovea_head, 'get_bboxes', img_metas=img_metas)
rewrite_inputs = {
'cls_scores': cls_score,
'bbox_preds': bboxes,
}
rewrite_outputs, is_backend_output = get_rewrite_outputs(
wrapped_model=wrapped_model,
model_inputs=rewrite_inputs,
deploy_cfg=deploy_cfg)
if is_backend_output:
if isinstance(rewrite_outputs, dict):
rewrite_outputs = convert_to_list(rewrite_outputs, output_names)
for model_output, rewrite_output in zip(model_outputs[0],
rewrite_outputs):
model_output = model_output.squeeze().cpu().numpy()
rewrite_output = rewrite_output.squeeze()
            # truncate the reference output to the rewrite output's length:
            # the rewritten and original code apply different NMS strategies
assert np.allclose(
model_output[:rewrite_output.shape[0]],
rewrite_output,
rtol=1e-03,
atol=1e-05)
else:
assert rewrite_outputs is not None
@pytest.mark.parametrize('backend_type', [Backend.OPENVINO])
def test_cascade_roi_head_with_mask(backend_type: Backend):
check_backend(backend_type)
cascade_roi_head = get_cascade_roi_head(is_instance_seg=True)
seed_everything(1234)
x = [
torch.rand((1, 64, 200, 304)),
torch.rand((1, 64, 100, 152)),
torch.rand((1, 64, 50, 76)),
torch.rand((1, 64, 25, 38)),
]
proposals = torch.tensor([[587.8285, 52.1405, 886.2484, 341.5644, 0.5]])
img_metas = {
'img_shape': torch.tensor([800, 1216]),
'ori_shape': torch.tensor([800, 1216]),
'scale_factor': torch.tensor([1, 1, 1, 1])
}
output_names = ['bbox_results', 'segm_results']
deploy_cfg = mmcv.Config(
dict(
backend_config=dict(type=backend_type.value),
onnx_config=dict(output_names=output_names, input_shape=None),
codebase_config=dict(
type='mmdet',
task='ObjectDetection',
post_processing=dict(
score_threshold=0.05,
iou_threshold=0.5,
max_output_boxes_per_class=200,
pre_top_k=-1,
keep_top_k=100,
background_label_id=-1))))
model_inputs = {'x': x, 'proposals': proposals.unsqueeze(0)}
wrapped_model = WrapModel(
cascade_roi_head, 'simple_test', img_metas=[img_metas])
backend_outputs, _ = get_rewrite_outputs(
wrapped_model=wrapped_model,
model_inputs=model_inputs,
deploy_cfg=deploy_cfg)
bbox_results = backend_outputs[0]
segm_results = backend_outputs[1]
assert bbox_results is not None
assert segm_results is not None
def get_yolov3_head_model():
"""yolov3 Head Config."""
test_cfg = mmcv.Config(
dict(
nms_pre=1000,
min_bbox_size=0,
score_thr=0.05,
conf_thr=0.005,
nms=dict(type='nms', iou_threshold=0.45),
max_per_img=100))
from mmdet.models.dense_heads import YOLOV3Head
model = YOLOV3Head(
num_classes=4,
in_channels=[16, 8, 4],
out_channels=[32, 16, 8],
test_cfg=test_cfg)
model.requires_grad_(False)
return model
@pytest.mark.parametrize('backend_type',
[Backend.ONNXRUNTIME, Backend.OPENVINO])
def test_yolov3_head_get_bboxes(backend_type):
"""Test get_bboxes rewrite of yolov3 head."""
check_backend(backend_type)
yolov3_head = get_yolov3_head_model()
yolov3_head.cpu().eval()
s = 128
img_metas = [{
'scale_factor': np.ones(4),
'pad_shape': (s, s, 3),
'img_shape': (s, s, 3)
}]
output_names = ['dets', 'labels']
deploy_cfg = mmcv.Config(
dict(
backend_config=dict(type=backend_type.value),
onnx_config=dict(output_names=output_names, input_shape=None),
codebase_config=dict(
type='mmdet',
task='ObjectDetection',
post_processing=dict(
score_threshold=0.05,
iou_threshold=0.45,
confidence_threshold=0.005,
max_output_boxes_per_class=200,
pre_top_k=-1,
keep_top_k=100,
background_label_id=-1,
))))
seed_everything(1234)
pred_maps = [
torch.rand(1, 27, 5, 5),
torch.rand(1, 27, 10, 10),
torch.rand(1, 27, 20, 20)
]
# to get outputs of pytorch model
model_inputs = {'pred_maps': pred_maps, 'img_metas': img_metas}
model_outputs = get_model_outputs(yolov3_head, 'get_bboxes', model_inputs)
# to get outputs of onnx model after rewrite
wrapped_model = WrapModel(
yolov3_head, 'get_bboxes', img_metas=img_metas, with_nms=True)
rewrite_inputs = {
'pred_maps': pred_maps,
}
rewrite_outputs, is_backend_output = get_rewrite_outputs(
wrapped_model=wrapped_model,
model_inputs=rewrite_inputs,
deploy_cfg=deploy_cfg)
if is_backend_output:
if isinstance(rewrite_outputs, dict):
rewrite_outputs = convert_to_list(rewrite_outputs, output_names)
for model_output, rewrite_output in zip(model_outputs[0],
rewrite_outputs):
model_output = model_output.squeeze().cpu().numpy()
rewrite_output = rewrite_output.squeeze()
            # truncate the reference output to the rewrite output's length:
            # the rewritten and original code apply different NMS strategies
assert np.allclose(
model_output[:rewrite_output.shape[0]],
rewrite_output,
rtol=1e-03,
atol=1e-05)
else:
assert rewrite_outputs is not None
def test_yolov3_head_get_bboxes_ncnn():
"""Test get_bboxes rewrite of yolov3 head."""
backend_type = Backend.NCNN
check_backend(backend_type)
yolov3_head = get_yolov3_head_model()
yolov3_head.cpu().eval()
s = 128
img_metas = [{
'scale_factor': np.ones(4),
'pad_shape': (s, s, 3),
'img_shape': (s, s, 3)
}]
output_names = ['detection_output']
deploy_cfg = mmcv.Config(
dict(
backend_config=dict(type=backend_type.value),
onnx_config=dict(output_names=output_names, input_shape=None),
codebase_config=dict(
type='mmdet',
model_type='ncnn_end2end',
task='ObjectDetection',
post_processing=dict(
score_threshold=0.05,
iou_threshold=0.45,
confidence_threshold=0.005,
max_output_boxes_per_class=200,
pre_top_k=-1,
keep_top_k=10,
background_label_id=-1,
))))
seed_everything(1234)
pred_maps = [
torch.rand(1, 27, 5, 5),
torch.rand(1, 27, 10, 10),
torch.rand(1, 27, 20, 20)
]
# to get outputs of onnx model after rewrite
wrapped_model = WrapModel(
yolov3_head, 'get_bboxes', img_metas=img_metas[0], with_nms=True)
rewrite_inputs = {
'pred_maps': pred_maps,
}
rewrite_outputs, is_backend_output = get_rewrite_outputs(
wrapped_model=wrapped_model,
model_inputs=rewrite_inputs,
deploy_cfg=deploy_cfg)
# output should be of shape [1, N, 6]
if is_backend_output:
assert rewrite_outputs[0].shape[-1] == 6
else:
assert rewrite_outputs.shape[-1] == 6
def get_yolox_head_model():
"""YOLOX Head Config."""
test_cfg = mmcv.Config(
dict(
deploy_nms_pre=0,
min_bbox_size=0,
score_thr=0.05,
nms=dict(type='nms', iou_threshold=0.5),
max_per_img=100))
from mmdet.models.dense_heads import YOLOXHead
model = YOLOXHead(num_classes=4, in_channels=1, test_cfg=test_cfg)
model.requires_grad_(False)
return model
@pytest.mark.parametrize('backend_type',
[Backend.ONNXRUNTIME, Backend.OPENVINO])
def test_yolox_head_get_bboxes(backend_type: Backend):
"""Test get_bboxes rewrite of YOLOXHead."""
check_backend(backend_type)
yolox_head = get_yolox_head_model()
yolox_head.cpu().eval()
s = 128
img_metas = [{
'scale_factor': np.ones(4),
'pad_shape': (s, s, 3),
'img_shape': (s, s, 3)
}]
output_names = ['dets', 'labels']
deploy_cfg = mmcv.Config(
dict(
backend_config=dict(type=backend_type.value),
onnx_config=dict(output_names=output_names, input_shape=None),
codebase_config=dict(
type='mmdet',
task='ObjectDetection',
post_processing=dict(
score_threshold=0.05,
iou_threshold=0.5,
max_output_boxes_per_class=20,
pre_top_k=-1,
keep_top_k=10,
background_label_id=-1,
))))
seed_everything(1234)
cls_scores = [
torch.rand(1, yolox_head.num_classes, pow(2, i), pow(2, i))
for i in range(3, 0, -1)
]
seed_everything(5678)
bbox_preds = [
torch.rand(1, 4, pow(2, i), pow(2, i)) for i in range(3, 0, -1)
]
seed_everything(9101)
objectnesses = [
torch.rand(1, 1, pow(2, i), pow(2, i)) for i in range(3, 0, -1)
]
# to get outputs of pytorch model
model_inputs = {
'cls_scores': cls_scores,
'bbox_preds': bbox_preds,
'objectnesses': objectnesses,
'img_metas': img_metas
}
model_outputs = get_model_outputs(yolox_head, 'get_bboxes', model_inputs)
# to get outputs of onnx model after rewrite
wrapped_model = WrapModel(
yolox_head, 'get_bboxes', img_metas=img_metas, with_nms=True)
rewrite_inputs = {
'cls_scores': cls_scores,
'bbox_preds': bbox_preds,
'objectnesses': objectnesses,
}
rewrite_outputs, is_backend_output = get_rewrite_outputs(
wrapped_model=wrapped_model,
model_inputs=rewrite_inputs,
deploy_cfg=deploy_cfg)
if is_backend_output:
if isinstance(rewrite_outputs, dict):
rewrite_outputs = convert_to_list(rewrite_outputs, output_names)
for model_output, rewrite_output in zip(model_outputs[0],
rewrite_outputs):
model_output = model_output.squeeze().cpu().numpy()
rewrite_output = rewrite_output.squeeze().cpu().numpy()
            # compare only a common prefix of rows: the rewritten and original
            # code apply different NMS strategies, so counts and order can differ
min_shape = min(model_output.shape[0], rewrite_output.shape[0], 5)
assert np.allclose(
model_output[:min_shape],
rewrite_output[:min_shape],
rtol=1e-03,
atol=1e-05)
else:
assert rewrite_outputs is not None
def get_vfnet_head_model():
"""VFNet Head Config."""
test_cfg = mmcv.Config(
dict(
deploy_nms_pre=0,
min_bbox_size=0,
score_thr=0.05,
nms=dict(type='nms', iou_threshold=0.5),
max_per_img=100))
from mmdet.models.dense_heads import VFNetHead
model = VFNetHead(num_classes=4, in_channels=1, test_cfg=test_cfg)
model.requires_grad_(False)
model.cpu().eval()
return model
@pytest.mark.parametrize('backend_type',
[Backend.OPENVINO, Backend.ONNXRUNTIME])
def test_get_bboxes_of_vfnet_head(backend_type: Backend):
"""Test get_bboxes rewrite of VFNet head."""
check_backend(backend_type)
vfnet_head = get_vfnet_head_model()
vfnet_head.cpu().eval()
s = 16
img_metas = [{
'scale_factor': np.ones(4),
'pad_shape': (s, s, 3),
'img_shape': (s, s, 3)
}]
output_names = ['dets', 'labels']
deploy_cfg = mmcv.Config(
dict(
backend_config=dict(type=backend_type.value),
onnx_config=dict(output_names=output_names, input_shape=None),
codebase_config=dict(
type='mmdet',
task='ObjectDetection',
post_processing=dict(
score_threshold=0.05,
iou_threshold=0.5,
max_output_boxes_per_class=200,
pre_top_k=-1,
keep_top_k=100,
background_label_id=-1,
))))
seed_everything(1234)
cls_score = [
torch.rand(1, vfnet_head.num_classes, pow(2, i), pow(2, i))
for i in range(5, 0, -1)
]
seed_everything(5678)
bboxes = [torch.rand(1, 4, pow(2, i), pow(2, i)) for i in range(5, 0, -1)]
seed_everything(9101)
model_inputs = {
'cls_scores': cls_score,
'bbox_preds': bboxes,
'img_metas': img_metas
}
model_outputs = get_model_outputs(vfnet_head, 'get_bboxes', model_inputs)
img_metas[0]['img_shape'] = torch.Tensor([s, s])
wrapped_model = WrapModel(
vfnet_head, 'get_bboxes', img_metas=img_metas, with_nms=True)
rewrite_inputs = {'cls_scores': cls_score, 'bbox_preds': bboxes}
rewrite_outputs, is_backend_output = get_rewrite_outputs(
wrapped_model=wrapped_model,
model_inputs=rewrite_inputs,
deploy_cfg=deploy_cfg)
if is_backend_output:
if isinstance(rewrite_outputs, dict):
rewrite_outputs = convert_to_list(rewrite_outputs, output_names)
for model_output, rewrite_output in zip(model_outputs[0],
rewrite_outputs):
model_output = model_output.squeeze().cpu().numpy()
rewrite_output = rewrite_output.squeeze()
min_shape = min(model_output.shape[0], rewrite_output.shape[0])
assert np.allclose(
model_output[:min_shape],
rewrite_output[:min_shape],
rtol=1e-03,
atol=1e-05)
else:
assert rewrite_outputs is not None
@pytest.mark.parametrize('backend_type',
[Backend.ONNXRUNTIME, Backend.OPENVINO])
def test_base_dense_head_get_bboxes(backend_type: Backend):
"""Test get_bboxes rewrite of base dense head."""
check_backend(backend_type)
anchor_head = get_anchor_head_model()
anchor_head.cpu().eval()
s = 128
img_metas = [{
'scale_factor': np.ones(4),
'pad_shape': (s, s, 3),
'img_shape': (s, s, 3)
}]
output_names = ['dets', 'labels']
deploy_cfg = mmcv.Config(
dict(
backend_config=dict(type=backend_type.value),
onnx_config=dict(output_names=output_names, input_shape=None),
codebase_config=dict(
type='mmdet',
task='ObjectDetection',
post_processing=dict(
score_threshold=0.05,
iou_threshold=0.5,
max_output_boxes_per_class=200,
pre_top_k=5000,
keep_top_k=100,
background_label_id=-1,
))))
# the cls_score's size: (1, 36, 32, 32), (1, 36, 16, 16),
# (1, 36, 8, 8), (1, 36, 4, 4), (1, 36, 2, 2).
# the bboxes's size: (1, 36, 32, 32), (1, 36, 16, 16),
# (1, 36, 8, 8), (1, 36, 4, 4), (1, 36, 2, 2)
seed_everything(1234)
cls_score = [
torch.rand(1, 36, pow(2, i), pow(2, i)) for i in range(5, 0, -1)
]
seed_everything(5678)
bboxes = [torch.rand(1, 36, pow(2, i), pow(2, i)) for i in range(5, 0, -1)]
# to get outputs of pytorch model
model_inputs = {
'cls_scores': cls_score,
'bbox_preds': bboxes,
'img_metas': img_metas
}
model_outputs = get_model_outputs(anchor_head, 'get_bboxes', model_inputs)
# to get outputs of onnx model after rewrite
img_metas[0]['img_shape'] = torch.Tensor([s, s])
wrapped_model = WrapModel(
anchor_head, 'get_bboxes', img_metas=img_metas, with_nms=True)
rewrite_inputs = {
'cls_scores': cls_score,
'bbox_preds': bboxes,
}
rewrite_outputs, is_backend_output = get_rewrite_outputs(
wrapped_model=wrapped_model,
model_inputs=rewrite_inputs,
deploy_cfg=deploy_cfg)
if is_backend_output:
if isinstance(rewrite_outputs, dict):
rewrite_outputs = convert_to_list(rewrite_outputs, output_names)
for model_output, rewrite_output in zip(model_outputs[0],
rewrite_outputs):
model_output = model_output.squeeze().cpu().numpy()
rewrite_output = rewrite_output.squeeze()
            # truncate the reference output to the rewrite output's length:
            # the rewritten and original code apply different NMS strategies
assert np.allclose(
model_output[:rewrite_output.shape[0]],
rewrite_output,
rtol=1e-03,
atol=1e-05)
else:
assert rewrite_outputs is not None
def test_base_dense_head_get_bboxes__ncnn():
"""Test get_bboxes rewrite of base dense head."""
backend_type = Backend.NCNN
check_backend(backend_type)
anchor_head = get_anchor_head_model()
anchor_head.cpu().eval()
s = 128
img_metas = [{
'scale_factor': np.ones(4),
'pad_shape': (s, s, 3),
'img_shape': (s, s, 3)
}]
output_names = ['output']
deploy_cfg = mmcv.Config(
dict(
backend_config=dict(type=backend_type.value),
onnx_config=dict(output_names=output_names, input_shape=None),
codebase_config=dict(
type='mmdet',
task='ObjectDetection',
model_type='ncnn_end2end',
post_processing=dict(
score_threshold=0.05,
iou_threshold=0.5,
max_output_boxes_per_class=200,
pre_top_k=5000,
keep_top_k=100,
background_label_id=-1,
))))
# the cls_score's size: (1, 36, 32, 32), (1, 36, 16, 16),
# (1, 36, 8, 8), (1, 36, 4, 4), (1, 36, 2, 2).
# the bboxes's size: (1, 36, 32, 32), (1, 36, 16, 16),
# (1, 36, 8, 8), (1, 36, 4, 4), (1, 36, 2, 2)
seed_everything(1234)
cls_score = [
torch.rand(1, 36, pow(2, i), pow(2, i)) for i in range(5, 0, -1)
]
seed_everything(5678)
bboxes = [torch.rand(1, 36, pow(2, i), pow(2, i)) for i in range(5, 0, -1)]
# to get outputs of onnx model after rewrite
img_metas[0]['img_shape'] = torch.Tensor([s, s])
wrapped_model = WrapModel(
anchor_head, 'get_bboxes', img_metas=img_metas, with_nms=True)
rewrite_inputs = {
'cls_scores': cls_score,
'bbox_preds': bboxes,
}
rewrite_outputs, is_backend_output = get_rewrite_outputs(
wrapped_model=wrapped_model,
model_inputs=rewrite_inputs,
deploy_cfg=deploy_cfg)
# output should be of shape [1, N, 6]
if is_backend_output:
rewrite_outputs = rewrite_outputs[0]
assert rewrite_outputs.shape[-1] == 6
@pytest.mark.parametrize('is_dynamic', [True, False])
def test_ssd_head_get_bboxes__ncnn(is_dynamic: bool):
"""Test get_bboxes rewrite of ssd head for ncnn."""
check_backend(Backend.NCNN)
ssd_head = get_ssd_head_model()
ssd_head.cpu().eval()
s = 128
img_metas = [{
'scale_factor': np.ones(4),
'pad_shape': (s, s, 3),
'img_shape': (s, s, 3)
}]
output_names = ['output']
input_names = ['input']
dynamic_axes = None
if is_dynamic:
dynamic_axes = {
input_names[0]: {
2: 'height',
3: 'width'
},
output_names[0]: {
1: 'num_dets',
}
}
deploy_cfg = mmcv.Config(
dict(
backend_config=dict(type=Backend.NCNN.value),
onnx_config=dict(
input_names=input_names,
output_names=output_names,
input_shape=None,
dynamic_axes=dynamic_axes),
codebase_config=dict(
type='mmdet',
task='ObjectDetection',
model_type='ncnn_end2end',
post_processing=dict(
score_threshold=0.05,
iou_threshold=0.5,
max_output_boxes_per_class=200,
pre_top_k=5000,
keep_top_k=100,
background_label_id=-1,
))))
# For the ssd_head:
# the cls_score's size: (1, 30, 20, 20), (1, 30, 10, 10),
# (1, 30, 5, 5), (1, 30, 3, 3), (1, 30, 2, 2), (1, 30, 1, 1)
# the bboxes's size: (1, 24, 20, 20), (1, 24, 10, 10),
# (1, 24, 5, 5), (1, 24, 3, 3), (1, 24, 2, 2), (1, 24, 1, 1)
feat_shape = [20, 10, 5, 3, 2, 1]
num_prior = 6
seed_everything(1234)
cls_score = [
torch.rand(1, 30, feat_shape[i], feat_shape[i])
for i in range(num_prior)
]
seed_everything(5678)
bboxes = [
torch.rand(1, 24, feat_shape[i], feat_shape[i])
for i in range(num_prior)
]
# to get outputs of onnx model after rewrite
img_metas[0]['img_shape'] = torch.tensor([s, s]) if is_dynamic else [s, s]
wrapped_model = WrapModel(
ssd_head, 'get_bboxes', img_metas=img_metas, with_nms=True)
rewrite_inputs = {
'cls_scores': cls_score,
'bbox_preds': bboxes,
}
rewrite_outputs, is_backend_output = get_rewrite_outputs(
wrapped_model=wrapped_model,
model_inputs=rewrite_inputs,
deploy_cfg=deploy_cfg)
# output should be of shape [1, N, 6]
if is_backend_output:
rewrite_outputs = rewrite_outputs[0]
assert rewrite_outputs.shape[-1] == 6
hexsha 4821710b642b7ec6cb7d4217a235111959b6cbc2 | size 119 | ext py | lang Python
ghtorrent/__init__.py @ eddie-chiang/prca (head c74c21034a4fcb785faedc8069470a70a74342e6) | licenses ["MIT"] | stars null | issues 40 (2020-09-01T06:25:35.000Z to 2022-03-01T03:00:59.000Z) | forks null
from .BigQueryCsvFileProcessor import BigQueryCsvFileProcessor
from .CommentResourceAccess import CommentResourceAccess
hexsha 484ccaec07ddec26944d2f1a9a34af3efeaebe48 | size 272 | ext py | lang Python
metaworld/metaworld/benchmarks/base.py @ prasoongoyal/pixl2r (head b0691be6b27e705a62534b58f97ff7b8b6655c7d) | licenses ["MIT"] | stars 46 (2020-12-05T21:40:47.000Z to 2022-03-26T04:15:03.000Z) | issues 12 (2021-02-02T22:53:59.000Z to 2022-03-12T00:41:30.000Z) | forks 10 (2021-01-24T14:24:20.000Z to 2022-03-23T17:58:52.000Z)
class Benchmark:
@classmethod
def get_train_tasks(cls, sample_all=False):
return cls(env_type='train', sample_all=sample_all)
@classmethod
def get_test_tasks(cls, sample_all=False):
return cls(env_type='test', sample_all=sample_all)
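
# Usage sketch with a hypothetical concrete subclass whose __init__ accepts
# env_type and sample_all (e.g. class ML10(Benchmark)):
#   train_env = ML10.get_train_tasks()               # ML10(env_type='train', sample_all=False)
#   test_env = ML10.get_test_tasks(sample_all=True)  # ML10(env_type='test', sample_all=True)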
hexsha 4862c73741cbf247ad74e6ff91c1564ebb1860d5 | size 179 | ext py | lang Python
solver/__init__.py @ jackie840129/CF-AAN (head 2b357bf5823837908f0fe04a22b47deaf9e3da4e) | licenses ["MIT"] | stars 16 (2021-05-31T09:15:55.000Z to 2022-03-04T14:24:54.000Z) | issues 2 (2021-06-09T07:15:00.000Z to 2022-03-16T09:42:03.000Z) | forks 4 (2021-06-29T04:14:24.000Z to 2022-03-04T14:24:57.000Z)
# encoding: utf-8
from .build import make_optimizer, make_optimizer_with_center
from .lr_scheduler import WarmupMultiStepLR
from torch.optim.lr_scheduler import StepLR, MultiStepLR
hexsha 486d6aecf3c90be90c1d322c8088ccd2b9041fc5 | size 92 | ext py | lang Python
tests/test_skforecast.py @ anderitur-cepsa/skforecast (head 8ecee6a4493f93d5e44cb5f5ea5020af38ef7659) | licenses ["CC-BY-4.0"] | stars null | issues null | forks null
from skforecast import __version__
def test_version():
assert __version__ == '0.1.8.1'
hexsha 486eb967f72924d93f9f6dbc457dae4e8a56b1a1 | size 35 | ext py | lang Python
dodgy/__main__.py @ tirkarthi/dodgy (head 403dc2678e27de7c247fbf2201edcfec7670bd3b) | licenses ["MIT"] | stars 89 (2015-01-28T20:47:32.000Z to 2022-03-23T01:54:44.000Z) | issues 23 (2015-01-31T10:23:12.000Z to 2021-09-22T09:20:26.000Z) | forks 22 (2015-01-05T10:12:42.000Z to 2022-01-13T10:33:48.000Z)
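# Package entry point: `python -m dodgy` executes this module, which simply
# delegates to dodgy.run.main().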
import dodgy.run
dodgy.run.main()
hexsha 6fb7443cc54c22ce6cdedb7d47264ea79d8246cc | size 79,060 | ext py | lang Python
weipa/test/python/run_savesilo_tests.py @ markendr/esys-escript.github.io (head 0023eab09cd71f830ab098cb3a468e6139191e8d) | licenses ["Apache-2.0"] | stars null | issues 1 (2019-01-14T03:07:43.000Z to 2019-01-14T03:07:43.000Z) | forks null
##############################################################################
#
# Copyright (c) 2003-2018 by The University of Queensland
# http://www.uq.edu.au
#
# Primary Business: Queensland, Australia
# Licensed under the Apache License, version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
#
# Development until 2012 by Earth Systems Science Computational Center (ESSCC)
# Development 2012-2013 by School of Earth Sciences
# Development from 2014 by Centre for Geoscience Computing (GeoComp)
#
##############################################################################
from __future__ import print_function, division
__copyright__="""Copyright (c) 2003-2018 by The University of Queensland
http://www.uq.edu.au
Primary Business: Queensland, Australia"""
__license__="""Licensed under the Apache License, version 2.0
http://www.apache.org/licenses/LICENSE-2.0"""
__url__="https://launchpad.net/escript-finley"
import os, math
import esys.escriptcore.utestselect as unittest
from esys.escriptcore.testing import *
from esys.escript import ContinuousFunction, Function, ReducedFunction,\
FunctionOnBoundary, ReducedFunctionOnBoundary,\
FunctionOnContactZero, ReducedFunctionOnContactZero,\
FunctionOnContactOne, ReducedFunctionOnContactOne,\
Solution, ReducedSolution, getMPISizeWorld
from esys.weipa import saveSilo
try:
import Silo
HAVE_SILO=True
except ImportError:
HAVE_SILO=False
try:
from esys import dudley
HAVE_DUDLEY=True
except ImportError:
HAVE_DUDLEY=False
try:
from esys import finley
HAVE_FINLEY=True
except ImportError:
HAVE_FINLEY=False
try:
from esys import ripley
HAVE_RIPLEY=True
except ImportError:
HAVE_RIPLEY=False
try:
WEIPA_TEST_MESHES=os.environ['WEIPA_TEST_MESHES']
except KeyError:
WEIPA_TEST_MESHES='meshes'
try:
WEIPA_WORKDIR=os.environ['WEIPA_WORKDIR']
except KeyError:
WEIPA_WORKDIR=os.path.join(os.getcwd(),'weipa/test/python/')
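# Both locations are plain environment-variable overrides with defaults, e.g.
# (hypothetical paths): WEIPA_TEST_MESHES=/data/meshes WEIPA_WORKDIR=/tmp
# python run_savesilo_tests.py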
class SiloReader():
"""
Silo file reader that uses the Python interface to the Silo library.
"""
def __init__(self):
self.f=None
def open(self, filename):
try:
self.f=Silo.Open(filename)
self.filename=filename
return True
except:
return False
def close(self):
if self.f:
self.f.Close()
def getDirNames(self):
return self.f.GetToc().dir_names
def setDir(self, dirname):
self.f.SetDir(dirname)
def getTimeAndCycle(self):
return self.f.GetVar('dtime'), self.f.GetVar('cycle')
def getElementNames(self):
names=[]
for v in self.f.GetToc().var_names:
if v.endswith('_coord0'):
names.append(v[:-7])
return names
def getCellCoords(self, elementName):
ret=()
for i in range(3):
try:
data=self.f.GetVar("%s_coord%d"%(elementName,i))
ret+=data,
except:
ret+=None,
return ret
def getCellExtents(self, elementName):
ret=()
for i in ('min','max'):
try:
data=self.f.GetVar("%s_%s_extents"%(elementName,i))
ret+=data,
except:
ret+=None,
return ret
def getCellNodeList(self, elementName):
try:
return self.f.GetVar("%s_zones_nodelist"%elementName)
except:
return None
def getCellShapeInfo(self, elementName):
infoNames=('zones_shapecnt', 'zones_shapesize', 'zones_shapetype')
ret=()
for n in infoNames:
try:
data=self.f.GetVar("%s_%s"%(elementName,n))
ret+=data,
except:
ret+=None,
return ret
def getData(self):
data={}
names=self.f.GetToc().var_names
for v in names:
if v[-5:]=='_data':
data[v[:-5]] = self.f.GetVar(v)
return data
class SiloSaver(unittest.TestCase): #requires subclassing
def numericCompareL2(self, vector1, vector2):
"""
Compares two lists of numbers using the L2 norm, returns true if they
match up to a tolerance TOL, false otherwise
"""
        if vector2 is None: return False
TOL = 2.0e-5
try:
l1=len(vector1)
except TypeError:
vector1=[vector1]
try:
l2=len(vector2)
except TypeError:
vector2=[vector2]
if len(vector1) != len(vector2): return False
diff = 0.0
for i in range(0, len(vector1)):
tmp = vector1[i] - vector2[i]
diff += tmp * tmp
if math.sqrt(diff) > TOL: return False
return True
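    # For example, numericCompareL2([1.0, 2.0], [1.0, 2.0 + 1e-6]) is True
    # (L2 difference 1e-6 < TOL), while numericCompareL2([1.0], [1.0, 2.0])
    # is False because the lengths differ.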
def compareSiloFiles(self, file1, file2):
"""
Compares two Silo files, asserts if any element is not equal.
file2 is the reference file to compare against
"""
p1=SiloReader()
p2=SiloReader()
self.assertTrue(p1.open(file1), "Invalid Silo file '%s'"%file1)
p2.open(file2)
self.assertEqual(p1.getTimeAndCycle(), p2.getTimeAndCycle())
dirNames1=p1.getDirNames()
dirNames2=p2.getDirNames()
for dirName in dirNames2:
self.assertTrue(dirName in dirNames1, "Silo file is missing directories")
p1.setDir('/block0000')
p2.setDir('/block0000')
elementNames1=p1.getElementNames()
elementNames2=p2.getElementNames()
for elementName in elementNames2:
self.assertTrue(elementName in elementNames1, "Mesh '%s' missing in Silo file"%elementName)
cx1,cy1,cz1=p1.getCellCoords(elementName)
cx2,cy2,cz2=p2.getCellCoords(elementName)
self.assertEqual(len(cx1), len(cx2))
self.assertEqual(len(cy1), len(cy2))
if cz2 is not None:
self.assertEqual(len(cz1), len(cz2))
                # list() is needed: under Python 3 zip() returns a one-shot
                # iterator, but the coords are indexed repeatedly below
                coords1=list(zip(cx1,cy1,cz1))
                coords2=list(zip(cx2,cy2,cz2))
            else:
                coords1=list(zip(cx1,cy1))
                coords2=list(zip(cx2,cy2))
# Find mapping of nodes in file 1 to file 2 (they may be
# permuted)
#nodeMap1to2 = {}
for i1 in range(len(coords1)):
indexList=[]
for i2 in range(len(coords2)):
if self.numericCompareL2(coords1[i1], coords2[i2]):
indexList.append(i2)
                self.assertNotEqual(len(indexList), 0,
"Node with coordinates %s missing in '%s'"%(str(coords1[i1]),elementName))
#nodeMap1to2[i1]=indexList
extents1=p1.getCellExtents(elementName)
extents2=p2.getCellExtents(elementName)
self.assertEqual(extents1, extents2)
nodelist1=p1.getCellNodeList(elementName)
nodelist2=p2.getCellNodeList(elementName)
ccount1,csize1,ctype1=p1.getCellShapeInfo(elementName)
ccount2,csize2,ctype2=p2.getCellShapeInfo(elementName)
# data
data1=p1.getData()
data2=p2.getData()
p1.close()
p2.close()
# TODO: The Silo module does not allow checking for the mesh of
# a variable yet so we cannot compare permuted entries using the
# node mappings from above (see vtk tests)
self.assertEqual(len(data1), len(data2))
for name in data2:
self.assertTrue(name in data1, "Variable '%s' missing"%name)
if name.startswith('data'):
self.assertTrue(self.numericCompareL2(
data1[name], data2[name]),
"Values in '%s' do not match" % name)
def check_silo(self, reference, **data):
outFileBase="out_"+reference
saveSilo(os.path.join(WEIPA_WORKDIR, outFileBase), write_meshdata=True, **data)
ref=os.path.join(WEIPA_TEST_MESHES, reference+".silo")
out=os.path.join(WEIPA_WORKDIR, outFileBase+".silo")
self.compareSiloFiles(out, ref)
self.cleanup(out)
def cleanup(self, filename):
os.remove(filename)
class Test_Silo_import(unittest.TestCase):
def test_import(self):
if not HAVE_SILO:
try:
import Silo
except ImportError as e:
if "No module named Silo" not in str(e):
raise unittest.SkipTest("Silo module broken")
@unittest.skipIf(getMPISizeWorld()>1, "MPI size > 1")
@unittest.skipIf(not HAVE_FINLEY, "finley module not available")
@unittest.skipIf(not HAVE_SILO, "Silo module not available")
class Test_Finley_SaveSilo(SiloSaver):
# === Finley hex 2D order 1 with contacts ===================================
def test_hex_contact_2D_order1_ContinuousFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order1.msh"),optimize=False)
x=ContinuousFunction(dom).getX()
self.check_silo("hex_2D_o1_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order1_Solution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order1.msh"),optimize=False)
x=Solution(dom).getX()
self.check_silo("hex_2D_o1_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order1_ReducedSolution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order1.msh"),optimize=False)
x=ReducedSolution(dom).getX()
self.check_silo("hex_2D_o1_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order1_Function(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order1.msh"),optimize=False)
x=Function(dom).getX()
self.check_silo("hex_2D_o1_cell", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order1_ReducedFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order1.msh"),optimize=False)
x=ReducedFunction(dom).getX()
self.check_silo("hex_2D_o1_cell", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order1_FunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order1.msh"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("hex_2D_o1_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order1_ReducedFunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order1.msh"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("hex_2D_o1_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order1_onFace_FunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order1_onFace.msh"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("hex_2D_o1_f_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order1_onFace_ReducedFunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order1_onFace.msh"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("hex_2D_o1_f_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order1_FunctionOnContactZero(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order1.msh"),optimize=False)
x=FunctionOnContactZero(dom).getX()
self.check_silo("hex_2D_o1_contact", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order1_ReducedFunctionOnContactZero(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order1.msh"),optimize=False)
x=ReducedFunctionOnContactZero(dom).getX()
self.check_silo("hex_2D_o1_contact", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order1_onFace_FunctionOnContactZero(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order1_onFace.msh"),optimize=False)
x=FunctionOnContactZero(dom).getX()
self.check_silo("hex_2D_o1_contact", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order1_onFace_ReducedFunctionOnContactZero(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order1_onFace.msh"),optimize=False)
x=ReducedFunctionOnContactZero(dom).getX()
self.check_silo("hex_2D_o1_contact", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order1_FunctionOnContactOne(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order1.msh"),optimize=False)
x=FunctionOnContactOne(dom).getX()
self.check_silo("hex_2D_o1_contact", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order1_ReducedFunctionOnContactOne(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order1.msh"),optimize=False)
x=ReducedFunctionOnContactOne(dom).getX()
self.check_silo("hex_2D_o1_contact", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order1_onFace_FunctionOnContactOne(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order1_onFace.msh"),optimize=False)
x=FunctionOnContactOne(dom).getX()
self.check_silo("hex_2D_o1_contact", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order1_onFace_ReducedFunctionOnContactOne(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order1_onFace.msh"),optimize=False)
x=ReducedFunctionOnContactOne(dom).getX()
self.check_silo("hex_2D_o1_contact", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
# === Finley hex 2D order 2 with contacts ===================================
def test_hex_contact_2D_order2_ContinuousFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order2.msh"),optimize=False)
x=ContinuousFunction(dom).getX()
self.check_silo("hex_2D_o2_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order2_Solution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order2.msh"),optimize=False)
x=Solution(dom).getX()
self.check_silo("hex_2D_o2_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order2_ReducedSolution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order2.msh"),optimize=False)
x=ReducedSolution(dom).getX()
self.check_silo("hex_2D_o2_rnode", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order2_Function(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order2.msh"),optimize=False)
x=Function(dom).getX()
self.check_silo("hex_2D_o2_cell", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order2_ReducedFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order2.msh"),optimize=False)
x=ReducedFunction(dom).getX()
self.check_silo("hex_2D_o2_cell", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order2_FunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order2.msh"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("hex_2D_o2_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order2_ReducedFunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order2.msh"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("hex_2D_o2_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order2_onFace_FunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order2_onFace.msh"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("hex_2D_o2_f_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order2_onFace_ReducedFunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order2_onFace.msh"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("hex_2D_o2_f_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order2_FunctionOnContactZero(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order2.msh"),optimize=False)
x=FunctionOnContactZero(dom).getX()
self.check_silo("hex_2D_o2_contact", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order2_ReducedFunctionOnContactZero(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order2.msh"),optimize=False)
x=ReducedFunctionOnContactZero(dom).getX()
self.check_silo("hex_2D_o2_rcontact", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order2_onFace_FunctionOnContactZero(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order2_onFace.msh"),optimize=False)
x=FunctionOnContactZero(dom).getX()
self.check_silo("hex_2D_o2_f_contact", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order2_onFace_ReducedFunctionOnContactZero(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order2_onFace.msh"),optimize=False)
x=ReducedFunctionOnContactZero(dom).getX()
self.check_silo("hex_2D_o2_f_contact", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order2_FunctionOnContactOne(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order2.msh"),optimize=False)
x=FunctionOnContactOne(dom).getX()
self.check_silo("hex_2D_o2_contact", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order2_ReducedFunctionOnContactOne(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order2.msh"),optimize=False)
x=ReducedFunctionOnContactOne(dom).getX()
self.check_silo("hex_2D_o2_rcontact", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order2_onFace_FunctionOnContactOne(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order2_onFace.msh"),optimize=False)
x=FunctionOnContactOne(dom).getX()
self.check_silo("hex_2D_o2_f_contact", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_contact_2D_order2_onFace_ReducedFunctionOnContactOne(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_2D_order2_onFace.msh"),optimize=False)
x=ReducedFunctionOnContactOne(dom).getX()
self.check_silo("hex_2D_o2_f_contact", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
# === Finley hex 2D order 2 =================================================
def test_hex_2D_order2_empty(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_order2.msh"),optimize=False)
self.check_silo("hex_2D_o2", domain=dom)
def test_hex_2D_order2_AllPoints_Scalar(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_order2.msh"),optimize=False)
x=Solution(dom).getX()
x_r=ReducedSolution(dom).getX()
x_n=ContinuousFunction(dom).getX()
self.check_silo("hex_2D_o2_node_3xs", data_r=x_r[0], data_n=x_n[0], data=x[0])
def test_hex_2D_order2_2Cells_Scalar(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_order2.msh"),optimize=False)
x=Function(dom).getX()
x_b=FunctionOnBoundary(dom).getX()
self.check_silo("hex_2D_o2_cell_2xs", data_b=x_b[0], data=x[0])
def test_hex_2D_order2_BoundaryPoint_Scalar(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_order2.msh"),optimize=False)
x=ContinuousFunction(dom).getX()
x_b=FunctionOnBoundary(dom).getX()
self.check_silo("hex_2D_o2_boundary_2xs", data=x[0],data_b=x_b[0])
def test_hex_2D_order2_Cells_AllData(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_order2.msh"),optimize=False)
x=Function(dom).getX()
self.check_silo("hex_2D_o2_cell_all",
data_s=x[0],
data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_2D_order2_CellsPoints_AllData(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_order2.msh"),optimize=False)
x_c=Function(dom).getX()
x_p=ContinuousFunction(dom).getX()
self.check_silo("hex_2D_o2_cellnode_all",
data_sp=x_p[0],
data_vp=x_p[0]*[1.,2.],
data_tp=x_p[0]*[[11.,12.],[21.,22.]],
data_sc=x_c[0],
data_vc=x_c[0]*[1.,2.],
data_tc=x_c[0]*[[11.,12.],[21.,22.]])
# === Finley hex 2D order 2 (full) ==========================================
def test_hex_2D_order2p_empty(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_order2p.msh"),optimize=False)
self.check_silo("hex_2D_o2p", domain=dom)
def test_hex_2D_order2p_AllPoints_Scalar(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_order2p.msh"),optimize=False)
x=Solution(dom).getX()
x_r=ReducedSolution(dom).getX()
x_n=ContinuousFunction(dom).getX()
self.check_silo("hex_2D_o1_node_3xs", data_r=x_r[0], data_n=x_n[0], data=x[0])
def test_hex_2D_order2p_2Cells_Scalar(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_order2p.msh"),optimize=False)
x=Function(dom).getX()
x_b=FunctionOnBoundary(dom).getX()
self.check_silo("hex_2D_o2p_cell_2xs", data_b=x_b[0], data=x[0])
def test_hex_2D_order2p_BoundaryPoint_Scalar(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_order2p.msh"),optimize=False)
x=ContinuousFunction(dom).getX()
x_b=FunctionOnBoundary(dom).getX()
self.check_silo("hex_2D_o2p_boundary_2xs", data=x[0],data_b=x_b[0])
def test_hex_2D_order2p_CellsPoints_AllData(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_order2p.msh"),optimize=False)
x_c=Function(dom).getX()
x_p=ContinuousFunction(dom).getX()
self.check_silo("hex_2D_o2p_cellnode_all",
data_sp=x_p[0],
data_vp=x_p[0]*[1.,2.],
data_tp=x_p[0]*[[11.,12.],[21.,22.]],
data_sc=x_c[0],
data_vc=x_c[0]*[1.,2.],
data_tc=x_c[0]*[[11.,12.],[21.,22.]])
def test_hex_2D_order2p_ContinuousFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_order2p.msh"),optimize=False)
x=ContinuousFunction(dom).getX()
self.check_silo("hex_2D_o2p_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_2D_order2p_Solution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_order2p.msh"),optimize=False)
x=Solution(dom).getX()
self.check_silo("hex_2D_o2p_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_2D_order2p_ReducedSolution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_order2p.msh"),optimize=False)
x=ReducedSolution(dom).getX()
self.check_silo("hex_2D_o2p_rnode", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_2D_order2p_Function(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_order2p.msh"),optimize=False)
x=Function(dom).getX()
self.check_silo("hex_2D_o2p_cell", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_2D_order2p_ReducedFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_order2p.msh"),optimize=False)
x=ReducedFunction(dom).getX()
self.check_silo("hex_2D_o2p_rcell", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_2D_order2p_FunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_order2p.msh"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("hex_2D_o2p_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_2D_order2p_ReducedFunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_order2p.msh"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("hex_2D_o2p_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
# === Finley hex 2D macro ===================================================
def test_hex_2D_macro_empty(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_macro.msh"),optimize=False)
self.check_silo("hex_2D_o2p", domain=dom)
def test_hex_2D_macro_AllPoints_Scalar(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_macro.msh"),optimize=False)
x=Solution(dom).getX()
x_r=ReducedSolution(dom).getX()
x_n=ContinuousFunction(dom).getX()
self.check_silo("hex_2D_o1_node_3xs", data_r=x_r[0], data_n=x_n[0], data=x[0])
def test_hex_2D_macro_CellsPoints(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_macro.msh"),optimize=False)
x_c=Function(dom).getX()
x_p=ContinuousFunction(dom).getX()
self.check_silo("hex_2D_macro_cellnode_all",
data_sp=x_p[0],
data_vp=x_p[0]*[1.,2.],
data_tp=x_p[0]*[[11.,12.],[21.,22.]],
data_sc=x_c[0],
data_vc=x_c[0]*[1.,2.],
data_tc=x_c[0]*[[11.,12.],[21.,22.]])
def test_hex_2D_macro_2Cells_Scalar(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_macro.msh"),optimize=False)
x=Function(dom).getX()
x_b=FunctionOnBoundary(dom).getX()
self.check_silo("hex_2D_cell_2xs", data_b=x_b[0], data=x[0])
def test_hex_2D_macro_BoundaryPoint_Scalar(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_macro.msh"),optimize=False)
x=ContinuousFunction(dom).getX()
x_b=FunctionOnBoundary(dom).getX()
self.check_silo("hex_2D_boundary_2xs", data_b=x_b[0], data=x[0])
def test_hex_2D_macro_ContinuousFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_macro.msh"),optimize=False)
x=ContinuousFunction(dom).getX()
self.check_silo("hex_2D_o2p_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_2D_macro_Solution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_macro.msh"),optimize=False)
x=Solution(dom).getX()
self.check_silo("hex_2D_o2p_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_2D_macro_ReducedSolution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_macro.msh"),optimize=False)
x=ReducedSolution(dom).getX()
self.check_silo("hex_2D_o2p_rnode", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_2D_macro_Function(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_macro.msh"),optimize=False)
x=Function(dom).getX()
self.check_silo("hex_2D_macro_cell", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_2D_macro_ReducedFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_macro.msh"),optimize=False)
x=ReducedFunction(dom).getX()
self.check_silo("hex_2D_macro_rcell", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_2D_macro_FunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_macro.msh"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("hex_2D_macro_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_hex_2D_macro_ReducedFunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_2D_macro.msh"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("hex_2D_macro_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
# === Finley hex 3D order 1 with contacts ===================================
def test_hex_contact_3D_order1_ContinuousFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order1.msh"),optimize=False)
x=ContinuousFunction(dom).getX()
self.check_silo("hex_3D_o1_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order1_Solution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order1.msh"),optimize=False)
x=Solution(dom).getX()
self.check_silo("hex_3D_o1_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order1_ReducedSolution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order1.msh"),optimize=False)
x=ReducedSolution(dom).getX()
self.check_silo("hex_3D_o1_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order1_Function(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order1.msh"),optimize=False)
x=Function(dom).getX()
self.check_silo("hex_3D_o1_cell", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order1_ReducedFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order1.msh"),optimize=False)
x=ReducedFunction(dom).getX()
self.check_silo("hex_3D_o1_cell", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order1_FunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order1.msh"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("hex_3D_o1_boundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order1_ReducedFunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order1.msh"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("hex_3D_o1_boundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order1_onFace_FunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order1_onFace.msh"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("hex_3D_o1_f_boundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order1_onFace_ReducedFunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order1_onFace.msh"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("hex_3D_o1_f_boundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order1_FunctionOnContactZero(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order1.msh"),optimize=False)
x=FunctionOnContactZero(dom).getX()
self.check_silo("hex_3D_o1_contact", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order1_ReducedFunctionOnContactZero(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order1.msh"),optimize=False)
x=ReducedFunctionOnContactZero(dom).getX()
self.check_silo("hex_3D_o1_contact", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order1_onFace_FunctionOnContactZero(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order1_onFace.msh"),optimize=False)
x=FunctionOnContactZero(dom).getX()
self.check_silo("hex_3D_o1_contact", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order1_onFace_ReducedFunctionOnContactZero(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order1_onFace.msh"),optimize=False)
x=ReducedFunctionOnContactZero(dom).getX()
self.check_silo("hex_3D_o1_contact", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order1_FunctionOnContactOne(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order1.msh"),optimize=False)
x=FunctionOnContactOne(dom).getX()
self.check_silo("hex_3D_o1_contact", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order1_ReducedFunctionOnContactOne(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order1.msh"),optimize=False)
x=ReducedFunctionOnContactOne(dom).getX()
self.check_silo("hex_3D_o1_contact", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order1_onFace_FunctionOnContactOne(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order1_onFace.msh"),optimize=False)
x=FunctionOnContactOne(dom).getX()
self.check_silo("hex_3D_o1_contact", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order1_onFace_ReducedFunctionOnContactOne(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order1_onFace.msh"),optimize=False)
x=ReducedFunctionOnContactOne(dom).getX()
self.check_silo("hex_3D_o1_contact", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
# === Finley hex 3D order 2 with contacts ===================================
def test_hex_contact_3D_order2_ContinuousFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order2.msh"),optimize=False)
x=ContinuousFunction(dom).getX()
self.check_silo("hex_3D_o2_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order2_Solution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order2.msh"),optimize=False)
x=Solution(dom).getX()
self.check_silo("hex_3D_o2_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order2_ReducedSolution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order2.msh"),optimize=False)
x=ReducedSolution(dom).getX()
self.check_silo("hex_3D_o2_rnode", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order2_Function(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order2.msh"),optimize=False)
x=Function(dom).getX()
self.check_silo("hex_3D_o2_cell", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order2_ReducedFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order2.msh"),optimize=False)
x=ReducedFunction(dom).getX()
self.check_silo("hex_3D_o2_cell", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order2_FunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order2.msh"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("hex_3D_o2_boundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order2_ReducedFunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order2.msh"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("hex_3D_o2_boundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order2_onFace_FunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order2_onFace.msh"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("hex_3D_o2_f_boundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order2_onFace_ReducedFunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order2_onFace.msh"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("hex_3D_o2_f_boundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order2_FunctionOnContactZero(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order2.msh"),optimize=False)
x=FunctionOnContactZero(dom).getX()
self.check_silo("hex_3D_o2_contact", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order2_ReducedFunctionOnContactZero(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order2.msh"),optimize=False)
x=ReducedFunctionOnContactZero(dom).getX()
self.check_silo("hex_3D_o2_contact", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order2_onFace_FunctionOnContactZero(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order2_onFace.msh"),optimize=False)
x=FunctionOnContactZero(dom).getX()
self.check_silo("hex_3D_o2_f_contact", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order2_onFace_ReducedFunctionOnContactZero(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order2_onFace.msh"),optimize=False)
x=ReducedFunctionOnContactZero(dom).getX()
self.check_silo("hex_3D_o2_f_contact", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order2_FunctionOnContactOne(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order2.msh"),optimize=False)
x=FunctionOnContactOne(dom).getX()
self.check_silo("hex_3D_o2_contact", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order2_ReducedFunctionOnContactOne(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order2.msh"),optimize=False)
x=ReducedFunctionOnContactOne(dom).getX()
self.check_silo("hex_3D_o2_contact", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order2_onFace_FunctionOnContactOne(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order2_onFace.msh"),optimize=False)
x=FunctionOnContactOne(dom).getX()
self.check_silo("hex_3D_o2_f_contact", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_contact_3D_order2_onFace_ReducedFunctionOnContactOne(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_contact_3D_order2_onFace.msh"),optimize=False)
x=ReducedFunctionOnContactOne(dom).getX()
self.check_silo("hex_3D_o2_f_contact", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
# === Finley hex 3D order 2 (full) ==========================================
def test_hex_3D_order2p_ContinuousFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_3D_order2p.msh"),optimize=False)
x=ContinuousFunction(dom).getX()
self.check_silo("hex_3D_o2p_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_3D_order2p_Solution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_3D_order2p.msh"),optimize=False)
x=Solution(dom).getX()
self.check_silo("hex_3D_o2p_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_3D_order2p_ReducedSolution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_3D_order2p.msh"),optimize=False)
x=ReducedSolution(dom).getX()
self.check_silo("hex_3D_o2p_rnode", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_3D_order2p_Function(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_3D_order2p.msh"),optimize=False)
x=Function(dom).getX()
self.check_silo("hex_3D_o2p_cell", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_3D_order2p_ReducedFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_3D_order2p.msh"),optimize=False)
x=ReducedFunction(dom).getX()
self.check_silo("hex_3D_o2p_rcell", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_3D_order2p_FunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_3D_order2p.msh"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("hex_3D_o2p_boundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_3D_order2p_ReducedFunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_3D_order2p.msh"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("hex_3D_o2p_rboundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
# === Finley hex 3D macro ===================================================
def test_hex_3D_macro_ContinuousFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_3D_macro.msh"),optimize=False)
x=ContinuousFunction(dom).getX()
self.check_silo("hex_3D_o2p_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_3D_macro_Solution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_3D_macro.msh"),optimize=False)
x=Solution(dom).getX()
self.check_silo("hex_3D_o2p_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_3D_macro_ReducedSolution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_3D_macro.msh"),optimize=False)
x=ReducedSolution(dom).getX()
self.check_silo("hex_3D_o2p_rnode", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_3D_macro_Function(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_3D_macro.msh"),optimize=False)
x=Function(dom).getX()
self.check_silo("hex_3D_macro_cell", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_3D_macro_ReducedFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_3D_macro.msh"),optimize=False)
x=ReducedFunction(dom).getX()
self.check_silo("hex_3D_macro_cell", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_3D_macro_FunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_3D_macro.msh"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("hex_3D_macro_boundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_hex_3D_macro_ReducedFunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"hex_3D_macro.msh"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("hex_3D_macro_boundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
# === Finley tet 2D order 1 =================================================
def test_tet_2D_order1_ContinuousFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order1.fly"),optimize=False)
x=ContinuousFunction(dom).getX()
self.check_silo("tet_2D_o1_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_order1_Solution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order1.fly"),optimize=False)
x=Solution(dom).getX()
self.check_silo("tet_2D_o1_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_order1_ReducedSolution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order1.fly"),optimize=False)
x=ReducedSolution(dom).getX()
self.check_silo("tet_2D_o1_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_order1_Function(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order1.fly"),optimize=False)
x=Function(dom).getX()
self.check_silo("tet_2D_o1_cell", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_order1_ReducedFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order1.fly"),optimize=False)
x=ReducedFunction(dom).getX()
self.check_silo("tet_2D_o1_cell", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_order1_FunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order1.fly"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("tet_2D_o1_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_order1_ReducedFunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order1.fly"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("tet_2D_o1_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
# === Finley tet 2D order 2 =================================================
def test_tet_2D_order2(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order2.fly"),optimize=False)
self.check_silo("tet_2D_o2", domain=dom)
def test_tet_2D_order2_AllPoints_Scalar(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order2.fly"),optimize=False)
x=Solution(dom).getX()
x_r=ReducedSolution(dom).getX()
x_n=ContinuousFunction(dom).getX()
self.check_silo("tet_2D_o1_node_3xs", data_r=x_r[0], data_n=x_n[0], data=x[0])
def test_tet_2D_order2_02Points_Scalar(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order2.fly"),optimize=False)
x=Solution(dom).getX()
x_n=ContinuousFunction(dom).getX()
self.check_silo("tet_2D_o2_node_2xs", data_n=x_n[0], data=x[0])
def test_tet_2D_order2_2Cells_Scalar(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order2.fly"),optimize=False)
x=Function(dom).getX()
x_b=FunctionOnBoundary(dom).getX()
self.check_silo("tet_2D_o2_cell_2xs", data_b=x_b[0], data=x[0])
def test_tet_2D_order2_BoundaryPoint_Scalar(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order2.fly"),optimize=False)
x=ContinuousFunction(dom).getX()
x_b=FunctionOnBoundary(dom).getX()
self.check_silo("tet_2D_o2_boundary_2xs", data=x[0],data_b=x_b[0])
def test_tet_2D_order2_Cells_AllData(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order2.fly"),optimize=False)
x=Function(dom).getX()
self.check_silo("tet_2D_o2_cell_all",
data_s=x[0],
data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]],
data_t2=x[0]*[[-11.,-12.],[-21.,-22.]])
def test_tet_2D_order2_CellsPoints_AllData(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order2.fly"),optimize=False)
x_c=Function(dom).getX()
x_p=ContinuousFunction(dom).getX()
self.check_silo("tet_2D_o2_cellnode_all",
data_sp=x_p[0],
data_vp=x_p[0]*[1.,2.],
data_tp=x_p[0]*[[11.,12.],[21.,22.]],
data_sc=x_c[0],
data_vc=x_c[0]*[1.,2.],
data_tc=x_c[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_order2_ContinuousFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order2.fly"),optimize=False)
x=ContinuousFunction(dom).getX()
self.check_silo("tet_2D_o2_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_order2_Solution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order2.fly"),optimize=False)
x=Solution(dom).getX()
self.check_silo("tet_2D_o2_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_order2_ReducedSolution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order2.fly"),optimize=False)
x=ReducedSolution(dom).getX()
self.check_silo("tet_2D_o2_rnode", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_order2_Function(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order2.fly"),optimize=False)
x=Function(dom).getX()
self.check_silo("tet_2D_o2_cell", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_order2_ReducedFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order2.fly"),optimize=False)
x=ReducedFunction(dom).getX()
self.check_silo("tet_2D_o2_rcell", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_order2_FunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order2.fly"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("tet_2D_o2_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_order2_ReducedFunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_order2.fly"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("tet_2D_o2_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
# === Finley tet 2D macro ===================================================
def test_tet_2D_macro(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_macro.fly"),optimize=False)
self.check_silo("tet_2D_o2", domain=dom)
def test_tet_2D_macro_AllPoints_Scalar(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_macro.fly"),optimize=False)
x=Solution(dom).getX()
x_r=ReducedSolution(dom).getX()
x_n=ContinuousFunction(dom).getX()
self.check_silo("tet_2D_o1_node_3xs", data_r=x_r[0], data_n=x_n[0], data=x[0])
def test_tet_2D_macro_02Points_Scalar(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_macro.fly"),optimize=False)
x=Solution(dom).getX()
x_n=ContinuousFunction(dom).getX()
self.check_silo("tet_2D_o2_node_2xs", data_n=x_n[0], data=x[0])
def test_tet_2D_macro_2Cells_Scalar(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_macro.fly"),optimize=False)
x=Function(dom).getX()
x_b=FunctionOnBoundary(dom).getX()
self.check_silo("tet_2D_cell_2xs", data_b=x_b[0], data=x[0])
def test_tet_2D_macro_BoundaryPoint_Scalar(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_macro.fly"),optimize=False)
x=ContinuousFunction(dom).getX()
x_b=FunctionOnBoundary(dom).getX()
self.check_silo("tet_2D_boundary_2xs", data_b=x_b[0], data=x[0])
def test_tet_2D_macro_CellsPoints_AllData(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_macro.fly"),optimize=False)
x_c=Function(dom).getX()
x_p=ContinuousFunction(dom).getX()
self.check_silo("tet_2D_macro_cellnode_all",
data_sp=x_p[0],
data_vp=x_p[0]*[1.,2.],
data_tp=x_p[0]*[[11.,12.],[21.,22.]],
data_sc=x_c[0],
data_vc=x_c[0]*[1.,2.],
data_tc=x_c[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_macro_ContinuousFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_macro.fly"),optimize=False)
x=ContinuousFunction(dom).getX()
self.check_silo("tet_2D_o2_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_macro_Solution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_macro.fly"),optimize=False)
x=Solution(dom).getX()
self.check_silo("tet_2D_o2_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_macro_ReducedSolution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_macro.fly"),optimize=False)
x=ReducedSolution(dom).getX()
self.check_silo("tet_2D_o2_rnode", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_macro_Function(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_macro.fly"),optimize=False)
x=Function(dom).getX()
self.check_silo("tet_2D_macro_cell", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_macro_ReducedFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_macro.fly"),optimize=False)
x=ReducedFunction(dom).getX()
self.check_silo("tet_2D_macro_rcell", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_macro_FunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_macro.fly"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("tet_2D_macro_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_macro_ReducedFunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_macro.fly"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("tet_2D_macro_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
# === Finley tet 3D order 1 =================================================
def test_tet_3D_order1_ContinuousFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_order1.fly"),optimize=False)
x=ContinuousFunction(dom).getX()
self.check_silo("tet_3D_o1_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_order1_Solution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_order1.fly"),optimize=False)
x=Solution(dom).getX()
self.check_silo("tet_3D_o1_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_order1_ReducedSolution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_order1.fly"),optimize=False)
x=ReducedSolution(dom).getX()
self.check_silo("tet_3D_o1_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_order1_Function(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_order1.fly"),optimize=False)
x=Function(dom).getX()
self.check_silo("tet_3D_o1_cell", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_order1_ReducedFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_order1.fly"),optimize=False)
x=ReducedFunction(dom).getX()
self.check_silo("tet_3D_o1_cell", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_order1_FunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_order1.fly"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("tet_3D_o1_boundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_order1_ReducedFunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_order1.fly"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("tet_3D_o1_boundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
# === Finley tet 3D order 2 =================================================
def test_tet_3D_order2_ContinuousFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_order2.fly"),optimize=False)
x=ContinuousFunction(dom).getX()
self.check_silo("tet_3D_o2_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_order2_Solution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_order2.fly"),optimize=False)
x=Solution(dom).getX()
self.check_silo("tet_3D_o2_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_order2_ReducedSolution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_order2.fly"),optimize=False)
x=ReducedSolution(dom).getX()
self.check_silo("tet_3D_o2_rnode", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_order2_Function(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_order2.fly"),optimize=False)
x=Function(dom).getX()
self.check_silo("tet_3D_o2_cell", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_order2_ReducedFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_order2.fly"),optimize=False)
x=ReducedFunction(dom).getX()
self.check_silo("tet_3D_o2_rcell", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_order2_FunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_order2.fly"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("tet_3D_o2_boundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_order2_ReducedFunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_order2.fly"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("tet_3D_o2_rboundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
# === Finley tet 3D macro ===================================================
def test_tet_3D_macro_ContinuousFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_macro.fly"),optimize=False)
x=ContinuousFunction(dom).getX()
self.check_silo("tet_3D_o2_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_macro_Solution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_macro.fly"),optimize=False)
x=Solution(dom).getX()
self.check_silo("tet_3D_o2_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_macro_ReducedSolution(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_macro.fly"),optimize=False)
x=ReducedSolution(dom).getX()
self.check_silo("tet_3D_o2_rnode", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_macro_Function(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_macro.fly"),optimize=False)
x=Function(dom).getX()
self.check_silo("tet_3D_macro_cell", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_macro_ReducedFunction(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_macro.fly"),optimize=False)
x=ReducedFunction(dom).getX()
self.check_silo("tet_3D_macro_rcell", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_macro_FunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_macro.fly"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("tet_3D_macro_boundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_macro_ReducedFunctionOnBoundary(self):
dom=finley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_macro.fly"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("tet_3D_macro_rboundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
@unittest.skipIf(getMPISizeWorld()>1, "MPI size > 1")
@unittest.skipIf(not HAVE_DUDLEY, "dudley module not available")
@unittest.skipIf(not HAVE_SILO, "Silo module not available")
class Test_Dudley_SaveSilo(SiloSaver):
# === Dudley 2D =============================================================
def test_tet_2D_dudley_ContinuousFunction(self):
dom=dudley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_dudley.fly"),optimize=False)
x=ContinuousFunction(dom).getX()
self.check_silo("tet_2D_o1_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_dudley_Solution(self):
dom=dudley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_dudley.fly"),optimize=False)
x=Solution(dom).getX()
self.check_silo("tet_2D_o1_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_dudley_ReducedSolution(self):
dom=dudley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_dudley.fly"),optimize=False)
x=ReducedSolution(dom).getX()
self.check_silo("tet_2D_o1_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_dudley_Function(self):
dom=dudley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_dudley.fly"),optimize=False)
x=Function(dom).getX()
self.check_silo("tet_2D_o1_cell", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_dudley_ReducedFunction(self):
dom=dudley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_dudley.fly"),optimize=False)
x=ReducedFunction(dom).getX()
self.check_silo("tet_2D_o1_cell", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_dudley_FunctionOnBoundary(self):
dom=dudley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_dudley.fly"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("tet_2D_o1_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_tet_2D_dudley_ReducedFunctionOnBoundary(self):
dom=dudley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_2D_dudley.fly"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("tet_2D_o1_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
# === Dudley 3D =============================================================
def test_tet_3D_dudley_ContinuousFunction(self):
dom=dudley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_dudley.fly"),optimize=False)
x=ContinuousFunction(dom).getX()
self.check_silo("tet_3D_o1_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_dudley_Solution(self):
dom=dudley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_dudley.fly"),optimize=False)
x=Solution(dom).getX()
self.check_silo("tet_3D_o1_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_dudley_ReducedSolution(self):
dom=dudley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_dudley.fly"),optimize=False)
x=ReducedSolution(dom).getX()
self.check_silo("tet_3D_o1_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_dudley_Function(self):
dom=dudley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_dudley.fly"),optimize=False)
x=Function(dom).getX()
self.check_silo("tet_3D_o1_cell", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_dudley_ReducedFunction(self):
dom=dudley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_dudley.fly"),optimize=False)
x=ReducedFunction(dom).getX()
self.check_silo("tet_3D_o1_cell", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_dudley_FunctionOnBoundary(self):
dom=dudley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_dudley.fly"),optimize=False)
x=FunctionOnBoundary(dom).getX()
self.check_silo("tet_3D_o1_boundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_tet_3D_dudley_ReducedFunctionOnBoundary(self):
dom=dudley.ReadMesh(os.path.join(WEIPA_TEST_MESHES,"tet_3D_dudley.fly"),optimize=False)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("tet_3D_o1_boundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
@unittest.skipIf(getMPISizeWorld()>1, "MPI size > 1")
@unittest.skipIf(not HAVE_RIPLEY, "ripley module not available")
@unittest.skipIf(not HAVE_SILO, "Silo module not available")
class Test_Ripley_SaveSilo(SiloSaver):
# === Ripley 2D =============================================================
def test_ripley_2D_ContinuousFunction(self):
dom=ripley.Rectangle(n0=11, n1=3, l0=(-2.5,8.0), l1=(1.2,3.8), d0=getMPISizeWorld())
x=ContinuousFunction(dom).getX()
self.check_silo("ripley_2D_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_ripley_2D_Solution(self):
dom=ripley.Rectangle(n0=11, n1=3, l0=(-2.5,8.0), l1=(1.2,3.8), d0=getMPISizeWorld())
x=Solution(dom).getX()
self.check_silo("ripley_2D_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_ripley_2D_ReducedSolution(self):
dom=ripley.Rectangle(n0=11, n1=3, l0=(-2.5,8.0), l1=(1.2,3.8), d0=getMPISizeWorld())
x=ReducedSolution(dom).getX()
self.check_silo("ripley_2D_node", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_ripley_2D_Function(self):
dom=ripley.Rectangle(n0=11, n1=3, l0=(-2.5,8.0), l1=(1.2,3.8), d0=getMPISizeWorld())
x=Function(dom).getX()
self.check_silo("ripley_2D_cell", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_ripley_2D_ReducedFunction(self):
dom=ripley.Rectangle(n0=11, n1=3, l0=(-2.5,8.0), l1=(1.2,3.8), d0=getMPISizeWorld())
x=ReducedFunction(dom).getX()
self.check_silo("ripley_2D_cell", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_ripley_2D_FunctionOnBoundary(self):
dom=ripley.Rectangle(n0=11, n1=3, l0=(-2.5,8.0), l1=(1.2,3.8), d0=getMPISizeWorld())
x=FunctionOnBoundary(dom).getX()
self.check_silo("ripley_2D_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
def test_ripley_2D_ReducedFunctionOnBoundary(self):
dom=ripley.Rectangle(n0=11, n1=3, l0=(-2.5,8.0), l1=(1.2,3.8), d0=getMPISizeWorld())
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("ripley_2D_boundary", data_s=x[0], data_v=x[0]*[1.,2.],
data_t=x[0]*[[11.,12.],[21.,22.]])
# === Ripley 3D =============================================================
def test_ripley_3D_ContinuousFunction(self):
dom=ripley.Brick(n0=11, n1=3, n2=2, l0=(-2.5,7.0), l1=(1.2,3.8), l2=4., d0=getMPISizeWorld(), d1=1, d2=1)
x=ContinuousFunction(dom).getX()
self.check_silo("ripley_3D_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_ripley_3D_Solution(self):
dom=ripley.Brick(n0=11, n1=3, n2=2, l0=(-2.5,7.0), l1=(1.2,3.8), l2=4., d0=getMPISizeWorld(), d1=1, d2=1)
x=Solution(dom).getX()
self.check_silo("ripley_3D_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_ripley_3D_ReducedSolution(self):
dom=ripley.Brick(n0=11, n1=3, n2=2, l0=(-2.5,7.0), l1=(1.2,3.8), l2=4., d0=getMPISizeWorld(), d1=1, d2=1)
x=ReducedSolution(dom).getX()
self.check_silo("ripley_3D_node", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_ripley_3D_Function(self):
dom=ripley.Brick(n0=11, n1=3, n2=2, l0=(-2.5,7.0), l1=(1.2,3.8), l2=4., d0=getMPISizeWorld(), d1=1, d2=1)
x=Function(dom).getX()
self.check_silo("ripley_3D_cell", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_ripley_3D_ReducedFunction(self):
dom=ripley.Brick(n0=11, n1=3, n2=2, l0=(-2.5,7.0), l1=(1.2,3.8), l2=4., d0=getMPISizeWorld(), d1=1, d2=1)
x=ReducedFunction(dom).getX()
self.check_silo("ripley_3D_cell", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_ripley_3D_FunctionOnBoundary(self):
dom=ripley.Brick(n0=11, n1=3, n2=2, l0=(-2.5,7.0), l1=(1.2,3.8), l2=4., d0=getMPISizeWorld(), d1=1, d2=1)
x=FunctionOnBoundary(dom).getX()
self.check_silo("ripley_3D_boundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
def test_ripley_3D_ReducedFunctionOnBoundary(self):
dom=ripley.Brick(n0=11, n1=3, n2=2, l0=(-2.5,7.0), l1=(1.2,3.8), l2=4., d0=getMPISizeWorld(), d1=1, d2=1)
x=ReducedFunctionOnBoundary(dom).getX()
self.check_silo("ripley_3D_boundary", data_s=x[0], data_v=x[0]*[1.,2.,3.],
data_t=x[0]*[[11.,12.,13.],[21.,22.,23.],[31.,32.,33.]])
if __name__ == '__main__':
run_tests(__name__, exit_on_failure=True)

# === Papadikis-2010/kinetics.py (repo: Lyle-zhang/literature, license: MIT) ===
"""
Kinetic Reaction Scheme Functions for Fast Pyrolysis of Biomass.
Each function implements a particular kinetic scheme.
The reference for each scheme is given as main author and publication year.
"""
# modules
# -----------------------------------------------------------------------------
import numpy as np
# Sadhukhan2009
# volatiles+gases, char, primary and secondary reactions
# -----------------------------------------------------------------------------
def kn1(T, pw, pc, pg, dt, i, H):
R = 0.008314 # universal gas constant, kJ/mol*K
    # A = pre-factor (1/s) and E = activation energy (kJ/mol)
A1 = 168.4; E1 = 51.965 # biomass -> volatiles + gases
A2 = 13.2; E2 = 45.960 # biomass -> char
A3 = 5.7e6; E3 = 92.4 # (vol+gases)1 -> (vol+gases)2
# evaluate reaction rate constant for each reaction, 1/s
K1 = A1 * np.exp(-E1 / (R * T[i])) # biomass -> volatiles + gases
K2 = A2 * np.exp(-E2 / (R * T[i])) # biomass -> char
K3 = A3 * np.exp(-E3 / (R * T[i])) # (vol+gases)1 -> (vol+gases)2
# determine reaction rate for each reaction, rho/s
rw = -(K1+K2) * pw[i-1] # wood rate
rg1 = K1 * pw[i-1] - K3*pg[i-1] * pc[i-1] # gas 1 rate
rc1 = K2 * pw[i-1] - K3*pg[i-1] * pc[i-1] # char 1 rate
rg2 = K3 * pg[i-1] * pc[i-1] # gas 2 rate
rc2 = K3 * pg[i-1] * pc[i-1] # char 2 rate
# update wood, char, gas concentration as a density, kg/m^3
pww = pw[i-1] + rw * dt # wood
pcc = pc[i-1] + (rc1 + rc2) * dt # char
pgg = pg[i-1] + (rg1 + rg2) * dt # gas
# calculate heat of generation term
rp = -K1*pww # rate of pyrolysis
g = H*rp # heat generation
# return the wood, char, gas concentration and the heat of generation
return pww, pcc, pgg, g
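# usage sketch for kn1 — a minimal explicit-Euler driver with assumed values
# (kept as comments so the module stays import-safe; a runnable demo for the
# full scheme kn4 follows at the end of the file):
#     nt = 5000; dt = 0.001                     # steps and step size, s
#     T = np.full(nt, 773.0)                    # isothermal particle, K
#     pw = np.zeros(nt); pw[0] = 700.0          # wood density, kg/m^3
#     pc = np.zeros(nt); pg = np.zeros(nt)      # char and gas densities
#     for i in range(1, nt):
#         pw[i], pc[i], pg[i], g = kn1(T, pw, pc, pg, dt, i, H=-255000.0)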
# Chan1985, Blasi1993b
# primary and secondary reactions
# -----------------------------------------------------------------------------
def kn2(T, pw, pc, pg, pt, dt, i, H):
R = 0.008314 # universal gas constant, kJ/mol*K
# A = pre-factor (1/s) and E = activation energy (kJ/mol)
A1 = 1.3e8; E1 = 140 # wood -> gas
A2 = 2e8; E2 = 133 # wood -> tar
A3 = 1.08e7; E3 = 121 # wood -> char
A4 = 4.28e6; E4 = 108 # tar -> gas
A5 = 1e6; E5 = 108 # tar -> char
# evaluate reaction rate constant for each reaction, 1/s
K1 = A1 * np.exp(-E1 / (R * T[i])) # wood -> gas
K2 = A2 * np.exp(-E2 / (R * T[i])) # wood -> tar
K3 = A3 * np.exp(-E3 / (R * T[i])) # wood -> char
K4 = A4 * np.exp(-E4 / (R * T[i])) # tar -> gas
K5 = A5 * np.exp(-E5 / (R * T[i])) # tar -> char
# determine reaction rate for each reaction, rho/s
rww = -(K1+K2+K3) * pw[i-1] # wood rate
rwg = K1 * pw[i-1] # wood -> gas rate
rwt = K2 * pw[i-1] # wood -> tar rate
rwc = K3 * pw[i-1] # wood -> char rate
rtg = K4 * pt[i-1] # tar -> gas rate
rtc = K5 * pt[i-1] # tar -> char rate
# update wood, char, gas concentration as a density, kg/m^3
pww = pw[i-1] + rww*dt # wood
pgg = pg[i-1] + (rwg + rtg)*dt # gas
ptt = pt[i-1] + (rwt - rtg - rtc)*dt # tar
pcc = pc[i-1] + (rwc + rtc)*dt # char
# calculate heat of generation term
g = H*rww # heat generation, W/m^3
# return the wood, char, gas, tar concentration and the heat generation
return pww, pcc, pgg, ptt, g
# Chan1985
# moisture content, heat of vaporization, no secondary reactions
# -----------------------------------------------------------------------------
def kn3(T, pw, pc, pg, pt, pwa, pva, dt, i, H):
R = 0.008314 # universal gas constant, kJ/mol*K
# A = pre-factor (1/s) and E = activation energy (kJ/mol)
A1 = 1.3e8; E1 = 140 # wood -> gas
A2 = 2e8; E2 = 133 # wood -> tar
A3 = 1.08e7; E3 = 121 # wood -> char
Aw = 5.13e6; Ew = 87.9 # water -> vapor
# evaluate reaction rate constant for each reaction, 1/s
K1 = A1 * np.exp(-E1 / (R * T[i])) # wood -> gas
K2 = A2 * np.exp(-E2 / (R * T[i])) # wood -> tar
K3 = A3 * np.exp(-E3 / (R * T[i])) # wood -> char
Kw = Aw * np.exp(-Ew / (R * T[i])) # water -> vapor
# determine reaction rate for each reaction, rho/s
rww = -(K1+K2+K3) * pw[i-1] # rate of wood pyrolysis
rwg = K1 * pw[i-1] # rate of wood -> gas
rwt = K2 * pw[i-1] # rate of wood -> tar
rwc = K3 * pw[i-1] # rate of wood -> char
rwa = -Kw * pwa[i-1] # rate of water vaporization
rva = Kw * pwa[i-1] # rate of water -> vapor
# update concentrations as a density, kg/m^3
pww = pw[i-1] + rww*dt # wood
pgg = pg[i-1] + rwg*dt # gas
ptt = pt[i-1] + rwt*dt # tar
pcc = pc[i-1] + rwc*dt # char
pwwa = pwa[i-1] + rwa*dt # water
pvva = pva[i-1] + rva*dt # vapor
# calculate heat of generation term
Hv = 2260000 # heat of vaporization, J/kg
g = H*rww + Hv*rwa # heat generation, W/m^3
# return wood, char, gas, tar, water, vapor concentration & heat generation
return pww, pcc, pgg, ptt, pwwa, pvva, g
# Chan1985, Blasi1993b
# moisture content, heat of vaporization, primary and secondary reactions
# -----------------------------------------------------------------------------
def kn4(T, pw, pc, pg, pt, pwa, pva, dt, i, H):
    R = 0.008314 # universal gas constant, kJ/mol*K
    # A = pre-factor (1/s) and E = activation energy (kJ/mol)
    A1 = 1.3e8; E1 = 140 # wood -> gas
    A2 = 2e8; E2 = 133 # wood -> tar
    A3 = 1.08e7; E3 = 121 # wood -> char
    A4 = 4.28e6; E4 = 108 # tar -> gas
    A5 = 1e6; E5 = 108 # tar -> char
    Aw = 5.13e6; Ew = 87.9 # water -> vapor
    # evaluate reaction rate constant for each reaction, 1/s
    K1 = A1 * np.exp(-E1 / (R * T[i])) # wood -> gas
    K2 = A2 * np.exp(-E2 / (R * T[i])) # wood -> tar
    K3 = A3 * np.exp(-E3 / (R * T[i])) # wood -> char
    K4 = A4 * np.exp(-E4 / (R * T[i])) # tar -> gas
    K5 = A5 * np.exp(-E5 / (R * T[i])) # tar -> char
    Kw = Aw * np.exp(-Ew / (R * T[i])) # water -> vapor
    # determine reaction rate for each reaction, kg/(m^3*s)
    rww = -(K1+K2+K3) * pw[i-1] # wood rate
    rwg = K1 * pw[i-1] # wood -> gas rate
    rwt = K2 * pw[i-1] # wood -> tar rate
    rwc = K3 * pw[i-1] # wood -> char rate
    rtg = K4 * pt[i-1] # tar -> gas rate
    rtc = K5 * pt[i-1] # tar -> char rate
    rwa = -Kw * pwa[i-1] # rate of water vaporization
    rva = Kw * pwa[i-1] # rate of water -> vapor
    # update wood, gas, tar, char, water, and vapor concentrations as a density, kg/m^3
    pww = pw[i-1] + rww*dt # wood
    pgg = pg[i-1] + (rwg + rtg)*dt # gas
    ptt = pt[i-1] + (rwt - rtg - rtc)*dt # tar
    pcc = pc[i-1] + (rwc + rtc)*dt # char
    pwwa = pwa[i-1] + rwa*dt # water
    pvva = pva[i-1] + rva*dt # vapor
    # calculate heat of generation term
    Hv = 2260000 # heat of vaporization, J/kg
    g = H*rww + Hv*rwa # heat generation, W/m^3
    # return wood, char, gas, tar, water, vapor concentrations & heat generation
    return pww, pcc, pgg, ptt, pwwa, pvva, g
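The kinetics functions above are written for an explicit forward-Euler march: each call reads the state at step i-1 and the temperature at step i, then returns the updated densities and the heat-generation term for step i. A minimal driver sketch for kn4 follows; the step count, time step, isothermal temperature history, initial densities, and heat of pyrolysis H are illustrative assumptions, not values taken from the source.
import numpy as np

nt = 2000                          # number of time steps (assumed)
dt = 0.01                          # time step, s (assumed)
T = np.full(nt, 773.15)            # isothermal history at 500 C, K (assumed)
pw = np.zeros(nt); pw[0] = 540.0   # wood density, kg/m^3 (assumed)
pc = np.zeros(nt)                  # char
pg = np.zeros(nt)                  # gas
pt = np.zeros(nt)                  # tar
pwa = np.zeros(nt); pwa[0] = 54.0  # moisture, kg/m^3 (assumed ~10 wt%)
pva = np.zeros(nt)                 # water vapor
H = -150e3                         # heat of pyrolysis, J/kg (assumed)

for i in range(1, nt):
    pw[i], pc[i], pg[i], pt[i], pwa[i], pva[i], g = kn4(
        T, pw, pc, pg, pt, pwa, pva, dt, i, H)

print(pw[-1], pc[-1], pg[-1], pt[-1])  # final densities after the march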
| 40.752688
| 79
| 0.475198
| 1,136
| 7,580
| 3.170775
| 0.139965
| 0.027762
| 0.021099
| 0.019989
| 0.827318
| 0.738201
| 0.716269
| 0.700722
| 0.672404
| 0.672404
| 0
| 0.074742
| 0.334565
| 7,580
| 186
| 80
| 40.752688
| 0.639374
| 0.460818
| 0
| 0.76
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0.01
| 0
| 0.09
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
82ef00ff1245d8223c653c27dc24c2a25c2e21a1
| 585
|
py
|
Python
|
src/the_tale/the_tale/accounts/jinjaglobals.py
|
al-arz/the-tale
|
542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5
|
[
"BSD-3-Clause"
] | null | null | null |
src/the_tale/the_tale/accounts/jinjaglobals.py
|
al-arz/the-tale
|
542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5
|
[
"BSD-3-Clause"
] | null | null | null |
src/the_tale/the_tale/accounts/jinjaglobals.py
|
al-arz/the-tale
|
542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5
|
[
"BSD-3-Clause"
] | null | null | null |
import smart_imports

smart_imports.all()


@utils_jinja2.jinjaglobal
def login_page_url(next_url='/'):
    return utils_jinja2.Markup(logic.login_page_url(next_url))


@utils_jinja2.jinjaglobal
def login_url(next_url='/'):
    return utils_jinja2.Markup(logic.login_url(next_url))


@utils_jinja2.jinjaglobal
def logout_url():
    return utils_jinja2.Markup(logic.logout_url())


@utils_jinja2.jinjaglobal
def register_url():
    return utils_jinja2.Markup(logic.register_url())


@utils_jinja2.jinjaglobal
def forum_complaint_theme():
    return conf.settings.FORUM_COMPLAINT_THEME
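These helpers wrap URLs in Markup so that Jinja2's autoescaping does not mangle them in templates: a next_url carrying a query string with & would otherwise render as &amp;. A standalone sketch of that effect, assuming utils_jinja2.Markup behaves like markupsafe.Markup (the class Jinja2 itself uses for autoescaping):
from markupsafe import Markup, escape

url = '/accounts/auth/login/?next=/a&b'
print(escape(url))   # /accounts/auth/login/?next=/a&amp;b  -- & gets escaped
print(Markup(url))   # /accounts/auth/login/?next=/a&b      -- left intact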
| 19.5
| 62
| 0.791453
| 81
| 585
| 5.358025
| 0.271605
| 0.228111
| 0.253456
| 0.288018
| 0.730415
| 0.502304
| 0.359447
| 0.198157
| 0.198157
| 0
| 0
| 0.017143
| 0.102564
| 585
| 29
| 63
| 20.172414
| 0.809524
| 0
| 0
| 0.294118
| 0
| 0
| 0.003425
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.294118
| false
| 0
| 0.117647
| 0.294118
| 0.705882
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
82f6751e314f2754a1a262f0744dbdda133f43c8
| 24
|
py
|
Python
|
addition_module/DMUE/preprocess/mtcnn/__init__.py
|
weihaoxie/FaceX-Zoo
|
db0b087e4f4d28152e172d6c8d3767a8870733b4
|
[
"Apache-2.0"
] | 1,329
|
2021-01-13T07:06:30.000Z
|
2022-03-31T07:23:39.000Z
|
addition_module/DMUE/preprocess/mtcnn/__init__.py
|
weihaoxie/FaceX-Zoo
|
db0b087e4f4d28152e172d6c8d3767a8870733b4
|
[
"Apache-2.0"
] | 115
|
2021-01-13T10:42:57.000Z
|
2022-03-28T03:57:52.000Z
|
addition_module/DMUE/preprocess/mtcnn/__init__.py
|
weihaoxie/FaceX-Zoo
|
db0b087e4f4d28152e172d6c8d3767a8870733b4
|
[
"Apache-2.0"
] | 351
|
2021-01-13T07:21:00.000Z
|
2022-03-29T14:11:39.000Z
|
from .mtcnn import MTCNN
| 24
| 24
| 0.833333
| 4
| 24
| 5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 24
| 1
| 24
| 24
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d236145a92f67ae103ebd15fe558992900888481
| 167
|
py
|
Python
|
tests/test_main.py
|
ronhanson/python-jobmanager-client
|
b59e3d5e29ee4312ca6c4bfc3c7bc953e2478b37
|
[
"MIT"
] | 1
|
2017-05-05T16:07:46.000Z
|
2017-05-05T16:07:46.000Z
|
tests/test_main.py
|
ronhanson/python-jobmanager-client
|
b59e3d5e29ee4312ca6c4bfc3c7bc953e2478b37
|
[
"MIT"
] | null | null | null |
tests/test_main.py
|
ronhanson/python-jobmanager-client
|
b59e3d5e29ee4312ca6c4bfc3c7bc953e2478b37
|
[
"MIT"
] | null | null | null |
import pytest


def f():
    raise SystemExit(1)


def test_mytest():
    import jobmanager
    import jobmanager.client

    with pytest.raises(SystemExit):
        f()
| 15.181818
| 35
| 0.664671
| 20
| 167
| 5.5
| 0.65
| 0.290909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007937
| 0.245509
| 167
| 10
| 36
| 16.7
| 0.865079
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.375
| 0
| 0.625
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d243641ebdb12d7c64609538757d4cf3ee2bf8d8
| 431
|
py
|
Python
|
ex108/ex108.py
|
jefernathan/Python
|
2f840a625e8d46d41ab36df07ef50ae15a03c5ab
|
[
"MIT"
] | null | null | null |
ex108/ex108.py
|
jefernathan/Python
|
2f840a625e8d46d41ab36df07ef50ae15a03c5ab
|
[
"MIT"
] | null | null | null |
ex108/ex108.py
|
jefernathan/Python
|
2f840a625e8d46d41ab36df07ef50ae15a03c5ab
|
[
"MIT"
] | null | null | null |
# Adapt the code from challenge #107 by creating an additional function called moeda() that can display numbers as a formatted monetary value.
import moeda

n = input('Enter a price: R$')
print(f'With a 10% increase it becomes {moeda.moeda(moeda.aumentar(n, 10))}')
print(f'With a 10% discount it becomes {moeda.moeda(moeda.dimimuir(n, 10))}')
print(f'Half is {moeda.moeda(moeda.metade(n))} and double is {moeda.moeda(moeda.dobro(n))}')
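The script imports a local moeda module from the same exercise, which is not shown here. A hypothetical sketch consistent with the names it calls follows; the dimimuir misspelling is kept because ex108.py calls it by that exact name, and the float() conversions are needed because input() returns a string.
# moeda.py -- hypothetical companion module for ex108.py
def aumentar(preco, taxa):
    # raise the price by taxa percent
    return float(preco) * (1 + taxa / 100)


def dimimuir(preco, taxa):
    # lower the price by taxa percent (misspelling preserved from the caller)
    return float(preco) * (1 - taxa / 100)


def dobro(preco):
    return float(preco) * 2


def metade(preco):
    return float(preco) / 2


def moeda(preco, simbolo='R$'):
    # format a number as a currency string, e.g. R$10.00
    return f'{simbolo}{float(preco):.2f}'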
| 47.888889
| 145
| 0.728538
| 75
| 431
| 4.186667
| 0.573333
| 0.254777
| 0.191083
| 0.070064
| 0.082803
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029333
| 0.12993
| 431
| 8
| 146
| 53.875
| 0.808
| 0.329466
| 0
| 0
| 0
| 0.2
| 0.77972
| 0.416084
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0.6
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
d2836e1a5330cc77cf15ecb576f0323416feab2e
| 111
|
py
|
Python
|
keris/activations/__init__.py
|
arrizalamin/keris
|
d6925cfc5ec918355fcc41351799ef419e233a2c
|
[
"MIT"
] | 4
|
2018-02-01T00:32:37.000Z
|
2018-06-28T07:09:47.000Z
|
keris/activations/__init__.py
|
arrizalamin/keris
|
d6925cfc5ec918355fcc41351799ef419e233a2c
|
[
"MIT"
] | null | null | null |
keris/activations/__init__.py
|
arrizalamin/keris
|
d6925cfc5ec918355fcc41351799ef419e233a2c
|
[
"MIT"
] | 1
|
2018-08-29T23:39:06.000Z
|
2018-08-29T23:39:06.000Z
|
from keris.activations.rectifier import ReLU, LeakyReLU, ELU
from keris.activations.probability import Softmax
| 37
| 60
| 0.855856
| 14
| 111
| 6.785714
| 0.714286
| 0.189474
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09009
| 111
| 2
| 61
| 55.5
| 0.940594
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
96364001095e9ce6ed383d5c005ce6e1287795f7
| 111
|
py
|
Python
|
keras/datasets/boston_housing.py
|
ikingye/keras
|
1a3ee8441933fc007be6b2beb47af67998d50737
|
[
"MIT"
] | 5
|
2020-11-30T22:26:03.000Z
|
2020-12-01T22:34:25.000Z
|
keras/datasets/boston_housing.py
|
ikingye/keras
|
1a3ee8441933fc007be6b2beb47af67998d50737
|
[
"MIT"
] | 10
|
2020-12-01T22:55:29.000Z
|
2020-12-11T18:31:46.000Z
|
keras/datasets/boston_housing.py
|
ikingye/keras
|
1a3ee8441933fc007be6b2beb47af67998d50737
|
[
"MIT"
] | 15
|
2020-11-30T22:12:22.000Z
|
2020-12-09T01:32:48.000Z
|
"""Boston housing price regression dataset."""
from tensorflow.keras.datasets.boston_housing import load_data
| 27.75
| 62
| 0.81982
| 14
| 111
| 6.357143
| 0.857143
| 0.292135
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09009
| 111
| 3
| 63
| 37
| 0.881188
| 0.36036
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
96391360bbc5c47eb92026d3da51c1512a345610
| 6,939
|
py
|
Python
|
django/sso/tests/test_user_roles.py
|
ArnaudGallardo/boss
|
c0d3bbca31575ac5442822b8d7f962def32d9072
|
[
"Apache-2.0"
] | null | null | null |
django/sso/tests/test_user_roles.py
|
ArnaudGallardo/boss
|
c0d3bbca31575ac5442822b8d7f962def32d9072
|
[
"Apache-2.0"
] | null | null | null |
django/sso/tests/test_user_roles.py
|
ArnaudGallardo/boss
|
c0d3bbca31575ac5442822b8d7f962def32d9072
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 The Johns Hopkins University Applied Physics Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import mock
import json
from .test_base import TestBase, raise_error
from sso.views.views_user import BossUserRole
from django.conf import settings
version = settings.BOSS_VERSION
class TestBossUserRole(TestBase):
@mock.patch('sso.views.views_user.KeyCloakClient', autospec = True)
def test_get_role_no_role(self, mKCC):
ctxMgr = mKCC.return_value.__enter__.return_value
ctxMgr.get_realm_roles.return_value = [{'name': 'test'},{'name': 'admin'}]
request = self.makeRequest(get='/' + version + '/sso/user-role/test')
response = BossUserRole.as_view()(request, 'test')
self.assertEqual(response.status_code, 200)
# Role 'test' will be filtered out by the view
self.assertEqual(response.data, ['admin'])
call = mock.call.get_realm_roles('test')
self.assertEqual(ctxMgr.mock_calls, [call])
@mock.patch('sso.views.views_user.KeyCloakClient', autospec = True)
def test_get_role_with_role(self, mKCC):
ctxMgr = mKCC.return_value.__enter__.return_value
ctxMgr.get_realm_roles.return_value = [{'name': 'test'},{'name': 'admin'}]
request = self.makeRequest(get='/' + version + '/sso/user-role/test/admin')
response = BossUserRole.as_view()(request, 'test', 'admin')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data, True)
call = mock.call.get_realm_roles('test')
self.assertEqual(ctxMgr.mock_calls, [call])
@mock.patch('sso.views.views_user.KeyCloakClient', autospec = True)
def test_get_role_with_role1(self, mKCC):
ctxMgr = mKCC.return_value.__enter__.return_value
ctxMgr.get_realm_roles.return_value = [{'name': 'test'},{'name': 'admin'}]
request = self.makeRequest(get='/' + version + '/sso/user-role/test/admin')
response = BossUserRole.as_view()(request, 'test', role_name='admin')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data, True)
call = mock.call.get_realm_roles('test')
self.assertEqual(ctxMgr.mock_calls, [call])
@mock.patch('sso.views.views_user.KeyCloakClient', autospec = True)
def test_failed_get_role_bad_role(self, mKCC):
ctxMgr = mKCC.return_value.__enter__.return_value
ctxMgr.get_realm_roles.return_value = [{'name': 'test'},{'name': 'admin'}]
request = self.makeRequest(get='/' + version + '/sso/user-role/test/test')
response = BossUserRole.as_view()(request, 'test', 'test')
self.assertEqual(response.status_code, 403)
@mock.patch('sso.views.views_user.KeyCloakClient', autospec = True)
def test_failed_get_role(self, mKCC):
ctxMgr = mKCC.return_value.__enter__.return_value
ctxMgr.get_realm_roles.side_effect = raise_error
request = self.makeRequest(get='/' + version + '/sso/user-role/test')
response = BossUserRole.as_view()(request, 'test')
self.assertEqual(response.status_code, 500)
@mock.patch('sso.views.views_user.KeyCloakClient', autospec = True)
def test_post_role(self, mKCC):
ctxMgr = mKCC.return_value.__enter__.return_value
request = self.makeRequest(post='/' + version + '/sso/user-role/test/resource-manager')
response = BossUserRole.as_view()(request, 'test', 'resource-manager')
self.assertEqual(response.status_code, 201)
self.assertIsNone(response.data)
call = mock.call.map_role_to_user('test', 'resource-manager')
self.assertEqual(ctxMgr.mock_calls, [call])
@mock.patch('sso.views.views_user.KeyCloakClient', autospec = True)
def test_failed_post_admin_role(self, mKCC):
"""The admin roles is not allowed to be assigned through the API"""
ctxMgr = mKCC.return_value.__enter__.return_value
request = self.makeRequest(post='/' + version + '/sso/user-role/test/admin')
response = BossUserRole.as_view()(request, 'test', 'admin')
self.assertEqual(response.status_code, 403)
@mock.patch('sso.views.views_user.KeyCloakClient', autospec = True)
def test_failed_post_role_bad_role(self, mKCC):
ctxMgr = mKCC.return_value.__enter__.return_value
request = self.makeRequest(post='/' + version + '/sso/user-role/test/test')
response = BossUserRole.as_view()(request, 'test', 'test')
self.assertEqual(response.status_code, 403)
@mock.patch('sso.views.views_user.KeyCloakClient', autospec = True)
def test_failed_post_role(self, mKCC):
ctxMgr = mKCC.return_value.__enter__.return_value
ctxMgr.map_role_to_user.side_effect = raise_error
request = self.makeRequest(post='/' + version + '/sso/user-role/test/resource-manager')
response = BossUserRole.as_view()(request, 'test', 'resource-manager')
self.assertEqual(response.status_code, 500)
@mock.patch('sso.views.views_user.KeyCloakClient', autospec = True)
def test_delete_role(self, mKCC):
ctxMgr = mKCC.return_value.__enter__.return_value
request = self.makeRequest(delete='/' + version + '/sso/user-role/test/resource-manager')
response = BossUserRole.as_view()(request, 'test', 'resource-manager')
self.assertEqual(response.status_code, 204)
self.assertIsNone(response.data)
call = mock.call.remove_role_from_user('test', 'resource-manager')
self.assertEqual(ctxMgr.mock_calls, [call])
@mock.patch('sso.views.views_user.KeyCloakClient', autospec = True)
def test_failed_delete_role_bad_role(self, mKCC):
ctxMgr = mKCC.return_value.__enter__.return_value
request = self.makeRequest(delete='/' + version + '/sso/user-role/test/test')
response = BossUserRole.as_view()(request, 'test', 'test')
self.assertEqual(response.status_code, 403)
@mock.patch('sso.views.views_user.KeyCloakClient', autospec = True)
def test_failed_delete_role(self, mKCC):
ctxMgr = mKCC.return_value.__enter__.return_value
ctxMgr.remove_role_from_user.side_effect = raise_error
request = self.makeRequest(delete='/' + version + '/sso/user-role/test/resource-manager')
response = BossUserRole.as_view()(request, 'test', 'resource-manager')
self.assertEqual(response.status_code, 500)
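Every test above configures the mock through two layers: calling the patched class yields mKCC.return_value, and entering the `with` block inside the view yields mKCC.return_value.__enter__.return_value, because KeyCloakClient is used as a context manager. A standalone sketch of that unittest.mock pattern; the Client class here is a hypothetical stand-in, not the real KeyCloakClient:
from unittest import mock


class Client:
    def __enter__(self):
        return self

    def __exit__(self, *exc):
        return False

    def get_realm_roles(self, user):
        raise RuntimeError('would hit the network')


with mock.patch(f'{__name__}.Client', autospec=True) as mClient:
    ctx = mClient.return_value.__enter__.return_value
    ctx.get_realm_roles.return_value = [{'name': 'admin'}]
    with Client() as c:                   # resolves to the mock, not the class
        print(c.get_realm_roles('test'))  # [{'name': 'admin'}]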
| 42.054545
| 97
| 0.696498
| 874
| 6,939
| 5.300915
| 0.160183
| 0.06648
| 0.074466
| 0.047701
| 0.812648
| 0.812648
| 0.812001
| 0.790417
| 0.780056
| 0.780056
| 0
| 0.007867
| 0.175674
| 6,939
| 164
| 98
| 42.310976
| 0.802098
| 0.100735
| 0
| 0.683168
| 0
| 0
| 0.164765
| 0.11429
| 0
| 0
| 0
| 0
| 0.217822
| 1
| 0.118812
| false
| 0
| 0.049505
| 0
| 0.178218
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
963f1cc3fb1b4180d0392ba99ee6d6e807ae1459
| 12,511
|
py
|
Python
|
src/datadog/azext_datadog/generated/_params.py
|
haroonf/azure-cli-extensions
|
61c044d34c224372f186934fa7c9313f1cd3a525
|
[
"MIT"
] | 207
|
2017-11-29T06:59:41.000Z
|
2022-03-31T10:00:53.000Z
|
src/datadog/azext_datadog/generated/_params.py
|
haroonf/azure-cli-extensions
|
61c044d34c224372f186934fa7c9313f1cd3a525
|
[
"MIT"
] | 4,061
|
2017-10-27T23:19:56.000Z
|
2022-03-31T23:18:30.000Z
|
src/datadog/azext_datadog/generated/_params.py
|
haroonf/azure-cli-extensions
|
61c044d34c224372f186934fa7c9313f1cd3a525
|
[
"MIT"
] | 802
|
2017-10-11T17:36:26.000Z
|
2022-03-31T22:24:32.000Z
|
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-lines
# pylint: disable=too-many-statements
from azure.cli.core.commands.parameters import (
tags_type,
get_three_state_flag,
get_enum_type,
resource_group_name_type,
get_location_type
)
from azure.cli.core.commands.validators import get_default_location_from_resource_group
from azext_datadog.action import (
AddMarketplaceagreementsProperties,
AddDatadogOrganizationProperties,
AddUserInfo,
AddFilteringTags,
AddLogRulesFilteringTags,
AddSinglesignonconfigurationsProperties
)
def load_arguments(self, _):
with self.argument_context('datadog terms create') as c:
c.argument('properties', action=AddMarketplaceagreementsProperties, nargs='+', help='Represents the properties '
'of the resource.')
with self.argument_context('datadog terms update') as c:
c.argument('properties', action=AddMarketplaceagreementsProperties, nargs='+', help='Represents the properties '
'of the resource.')
with self.argument_context('datadog monitor list') as c:
c.argument('resource_group_name', resource_group_name_type)
with self.argument_context('datadog monitor show') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', options_list=['--name', '-n', '--monitor-name'], type=str, help='Monitor resource '
'name', id_part='name')
with self.argument_context('datadog monitor create') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', options_list=['--name', '-n', '--monitor-name'], type=str, help='Monitor resource '
'name')
c.argument('tags', tags_type)
c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
validator=get_default_location_from_resource_group)
c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned']),
help='Identity type', arg_group='Identity')
c.argument('datadog_organization_properties', action=AddDatadogOrganizationProperties, nargs='+',
help='Datadog organization properties')
c.argument('user_info', action=AddUserInfo, nargs='+', help='User info')
c.argument('sku_name', type=str, help='Name of the SKU.', arg_group='Sku')
with self.argument_context('datadog monitor update') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', options_list=['--name', '-n', '--monitor-name'], type=str, help='Monitor resource '
'name', id_part='name')
c.argument('tags', tags_type)
c.argument('monitoring_status', type=str, help='Flag specifying if the resource monitoring is enabled or '
'disabled. Allowed values: "Enabled", "Disabled".')
c.argument('sku_name', type=str, help='Name of the SKU.', arg_group='Sku')
with self.argument_context('datadog monitor delete') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', options_list=['--name', '-n', '--monitor-name'], type=str, help='Monitor resource '
'name', id_part='name')
with self.argument_context('datadog monitor get-default-key') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', options_list=['--name', '-n', '--monitor-name'], type=str, help='Monitor resource '
'name', id_part='name')
with self.argument_context('datadog monitor list-api-key') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', options_list=['--name', '-n', '--monitor-name'], type=str, help='Monitor resource '
'name')
with self.argument_context('datadog monitor list-host') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', options_list=['--name', '-n', '--monitor-name'], type=str, help='Monitor resource '
'name')
with self.argument_context('datadog monitor list-linked-resource') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', options_list=['--name', '-n', '--monitor-name'], type=str, help='Monitor resource '
'name')
with self.argument_context('datadog monitor list-monitored-resource') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', options_list=['--name', '-n', '--monitor-name'], type=str, help='Monitor resource '
'name')
with self.argument_context('datadog monitor refresh-set-password-link') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', options_list=['--name', '-n', '--monitor-name'], type=str, help='Monitor resource '
'name', id_part='name')
with self.argument_context('datadog monitor set-default-key') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', type=str, help='Monitor resource name', id_part='name')
c.argument('created_by', type=str, help='The user that created the API key.')
c.argument('name', type=str, help='The name of the API key.')
c.argument('key', type=str, help='The value of the API key.')
c.argument('created', type=str, help='The time of creation of the API key.')
with self.argument_context('datadog monitor wait') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', options_list=['--name', '-n', '--monitor-name'], type=str, help='Monitor resource '
'name', id_part='name')
with self.argument_context('datadog tag-rule list') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', type=str, help='Monitor resource name')
with self.argument_context('datadog tag-rule show') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', type=str, help='Monitor resource name', id_part='name')
c.argument('rule_set_name', type=str, help='Rule set name', id_part='child_name_1')
with self.argument_context('datadog tag-rule create') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', type=str, help='Monitor resource name')
c.argument('rule_set_name', type=str, help='Rule set name')
c.argument('filtering_tags', action=AddFilteringTags, nargs='+', help='List of filtering tags to be used for '
'capturing metrics. If empty, all resources will be captured. If only Exclude action is specified, '
'the rules will apply to the list of all available resources. If Include actions are specified, the '
'rules will only include resources with the associated tags.', arg_group='Metric Rules')
c.argument('send_aad_logs', arg_type=get_three_state_flag(), help='Flag specifying if AAD logs should be sent '
'for the Monitor resource.', arg_group='Log Rules')
c.argument('send_subscription_logs', arg_type=get_three_state_flag(), help='Flag specifying if Azure '
'subscription logs should be sent for the Monitor resource.', arg_group='Log Rules')
c.argument('send_resource_logs', arg_type=get_three_state_flag(), help='Flag specifying if Azure resource logs '
'should be sent for the Monitor resource.', arg_group='Log Rules')
c.argument('log_rules_filtering_tags', action=AddLogRulesFilteringTags, nargs='+', help='List of filtering '
'tags to be used for capturing logs. This only takes effect if SendResourceLogs flag is enabled. If '
'empty, all resources will be captured. If only Exclude action is specified, the rules will apply '
'to the list of all available resources. If Include actions are specified, the rules will only '
'include resources with the associated tags.', arg_group='Log Rules')
with self.argument_context('datadog tag-rule update') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', type=str, help='Monitor resource name', id_part='name')
c.argument('rule_set_name', type=str, help='Rule set name', id_part='child_name_1')
c.argument('filtering_tags', action=AddFilteringTags, nargs='+', help='List of filtering tags to be used for '
'capturing metrics. If empty, all resources will be captured. If only Exclude action is specified, '
'the rules will apply to the list of all available resources. If Include actions are specified, the '
'rules will only include resources with the associated tags.', arg_group='Metric Rules')
c.argument('send_aad_logs', arg_type=get_three_state_flag(), help='Flag specifying if AAD logs should be sent '
'for the Monitor resource.', arg_group='Log Rules')
c.argument('send_subscription_logs', arg_type=get_three_state_flag(), help='Flag specifying if Azure '
'subscription logs should be sent for the Monitor resource.', arg_group='Log Rules')
c.argument('send_resource_logs', arg_type=get_three_state_flag(), help='Flag specifying if Azure resource logs '
'should be sent for the Monitor resource.', arg_group='Log Rules')
c.argument('log_rules_filtering_tags', action=AddLogRulesFilteringTags, nargs='+', help='List of filtering '
'tags to be used for capturing logs. This only takes effect if SendResourceLogs flag is enabled. If '
'empty, all resources will be captured. If only Exclude action is specified, the rules will apply '
'to the list of all available resources. If Include actions are specified, the rules will only '
'include resources with the associated tags.', arg_group='Log Rules')
c.ignore('body')
with self.argument_context('datadog sso-config list') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', type=str, help='Monitor resource name')
with self.argument_context('datadog sso-config show') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', type=str, help='Monitor resource name', id_part='name')
c.argument('configuration_name', type=str, help='Configuration name', id_part='child_name_1')
with self.argument_context('datadog sso-config create') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', type=str, help='Monitor resource name')
c.argument('configuration_name', type=str, help='Configuration name')
c.argument('properties', action=AddSinglesignonconfigurationsProperties, nargs='+', help='')
with self.argument_context('datadog sso-config update') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', type=str, help='Monitor resource name', id_part='name')
c.argument('configuration_name', type=str, help='Configuration name', id_part='child_name_1')
c.argument('properties', action=AddSinglesignonconfigurationsProperties, nargs='+', help='')
c.ignore('body')
with self.argument_context('datadog sso-config wait') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('monitor_name', type=str, help='Monitor resource name', id_part='name')
c.argument('configuration_name', type=str, help='Configuration name', id_part='child_name_1')
| 63.831633
| 120
| 0.673967
| 1,604
| 12,511
| 5.068579
| 0.106608
| 0.08524
| 0.094096
| 0.057196
| 0.851907
| 0.839729
| 0.812177
| 0.782042
| 0.773555
| 0.773555
| 0
| 0.000497
| 0.195908
| 12,511
| 195
| 121
| 64.158974
| 0.807654
| 0.040444
| 0
| 0.643312
| 0
| 0
| 0.411073
| 0.014258
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006369
| false
| 0.006369
| 0.019108
| 0
| 0.025478
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9671859b9a8e653e44bf1b10efec73bfb886ed5b
| 233
|
py
|
Python
|
selection/algorithms/api.py
|
Xiaoying-Tian/selective-inference
|
a20c5ad3f527beb709d5b8d7301016640738b092
|
[
"BSD-3-Clause"
] | null | null | null |
selection/algorithms/api.py
|
Xiaoying-Tian/selective-inference
|
a20c5ad3f527beb709d5b8d7301016640738b092
|
[
"BSD-3-Clause"
] | null | null | null |
selection/algorithms/api.py
|
Xiaoying-Tian/selective-inference
|
a20c5ad3f527beb709d5b8d7301016640738b092
|
[
"BSD-3-Clause"
] | 1
|
2019-07-13T04:14:12.000Z
|
2019-07-13T04:14:12.000Z
|
from .lasso import lasso, data_carving as data_carving_lasso
from .sqrt_lasso import (sqrt_lasso,
                         choose_lambda as choose_lambda_sqrt_lasso,
                         data_carving as data_carving_sqrt_lasso)
| 46.6
| 67
| 0.678112
| 30
| 233
| 4.833333
| 0.3
| 0.303448
| 0.22069
| 0.248276
| 0.4
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0.291845
| 233
| 4
| 68
| 58.25
| 0.878788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
96b4a51961735d8f06e9a21d565462f9089bb8b6
| 99
|
py
|
Python
|
inventory/views.py
|
piiok/ciberc
|
0b90c3786aeb817e2e164ade8924d4d12d84a266
|
[
"MIT"
] | null | null | null |
inventory/views.py
|
piiok/ciberc
|
0b90c3786aeb817e2e164ade8924d4d12d84a266
|
[
"MIT"
] | null | null | null |
inventory/views.py
|
piiok/ciberc
|
0b90c3786aeb817e2e164ade8924d4d12d84a266
|
[
"MIT"
] | null | null | null |
from django.shortcuts import redirect


def login_redirect(request):
    return redirect('admin/')
| 16.5
| 37
| 0.767677
| 12
| 99
| 6.25
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141414
| 99
| 5
| 38
| 19.8
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
7388428a0b38e68ab96708a740fde5d5044152b0
| 12,780
|
py
|
Python
|
scikits/odes/tests/test_on_funcs_ida.py
|
cklb/odes
|
945a461754c8155bca88aa83d725f77720a40539
|
[
"BSD-3-Clause"
] | 95
|
2015-02-12T15:33:24.000Z
|
2022-03-07T12:57:46.000Z
|
scikits/odes/tests/test_on_funcs_ida.py
|
cklb/odes
|
945a461754c8155bca88aa83d725f77720a40539
|
[
"BSD-3-Clause"
] | 89
|
2015-01-22T12:42:26.000Z
|
2022-01-09T17:02:49.000Z
|
scikits/odes/tests/test_on_funcs_ida.py
|
cklb/odes
|
945a461754c8155bca88aa83d725f77720a40539
|
[
"BSD-3-Clause"
] | 29
|
2015-01-30T09:20:56.000Z
|
2022-02-23T03:50:15.000Z
|
# Authors: B. Malengier, russel (scipy trac)
from __future__ import print_function
"""
Tests for differential algebraic equation solvers.
Here we test onroot and ontstop
"""
import numpy as np
from numpy import (arange, zeros, array, dot, sqrt, cos, sin, allclose,
empty, alen)
from numpy.testing import TestCase, run_module_suite
from scikits.odes import dae
from scikits.odes.sundials.ida import StatusEnumIDA
from scikits.odes.sundials.common_defs import DTYPE
#data
g = 9.81 # gravitational constant
Y0 = 1000.0 # Initial height
Y1 = 10.0 # Bottom height - when reached, changes to Y0 (teleport)
T1 = 10.0
v0 = 0.0 # Initial speed
#initial data at t=0, y[0] = Y, y[1] = \dot{Y}
y0 = [Y0, v0]
yp0 = [v0, -g]
t_end1 = 10.0 # Time of free fall for experiments 1,2
t_end2 = 100.0 # Time of free fall for experiments 3,4
atol = 1e-4
rtol = 1e-4
def rhs_fn(t, y, ydot, res):
""" rhs equations for the problem """
res[0] = ydot[0] - y[1]
res[1] = ydot[1] + g
def root_fn(t, y, ydot, out):
""" root function to check the object reached height Y1 """
out[0] = Y1 - y[0]
return 0
def root_fn2(t, y, ydot, out):
""" root function to check the object reached height Y1 """
out[0] = Y1 - y[0]
out[1] = (t-10)*(t-20)*(t-30)
return 0
def root_fn3(t, y, ydot, out):
""" root function to check the object reached time 10 """
out[0] = (t - 10)*(t-20)*(t-30)
return 0
def onroot_va(t, y, ydot, solver):
"""
onroot function to reset the solver back at the start, but keep the current
velocity
"""
# Teleport the object back to height Y0, but retain its speed
solver.reinit_IC(t, [Y0, y[1]], ydot)
return 0
def onroot_vb(t, y, ydot, solver):
"""
onroot function to stop solver when root is found
"""
return 1
def onroot_vc(t, y, ydot, solver):
"""
onroot function to reset the solver back at the start, but keep the current
velocity as long as the time is less than a given amount
"""
if t > 28: # we have found 4 interruption points, so we stop
return 1
solver.reinit_IC(t, [Y0, y[1]], ydot)
return 0
def onroot_vd(t, y, ydot, solver):
"""
onroot function to just continue if time <28
"""
if t > 28:
return 1
return 0
n=0
def ontstop_va(t, y, ydot, solver):
"""
ontstop function to reset the solver back at the start, but keep the current
velocity
"""
# Teleport the object back to height Y0, but retain its speed
global n
solver.reinit_IC(t, [Y0, y[1]], ydot)
n += 1
solver.set_options(tstop=T1+n*10)
return 0
def ontstop_vb(t, y, ydot, solver):
"""
ontstop function to stop solver when tstop is reached
"""
return 1
def ontstop_vc(t, y, ydot, solver):
"""
ontstop function to reset the solver back at the start, but keep the current
velocity as long as the time is less than a given amount
"""
global n
if t > 28: # we have found 3 interruption points, so we stop
return 1
solver.reinit_IC(t, [Y0, y[1]], ydot)
n += 1
solver.set_options(tstop=T1+n*10)
return 0
class TestOn(TestCase):
"""
Check integrate.dae
"""
def test_ida_rootfn_noroot(self):
#test calling sequence. End is reached before root is found
tspan = np.arange(0, t_end1 + 1, 1.0, DTYPE)
solver = dae('ida', rhs_fn, nr_rootfns=1, rootfn=root_fn,
old_api=False)
soln = solver.solve(tspan, y0, yp0)
assert soln.flag==StatusEnumIDA.SUCCESS, "ERROR: Error occurred"
assert allclose([soln.values.t[-1], soln.values.y[-1,0], soln.values.y[-1,1]],
[10.0, 509.4995, -98.10],
atol=atol, rtol=rtol)
def test_ida_rootfn(self):
#test root finding and stopping: End is reached at a root
tspan = np.arange(0, t_end2 + 1, 1.0, DTYPE)
solver = dae('ida', rhs_fn, nr_rootfns=1, rootfn=root_fn,
old_api=False)
soln = solver.solve(tspan, y0, yp0)
assert soln.flag==StatusEnumIDA.ROOT_RETURN, "ERROR: Root not found!"
assert allclose([soln.roots.t[0], soln.roots.y[0,0], soln.roots.y[0,1]],
[14.206856, 10.0000, -139.3693],
atol=atol, rtol=rtol)
def test_ida_rootfnacc(self):
#test root finding and accumulating: End is reached normally, roots stored
tspan = np.arange(0, t_end2 + 1, 1.0, DTYPE)
solver = dae('ida', rhs_fn, nr_rootfns=1, rootfn=root_fn,
onroot=onroot_va,
old_api=False)
soln = solver.solve(tspan, y0, yp0)
assert soln.flag==StatusEnumIDA.SUCCESS, "ERROR: Error occurred"
assert allclose([soln.values.t[-1], soln.values.y[-1,0], soln.values.y[-1,1]],
[100.0, 459.999, -981.0000],
atol=atol, rtol=rtol)
assert len(soln.roots.t) == 49, "ERROR: Did not find all 49 roots"
assert allclose([soln.roots.t[-1], soln.roots.y[-1,0], soln.roots.y[-1,1]],
[99.447910, 10.0000, -975.5840],
atol=atol, rtol=rtol)
def test_ida_rootfn_stop(self):
#test root finding and stopping: End is reached at a root with a function
tspan = np.arange(0, t_end2 + 1, 1.0, DTYPE)
solver = dae('ida', rhs_fn, nr_rootfns=1, rootfn=root_fn,
onroot=onroot_vb,
old_api=False)
soln = solver.solve(tspan, y0, yp0)
assert soln.flag==StatusEnumIDA.ROOT_RETURN, "ERROR: Root not found!"
assert allclose([soln.roots.t[-1], soln.roots.y[-1,0], soln.roots.y[-1,1]],
[14.206856, 10.0000, -139.3693],
atol=atol, rtol=rtol)
def test_ida_rootfn_test(self):
#test root finding and accumulating: End is reached after a number of roots
tspan = np.arange(0, t_end2 + 1, 1.0, DTYPE)
solver = dae('ida', rhs_fn, nr_rootfns=1, rootfn=root_fn,
onroot=onroot_vc,
old_api=False)
soln = solver.solve(tspan, y0, yp0)
assert soln.flag==StatusEnumIDA.ROOT_RETURN, "ERROR: Insufficient roots found"
assert allclose([soln.values.t[-1], soln.values.y[-1,0], soln.values.y[-1,1]],
[28.0, 124.4724, -274.6800],
atol=atol, rtol=rtol)
assert len(soln.roots.t) == 4, "ERROR: Did not find all 4 roots"
assert allclose([soln.roots.t[-1], soln.roots.y[-1,0], soln.roots.y[-1,1]],
[28.413692, 10.0000, -278.7383],
atol=atol, rtol=rtol)
def test_ida_rootfn_two(self):
#test two root finding
tspan = np.arange(0, t_end2 + 1, 1.0, DTYPE)
solver = dae('ida', rhs_fn, nr_rootfns=2, rootfn=root_fn2,
onroot=onroot_vc,
old_api=False)
soln = solver.solve(tspan, y0, yp0)
assert soln.flag==StatusEnumIDA.ROOT_RETURN, "ERROR: Insufficient roots found"
assert allclose([soln.values.t[-1], soln.values.y[-1,0], soln.values.y[-1,1]],
[28.0, 106.4753, -274.6800],
atol=atol, rtol=rtol)
assert len(soln.roots.t) == 5, "ERROR: Did not find all 5 roots"
assert allclose([soln.roots.t[-1], soln.roots.y[-1,0], soln.roots.y[-1,1]],
[28.349052, 10.0000, -278.1042],
atol=atol, rtol=rtol)
def test_ida_rootfn_end(self):
#test root finding with root at endtime
tspan = np.arange(0, 30 + 1, 1.0, DTYPE)
solver = dae('ida', rhs_fn, nr_rootfns=1, rootfn=root_fn3,
onroot=onroot_vc,
old_api=False)
soln = solver.solve(tspan, y0, yp0)
assert soln.flag==StatusEnumIDA.ROOT_RETURN, "ERROR: Insufficient roots found"
assert allclose([soln.values.t[-1], soln.values.y[-1,0], soln.values.y[-1,1]],
[30.0, -1452.5024, -294.3000],
atol=atol, rtol=rtol)
assert len(soln.roots.t) == 3, "ERROR: Did not find all 3 roots"
assert allclose([soln.roots.t[-1], soln.roots.y[-1,0], soln.roots.y[-1,1]],
[30.0, -1452.5024, -294.3000],
atol=atol, rtol=rtol)
def test_ida_tstopfn_notstop(self):
#test calling sequence. End is reached before tstop is found
global n
n = 0
tspan = np.arange(0, t_end1 + 1, 1.0, DTYPE)
solver = dae('ida', rhs_fn, tstop=T1+1, ontstop=ontstop_va,
old_api=False)
soln = solver.solve(tspan, y0, yp0)
assert soln.flag==StatusEnumIDA.SUCCESS, "ERROR: Error occurred"
assert allclose([soln.values.t[-1], soln.values.y[-1,0], soln.values.y[-1,1]],
[10.0, 509.4995, -98.10],
atol=atol, rtol=rtol)
def test_ida_tstopfn(self):
#test tstop finding and stopping: End is reached at a tstop
global n
n = 0
tspan = np.arange(0, t_end2 + 1, 1.0, DTYPE)
solver = dae('ida', rhs_fn, tstop=T1,
old_api=False)
soln = solver.solve(tspan, y0, yp0)
assert soln.flag==StatusEnumIDA.TSTOP_RETURN, "ERROR: Tstop not found!"
assert allclose([soln.tstop.t[0], soln.tstop.y[0,0], soln.tstop.y[0,1]],
[10.0, 509.4995, -98.10],
atol=atol, rtol=rtol)
assert allclose([soln.values.t[-1], soln.values.y[-1,0], soln.values.y[-1,1]],
[10.0, 509.4995, -98.10],
atol=atol, rtol=rtol)
def test_ida_tstopfnacc(self):
#test tstop finding and accumulating: End is reached normally, tstop stored
global n
n = 0
tspan = np.arange(0, t_end2 + 1, 1.0, DTYPE)
solver = dae('ida', rhs_fn, tstop=T1, ontstop=ontstop_va,
old_api=False)
soln = solver.solve(tspan, y0, yp0)
assert soln.flag==StatusEnumIDA.SUCCESS, "ERROR: Error occurred"
assert allclose([soln.values.t[-1], soln.values.y[-1,0], soln.values.y[-1,1]],
[100.0, -8319.5023, -981.00],
atol=atol, rtol=rtol)
assert len(soln.tstop.t) == 9, "ERROR: Did not find all tstop"
assert allclose([soln.tstop.t[-1], soln.tstop.y[-1,0], soln.tstop.y[-1,1]],
[90.0, -7338.5023, -882.90],
atol=atol, rtol=rtol)
def test_ida_tstopfn_stop(self):
#test calling sequence. End is reached at a tstop
global n
n = 0
tspan = np.arange(0, t_end2 + 1, 1.0, DTYPE)
solver = dae('ida', rhs_fn, tstop=T1, ontstop=ontstop_vb,
old_api=False)
soln = solver.solve(tspan, y0, yp0)
assert soln.flag==StatusEnumIDA.TSTOP_RETURN, "ERROR: Error occurred"
assert allclose([soln.values.t[-1], soln.values.y[-1,0], soln.values.y[-1,1]],
[10.0, 509.4995, -98.10],
atol=atol, rtol=rtol)
assert len(soln.tstop.t) == 1, "ERROR: Did not find all tstop"
assert len(soln.values.t) == 11, "ERROR: Did not find all output"
assert allclose([soln.tstop.t[-1], soln.tstop.y[-1,0], soln.tstop.y[-1,1]],
[10.0, 509.4995, -98.10],
atol=atol, rtol=rtol)
def test_ida_tstopfn_test(self):
#test calling sequence. tstop function continues up to a time
global n
n = 0
tspan = np.arange(0, t_end2 + 1, 1.0, DTYPE)
solver = dae('ida', rhs_fn, tstop=T1, ontstop=ontstop_vc,
old_api=False)
soln = solver.solve(tspan, y0, yp0)
assert soln.flag==StatusEnumIDA.TSTOP_RETURN, "ERROR: Error occurred"
assert allclose([soln.values.t[-1], soln.values.y[-1,0], soln.values.y[-1,1]],
[30.0, -1452.5024, -294.30],
atol=atol, rtol=rtol)
assert len(soln.tstop.t) == 3, "ERROR: Did not find all tstop"
assert len(soln.values.t) == 31, "ERROR: Did not find all output"
assert allclose([soln.tstop.t[-1], soln.tstop.y[-1,0], soln.tstop.y[-1,1]],
[30.0, -1452.5024, -294.30],
atol=atol, rtol=rtol)
if __name__ == "__main__":
try:
run_module_suite()
except NameError:
test = TestOn()
test.test_ida_rootfn_noroot()
test.test_ida_rootfn()
test.test_ida_rootfnacc()
test.test_ida_rootfn_stop()
test.test_ida_rootfn_test()
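The onroot and ontstop callbacks exercised above follow one contract: return 0 to let integration continue (typically after calling solver.reinit_IC or solver.set_options), or any nonzero value to stop with ROOT_RETURN / TSTOP_RETURN. A minimal sketch reusing the module's own rhs_fn, root_fn, and constants; it assumes scikits.odes with the SUNDIALS ida backend is installed:
def onroot_twice(t, y, ydot, solver):
    # stop at the second root; otherwise teleport back up and keep falling
    onroot_twice.count += 1
    if onroot_twice.count >= 2:
        return 1
    solver.reinit_IC(t, [Y0, y[1]], ydot)
    return 0
onroot_twice.count = 0

solver = dae('ida', rhs_fn, nr_rootfns=1, rootfn=root_fn,
             onroot=onroot_twice, old_api=False)
soln = solver.solve(np.arange(0, t_end2 + 1, 1.0, DTYPE), y0, yp0)
print(soln.flag, soln.roots.t)  # ROOT_RETURN and two root times expected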
| 39.323077
| 87
| 0.566667
| 1,892
| 12,780
| 3.749471
| 0.12685
| 0.011841
| 0.050747
| 0.033831
| 0.806879
| 0.77784
| 0.77333
| 0.739357
| 0.702988
| 0.666338
| 0
| 0.076338
| 0.300939
| 12,780
| 324
| 88
| 39.444444
| 0.717708
| 0.154538
| 0
| 0.621739
| 0
| 0
| 0.057455
| 0
| 0
| 0
| 0
| 0
| 0.178261
| 1
| 0.1
| false
| 0
| 0.030435
| 0
| 0.191304
| 0.004348
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
739d254024c10f35b9e4147520126a8fb18b9e33
| 105
|
py
|
Python
|
urls_and_templates/main_app/views.py
|
nrgxtra/web_basics
|
073ccb361af666c9fe2b3fa0b5cf74d721acb1b4
|
[
"MIT"
] | null | null | null |
urls_and_templates/main_app/views.py
|
nrgxtra/web_basics
|
073ccb361af666c9fe2b3fa0b5cf74d721acb1b4
|
[
"MIT"
] | null | null | null |
urls_and_templates/main_app/views.py
|
nrgxtra/web_basics
|
073ccb361af666c9fe2b3fa0b5cf74d721acb1b4
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render


def index(req):
    return render(req, 'main_app/index.html')
| 17.5
| 46
| 0.704762
| 15
| 105
| 4.866667
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 105
| 5
| 47
| 21
| 0.858824
| 0
| 0
| 0
| 0
| 0
| 0.19
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
73b62e8b2ee761e369ea8246dfe80069a336e724
| 75
|
py
|
Python
|
multi-output-glucose-forecasting/lib/__init__.py
|
LinjianMa/neuralODE-282
|
28d7b520beeeeb773e651110d1faedda6cb835e7
|
[
"MIT"
] | null | null | null |
multi-output-glucose-forecasting/lib/__init__.py
|
LinjianMa/neuralODE-282
|
28d7b520beeeeb773e651110d1faedda6cb835e7
|
[
"MIT"
] | null | null | null |
multi-output-glucose-forecasting/lib/__init__.py
|
LinjianMa/neuralODE-282
|
28d7b520beeeeb773e651110d1faedda6cb835e7
|
[
"MIT"
] | null | null | null |
from .glucose_dataset import *
from .trainer import *
from .model import *
| 18.75
| 30
| 0.76
| 10
| 75
| 5.6
| 0.6
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 75
| 3
| 31
| 25
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
73eca7476e43a6541882788ecd71b4711fb40035
| 104
|
py
|
Python
|
run.py
|
cygnus-dev/le-hamster
|
ae60c189076aca133b3717d850e19849400fb0c7
|
[
"MIT"
] | null | null | null |
run.py
|
cygnus-dev/le-hamster
|
ae60c189076aca133b3717d850e19849400fb0c7
|
[
"MIT"
] | null | null | null |
run.py
|
cygnus-dev/le-hamster
|
ae60c189076aca133b3717d850e19849400fb0c7
|
[
"MIT"
] | null | null | null |
import main
from dotenv import load_dotenv
import os
load_dotenv()
main.ham.run(os.getenv("BOT_TOKEN"))
| 17.333333
| 36
| 0.798077
| 18
| 104
| 4.444444
| 0.611111
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096154
| 104
| 6
| 36
| 17.333333
| 0.851064
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
73fdf074009b55eb5f6154788922f341009e547e
| 31
|
py
|
Python
|
mould/__init__.py
|
ramk95/mould-it
|
884db51715587dd79dd08fb09f0fe70e552a09fe
|
[
"MIT"
] | null | null | null |
mould/__init__.py
|
ramk95/mould-it
|
884db51715587dd79dd08fb09f0fe70e552a09fe
|
[
"MIT"
] | null | null | null |
mould/__init__.py
|
ramk95/mould-it
|
884db51715587dd79dd08fb09f0fe70e552a09fe
|
[
"MIT"
] | null | null | null |
from mould._executor import it
| 15.5
| 30
| 0.83871
| 5
| 31
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 1
| 31
| 31
| 0.925926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
fb47ff3ef691dd83fb75a5d3040be83c6d7298fc
| 10,102
|
py
|
Python
|
causalimpact/tests/test_inferences.py
|
cdutr/causalimpact-1
|
73b4b8f077626ae2999a88f5acf1731cce121407
|
[
"Apache-2.0"
] | 152
|
2016-11-02T20:39:43.000Z
|
2022-03-30T16:26:30.000Z
|
causalimpact/tests/test_inferences.py
|
cdutr/causalimpact-1
|
73b4b8f077626ae2999a88f5acf1731cce121407
|
[
"Apache-2.0"
] | 21
|
2016-12-07T16:44:42.000Z
|
2022-01-05T12:06:54.000Z
|
causalimpact/tests/test_inferences.py
|
cdutr/causalimpact-1
|
73b4b8f077626ae2999a88f5acf1731cce121407
|
[
"Apache-2.0"
] | 44
|
2016-10-06T18:59:45.000Z
|
2022-03-18T08:45:56.000Z
|
"""Unit Tests for inferences module"""
import pytest
import numpy as np
import pandas as pd
from pandas.testing import assert_series_equal
from statsmodels.tsa.statespace.structural import UnobservedComponents
from statsmodels.tsa.arima_process import ArmaProcess
import causalimpact
compile_posterior = causalimpact.inferences.compile_posterior_inferences
np.random.seed(1)
@pytest.fixture
def data():
ar = np.r_[1, 0.9]
ma = np.array([1])
arma_process = ArmaProcess(ar, ma)
X = 1 + arma_process.generate_sample(nsample=100)
X = X.reshape(-1, 1)
y = 1.2 * X + np.random.normal(size=(100, 1))
data = np.concatenate((y, X), axis=1)
data = pd.DataFrame(data)
return data
def test_compile_posterior_inferences_w_data(data):
pre_period = [0, 70]
post_period = [71, 100]
df_pre = data.loc[pre_period[0]: pre_period[1], :]
df_post = data.loc[post_period[0]: post_period[1], :]
post_period_response = None
alpha = 0.05
orig_std_params = (0., 1.)
model = UnobservedComponents(
endog=df_pre.iloc[:, 0].values,
level='llevel',
exog=df_pre.iloc[:, 1:].values
)
trained_model = model.fit()
inferences = compile_posterior(
trained_model,
data,
df_pre,
df_post,
post_period_response,
alpha,
orig_std_params
)
expected_response = pd.Series(data.iloc[:, 0], name='response')
assert_series_equal(expected_response, inferences['series']['response'])
expected_cumsum = pd.Series(
np.cumsum(expected_response),
name='cum_response'
)
assert_series_equal(expected_cumsum, inferences['series']['cum_response'])
predictor = trained_model.get_prediction()
forecaster = trained_model.get_forecast(
steps=len(df_post),
exog=df_post.iloc[:, 1].values.reshape(-1, 1),
alpha=alpha
)
pre_pred = predictor.predicted_mean
post_pred = forecaster.predicted_mean
point_pred = np.concatenate([pre_pred, post_pred])
expected_point_pred = pd.Series(point_pred, name='point_pred')
assert_series_equal(
expected_point_pred,
inferences['series']['point_pred']
)
pre_ci = pd.DataFrame(predictor.conf_int(alpha=alpha))
pre_ci.index = df_pre.index
post_ci = pd.DataFrame(forecaster.conf_int(alpha=alpha))
post_ci.index = df_post.index
ci = pd.concat([pre_ci, post_ci])
expected_pred_upper = ci.iloc[:, 1]
expected_pred_upper = expected_pred_upper.rename('point_pred_upper')
expected_pred_lower = ci.iloc[:, 0]
expected_pred_lower = expected_pred_lower.rename('point_pred_lower')
assert_series_equal(
expected_pred_upper,
inferences['series']['point_pred_upper']
)
assert_series_equal(
expected_pred_lower,
inferences['series']['point_pred_lower']
)
expected_cum_pred = pd.Series(
np.cumsum(point_pred),
name='cum_pred'
)
assert_series_equal(
expected_cum_pred,
inferences['series']['cum_pred']
)
expected_cum_pred_lower = pd.Series(
np.cumsum(expected_pred_lower),
name='cum_pred_lower'
)
assert_series_equal(
expected_cum_pred_lower,
inferences['series']['cum_pred_lower']
)
expected_cum_pred_upper = pd.Series(
np.cumsum(expected_pred_upper),
name='cum_pred_upper'
)
assert_series_equal(
expected_cum_pred_upper,
inferences['series']['cum_pred_upper']
)
expected_point_effect = pd.Series(
expected_response - expected_point_pred,
name='point_effect'
)
assert_series_equal(
expected_point_effect,
inferences['series']['point_effect']
)
expected_point_effect_lower = pd.Series(
expected_response - expected_pred_lower,
name='point_effect_lower'
)
assert_series_equal(
expected_point_effect_lower,
inferences['series']['point_effect_lower']
)
expected_point_effect_upper = pd.Series(
expected_response - expected_pred_upper,
name='point_effect_upper'
)
assert_series_equal(
expected_point_effect_upper,
inferences['series']['point_effect_upper']
)
expected_cum_effect = pd.Series(
np.concatenate((np.zeros(len(df_pre)),
np.cumsum(expected_point_effect.iloc[len(df_pre):]))),
name='cum_effect'
)
assert_series_equal(
expected_cum_effect,
inferences['series']['cum_effect']
)
expected_cum_effect_lower = pd.Series(
np.concatenate(
(np.zeros(len(df_pre)),
np.cumsum(expected_point_effect_lower.iloc[len(df_pre):]))),
name='cum_effect_lower'
)
assert_series_equal(
expected_cum_effect_lower,
inferences['series']['cum_effect_lower']
)
expected_cum_effect_upper = pd.Series(
np.concatenate((
np.zeros(len(df_pre)),
np.cumsum(expected_point_effect_upper.iloc[len(df_pre):])
)),
name='cum_effect_upper'
)
assert_series_equal(
expected_cum_effect_upper,
inferences['series']['cum_effect_upper']
)
def test_compile_posterior_inferences_w_post_period_response(data):
pre_period = [0, 70]
post_period = [71, 100]
df_pre = data.loc[pre_period[0]: pre_period[1], :]
df_post = data.loc[post_period[0]: post_period[1], :]
post_period_response = df_post.loc[post_period[0]: post_period[1]]
X = df_post.iloc[:, 1:]
y = X.copy()
y[:] = np.nan
df_post = pd.DataFrame(np.concatenate([y, X], axis=1))
data_index = data.index
data = pd.concat([df_pre, df_post], axis=0)
data.index = data_index
alpha = 0.05
orig_std_params = (0., 1.)
model = UnobservedComponents(
endog=data.iloc[:, 0].values,
level='llevel',
exog=data.iloc[:, 1:].values
)
trained_model = model.fit()
inferences = compile_posterior(
trained_model,
data,
df_pre,
None,
post_period_response,
alpha,
orig_std_params
)
expected_response = pd.Series(data.iloc[:, 0], name='response')
assert_series_equal(expected_response, inferences['series']['response'])
expected_cumsum = pd.Series(
np.cumsum(expected_response),
name='cum_response'
)
assert_series_equal(expected_cumsum, inferences['series']['cum_response'])
predictor = trained_model.get_prediction(end=len(df_pre) - 1)
forecaster = trained_model.get_prediction(start=len(df_pre))
pre_pred = predictor.predicted_mean
post_pred = forecaster.predicted_mean
point_pred = np.concatenate([pre_pred, post_pred])
expected_point_pred = pd.Series(point_pred, name='point_pred')
assert_series_equal(
expected_point_pred,
inferences['series']['point_pred']
)
pre_ci = pd.DataFrame(predictor.conf_int(alpha=alpha))
pre_ci.index = df_pre.index
post_ci = pd.DataFrame(forecaster.conf_int(alpha=alpha))
post_ci.index = df_post.index
ci = pd.concat([pre_ci, post_ci])
expected_pred_upper = ci.iloc[:, 1]
expected_pred_upper = expected_pred_upper.rename('point_pred_upper')
expected_pred_upper.index = data.index
expected_pred_lower = ci.iloc[:, 0]
expected_pred_lower = expected_pred_lower.rename('point_pred_lower')
expected_pred_lower.index = data.index
assert_series_equal(
expected_pred_upper,
inferences['series']['point_pred_upper']
)
assert_series_equal(
expected_pred_lower,
inferences['series']['point_pred_lower']
)
expected_cum_pred = pd.Series(
np.cumsum(point_pred),
name='cum_pred'
)
assert_series_equal(
expected_cum_pred,
inferences['series']['cum_pred']
)
expected_cum_pred_lower = pd.Series(
np.cumsum(expected_pred_lower),
name='cum_pred_lower'
)
assert_series_equal(
expected_cum_pred_lower,
inferences['series']['cum_pred_lower']
)
expected_cum_pred_upper = pd.Series(
np.cumsum(expected_pred_upper),
name='cum_pred_upper'
)
assert_series_equal(
expected_cum_pred_upper,
inferences['series']['cum_pred_upper']
)
expected_point_effect = pd.Series(
expected_response - expected_point_pred,
name='point_effect'
)
assert_series_equal(
expected_point_effect,
inferences['series']['point_effect']
)
expected_point_effect_lower = pd.Series(
expected_response - expected_pred_lower,
name='point_effect_lower'
)
assert_series_equal(
expected_point_effect_lower,
inferences['series']['point_effect_lower']
)
expected_point_effect_upper = pd.Series(
expected_response - expected_pred_upper,
name='point_effect_upper'
)
assert_series_equal(
expected_point_effect_upper,
inferences['series']['point_effect_upper']
)
expected_cum_effect = pd.Series(
np.concatenate((
np.zeros(len(df_pre)),
np.cumsum(expected_point_effect.iloc[len(df_pre):])
)),
name='cum_effect'
)
assert_series_equal(
expected_cum_effect,
inferences['series']['cum_effect']
)
expected_cum_effect_lower = pd.Series(
np.concatenate((
np.zeros(len(df_pre)),
np.cumsum(expected_point_effect_lower.iloc[len(df_pre):])
)),
name='cum_effect_lower'
)
assert_series_equal(
expected_cum_effect_lower,
inferences['series']['cum_effect_lower']
)
expected_cum_effect_upper = pd.Series(
np.concatenate((
np.zeros(len(df_pre)),
np.cumsum(expected_point_effect_upper.iloc[len(df_pre):])
)),
name='cum_effect_upper'
)
assert_series_equal(
expected_cum_effect_upper,
inferences['series']['cum_effect_upper']
)
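Both tests derive their expected series from the same handful of statsmodels calls, which can be exercised in isolation. A sketch with synthetic data; the shapes, pre/post split, and alpha mirror the tests, and everything else is assumed:
import numpy as np
from statsmodels.tsa.statespace.structural import UnobservedComponents

rng = np.random.default_rng(1)
X = rng.normal(size=(100, 1))
y = 1.2 * X[:, 0] + rng.normal(size=100)

model = UnobservedComponents(endog=y[:71], level='llevel', exog=X[:71])
res = model.fit(disp=False)
pred = res.get_prediction()                   # in-sample (pre-period) fit
fc = res.get_forecast(steps=29, exog=X[71:])  # out-of-sample (post-period)
point_pred = np.concatenate([pred.predicted_mean, fc.predicted_mean])
ci = np.vstack([pred.conf_int(alpha=0.05), fc.conf_int(alpha=0.05)])
print(point_pred.shape, ci.shape)             # (100,) (100, 2)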
| 26.938667
| 78
| 0.653633
| 1,237
| 10,102
| 4.95958
| 0.088116
| 0.05379
| 0.080359
| 0.114099
| 0.866341
| 0.862755
| 0.843195
| 0.831296
| 0.831296
| 0.831296
| 0
| 0.009197
| 0.235795
| 10,102
| 374
| 79
| 27.010695
| 0.785492
| 0.003168
| 0
| 0.666667
| 0
| 0
| 0.092617
| 0
| 0
| 0
| 0
| 0
| 0.094771
| 1
| 0.009804
| false
| 0
| 0.022876
| 0
| 0.035948
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
fb49122c19d9943f8d4e04362df2ec0fabefd1d0
| 199
|
py
|
Python
|
sign_app/models.py
|
ACienZ/DjangoWeb_eiheiheiProject
|
0d55637d8c71092279ebde9e1c486560f47b9e1a
|
[
"MIT"
] | 1
|
2018-06-22T11:13:05.000Z
|
2018-06-22T11:13:05.000Z
|
sign_app/models.py
|
ACienZ/DjangoWeb_eiheiheiProject
|
0d55637d8c71092279ebde9e1c486560f47b9e1a
|
[
"MIT"
] | null | null | null |
sign_app/models.py
|
ACienZ/DjangoWeb_eiheiheiProject
|
0d55637d8c71092279ebde9e1c486560f47b9e1a
|
[
"MIT"
] | 1
|
2018-06-22T11:14:03.000Z
|
2018-06-22T11:14:03.000Z
|
from django.db import models


# Create your models here.
class UserModel(models.Model):
    username = models.CharField(max_length=20, blank=False)
    passwd = models.CharField(max_length=20, blank=False)
| 33.166667
| 56
| 0.788945
| 29
| 199
| 5.344828
| 0.655172
| 0.193548
| 0.232258
| 0.309677
| 0.464516
| 0.464516
| 0.464516
| 0
| 0
| 0
| 0
| 0.022472
| 0.105528
| 199
| 6
| 57
| 33.166667
| 0.848315
| 0.120603
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.25
| 0.25
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
fb6c460dd51c64fedee2e27c2cbda9d433767f9a
| 49
|
py
|
Python
|
app/data/usecases/getters/__init__.py
|
lokaimoma/Bugza
|
93ffe344cb0be7dc4c45965f52798e02d05d320b
|
[
"Unlicense"
] | 2
|
2022-02-14T23:53:00.000Z
|
2022-03-24T12:19:49.000Z
|
app/data/usecases/getters/__init__.py
|
lokaimoma/Bugza
|
93ffe344cb0be7dc4c45965f52798e02d05d320b
|
[
"Unlicense"
] | null | null | null |
app/data/usecases/getters/__init__.py
|
lokaimoma/Bugza
|
93ffe344cb0be7dc4c45965f52798e02d05d320b
|
[
"Unlicense"
] | null | null | null |
# Created by Kelvin_Clark on 1/31/2022, 12:59 PM
| 24.5
| 48
| 0.734694
| 11
| 49
| 3.181818
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.268293
| 0.163265
| 49
| 1
| 49
| 49
| 0.585366
| 0.938776
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
fb87953a045395fdad4c2bba9ece2a360414346b
| 127
|
py
|
Python
|
app/blueprints/admin/__init__.py
|
neurothrone/project-dot
|
20889075611bed645689a76a30257f96e4b55988
|
[
"MIT"
] | null | null | null |
app/blueprints/admin/__init__.py
|
neurothrone/project-dot
|
20889075611bed645689a76a30257f96e4b55988
|
[
"MIT"
] | null | null | null |
app/blueprints/admin/__init__.py
|
neurothrone/project-dot
|
20889075611bed645689a76a30257f96e4b55988
|
[
"MIT"
] | null | null | null |
from flask import Blueprint
bp_admin = Blueprint(name="admin", import_name=__name__)
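# Importing routes only after the Blueprint is created avoids a circular import.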
from app.blueprints.admin import routes
| 21.166667
| 56
| 0.811024
| 18
| 127
| 5.388889
| 0.555556
| 0.226804
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110236
| 127
| 5
| 57
| 25.4
| 0.858407
| 0
| 0
| 0
| 0
| 0
| 0.03937
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
83c5e5b85240e7ec3a8a0d166a7ff427445f2e88
| 91
|
py
|
Python
|
src/sage/combinat/ncsf_qsym/all.py
|
bopopescu/sage-5
|
9d85b34956ca2edd55af307f99c5d3859acd30bf
|
[
"BSL-1.0"
] | 5
|
2015-01-04T07:15:06.000Z
|
2022-03-04T15:15:18.000Z
|
src/sage/combinat/ncsf_qsym/all.py
|
bopopescu/sage-5
|
9d85b34956ca2edd55af307f99c5d3859acd30bf
|
[
"BSL-1.0"
] | null | null | null |
src/sage/combinat/ncsf_qsym/all.py
|
bopopescu/sage-5
|
9d85b34956ca2edd55af307f99c5d3859acd30bf
|
[
"BSL-1.0"
] | 10
|
2016-09-28T13:12:40.000Z
|
2022-02-12T09:28:34.000Z
|
from qsym import QuasiSymmetricFunctions
from ncsf import NonCommutativeSymmetricFunctions
| 30.333333
| 49
| 0.912088
| 8
| 91
| 10.375
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087912
| 91
| 2
| 50
| 45.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
83ed787f6fb7e60dd2b3e3c5990ce9988a577196
| 22
|
py
|
Python
|
tests/__init__.py
|
friedi1/panama-dv
|
19bef7966d5f482c2ebd8ec5adb3d9f79ef30c94
|
[
"Apache-2.0"
] | null | null | null |
tests/__init__.py
|
friedi1/panama-dv
|
19bef7966d5f482c2ebd8ec5adb3d9f79ef30c94
|
[
"Apache-2.0"
] | 1
|
2015-09-03T22:18:30.000Z
|
2015-09-03T22:18:30.000Z
|
tests/__init__.py
|
Vauxoo/panama-dv
|
19bef7966d5f482c2ebd8ec5adb3d9f79ef30c94
|
[
"Apache-2.0"
] | 2
|
2016-04-27T04:02:24.000Z
|
2018-04-10T04:07:58.000Z
|
from . import test_ruc
| 22
| 22
| 0.818182
| 4
| 22
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 22
| 1
| 22
| 22
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
83f67d500026cb2ed1c305c71d41cd425e43fe48
| 147
|
py
|
Python
|
calculator.py
|
utsav-walia/Coding-Blocks-Python-
|
2343d4934e9685f6ccf41eb091161b488c42e7a9
|
[
"MIT"
] | null | null | null |
calculator.py
|
utsav-walia/Coding-Blocks-Python-
|
2343d4934e9685f6ccf41eb091161b488c42e7a9
|
[
"MIT"
] | null | null | null |
calculator.py
|
utsav-walia/Coding-Blocks-Python-
|
2343d4934e9685f6ccf41eb091161b488c42e7a9
|
[
"MIT"
] | null | null | null |
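# Minimal arithmetic helpers; div uses true division and raises ZeroDivisionError when b == 0.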
def add(a, b):
    return a + b
def sub(a, b):
    return a - b
def mul(a, b):
    return a * b
def div(a, b):
    return a / b
print(__name__)
| 11.307692
| 15
| 0.52381
| 30
| 147
| 2.433333
| 0.333333
| 0.219178
| 0.438356
| 0.493151
| 0.671233
| 0.534247
| 0
| 0
| 0
| 0
| 0
| 0
| 0.326531
| 147
| 13
| 16
| 11.307692
| 0.737374
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0
| 0
| 0.444444
| 0.888889
| 0.111111
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
f7cf37824237d1c4580bdd201bab20c65f967f09
| 248
|
py
|
Python
|
stoys/deps/common.py
|
stoys-io/stoys-python
|
c2095ff74152690b1d8ea799c5bc8910a71921fb
|
[
"Apache-2.0"
] | 1
|
2021-11-01T19:22:33.000Z
|
2021-11-01T19:22:33.000Z
|
stoys/deps/common.py
|
stoys-io/stoys-python
|
c2095ff74152690b1d8ea799c5bc8910a71921fb
|
[
"Apache-2.0"
] | null | null | null |
stoys/deps/common.py
|
stoys-io/stoys-python
|
c2095ff74152690b1d8ea799c5bc8910a71921fb
|
[
"Apache-2.0"
] | null | null | null |
import pathlib
from ..config import STOYS_EMBEDDED_DEPENDENCIES_PACKAGE
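# Three .parent hops climb from stoys/deps/common.py up to the repository root.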
_repository_dir = pathlib.Path(__file__).parent.parent.parent.resolve()
embedded_dependencies_dir = _repository_dir.joinpath(*STOYS_EMBEDDED_DEPENDENCIES_PACKAGE.split("."))
| 35.428571
| 101
| 0.850806
| 29
| 248
| 6.724138
| 0.551724
| 0.307692
| 0.25641
| 0.328205
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056452
| 248
| 6
| 102
| 41.333333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.004032
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
f7ea90f5a56b6fa290e860e6180c7fc20ff42110
| 2,208
|
py
|
Python
|
Environment/sire/testRandomNet.py
|
Mariaojruiz/Sibling-Rewiring
|
d70b96c33766d6177407e64bd733a004044351e1
|
[
"MIT"
] | null | null | null |
Environment/sire/testRandomNet.py
|
Mariaojruiz/Sibling-Rewiring
|
d70b96c33766d6177407e64bd733a004044351e1
|
[
"MIT"
] | 164
|
2020-11-01T09:21:12.000Z
|
2021-07-07T19:06:28.000Z
|
Environment/sire/testRandomNet.py
|
Mariaojruiz/Sibling-Rewiring
|
d70b96c33766d6177407e64bd733a004044351e1
|
[
"MIT"
] | 3
|
2021-07-08T15:11:48.000Z
|
2021-07-08T18:01:09.000Z
|
import networkx as nx
def test_same_graph_graphml():
    f1 = nx.read_graphml("net.graphml")
    f2 = nx.read_graphml("../Environment/sire/uploads/netUploaded.graphml")
    edges1 = f1.edges()
    edges2 = f2.edges()
    nodes1 = f1.nodes()
    nodes2 = f2.nodes()
    assert (nx.get_node_attributes(f1, 'Nombre')) == (nx.get_node_attributes(f2, 'Nombre')), "The nodes do not have the same names"
    assert (nx.get_node_attributes(f1, 'Etapa')) == (nx.get_node_attributes(f2, 'Etapa')), "The stages do not match"
    assert (nx.get_node_attributes(f1, 'Curso')) == (nx.get_node_attributes(f2, 'Curso')), "The courses do not match"
    assert (nx.get_node_attributes(f1, 'Clase')) == (nx.get_node_attributes(f2, 'Clase')), "The classes do not match"
    assert (len([i for i, j in zip(edges1, edges2) if i == j])) == len(edges1) == len(edges2), "The number of edges has changed"
    assert (len([i for i, j in zip(nodes1, nodes2) if i == j])) == len(nodes1) == len(nodes2), "The number of nodes has changed"
def test_same_graph_gexf():
    f1 = nx.read_gexf("randomGraphuploaded.gexf")
    f2 = nx.read_gexf("../Environment/sire/uploads/randomGraph.gexf")
    edges1 = f1.edges()
    edges2 = f2.edges()
    nodes1 = f1.nodes()
    nodes2 = f2.nodes()
    assert (nx.get_node_attributes(f1, 'Nombre')) == (nx.get_node_attributes(f2, 'Nombre')), "The nodes do not have the same names"
    assert (nx.get_node_attributes(f1, 'Etapa')) == (nx.get_node_attributes(f2, 'Etapa')), "The stages do not match"
    assert (nx.get_node_attributes(f1, 'Curso')) == (nx.get_node_attributes(f2, 'Curso')), "The courses do not match"
    assert (nx.get_node_attributes(f1, 'Clase')) == (nx.get_node_attributes(f2, 'Clase')), "The classes do not match"
    assert (len([i for i, j in zip(edges1, edges2) if i == j])) == len(edges1) == len(edges2), "The number of edges has changed"
    assert (len([i for i, j in zip(nodes1, nodes2) if i == j])) == len(nodes1) == len(nodes2), "The number of nodes has changed"
if __name__ == "__main__":
    test_same_graph_graphml()
    test_same_graph_gexf()  # fixed: the original omitted the parentheses, so this test was never called
    print("Graphs are exactly the same.")
| 49.066667
| 131
| 0.662138
| 325
| 2,208
| 4.326154
| 0.206154
| 0.056899
| 0.102418
| 0.216216
| 0.786629
| 0.786629
| 0.786629
| 0.786629
| 0.786629
| 0.786629
| 0
| 0.028698
| 0.179348
| 2,208
| 45
| 132
| 49.066667
| 0.747241
| 0
| 0
| 0.645161
| 0
| 0
| 0.28067
| 0.05206
| 0
| 0
| 0
| 0
| 0.387097
| 1
| 0.064516
| false
| 0
| 0.032258
| 0
| 0.096774
| 0.032258
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
79111c1fb6e4f1481046a422568be7c96c589604
| 444
|
py
|
Python
|
Geometry/CMSCommonData/python/cmsRecoIdealGeometryXML_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
Geometry/CMSCommonData/python/cmsRecoIdealGeometryXML_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
Geometry/CMSCommonData/python/cmsRecoIdealGeometryXML_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
import FWCore.ParameterSet.Config as cms
from Geometry.CMSCommonData.cmsRecoIdealGeometryXML_cfi import *
from Geometry.TrackerNumberingBuilder.trackerNumberingGeometry_cfi import *
from Geometry.EcalCommonData.ecalSimulationParameters_cff import *
from Geometry.HcalCommonData.hcalDDDSimConstants_cff import *
from Geometry.HcalCommonData.hcalDDDRecConstants_cfi import *
from Geometry.MuonNumbering.muonNumberingInitialization_cfi import *
| 44.4
| 75
| 0.887387
| 42
| 444
| 9.238095
| 0.5
| 0.185567
| 0.231959
| 0.162371
| 0.180412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067568
| 444
| 9
| 76
| 49.333333
| 0.937198
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7930903859be754fa0c69ff9e3f68d2181e6a7be
| 177
|
py
|
Python
|
python/8kyu/convert_a_string_to_an_array.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | 3
|
2021-06-08T01:57:13.000Z
|
2021-06-26T10:52:47.000Z
|
python/8kyu/convert_a_string_to_an_array.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | null | null | null |
python/8kyu/convert_a_string_to_an_array.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | 2
|
2021-06-10T21:20:13.000Z
|
2021-06-30T10:13:26.000Z
|
"""Kata url: https://www.codewars.com/kata/57e76bc428d6fbc2d500036d."""
from typing import List
def string_to_array(s: str) -> List[str]:
    return s.split() if s else ['']
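# Example: string_to_array("Robin Singh") == ["Robin", "Singh"], while string_to_array("") == [""].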
| 22.125
| 71
| 0.689266
| 26
| 177
| 4.615385
| 0.807692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098684
| 0.141243
| 177
| 7
| 72
| 25.285714
| 0.690789
| 0.367232
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
793a69c4516be50126a081abb979084240e7b97b
| 369
|
py
|
Python
|
hw/denis_asipenko/test_da_hw.py
|
alexander-sidorov/qap-05
|
6db7c0a1eeadd15f7d3f826e7f0ac4be3949ec8c
|
[
"MIT"
] | 9
|
2021-12-10T21:30:07.000Z
|
2022-02-25T21:32:34.000Z
|
hw/denis_asipenko/test_da_hw.py
|
alexander-sidorov/qap-05
|
6db7c0a1eeadd15f7d3f826e7f0ac4be3949ec8c
|
[
"MIT"
] | 22
|
2021-12-11T08:46:58.000Z
|
2022-02-02T15:56:37.000Z
|
hw/denis_asipenko/test_da_hw.py
|
alexander-sidorov/qap-05
|
6db7c0a1eeadd15f7d3f826e7f0ac4be3949ec8c
|
[
"MIT"
] | 8
|
2021-12-11T09:15:45.000Z
|
2022-02-02T08:09:09.000Z
|
from hw.denis_asipenko.func_da_hw import d
from hw.denis_asipenko.func_da_hw import e
from hw.denis_asipenko.func_da_hw import i
from hw.denis_asipenko.func_da_hw import n
from hw.denis_asipenko.func_da_hw import s
def test_denis() -> None:
    assert d() is True
    assert e() is False
    assert n() is None  # type: ignore
    assert i() < 0
    assert s() == ""
| 26.357143
| 42
| 0.723577
| 68
| 369
| 3.691176
| 0.323529
| 0.119522
| 0.219124
| 0.378486
| 0.657371
| 0.657371
| 0.657371
| 0.657371
| 0
| 0
| 0
| 0.003344
| 0.189702
| 369
| 13
| 43
| 28.384615
| 0.83612
| 0.03252
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.454545
| 1
| 0.090909
| true
| 0
| 0.454545
| 0
| 0.545455
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f7022f7075bdd6537b307688382d872a3f7fd177
| 53
|
py
|
Python
|
Interfaces/__init__.py
|
ahmadryan/TurbAn
|
b8866d103a2ca2f5fbad73bcd4416f19299f22b2
|
[
"BSD-2-Clause-Patent"
] | null | null | null |
Interfaces/__init__.py
|
ahmadryan/TurbAn
|
b8866d103a2ca2f5fbad73bcd4416f19299f22b2
|
[
"BSD-2-Clause-Patent"
] | null | null | null |
Interfaces/__init__.py
|
ahmadryan/TurbAn
|
b8866d103a2ca2f5fbad73bcd4416f19299f22b2
|
[
"BSD-2-Clause-Patent"
] | 10
|
2019-03-22T15:30:12.000Z
|
2021-02-10T02:55:50.000Z
|
from . import Simulations
from . import Spacecraft
| 17.666667
| 26
| 0.773585
| 6
| 53
| 6.833333
| 0.666667
| 0.487805
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.188679
| 53
| 2
| 27
| 26.5
| 0.953488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f725abd59251190289dabcf79ac0661b33697890
| 1,839
|
py
|
Python
|
alembic/versions/4f53ec506661_add_study_template.py
|
jonathanzong/dmca
|
70157cff983310e5951024aa80e99e7a5404d758
|
[
"MIT"
] | 2
|
2022-02-16T22:50:06.000Z
|
2022-02-21T19:38:02.000Z
|
alembic/versions/4f53ec506661_add_study_template.py
|
jonathanzong/dmca
|
70157cff983310e5951024aa80e99e7a5404d758
|
[
"MIT"
] | 2
|
2022-02-01T05:48:07.000Z
|
2022-02-01T05:49:29.000Z
|
alembic/versions/4f53ec506661_add_study_template.py
|
jonathanzong/bartleby
|
70157cff983310e5951024aa80e99e7a5404d758
|
[
"MIT"
] | null | null | null |
"""add study template
Revision ID: 4f53ec506661
Revises: 4302608638bc
Create Date: 2018-05-23 15:44:01.450488
"""
# revision identifiers, used by Alembic.
revision = '4f53ec506661'
down_revision = '4302608638bc'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_development():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('twitter_user_recruitment_tweet_attempt', sa.Column('study_template', sa.String(length=64), nullable=True))
# ### end Alembic commands ###
def downgrade_development():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('twitter_user_recruitment_tweet_attempt', 'study_template')
# ### end Alembic commands ###
def upgrade_test():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('twitter_user_recruitment_tweet_attempt', sa.Column('study_template', sa.String(length=64), nullable=True))
# ### end Alembic commands ###
def downgrade_test():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('twitter_user_recruitment_tweet_attempt', 'study_template')
# ### end Alembic commands ###
def upgrade_production():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('twitter_user_recruitment_tweet_attempt', sa.Column('study_template', sa.String(length=64), nullable=True))
# ### end Alembic commands ###
def downgrade_production():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('twitter_user_recruitment_tweet_attempt', 'study_template')
# ### end Alembic commands ###
| 28.292308
| 125
| 0.712887
| 221
| 1,839
| 5.701357
| 0.276018
| 0.072222
| 0.1
| 0.109524
| 0.734127
| 0.734127
| 0.734127
| 0.734127
| 0.734127
| 0.694444
| 0
| 0.041184
| 0.154976
| 1,839
| 64
| 126
| 28.734375
| 0.769627
| 0.330614
| 0
| 0.272727
| 0
| 0
| 0.314864
| 0.200528
| 0
| 0
| 0
| 0
| 0
| 1
| 0.363636
| false
| 0
| 0.090909
| 0
| 0.454545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f7926a108322aa714971a30131061866c4350a0f
| 70
|
py
|
Python
|
skcv/image/__init__.py
|
guillempalou/scikit-cv
|
66b5455f0097a158f0498b5cade4e8e8a0094c08
|
[
"BSD-3-Clause"
] | 6
|
2015-03-22T18:54:17.000Z
|
2021-02-25T02:11:36.000Z
|
skcv/image/__init__.py
|
guillempalou/scikit-cv
|
66b5455f0097a158f0498b5cade4e8e8a0094c08
|
[
"BSD-3-Clause"
] | null | null | null |
skcv/image/__init__.py
|
guillempalou/scikit-cv
|
66b5455f0097a158f0498b5cade4e8e8a0094c08
|
[
"BSD-3-Clause"
] | 1
|
2016-06-05T04:22:57.000Z
|
2016-06-05T04:22:57.000Z
|
from . import features
from . import segmentation
from . import filter
| 23.333333
| 26
| 0.8
| 9
| 70
| 6.222222
| 0.555556
| 0.535714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157143
| 70
| 3
| 27
| 23.333333
| 0.949153
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e3abdc4715e428a8ad0d9f8a2b835fb5ff2c60cc
| 588
|
py
|
Python
|
Modulo 2/lista07/1.py
|
BelfortJoao/Programacao-1
|
2d463744379ad3e4b0f5882ad923aae7ff80197a
|
[
"MIT"
] | 2
|
2021-08-17T14:02:13.000Z
|
2021-08-19T02:37:28.000Z
|
Modulo 2/lista07/1.py
|
BelfortJoao/Programacao-1
|
2d463744379ad3e4b0f5882ad923aae7ff80197a
|
[
"MIT"
] | null | null | null |
Modulo 2/lista07/1.py
|
BelfortJoao/Programacao-1
|
2d463744379ad3e4b0f5882ad923aae7ff80197a
|
[
"MIT"
] | 1
|
2021-09-05T20:18:45.000Z
|
2021-09-05T20:18:45.000Z
|
L = int(input())
Tot = 0
Med = 0
T = str(input()).upper()
# 12x12 zero matrix, written idiomatically instead of a single 300-character literal
M = [[0] * 12 for _ in range(12)]
for i in range(12):
    for j in range(12):
        M[i][j] = float(input())
for j in range(12):
    Tot += M[L][j]  # fixed: the original `=+` reassigned Tot each pass instead of accumulating
Med = Tot / 12
if T == "M":
    print('{:.1f}'.format(Med))
else:
    print('{:.1f}'.format(Tot))
| 36.75
| 317
| 0.479592
| 195
| 588
| 1.446154
| 0.112821
| 1.014184
| 1.510638
| 2
| 0.602837
| 0.510638
| 0.510638
| 0.510638
| 0.510638
| 0.510638
| 0
| 0.299424
| 0.113946
| 588
| 15
| 318
| 39.2
| 0.241843
| 0
| 0
| 0.133333
| 0
| 0
| 0.022109
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.133333
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e3da780313ab383b5252272e344eeea548cf03bc
| 2,788
|
py
|
Python
|
_unittests/ut_talk_examples/test_pydata2016.py
|
sdpython/jupytalk
|
34abdf128de24becb21a9f08f243c3a74dadbfd9
|
[
"MIT"
] | null | null | null |
_unittests/ut_talk_examples/test_pydata2016.py
|
sdpython/jupytalk
|
34abdf128de24becb21a9f08f243c3a74dadbfd9
|
[
"MIT"
] | 16
|
2016-11-13T19:52:35.000Z
|
2021-12-29T10:59:41.000Z
|
_unittests/ut_talk_examples/test_pydata2016.py
|
sdpython/jupytalk
|
34abdf128de24becb21a9f08f243c3a74dadbfd9
|
[
"MIT"
] | 4
|
2016-09-10T10:44:50.000Z
|
2021-09-22T16:28:56.000Z
|
"""
@brief test log(time=35s)
"""
import sys
import os
import unittest
import warnings
from pyquickhelper.loghelper import fLOG
from pyquickhelper.pycode import get_temp_folder, fix_tkinter_issues_virtualenv, ExtTestCase
from jupytalk.talk_examples.pydata2016 import example_networkx, example_confidence_interval, example_cartopy
class TestPyData2016(ExtTestCase):
def test_example_networkx(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
try:
import cairo
except ImportError as e:
warnings.warn("Unable to import cairo %r." % e)
return
temp = get_temp_folder(__file__, "temp_example_networkx")
fix_tkinter_issues_virtualenv()
import matplotlib.pyplot as plt
fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(8, 8))
example_networkx(ax=ax)
assert ax is not None
img = os.path.join(temp, "img.png")
fig.savefig(img)
self.assertExists(img)
if __name__ == "__main__":
fig.show()
plt.close('all')
fLOG("end")
def test_example_confidence_interval(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
try:
import cairo
except ImportError as e:
warnings.warn("Unable to import cairo %r." % e)
return
temp = get_temp_folder(__file__, "temp_example_networkx")
fix_tkinter_issues_virtualenv()
import matplotlib.pyplot as plt
fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(8, 8))
example_confidence_interval(ax=ax)
assert ax is not None
img = os.path.join(temp, "img.png")
fig.savefig(img)
self.assertExists(img)
if __name__ == "__main__":
fig.show()
plt.close('all')
fLOG("end")
def test_example_cartopy(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
try:
import cairo
except ImportError as e:
warnings.warn("Unable to import cairo %r." % e)
return
temp = get_temp_folder(__file__, "temp_example_networkx")
fix_tkinter_issues_virtualenv()
import matplotlib.pyplot as plt
fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(8, 8))
example_cartopy(ax=ax)
assert ax is not None
img = os.path.join(temp, "img.png")
fig.savefig(img)
self.assertExists(img)
if __name__ == "__main__":
fig.show()
plt.close('all')
fLOG("end")
if __name__ == "__main__":
unittest.main()
| 30.637363
| 108
| 0.598278
| 317
| 2,788
| 4.867508
| 0.246057
| 0.036293
| 0.033701
| 0.067401
| 0.737524
| 0.737524
| 0.737524
| 0.737524
| 0.737524
| 0.737524
| 0
| 0.011358
| 0.305237
| 2,788
| 90
| 109
| 30.977778
| 0.785235
| 0.01076
| 0
| 0.810127
| 0
| 0
| 0.085818
| 0.022909
| 0
| 0
| 0
| 0
| 0.075949
| 1
| 0.037975
| false
| 0
| 0.240506
| 0
| 0.329114
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e3fec3a0f4b26fe3fe830abf8f89408eeb9ef174
| 128
|
py
|
Python
|
models/__init__.py
|
xinyandai/structural-nn
|
373cec9ca2ee766ddb1d2a09eac4dd551d57e648
|
[
"MIT"
] | 2
|
2020-01-01T05:21:58.000Z
|
2020-01-02T02:06:19.000Z
|
models/__init__.py
|
xinyandai/structural-nn
|
373cec9ca2ee766ddb1d2a09eac4dd551d57e648
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
xinyandai/structural-nn
|
373cec9ca2ee766ddb1d2a09eac4dd551d57e648
|
[
"MIT"
] | null | null | null |
from .fcn import *
from .cnn import *
from .resnet import *
from .vgg import *
from .densenet import *
from .wideresnet import *
| 21.333333
| 25
| 0.726563
| 18
| 128
| 5.166667
| 0.444444
| 0.537634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179688
| 128
| 6
| 25
| 21.333333
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
587d248745a485a125a18c8a437d0cae29e87553
| 41
|
py
|
Python
|
monitor/dump_daily.py
|
mikroncoin/mikron_restapi_py
|
79cd47c8f26615ccd27c9764c92299f8cebd578a
|
[
"BSD-2-Clause"
] | null | null | null |
monitor/dump_daily.py
|
mikroncoin/mikron_restapi_py
|
79cd47c8f26615ccd27c9764c92299f8cebd578a
|
[
"BSD-2-Clause"
] | 6
|
2018-09-27T07:12:28.000Z
|
2019-08-14T10:13:13.000Z
|
monitor/dump_daily.py
|
mikroncoin/mikron_restapi_py
|
79cd47c8f26615ccd27c9764c92299f8cebd578a
|
[
"BSD-2-Clause"
] | null | null | null |
import dump
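# 86400 seconds == 24 hours, i.e. presumably one day's worth of data.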
dump.dump_compressed(86400)
| 10.25
| 27
| 0.829268
| 6
| 41
| 5.5
| 0.666667
| 0.484848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135135
| 0.097561
| 41
| 3
| 28
| 13.666667
| 0.756757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
587efceddf89b43943d2b187c37dd2a627e9df51
| 31
|
py
|
Python
|
torch_kalman/kalman_filter/__init__.py
|
strongio/torch_kalman
|
9b6632687ddc6705a991cce85e17b163eb227c63
|
[
"MIT"
] | 105
|
2018-04-07T23:51:37.000Z
|
2022-03-29T11:51:31.000Z
|
torch_kalman/kalman_filter/__init__.py
|
strongio/torch-kalman
|
9b6632687ddc6705a991cce85e17b163eb227c63
|
[
"MIT"
] | 26
|
2018-04-01T17:02:51.000Z
|
2020-02-07T22:31:43.000Z
|
torch_kalman/kalman_filter/__init__.py
|
strongio/torch_kalman
|
9b6632687ddc6705a991cce85e17b163eb227c63
|
[
"MIT"
] | 14
|
2019-06-21T15:11:47.000Z
|
2021-12-02T15:40:02.000Z
|
from .base import KalmanFilter
| 15.5
| 30
| 0.83871
| 4
| 31
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 1
| 31
| 31
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
58a739c9a45b6f10a554f30be57e319c4089cdf3
| 211
|
py
|
Python
|
qqbot/model/ws_context.py
|
tencent-connect/botpy
|
275f96f0859b63110b095711838c738ad6a9cc1e
|
[
"MIT"
] | 63
|
2021-12-27T05:55:07.000Z
|
2022-03-28T12:28:53.000Z
|
qqbot/model/ws_context.py
|
tencent-connect/botpy
|
275f96f0859b63110b095711838c738ad6a9cc1e
|
[
"MIT"
] | 9
|
2022-01-06T03:33:30.000Z
|
2022-03-27T10:49:36.000Z
|
qqbot/model/ws_context.py
|
tencent-connect/botpy
|
275f96f0859b63110b095711838c738ad6a9cc1e
|
[
"MIT"
] | 12
|
2021-12-31T07:46:12.000Z
|
2022-03-28T13:34:09.000Z
|
class WsContext:
    """
    Context information carried by passive events; currently only some event types provide it.
    """
    def __init__(self, event_type: str, event_id: str):
        self.event_type = str(event_type or "")  # `or ""` maps a None value to the empty string
        self.event_id = str(event_id or "")
| 23.444444
| 55
| 0.616114
| 27
| 211
| 4.444444
| 0.444444
| 0.225
| 0.216667
| 0.266667
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.255924
| 211
| 8
| 56
| 26.375
| 0.764331
| 0.113744
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
546cc5fb6359a952da186649665199a4b83b2e0c
| 371
|
py
|
Python
|
spam_v1/key_file.py
|
Alpha-Demon404/RE-14
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 39
|
2020-02-26T09:44:36.000Z
|
2022-03-23T00:18:25.000Z
|
spam_v1/key_file.py
|
B4BY-DG/reverse-enginnering
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 15
|
2020-05-14T10:07:26.000Z
|
2022-01-06T02:55:32.000Z
|
spam_v1/key_file.py
|
B4BY-DG/reverse-enginnering
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 41
|
2020-03-16T22:36:38.000Z
|
2022-03-17T14:47:19.000Z
|
# Time Success Parser : Mon Jun 15 19:45:42 2020
# Auto Parser Dis Version : 1.1.0
# Source : https://www.github.com/Datez-Kun
import hashlib
#return hashlib.sha512(__file__).hexdigest()
print(hashlib.sha512('spam.py'.encode()).hexdigest())  # hashlib requires bytes in Python 3, so encode the string first
#e20e1040eb5b81e3db7a14b6cab2754c575ccbeb6c1405d67088e9434c699ce9ea601e8ec14357b66b7bfd6b3dbe07a2c84790d4d7a2d523f75377aaccdd09e5 #result
| 41.222222
| 137
| 0.822102
| 39
| 371
| 7.717949
| 0.846154
| 0.086379
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.280702
| 0.078167
| 371
| 8
| 138
| 46.375
| 0.599415
| 0.803235
| 0
| 0
| 0
| 0
| 0.106061
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
5495880822d5ee1c688c8fb4abbbf0d8a68d419e
| 148
|
py
|
Python
|
examples/more/ED/30orb/get_fock.py
|
danielballan/edrixs
|
57fbd11ba9aaeaa393c3e2f06af41e4e386749e4
|
[
"BSD-3-Clause"
] | null | null | null |
examples/more/ED/30orb/get_fock.py
|
danielballan/edrixs
|
57fbd11ba9aaeaa393c3e2f06af41e4e386749e4
|
[
"BSD-3-Clause"
] | null | null | null |
examples/more/ED/30orb/get_fock.py
|
danielballan/edrixs
|
57fbd11ba9aaeaa393c3e2f06af41e4e386749e4
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
from edrixs.fock_basis import write_fock_dec_by_N
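# Presumably writes the Fock basis for 15 electrons in 30 orbitals (the "30orb" example) to fock_i.in.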
if __name__ == "__main__":
    write_fock_dec_by_N(30, 15, "fock_i.in")
| 21.142857
| 51
| 0.736486
| 27
| 148
| 3.37037
| 0.740741
| 0.197802
| 0.263736
| 0.307692
| 0.32967
| 0
| 0
| 0
| 0
| 0
| 0
| 0.031496
| 0.141892
| 148
| 6
| 52
| 24.666667
| 0.685039
| 0.135135
| 0
| 0
| 0
| 0
| 0.133858
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
54a7e85a24b7f6910ba9b36c90bfa982c423715e
| 229
|
py
|
Python
|
bin/deps/pydocstyle/__init__.py
|
keplersj/linter-pylama
|
3e8e3ffd34195ad705b7a4bb6b12b3af44fdf3bc
|
[
"MIT"
] | null | null | null |
bin/deps/pydocstyle/__init__.py
|
keplersj/linter-pylama
|
3e8e3ffd34195ad705b7a4bb6b12b3af44fdf3bc
|
[
"MIT"
] | null | null | null |
bin/deps/pydocstyle/__init__.py
|
keplersj/linter-pylama
|
3e8e3ffd34195ad705b7a4bb6b12b3af44fdf3bc
|
[
"MIT"
] | null | null | null |
from .checker import check
from .violations import Error, conventions
from .utils import __version__
# Temporary hotfix for flake8-docstrings
from .checker import PEP257Checker, tokenize_open
from .parser import AllError
| 28.625
| 50
| 0.80786
| 28
| 229
| 6.428571
| 0.678571
| 0.122222
| 0.188889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020619
| 0.152838
| 229
| 7
| 51
| 32.714286
| 0.907216
| 0.165939
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
54b34f23ba9f5d3892ec38454bcf2f6b351595c2
| 15,151
|
py
|
Python
|
app/mapproxy_webconf/test/test_geojson.py
|
mapproxy/mapproxy-webconf
|
b1010c9bf3aa04dd95b893912bbec20438d29116
|
[
"Apache-2.0"
] | 12
|
2015-10-03T01:49:34.000Z
|
2020-10-20T20:06:20.000Z
|
app/mapproxy_webconf/test/test_geojson.py
|
mapproxy/mapproxy-webconf
|
b1010c9bf3aa04dd95b893912bbec20438d29116
|
[
"Apache-2.0"
] | 2
|
2015-05-21T21:14:56.000Z
|
2017-07-19T08:22:28.000Z
|
app/mapproxy_webconf/test/test_geojson.py
|
mapproxy/mapproxy-webconf
|
b1010c9bf3aa04dd95b893912bbec20438d29116
|
[
"Apache-2.0"
] | 6
|
2015-05-06T08:12:16.000Z
|
2020-11-30T05:37:46.000Z
|
from nose.tools import assert_almost_equal, assert_raises
from mapproxy_webconf import defaults
from mapproxy_webconf.lib.geojson import ConfigGeoJSONGrid, polygons, \
    point_feature, polygon_feature, features, \
    InvalidGridBBoxTransformationException, InvalidTileBBoxTransformationException
GLOBAL_BBOX_4326 = [-180.0, -90.0, 180.0, 90.0]
GLOBAL_BBOX_4326_ALIGNED = [-180, -85.05112877, 180, 85.05112877]
LOCAL_BBOX_4326_1 = [-20.0, -20.0, 20.0, 20.0]
LOCAL_BBOX_4326_2 = [-40.0, -40.0, 40.0, 40.0]
GLOBAL_BBOX_3857 = [-20037508.342789236, -20037508.342789236,
                    20037508.342789236, 20037508.342789236]
GLOBAL_BBOX_TRANSFORMED_TO_3857 = [-20037508.342789244,
                                   -147730762.66992167,
                                   20037508.342789244,
                                   147730758.19456753]
LOCAL_BBOX_3857_1 = [-2226389.8158654715, -2273030.92698769,
                     2226389.8158654715, 2273030.926987689]
LOCAL_BBOX_3857_2 = [-4452779.631730943, -4865942.279503176,
                     4452779.631730943, 4865942.279503176]
OVERGLOBAL_BBOX_4326 = [-703.125, -421.825, 703.125, 421.825]
GLOBAL_POLYGON_4326 = [
    (-180.0, -90.0), (180.0, -90.0), (180.0, 90.0), (-180.0, 90.0), (-180.0, -90.0)]
GLOBAL_POLYGON_3857 = [(-20037508.342789236, -20037508.342789236),
                       (20037508.342789236, -20037508.342789236),
                       (20037508.342789236, 20037508.342789236),
                       (-20037508.342789236, 20037508.342789236),
                       (-20037508.342789236, -20037508.342789236)]
LOCAL_POLYGON_3857 = [(-2226389.8158654715, -2273030.92698769),
                      (2226389.8158654715, -2273030.92698769),
                      (2226389.8158654715, 2273030.926987689),
                      (-2226389.8158654715, 2273030.926987689),
                      (-2226389.8158654715, -2273030.92698769)]
def assert_list_almost_equal(list_a, list_b):
    assert len(list_a) == len(list_b)
    for i in range(len(list_a)):
        assert_almost_equal(list_a[i], list_b[i])
def assert_point_list_almost_equal(list_a, list_b):
    assert len(list_a) == len(list_b)
    for i in range(len(list_a)):
        assert_almost_equal(list_a[i][0], list_b[i][0])
        assert_almost_equal(list_a[i][1], list_b[i][1])
class TestConfigGeoJSONGrid(object):
    def test_without_parameters(self):
        config = ConfigGeoJSONGrid()
        assert config.request_bbox == None
        assert config.grid_bbox == None
        assert config.level == None
        assert config.grid_srs == None
        assert config.grid_bbox_srs == None
        assert config.map_srs == None
        assert config.map_bbox == None
        assert config.res == None
        assert config.origin == 'll'
    def test_all_bboxes_in_grid_srs(self):
        # global
        with assert_raises(InvalidTileBBoxTransformationException) as cm:
            config = ConfigGeoJSONGrid(grid_srs='EPSG:4326', map_srs='EPSG:3857',
                                       grid_bbox_srs='EPSG:3857',
                                       request_bbox=GLOBAL_BBOX_3857,
                                       grid_bbox=GLOBAL_BBOX_3857)
        assert cm.exception.args[0] == 'Invalid transformation for tile in level 0'
        with assert_raises(InvalidTileBBoxTransformationException) as cm:
            config = ConfigGeoJSONGrid(grid_srs='EPSG:4326', map_srs='EPSG:3857',
                                       grid_bbox_srs='EPSG:4326',
                                       request_bbox=GLOBAL_BBOX_3857,
                                       grid_bbox=GLOBAL_BBOX_4326)
        assert cm.exception.args[0] == 'Invalid transformation for tile in level 0'
        with assert_raises(InvalidGridBBoxTransformationException) as cm:
            config = ConfigGeoJSONGrid(grid_srs='EPSG:3857', map_srs='EPSG:4326',
                                       grid_bbox_srs='EPSG:4326',
                                       request_bbox=GLOBAL_BBOX_4326,
                                       grid_bbox=[-180, -90, 180, 270])
        assert cm.exception.args[0] == 'Invalid transformation for grid_bbox'
        config = ConfigGeoJSONGrid(grid_srs='EPSG:3857', map_srs='EPSG:4326',
                                   grid_bbox_srs='EPSG:3857',
                                   request_bbox=GLOBAL_BBOX_4326,
                                   grid_bbox=GLOBAL_BBOX_3857)
        assert config.map_bbox == None
        assert_list_almost_equal(config.grid_bbox, GLOBAL_BBOX_3857)
        # local
        config = ConfigGeoJSONGrid(grid_srs='EPSG:4326', map_srs='EPSG:3857',
                                   grid_bbox_srs='EPSG:3857',
                                   request_bbox=LOCAL_BBOX_3857_1,
                                   grid_bbox=LOCAL_BBOX_3857_2)
        assert_list_almost_equal(config.map_bbox, LOCAL_BBOX_4326_1)
        assert_list_almost_equal(config.grid_bbox, LOCAL_BBOX_4326_2)
        config = ConfigGeoJSONGrid(grid_srs='EPSG:3857', map_srs='EPSG:4326',
                                   grid_bbox_srs='EPSG:4326',
                                   request_bbox=LOCAL_BBOX_4326_1,
                                   grid_bbox=LOCAL_BBOX_4326_2)
        assert_list_almost_equal(config.map_bbox, LOCAL_BBOX_3857_1)
        assert_list_almost_equal(config.grid_bbox, LOCAL_BBOX_3857_2)
        # overglobal
        with assert_raises(InvalidGridBBoxTransformationException) as cm:
            config = ConfigGeoJSONGrid(grid_srs='EPSG:3857', map_srs='EPSG:4326',
                                       grid_bbox_srs='EPSG:4326',
                                       request_bbox=OVERGLOBAL_BBOX_4326,
                                       grid_bbox=GLOBAL_BBOX_4326)
        assert cm.exception.args[0] == 'Invalid transformation for grid_bbox'
    def test_view_box(self):
        grid_srs = 'EPSG:4326'
        grid_bbox_srs = 'EPSG:4326'
        map_srs = 'EPSG:4326'
        level = 0
        config = ConfigGeoJSONGrid(grid_bbox=[-30, -30, 30, 30],
                                   request_bbox=LOCAL_BBOX_4326_1,
                                   grid_srs=grid_srs, grid_bbox_srs=grid_bbox_srs,
                                   map_srs=map_srs, level=level)
        assert_list_almost_equal(config.view_bbox, LOCAL_BBOX_4326_1)
        config = ConfigGeoJSONGrid(grid_bbox=[-30, -30, 30, 30],
                                   request_bbox=LOCAL_BBOX_4326_2,
                                   grid_srs=grid_srs, grid_bbox_srs=grid_bbox_srs,
                                   map_srs=map_srs, level=level)
        assert_list_almost_equal(config.view_bbox, [-30.0, -30.0, 30.0, 30.0])
        config = ConfigGeoJSONGrid(grid_bbox=[-10, -30, 10, 30],
                                   request_bbox=LOCAL_BBOX_4326_1,
                                   grid_srs=grid_srs, grid_bbox_srs=grid_bbox_srs,
                                   map_srs=map_srs, level=level)
        assert_list_almost_equal(config.view_bbox, [-10.0, -20.0, 10.0, 20.0])
        map_srs = 'EPSG:3857'
        config = ConfigGeoJSONGrid(grid_bbox=[-30, -30, 30, 30],
                                   request_bbox=LOCAL_BBOX_3857_1,
                                   grid_srs=grid_srs, grid_bbox_srs=grid_bbox_srs,
                                   map_srs=map_srs, level=level)
        assert_list_almost_equal(config.view_bbox, LOCAL_BBOX_4326_1)
        config = ConfigGeoJSONGrid(grid_bbox=[-30, -30, 30, 30],
                                   request_bbox=LOCAL_BBOX_3857_2,
                                   grid_srs=grid_srs, grid_bbox_srs=grid_bbox_srs,
                                   map_srs=map_srs, level=level)
        assert_list_almost_equal(config.view_bbox, [-30.0, -30.0, 30.0, 30.0])
        config = ConfigGeoJSONGrid(grid_bbox=[-10, -30, 10, 30],
                                   request_bbox=LOCAL_BBOX_3857_1,
                                   grid_srs=grid_srs, grid_bbox_srs=grid_bbox_srs,
                                   map_srs=map_srs, level=level)
        assert_list_almost_equal(config.view_bbox, [-10.0, -20.0, 10.0, 20.0])
        grid_srs = 'EPSG:3857'
        map_srs = 'EPSG:4326'
        config = ConfigGeoJSONGrid(grid_bbox=[-30, -30, 30, 30],
                                   request_bbox=LOCAL_BBOX_4326_1,
                                   grid_srs=grid_srs, grid_bbox_srs=grid_bbox_srs,
                                   map_srs=map_srs, level=level)
        assert_list_almost_equal(config.view_bbox, LOCAL_BBOX_3857_1)
        config = ConfigGeoJSONGrid(grid_bbox=[-30, -30, 30, 30],
                                   request_bbox=LOCAL_BBOX_4326_2,
                                   grid_srs=grid_srs, grid_bbox_srs=grid_bbox_srs,
                                   map_srs=map_srs, level=level)
        assert_list_almost_equal(
            config.view_bbox, [-3339584.723798206, -3503549.8435043744,
                               3339584.723798206, 3503549.843504374])
        config = ConfigGeoJSONGrid(grid_bbox=[-10, -30, 10, 30],
                                   request_bbox=LOCAL_BBOX_4326_1,
                                   grid_srs=grid_srs,
                                   grid_bbox_srs=grid_bbox_srs,
                                   map_srs=map_srs,
                                   level=level)
        assert_list_almost_equal(
            config.view_bbox, [-1113194.9079327343, -2273030.92698769,
                               1113194.9079327343, 2273030.926987689])
        with assert_raises(InvalidGridBBoxTransformationException) as cm:
            config = ConfigGeoJSONGrid(grid_srs='EPSG:3857', map_srs='EPSG:4326',
                                       grid_bbox_srs='EPSG:4326',
                                       request_bbox=OVERGLOBAL_BBOX_4326,
                                       grid_bbox=GLOBAL_BBOX_4326)
        assert cm.exception.args[0] == 'Invalid transformation for grid_bbox'
    def test_global_polygon(self):
        defaults.TILE_POLYGON_POINTS = 4
        map_srs = 'EPSG:4326'
        grid_srs = 'EPSG:4326'
        grid_bbox_srs = 'EPSG:4326'
        grid_bbox = GLOBAL_BBOX_4326
        request_bbox = GLOBAL_BBOX_4326
        config = ConfigGeoJSONGrid(map_srs=map_srs, grid_srs=grid_srs,
                                   grid_bbox_srs=grid_bbox_srs,
                                   grid_bbox=grid_bbox, request_bbox=request_bbox)
        result = list(polygons(config, [(0, 0, 0)], False))[0]
        assert_point_list_almost_equal(result[0][0], GLOBAL_POLYGON_4326)
        result = list(polygons(config, [(0, 0, 0)], True))[0]
        assert_point_list_almost_equal(result[0][0], GLOBAL_POLYGON_4326)
        assert_list_almost_equal(result[1], [0.0, 0.0])
        assert result[2] == (0, 0, 0)
        config = ConfigGeoJSONGrid(grid_srs='EPSG:4326', map_srs='EPSG:4326',
                                   grid_bbox_srs='EPSG:4326',
                                   request_bbox=OVERGLOBAL_BBOX_4326,
                                   grid_bbox=GLOBAL_BBOX_4326)
        result = list(polygons(config, [(0, 0, 0)], False))[0]
        assert_point_list_almost_equal(result[0][0], GLOBAL_POLYGON_4326)
        with assert_raises(InvalidGridBBoxTransformationException) as cm:
            config = ConfigGeoJSONGrid(grid_srs='EPSG:3857', map_srs='EPSG:4326',
                                       grid_bbox_srs='EPSG:4326',
                                       request_bbox=OVERGLOBAL_BBOX_4326,
                                       grid_bbox=GLOBAL_BBOX_4326)
        assert cm.exception.args[0] == 'Invalid transformation for grid_bbox'
    def test_local_polygon(self):
        defaults.TILE_POLYGON_POINTS = 4
        map_srs = 'EPSG:3857'
        grid_srs = 'EPSG:4326'
        grid_bbox_srs = 'EPSG:4326'
        grid_bbox = LOCAL_BBOX_4326_1
        request_bbox = LOCAL_BBOX_3857_1
        config = ConfigGeoJSONGrid(map_srs=map_srs, grid_srs=grid_srs,
                                   grid_bbox_srs=grid_bbox_srs, grid_bbox=grid_bbox,
                                   request_bbox=request_bbox)
        result = list(polygons(config, [(0, 0, 0)], False))[0]
        assert_point_list_almost_equal(result[0][0], LOCAL_POLYGON_3857)
        result = list(polygons(config, [(0, 0, 0)], True))[0]
        assert_point_list_almost_equal(result[0][0], LOCAL_POLYGON_3857)
        assert_list_almost_equal(result[1], [0.0, 0.0])
        assert result[2] == (0, 0, 0)
class TestFeatureCreation(object):
    def test_point_feature(self):
        assert point_feature([4, 4]) == {
            "type": "Feature",
            "geometry": {
                "type": "Point",
                "coordinates": [4, 4]
            },
            "properties": {}
        }
        assert point_feature([4, 4], {"foo": "bar"}) == {
            "type": "Feature",
            "geometry": {
                "type": "Point",
                "coordinates": [4, 4]
            },
            "properties": {"foo": "bar"}
        }
    def test_polygon_feature(self):
        assert polygon_feature([[[1, 1], [2, 1], [2, 2], [1, 2], [1, 1]]]) == {
            "type": "Feature",
            "geometry": {
                "type": "Polygon",
                "coordinates": [[[1, 1], [2, 1], [2, 2], [1, 2], [1, 1]]]
            },
            "properties": {}
        }
        assert polygon_feature([[[1, 1], [2, 1], [2, 2],
                                 [1, 2], [1, 1]]], {"foo": "bar"}) == {
            "type": "Feature",
            "geometry": {
                "type": "Polygon",
                "coordinates": [[[1, 1], [2, 1], [2, 2], [1, 2], [1, 1]]]
            },
            "properties": {"foo": "bar"}
        }
    def test_feature_list(self):
        request_bbox = GLOBAL_BBOX_4326
        grid_bbox = GLOBAL_BBOX_4326
        level = 0
        grid_srs = 'EPSG:4326'
        grid_bbox_srs = 'EPSG:4326'
        map_srs = 'EPSG:4326'
        config = ConfigGeoJSONGrid(request_bbox=request_bbox, grid_bbox=grid_bbox,
                                   level=level, grid_srs=grid_srs,
                                   grid_bbox_srs=grid_bbox_srs, map_srs=map_srs)
        result = features(config)
        assert len(result) == 2
        level = 1
        config = ConfigGeoJSONGrid(request_bbox=request_bbox, grid_bbox=grid_bbox,
                                   level=level, grid_srs=grid_srs,
                                   grid_bbox_srs=grid_bbox_srs, map_srs=map_srs)
        result = features(config)
        assert len(result) == 4
        level = 0
        with assert_raises(InvalidGridBBoxTransformationException) as cm:
            config = ConfigGeoJSONGrid(grid_srs='EPSG:3857', map_srs='EPSG:4326',
                                       level=level,
                                       grid_bbox_srs='EPSG:4326',
                                       request_bbox=OVERGLOBAL_BBOX_4326,
                                       grid_bbox=GLOBAL_BBOX_4326)
        assert cm.exception.args[0] == 'Invalid transformation for grid_bbox'
| 46.333333
| 84
| 0.549271
| 1,683
| 15,151
| 4.620915
| 0.071301
| 0.084351
| 0.059406
| 0.046805
| 0.850585
| 0.823711
| 0.796837
| 0.771377
| 0.750932
| 0.698084
| 0
| 0.158327
| 0.34968
| 15,151
| 326
| 85
| 46.47546
| 0.630975
| 0.001518
| 0
| 0.636704
| 0
| 0
| 0.060963
| 0
| 0
| 0
| 0
| 0
| 0.228464
| 1
| 0.037453
| false
| 0
| 0.011236
| 0
| 0.05618
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
54b9173b64d7528ac99a206321a34dd2a20875e5
| 93
|
py
|
Python
|
nxcl/rich/__init__.py
|
yuneg11/NXCL
|
98a0f39071d7ae97aaf9dc5f1284faecfb7663ce
|
[
"MIT"
] | null | null | null |
nxcl/rich/__init__.py
|
yuneg11/NXCL
|
98a0f39071d7ae97aaf9dc5f1284faecfb7663ce
|
[
"MIT"
] | 1
|
2022-03-26T19:50:17.000Z
|
2022-03-26T19:51:41.000Z
|
nxcl/rich/__init__.py
|
yuneg11/NXCL
|
98a0f39071d7ae97aaf9dc5f1284faecfb7663ce
|
[
"MIT"
] | null | null | null |
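# Expose the logging and progress submodules and re-export their public names at the package level.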
from . import logging
from . import progress
from .logging import *
from .progress import *
| 15.5
| 23
| 0.752688
| 12
| 93
| 5.833333
| 0.333333
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.182796
| 93
| 5
| 24
| 18.6
| 0.921053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3f8896fa2ae4b4265f1b23a1544acd414a9efa45
| 30
|
py
|
Python
|
automatewithpython/.practicecode/snippets/xkcd.py
|
Coalemus/Python-Projects
|
4b0e0c12a2fdcfbaf491df5715885c61f44bdb1c
|
[
"MIT"
] | null | null | null |
automatewithpython/.practicecode/snippets/xkcd.py
|
Coalemus/Python-Projects
|
4b0e0c12a2fdcfbaf491df5715885c61f44bdb1c
|
[
"MIT"
] | null | null | null |
automatewithpython/.practicecode/snippets/xkcd.py
|
Coalemus/Python-Projects
|
4b0e0c12a2fdcfbaf491df5715885c61f44bdb1c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
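# Importing antigravity opens the xkcd "Python" comic in the default browser.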
import antigravity
| 10
| 18
| 0.766667
| 4
| 30
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 30
| 3
| 18
| 10
| 0.851852
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3f8b44d0caf39e75470ecbb318c0e036405998d6
| 85
|
py
|
Python
|
S9/gradcam/__init__.py
|
abishek-raju/EVA4B2
|
189f4062c85d91f43c1381087a9c89ff794e5428
|
[
"Apache-2.0"
] | 2
|
2020-09-04T18:34:19.000Z
|
2020-09-04T18:34:26.000Z
|
S9/gradcam/__init__.py
|
abishek-raju/EVA4B2
|
189f4062c85d91f43c1381087a9c89ff794e5428
|
[
"Apache-2.0"
] | null | null | null |
S9/gradcam/__init__.py
|
abishek-raju/EVA4B2
|
189f4062c85d91f43c1381087a9c89ff794e5428
|
[
"Apache-2.0"
] | 1
|
2020-07-18T18:58:08.000Z
|
2020-07-18T18:58:08.000Z
|
from .gradcam import GradCAM
from .gradcam_pp import GradCAMPP
from .visual import *
| 21.25
| 33
| 0.811765
| 12
| 85
| 5.666667
| 0.5
| 0.323529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141176
| 85
| 3
| 34
| 28.333333
| 0.931507
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3f9df986e8fe45a1ce1e87a5ec0b010cccf03a36
| 158
|
py
|
Python
|
PSNN/loss/__init__.py
|
Lukas0025/PSNN
|
00577851e1fe1a70410c29ae4b5f024bc8b9b598
|
[
"Apache-2.0"
] | 3
|
2020-01-30T21:33:37.000Z
|
2022-01-18T21:25:33.000Z
|
PSNN/loss/__init__.py
|
Lukas0025/PSNN
|
00577851e1fe1a70410c29ae4b5f024bc8b9b598
|
[
"Apache-2.0"
] | 4
|
2019-08-27T19:16:25.000Z
|
2019-11-09T21:53:43.000Z
|
PSNN/loss/__init__.py
|
Lukas0025/PYSNN
|
00577851e1fe1a70410c29ae4b5f024bc8b9b598
|
[
"Apache-2.0"
] | 1
|
2019-12-11T13:36:56.000Z
|
2019-12-11T13:36:56.000Z
|
## @package PSNN.loss
# @author Lukáš Plevač <lukasplevac@gmail.com>
# @date 22.12.2019
#
# loss functions
from .basic import mse
from .basic import mae
| 17.555556
| 47
| 0.708861
| 23
| 158
| 4.869565
| 0.826087
| 0.160714
| 0.267857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061538
| 0.177215
| 158
| 8
| 48
| 19.75
| 0.8
| 0.626582
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b7defea338914a355888ebf25b9d917931b06b61
| 182
|
py
|
Python
|
paxes_nova/virt/ibmpowervm/vif/ivm/__init__.py
|
windskyer/k_nova
|
63579dbfcfcda5def5b588a6728bfff85ad4564e
|
[
"Apache-2.0"
] | null | null | null |
paxes_nova/virt/ibmpowervm/vif/ivm/__init__.py
|
windskyer/k_nova
|
63579dbfcfcda5def5b588a6728bfff85ad4564e
|
[
"Apache-2.0"
] | null | null | null |
paxes_nova/virt/ibmpowervm/vif/ivm/__init__.py
|
windskyer/k_nova
|
63579dbfcfcda5def5b588a6728bfff85ad4564e
|
[
"Apache-2.0"
] | null | null | null |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# =================================================================
# =================================================================
| 36.4
| 67
| 0.186813
| 7
| 182
| 4.857143
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017442
| 0.054945
| 182
| 4
| 68
| 45.5
| 0.180233
| 0.950549
| 0
| null | 0
| null | 0
| 0
| null | 1
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b7f8d54e745f55a78246088770aeb8167eeff2ce
| 117
|
py
|
Python
|
project/carlos/__init__.py
|
ArturoMorales93/Plataformas_II_Project
|
7dd54c8c5159a1eb8c761a3a8e4f4bfb96a078eb
|
[
"Unlicense"
] | 1
|
2021-01-29T15:16:49.000Z
|
2021-01-29T15:16:49.000Z
|
project/carlos/__init__.py
|
ArturoMorales93/Plataformas_II_Project
|
7dd54c8c5159a1eb8c761a3a8e4f4bfb96a078eb
|
[
"Unlicense"
] | 12
|
2021-02-01T20:31:31.000Z
|
2021-04-15T07:34:54.000Z
|
project/carlos/__init__.py
|
ArturoMorales93/Plataformas_II_Project
|
7dd54c8c5159a1eb8c761a3a8e4f4bfb96a078eb
|
[
"Unlicense"
] | 1
|
2021-03-08T23:34:37.000Z
|
2021-03-08T23:34:37.000Z
|
from flask import Blueprint
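# template_folder is resolved relative to the blueprint package, i.e. carlos/templates/ here.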
carlos = Blueprint('carlos', __name__, template_folder='templates')
from . import routes
| 23.4
| 66
| 0.794872
| 14
| 117
| 6.285714
| 0.714286
| 0.340909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 117
| 5
| 67
| 23.4
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0.127119
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|