hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d50c52f3c9748c1f443feb673e3640f69a84aa9d
| 52
|
py
|
Python
|
geeksw/plotting/__init__.py
|
guitargeek/geeksw
|
d6e774824eae713e68cf23ae242933be9df52fcf
|
[
"MIT"
] | 2
|
2019-04-11T22:26:29.000Z
|
2021-06-23T19:59:36.000Z
|
geeksw/plotting/__init__.py
|
guitargeek/geeksw
|
d6e774824eae713e68cf23ae242933be9df52fcf
|
[
"MIT"
] | null | null | null |
geeksw/plotting/__init__.py
|
guitargeek/geeksw
|
d6e774824eae713e68cf23ae242933be9df52fcf
|
[
"MIT"
] | null | null | null |
from .Plot import Plot
from .RocPlot import RocPlot
| 17.333333
| 28
| 0.807692
| 8
| 52
| 5.25
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 52
| 2
| 29
| 26
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1d2c2c1e010a5b123d466df29a8f19833986b8f0
| 32
|
py
|
Python
|
app.py
|
ganggas95/meetup_back
|
fb0f7f92db6b49b6c6644c0018591e7aca14097e
|
[
"MIT"
] | null | null | null |
app.py
|
ganggas95/meetup_back
|
fb0f7f92db6b49b6c6644c0018591e7aca14097e
|
[
"MIT"
] | null | null | null |
app.py
|
ganggas95/meetup_back
|
fb0f7f92db6b49b6c6644c0018591e7aca14097e
|
[
"MIT"
] | 1
|
2020-02-12T09:22:10.000Z
|
2020-02-12T09:22:10.000Z
|
from meetup import app
app.run()
| 16
| 22
| 0.78125
| 6
| 32
| 4.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 32
| 2
| 23
| 16
| 0.892857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
1d53a5f71ceb3a7aeb764da343e068795e93f380
| 94
|
py
|
Python
|
shortener/admin.py
|
pennlabs/django-shortener
|
a8f362863d4d8f13916e9e924ed316384f588373
|
[
"MIT"
] | 3
|
2018-11-04T15:46:01.000Z
|
2020-01-06T13:49:46.000Z
|
shortener/admin.py
|
pennlabs/django-shortener
|
a8f362863d4d8f13916e9e924ed316384f588373
|
[
"MIT"
] | 1
|
2020-01-08T13:41:06.000Z
|
2020-01-08T13:41:06.000Z
|
shortener/admin.py
|
pennlabs/shortener
|
a8f362863d4d8f13916e9e924ed316384f588373
|
[
"MIT"
] | 2
|
2021-02-22T18:12:27.000Z
|
2021-09-16T18:51:47.000Z
|
from django.contrib import admin
from shortener.models import Url
admin.site.register(Url)
| 13.428571
| 32
| 0.808511
| 14
| 94
| 5.428571
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12766
| 94
| 6
| 33
| 15.666667
| 0.926829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1d65af596a823273a7da6abe6c41bc5827681411
| 383
|
py
|
Python
|
rampage/format.py
|
shreyaspotnis/rampage
|
e2565aef7ee16ee06523de975e8aa41aca14e3b2
|
[
"MIT"
] | null | null | null |
rampage/format.py
|
shreyaspotnis/rampage
|
e2565aef7ee16ee06523de975e8aa41aca14e3b2
|
[
"MIT"
] | null | null | null |
rampage/format.py
|
shreyaspotnis/rampage
|
e2565aef7ee16ee06523de975e8aa41aca14e3b2
|
[
"MIT"
] | null | null | null |
"""Provides HTML tags wrappers for pretty printing."""
def bold(s):
return '<b>'+s+'</b>'
def italic(s):
return '<i>'+s+'</i>'
def b(s):
return bold(s)
def i(s):
return italic(s)
def red(s):
return '<font color="red">'+s+'</font>'
def green(s):
return '<font color="green">'+s+'</font>'
def blue(s):
return '<font color="blue">'+s+'</font>'
| 12.766667
| 54
| 0.54047
| 60
| 383
| 3.45
| 0.316667
| 0.236715
| 0.15942
| 0.231884
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208877
| 383
| 29
| 55
| 13.206897
| 0.683168
| 0.125326
| 0
| 0
| 0
| 0
| 0.279635
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
1d7e7509cd039c6fd56d8ba1f9e6fe11dbc2962d
| 53
|
py
|
Python
|
blog/models/__init__.py
|
sharif-42/Personal_Website
|
7c385bec272ec7b5c816eab92e3b5bfb8cd80016
|
[
"MIT"
] | null | null | null |
blog/models/__init__.py
|
sharif-42/Personal_Website
|
7c385bec272ec7b5c816eab92e3b5bfb8cd80016
|
[
"MIT"
] | null | null | null |
blog/models/__init__.py
|
sharif-42/Personal_Website
|
7c385bec272ec7b5c816eab92e3b5bfb8cd80016
|
[
"MIT"
] | null | null | null |
from .post import Post
from .comments import Comment
| 17.666667
| 29
| 0.811321
| 8
| 53
| 5.375
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150943
| 53
| 2
| 30
| 26.5
| 0.955556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1d839a9c3f881b1363f2beb1bbd4fbbb0603a2c0
| 134
|
py
|
Python
|
board/templatetags/split.py
|
Bio7269/chieftain
|
da985acbbd8970aa879cd727460881f4399ad6d0
|
[
"MIT",
"Unlicense"
] | 38
|
2015-04-04T15:56:30.000Z
|
2022-03-15T15:16:18.000Z
|
board/templatetags/split.py
|
Bio7269/chieftain
|
da985acbbd8970aa879cd727460881f4399ad6d0
|
[
"MIT",
"Unlicense"
] | 1
|
2016-08-30T15:37:36.000Z
|
2016-08-30T15:37:36.000Z
|
board/templatetags/split.py
|
paulmillr/chieftain
|
da985acbbd8970aa879cd727460881f4399ad6d0
|
[
"Unlicense",
"MIT"
] | 16
|
2015-06-30T20:08:30.000Z
|
2021-03-03T22:01:39.000Z
|
from django.template import Library
register = Library()
@register.filter
def split(str, splitter):
return str.split(splitter)
| 14.888889
| 35
| 0.753731
| 17
| 134
| 5.941176
| 0.705882
| 0.29703
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149254
| 134
| 8
| 36
| 16.75
| 0.885965
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
1d9735ee5573e486f469a3a70225758aade7754b
| 2,361
|
py
|
Python
|
tests/pytests/unit/beacons/test_service.py
|
tomdoherty/salt
|
f87d5d7abbf9777773c4d91fdafecb8b1a728e76
|
[
"Apache-2.0"
] | 9,425
|
2015-01-01T05:59:24.000Z
|
2022-03-31T20:44:05.000Z
|
tests/pytests/unit/beacons/test_service.py
|
tomdoherty/salt
|
f87d5d7abbf9777773c4d91fdafecb8b1a728e76
|
[
"Apache-2.0"
] | 33,507
|
2015-01-01T00:19:56.000Z
|
2022-03-31T23:48:20.000Z
|
tests/pytests/unit/beacons/test_service.py
|
tomdoherty/salt
|
f87d5d7abbf9777773c4d91fdafecb8b1a728e76
|
[
"Apache-2.0"
] | 5,810
|
2015-01-01T19:11:45.000Z
|
2022-03-31T02:37:20.000Z
|
# Python libs
import pytest
# Salt libs
import salt.beacons.service as service_beacon
from tests.support.mock import MagicMock, patch
@pytest.fixture
def configure_loader_modules():
return {service_beacon: {"__context__": {}, "__salt__": {}}}
def test_non_list_config():
config = {}
ret = service_beacon.validate(config)
assert ret == (False, "Configuration for service beacon must be a list.")
def test_empty_config():
config = [{}]
ret = service_beacon.validate(config)
assert ret == (False, "Configuration for service beacon requires services.")
def test_validate_config_services_none():
config = [{"services": None}]
ret = service_beacon.validate(config)
assert ret == (
False,
"Services configuration item for service beacon must be a dictionary.",
)
def test_validate_config_services_list():
config = [{"services": [{"sshd": {}}]}]
ret = service_beacon.validate(config)
assert ret == (
False,
"Services configuration item for service beacon must be a dictionary.",
)
def test_validate_config_services_valid():
config = [{"services": {"sshd": {}}}]
ret = service_beacon.validate(config)
assert ret == (True, "Valid beacon configuration")
def test_service_running():
with patch.dict(
service_beacon.__salt__, {"service.status": MagicMock(return_value=True)}
):
config = [{"services": {"salt-master": {}}}]
ret = service_beacon.validate(config)
assert ret == (True, "Valid beacon configuration")
ret = service_beacon.beacon(config)
assert ret == [
{
"service_name": "salt-master",
"tag": "salt-master",
"salt-master": {"running": True},
}
]
def test_service_not_running():
with patch.dict(
service_beacon.__salt__, {"service.status": MagicMock(return_value=False)}
):
config = [{"services": {"salt-master": {}}}]
ret = service_beacon.validate(config)
assert ret == (True, "Valid beacon configuration")
ret = service_beacon.beacon(config)
assert ret == [
{
"service_name": "salt-master",
"tag": "salt-master",
"salt-master": {"running": False},
}
]
| 23.848485
| 82
| 0.605252
| 245
| 2,361
| 5.6
| 0.216327
| 0.161079
| 0.104956
| 0.122449
| 0.779155
| 0.758017
| 0.752915
| 0.752915
| 0.752915
| 0.752915
| 0
| 0
| 0.263448
| 2,361
| 98
| 83
| 24.091837
| 0.788959
| 0.008895
| 0
| 0.483871
| 0
| 0
| 0.231065
| 0
| 0
| 0
| 0
| 0
| 0.145161
| 1
| 0.129032
| false
| 0
| 0.048387
| 0.016129
| 0.193548
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1daacc871a5b5269bee8343e0fa99b1856591e44
| 106
|
py
|
Python
|
src/cms/forms/offer_templates/__init__.py
|
S10MC2015/cms-django
|
b08f2be60a9db6c8079ee923de2cd8912f550b12
|
[
"Apache-2.0"
] | null | null | null |
src/cms/forms/offer_templates/__init__.py
|
S10MC2015/cms-django
|
b08f2be60a9db6c8079ee923de2cd8912f550b12
|
[
"Apache-2.0"
] | null | null | null |
src/cms/forms/offer_templates/__init__.py
|
S10MC2015/cms-django
|
b08f2be60a9db6c8079ee923de2cd8912f550b12
|
[
"Apache-2.0"
] | null | null | null |
"""
Forms for creating and modifying offer objects
"""
from .offer_template_form import OfferTemplateForm
| 21.2
| 50
| 0.811321
| 13
| 106
| 6.461538
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122642
| 106
| 4
| 51
| 26.5
| 0.903226
| 0.433962
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d5216c51e897e71522c3619146902457c4c47594
| 233
|
py
|
Python
|
server/src/weaverbird/pipeline/steps/uppercase.py
|
JeremyJacquemont/weaverbird
|
e04ab6f9c8381986ab71078e5199ece7a875e743
|
[
"BSD-3-Clause"
] | 54
|
2019-11-20T15:07:39.000Z
|
2022-03-24T22:13:51.000Z
|
server/src/weaverbird/pipeline/steps/uppercase.py
|
JeremyJacquemont/weaverbird
|
e04ab6f9c8381986ab71078e5199ece7a875e743
|
[
"BSD-3-Clause"
] | 786
|
2019-10-20T11:48:37.000Z
|
2022-03-23T08:58:18.000Z
|
server/src/weaverbird/pipeline/steps/uppercase.py
|
JeremyJacquemont/weaverbird
|
e04ab6f9c8381986ab71078e5199ece7a875e743
|
[
"BSD-3-Clause"
] | 10
|
2019-11-21T10:16:16.000Z
|
2022-03-21T10:34:06.000Z
|
from pydantic import Field
from weaverbird.pipeline.steps.utils.base import BaseStep
from weaverbird.pipeline.types import ColumnName
class UppercaseStep(BaseStep):
name = Field('uppercase', const=True)
column: ColumnName
| 23.3
| 57
| 0.793991
| 28
| 233
| 6.607143
| 0.678571
| 0.151351
| 0.237838
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133047
| 233
| 9
| 58
| 25.888889
| 0.915842
| 0
| 0
| 0
| 0
| 0
| 0.038627
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d5278986a0c0f6e3bf10ee1daf30409afe6e0f52
| 41
|
py
|
Python
|
tests/__init__.py
|
msicilia/crowdtangle
|
399602998bcdc335b80bfc9beeeac98fae535eaf
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
msicilia/crowdtangle
|
399602998bcdc335b80bfc9beeeac98fae535eaf
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
msicilia/crowdtangle
|
399602998bcdc335b80bfc9beeeac98fae535eaf
|
[
"MIT"
] | null | null | null |
"""Unit test package for crowdtangle."""
| 20.5
| 40
| 0.707317
| 5
| 41
| 5.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121951
| 41
| 1
| 41
| 41
| 0.805556
| 0.829268
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d5586739122864d274f998f7ba7cd5b51357be23
| 80
|
py
|
Python
|
codeChef/practice/easy/double.py
|
ferhatelmas/algo
|
a7149c7a605708bc01a5cd30bf5455644cefd04d
|
[
"WTFPL"
] | 25
|
2015-01-21T16:39:18.000Z
|
2021-05-24T07:01:24.000Z
|
codeChef/practice/easy/double.py
|
ferhatelmas/algo
|
a7149c7a605708bc01a5cd30bf5455644cefd04d
|
[
"WTFPL"
] | 2
|
2020-09-30T19:39:36.000Z
|
2020-10-01T17:15:16.000Z
|
codeChef/practice/easy/double.py
|
ferhatelmas/algo
|
a7149c7a605708bc01a5cd30bf5455644cefd04d
|
[
"WTFPL"
] | 15
|
2015-01-21T16:39:27.000Z
|
2020-10-01T17:00:22.000Z
|
for _ in xrange(int(raw_input())):
n = int(raw_input())
print n - n % 2
| 20
| 34
| 0.5625
| 14
| 80
| 3
| 0.642857
| 0.285714
| 0.52381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016949
| 0.2625
| 80
| 3
| 35
| 26.666667
| 0.694915
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6378b93fb60aa12dba78e44c8ffee4abf07ae148
| 5,722
|
py
|
Python
|
tests/interfaces/test_dataset.py
|
havaeimo/smartlearner
|
9fa0d4e757310b185e5c0d2ff43279d21ab4cb17
|
[
"BSD-3-Clause"
] | null | null | null |
tests/interfaces/test_dataset.py
|
havaeimo/smartlearner
|
9fa0d4e757310b185e5c0d2ff43279d21ab4cb17
|
[
"BSD-3-Clause"
] | null | null | null |
tests/interfaces/test_dataset.py
|
havaeimo/smartlearner
|
9fa0d4e757310b185e5c0d2ff43279d21ab4cb17
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
import theano
import theano.tensor as T
from nose.tools import assert_true
from numpy.testing import assert_equal, assert_array_equal
from smartlearner.interfaces.dataset import Dataset
floatX = theano.config.floatX
ALL_DTYPES = np.sctypes['int'] + np.sctypes['uint'] + np.sctypes['float']
def test_dataset_used_in_theano_function():
rng = np.random.RandomState(1234)
nb_examples = 10
inputs = (rng.randn(nb_examples, 5) * 100).astype(floatX)
targets = (rng.randn(nb_examples, 1) > 0.5).astype(floatX)
dataset = Dataset(inputs, targets)
input_sqr_norm = T.sum(dataset.symb_inputs**2)
result = input_sqr_norm - dataset.symb_targets
f = theano.function([dataset.symb_inputs, dataset.symb_targets], result)
assert_array_equal(f(inputs, targets), np.sum(inputs**2)-targets)
def test_dataset_without_targets():
rng = np.random.RandomState(1234)
nb_examples = 10
nb_features = 3
sequences_length = 4
nb_channels = 2
image_shape = (5, 5)
# Test creating dataset with different example shapes:
# scalar feature, vector features, sequence of vector features, multiple channels images features.
for example_shape in [(), (nb_features,), (sequences_length, nb_features), (nb_channels,)+image_shape]:
inputs_shape = (nb_examples,) + example_shape
for dtype in ALL_DTYPES:
inputs = (rng.randn(*inputs_shape) * 100).astype(dtype)
dataset = Dataset(inputs)
# Data should be converted into `floatX`.
assert_equal(dataset.inputs.dtype, floatX)
assert_equal(dataset.symb_inputs.dtype, floatX)
assert_equal(dataset.symb_inputs.ndim, inputs.ndim)
assert_equal(dataset.input_shape, example_shape)
assert_array_equal(dataset.inputs.get_value(), inputs.astype(floatX))
# Everything related to target should be None
assert_true(dataset.targets is None)
assert_true(dataset.symb_targets is None)
assert_true(dataset.target_shape is None)
assert_true(dataset.target_size is None)
# Create dataset from nested Pyton lists.
inputs = [[1, 2, 3]] * nb_examples
dataset = Dataset(inputs)
# Data should be converted into `floatX`.
assert_equal(dataset.inputs.dtype, floatX)
assert_equal(dataset.symb_inputs.dtype, floatX)
assert_equal(dataset.symb_inputs.ndim, 2)
assert_equal(dataset.input_shape, (3,))
assert_array_equal(dataset.inputs.get_value(), np.array(inputs, dtype=floatX))
def test_dataset_with_targets():
rng = np.random.RandomState(1234)
nb_examples = 10
nb_features = 3
sequences_length = 4
nb_channels = 2
image_shape = (5, 5)
# Test creating dataset with different example shapes and target shapes:
# scalar feature, vector features, sequence of vector features, multiple channels images features.
for target_shape in [(), (nb_features,), (sequences_length, nb_features), (nb_channels,)+image_shape]:
for example_shape in [(), (nb_features,), (sequences_length, nb_features), (nb_channels,)+image_shape]:
inputs_shape = (nb_examples,) + example_shape
targets_shape = (nb_examples,) + target_shape
for example_dtype in ALL_DTYPES:
for target_dtype in ALL_DTYPES:
inputs = (rng.randn(*inputs_shape) * 100).astype(example_dtype)
targets = (rng.randn(*targets_shape) * 100).astype(target_dtype)
dataset = Dataset(inputs, targets)
# Data should be converted into `floatX`.
assert_equal(dataset.inputs.dtype, floatX)
assert_equal(dataset.symb_inputs.dtype, floatX)
assert_equal(dataset.symb_inputs.ndim, inputs.ndim)
assert_equal(dataset.input_shape, example_shape)
assert_array_equal(dataset.inputs.get_value(), inputs.astype(floatX))
assert_equal(dataset.targets.dtype, floatX)
assert_equal(dataset.symb_targets.dtype, floatX)
assert_equal(dataset.symb_targets.ndim, targets.ndim)
assert_equal(dataset.target_shape, target_shape)
assert_array_equal(dataset.targets.get_value(), targets.astype(floatX))
# Create dataset from nested Pyton lists.
inputs = [[1, 2, 3]] * nb_examples
targets = [[1, 2, 3]] * nb_examples
dataset = Dataset(inputs, targets)
# Data should be converted into `floatX`.
assert_equal(dataset.inputs.dtype, floatX)
assert_equal(dataset.symb_inputs.dtype, floatX)
assert_equal(dataset.symb_inputs.ndim, 2)
assert_equal(dataset.input_shape, (3,))
assert_array_equal(dataset.inputs.get_value(), np.array(inputs, dtype=floatX))
assert_equal(dataset.targets.dtype, floatX)
assert_equal(dataset.symb_targets.dtype, floatX)
assert_equal(dataset.symb_targets.ndim, 2)
assert_equal(dataset.target_shape, (3,))
assert_array_equal(dataset.targets.get_value(), np.array(targets, dtype=floatX))
def test_dataset_with_test_value():
rng = np.random.RandomState(1234)
nb_examples = 10
theano.config.compute_test_value = 'warn'
try:
inputs = (rng.randn(nb_examples, 5) * 100).astype(floatX)
targets = (rng.randn(nb_examples, 1) > 0.5).astype(floatX)
dataset = Dataset(inputs, targets)
input_sqr_norm = T.sum(dataset.symb_inputs**2)
result = input_sqr_norm - dataset.symb_targets
assert_array_equal(result.tag.test_value, np.sum(inputs**2)-targets)
finally:
theano.config.compute_test_value = 'off'
| 40.58156
| 111
| 0.680357
| 727
| 5,722
| 5.122421
| 0.141678
| 0.09667
| 0.116004
| 0.116004
| 0.805317
| 0.764232
| 0.72449
| 0.704082
| 0.680183
| 0.680183
| 0
| 0.016998
| 0.21863
| 5,722
| 140
| 112
| 40.871429
| 0.815925
| 0.105033
| 0
| 0.612245
| 0
| 0
| 0.003719
| 0
| 0
| 0
| 0
| 0
| 0.387755
| 1
| 0.040816
| false
| 0
| 0.061224
| 0
| 0.102041
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
637e09d1428be6af978aab9878f44966e8caafb0
| 90
|
py
|
Python
|
src/osms/tts_modules/synthesizer/__init__.py
|
adasegroup/OSM---one-shot-multispeaker
|
90c1bbea4db1d49667fcfecb51676ee3281f9458
|
[
"MIT"
] | 12
|
2021-05-31T21:09:23.000Z
|
2022-01-30T03:48:10.000Z
|
src/osms/tts_modules/synthesizer/__init__.py
|
adasegroup/OSM---one-shot-multispeaker
|
90c1bbea4db1d49667fcfecb51676ee3281f9458
|
[
"MIT"
] | null | null | null |
src/osms/tts_modules/synthesizer/__init__.py
|
adasegroup/OSM---one-shot-multispeaker
|
90c1bbea4db1d49667fcfecb51676ee3281f9458
|
[
"MIT"
] | 6
|
2021-05-13T20:28:19.000Z
|
2021-09-28T10:24:31.000Z
|
from .synthesizer_manager import SynthesizerManager
from .synthesize import run_synthesis
| 30
| 51
| 0.888889
| 10
| 90
| 7.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 90
| 2
| 52
| 45
| 0.95122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
893b150afc3f2321f77df3df774c815c4bf21795
| 152
|
py
|
Python
|
Getting_Started/goswimming.py
|
Dannnno/Computing_For_The_Web
|
405dae3ab73abcd9daad38ce1362336bea3b7d3a
|
[
"MIT"
] | null | null | null |
Getting_Started/goswimming.py
|
Dannnno/Computing_For_The_Web
|
405dae3ab73abcd9daad38ce1362336bea3b7d3a
|
[
"MIT"
] | 11
|
2015-03-29T03:36:47.000Z
|
2015-04-05T02:36:11.000Z
|
Getting_Started/goswimming.py
|
Dannnno/Computing_For_The_Web
|
405dae3ab73abcd9daad38ce1362336bea3b7d3a
|
[
"MIT"
] | null | null | null |
def goSwimming(temp):
if temp <= 75:
print "Too cold!"
elif temp >= 105:
print "Too hot!"
else:
print "Just right!"
| 19
| 27
| 0.506579
| 19
| 152
| 4.052632
| 0.736842
| 0.207792
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052083
| 0.368421
| 152
| 7
| 28
| 21.714286
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.184211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.428571
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
893ca86767f987bdfea9fa9bc9d09ace7c59eef9
| 16
|
py
|
Python
|
tests/CompileTests/Python_tests/test2011_020.py
|
maurizioabba/rose
|
7597292cf14da292bdb9a4ef573001b6c5b9b6c0
|
[
"BSD-3-Clause"
] | 488
|
2015-01-09T08:54:48.000Z
|
2022-03-30T07:15:46.000Z
|
tests/CompileTests/Python_tests/test2011_020.py
|
sujankh/rose-matlab
|
7435d4fa1941826c784ba97296c0ec55fa7d7c7e
|
[
"BSD-3-Clause"
] | 174
|
2015-01-28T18:41:32.000Z
|
2022-03-31T16:51:05.000Z
|
tests/CompileTests/Python_tests/test2011_020.py
|
sujankh/rose-matlab
|
7435d4fa1941826c784ba97296c0ec55fa7d7c7e
|
[
"BSD-3-Clause"
] | 146
|
2015-04-27T02:48:34.000Z
|
2022-03-04T07:32:53.000Z
|
# test comments
| 8
| 15
| 0.75
| 2
| 16
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1875
| 16
| 1
| 16
| 16
| 0.923077
| 0.8125
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
89551d39007f4d08d7a28d7bd87e95e75ddfaef3
| 81
|
py
|
Python
|
18 - Errors tratements and exceptions/Ex_115/titulo.py
|
o-Ian/Practice-Python
|
1e4b2d0788e70006096a53a7cf038db3148ba4b7
|
[
"MIT"
] | 4
|
2021-04-23T18:07:58.000Z
|
2021-05-12T11:38:14.000Z
|
18 - Errors tratements and exceptions/Ex_115/titulo.py
|
o-Ian/Practice-Python
|
1e4b2d0788e70006096a53a7cf038db3148ba4b7
|
[
"MIT"
] | null | null | null |
18 - Errors tratements and exceptions/Ex_115/titulo.py
|
o-Ian/Practice-Python
|
1e4b2d0788e70006096a53a7cf038db3148ba4b7
|
[
"MIT"
] | null | null | null |
def titulo(txt):
print('-' * 35)
print(f'{txt:^35}')
print('-' * 35)
| 16.2
| 23
| 0.45679
| 11
| 81
| 3.363636
| 0.545455
| 0.378378
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 0.259259
| 81
| 4
| 24
| 20.25
| 0.516667
| 0
| 0
| 0.5
| 0
| 0
| 0.135802
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.25
| 0.75
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
896dbea585977f12489d00db88d44dce3dfd7217
| 46
|
py
|
Python
|
day1_1.py
|
kangsup/maybler0
|
0128054800c4afbe842e711a881378382ffa5c6f
|
[
"MIT"
] | null | null | null |
day1_1.py
|
kangsup/maybler0
|
0128054800c4afbe842e711a881378382ffa5c6f
|
[
"MIT"
] | null | null | null |
day1_1.py
|
kangsup/maybler0
|
0128054800c4afbe842e711a881378382ffa5c6f
|
[
"MIT"
] | null | null | null |
print("Hello World!")
print(3+5)
print(15+3)
| 15.333333
| 22
| 0.652174
| 9
| 46
| 3.333333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121951
| 0.108696
| 46
| 3
| 23
| 15.333333
| 0.609756
| 0
| 0
| 0
| 0
| 0
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
89739cb59834f4062bc745fb182734b2f4f85279
| 33
|
py
|
Python
|
web/__init__.py
|
pwh19920920/spiders
|
b0543a2583a296a80dfaaa63ed7f80a00d51cd07
|
[
"MIT"
] | 390
|
2020-07-20T05:21:45.000Z
|
2022-03-12T14:55:59.000Z
|
web/__init__.py
|
pwh19920920/spiders
|
b0543a2583a296a80dfaaa63ed7f80a00d51cd07
|
[
"MIT"
] | 21
|
2020-04-07T02:06:36.000Z
|
2020-07-20T02:10:38.000Z
|
web/__init__.py
|
pwh19920920/spiders
|
b0543a2583a296a80dfaaa63ed7f80a00d51cd07
|
[
"MIT"
] | 108
|
2020-07-23T03:35:54.000Z
|
2022-03-12T14:56:04.000Z
|
from ._response import response
| 11
| 31
| 0.818182
| 4
| 33
| 6.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151515
| 33
| 2
| 32
| 16.5
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
981e2bcc1725a8d220946980f79ebb30c24a2d70
| 134
|
py
|
Python
|
test/test_api/test_encodings.py
|
SunChuquin/pyqode.core
|
edf29204446e3679701e74343288cf692eb07d86
|
[
"MIT"
] | 23
|
2015-01-08T15:04:47.000Z
|
2022-03-08T07:47:08.000Z
|
test/test_api/test_encodings.py
|
SunChuquin/pyqode.core
|
edf29204446e3679701e74343288cf692eb07d86
|
[
"MIT"
] | 16
|
2021-02-01T08:54:08.000Z
|
2022-01-09T10:23:57.000Z
|
test/test_api/test_encodings.py
|
SunChuquin/pyqode.core
|
edf29204446e3679701e74343288cf692eb07d86
|
[
"MIT"
] | 24
|
2015-01-09T14:16:41.000Z
|
2021-12-06T15:11:22.000Z
|
from pyqode.core.api import encodings
def test_convert_to_code_key():
assert encodings.convert_to_codec_key('UTF-8') == 'utf_8'
| 22.333333
| 61
| 0.768657
| 22
| 134
| 4.318182
| 0.727273
| 0.189474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016949
| 0.119403
| 134
| 5
| 62
| 26.8
| 0.788136
| 0
| 0
| 0
| 0
| 0
| 0.074627
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
98273d03fc4ffbdb22237077acf63df1731a5168
| 22
|
py
|
Python
|
hello_world.py
|
palsumitdev/profiles-rest-api
|
eaf8f86d3919028526797cdd269ae34d8f73fb41
|
[
"MIT"
] | null | null | null |
hello_world.py
|
palsumitdev/profiles-rest-api
|
eaf8f86d3919028526797cdd269ae34d8f73fb41
|
[
"MIT"
] | null | null | null |
hello_world.py
|
palsumitdev/profiles-rest-api
|
eaf8f86d3919028526797cdd269ae34d8f73fb41
|
[
"MIT"
] | null | null | null |
print('Helllo world')
| 11
| 21
| 0.727273
| 3
| 22
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 22
| 1
| 22
| 22
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
982d6ff08c3c0f817cebd11644e249549e3cdc5d
| 165
|
py
|
Python
|
tests/web_platform/css_grid_1/grid_model/test_grid_margins_no_collapse.py
|
fletchgraham/colosseum
|
77be4896ee52b8f5956a3d77b5f2ccd2c8608e8f
|
[
"BSD-3-Clause"
] | null | null | null |
tests/web_platform/css_grid_1/grid_model/test_grid_margins_no_collapse.py
|
fletchgraham/colosseum
|
77be4896ee52b8f5956a3d77b5f2ccd2c8608e8f
|
[
"BSD-3-Clause"
] | null | null | null |
tests/web_platform/css_grid_1/grid_model/test_grid_margins_no_collapse.py
|
fletchgraham/colosseum
|
77be4896ee52b8f5956a3d77b5f2ccd2c8608e8f
|
[
"BSD-3-Clause"
] | 1
|
2020-01-16T01:56:41.000Z
|
2020-01-16T01:56:41.000Z
|
from tests.utils import W3CTestCase
class TestGridMarginsNoCollapse(W3CTestCase):
vars().update(W3CTestCase.find_tests(__file__, 'grid-margins-no-collapse-'))
| 27.5
| 80
| 0.8
| 18
| 165
| 7.055556
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019868
| 0.084848
| 165
| 5
| 81
| 33
| 0.821192
| 0
| 0
| 0
| 0
| 0
| 0.152439
| 0.152439
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
985952323d328d3c0c302456dab03a422af00040
| 347
|
py
|
Python
|
rich/tui/widgets/window.py
|
FFY00/rich.tui
|
490462783b0f4cad7ee99aa16304ffeb52a4936d
|
[
"MIT"
] | null | null | null |
rich/tui/widgets/window.py
|
FFY00/rich.tui
|
490462783b0f4cad7ee99aa16304ffeb52a4936d
|
[
"MIT"
] | null | null | null |
rich/tui/widgets/window.py
|
FFY00/rich.tui
|
490462783b0f4cad7ee99aa16304ffeb52a4936d
|
[
"MIT"
] | null | null | null |
from typing import Optional
from rich.console import RenderableType
from ..widget import Widget
class Window(Widget):
renderable: Optional[RenderableType]
def __init__(self, renderable: RenderableType):
self.renderable = renderable
def update(self, renderable: RenderableType) -> None:
self.renderable = renderable
| 24.785714
| 57
| 0.743516
| 36
| 347
| 7.055556
| 0.444444
| 0.220472
| 0.220472
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184438
| 347
| 14
| 58
| 24.785714
| 0.897527
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.333333
| 0
| 0.777778
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9865a42888244abac96eb55ab60f7f3dac1d11e9
| 68
|
py
|
Python
|
learn-to-code-with-python/20-Modules/project/feature/__init__.py
|
MaciejZurek/python_practicing
|
0a426f2aed151573e1f8678e0239ff596d92bbde
|
[
"MIT"
] | null | null | null |
learn-to-code-with-python/20-Modules/project/feature/__init__.py
|
MaciejZurek/python_practicing
|
0a426f2aed151573e1f8678e0239ff596d92bbde
|
[
"MIT"
] | null | null | null |
learn-to-code-with-python/20-Modules/project/feature/__init__.py
|
MaciejZurek/python_practicing
|
0a426f2aed151573e1f8678e0239ff596d92bbde
|
[
"MIT"
] | null | null | null |
print("I'm the dunder __init__.py file inside the 'feature' folder")
| 68
| 68
| 0.764706
| 12
| 68
| 4
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 68
| 1
| 68
| 68
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.855072
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
989287e6dc1a8486662c20958c8c506a53b94782
| 23,116
|
py
|
Python
|
experiments/experiments_toy/test_varying_missing/nmtf_gibbs.py
|
ThomasBrouwer/BNMTF
|
34df0c3cebc5e67a5e39762b9305b75d73a2a0e0
|
[
"Apache-2.0"
] | 16
|
2017-04-19T12:04:47.000Z
|
2021-12-03T00:50:43.000Z
|
experiments/experiments_toy/test_varying_missing/nmtf_gibbs.py
|
ThomasBrouwer/BNMTF
|
34df0c3cebc5e67a5e39762b9305b75d73a2a0e0
|
[
"Apache-2.0"
] | 1
|
2017-04-20T11:26:16.000Z
|
2017-04-20T11:26:16.000Z
|
experiments/experiments_toy/test_varying_missing/nmtf_gibbs.py
|
ThomasBrouwer/BNMTF
|
34df0c3cebc5e67a5e39762b9305b75d73a2a0e0
|
[
"Apache-2.0"
] | 8
|
2015-12-15T05:29:43.000Z
|
2019-06-05T03:14:11.000Z
|
"""
Test the performance of Gibbs sampling for recovering a toy dataset, where we
vary the fraction of entries that are missing.
We repeat this 10 times per fraction and average that.
We use the correct number of latent factors and same priors as used to generate the data.
I, J, K, L = 100, 80, 5, 5
"""
import sys, os
project_location = os.path.dirname(__file__)+"/../../../../"
sys.path.append(project_location)
from BNMTF.code.models.bnmtf_gibbs_optimised import bnmtf_gibbs_optimised
from BNMTF.data_toy.bnmf.generate_bnmf import try_generate_M
from BNMTF.code.cross_validation.mask import calc_inverse_M
import numpy, matplotlib.pyplot as plt
##########
fractions_unknown = [0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.45, 0.5, 0.55, 0.6, 0.65, 0.7, 0.75, 0.8, 0.85, 0.9, 0.95] #[ 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9 ]
input_folder = project_location+"BNMTF/data_toy/bnmtf/"
repeats = 10 # number of times we try each fraction
iterations = 1000
burn_in = 800
thinning = 5
init_S = 'random'
init_FG = 'kmeans'
I,J,K,L = 100, 80, 5, 5
alpha, beta = 1., 1.
lambdaF = numpy.ones((I,K))/10.
lambdaS = numpy.ones((K,L))/10.
lambdaG = numpy.ones((J,L))/10.
priors = { 'alpha':alpha, 'beta':beta, 'lambdaF':lambdaF, 'lambdaS':lambdaS, 'lambdaG':lambdaG }
metrics = ['MSE', 'R^2', 'Rp']
# Load in data
R = numpy.loadtxt(input_folder+"R.txt")
# Seed all of the methods the same
numpy.random.seed(3)
# Generate matrices M - one list of M's for each fraction
M_attempts = 100
all_Ms = [
[try_generate_M(I,J,fraction,M_attempts) for r in range(0,repeats)]
for fraction in fractions_unknown
]
all_Ms_test = [ [calc_inverse_M(M) for M in Ms] for Ms in all_Ms ]
# Make sure each M has no empty rows or columns
def check_empty_rows_columns(M,fraction):
sums_columns = M.sum(axis=0)
sums_rows = M.sum(axis=1)
for i,c in enumerate(sums_rows):
assert c != 0, "Fully unobserved row in M, row %s. Fraction %s." % (i,fraction)
for j,c in enumerate(sums_columns):
assert c != 0, "Fully unobserved column in M, column %s. Fraction %s." % (j,fraction)
for Ms,fraction in zip(all_Ms,fractions_unknown):
for M in Ms:
check_empty_rows_columns(M,fraction)
# We now run the Gibbs sampler on each of the M's for each fraction.
all_performances = {metric:[] for metric in metrics}
average_performances = {metric:[] for metric in metrics} # averaged over repeats
for (fraction,Ms,Ms_test) in zip(fractions_unknown,all_Ms,all_Ms_test):
print "Trying fraction %s." % fraction
# Run the algorithm <repeats> times and store all the performances
for metric in metrics:
all_performances[metric].append([])
for (repeat,M,M_test) in zip(range(0,repeats),Ms,Ms_test):
print "Repeat %s of fraction %s." % (repeat+1, fraction)
# Run the Gibbs sampler
BNMTF = bnmtf_gibbs_optimised(R,M,K,L,priors)
BNMTF.initialise(init_S, init_FG)
BNMTF.run(iterations)
# Measure the performances
performances = BNMTF.predict(M_test,burn_in,thinning)
for metric in metrics:
# Add this metric's performance to the list of <repeat> performances for this fraction
all_performances[metric][-1].append(performances[metric])
# Compute the average across attempts
for metric in metrics:
average_performances[metric].append(sum(all_performances[metric][-1])/repeats)
print "repeats=%s \nfractions_unknown = %s \nall_performances = %s \naverage_performances = %s" % \
(repeats,fractions_unknown,all_performances,average_performances)
'''
repeats=10
fractions_unknown = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
all_performances = {'R^2': [[0.998185524304205, 0.9951921450688566, 0.998047302865917, 0.9979297007628247, 0.9980625717620314, 0.998378361980241, 0.9975783292477035, 0.9980969649399992, 0.9978271991362235, 0.9979706579627173], [0.997723944694483, 0.9976717932557062, 0.9979283182367938, 0.9979044934487292, 0.9978787138668564, 0.9978090755612372, 0.9980795897686455, 0.9978830357811582, 0.9978341649867523, 0.9978573890051081], [0.9979083188011675, 0.9971158506788501, 0.9977732504857821, 0.9976214575531412, 0.9977090529028667, 0.9977815698725234, 0.9978868803897982, 0.9980511456785648, 0.9976944005620709, 0.9977959916656446], [0.9979177373002986, 0.9978004131318091, 0.997829350371225, 0.99780183337049, 0.9978994246007876, 0.9976844457355755, 0.9978511309201912, 0.9978247398255468, 0.9977256142072066, 0.9977870877840281], [0.9977234628694933, 0.9976852127966278, 0.9976071695270259, 0.9977859850373401, 0.9976132566922483, 0.9976079271348435, 0.997578230648432, 0.9976254659027803, 0.9976079355283997, 0.9976454739808525], [0.997265259479572, 0.9975247556973685, 0.9973786511663026, 0.9961660989980509, 0.9974865218552995, 0.9972506763740759, 0.9975673652535634, 0.9976273304247582, 0.9975941120548334, 0.9975653934212548], [0.9969585094263553, 0.997109006550793, 0.9969670467901287, 0.9969727936979911, 0.996913632325234, 0.9970156140254414, 0.9973842281908838, 0.9969572559224866, 0.9971345207674233, 0.9932651382409295], [0.9899713517883306, 0.9884157279987293, 0.9944292790671685, 0.9953493028356852, 0.995238434769166, 0.9955427054635841, 0.9946775860106445, 0.9959215116703484, 0.9954410623106186, 0.995332232643651], [0.9091400319245768, 0.9382383944759967, 0.9716975989003879, 0.6305571752981689, 0.9239030213291858, 0.938537688800508, 0.9667833708402461, 0.94836777578012, 0.9140916693584875, 0.9186176141782751]], 'MSE': [[1.044130764499906, 2.4139080359128964, 1.174428094440781, 1.1566256157501953, 1.2086740245842329, 1.0743758985283698, 1.1968751870910701, 1.0711357758414488, 
1.1079168527324941, 1.1240827365415809], [1.173765399856709, 1.1959007007153171, 1.224971734594779, 1.232630411841106, 1.1485666675968322, 1.2014701188076251, 1.1855453745811888, 1.1614322408030453, 1.2017315515847555, 1.1958011354092706], [1.2769991864744963, 1.7995554215940275, 1.1805496214511799, 1.2587603241528436, 1.2672897357533193, 1.1779994486717276, 1.219228000984155, 1.1765313124260157, 1.2306338733957012, 1.2116574978377836], [1.2808309070364796, 1.2569301076499761, 1.260573621936822, 1.2527015001756732, 1.2632051787402248, 1.2968694875345366, 1.1923630725527183, 1.2616223819897514, 1.2557209546512635, 1.2321210808332639], [1.267550124381795, 1.3505814547898933, 1.3889670863756638, 1.3413898710816359, 1.343374918637654, 1.3321739978233957, 1.3636928344478072, 1.2915660732473415, 1.3694959327698075, 1.3156670856004009], [1.4753610142200577, 1.4143945088000158, 1.5437207196644922, 2.1326412026451087, 1.4441471316678816, 1.4466596195057999, 1.4283718400593597, 1.4732136761022112, 1.4428493946266663, 1.4071468300730523], [1.7632238986108744, 1.6905153642210393, 1.7693326322920491, 1.7426467708059412, 1.7771316614806749, 1.7365641247512897, 1.5751189955367642, 1.712675402288736, 1.6437985708622473, 3.8051370686753114], [5.5581304664405806, 6.4771216251795556, 3.2415948718074321, 2.7215311301175076, 2.6222991473624346, 2.5604231415857353, 3.0733280716712916, 2.3804113323328675, 2.6784281225778197, 2.592153036828059], [51.951819511913257, 36.265800647062591, 16.62730305107635, 206.18532128377154, 43.73050111129875, 35.356542003298834, 19.045634587537538, 28.968309668740257, 49.227672608650025, 47.281594631408829]], 'Rp': [[0.99909250017920925, 0.99761844083589213, 0.99903945323235999, 0.9989778896652548, 0.99903914439579089, 0.99919142448208687, 0.9988040895207041, 0.99905604640044021, 0.99892144781606063, 0.99898622117657587], [0.99886484386624896, 0.99883562421300864, 0.99896469790006648, 0.99895181783185061, 0.99894116021977508, 0.99890471566946004, 
0.99904011194899633, 0.99894612350718592, 0.9989169762501855, 0.99892897390532198], [0.99895424519569498, 0.99855773549452265, 0.99888914758589042, 0.99881120658850497, 0.99885640031361655, 0.99889032864094907, 0.99894536004257384, 0.99902516695731458, 0.99884687435442043, 0.99889911796498354], [0.99895888553845535, 0.99889979633693793, 0.99891469941796951, 0.99890165269091669, 0.99895216655777896, 0.99884186623078364, 0.9989252997587762, 0.99891318105401083, 0.99886504782766938, 0.99889412271436839], [0.9988627669968384, 0.99884447164728374, 0.99880587095634854, 0.99889468571464179, 0.99880593674998785, 0.99880400616233955, 0.99878897123152843, 0.99881295002839798, 0.99880472744854498, 0.99882416422016929], [0.99863285670978785, 0.99876172284556386, 0.99869120999842176, 0.99808198155755901, 0.9987449011022056, 0.99862539017529961, 0.99878376010502345, 0.99881334791813337, 0.99879671546335402, 0.99878226577274787], [0.99848189520699726, 0.99855443832412261, 0.99848307278147952, 0.99848656793117407, 0.99845766495764365, 0.99851235748913691, 0.99869520612017348, 0.99847913824422296, 0.9985682153194877, 0.99664066589941136], [0.995001457350564, 0.99433541390483104, 0.99721523562188163, 0.9976906456123551, 0.99761676089746587, 0.99777277941086762, 0.99734725597516205, 0.99796029073810621, 0.99773563153387479, 0.99766568802104738], [0.95707697063246222, 0.9700767467356135, 0.98587621212658116, 0.86539133938137569, 0.96122616458200916, 0.97077634374349631, 0.9835812601299353, 0.97555456397485762, 0.95823767716712493, 0.95859963448773389]]}
average_performances = {'R^2': [0.99772687580307196, 0.99785705186054707, 0.99773379185904099, 0.99781217772471575, 0.99764801201180442, 0.99734261647250799, 0.99666777459376665, 0.99403191945579261, 0.90599343408859545], 'MSE': [1.2572152985922973, 1.1921815335790629, 1.279920442274125, 1.2552938293100708, 1.3364459379155396, 1.5208505937364645, 1.9216144489524929, 3.3905420945903288, 53.464049910475794], 'Rp': [0.99887266577043743, 0.9989295045312101, 0.9988675583138471, 0.99890667181276671, 0.99882485511560815, 0.99867141516480973, 0.99833592222738488, 0.99703411590661573, 0.95863969129611915]}
repeats=10
fractions_unknown = [0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.45, 0.5, 0.55, 0.6, 0.65, 0.7, 0.75, 0.8, 0.85, 0.9, 0.95]
all_performances = {'R^2': [[0.9977304165364558, 0.9958484337632015, 0.9946991016733259, 0.9984600739509231, 0.9987108787043687, 0.996918438442937, 0.9948947326980404, 0.9965477614437267, 0.9973521482525584, 0.9962344815335836], [0.9980538990931249, 0.9928462779556669, 0.9979315128565555, 0.9964088417402559, 0.9978681516926907, 0.997851062731593, 0.9978759873895352, 0.9984341365572093, 0.9981768331751985, 0.9977355874876593], [0.9979372115606541, 0.9979564006205363, 0.997607370815898, 0.9981244572138728, 0.997943406603862, 0.9980147312842443, 0.997698448099084, 0.9973346164189013, 0.9946206357137068, 0.9960210703758193], [0.9979192849747648, 0.997875686232387, 0.9954254684143276, 0.9960777066575863, 0.9977538800289228, 0.9950654155796524, 0.9975941079546732, 0.9978209146167978, 0.9959879080678158, 0.9980606226324391], [0.9978631539344189, 0.9968971611684223, 0.9979163383832613, 0.9970255282842687, 0.9975297939828608, 0.9975890628673468, 0.9979592472996135, 0.9953038274366172, 0.9946044992763476, 0.997983657882271], [0.9980061989678627, 0.9978309203206511, 0.9960003804822812, 0.997731132462974, 0.9955441583020259, 0.9979322189560865, 0.9954272774806003, 0.9978409791549313, 0.9978356182616736, 0.9977561243686403], [0.9978252190764109, 0.995744293399712, 0.9956179546015647, 0.9977825028495262, 0.9978391158972572, 0.9976395620825507, 0.9979872893188194, 0.9976854992430177, 0.9950414979175285, 0.9980448774039337], [0.9952238578462257, 0.9979680379835076, 0.9976040206927023, 0.9975997087894845, 0.9963816900624036, 0.9977648921751733, 0.9978366527544721, 0.997802338360167, 0.9977982539923363, 0.9978584594775837], [0.9976237067126693, 0.9977117613166041, 0.9949296918384868, 0.9951114165984976, 0.9951770856192504, 0.9954427959890109, 0.9978314653181303, 0.9977853782959724, 0.9977420714019365, 0.9978177091175003], [0.9977083941295597, 0.9976228126025786, 0.997654897078054, 0.9977085881025979, 0.9949896410877794, 0.9974815415080514, 0.9949751208076849, 0.9977608283878059, 
0.997556248956862, 0.9976322746711433], [0.9957009786671515, 0.9956539545901317, 0.9975842356345869, 0.9977503247384579, 0.9974858105198536, 0.9975752413262848, 0.9975736007229752, 0.9950953702091059, 0.99766257041422, 0.9973899945326758], [0.9970442018601582, 0.9976247828095824, 0.9949035494677398, 0.9975591063704334, 0.9974854118932346, 0.9975776876275864, 0.997289132264497, 0.9974530372888757, 0.9946404069298848, 0.9974764131696962], [0.9969754175746416, 0.9939918966459342, 0.9974484905937855, 0.997347449525218, 0.9970998847678205, 0.9973947898180123, 0.997154826214691, 0.9972984924795981, 0.9973768839481293, 0.9966568609787594], [0.9936107074071697, 0.9972190435801113, 0.9969188708847818, 0.9968944647921016, 0.9969701172449037, 0.9968688433844136, 0.9970095089744409, 0.9972244442686635, 0.9969616128769491, 0.9935512469348506], [0.9957694164840568, 0.9966817665698352, 0.9964889866122081, 0.9963220982964596, 0.9942473237474563, 0.9963712066265045, 0.9965521408016388, 0.9965206204573674, 0.9941395701342747, 0.9962691245209846], [0.9956429696524687, 0.9925720422461043, 0.9923862213753434, 0.9800294091232294, 0.9944171902306017, 0.9956514078810652, 0.9942821793687842, 0.9934481874328244, 0.9909567184971732, 0.9934276568284861], [0.9284840463670238, 0.9841006014517468, 0.9859174422297259, 0.9877755198689593, 0.9874925388349071, 0.988815307608537, 0.9889044182039436, 0.9880012131722612, 0.9828373990992629, 0.9656637780949258], [0.877071356940868, 0.9286596827164708, 0.958341938650275, 0.9162842138097168, 0.8588077078987749, 0.9100375647128197, 0.8978820872621931, 0.922918161117212, 0.9424646030474616, 0.8979264645170958], [0.7222324451651807, 0.8708184244358632, 0.665323596322362, 0.7233247555599016, 0.7552643235075459, 0.6450585159311075, 0.8287125342544006, 0.8553790737977089, 0.8057501684345134, 0.7922859863574737]], 'MSE': [[1.1764781531117299, 2.2275267674425798, 3.0008788176762722, 1.0639343340632765, 1.1392254341337529, 1.1875563783066223, 2.892553784925521, 
2.1339559335571807, 1.2247585831930456, 2.2967294364415536], [1.1803215262907516, 3.1059682683281493, 1.153275511421664, 2.3179314857194093, 1.2838584845914454, 1.240168934762079, 1.2578026389128505, 1.1238460304079094, 1.3004741417558332, 1.1947565294902596], [1.2337280450057084, 1.0670773988282041, 1.2469462387313637, 1.1656196215235559, 1.140297530025616, 1.1860713534706495, 1.1472861462583426, 1.1802521728911124, 2.8631901993078483, 2.456931605434002], [1.1520538262655575, 1.2011201215774403, 2.660359827132297, 2.262296770641834, 1.2401820571594289, 2.8402413746499171, 1.1747837483098442, 1.2341097057140873, 2.2680139560281973, 1.1958060798926251], [1.3126768480036195, 1.6678581173075409, 1.2464267675030476, 1.6582535439212407, 1.2405664537642731, 1.2799233460800901, 1.1504638902909821, 2.525480052360046, 2.8019452621614764, 1.1523968139924883], [1.1636160336539527, 1.1580139066269926, 2.4554229232127529, 1.2254300694321481, 2.6920393970882777, 1.2282702031861594, 2.5991006580092622, 1.1835171919965726, 1.2467398653818922, 1.2785127730698094], [1.1994087891429377, 2.6279296184877015, 2.6113469811628405, 1.218802873814155, 1.2553153931171701, 1.3468169231593705, 1.1794255333025006, 1.3110730640435284, 2.6067560442611928, 1.2318244850215094], [2.5588185200468128, 1.2899601201023896, 1.3105371516573641, 1.3274033010474411, 1.9068054395473326, 1.2234479351143532, 1.2189996214231322, 1.3322041576845336, 1.2584199778951619, 1.2342248450130042], [1.285888611280585, 1.3614857285631687, 2.6258003829505649, 2.7895632744672088, 2.6192514367075241, 2.7522118830721376, 1.3169063003962225, 1.2926458801770926, 1.3104472168488477, 1.3566619502360693], [1.3195154321636966, 1.3278570702140824, 1.3080960704059392, 1.2644817265375026, 2.832768947903928, 1.3558555904253387, 3.0105499727886116, 1.2675577345437978, 1.3276610982244237, 1.3819080487654307], [2.626593170193356, 2.373156604497793, 1.3988342270760086, 1.2966570017110333, 1.3968875195785748, 1.39657370460328, 
1.4026288702843954, 2.7987935129102994, 1.3463790757842173, 1.409414662594535], [1.738970057658604, 1.3982789906654829, 2.9734517363468287, 1.4195411741123394, 1.4323766372494673, 1.4378570708761791, 1.4908805845158193, 1.4491336987441736, 3.0794107159520352, 1.4595244188402428], [1.7196998043574991, 3.2792724966986979, 1.4257403109494302, 1.5745483650618026, 1.7399394025536181, 1.5499407780839021, 1.5736742288496135, 1.5588950118473246, 1.5499680440207659, 1.9395369924172021], [3.8351315866346574, 1.6409483741360269, 1.7421067320155894, 1.7168250070872448, 1.7440359184134477, 1.7725827063391923, 1.7635646492243144, 1.6327018790008012, 1.7795082979576189, 3.692859445615976], [2.3635685338693491, 1.9388580929788004, 2.0327703213822947, 2.0963067743668486, 3.3926983981491414, 2.1303089819107734, 1.8977976220850989, 2.0275177577218249, 3.4049450702135653, 2.1200353727774393], [2.5010206593364201, 4.3832137267840023, 4.1653661304123641, 11.712182665661985, 3.2269644084958906, 2.553673699017013, 3.4168242050315256, 3.8494172527655803, 5.1459810423263708, 3.7558274843814434], [40.192124911138158, 9.3678106616751915, 8.1396809324352351, 7.0302210414553068, 7.2514938545056298, 6.311214300028106, 6.3817466252832826, 6.9012545395731983, 9.6268825906850548, 18.900030438202464], [70.426523646657557, 41.016554485200288, 23.810101911622631, 48.222969262549064, 81.941121699778208, 50.980350054607946, 58.411863950644786, 44.65380776459606, 32.65388627783161, 57.385325905147006], [159.73498747447229, 74.739066062873007, 193.80481627602379, 157.50512178291694, 140.20450214609838, 204.53990476002966, 98.457613867779301, 82.670359094909983, 111.14616452473722, 118.66610721031037]], 'Rp': [[0.9988664028033124, 0.9979419322352554, 0.99735088221433044, 0.99923042837419118, 0.99935902425521062, 0.99846304494818339, 0.9974441821437382, 0.99830792210140418, 0.9986872774683101, 0.99811829291181753], [0.99903060725383863, 0.99646237008423866, 0.99896559748208813, 0.99821172297128269, 
0.99894446632996625, 0.99892864312559104, 0.99893760939875575, 0.99921690700531551, 0.99909018723536014, 0.99887949163852929], [0.99897081547119149, 0.99897949331737235, 0.99880335393416542, 0.99906635056065274, 0.99897165267322585, 0.99900744942121467, 0.9988488942620517, 0.99867127453838911, 0.99730809792921915, 0.99800936804549689], [0.99895910615773564, 0.99893808080172464, 0.99771178787382808, 0.99803734781345466, 0.99887969192466897, 0.99754492867336364, 0.99879759936041146, 0.99891054199954632, 0.99800476140487293, 0.99903157866230374], [0.9989318488819926, 0.99844966769490484, 0.99896086125592742, 0.99851288162213314, 0.99876697403460568, 0.99879567088920951, 0.99898168731432335, 0.99765956847022508, 0.9973087294250329, 0.99899205326650975], [0.9990032619191388, 0.9989187086636736, 0.99799823510018359, 0.99886605211542678, 0.99777362618386212, 0.99896603192708389, 0.99771182640077172, 0.99892033826379212, 0.99891877609614643, 0.99887767717525777], [0.99891214252607508, 0.99787121009084412, 0.99781015795367911, 0.99889352336837156, 0.99892196828044477, 0.99882005870415935, 0.99899657806967401, 0.99884210368785165, 0.99752022541174701, 0.99902547786136686], [0.9976122832805242, 0.99898841950810402, 0.99880410175538781, 0.99880607225040063, 0.99818928488999892, 0.99888454204569921, 0.99891878741005902, 0.99890083509046079, 0.99890214616724537, 0.99892910898479947], [0.99881140888417719, 0.99885608988628061, 0.99747738994065893, 0.99755707166497365, 0.99758579207313913, 0.99772931671322462, 0.99891548243027461, 0.99889216976106576, 0.99887463493268325, 0.99890886768249065], [0.99885365200687526, 0.99881260188628229, 0.99882761053670344, 0.99885532512567932, 0.99749361225587363, 0.99874015909443925, 0.9974937329880571, 0.9988801279435302, 0.99878186186373219, 0.99881835093407856], [0.99784829109829476, 0.99782729909663959, 0.99879383447501913, 0.99887513496588753, 0.99874977321910174, 0.99879193382063747, 0.99878803014433815, 0.99754621369726526, 
0.99883153592109475, 0.99869641528079722], [0.99852257422424306, 0.9988117555909205, 0.99747959174142076, 0.99878042251638643, 0.99874621772452743, 0.99878820679202696, 0.99864442568684841, 0.99872911379759943, 0.99732256515265527, 0.99873762912129482], [0.99848727554465766, 0.99699979348434098, 0.99872583842906837, 0.99867437267994164, 0.99854906695260881, 0.99869786898223567, 0.99857988392404295, 0.9986485530360163, 0.99868879620721074, 0.99832869999350915], [0.99680256308799131, 0.99860889803210817, 0.99845889004856847, 0.9984462527681528, 0.99848521316747108, 0.99843439888885099, 0.99850531428715483, 0.99861185420023002, 0.99848086244765388, 0.99677157377272962], [0.99789193543621613, 0.99834052781750726, 0.99824568406675962, 0.99816592055606435, 0.99714627961696289, 0.99818423274421164, 0.99827519338624104, 0.99826208699033914, 0.99707933814311833, 0.99813309695788277], [0.99782091199974199, 0.99630577280155519, 0.99623452374566734, 0.99014358038785444, 0.99720579030256118, 0.99782474148921185, 0.99715687519500751, 0.9967201783455174, 0.9954681710462302, 0.99670919475922704], [0.96695613696128979, 0.99207217395155045, 0.99301461839459038, 0.99393285159056477, 0.99373344960994792, 0.99441728623683134, 0.99447297804009793, 0.99402229679606413, 0.99170209618039229, 0.98290847371189616], [0.94318260822999733, 0.96500687689542797, 0.97953452633476068, 0.95736138467100629, 0.9340562192933326, 0.95639483923981206, 0.95293454168750036, 0.96400422235651617, 0.97162856531622932, 0.95281340409056725], [0.85997653964949738, 0.93520796435754228, 0.85182560181273481, 0.87140377802299651, 0.8875239151940425, 0.86176506842112843, 0.91277377460973708, 0.92937107857990342, 0.91459481837498291, 0.89854150575191216]]}
average_performances = {'R^2': [0.9967396466999121, 0.9973182290679489, 0.9973258348706577, 0.9969580995159367, 0.9970672270515427, 0.9971905008757727, 0.9971207811790321, 0.9973837912134055, 0.9967173082208058, 0.9971090347332117, 0.9969472081355445, 0.9969053729681688, 0.996874499254659, 0.9963228860348388, 0.9959362254250788, 0.9922813982636081, 0.9787992264931292, 0.9110393780672886, 0.7664149823766058], 'MSE': [1.8343597622851533, 1.5158403551680351, 1.4687400311476402, 1.7228967467371228, 1.6035991095384801, 1.6230663021657818, 1.6588699705512906, 1.4660821069531527, 1.8710862664699419, 1.639625169197275, 1.7445918349233493, 1.7879425084961174, 1.7911215434839858, 2.1320264596424869, 2.3404806925455137, 4.4710471274212598, 12.010245989498163, 50.950250495863514, 134.14686432001514], 'Rp': [0.99837693894557522, 0.99866676025249679, 0.99866367501529785, 0.99848154246719112, 0.99853599428548634, 0.9985954533845337, 0.99856134459542134, 0.99869355813826799, 0.99836082239689694, 0.99855570346352507, 0.99847484617190752, 0.99845625023479234, 0.99843801492336315, 0.99816058207009117, 0.99797242957153043, 0.99615897400725739, 0.98972323614732249, 0.95769171881151516, 0.89229840447744768]}
'''
# Plot the MSE, R^2 and Rp
for metric in metrics:
plt.figure()
x = fractions_unknown
y = average_performances[metric]
plt.plot(x,y)
plt.xlabel("Fraction missing")
plt.ylabel(metric)
| 186.419355
| 11,687
| 0.811775
| 2,558
| 23,116
| 7.304144
| 0.444097
| 0.006851
| 0.003532
| 0.00578
| 0.030079
| 0.019696
| 0.014558
| 0.009955
| 0.008778
| 0.008778
| 0
| 0.752805
| 0.07458
| 23,116
| 124
| 11,688
| 186.419355
| 0.120606
| 0.025048
| 0
| 0.061538
| 0
| 0
| 0.114598
| 0.014666
| 0
| 0
| 0
| 0
| 0.030769
| 0
| null | null | 0
| 0.076923
| null | null | 0.046154
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
98a749a7e019015b054e5a31012319a72afae56b
| 51
|
py
|
Python
|
src/simple_s_model/__init__.py
|
cpaxton/simple_s_model_server
|
172f69e1f3af0a4e382bde2750cdff1fc2070afb
|
[
"BSD-2-Clause"
] | null | null | null |
src/simple_s_model/__init__.py
|
cpaxton/simple_s_model_server
|
172f69e1f3af0a4e382bde2750cdff1fc2070afb
|
[
"BSD-2-Clause"
] | null | null | null |
src/simple_s_model/__init__.py
|
cpaxton/simple_s_model_server
|
172f69e1f3af0a4e382bde2750cdff1fc2070afb
|
[
"BSD-2-Clause"
] | null | null | null |
### ROS imports
import rospy
from server import *
| 10.2
| 20
| 0.72549
| 7
| 51
| 5.285714
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.196078
| 51
| 4
| 21
| 12.75
| 0.902439
| 0.215686
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
98b3550a0f744214038b512c76809185929cfb28
| 222
|
py
|
Python
|
test/mixins/test_case.py
|
evilinc-dev/hive
|
898691bcae6dc9733d1e239113c327a96d2e6501
|
[
"MIT"
] | 1
|
2019-10-26T06:32:41.000Z
|
2019-10-26T06:32:41.000Z
|
test/mixins/test_case.py
|
evilinc-dev/hive
|
898691bcae6dc9733d1e239113c327a96d2e6501
|
[
"MIT"
] | 2
|
2020-11-06T04:24:53.000Z
|
2020-11-06T04:26:04.000Z
|
test/mixins/test_case.py
|
evilinc-dev/hive
|
898691bcae6dc9733d1e239113c327a96d2e6501
|
[
"MIT"
] | null | null | null |
from hive.models import User
from test import app
class TestCaseMixin:
@staticmethod
def client():
return app.test_client()
@staticmethod
def get_admin_user():
return User.query.first()
| 15.857143
| 33
| 0.671171
| 27
| 222
| 5.407407
| 0.62963
| 0.205479
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.252252
| 222
| 13
| 34
| 17.076923
| 0.879518
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| true
| 0
| 0.222222
| 0.222222
| 0.777778
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
7f285eec0f68e2d4799fe96aed6311b7d65a9706
| 49
|
py
|
Python
|
src/mmac_net/train_helpers.py
|
cmolder/medical-materials
|
e51807c211b718b3191ebde5c65c835b563f997a
|
[
"MIT"
] | null | null | null |
src/mmac_net/train_helpers.py
|
cmolder/medical-materials
|
e51807c211b718b3191ebde5c65c835b563f997a
|
[
"MIT"
] | null | null | null |
src/mmac_net/train_helpers.py
|
cmolder/medical-materials
|
e51807c211b718b3191ebde5c65c835b563f997a
|
[
"MIT"
] | null | null | null |
# Code pending permission from original authors
| 16.333333
| 47
| 0.816327
| 6
| 49
| 6.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163265
| 49
| 2
| 48
| 24.5
| 0.97561
| 0.918367
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7f350e0383417767db8251b221861535e2453861
| 92
|
py
|
Python
|
erudition/data_challenge/__init__.py
|
papsebestyen/erudition
|
35aa502a96189131baff714a6212eb56de2b1272
|
[
"MIT"
] | null | null | null |
erudition/data_challenge/__init__.py
|
papsebestyen/erudition
|
35aa502a96189131baff714a6212eb56de2b1272
|
[
"MIT"
] | null | null | null |
erudition/data_challenge/__init__.py
|
papsebestyen/erudition
|
35aa502a96189131baff714a6212eb56de2b1272
|
[
"MIT"
] | 1
|
2022-02-21T21:17:17.000Z
|
2022-02-21T21:17:17.000Z
|
from invoke import Collection
from . import tasks
data_ns = Collection.from_module(tasks)
| 15.333333
| 39
| 0.804348
| 13
| 92
| 5.538462
| 0.615385
| 0.388889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141304
| 92
| 5
| 40
| 18.4
| 0.911392
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7f3d5f3408bf2d8740ed91ef09955dc9fcee491d
| 24,254
|
py
|
Python
|
protos/gen/python/protos/public/uac/Collaborator_pb2_grpc.py
|
fool-sec-review/modeldb
|
44e7f3c1af6768c4c23a2d134f9a322fcf0320b5
|
[
"Apache-2.0"
] | null | null | null |
protos/gen/python/protos/public/uac/Collaborator_pb2_grpc.py
|
fool-sec-review/modeldb
|
44e7f3c1af6768c4c23a2d134f9a322fcf0320b5
|
[
"Apache-2.0"
] | null | null | null |
protos/gen/python/protos/public/uac/Collaborator_pb2_grpc.py
|
fool-sec-review/modeldb
|
44e7f3c1af6768c4c23a2d134f9a322fcf0320b5
|
[
"Apache-2.0"
] | null | null | null |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from ..uac import Collaborator_pb2 as uac_dot_Collaborator__pb2
from ..uac import UACService_pb2 as uac_dot_UACService__pb2
class CollaboratorServiceStub(object):
# missing associated documentation comment in .proto file
pass
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.getResources = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/getResources',
request_serializer=uac_dot_Collaborator__pb2.GetResources.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.GetResources.Response.FromString,
)
self.getResourcesSpecialPersonalWorkspace = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/getResourcesSpecialPersonalWorkspace',
request_serializer=uac_dot_Collaborator__pb2.GetResources.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.GetResources.Response.FromString,
)
self.setResource = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/setResource',
request_serializer=uac_dot_Collaborator__pb2.SetResource.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.SetResource.Response.FromString,
)
self.deleteResources = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/deleteResources',
request_serializer=uac_dot_Collaborator__pb2.DeleteResources.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.DeleteResources.Response.FromString,
)
self.getResourceAdmins = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/getResourceAdmins',
request_serializer=uac_dot_Collaborator__pb2.GetResourceAdmins.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.ResourceAdmins.FromString,
)
self.addResourceAdmins = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/addResourceAdmins',
request_serializer=uac_dot_Collaborator__pb2.ModifyResourceAdmins.SerializeToString,
response_deserializer=uac_dot_UACService__pb2.Empty.FromString,
)
self.removeResourceAdmins = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/removeResourceAdmins',
request_serializer=uac_dot_Collaborator__pb2.ModifyResourceAdmins.SerializeToString,
response_deserializer=uac_dot_UACService__pb2.Empty.FromString,
)
self.addOrUpdateProjectCollaborator = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/addOrUpdateProjectCollaborator',
request_serializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.Response.FromString,
)
self.removeProjectCollaborator = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/removeProjectCollaborator',
request_serializer=uac_dot_Collaborator__pb2.RemoveCollaborator.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.RemoveCollaborator.Response.FromString,
)
self.getProjectCollaborators = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/getProjectCollaborators',
request_serializer=uac_dot_Collaborator__pb2.GetCollaborator.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.GetCollaborator.Response.FromString,
)
self.addOrUpdateDatasetCollaborator = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/addOrUpdateDatasetCollaborator',
request_serializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.Response.FromString,
)
self.removeDatasetCollaborator = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/removeDatasetCollaborator',
request_serializer=uac_dot_Collaborator__pb2.RemoveCollaborator.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.RemoveCollaborator.Response.FromString,
)
self.getDatasetCollaborators = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/getDatasetCollaborators',
request_serializer=uac_dot_Collaborator__pb2.GetCollaborator.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.GetCollaborator.Response.FromString,
)
self.addOrUpdateRepositoryCollaborator = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/addOrUpdateRepositoryCollaborator',
request_serializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.Response.FromString,
)
self.removeRepositoryCollaborator = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/removeRepositoryCollaborator',
request_serializer=uac_dot_Collaborator__pb2.RemoveCollaborator.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.RemoveCollaborator.Response.FromString,
)
self.getRepositoryCollaborators = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/getRepositoryCollaborators',
request_serializer=uac_dot_Collaborator__pb2.GetCollaborator.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.GetCollaborator.Response.FromString,
)
self.addOrUpdateEndpointCollaborator = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/addOrUpdateEndpointCollaborator',
request_serializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.Response.FromString,
)
self.removeEndpointCollaborator = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/removeEndpointCollaborator',
request_serializer=uac_dot_Collaborator__pb2.RemoveCollaborator.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.RemoveCollaborator.Response.FromString,
)
self.getEndpointCollaborators = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/getEndpointCollaborators',
request_serializer=uac_dot_Collaborator__pb2.GetCollaborator.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.GetCollaborator.Response.FromString,
)
self.addOrUpdateRegisteredModelCollaborator = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/addOrUpdateRegisteredModelCollaborator',
request_serializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.Response.FromString,
)
self.removeRegisteredModelCollaborator = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/removeRegisteredModelCollaborator',
request_serializer=uac_dot_Collaborator__pb2.RemoveCollaborator.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.RemoveCollaborator.Response.FromString,
)
self.getRegisteredModelCollaborators = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/getRegisteredModelCollaborators',
request_serializer=uac_dot_Collaborator__pb2.GetCollaborator.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.GetCollaborator.Response.FromString,
)
self.addOrUpdateMonitoredEntityCollaborator = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/addOrUpdateMonitoredEntityCollaborator',
request_serializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.Response.FromString,
)
self.removeMonitoredEntityCollaborator = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/removeMonitoredEntityCollaborator',
request_serializer=uac_dot_Collaborator__pb2.RemoveCollaborator.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.RemoveCollaborator.Response.FromString,
)
self.getMonitoredEntityCollaborators = channel.unary_unary(
'/ai.verta.uac.CollaboratorService/getMonitoredEntityCollaborators',
request_serializer=uac_dot_Collaborator__pb2.GetCollaborator.SerializeToString,
response_deserializer=uac_dot_Collaborator__pb2.GetCollaborator.Response.FromString,
)
class CollaboratorServiceServicer(object):
# missing associated documentation comment in .proto file
pass
def getResources(self, request, context):
"""The caller must have permission to GET the resource accordingly
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def getResourcesSpecialPersonalWorkspace(self, request, context):
"""The caller must have permission to GET the resource accordingly
gets resources that is available in personal workspace (all except organization workspace resources)
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def setResource(self, request, context):
"""The caller must have permission to CREATE or UPDATE the resource accordingly
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def deleteResources(self, request, context):
"""The caller must have permission to DELETE the resource accordingly
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def getResourceAdmins(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def addResourceAdmins(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def removeResourceAdmins(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def addOrUpdateProjectCollaborator(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def removeProjectCollaborator(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def getProjectCollaborators(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def addOrUpdateDatasetCollaborator(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def removeDatasetCollaborator(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def getDatasetCollaborators(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def addOrUpdateRepositoryCollaborator(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def removeRepositoryCollaborator(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def getRepositoryCollaborators(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def addOrUpdateEndpointCollaborator(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def removeEndpointCollaborator(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def getEndpointCollaborators(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def addOrUpdateRegisteredModelCollaborator(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def removeRegisteredModelCollaborator(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def getRegisteredModelCollaborators(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def addOrUpdateMonitoredEntityCollaborator(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def removeMonitoredEntityCollaborator(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def getMonitoredEntityCollaborators(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_CollaboratorServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'getResources': grpc.unary_unary_rpc_method_handler(
servicer.getResources,
request_deserializer=uac_dot_Collaborator__pb2.GetResources.FromString,
response_serializer=uac_dot_Collaborator__pb2.GetResources.Response.SerializeToString,
),
'getResourcesSpecialPersonalWorkspace': grpc.unary_unary_rpc_method_handler(
servicer.getResourcesSpecialPersonalWorkspace,
request_deserializer=uac_dot_Collaborator__pb2.GetResources.FromString,
response_serializer=uac_dot_Collaborator__pb2.GetResources.Response.SerializeToString,
),
'setResource': grpc.unary_unary_rpc_method_handler(
servicer.setResource,
request_deserializer=uac_dot_Collaborator__pb2.SetResource.FromString,
response_serializer=uac_dot_Collaborator__pb2.SetResource.Response.SerializeToString,
),
'deleteResources': grpc.unary_unary_rpc_method_handler(
servicer.deleteResources,
request_deserializer=uac_dot_Collaborator__pb2.DeleteResources.FromString,
response_serializer=uac_dot_Collaborator__pb2.DeleteResources.Response.SerializeToString,
),
'getResourceAdmins': grpc.unary_unary_rpc_method_handler(
servicer.getResourceAdmins,
request_deserializer=uac_dot_Collaborator__pb2.GetResourceAdmins.FromString,
response_serializer=uac_dot_Collaborator__pb2.ResourceAdmins.SerializeToString,
),
'addResourceAdmins': grpc.unary_unary_rpc_method_handler(
servicer.addResourceAdmins,
request_deserializer=uac_dot_Collaborator__pb2.ModifyResourceAdmins.FromString,
response_serializer=uac_dot_UACService__pb2.Empty.SerializeToString,
),
'removeResourceAdmins': grpc.unary_unary_rpc_method_handler(
servicer.removeResourceAdmins,
request_deserializer=uac_dot_Collaborator__pb2.ModifyResourceAdmins.FromString,
response_serializer=uac_dot_UACService__pb2.Empty.SerializeToString,
),
'addOrUpdateProjectCollaborator': grpc.unary_unary_rpc_method_handler(
servicer.addOrUpdateProjectCollaborator,
request_deserializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.FromString,
response_serializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.Response.SerializeToString,
),
'removeProjectCollaborator': grpc.unary_unary_rpc_method_handler(
servicer.removeProjectCollaborator,
request_deserializer=uac_dot_Collaborator__pb2.RemoveCollaborator.FromString,
response_serializer=uac_dot_Collaborator__pb2.RemoveCollaborator.Response.SerializeToString,
),
'getProjectCollaborators': grpc.unary_unary_rpc_method_handler(
servicer.getProjectCollaborators,
request_deserializer=uac_dot_Collaborator__pb2.GetCollaborator.FromString,
response_serializer=uac_dot_Collaborator__pb2.GetCollaborator.Response.SerializeToString,
),
'addOrUpdateDatasetCollaborator': grpc.unary_unary_rpc_method_handler(
servicer.addOrUpdateDatasetCollaborator,
request_deserializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.FromString,
response_serializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.Response.SerializeToString,
),
'removeDatasetCollaborator': grpc.unary_unary_rpc_method_handler(
servicer.removeDatasetCollaborator,
request_deserializer=uac_dot_Collaborator__pb2.RemoveCollaborator.FromString,
response_serializer=uac_dot_Collaborator__pb2.RemoveCollaborator.Response.SerializeToString,
),
'getDatasetCollaborators': grpc.unary_unary_rpc_method_handler(
servicer.getDatasetCollaborators,
request_deserializer=uac_dot_Collaborator__pb2.GetCollaborator.FromString,
response_serializer=uac_dot_Collaborator__pb2.GetCollaborator.Response.SerializeToString,
),
'addOrUpdateRepositoryCollaborator': grpc.unary_unary_rpc_method_handler(
servicer.addOrUpdateRepositoryCollaborator,
request_deserializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.FromString,
response_serializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.Response.SerializeToString,
),
'removeRepositoryCollaborator': grpc.unary_unary_rpc_method_handler(
servicer.removeRepositoryCollaborator,
request_deserializer=uac_dot_Collaborator__pb2.RemoveCollaborator.FromString,
response_serializer=uac_dot_Collaborator__pb2.RemoveCollaborator.Response.SerializeToString,
),
'getRepositoryCollaborators': grpc.unary_unary_rpc_method_handler(
servicer.getRepositoryCollaborators,
request_deserializer=uac_dot_Collaborator__pb2.GetCollaborator.FromString,
response_serializer=uac_dot_Collaborator__pb2.GetCollaborator.Response.SerializeToString,
),
'addOrUpdateEndpointCollaborator': grpc.unary_unary_rpc_method_handler(
servicer.addOrUpdateEndpointCollaborator,
request_deserializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.FromString,
response_serializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.Response.SerializeToString,
),
'removeEndpointCollaborator': grpc.unary_unary_rpc_method_handler(
servicer.removeEndpointCollaborator,
request_deserializer=uac_dot_Collaborator__pb2.RemoveCollaborator.FromString,
response_serializer=uac_dot_Collaborator__pb2.RemoveCollaborator.Response.SerializeToString,
),
'getEndpointCollaborators': grpc.unary_unary_rpc_method_handler(
servicer.getEndpointCollaborators,
request_deserializer=uac_dot_Collaborator__pb2.GetCollaborator.FromString,
response_serializer=uac_dot_Collaborator__pb2.GetCollaborator.Response.SerializeToString,
),
'addOrUpdateRegisteredModelCollaborator': grpc.unary_unary_rpc_method_handler(
servicer.addOrUpdateRegisteredModelCollaborator,
request_deserializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.FromString,
response_serializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.Response.SerializeToString,
),
'removeRegisteredModelCollaborator': grpc.unary_unary_rpc_method_handler(
servicer.removeRegisteredModelCollaborator,
request_deserializer=uac_dot_Collaborator__pb2.RemoveCollaborator.FromString,
response_serializer=uac_dot_Collaborator__pb2.RemoveCollaborator.Response.SerializeToString,
),
'getRegisteredModelCollaborators': grpc.unary_unary_rpc_method_handler(
servicer.getRegisteredModelCollaborators,
request_deserializer=uac_dot_Collaborator__pb2.GetCollaborator.FromString,
response_serializer=uac_dot_Collaborator__pb2.GetCollaborator.Response.SerializeToString,
),
'addOrUpdateMonitoredEntityCollaborator': grpc.unary_unary_rpc_method_handler(
servicer.addOrUpdateMonitoredEntityCollaborator,
request_deserializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.FromString,
response_serializer=uac_dot_Collaborator__pb2.AddCollaboratorRequest.Response.SerializeToString,
),
'removeMonitoredEntityCollaborator': grpc.unary_unary_rpc_method_handler(
servicer.removeMonitoredEntityCollaborator,
request_deserializer=uac_dot_Collaborator__pb2.RemoveCollaborator.FromString,
response_serializer=uac_dot_Collaborator__pb2.RemoveCollaborator.Response.SerializeToString,
),
'getMonitoredEntityCollaborators': grpc.unary_unary_rpc_method_handler(
servicer.getMonitoredEntityCollaborators,
request_deserializer=uac_dot_Collaborator__pb2.GetCollaborator.FromString,
response_serializer=uac_dot_Collaborator__pb2.GetCollaborator.Response.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'ai.verta.uac.CollaboratorService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
| 53.07221
| 106
| 0.787045
| 2,188
| 24,254
| 8.402651
| 0.053931
| 0.033288
| 0.094969
| 0.110797
| 0.78477
| 0.781398
| 0.761273
| 0.629589
| 0.629589
| 0.624476
| 0
| 0.005017
| 0.145254
| 24,254
| 456
| 107
| 53.188596
| 0.88182
| 0.073843
| 0
| 0.548052
| 1
| 0
| 0.149339
| 0.093873
| 0
| 0
| 0
| 0
| 0
| 1
| 0.07013
| false
| 0.05974
| 0.007792
| 0
| 0.083117
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
7f9b23eb1b4488b06ed7426a8cf50574a3d87ebe
| 110
|
py
|
Python
|
python/tvm/auto_tensorize/hw_abstraction/__init__.py
|
QinHan-Erin/AMOS
|
634bf48edf4015e4a69a8c32d49b96bce2b5f16f
|
[
"Apache-2.0"
] | 22
|
2022-03-18T07:29:31.000Z
|
2022-03-23T14:54:32.000Z
|
python/tvm/auto_tensorize/hw_abstraction/__init__.py
|
QinHan-Erin/AMOS
|
634bf48edf4015e4a69a8c32d49b96bce2b5f16f
|
[
"Apache-2.0"
] | null | null | null |
python/tvm/auto_tensorize/hw_abstraction/__init__.py
|
QinHan-Erin/AMOS
|
634bf48edf4015e4a69a8c32d49b96bce2b5f16f
|
[
"Apache-2.0"
] | 2
|
2022-03-18T08:26:34.000Z
|
2022-03-20T06:02:48.000Z
|
from .cuda import *
from .llvm import *
from .opencl import *
from .tenet import *
from .hw_abs_base import *
| 18.333333
| 26
| 0.727273
| 17
| 110
| 4.588235
| 0.529412
| 0.512821
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 110
| 5
| 27
| 22
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
7fa17a55513e716632fc05513270a0938687e08f
| 195
|
py
|
Python
|
opyoid/bindings/multi_binding/__init__.py
|
illuin-tech/opyoid
|
a2ca485e1820ba0d12a86ba91100aa097a1e5736
|
[
"MIT"
] | 37
|
2020-08-25T07:22:41.000Z
|
2022-03-18T03:05:53.000Z
|
opyoid/bindings/multi_binding/__init__.py
|
illuin-tech/opyoid
|
a2ca485e1820ba0d12a86ba91100aa097a1e5736
|
[
"MIT"
] | 18
|
2020-10-04T17:33:24.000Z
|
2021-12-16T16:28:35.000Z
|
opyoid/bindings/multi_binding/__init__.py
|
illuin-tech/opyoid
|
a2ca485e1820ba0d12a86ba91100aa097a1e5736
|
[
"MIT"
] | 2
|
2021-01-26T19:58:15.000Z
|
2021-11-30T01:10:25.000Z
|
from .item_binding import ItemBinding
from .list_provider import ListProvider
from .multi_binding import MultiBinding
from .multi_binding_to_provider_adapter import MultiBindingToProviderAdapter
| 39
| 76
| 0.897436
| 23
| 195
| 7.304348
| 0.565217
| 0.154762
| 0.190476
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082051
| 195
| 4
| 77
| 48.75
| 0.938547
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
f6860ffba64048c3722b0b34da73a9cf8bd5c5ef
| 427
|
py
|
Python
|
Face Reconstruction/Self-Supervised Monocular 3D Face Reconstruction by Occlusion-Aware Multi-view Geometry Consistency/src_common/geometry/gpmm/HDF5IO.py
|
swapnilgarg7/Face-X
|
fab21bf667fa7387b8e73e5a1d72fcba4fba2818
|
[
"MIT"
] | 302
|
2020-07-15T11:42:08.000Z
|
2022-03-29T15:22:00.000Z
|
Face Reconstruction/Self-Supervised Monocular 3D Face Reconstruction by Occlusion-Aware Multi-view Geometry Consistency/src_common/geometry/gpmm/HDF5IO.py
|
swapnilgarg7/Face-X
|
fab21bf667fa7387b8e73e5a1d72fcba4fba2818
|
[
"MIT"
] | 704
|
2020-09-30T10:44:13.000Z
|
2022-03-30T07:18:28.000Z
|
Face Reconstruction/Self-Supervised Monocular 3D Face Reconstruction by Occlusion-Aware Multi-view Geometry Consistency/src_common/geometry/gpmm/HDF5IO.py
|
swapnilgarg7/Face-X
|
fab21bf667fa7387b8e73e5a1d72fcba4fba2818
|
[
"MIT"
] | 342
|
2020-10-02T14:04:49.000Z
|
2022-03-31T10:14:20.000Z
|
#!/usr/bin/env python
import h5py
import numpy as np
class HDF5IO:
def __init__(self, path_file, handler_file = None, mode='a'):
if(handler_file == None):
self.handler_file = h5py.File(path_file, mode=mode)
else:
self.handler_file = handler_file
def GetMainKeys(self):
return self.handler_file.keys()
def GetValue(self, name):
return self.handler_file[name]
| 28.466667
| 65
| 0.64637
| 58
| 427
| 4.534483
| 0.465517
| 0.292776
| 0.228137
| 0.159696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009346
| 0.248244
| 427
| 15
| 66
| 28.466667
| 0.809969
| 0.046838
| 0
| 0
| 0
| 0
| 0.002457
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.166667
| 0.166667
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
f68fbe54d381718538ce663d80895a9b861a3e1a
| 461
|
py
|
Python
|
python2sky/context/noop_span.py
|
alonelaval/skywalking-client-python
|
414c16a71ae7d48c199dc25ae2641a4b983d44ec
|
[
"Apache-2.0"
] | 19
|
2020-05-05T12:09:13.000Z
|
2020-06-29T11:50:49.000Z
|
python2sky/context/noop_span.py
|
alonelaval/skywalking-client-python
|
414c16a71ae7d48c199dc25ae2641a4b983d44ec
|
[
"Apache-2.0"
] | null | null | null |
python2sky/context/noop_span.py
|
alonelaval/skywalking-client-python
|
414c16a71ae7d48c199dc25ae2641a4b983d44ec
|
[
"Apache-2.0"
] | 2
|
2020-05-05T14:07:22.000Z
|
2020-05-06T01:49:40.000Z
|
# -*- coding:utf-8 -*-
# author:huawei
from python2sky.context.span import Span
class NoopSpan(Span):
def is_entry(self):
return False
def is_exit(self):
return False
def tag(self, key, value):
return self
def start(self):
return self
def end(self):
return self
def ref(self, context_carrier):
pass
def log(self, ex):
return self
def transform(self):
pass
| 15.366667
| 40
| 0.577007
| 59
| 461
| 4.457627
| 0.508475
| 0.152091
| 0.197719
| 0.136882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006452
| 0.327549
| 461
| 30
| 41
| 15.366667
| 0.841935
| 0.073753
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0.111111
| 0.055556
| 0.333333
| 0.888889
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 5
|
f6a57691feb2bffa7be7da1705b96cddd4d6d2d6
| 20,764
|
py
|
Python
|
netbox_graphql/ipam_schema.py
|
ninech/django-netbox-graphql
|
8383570bdf3a8ce8d9d912c5b8f7b053b31c7363
|
[
"MIT"
] | 17
|
2017-08-17T02:38:09.000Z
|
2022-01-05T15:36:20.000Z
|
netbox_graphql/ipam_schema.py
|
ninech/django-netbox-graphql
|
8383570bdf3a8ce8d9d912c5b8f7b053b31c7363
|
[
"MIT"
] | 2
|
2017-09-13T14:53:56.000Z
|
2018-02-08T14:06:54.000Z
|
netbox_graphql/ipam_schema.py
|
ninech/django-netbox-graphql
|
8383570bdf3a8ce8d9d912c5b8f7b053b31c7363
|
[
"MIT"
] | 2
|
2020-03-04T11:51:10.000Z
|
2021-03-11T19:24:37.000Z
|
import graphene
from graphene import AbstractType, Node
from graphene_django.converter import convert_django_field
from graphene_django.filter import DjangoFilterConnectionField
from graphene_django.types import DjangoObjectType
from graphene import AbstractType, Field, Node, ClientIDMutation, AbstractType
from graphene import ID, Boolean, Float, Int, List, String
from graphql_relay.node.node import from_global_id
from .custom_filter_fields import date_types, string_types, number_types
from .helper_methods import not_none, set_and_save
from ipam.models import IPAddress, VLANGroup, Role, VLAN, VRF, RIR, Aggregate, IPAddress, Prefix
from tenancy.models import Tenant
from ipam.fields import IPNetworkField, IPAddressField
from dcim.models import Site, Interface
@convert_django_field.register(IPNetworkField)
def iPNetworkFieldConvert(field, registry=None):
return graphene.String()
@convert_django_field.register(IPAddressField)
def iPAddressFieldConvert(field, registry=None):
return graphene.String()
# Nodes
class IPAddressNode(DjangoObjectType):
class Meta:
model = IPAddress
interfaces = (Node, )
filter_fields = {
'id': ['exact']
}
class RoleNode(DjangoObjectType):
class Meta:
model = Role
interfaces = (Node, )
filter_fields = {
'id': ['exact'],
'name': string_types,
'slug': ['exact'],
}
class VLANGroupNode(DjangoObjectType):
class Meta:
model = VLANGroup
interfaces = (Node, )
filter_fields = {
'id': ['exact'],
'name': string_types,
'slug': ['exact'],
}
class VLANNode(DjangoObjectType):
class Meta:
model = VLAN
interfaces = (Node, )
filter_fields = {
'id': ['exact'],
'name': string_types,
}
class VRFNode(DjangoObjectType):
class Meta:
model = VRF
interfaces = (Node, )
filter_fields = {
'id': ['exact'],
'name': string_types,
}
class RIRNode(DjangoObjectType):
class Meta:
model = RIR
interfaces = (Node, )
filter_fields = {
'id': ['exact'],
'name': string_types,
'slug': ['exact'],
}
class AggregateNode(DjangoObjectType):
class Meta:
model = Aggregate
interfaces = (Node, )
filter_fields = {
'id': ['exact'],
}
class PrefixNode(DjangoObjectType):
class Meta:
model = Prefix
interfaces = (Node, )
filter_fields = {
'id': ['exact'],
}
# Queries
class IpamQuery(AbstractType):
ip_address = DjangoFilterConnectionField(IPAddressNode)
vlan_roles = DjangoFilterConnectionField(RoleNode)
vlan_groups = DjangoFilterConnectionField(VLANGroupNode)
vlans = DjangoFilterConnectionField(VLANNode)
vrfs = DjangoFilterConnectionField(VRFNode)
rirs = DjangoFilterConnectionField(RIRNode)
aggregates = DjangoFilterConnectionField(AggregateNode)
prefixes = DjangoFilterConnectionField(PrefixNode)
# Mutations
class NewRole(ClientIDMutation):
    # Creates a Role from plain scalar inputs.
    vlan_role = Field(RoleNode)
    class Input:
        slug = String()
        name = String(default_value=None)
        weight = Int(default_value=None)
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        role = Role()
        writable = ['name', 'slug', 'weight']
        return NewRole(vlan_role=set_and_save(writable, input, role))
class UpdateRole(ClientIDMutation):
    # Updates an existing Role located by its relay global id.
    vlan_role = Field(RoleNode)
    class Input:
        id = String()
        slug = String(default_value=None)
        name = String(default_value=None)
        weight = Int(default_value=None)
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        pk = from_global_id(input.get('id'))[1]
        role = Role.objects.get(pk=pk)
        writable = ['name', 'slug', 'weight']
        return UpdateRole(vlan_role=set_and_save(writable, input, role))
class DeleteRole(ClientIDMutation):
    # Deletes a Role and echoes the removed instance back to the caller.
    vlan_role = Field(RoleNode)
    class Input:
        id = String()
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        pk = from_global_id(input.get('id'))[1]
        role = Role.objects.get(pk=pk)
        role.delete()
        return DeleteRole(vlan_role=role)
class NewVLANGroup(ClientIDMutation):
    # Creates a VLANGroup; 'site' is an optional relay id resolved to a Site.
    vlan_group = Field(VLANGroupNode)
    class Input:
        name = String(default_value=None)
        slug = String(default_value=None)
        site = String(default_value=None)
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        group = VLANGroup()
        site_gid = input.get('site')
        if not_none(site_gid):
            group.site = Site.objects.get(pk=from_global_id(site_gid)[1])
        return NewVLANGroup(vlan_group=set_and_save(['name', 'slug'], input, group))
class UpdateVLANGroup(ClientIDMutation):
    # Updates a VLANGroup located by relay global id; 'site' is optional.
    vlan_group = Field(VLANGroupNode)
    class Input:
        id = String()
        name = String(default_value=None)
        slug = String(default_value=None)
        site = String(default_value=None)
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        pk = from_global_id(input.get('id'))[1]
        group = VLANGroup.objects.get(pk=pk)
        site_gid = input.get('site')
        if not_none(site_gid):
            group.site = Site.objects.get(pk=from_global_id(site_gid)[1])
        return UpdateVLANGroup(vlan_group=set_and_save(['name', 'slug'], input, group))
class DeleteVLANGroup(ClientIDMutation):
    # Deletes a VLANGroup and returns the removed instance.
    vlan_group = Field(VLANGroupNode)
    class Input:
        id = String()
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        pk = from_global_id(input.get('id'))[1]
        group = VLANGroup.objects.get(pk=pk)
        group.delete()
        return DeleteVLANGroup(vlan_group=group)
class NewVLAN(ClientIDMutation):
    # Creates a VLAN. site/group/tenant/role are optional relay global ids
    # resolved to their model instances before the scalars are saved.
    vlan = Field(VLANNode)
    class Input:
        site = String(default_value=None)
        group = String(default_value=None)
        vid = Int(default_value=None)
        name = String(default_value=None)
        tenant = String(default_value=None)
        status = Int(default_value=None)
        role = String(default_value=None)
        description = String(default_value=None)
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        temp = VLAN()
        for attr, model in (('site', Site), ('group', VLANGroup),
                            ('tenant', Tenant), ('role', Role)):
            gid = input.get(attr)
            if not_none(gid):
                setattr(temp, attr, model.objects.get(pk=from_global_id(gid)[1]))
        # Fix: the original list was ['name', 'vid', 'name', 'description'] --
        # 'name' was duplicated and the accepted 'status' input was silently
        # dropped (sibling IPAddress/Prefix mutations do persist 'status').
        fields = ['name', 'vid', 'status', 'description']
        return NewVLAN(vlan=set_and_save(fields, input, temp))
class UpdateVLAN(ClientIDMutation):
    # Updates a VLAN located by relay global id. site/group/tenant/role are
    # optional relay ids resolved to their model instances before saving.
    vlan = Field(VLANNode)
    class Input:
        id = String()
        site = String(default_value=None)
        group = String(default_value=None)
        vid = Int(default_value=None)
        name = String(default_value=None)
        tenant = String(default_value=None)
        status = Int(default_value=None)
        role = String(default_value=None)
        description = String(default_value=None)
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        temp = VLAN.objects.get(pk=from_global_id(input.get('id'))[1])
        for attr, model in (('site', Site), ('group', VLANGroup),
                            ('tenant', Tenant), ('role', Role)):
            gid = input.get(attr)
            if not_none(gid):
                setattr(temp, attr, model.objects.get(pk=from_global_id(gid)[1]))
        # Fix: the original list was ['name', 'vid', 'name', 'description'] --
        # 'name' was duplicated and the accepted 'status' input was silently
        # dropped (sibling IPAddress/Prefix mutations do persist 'status').
        fields = ['name', 'vid', 'status', 'description']
        return UpdateVLAN(vlan=set_and_save(fields, input, temp))
class DeleteVLAN(ClientIDMutation):
    # Deletes a VLAN and returns the removed instance.
    vlan = Field(VLANNode)
    class Input:
        id = String()
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        pk = from_global_id(input.get('id'))[1]
        vlan = VLAN.objects.get(pk=pk)
        vlan.delete()
        return DeleteVLAN(vlan=vlan)
# VRF
class NewVRF(ClientIDMutation):
    # Creates a VRF; 'tenant' is an optional relay id resolved to a Tenant.
    vrf = Field(VRFNode)
    class Input:
        name = String(default_value=None)
        rd = String(default_value=None)
        tenant = String(default_value=None)
        enforce_unique = Boolean(default_value=None)
        description = String(default_value=None)
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        vrf = VRF()
        tenant_gid = input.get('tenant')
        if not_none(tenant_gid):
            vrf.tenant = Tenant.objects.get(pk=from_global_id(tenant_gid)[1])
        writable = ['name', 'rd', 'enforce_unique', 'description']
        return NewVRF(vrf=set_and_save(writable, input, vrf))
class UpdateVRF(ClientIDMutation):
    # Updates a VRF located by relay global id; 'tenant' is optional.
    vrf = Field(VRFNode)
    class Input:
        id = String()
        name = String(default_value=None)
        rd = String(default_value=None)
        tenant = String(default_value=None)
        enforce_unique = Boolean(default_value=None)
        description = String(default_value=None)
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        pk = from_global_id(input.get('id'))[1]
        vrf = VRF.objects.get(pk=pk)
        tenant_gid = input.get('tenant')
        if not_none(tenant_gid):
            vrf.tenant = Tenant.objects.get(pk=from_global_id(tenant_gid)[1])
        writable = ['name', 'rd', 'enforce_unique', 'description']
        return UpdateVRF(vrf=set_and_save(writable, input, vrf))
class DeleteVRF(ClientIDMutation):
    # Deletes a VRF and returns the removed instance.
    vrf = Field(VRFNode)
    class Input:
        id = String()
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        pk = from_global_id(input.get('id'))[1]
        vrf = VRF.objects.get(pk=pk)
        vrf.delete()
        return DeleteVRF(vrf=vrf)
# RIR
class NewRIR(ClientIDMutation):
    # Creates a RIR from plain scalar inputs (no foreign keys to resolve).
    rir = Field(RIRNode)
    class Input:
        name = String(default_value=None)
        slug = String(default_value=None)
        is_private = Boolean(default_value=None)
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        new_rir = RIR()
        writable = ['name', 'slug', 'is_private']
        return NewRIR(rir=set_and_save(writable, input, new_rir))
class UpdateRIR(ClientIDMutation):
    # Updates a RIR located by relay global id.
    rir = Field(RIRNode)
    class Input:
        id = String()
        name = String(default_value=None)
        slug = String(default_value=None)
        is_private = Boolean(default_value=None)
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        pk = from_global_id(input.get('id'))[1]
        rir = RIR.objects.get(pk=pk)
        writable = ['name', 'slug', 'is_private']
        return UpdateRIR(rir=set_and_save(writable, input, rir))
class DeleteRIR(ClientIDMutation):
    # Deletes a RIR and returns the removed instance.
    rir = Field(RIRNode)
    class Input:
        id = String()
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        pk = from_global_id(input.get('id'))[1]
        rir = RIR.objects.get(pk=pk)
        rir.delete()
        return DeleteRIR(rir=rir)
# Aggregate
class NewAggregate(ClientIDMutation):
    # Creates an Aggregate; 'rir' is an optional relay id resolved to a RIR.
    aggregate = Field(AggregateNode)
    class Input:
        family = Int(default_value=None)
        prefix = String(default_value=None)
        rir = String(default_value=None)
        date_added = String(default_value=None)
        description = String(default_value=None)
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        agg = Aggregate()
        rir_gid = input.get('rir')
        if not_none(rir_gid):
            agg.rir = RIR.objects.get(pk=from_global_id(rir_gid)[1])
        writable = ['family', 'prefix', 'date_added', 'description']
        return NewAggregate(aggregate=set_and_save(writable, input, agg))
class UpdateAggregate(ClientIDMutation):
    # Updates an Aggregate located by relay global id; 'rir' is optional.
    aggregate = Field(AggregateNode)
    class Input:
        id = String()
        family = Int(default_value=None)
        prefix = String(default_value=None)
        rir = String(default_value=None)
        date_added = String(default_value=None)
        description = String(default_value=None)
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        pk = from_global_id(input.get('id'))[1]
        agg = Aggregate.objects.get(pk=pk)
        rir_gid = input.get('rir')
        if not_none(rir_gid):
            agg.rir = RIR.objects.get(pk=from_global_id(rir_gid)[1])
        writable = ['family', 'prefix', 'date_added', 'description']
        return UpdateAggregate(aggregate=set_and_save(writable, input, agg))
class DeleteAggregate(ClientIDMutation):
    # Deletes an Aggregate and returns the removed instance.
    aggregate = Field(AggregateNode)
    class Input:
        id = String()
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        pk = from_global_id(input.get('id'))[1]
        agg = Aggregate.objects.get(pk=pk)
        agg.delete()
        return DeleteAggregate(aggregate=agg)
# IPAddress
class NewIPAddress(ClientIDMutation):
    # Creates an IPAddress; vrf/tenant/interface/nat_inside are optional
    # relay ids resolved to their model instances before saving.
    ip_address = Field(IPAddressNode)
    class Input:
        family = Int(default_value=None)
        address = String(default_value=None)
        vrf = String(default_value=None)
        tenant = String(default_value=None)
        status = Int(default_value=None)
        interface = String(default_value=None)
        nat_inside = String(default_value=None)
        description = String(default_value=None)
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        addr = IPAddress()
        related = (('vrf', VRF), ('tenant', Tenant),
                   ('interface', Interface), ('nat_inside', IPAddress))
        for attr, model in related:
            gid = input.get(attr)
            if not_none(gid):
                setattr(addr, attr, model.objects.get(pk=from_global_id(gid)[1]))
        writable = ['family', 'address', 'status', 'description']
        return NewIPAddress(ip_address=set_and_save(writable, input, addr))
class UpdateIPAddress(ClientIDMutation):
    # Updates an IPAddress located by relay global id; related objects are
    # optional relay ids resolved to model instances before saving.
    ip_address = Field(IPAddressNode)
    class Input:
        id = String()
        family = Int(default_value=None)
        address = String(default_value=None)
        vrf = String(default_value=None)
        tenant = String(default_value=None)
        status = Int(default_value=None)
        interface = String(default_value=None)
        nat_inside = String(default_value=None)
        description = String(default_value=None)
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        pk = from_global_id(input.get('id'))[1]
        addr = IPAddress.objects.get(pk=pk)
        related = (('vrf', VRF), ('tenant', Tenant),
                   ('interface', Interface), ('nat_inside', IPAddress))
        for attr, model in related:
            gid = input.get(attr)
            if not_none(gid):
                setattr(addr, attr, model.objects.get(pk=from_global_id(gid)[1]))
        writable = ['family', 'address', 'status', 'description']
        return UpdateIPAddress(ip_address=set_and_save(writable, input, addr))
class DeleteIPAddress(ClientIDMutation):
    # Deletes an IPAddress and returns the removed instance.
    ip_address = Field(IPAddressNode)
    class Input:
        id = String()
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        pk = from_global_id(input.get('id'))[1]
        addr = IPAddress.objects.get(pk=pk)
        addr.delete()
        return DeleteIPAddress(ip_address=addr)
# Prefix
class NewPrefix(ClientIDMutation):
    # Creates a Prefix; vrf/tenant/site/vlan/role are optional relay ids
    # resolved to their model instances before saving.
    prefix = Field(PrefixNode)
    class Input:
        family = Int(default_value=None)
        prefix = String(default_value=None)
        site = String(default_value=None)
        vrf = String(default_value=None)
        tenant = String(default_value=None)
        vlan = String(default_value=None)
        status = Int(default_value=None)
        role = String(default_value=None)
        description = String(default_value=None)
        is_pool = Boolean(default_value=None)
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        pfx = Prefix()
        related = (('vrf', VRF), ('tenant', Tenant), ('site', Site),
                   ('vlan', VLAN), ('role', Role))
        for attr, model in related:
            gid = input.get(attr)
            if not_none(gid):
                setattr(pfx, attr, model.objects.get(pk=from_global_id(gid)[1]))
        writable = ['family', 'prefix', 'status', 'is_pool', 'description']
        return NewPrefix(prefix=set_and_save(writable, input, pfx))
class UpdatePrefix(ClientIDMutation):
    # Updates a Prefix located by relay global id; related objects are
    # optional relay ids resolved to model instances before saving.
    prefix = Field(PrefixNode)
    class Input:
        id = String()
        family = Int(default_value=None)
        prefix = String(default_value=None)
        site = String(default_value=None)
        vrf = String(default_value=None)
        tenant = String(default_value=None)
        vlan = String(default_value=None)
        status = Int(default_value=None)
        role = String(default_value=None)
        is_pool = Boolean(default_value=None)
        description = String(default_value=None)
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        pk = from_global_id(input.get('id'))[1]
        pfx = Prefix.objects.get(pk=pk)
        related = (('vrf', VRF), ('tenant', Tenant), ('site', Site),
                   ('vlan', VLAN), ('role', Role))
        for attr, model in related:
            gid = input.get(attr)
            if not_none(gid):
                setattr(pfx, attr, model.objects.get(pk=from_global_id(gid)[1]))
        writable = ['family', 'prefix', 'status', 'is_pool', 'description']
        return UpdatePrefix(prefix=set_and_save(writable, input, pfx))
class DeletePrefix(ClientIDMutation):
    # Deletes a Prefix and returns the removed instance.
    prefix = Field(PrefixNode)
    class Input:
        id = String()
    @classmethod
    def mutate_and_get_payload(cls, input, context, info):
        pk = from_global_id(input.get('id'))[1]
        pfx = Prefix.objects.get(pk=pk)
        pfx.delete()
        return DeletePrefix(prefix=pfx)
# Registry of all IPAM mutations (new/update/delete per model), mixed into
# the project's top-level Mutation type.
class IpamMutations(AbstractType):
    # Roles
    new_vlan_role = NewRole.Field()
    update_vlan_role = UpdateRole.Field()
    delete_vlan_role = DeleteRole.Field()
    # VLAN Group
    new_vlan_group = NewVLANGroup.Field()
    update_vlan_group = UpdateVLANGroup.Field()
    delete_vlan_group = DeleteVLANGroup.Field()
    # VLAN
    new_vlan = NewVLAN.Field()
    update_vlan = UpdateVLAN.Field()
    delete_vlan = DeleteVLAN.Field()
    # VRF
    new_vrf = NewVRF.Field()
    update_vrf = UpdateVRF.Field()
    delete_vrf = DeleteVRF.Field()
    # RIR
    new_rir = NewRIR.Field()
    update_rir = UpdateRIR.Field()
    delete_rir = DeleteRIR.Field()
    # Aggregate
    new_aggregate = NewAggregate.Field()
    update_aggregate = UpdateAggregate.Field()
    delete_aggregate = DeleteAggregate.Field()
    # IPAddress
    new_ip_address = NewIPAddress.Field()
    update_ip_address = UpdateIPAddress.Field()
    delete_ip_address = DeleteIPAddress.Field()
    # Prefixes
    new_prefix = NewPrefix.Field()
    update_prefix = UpdatePrefix.Field()
    delete_prefix = DeletePrefix.Field()
| 31.604262
| 96
| 0.642843
| 2,448
| 20,764
| 5.274101
| 0.058415
| 0.08272
| 0.110294
| 0.114166
| 0.766013
| 0.762296
| 0.735807
| 0.71807
| 0.661219
| 0.620014
| 0
| 0.003045
| 0.240801
| 20,764
| 656
| 97
| 31.652439
| 0.815973
| 0.005635
| 0
| 0.738281
| 0
| 0
| 0.033012
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.050781
| false
| 0
| 0.027344
| 0.003906
| 0.367188
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f6a893a4c9dc6ff2ac7493c6887f147764b4db17
| 40
|
py
|
Python
|
tests/__init__.py
|
HaigangLiu/covid19-us
|
be60401142d5f517456606a8afa7a6b5f5f399dd
|
[
"MIT"
] | 2
|
2021-04-10T22:53:41.000Z
|
2022-02-20T13:20:19.000Z
|
tests/__init__.py
|
HaigangLiu/covid19-us
|
be60401142d5f517456606a8afa7a6b5f5f399dd
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
HaigangLiu/covid19-us
|
be60401142d5f517456606a8afa7a6b5f5f399dd
|
[
"MIT"
] | null | null | null |
"""Unit test package for covid19_us."""
| 20
| 39
| 0.7
| 6
| 40
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057143
| 0.125
| 40
| 1
| 40
| 40
| 0.714286
| 0.825
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f6bb6ec15f96cd8fc35c408234bd6466817fcc31
| 214
|
py
|
Python
|
covidprognosis/data/__init__.py
|
olopade-lab/CovidPrognosis
|
d66f49def441d4f89939732abd9bec5525cc384b
|
[
"MIT"
] | 151
|
2021-01-13T19:50:19.000Z
|
2022-03-30T07:16:10.000Z
|
covidprognosis/data/__init__.py
|
olopade-lab/CovidPrognosis
|
d66f49def441d4f89939732abd9bec5525cc384b
|
[
"MIT"
] | 14
|
2021-01-29T15:11:07.000Z
|
2022-01-28T04:34:03.000Z
|
covidprognosis/data/__init__.py
|
olopade-lab/CovidPrognosis
|
d66f49def441d4f89939732abd9bec5525cc384b
|
[
"MIT"
] | 35
|
2021-01-15T21:21:50.000Z
|
2022-01-17T06:17:04.000Z
|
from .base_dataset import BaseDataset
from .chexpert import CheXpertDataset
from .combined_datasets import CombinedXrayDataset
from .mimic_cxr import MimicCxrJpgDataset
from .nih_chest_xrays import NIHChestDataset
| 35.666667
| 50
| 0.883178
| 25
| 214
| 7.36
| 0.68
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093458
| 214
| 5
| 51
| 42.8
| 0.948454
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f6cf4d17eebecd705755bfd45792b5fd727da591
| 63
|
py
|
Python
|
dbinorm/tests/test_oracle.py
|
rabus-t48/dbinorm
|
9cbf8df7e496f1b1e3accfb10e5ede0dc4b4d423
|
[
"MIT"
] | null | null | null |
dbinorm/tests/test_oracle.py
|
rabus-t48/dbinorm
|
9cbf8df7e496f1b1e3accfb10e5ede0dc4b4d423
|
[
"MIT"
] | null | null | null |
dbinorm/tests/test_oracle.py
|
rabus-t48/dbinorm
|
9cbf8df7e496f1b1e3accfb10e5ede0dc4b4d423
|
[
"MIT"
] | null | null | null |
#coding: utf-8
""" Tests for Oracle Api.
"""
import unittest
| 9
| 25
| 0.650794
| 9
| 63
| 4.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019608
| 0.190476
| 63
| 6
| 26
| 10.5
| 0.784314
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f6da26c0b1a2def7ed5610ffec6add78528d6546
| 367
|
py
|
Python
|
adet/modeling/backbone/__init__.py
|
chungjung-d/SSIS
|
ed9e4f468a9bc8a4a3a945e4d718c6a4ba454a64
|
[
"BSD-2-Clause"
] | 32
|
2021-03-03T12:25:25.000Z
|
2022-03-30T09:59:50.000Z
|
adet/modeling/backbone/__init__.py
|
chungjung-d/SSIS
|
ed9e4f468a9bc8a4a3a945e4d718c6a4ba454a64
|
[
"BSD-2-Clause"
] | 3
|
2021-06-21T17:03:16.000Z
|
2021-12-07T07:14:56.000Z
|
adet/modeling/backbone/__init__.py
|
chungjung-d/SSIS
|
ed9e4f468a9bc8a4a3a945e4d718c6a4ba454a64
|
[
"BSD-2-Clause"
] | 6
|
2021-06-22T08:26:34.000Z
|
2021-12-19T07:38:34.000Z
|
from .fpn import build_fcos_resnet_fpn_backbone
# from .vovnet import build_vovnet_fpn_backbone, build_vovnet_backbone
from .dla import build_fcos_dla_fpn_backbone
from .resnet_lpf import build_resnet_lpf_backbone
from .bifpn import build_fcos_resnet_bifpn_backbone
from .vovnet import build_vovnet_fpn_backbone, build_vovnet_backbone, build_fcos_vovnet_fpn_backbone
| 52.428571
| 100
| 0.896458
| 57
| 367
| 5.245614
| 0.192982
| 0.220736
| 0.150502
| 0.140468
| 0.434783
| 0.434783
| 0.434783
| 0.434783
| 0.434783
| 0.434783
| 0
| 0
| 0.076294
| 367
| 6
| 101
| 61.166667
| 0.882006
| 0.185286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1005f2d4616ba01101b66450d94f65683108b10c
| 24,050
|
py
|
Python
|
analysis/figures.py
|
rge123/c19sim
|
d5e817b13b9f1c6640963dfa414e549e3c3a3f40
|
[
"MIT"
] | 1
|
2022-01-02T18:01:34.000Z
|
2022-01-02T18:01:34.000Z
|
analysis/figures.py
|
rge123/c19sim
|
d5e817b13b9f1c6640963dfa414e549e3c3a3f40
|
[
"MIT"
] | 11
|
2022-01-24T14:43:46.000Z
|
2022-03-30T18:47:45.000Z
|
analysis/figures.py
|
rge123/c19sim
|
d5e817b13b9f1c6640963dfa414e549e3c3a3f40
|
[
"MIT"
] | null | null | null |
"""
@dependencies: requirements.txt
@authors: Robert Elston
@coding: UTF-8
@url: https://github.com/rge123/c19sim
Functions to produce analysis figures
"""
import numpy as np
import pandas as pd
from plotly.subplots import make_subplots
import plotly.graph_objects as go
def contours():
    """Show two side-by-side contour heat-maps built from pre-computed CSV
    grids in analysis/images/."""
    peak_grid = np.loadtxt("analysis/images/2022-03-21peak_vs_threshold_94.csv",
                           dtype=float, delimiter=',')
    dur_grid = np.loadtxt("analysis/images/2022-03-21dur_vs_sev_25.csv",
                          dtype=float, delimiter=',')
    # Settings common to both panels.
    shared = dict(line_smoothing=0.85, colorscale='Reds',
                  contours_coloring='heatmap', autocontour=True, ncontours=20)
    left = go.Contour(z=dur_grid,
                      x=np.arange(0, 63, 2.5),
                      y=np.arange(15, 50, 2.5),
                      colorbar={'title': 'Unhappiness', 'x': .445},
                      **shared)
    right = go.Contour(z=peak_grid,
                       x=np.arange(60, 100, 2.5),
                       y=np.arange(15, 50, 2.5),
                       colorbar={'x': .49, 'len': 0.9},
                       **shared)
    fig = make_subplots(rows=1, cols=2, shared_yaxes=True, shared_xaxes=True)
    fig.add_trace(left, row=1, col=1)
    fig.add_trace(right, row=1, col=2)
    fig.update_layout(xaxis={'title': 'Lockdown Severity (%)'},
                      yaxis={'title': 'Lockdown Duration (days)'},
                      width=1750,
                      height=700,
                      font={'size': 22}
                      )
    fig.show()
def severity_overshoot():
    """Write a two-panel scatter (threshold overshoot and peak delay against
    beta severity) to analysis/images/two.png."""
    frame = pd.DataFrame(
        np.loadtxt("analysis/images/2022-03-23peak_vs_thres_556.csv",
                   dtype=float, delimiter=','),
        columns=['Threshold Overshoot', 'Beta Severity', 'Delay',
                 'Threshold Error', 'Delay Error'])
    open_circle = {'color': 'black', 'symbol': 'circle-open'}
    fig = make_subplots(rows=2, cols=1, shared_xaxes=True, shared_yaxes=False)
    fig.add_trace(go.Scatter(x=frame['Beta Severity'],
                             y=frame['Threshold Overshoot'],
                             mode='markers', marker=open_circle,
                             # Error bars plotted at half width here.
                             error_y={'array': frame['Threshold Error'] / 2}),
                  row=1, col=1)
    fig.add_trace(go.Scatter(x=frame['Beta Severity'],
                             y=frame['Delay'],
                             mode='markers', marker=open_circle,
                             error_y={'array': frame['Delay Error']}),
                  row=2, col=1)
    fig.update_yaxes(title_text='Threshold Overshoot', row=1, col=1, mirror=True)
    fig.update_yaxes(title_text='Peak Delay (Days)', row=2, col=1, mirror=True)
    fig.update_xaxes(mirror=True, row=1, col=1, showline=True)
    fig.update_xaxes(mirror=True, row=2, col=1)
    fig.update_layout(xaxis={'title': 'Beta Severity (%)'},
                      width=875,
                      height=700,
                      font={'size': 22},
                      template='simple_white',
                      showlegend=False
                      )
    fig.write_image('analysis/images/two.png', scale=1.5)
def nat_loc_unhappy():
    """Write a four-line comparison of unhappiness vs lockdown duration to
    analysis/images/three.png."""
    frame = pd.DataFrame(
        np.loadtxt("analysis/images/2022-03-24loc_vs_nat_116.csv",
                   dtype=float, delimiter=',', max_rows=101),
        columns=['Duration (Days)', 'National Heterogeneous',
                 'National Homogenous', 'Local Heterogeneous',
                 'Local Homogenous'])
    # (column, legend label, colour, dash). NOTE(review): legend labels spell
    # "Homogeneous" while the CSV columns use "Homogenous" -- preserved as-is.
    series = [
        ('National Heterogeneous', 'National Heterogeneous', 'black', 'dot'),
        ('National Homogenous', 'National Homogeneous', 'red', 'dash'),
        ('Local Heterogeneous', 'Local Heterogeneous', 'green', 'longdashdot'),
        ('Local Homogenous', 'Local Homogeneous', 'blue', 'longdash'),
    ]
    fig = go.Figure()
    for column, label, colour, dash in series:
        fig.add_trace(go.Scatter(x=frame['Duration (Days)'],
                                 y=frame[column],
                                 mode='lines',
                                 line={'color': colour, 'dash': dash,
                                       'width': 3},
                                 name=label))
    fig.update_yaxes(title_text='Unhappiness (Days)', mirror=True)
    fig.update_xaxes(mirror=True)
    fig.update_layout(xaxis={'title': 'Duration (Days)'},
                      yaxis={'exponentformat': 'power'},
                      width=875,
                      height=700,
                      font={'size': 20},
                      template='simple_white',
                      legend={'x': 0, 'y': 1}
                      )
    fig.write_image('analysis/images/three.png', scale=1.5)
def trans_severity():
    """Write a four-series marker plot of peak new cases vs transport
    severity to analysis/images/four.png."""
    frame = pd.DataFrame(
        np.loadtxt("analysis/images/2022-03-24nat_trans_404.csv",
                   dtype=float, delimiter=',', max_rows=101),
        columns=['Transport Severity (%)',
                 'National Heterogeneous', 'Het Nat Error',
                 'Local Heterogeneous', 'Het Loc Error',
                 'National Homogenous', 'Hom Nat Error',
                 'Local Homogenous', 'Hom Loc Error'])
    # (column, legend label, colour, symbol). NOTE(review): labels spell
    # "Homogeneous" while CSV columns use "Homogenous" -- preserved as-is.
    series = [
        ('National Heterogeneous', 'National Heterogeneous', 'black',
         'circle-open'),
        ('National Homogenous', 'National Homogeneous', 'red', 'square-open'),
        ('Local Heterogeneous', 'Local Heterogeneous', 'green',
         'asterisk-open'),
        ('Local Homogenous', 'Local Homogeneous', 'blue', 'triangle-up-open'),
    ]
    fig = go.Figure()
    for column, label, colour, symbol in series:
        fig.add_trace(go.Scatter(x=frame['Transport Severity (%)'],
                                 y=frame[column],
                                 mode='markers',
                                 marker={'color': colour, 'symbol': symbol,
                                         'size': 10},
                                 name=label))
    fig.update_yaxes(title_text='Peak New Cases', mirror=True)
    fig.update_xaxes(mirror=True)
    fig.update_layout(xaxis={'title': 'Transport Severity (%)'},
                      yaxis={'exponentformat': 'power'},
                      width=875,
                      height=700,
                      font={'size': 22},
                      template='simple_white',
                      legend={'x': 0, 'y': 0}
                      )
    fig.write_image('analysis/images/four.png', scale=1.5)
def qual_trans_delay():
    """Write a two-panel plot of new cases vs lockdown duration per severity
    level to analysis/images/five.png.

    Row 1 plots the first CSV (markers every 20th point, legend shown);
    row 2 plots the second CSV (markers every 10th point, legend hidden).
    Replaces 20 copy-pasted add_trace calls with a helper loop, and fixes
    the original's copy-paste trace names in row 2 (several traces were
    mislabelled 'Severity = 0'; invisible there since showlegend=False,
    but wrong on hover).
    """
    columns = ['Duration (Days)', '0', '25', '50', '75', '100']
    data = pd.DataFrame(
        np.loadtxt("analysis/images/2022-03-24delay_transport_89.csv",
                   dtype=float, delimiter=','),
        columns=columns)
    data_nat = pd.DataFrame(
        np.loadtxt("analysis/images/2022-03-24delay_transport_103.csv",
                   dtype=float, delimiter=','),
        columns=columns)
    # One (column, colour, marker symbol) triple per severity level.
    styles = [('0', 'black', 'circle-open'),
              ('25', 'red', 'square-open'),
              ('50', 'green', 'asterisk-open'),
              ('75', 'blue', 'triangle-up-open'),
              ('100', 'purple', 'star-open')]
    fig = make_subplots(rows=2, cols=1, shared_xaxes=True, shared_yaxes=False)
    _add_severity_traces(fig, data, styles, row=1, stride=20, legend=True)
    _add_severity_traces(fig, data_nat, styles, row=2, stride=10, legend=False)
    fig.update_yaxes(title_text='New Cases', mirror=True,
                     exponentformat='power')
    fig.update_xaxes(mirror=True)
    fig.update_layout(xaxis={'title': 'Duration (Days)'},
                      yaxis={'exponentformat': 'power'},
                      width=875,
                      height=700,
                      font={'size': 22},
                      template='simple_white',
                      legend={'x': 0.68, 'y': 1, 'bgcolor': 'rgba(0,0,0,0)'},
                      xaxis_range=[100, 320]
                      )
    fig.write_image('analysis/images/five.png', scale=1.5)
def _add_severity_traces(fig, frame, styles, row, stride, legend):
    """Add one line trace plus one strided marker trace per severity column
    to subplot *row*; *legend* controls whether marker traces appear in the
    legend (line traces never do)."""
    x = frame['Duration (Days)']
    for column, colour, symbol in styles:
        label = 'Severity = ' + column
        fig.add_trace(go.Scatter(x=x,
                                 y=frame[column],
                                 mode='lines',
                                 line={'color': colour},
                                 name=label,
                                 showlegend=False
                                 ),
                      row=row, col=1)
        fig.add_trace(go.Scatter(x=x[::stride],
                                 y=frame[column][::stride],
                                 mode='markers',
                                 marker={'color': colour, 'symbol': symbol,
                                         'size': 10},
                                 name=label,
                                 showlegend=legend
                                 ),
                      row=row, col=1)
def contours_overshoot():
    """Write a single overshoot contour heat-map over the (threshold,
    severity) grid to analysis/images/six.png."""
    grid = np.loadtxt("analysis/images/2022-03-24grid_over_275.csv",
                      dtype=float, delimiter=',')
    heatmap = go.Contour(z=grid,
                         line_smoothing=0.85,
                         colorscale='Reds',
                         contours_coloring='heatmap',
                         x=np.arange(15000, 25000, 1000),
                         y=np.arange(0, 100, 5),
                         autocontour=True,
                         ncontours=20,
                         colorbar={'title': 'Overshoot'})
    fig = go.Figure(heatmap)
    fig.update_layout(xaxis={'title': 'Lockdown Threshold',
                             'exponentformat': 'power'},
                      yaxis={'title': 'Lockdown Severity (%)'},
                      width=875,
                      height=700,
                      font={'size': 22}
                      )
    fig.write_image("analysis/images/six.png")
def qual_trans_delay_2():
    """Write a single-panel plot of new cases vs lockdown duration for
    severity levels 75-95 to analysis/images/seven.png.

    Each severity gets a continuous line (hidden from the legend) plus a
    marker trace every 10th point (shown in the legend). Replaces 10
    copy-pasted add_trace calls with a data-driven loop.
    """
    frame = pd.DataFrame(
        np.loadtxt("analysis/images/2022-03-24delay_transport_945.csv",
                   dtype=float, delimiter=','),
        columns=['Duration (Days)', '75', '80', '85', '90', '95'])
    # One (column, colour, marker symbol) triple per severity level.
    styles = [('75', 'black', 'circle-open'),
              ('80', 'red', 'square-open'),
              ('85', 'green', 'asterisk-open'),
              ('90', 'blue', 'triangle-up-open'),
              ('95', 'purple', 'star-open')]
    x = frame['Duration (Days)']
    fig = go.Figure()
    for column, colour, symbol in styles:
        label = 'Severity = ' + column
        fig.add_trace(go.Scatter(x=x,
                                 y=frame[column],
                                 mode='lines',
                                 line={'color': colour},
                                 name=label,
                                 showlegend=False
                                 ))
        fig.add_trace(go.Scatter(x=x[::10],
                                 y=frame[column][::10],
                                 mode='markers',
                                 marker={'color': colour, 'symbol': symbol,
                                         'size': 10},
                                 name=label
                                 ))
    fig.update_yaxes(title_text='New Cases', mirror=True)
    fig.update_xaxes(mirror=True)
    fig.update_layout(xaxis={'title': 'Duration (Days)'},
                      yaxis={'exponentformat': 'power'},
                      width=875,
                      height=700,
                      font={'size': 22},
                      template='simple_white',
                      legend={'x': 0.7, 'y': 1, 'bgcolor': 'rgba(0,0,0,0)'},
                      xaxis_range=[80, 480]
                      )
    fig.write_image('analysis/images/seven.png', scale=1.5)
if __name__ == '__main__':
qual_trans_delay()
| 46.339114
| 79
| 0.362453
| 1,949
| 24,050
| 4.388404
| 0.110826
| 0.058927
| 0.046767
| 0.065474
| 0.835029
| 0.795627
| 0.776219
| 0.695195
| 0.665731
| 0.629954
| 0
| 0.049814
| 0.508358
| 24,050
| 519
| 80
| 46.339114
| 0.673545
| 0.010353
| 0
| 0.657676
| 0
| 0
| 0.173953
| 0.023541
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014523
| false
| 0
| 0.008299
| 0
| 0.022822
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
10112df25d536ad3eee092b3c856bdb3959bed70
| 2,656
|
py
|
Python
|
application/models.py
|
DanishDanialZurkanain/Cardiovascular
|
13ab578bdcc934be119f5178ad6e668c36b681aa
|
[
"CC0-1.0"
] | null | null | null |
application/models.py
|
DanishDanialZurkanain/Cardiovascular
|
13ab578bdcc934be119f5178ad6e668c36b681aa
|
[
"CC0-1.0"
] | 6
|
2021-03-31T19:54:03.000Z
|
2022-03-12T00:30:27.000Z
|
application/models.py
|
DanishDanialZurkanain/Cardiovascular
|
13ab578bdcc934be119f5178ad6e668c36b681aa
|
[
"CC0-1.0"
] | null | null | null |
from . import app, db
from datetime import datetime
from flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
class User(db.Model, UserMixin):
    """Application user account with password-hashing helpers."""

    __tablename__ = 'user'

    id = db.Column(db.Integer, primary_key=True)
    fullname = db.Column(db.String(200))
    ic = db.Column(db.String(14))  # presumably a national ID number — confirm
    email = db.Column(db.String(200), unique=True)
    # Holds the *hashed* password written by set_password().  The original
    # String(15) column was far too short for a generate_password_hash()
    # digest (~90+ characters), so databases that enforce VARCHAR length
    # would truncate it and check_password() would always fail.
    password = db.Column(db.String(255))
    hashCode = db.Column(db.String(120))
    phone = db.Column(db.String(15))
    dob = db.Column(db.String(10))  # date of birth stored as text
    age = db.Column(db.Integer)
    sex = db.Column(db.Integer)
    access_level = db.Column(db.Integer)
    date_created = db.Column(db.DateTime, default=datetime.utcnow)

    # One-to-many links; each related row exposes a joined-loaded 'user' backref.
    health = db.relationship('Health', backref=db.backref('user', lazy='joined'))
    previous_record = db.relationship('PreviousRecord', backref=db.backref('user', lazy='joined'))

    def set_password(self, password):
        """Hash *password* and store the digest on this user."""
        # NOTE(review): the plain "sha256" method is deprecated (and removed
        # in Werkzeug 3.x) in favour of "pbkdf2:sha256".  check_password_hash
        # reads the method from the stored digest, so old hashes keep
        # verifying after a switch — confirm the installed Werkzeug version.
        self.password = generate_password_hash(password, method="sha256")

    def check_password(self, password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self.password, password)

    @property
    def serialize(self):
        """Minimal JSON-safe representation of this user."""
        return {'fullname': self.fullname}
class Health(db.Model):
    """Current cardiovascular measurement set belonging to one User.

    Field names match the classic heart-disease dataset conventions
    (cp, trestbps, chol, ...) — presumably chest pain type, resting blood
    pressure, cholesterol, etc.; confirm against the model-training code.
    """

    __tablename__ = "health"

    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))  # owning User
    cp = db.Column(db.Integer)
    trestbps = db.Column(db.Integer)
    chol = db.Column(db.Integer)
    fbs = db.Column(db.Integer)
    restecg = db.Column(db.Integer)
    thalach = db.Column(db.Integer)
    exang = db.Column(db.Integer)
    oldpeak = db.Column(db.Float)
    slope = db.Column(db.Integer)
    ca = db.Column(db.Float)
    thal = db.Column(db.Float)
    target = db.Column(db.Integer)  # prediction label/outcome
    date_created = db.Column(db.DateTime, default=datetime.utcnow)
class PreviousRecord(db.Model):
    """Historical copy of a Health record, plus an optional reviewer comment.

    Mirrors the Health columns exactly; kept as a separate table so past
    measurements survive updates to the current Health row.
    """

    __tablename__ = "previous_record"

    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))  # owning User
    cp = db.Column(db.Integer)
    trestbps = db.Column(db.Integer)
    chol = db.Column(db.Integer)
    fbs = db.Column(db.Integer)
    restecg = db.Column(db.Integer)
    thalach = db.Column(db.Integer)
    exang = db.Column(db.Integer)
    oldpeak = db.Column(db.Float)
    slope = db.Column(db.Integer)
    ca = db.Column(db.Float)
    thal = db.Column(db.Float)
    target = db.Column(db.Integer)  # prediction label/outcome at record time
    comment = db.Column(db.String(500))      # free-text note on this record
    comment_by = db.Column(db.String(150))   # name of whoever commented
    date_created = db.Column(db.DateTime, default=datetime.utcnow)
| 35.413333
| 101
| 0.663404
| 361
| 2,656
| 4.783934
| 0.218837
| 0.203822
| 0.254777
| 0.255935
| 0.580197
| 0.537348
| 0.50029
| 0.50029
| 0.481181
| 0.452229
| 0
| 0.012258
| 0.201431
| 2,656
| 75
| 102
| 35.413333
| 0.80198
| 0
| 0
| 0.507937
| 1
| 0
| 0.036005
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| false
| 0.095238
| 0.063492
| 0.031746
| 0.968254
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
63e54abf8eb652c246b117140b77365f2739f500
| 70
|
py
|
Python
|
angr/angr/misc/__init__.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | null | null | null |
angr/angr/misc/__init__.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | null | null | null |
angr/angr/misc/__init__.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | null | null | null |
from . import ux
from . import autoimport
from .loggers import Loggers
| 23.333333
| 28
| 0.8
| 10
| 70
| 5.6
| 0.5
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157143
| 70
| 3
| 28
| 23.333333
| 0.949153
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
63f7068f3582cd64fe98672186fecfafd506f7dc
| 10,869
|
py
|
Python
|
src/tests/cauliflowervest/client/util_test.py
|
cooljeanius/cauliflowervest
|
a9bc209b610a927083bf16274d8451c6c45227bf
|
[
"Apache-2.0"
] | 1
|
2020-10-13T19:53:04.000Z
|
2020-10-13T19:53:04.000Z
|
src/tests/cauliflowervest/client/util_test.py
|
cooljeanius/cauliflowervest
|
a9bc209b610a927083bf16274d8451c6c45227bf
|
[
"Apache-2.0"
] | null | null | null |
src/tests/cauliflowervest/client/util_test.py
|
cooljeanius/cauliflowervest
|
a9bc209b610a927083bf16274d8451c6c45227bf
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# #
"""Tests for oauth_client."""
import os
import stat
import unittest
import mox
import stubout
from cauliflowervest.client import util
# Canned /sbin/mount output used as fixtures by GetRootDiskTest below.

# Typical output: the root ('/') filesystem line appears first.
MOUNT_OUTPUT_NOMINAL = """
/dev/disk0s2 on / (hfs, local, journaled)
devfs on /dev (devfs, local, nobrowse)
/dev/disk0s4 on /Volumes/Untitled 2 (hfs, local, journaled)
map -hosts on /net (autofs, nosuid, automounted, nobrowse)
map auto_home on /home (autofs, automounted, nobrowse)
""".strip()

# Same mounts, but the root ('/') line is not first.
MOUNT_OUTPUT_OUT_OF_ORDER = """
devfs on /dev (devfs, local, nobrowse)
/dev/disk0s4 on /Volumes/Untitled 2 (hfs, local, journaled)
/dev/disk0s2 on / (hfs, local, journaled)
map -hosts on /net (autofs, nosuid, automounted, nobrowse)
map auto_home on /home (autofs, automounted, nobrowse)
""".strip()

# No root ('/') entry at all, and the trailing newline is kept
# (only the leading blank line is removed via lstrip).
MOUNT_OUTPUT_TRAILING_BLANK = """
devfs on /dev (devfs, local, nobrowse)
/dev/disk0s4 on /Volumes/Untitled 2 (hfs, local, journaled)
map -hosts on /net (autofs, nosuid, automounted, nobrowse)
map auto_home on /home (autofs, automounted, nobrowse)
""".lstrip()
class GetRootDiskTest(mox.MoxTestBase):
    """Test the GetRootDisk() function."""

    def setUp(self):
        super(GetRootDiskTest, self).setUp()
        self.mox = mox.Mox()

    def tearDown(self):
        self.mox.UnsetStubs()

    def testEnumerationFailure(self):
        # A non-zero /sbin/mount exit status must surface as util.Error.
        self.mox.StubOutWithMock(util, 'Exec')
        util.Exec(('/sbin/mount')).AndReturn((1, '', ''))
        self.mox.ReplayAll()
        self.assertRaises(util.Error, util.GetRootDisk)
        self.mox.VerifyAll()

    def testOk(self):
        # Nominal output: returns the device mounted on '/'.
        self.mox.StubOutWithMock(util, 'Exec')
        util.Exec(('/sbin/mount')).AndReturn((0, MOUNT_OUTPUT_NOMINAL, ''))
        self.mox.ReplayAll()
        self.assertEquals('/dev/disk0s2', util.GetRootDisk())
        self.mox.VerifyAll()

    def testOutOfOrder(self):
        # Root entry not listed first must still be found.
        self.mox.StubOutWithMock(util, 'Exec')
        util.Exec(('/sbin/mount')).AndReturn((0, MOUNT_OUTPUT_OUT_OF_ORDER, ''))
        self.mox.ReplayAll()
        self.assertEquals('/dev/disk0s2', util.GetRootDisk())
        self.mox.VerifyAll()

    def testTrailingBlank(self):
        # Output with no root ('/') entry must raise util.Error.
        self.mox.StubOutWithMock(util, 'Exec')
        util.Exec(('/sbin/mount')).AndReturn((0, MOUNT_OUTPUT_TRAILING_BLANK, ''))
        self.mox.ReplayAll()
        self.assertRaises(util.Error, util.GetRootDisk)
        self.mox.VerifyAll()

    def testException(self):
        # util.ExecError from the subprocess layer is translated to util.Error.
        self.mox.StubOutWithMock(util, 'Exec')
        util.Exec(('/sbin/mount')).AndRaise(util.ExecError)
        self.mox.ReplayAll()
        self.assertRaises(util.Error, util.GetRootDisk)
        self.mox.VerifyAll()
class SafeOpenTest(mox.MoxTestBase):
    """Test the util.SafeOpen() function."""

    # Directory and file that util.SafeOpen() is expected to create/open.
    dir = '/var/root/Library/cauliflowervest'
    path = '/var/root/Library/cauliflowervest/access_token.dat'

    def setUp(self):
        super(SafeOpenTest, self).setUp()
        self.mox = mox.Mox()

    def tearDown(self):
        self.mox.UnsetStubs()

    def testDirExists(self):
        # makedirs() raising OSError (directory already exists) is tolerated;
        # the file is simply opened.  NOTE: 0700 is a Python 2 octal literal.
        self.mox.StubOutWithMock(os, 'makedirs')
        os.makedirs(self.dir, 0700).AndRaise(OSError)
        result = object()
        open_ = self.mox.CreateMockAnything()
        open_(self.path, 'r').AndReturn(result)
        mox.Replay(open_)
        self.mox.ReplayAll()
        self.assertEqual(
            util.SafeOpen(self.path, 'r', open_=open_), result)
        self.mox.VerifyAll()
        mox.Verify(open_)

    def testFileExists(self):
        # mknod() raising OSError (file already exists) is tolerated.
        self.mox.StubOutWithMock(os, 'makedirs')
        os.makedirs(self.dir, 0700)
        self.mox.StubOutWithMock(os, 'mknod')
        os.mknod(self.path, 0600 | stat.S_IFREG).AndRaise(OSError)
        result = object()
        open_ = self.mox.CreateMockAnything()
        open_(self.path, 'r').AndReturn(result)
        mox.Replay(open_)
        self.mox.ReplayAll()
        self.assertEqual(
            util.SafeOpen(self.path, 'r', open_=open_), result)
        self.mox.VerifyAll()
        mox.Verify(open_)

    def testOk(self):
        # Clean path: directory and file are created, then the file is opened.
        self.mox.StubOutWithMock(os, 'makedirs')
        os.makedirs(self.dir, 0700)
        self.mox.StubOutWithMock(os, 'mknod')
        os.mknod(self.path, 0600 | stat.S_IFREG)
        result = object()
        open_ = self.mox.CreateMockAnything()
        open_(self.path, 'r').AndReturn(result)
        mox.Replay(open_)
        self.mox.ReplayAll()
        self.assertEqual(
            util.SafeOpen(self.path, 'r', open_=open_), result)
        self.mox.VerifyAll()
        mox.Verify(open_)
class UtilModuleTest(mox.MoxTestBase):
    """Test module level functions in util."""

    def setUp(self):
        super(UtilModuleTest, self).setUp()
        self.mox = mox.Mox()

    def tearDown(self):
        self.mox.UnsetStubs()

    # --- GetPlistFromExec -------------------------------------------------

    def testGetPlistFromExec(self):
        # Success: stdout is fed to plistlib and the parsed plist is returned.
        self.mox.StubOutWithMock(util, 'Exec')
        self.mox.StubOutWithMock(util.plistlib, 'readPlistFromString')
        util.Exec('cmd', stdin='stdin').AndReturn((0, 'stdout', 'stderr'))
        util.plistlib.readPlistFromString('stdout').AndReturn('plist')
        self.mox.ReplayAll()
        self.assertEqual('plist', util.GetPlistFromExec('cmd', stdin='stdin'))
        self.mox.VerifyAll()

    def testGetPlistFromExecNonZeroReturncode(self):
        # Non-zero exit status raises ExecError without parsing stdout.
        self.mox.StubOutWithMock(util, 'Exec')
        util.Exec('cmd', stdin=None).AndReturn((1, 'stdout', 'stderr'))
        self.mox.ReplayAll()
        self.assertRaises(util.ExecError, util.GetPlistFromExec, 'cmd')
        self.mox.VerifyAll()

    def testGetPlistFromExecPlistParseError(self):
        # An XML parse failure is re-raised as ExecError.
        self.mox.StubOutWithMock(util, 'Exec')
        self.mox.StubOutWithMock(util.plistlib, 'readPlistFromString')
        util.Exec('cmd', stdin=None).AndReturn((0, 'stdout', 'stderr'))
        util.plistlib.readPlistFromString('stdout').AndRaise(util.expat.ExpatError)
        self.mox.ReplayAll()
        self.assertRaises(util.ExecError, util.GetPlistFromExec, 'cmd')
        self.mox.VerifyAll()

    # --- JoinURL: all slash permutations yield a single separator ---------

    def testJoinURL(self):
        base_url = 'http://example.com'
        part1 = 'foo'
        part2 = 'bar'
        out = util.JoinURL(base_url, part1, part2)
        self.assertEqual(out, 'http://example.com/foo/bar')

    def testJoinURLWithTrailingSlashOnBaseURL(self):
        base_url = 'http://example.com/'
        part1 = 'foo'
        part2 = 'bar'
        out = util.JoinURL(base_url, part1, part2)
        self.assertEqual(out, 'http://example.com/foo/bar')

    def testJoinURLWithLeadingSlashOnInnerURLPart(self):
        base_url = 'http://example.com'
        part1 = '/foo'
        part2 = 'bar'
        out = util.JoinURL(base_url, part1, part2)
        self.assertEqual(out, 'http://example.com/foo/bar')

    def testJoinURLWithLeadingAndTrailingSlashOnInnerURLPart(self):
        base_url = 'http://example.com'
        part1 = '/foo/'
        part2 = '/bar'
        out = util.JoinURL(base_url, part1, part2)
        self.assertEqual(out, 'http://example.com/foo/bar')

    def testJoinURLWithTrailingSlashOnInnerURLPart(self):
        base_url = 'http://example.com'
        part1 = 'foo/'
        part2 = 'bar'
        out = util.JoinURL(base_url, part1, part2)
        self.assertEqual(out, 'http://example.com/foo/bar')

    def testJoinURLWithTrailingSlashOnLastURLPart(self):
        # A trailing slash on the final part is preserved.
        base_url = 'http://example.com'
        part1 = 'foo'
        part2 = 'bar/'
        out = util.JoinURL(base_url, part1, part2)
        self.assertEqual(out, 'http://example.com/foo/bar/')

    # --- RetrieveEntropy: scrapes ioreg output ----------------------------

    def testRetrieveEntropy(self):
        # The HIDIdleTime line is extracted from ioreg output.
        self.mox.StubOutWithMock(util, 'Exec')
        rc = 0
        stdout = 'HIDIdleTime=100\nWhateverOtherCrap\n'
        stderr = ''
        expected_entropy = 'HIDIdleTime=100'
        util.Exec(['/usr/sbin/ioreg', '-l']).AndReturn((rc, stdout, stderr))
        self.mox.ReplayAll()
        self.assertEqual(expected_entropy, util.RetrieveEntropy())
        self.mox.VerifyAll()

    def testRetrieveEntropyWhenNoOutputResult(self):
        # Output that does not match the expected pattern raises.
        self.mox.StubOutWithMock(util, 'Exec')
        rc = 0
        stdout = 'CrapThatWontMatchTheRegex\n'
        stderr = ''
        util.Exec(['/usr/sbin/ioreg', '-l']).AndReturn((rc, stdout, stderr))
        self.mox.ReplayAll()
        self.assertRaises(util.RetrieveEntropyError, util.RetrieveEntropy)
        self.mox.VerifyAll()

    def testRetrieveEntropyWhenErrorIoRegOutput(self):
        # Empty ioreg output raises.
        self.mox.StubOutWithMock(util, 'Exec')
        rc = 0
        stdout = ''
        stderr = ''
        util.Exec(['/usr/sbin/ioreg', '-l']).AndReturn((rc, stdout, stderr))
        self.mox.ReplayAll()
        self.assertRaises(util.RetrieveEntropyError, util.RetrieveEntropy)
        self.mox.VerifyAll()

    def testRetrieveEntropyWhenErrorIoRegRc(self):
        # Non-zero ioreg exit status raises.
        self.mox.StubOutWithMock(util, 'Exec')
        rc = 1
        stdout = ''
        stderr = ''
        util.Exec(['/usr/sbin/ioreg', '-l']).AndReturn((rc, stdout, stderr))
        self.mox.ReplayAll()
        self.assertRaises(util.RetrieveEntropyError, util.RetrieveEntropy)
        self.mox.VerifyAll()

    def testRetrieveEntropyWhenErrorIoRegExec(self):
        # util.ExecError from the subprocess layer raises RetrieveEntropyError.
        self.mox.StubOutWithMock(util, 'Exec')
        rc = 1
        stdout = ''
        stderr = ''
        util.Exec(['/usr/sbin/ioreg', '-l']).AndRaise(util.ExecError)
        self.mox.ReplayAll()
        self.assertRaises(util.RetrieveEntropyError, util.RetrieveEntropy)
        self.mox.VerifyAll()

    # --- SupplyEntropy: writes entropy into /dev/random -------------------

    def testSupplyEntropy(self):
        # Success: opens /dev/random for writing, writes, closes.
        mock_open = self.mox.CreateMockAnything()
        entropy = 'entropy'
        mock_open('/dev/random', 'w').AndReturn(mock_open)
        mock_open.write(entropy).AndReturn(None)
        mock_open.close().AndReturn(None)
        self.mox.ReplayAll()
        util.SupplyEntropy(entropy, open_=mock_open)
        self.mox.VerifyAll()

    def testSupplyEntropyWhenIOErrorOpen(self):
        # IOError on open is wrapped in SupplyEntropyError.
        mock_open = self.mox.CreateMockAnything()
        entropy = 'entropy'
        mock_open('/dev/random', 'w').AndRaise(IOError)
        self.mox.ReplayAll()
        self.assertRaises(
            util.SupplyEntropyError, util.SupplyEntropy, entropy, open_=mock_open)
        self.mox.VerifyAll()

    def testSupplyEntropyWhenIOErrorWrite(self):
        # IOError on write is wrapped in SupplyEntropyError.
        mock_open = self.mox.CreateMockAnything()
        entropy = 'entropy'
        mock_open('/dev/random', 'w').AndReturn(mock_open)
        mock_open.write(entropy).AndRaise(IOError)
        self.mox.ReplayAll()
        self.assertRaises(
            util.SupplyEntropyError, util.SupplyEntropy, entropy, open_=mock_open)
        self.mox.VerifyAll()

    def testSupplyEntropyWhenIOErrorClose(self):
        # IOError on close is wrapped in SupplyEntropyError.
        mock_open = self.mox.CreateMockAnything()
        entropy = 'entropy'
        mock_open('/dev/random', 'w').AndReturn(mock_open)
        mock_open.write(entropy).AndReturn(None)
        mock_open.close().AndRaise(IOError)
        self.mox.ReplayAll()
        self.assertRaises(
            util.SupplyEntropyError, util.SupplyEntropy, entropy, open_=mock_open)
        self.mox.VerifyAll()

    def testSupplyEntropyWhenNoneSupplied(self):
        # Passing None for the entropy raises immediately.
        entropy = None
        self.assertRaises(util.SupplyEntropyError, util.SupplyEntropy, entropy)
# Standard unittest entry point when the file is run as a script.
if __name__ == '__main__':
    unittest.main()
| 29.375676
| 79
| 0.694636
| 1,276
| 10,869
| 5.846395
| 0.181818
| 0.068499
| 0.058981
| 0.050938
| 0.732172
| 0.723861
| 0.708311
| 0.7
| 0.676944
| 0.654826
| 0
| 0.009527
| 0.159812
| 10,869
| 370
| 80
| 29.375676
| 0.807381
| 0.054651
| 0
| 0.701149
| 0
| 0
| 0.164308
| 0.014451
| 0
| 0
| 0
| 0
| 0.099617
| 0
| null | null | 0
| 0.022989
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
121ab0498ddba19ba2264c776f8a9f29269e07a5
| 94
|
py
|
Python
|
gym_pool/envs/utils.py
|
to314as/gym-pool
|
925f08b14580260b9136f97c5f6b3b3c807820a8
|
[
"MIT"
] | 1
|
2021-04-22T17:31:05.000Z
|
2021-04-22T17:31:05.000Z
|
gym_pool/envs/utils.py
|
to314as/gym-pool
|
925f08b14580260b9136f97c5f6b3b3c807820a8
|
[
"MIT"
] | null | null | null |
gym_pool/envs/utils.py
|
to314as/gym-pool
|
925f08b14580260b9136f97c5f6b3b3c807820a8
|
[
"MIT"
] | 1
|
2021-05-31T11:48:39.000Z
|
2021-05-31T11:48:39.000Z
|
import functools
import operator
def prod(l):
    """Return the product of all elements of *l*; 1 for an empty iterable."""
    result = 1
    for factor in l:
        result = result * factor
    return result
| 18.8
| 47
| 0.755319
| 14
| 94
| 5.071429
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0125
| 0.148936
| 94
| 5
| 47
| 18.8
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
124cee6cd56b06bfe62a99aab3b7e46e567d51a1
| 97
|
py
|
Python
|
zisan/Seg/davisinteractive/robot/__init__.py
|
JintuZheng/zisan
|
84b30d1ee91754d4351841a2077c78146028adfc
|
[
"MIT"
] | 40
|
2020-02-14T07:03:16.000Z
|
2022-03-07T10:52:18.000Z
|
zisan/Seg/davisinteractive/robot/__init__.py
|
EpsilionJT/zisan
|
84b30d1ee91754d4351841a2077c78146028adfc
|
[
"MIT"
] | 1
|
2021-09-04T07:40:26.000Z
|
2021-09-04T14:51:03.000Z
|
zisan/Seg/davisinteractive/robot/__init__.py
|
EpsilionJT/zisan
|
84b30d1ee91754d4351841a2077c78146028adfc
|
[
"MIT"
] | 9
|
2020-02-24T01:08:11.000Z
|
2021-12-15T07:35:14.000Z
|
from __future__ import absolute_import
from .interactive_robot import InteractiveScribblesRobot
| 24.25
| 56
| 0.896907
| 10
| 97
| 8.1
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092784
| 97
| 3
| 57
| 32.333333
| 0.920455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
125ffdfcd9a158775ac287516a92635395259c39
| 114
|
py
|
Python
|
securenative/enums/failover_strategy.py
|
securenative/securenative-python
|
0313808d6c4f282dd4ad21c7d3cab328ba414782
|
[
"MIT"
] | null | null | null |
securenative/enums/failover_strategy.py
|
securenative/securenative-python
|
0313808d6c4f282dd4ad21c7d3cab328ba414782
|
[
"MIT"
] | 10
|
2019-07-16T07:23:46.000Z
|
2020-11-22T15:01:07.000Z
|
securenative/enums/failover_strategy.py
|
securenative/securenative-python
|
0313808d6c4f282dd4ad21c7d3cab328ba414782
|
[
"MIT"
] | null | null | null |
from enum import Enum
class FailOverStrategy(Enum):
    """Enumerates the supported failover strategies.

    The string values are the wire/config representation of each strategy.
    """

    FAIL_OPEN = "fail-open"
    FAIL_CLOSED = "fail-closed"
| 16.285714
| 31
| 0.710526
| 15
| 114
| 5.266667
| 0.533333
| 0.202532
| 0.303797
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192982
| 114
| 6
| 32
| 19
| 0.858696
| 0
| 0
| 0
| 0
| 0
| 0.175439
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
d6097e204448dd80779bbf46d01b9caac0545572
| 3,431
|
py
|
Python
|
parser/fase2/team15/TytusDB_G15/FuncionInter.py
|
Epatzan/tytus
|
36b4888b7ddba6fef1f04d9c019db27d8f8bd4b0
|
[
"MIT"
] | null | null | null |
parser/fase2/team15/TytusDB_G15/FuncionInter.py
|
Epatzan/tytus
|
36b4888b7ddba6fef1f04d9c019db27d8f8bd4b0
|
[
"MIT"
] | null | null | null |
parser/fase2/team15/TytusDB_G15/FuncionInter.py
|
Epatzan/tytus
|
36b4888b7ddba6fef1f04d9c019db27d8f8bd4b0
|
[
"MIT"
] | null | null | null |
from gramatica import parse
from principal import *
import ts as TS
import ts_index as TSINDEX
from expresiones import *
from instrucciones import *
from report_ast import *
from report_tc import *
from report_ts import *
from report_errores import *
class Intermedio():
    """Drives the TytusDB parser/interpreter through a fixed SQL demo script.

    Each procesar_funcionN() parses one hard-coded SQL statement, rebuilds
    fresh symbol/type tables, executes it, and records the state in the
    module-level globals that Reportes() later reads to build HTML reports.
    """

    # Snapshots of the most recent run (mirrored into module globals).
    instrucciones_Global = []
    tc_global1 = []
    ts_globalIndex1 = []
    ts_global1 = []

    def __init__(self):
        ''' Funcion Intermedia '''

    def _ejecutar(self, sentencia):
        """Parse and execute *sentencia*, returning the interpreter output.

        Returns 'Parser Error' when the grammar reported any error.  This is
        the shared pipeline that procesar_funcion0..3 previously duplicated
        verbatim; behavior is unchanged, including the global side effects.
        """
        global instrucciones_Global, tc_global1, ts_global1, erroressss, ts_globalIndex1
        instrucciones = g.parse(sentencia)
        erroressss = ErrorHTML()
        if erroressss.getList() != []:
            return 'Parser Error'
        instrucciones_Global = instrucciones
        ts_global = TS.TablaDeSimbolos()
        ts_globalIndex = TSINDEX.TablaDeSimbolos()
        tc_global = TC.TablaDeTipos()
        tc_global1 = tc_global
        ts_global1 = ts_global
        ts_globalIndex1 = ts_globalIndex
        return procesar_instrucciones(instrucciones, ts_global, tc_global, ts_globalIndex)

    def procesar_funcion0(self):
        """Create the demo database."""
        return self._ejecutar('CREATE DATABASE DBFase2;')

    def procesar_funcion1(self):
        """Select the demo database."""
        return self._ejecutar('USE DBFase2;')

    def procesar_funcion2(self):
        """Create the demo product table."""
        return self._ejecutar('CREATE TABLE tbProducto ( idproducto integer not null primary key , producto varchar ( 150 ) not null , fechacreacion date not null , estado integer );')

    def procesar_funcion3(self):
        """Create a unique index on the demo table."""
        return self._ejecutar(' CREATE UNIQUE INDEX idx_producto ON tbProducto ( idproducto ) ;')

    def Reportes(self):
        """Generate the AST, type-check and symbol-table HTML reports
        from the state captured by the last executed statement."""
        global instrucciones_Global, tc_global1, ts_global1, listaErrores, ts_globalIndex1
        astGraph = AST()
        astGraph.generarAST(instrucciones_Global)
        typeC = TipeChecker()
        typeC.crearReporte(tc_global1)
        RTablaS = RTablaDeSimbolos()
        RTablaS.crearReporte(ts_global1, ts_globalIndex1)
        RTablaS.crearReporte1(ts_global1, ts_globalIndex1)
        return ''
| 32.065421
| 205
| 0.7645
| 390
| 3,431
| 6.469231
| 0.189744
| 0.050733
| 0.066587
| 0.066587
| 0.723345
| 0.711455
| 0.711455
| 0.711455
| 0.711455
| 0.68807
| 0
| 0.015523
| 0.155057
| 3,431
| 106
| 206
| 32.367925
| 0.854778
| 0.005246
| 0
| 0.622222
| 0
| 0.011111
| 0.096092
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d64359c3a01befe336aba59e21cf9b0b4f327743
| 207
|
py
|
Python
|
pylint_complexity/checker.py
|
willprice/pylint-quality
|
34921e7fe30d3417e2f1ae3e1dc11082d5b850ab
|
[
"Apache-2.0"
] | null | null | null |
pylint_complexity/checker.py
|
willprice/pylint-quality
|
34921e7fe30d3417e2f1ae3e1dc11082d5b850ab
|
[
"Apache-2.0"
] | null | null | null |
pylint_complexity/checker.py
|
willprice/pylint-quality
|
34921e7fe30d3417e2f1ae3e1dc11082d5b850ab
|
[
"Apache-2.0"
] | null | null | null |
from pylint_complexity import MethodCountChecker, MethodLengthChecker
def register(linter):
    """Pylint plugin entry point: attach both complexity checkers to *linter*."""
    for checker_class in (MethodLengthChecker, MethodCountChecker):
        linter.register_checker(checker_class(linter))
| 29.571429
| 69
| 0.84058
| 19
| 207
| 9
| 0.526316
| 0.140351
| 0.233918
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091787
| 207
| 6
| 70
| 34.5
| 0.909574
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d64dab51f52e34e143cc0720966c45b24ff0e65d
| 66
|
py
|
Python
|
xu/src/python/Request/Adapter/__init__.py
|
sonnts996/XuCompa-Request
|
f343e7bfd1b4263eb76438c96d347c549cc75ce3
|
[
"Apache-2.0"
] | null | null | null |
xu/src/python/Request/Adapter/__init__.py
|
sonnts996/XuCompa-Request
|
f343e7bfd1b4263eb76438c96d347c549cc75ce3
|
[
"Apache-2.0"
] | null | null | null |
xu/src/python/Request/Adapter/__init__.py
|
sonnts996/XuCompa-Request
|
f343e7bfd1b4263eb76438c96d347c549cc75ce3
|
[
"Apache-2.0"
] | null | null | null |
from xu.src.python.Request.Adapter.ItemAdapter import ItemAdapter
| 33
| 65
| 0.863636
| 9
| 66
| 6.333333
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 66
| 1
| 66
| 66
| 0.919355
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d6501bfc2ab25178969e3915a06b8b3a649613b1
| 66
|
py
|
Python
|
tests/person_api/__init__.py
|
DerPate/OpenSlides
|
2733a47d315fec9b8f3cb746fd5f3739be225d65
|
[
"MIT"
] | 1
|
2015-03-22T02:07:23.000Z
|
2015-03-22T02:07:23.000Z
|
tests/person_api/__init__.py
|
frauenknecht/OpenSlides
|
6521d6b095bca33dc0c5f09f59067551800ea1e3
|
[
"MIT"
] | null | null | null |
tests/person_api/__init__.py
|
frauenknecht/OpenSlides
|
6521d6b095bca33dc0c5f09f59067551800ea1e3
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
VERSION = (9999, 9999, 9999, 'alpha', 1)
| 16.5
| 40
| 0.530303
| 9
| 66
| 3.888889
| 0.777778
| 0.457143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.264151
| 0.19697
| 66
| 3
| 41
| 22
| 0.396226
| 0.318182
| 0
| 0
| 0
| 0
| 0.116279
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c39d89e68aaeb3ce221e0bed03494d65f2e5beea
| 160
|
py
|
Python
|
blog/context_processors/main_menu.py
|
evg-dev/pyblog
|
23c93f5fb37aeec73e70746f172565fa434aeccb
|
[
"MIT"
] | null | null | null |
blog/context_processors/main_menu.py
|
evg-dev/pyblog
|
23c93f5fb37aeec73e70746f172565fa434aeccb
|
[
"MIT"
] | 5
|
2020-06-05T17:38:56.000Z
|
2022-03-11T23:16:50.000Z
|
blog/context_processors/main_menu.py
|
evg-dev/pyblog
|
23c93f5fb37aeec73e70746f172565fa434aeccb
|
[
"MIT"
] | null | null | null |
from django.core.context_processors import request
from blog.models import Category
def menu(request):
    """Template context processor: expose every blog Category as ``category_menu``."""
    categories = Category.objects.all()
    return {"category_menu": categories}
| 22.857143
| 54
| 0.775
| 21
| 160
| 5.809524
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 160
| 6
| 55
| 26.666667
| 0.871429
| 0
| 0
| 0
| 0
| 0
| 0.08125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
613aca4eb4f7c2ebbdbad73b9c50970865e61c98
| 20
|
py
|
Python
|
hello.py
|
AbishekFiaz/Intern
|
dd00a178385912a6f45750619a309e80a395e260
|
[
"MIT"
] | 1
|
2021-04-13T11:58:16.000Z
|
2021-04-13T11:58:16.000Z
|
hello.py
|
AbishekFiaz/Intern
|
dd00a178385912a6f45750619a309e80a395e260
|
[
"MIT"
] | null | null | null |
hello.py
|
AbishekFiaz/Intern
|
dd00a178385912a6f45750619a309e80a395e260
|
[
"MIT"
] | null | null | null |
print('Hello Fiaz')
| 10
| 19
| 0.7
| 3
| 20
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 20
| 1
| 20
| 20
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
613b2d5ba8d4d98a00c1a042f77afed5674c9fb5
| 52
|
py
|
Python
|
jupyterlabpymolpysnips/Programming/renumResi.py
|
MooersLab/pymolpysnips
|
50a89c85adf8006d85c1d6cd3f8aad7e440a0b92
|
[
"MIT"
] | null | null | null |
jupyterlabpymolpysnips/Programming/renumResi.py
|
MooersLab/pymolpysnips
|
50a89c85adf8006d85c1d6cd3f8aad7e440a0b92
|
[
"MIT"
] | null | null | null |
jupyterlabpymolpysnips/Programming/renumResi.py
|
MooersLab/pymolpysnips
|
50a89c85adf8006d85c1d6cd3f8aad7e440a0b92
|
[
"MIT"
] | null | null | null |
cmd.do('alter 3fa0, resi=str(int(resi)+100);sort;')
| 26
| 51
| 0.673077
| 10
| 52
| 3.5
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102041
| 0.057692
| 52
| 1
| 52
| 52
| 0.612245
| 0
| 0
| 0
| 0
| 0
| 0.788462
| 0.557692
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
619a49b7db842d5b575b1287cc318d0b613403d3
| 167
|
py
|
Python
|
tests/pbraiders/database/adapter/__init__.py
|
pbraiders/pomponne-test-bdd
|
7f2973936318221f54e65e0f8bd839cad7216fa4
|
[
"MIT"
] | 1
|
2021-03-30T14:41:29.000Z
|
2021-03-30T14:41:29.000Z
|
tests/pbraiders/database/adapter/__init__.py
|
pbraiders/pomponne-test-bdd
|
7f2973936318221f54e65e0f8bd839cad7216fa4
|
[
"MIT"
] | null | null | null |
tests/pbraiders/database/adapter/__init__.py
|
pbraiders/pomponne-test-bdd
|
7f2973936318221f54e65e0f8bd839cad7216fa4
|
[
"MIT"
] | null | null | null |
from .abstract import AbstractAdapter
from .pymysql import PyMySQLAdapter
from .adapterfactory import AdapterFactory
from .pymysqlfactory import PyMySQLAdapterFactory
| 33.4
| 49
| 0.88024
| 16
| 167
| 9.1875
| 0.5625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095808
| 167
| 4
| 50
| 41.75
| 0.97351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f63ad693c777b14d824022681181d14eadc4c0b6
| 89
|
py
|
Python
|
application/__init__.py
|
sourcery-ai-bot/pingpanapeng
|
e6a894fddff38c3750f44eaed320fcf8941f38e3
|
[
"MIT"
] | null | null | null |
application/__init__.py
|
sourcery-ai-bot/pingpanapeng
|
e6a894fddff38c3750f44eaed320fcf8941f38e3
|
[
"MIT"
] | null | null | null |
application/__init__.py
|
sourcery-ai-bot/pingpanapeng
|
e6a894fddff38c3750f44eaed320fcf8941f38e3
|
[
"MIT"
] | null | null | null |
from .application import Application
def setup(bot):
    """Extension entry point: construct the Application cog and add it to *bot*."""
    cog = Application(bot)
    bot.add_cog(cog)
| 14.833333
| 36
| 0.752809
| 12
| 89
| 5.5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146067
| 89
| 5
| 37
| 17.8
| 0.868421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9c9e16d869cc36869514f8ec305fddc2450d192b
| 75
|
py
|
Python
|
appify/ui/__init__.py
|
sylvaus/appify
|
269dd0977100fc04c74c0d92dbdd0416de468b00
|
[
"MIT"
] | null | null | null |
appify/ui/__init__.py
|
sylvaus/appify
|
269dd0977100fc04c74c0d92dbdd0416de468b00
|
[
"MIT"
] | 2
|
2019-05-10T11:56:11.000Z
|
2019-07-06T17:23:13.000Z
|
appify/ui/__init__.py
|
sylvaus/appify
|
269dd0977100fc04c74c0d92dbdd0416de468b00
|
[
"MIT"
] | null | null | null |
from appify.ui.ui import Guifier # noqa: F401 used to avoid longer import
| 37.5
| 74
| 0.773333
| 13
| 75
| 4.461538
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.048387
| 0.173333
| 75
| 1
| 75
| 75
| 0.887097
| 0.506667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9cb7b4950e3cbac113db90a2cff51cc2b0efc6fd
| 2,249
|
py
|
Python
|
test_player.py
|
gguilherme42/game_of_the_old
|
bb1fa6d98b6e19f094da49f2047f98119db0d855
|
[
"MIT"
] | null | null | null |
test_player.py
|
gguilherme42/game_of_the_old
|
bb1fa6d98b6e19f094da49f2047f98119db0d855
|
[
"MIT"
] | null | null | null |
test_player.py
|
gguilherme42/game_of_the_old
|
bb1fa6d98b6e19f094da49f2047f98119db0d855
|
[
"MIT"
] | null | null | null |
import unittest
from player import Player
class PlayerTest(unittest.TestCase):
    """Unit tests for Player: input validation and win detection."""

    def setUp(self):
        self.player_1 = Player()

    def _add_choices(self, *choices):
        # Helper: feed several board positions to the player under test.
        for choice in choices:
            self.player_1.add_choice(choice)

    def test_when_the_players_input_its_out_of_range(self):
        # 'C4' is outside the 3x3 board, so the choice list must not grow.
        previous_len = len(self.player_1.choices)
        self.player_1.add_choice('C4')
        last_len = len(self.player_1.choices)
        self.assertEqual(last_len, previous_len)

    def test_when_the_player_input_its_a_string_with_len_bigger_than_2(self):
        self.player_1.add_choice('C1A')
        self.assertFalse(self.player_1.choices)

    def test_when_the_player_input_its_not_a_string(self):
        self.player_1.add_choice(12.4)
        self.assertFalse(self.player_1.choices)

    def test_when_the_player_tries_to_input_the_same_position(self):
        # A duplicate position must be rejected (list length unchanged).
        # Fixed local-variable typo: previos_len -> previous_len.
        self.player_1.add_choice('C1')
        previous_len = len(self.player_1.choices)
        self.player_1.add_choice('C1')
        last_len = len(self.player_1.choices)
        self.assertEqual(last_len, previous_len)

    def test_crescent_diagonal_win(self):
        self._add_choices('A1', 'A3', 'B1', 'B2', 'C3')
        self.assertEqual(self.player_1.status, "winner")

    def test_decrescent_diagonal_win(self):
        self._add_choices('A1', 'A3', 'B1', 'B2', 'C1')
        self.assertEqual(self.player_1.status, "winner")

    def test_horizontal_win(self):
        self._add_choices('A1', 'C1', 'A3', 'B1')
        self.assertEqual(self.player_1.status, "winner")

    def test_vertical_win(self):
        self._add_choices('C2', 'C1', 'C3', 'A3', 'B1')
        self.assertEqual(self.player_1.status, "winner")
# Allow running this test module directly: `python test_player.py`.
if __name__ == "__main__":
    unittest.main()
| 29.592105
| 77
| 0.654958
| 318
| 2,249
| 4.248428
| 0.194969
| 0.259067
| 0.284974
| 0.248705
| 0.76906
| 0.76906
| 0.712805
| 0.661732
| 0.661732
| 0.589933
| 0
| 0.035859
| 0.231214
| 2,249
| 76
| 78
| 29.592105
| 0.745518
| 0
| 0
| 0.54902
| 0
| 0
| 0.035111
| 0
| 0
| 0
| 0
| 0
| 0.156863
| 1
| 0.176471
| false
| 0
| 0.039216
| 0
| 0.235294
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9ced2f00e8ac91755830d2cc3e99c51a3407e58b
| 354
|
py
|
Python
|
ISS/exceptions.py
|
justinm1295/ISS.py
|
a71bf87afc251bc408f6fc3b22af55471863897f
|
[
"MIT"
] | 2
|
2019-07-28T17:07:51.000Z
|
2020-05-12T01:07:47.000Z
|
ISS/exceptions.py
|
justinm1295/ISS.py
|
a71bf87afc251bc408f6fc3b22af55471863897f
|
[
"MIT"
] | null | null | null |
ISS/exceptions.py
|
justinm1295/ISS.py
|
a71bf87afc251bc408f6fc3b22af55471863897f
|
[
"MIT"
] | null | null | null |
class LocationFailureException(Exception):
    """Raised when the ISS location lookup fails."""


class BadLatitudeException(Exception):
    """Raised for an invalid latitude value."""


class BadLongitudeException(Exception):
    """Raised for an invalid longitude value."""


class BadAltitudeException(Exception):
    """Raised for an invalid altitude value."""


class BadNumberException(Exception):
    """Raised for an invalid numeric input."""


class GetPassesException(Exception):
    """Raised when fetching ISS pass predictions fails."""


class AstronautFailureException(Exception):
    """Raised when the astronaut roster lookup fails."""
| 13.111111
| 43
| 0.768362
| 28
| 354
| 9.714286
| 0.357143
| 0.334559
| 0.397059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172316
| 354
| 26
| 44
| 13.615385
| 0.928328
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.571429
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
141bb69c46044f692da45c799cbba63f0f740b09
| 33
|
py
|
Python
|
bot/bot/__main__.py
|
TSPS-Team/Project
|
b1d83cb7957420b8348939f0a1d36f506095519c
|
[
"MIT"
] | null | null | null |
bot/bot/__main__.py
|
TSPS-Team/Project
|
b1d83cb7957420b8348939f0a1d36f506095519c
|
[
"MIT"
] | null | null | null |
bot/bot/__main__.py
|
TSPS-Team/Project
|
b1d83cb7957420b8348939f0a1d36f506095519c
|
[
"MIT"
] | null | null | null |
from .new_main import main

# Guard so importing this module does not start the bot as a side effect;
# `python -m bot.bot` still sets __name__ to "__main__", so behavior when
# run as the package entry point is unchanged.
if __name__ == "__main__":
    main()
| 16.5
| 26
| 0.787879
| 6
| 33
| 4.166667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 33
| 2
| 27
| 16.5
| 0.862069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
1428e05b27e7d6d1b450eba6df963ff775cc8df4
| 19
|
py
|
Python
|
stripe/version.py
|
jameshageman-stripe/stripe-python
|
ab500708e2970d1962e25ee1fbc8634eb6d946cf
|
[
"MIT"
] | 7
|
2020-04-14T09:41:17.000Z
|
2021-08-06T09:38:19.000Z
|
stripe/version.py
|
jameshageman-stripe/stripe-python
|
ab500708e2970d1962e25ee1fbc8634eb6d946cf
|
[
"MIT"
] | null | null | null |
stripe/version.py
|
jameshageman-stripe/stripe-python
|
ab500708e2970d1962e25ee1fbc8634eb6d946cf
|
[
"MIT"
] | 3
|
2020-04-30T12:44:48.000Z
|
2020-12-15T08:40:26.000Z
|
# Library version string (PEP 440 style).
VERSION = '2.17.0'
| 9.5
| 18
| 0.578947
| 4
| 19
| 2.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.157895
| 19
| 1
| 19
| 19
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
14300beed3f95fef7dce0c1af0efb0dbc1cea1cb
| 5,252
|
py
|
Python
|
caffe2/python/operator_test/group_norm_op_test.py
|
Hacky-DH/pytorch
|
80dc4be615854570aa39a7e36495897d8a040ecc
|
[
"Intel"
] | 60,067
|
2017-01-18T17:21:31.000Z
|
2022-03-31T21:37:45.000Z
|
caffe2/python/operator_test/group_norm_op_test.py
|
Hacky-DH/pytorch
|
80dc4be615854570aa39a7e36495897d8a040ecc
|
[
"Intel"
] | 66,955
|
2017-01-18T17:21:38.000Z
|
2022-03-31T23:56:11.000Z
|
caffe2/python/operator_test/group_norm_op_test.py
|
Hacky-DH/pytorch
|
80dc4be615854570aa39a7e36495897d8a040ecc
|
[
"Intel"
] | 19,210
|
2017-01-18T17:45:04.000Z
|
2022-03-31T23:51:56.000Z
|
from caffe2.python import core
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
from hypothesis import given, settings
import hypothesis.strategies as st
import numpy as np
import unittest
class TestGroupNormOp(serial.SerializedTestCase):
    """Tests the Caffe2 GroupNorm operator against a NumPy reference.

    Covers forward output (4-D and 5-D inputs, NCHW and NHWC layouts)
    and numeric gradient checks for all three inputs.
    """

    def group_norm_nchw_ref(self, X, gamma, beta, group, epsilon):
        """NumPy reference for GroupNorm with channels-first (NCHW) layout.

        Returns [Y, mu, inv_std], with mu and inv_std shaped (N, G) —
        one statistic per sample per group, matching the op's extra outputs.
        """
        dims = X.shape
        N = dims[0]
        C = dims[1]
        G = group
        D = int(C / G)
        # Fold channels into (G, D) and flatten spatial dims so the
        # statistics reduce over everything within one group.
        X = X.reshape(N, G, D, -1)
        mu = np.mean(X, axis=(2, 3), keepdims=True)
        std = np.sqrt((np.var(X, axis=(2, 3), keepdims=True) + epsilon))
        gamma = gamma.reshape(G, D, 1)
        beta = beta.reshape(G, D, 1)
        Y = gamma * (X - mu) / std + beta
        return [Y.reshape(dims), mu.reshape(N, G), (1.0 / std).reshape(N, G)]

    def group_norm_nhwc_ref(self, X, gamma, beta, group, epsilon):
        """NumPy reference for GroupNorm with channels-last (NHWC) layout.

        Same contract as group_norm_nchw_ref; only the reduction axes
        differ because channels are the trailing dimension here.
        """
        dims = X.shape
        N = dims[0]
        C = dims[-1]
        G = group
        D = int(C / G)
        X = X.reshape(N, -1, G, D)
        mu = np.mean(X, axis=(1, 3), keepdims=True)
        std = np.sqrt((np.var(X, axis=(1, 3), keepdims=True) + epsilon))
        gamma = gamma.reshape(G, D)
        beta = beta.reshape(G, D)
        Y = gamma * (X - mu) / std + beta
        return [Y.reshape(dims), mu.reshape(N, G), (1.0 / std).reshape(N, G)]

    @serial.given(
        N=st.integers(1, 5), G=st.integers(1, 5), D=st.integers(1, 5),
        H=st.integers(2, 5), W=st.integers(2, 5),
        epsilon=st.floats(min_value=1e-5, max_value=1e-4),
        order=st.sampled_from(["NCHW", "NHWC"]), **hu.gcs)
    def test_group_norm_2d(
            self, N, G, D, H, W, epsilon, order, gc, dc):
        """Forward check on 4-D inputs for both layouts (C = G * D)."""
        op = core.CreateOperator(
            "GroupNorm",
            ["X", "gamma", "beta"],
            ["Y", "mean", "inv_std"],
            group=G,
            epsilon=epsilon,
            order=order,
        )
        C = G * D
        # +1.0 offsets the standard-normal samples; presumably to keep the
        # per-group statistics away from zero — TODO confirm.
        if order == "NCHW":
            X = np.random.randn(N, C, H, W).astype(np.float32) + 1.0
        else:
            X = np.random.randn(N, H, W, C).astype(np.float32) + 1.0
        gamma = np.random.randn(C).astype(np.float32)
        beta = np.random.randn(C).astype(np.float32)
        inputs = [X, gamma, beta]

        def ref_op(X, gamma, beta):
            # Dispatch to the NumPy reference matching the sampled layout.
            if order == "NCHW":
                return self.group_norm_nchw_ref(X, gamma, beta, G, epsilon)
            else:
                return self.group_norm_nhwc_ref(X, gamma, beta, G, epsilon)

        self.assertReferenceChecks(
            device_option=gc,
            op=op,
            inputs=inputs,
            reference=ref_op,
            threshold=5e-3,
        )
        self.assertDeviceChecks(dc, op, inputs, [0, 1, 2])

    @given(N=st.integers(1, 5), G=st.integers(1, 3), D=st.integers(2, 3),
           T=st.integers(2, 4), H=st.integers(2, 4), W=st.integers(2, 4),
           epsilon=st.floats(min_value=1e-5, max_value=1e-4),
           order=st.sampled_from(["NCHW", "NHWC"]), **hu.gcs)
    def test_group_norm_3d(
            self, N, G, D, T, H, W, epsilon, order, gc, dc):
        """Forward check on 5-D inputs (extra temporal dimension T)."""
        op = core.CreateOperator(
            "GroupNorm",
            ["X", "gamma", "beta"],
            ["Y", "mean", "inv_std"],
            group=G,
            epsilon=epsilon,
            order=order,
        )
        C = G * D
        if order == "NCHW":
            X = np.random.randn(N, C, T, H, W).astype(np.float32) + 1.0
        else:
            X = np.random.randn(N, T, H, W, C).astype(np.float32) + 1.0
        gamma = np.random.randn(C).astype(np.float32)
        beta = np.random.randn(C).astype(np.float32)
        inputs = [X, gamma, beta]

        def ref_op(X, gamma, beta):
            # The reference functions flatten all non-channel dims, so they
            # work unchanged for the 5-D case.
            if order == "NCHW":
                return self.group_norm_nchw_ref(X, gamma, beta, G, epsilon)
            else:
                return self.group_norm_nhwc_ref(X, gamma, beta, G, epsilon)

        self.assertReferenceChecks(
            device_option=gc,
            op=op,
            inputs=inputs,
            reference=ref_op,
            threshold=5e-3,
        )
        self.assertDeviceChecks(dc, op, inputs, [0, 1, 2])

    @given(N=st.integers(1, 5), G=st.integers(1, 5), D=st.integers(2, 2),
           H=st.integers(2, 5), W=st.integers(2, 5),
           epsilon=st.floats(min_value=1e-5, max_value=1e-4),
           order=st.sampled_from(["NCHW", "NHWC"]), **hu.gcs)
    @settings(deadline=10000)
    def test_group_norm_grad(
            self, N, G, D, H, W, epsilon, order, gc, dc):
        """Numeric gradient check w.r.t. each of X, gamma, beta."""
        op = core.CreateOperator(
            "GroupNorm",
            ["X", "gamma", "beta"],
            ["Y", "mean", "inv_std"],
            group=G,
            epsilon=epsilon,
            order=order,
        )
        C = G * D
        # A shuffled arange gives every element a distinct value;
        # presumably to avoid degenerate gradients — TODO confirm.
        X = np.arange(N * C * H * W).astype(np.float32)
        np.random.shuffle(X)
        if order == "NCHW":
            X = X.reshape((N, C, H, W))
        else:
            X = X.reshape((N, H, W, C))
        gamma = np.random.randn(C).astype(np.float32)
        beta = np.random.randn(C).astype(np.float32)
        inputs = [X, gamma, beta]
        for i in range(len(inputs)):
            self.assertGradientChecks(gc, op, inputs, i, [0])
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| 34.103896
| 77
| 0.518088
| 745
| 5,252
| 3.574497
| 0.139597
| 0.060083
| 0.052572
| 0.048066
| 0.78896
| 0.770184
| 0.764927
| 0.757792
| 0.757792
| 0.728502
| 0
| 0.032367
| 0.323496
| 5,252
| 153
| 78
| 34.326797
| 0.71714
| 0
| 0
| 0.634328
| 0
| 0
| 0.02763
| 0
| 0
| 0
| 0
| 0
| 0.037313
| 1
| 0.052239
| false
| 0
| 0.052239
| 0
| 0.156716
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1430b538752e633ff021d89df12ab2af6d5f2e9a
| 238
|
py
|
Python
|
iati/guidance_and_support/apps.py
|
andylolz/IATI-Standard-Website
|
b781b9fe6b6430f93826e530e9560183bf8fd310
|
[
"MIT"
] | 4
|
2019-03-28T06:42:17.000Z
|
2021-06-06T13:10:51.000Z
|
iati/guidance_and_support/apps.py
|
andylolz/IATI-Standard-Website
|
b781b9fe6b6430f93826e530e9560183bf8fd310
|
[
"MIT"
] | 177
|
2018-09-28T14:21:56.000Z
|
2022-03-30T21:45:26.000Z
|
iati/guidance_and_support/apps.py
|
andylolz/IATI-Standard-Website
|
b781b9fe6b6430f93826e530e9560183bf8fd310
|
[
"MIT"
] | 8
|
2018-10-25T20:43:10.000Z
|
2022-03-17T14:19:27.000Z
|
"""Application configuration for the guidance_and_support app."""
from django.apps import AppConfig
class GuidanceAndSupportConfig(AppConfig):
    """Config class for the guidance_and_support app."""

    # Dotted path Django uses to register this application.
    name = 'guidance_and_support'
| 23.8
| 65
| 0.773109
| 28
| 238
| 6.357143
| 0.607143
| 0.185393
| 0.303371
| 0.191011
| 0.303371
| 0.303371
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138655
| 238
| 9
| 66
| 26.444444
| 0.868293
| 0.445378
| 0
| 0
| 0
| 0
| 0.165289
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
14427c1514bd1a9dd8473d0e771b3cfc6f08922b
| 48
|
py
|
Python
|
data/processing/__init__.py
|
WadhwaniAI/covid-modelling
|
db9f89bfbec392ad4de6b4583cfab7c3d823c1c9
|
[
"MIT"
] | 3
|
2021-06-23T10:27:11.000Z
|
2022-02-09T07:50:42.000Z
|
data/processing/__init__.py
|
WadhwaniAI/covid-modelling
|
db9f89bfbec392ad4de6b4583cfab7c3d823c1c9
|
[
"MIT"
] | 3
|
2021-06-23T09:36:29.000Z
|
2022-01-13T03:38:16.000Z
|
data/processing/__init__.py
|
WadhwaniAI/covid-modelling
|
db9f89bfbec392ad4de6b4583cfab7c3d823c1c9
|
[
"MIT"
] | null | null | null |
from data.processing.processing import get_data
| 24
| 47
| 0.875
| 7
| 48
| 5.857143
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 48
| 1
| 48
| 48
| 0.931818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
1490720f4e98de1ad19625ca2471e38e23f98142
| 90
|
py
|
Python
|
python/PaxHeaders.127271/setup.py
|
ictyangye/ovs-c2ratelimiter
|
c0e1ada35b3b5f2524fbba6324c9e996e84ac9bc
|
[
"Apache-2.0"
] | null | null | null |
python/PaxHeaders.127271/setup.py
|
ictyangye/ovs-c2ratelimiter
|
c0e1ada35b3b5f2524fbba6324c9e996e84ac9bc
|
[
"Apache-2.0"
] | null | null | null |
python/PaxHeaders.127271/setup.py
|
ictyangye/ovs-c2ratelimiter
|
c0e1ada35b3b5f2524fbba6324c9e996e84ac9bc
|
[
"Apache-2.0"
] | null | null | null |
30 mtime=1527291425.661873739
30 atime=1527291425.777874152
30 ctime=1527291454.665972637
| 22.5
| 29
| 0.866667
| 12
| 90
| 6.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.75
| 0.066667
| 90
| 3
| 30
| 30
| 0.178571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1ae88bd61ff84745ee473d9003ab5c9eb2c8439b
| 54
|
py
|
Python
|
example_project/carts/models/__init__.py
|
aino/django-nimda
|
334709c64cb253c0d1b5676850bd2d8ff9b8bea4
|
[
"BSD-3-Clause"
] | null | null | null |
example_project/carts/models/__init__.py
|
aino/django-nimda
|
334709c64cb253c0d1b5676850bd2d8ff9b8bea4
|
[
"BSD-3-Clause"
] | 7
|
2020-06-05T17:01:18.000Z
|
2022-03-11T23:12:34.000Z
|
example_project/carts/models/__init__.py
|
aino/django-nimda
|
334709c64cb253c0d1b5676850bd2d8ff9b8bea4
|
[
"BSD-3-Clause"
] | null | null | null |
from .cart import Cart
from .cartitem import CartItem
| 18
| 30
| 0.814815
| 8
| 54
| 5.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 54
| 2
| 31
| 27
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1afeb72d29b4e94d774b2d06fbad31dd8a9921c9
| 6,459
|
py
|
Python
|
tests/test_inputs.py
|
kennguyen01/jab
|
97bc2d074fe63f81f2fbe06685e9cbeaef7354e7
|
[
"MIT"
] | null | null | null |
tests/test_inputs.py
|
kennguyen01/jab
|
97bc2d074fe63f81f2fbe06685e9cbeaef7354e7
|
[
"MIT"
] | 2
|
2021-06-02T00:58:37.000Z
|
2021-06-10T13:34:20.000Z
|
tests/test_inputs.py
|
kennguyen01/jab
|
97bc2d074fe63f81f2fbe06685e9cbeaef7354e7
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
from unittest.mock import patch
from unittest import TestCase
from inputs import UserInputs
class TestInputJobs(TestCase):
    """Exercises job-title parsing through a mocked builtins.input."""

    def setUp(self):
        self.inputs = UserInputs()

    def tearDown(self):
        del self.inputs

    def _collect_jobs(self):
        # Run the job prompt once and return whatever was parsed.
        self.inputs.input_jobs()
        return self.inputs.get_jobs()

    @patch("builtins.input", return_value="")
    def test_no_job(self, _mock_input):
        self.assertEqual([], self._collect_jobs())

    @patch("builtins.input", return_value="developer")
    def test_one_word_job(self, _mock_input):
        self.assertEqual(["developer"], self._collect_jobs())

    @patch("builtins.input", return_value="junior developer")
    def test_multiple_words_job(self, _mock_input):
        # Spaces inside one title are joined with '+'.
        self.assertEqual(["junior+developer"], self._collect_jobs())

    @patch("builtins.input", return_value="developer, programmer, engineer")
    def test_multiple_job(self, _mock_input):
        self.assertEqual(["developer", "programmer", "engineer"],
                         self._collect_jobs())

    @patch("builtins.input", return_value="java developer, java engineer")
    def test_multipe_words_and_jobs(self, _mock_input):
        self.assertEqual(["java+developer", "java+engineer"],
                         self._collect_jobs())

    @patch("builtins.input", return_value="!#^%@%eng@%i%ne(_)(er")
    def test_invalid_job(self, _mock_input):
        # Junk characters are stripped, leaving the embedded word.
        self.assertEqual(["engineer"], self._collect_jobs())
class TestInputStates(TestCase):
    """Exercises state-code parsing through a mocked builtins.input."""

    def setUp(self):
        self.inputs = UserInputs()

    def tearDown(self):
        del self.inputs

    def _collect_locations(self):
        # Run the state prompt once and return the parsed mapping.
        self.inputs.input_states()
        return self.inputs.get_locations()

    @patch("builtins.input", return_value="")
    def test_no_state(self, _mock_input):
        self.assertEqual({}, self._collect_locations())

    @patch("builtins.input", return_value="ga")
    def test_one_state(self, _mock_input):
        # Codes are upper-cased and mapped to an (initially empty) city list.
        self.assertEqual({"GA": []}, self._collect_locations())

    @patch("builtins.input", return_value="il, tx, pa")
    def test_multiple_states(self, _mock_input):
        self.assertEqual({"IL": [], "TX": [], "PA": []},
                         self._collect_locations())

    @patch("builtins.input", return_value="n%$!z, v(&n, !^%c%!ad")
    def test_invalid_states(self, _mock_input):
        # Unrecognized codes are dropped entirely.
        self.assertEqual({}, self._collect_locations())
class TestInputCities(TestCase):
    """Exercises city parsing (states are prompted first) via mocked input."""

    def setUp(self):
        self.inputs = UserInputs()

    def tearDown(self):
        del self.inputs

    def _collect_locations(self):
        # Prompt for states, then their cities, and return the mapping.
        self.inputs.input_states()
        self.inputs.input_cities()
        return self.inputs.get_locations()

    @patch("builtins.input", side_effect=["al", ""])
    def test_no_cities(self, _mock_input):
        self.assertEqual({"AL": []}, self._collect_locations())

    @patch("builtins.input", side_effect=["tx", "dallas"])
    def test_one_city_one_state(self, _mock_input):
        self.assertEqual({"TX": ["Dallas"]}, self._collect_locations())

    @patch("builtins.input", side_effect=["ca", "los angeles, san francisco"])
    def test_multiple_cities_one_state(self, _mock_input):
        # City names are title-cased and internal spaces become '+'.
        self.assertEqual({"CA": ["Los+Angeles", "San+Francisco"]},
                         self._collect_locations())

    @patch("builtins.input",
           side_effect=["ga, nm, ny",
                        "atlanta",
                        "santa fe",
                        "new york city"])
    def test_one_city_multiple_states(self, _mock_input):
        expected = {'GA': ['Atlanta'],
                    'NM': ['Santa+Fe'],
                    'NY': ['New+York+City']}
        self.assertEqual(expected, self._collect_locations())

    @patch("builtins.input",
           side_effect=["va, fl, il",
                        "richmond, virginia beach, alexandria",
                        "miami, fort lauderdale",
                        "chicago, rockford, arlington heights"])
    def test_multiples_cities_and_states(self, _mock_input):
        expected = {"FL": ["Miami", "Fort+Lauderdale"],
                    "IL": ["Chicago", "Rockford", "Arlington+Heights"],
                    "VA": ["Richmond", "Virginia+Beach", "Alexandria"]}
        self.assertEqual(expected, self._collect_locations())
class TestInputExp(TestCase):
    """Exercises experience-level parsing through a mocked builtins.input."""

    def setUp(self):
        self.inputs = UserInputs()

    def tearDown(self):
        del self.inputs

    def _collect_exp(self):
        # Run the experience prompt once and return the normalized level.
        self.inputs.input_exp()
        return self.inputs.get_exp()

    @patch("builtins.input", return_value="")
    def test_no_exp(self, _mock_input):
        self.assertEqual(None, self._collect_exp())

    @patch("builtins.input", return_value="entry")
    def test_entry_exp(self, _mock_input):
        self.assertEqual("entry_level", self._collect_exp())

    @patch("builtins.input", return_value="mid")
    def test_mid_exp(self, _mock_input):
        self.assertEqual("mid_level", self._collect_exp())

    @patch("builtins.input", return_value="senior")
    def test_senior_exp(self, _mock_input):
        self.assertEqual("senior_level", self._collect_exp())

    @patch("builtins.input", return_value="en!^$tr&^@#%$y")
    def test_invalid_exp(self, _mock_input):
        # Junk characters are stripped before matching ("entry" survives).
        self.assertEqual("entry_level", self._collect_exp())
| 32.457286
| 78
| 0.609227
| 745
| 6,459
| 5.091275
| 0.136913
| 0.139731
| 0.098866
| 0.089639
| 0.783812
| 0.767203
| 0.767203
| 0.767203
| 0.757712
| 0.665964
| 0
| 0.000207
| 0.251432
| 6,459
| 198
| 79
| 32.621212
| 0.784281
| 0.003251
| 0
| 0.5625
| 0
| 0
| 0.144011
| 0.003262
| 0
| 0
| 0
| 0
| 0.125
| 1
| 0.175
| false
| 0
| 0.01875
| 0
| 0.21875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
215a8c6e4e99fdaf6d9bdc4834294fe7a3776c92
| 72
|
py
|
Python
|
redbeat/__init__.py
|
nigel-gott/redbeat
|
4dc2b924026ba9a460cbe6f550a9275d14cbf2be
|
[
"Apache-2.0"
] | 746
|
2016-02-18T22:40:49.000Z
|
2022-03-30T22:20:25.000Z
|
redbeat/__init__.py
|
nigel-gott/redbeat
|
4dc2b924026ba9a460cbe6f550a9275d14cbf2be
|
[
"Apache-2.0"
] | 178
|
2015-08-21T17:00:21.000Z
|
2022-03-22T00:33:09.000Z
|
redbeat/__init__.py
|
nigel-gott/redbeat
|
4dc2b924026ba9a460cbe6f550a9275d14cbf2be
|
[
"Apache-2.0"
] | 118
|
2016-05-16T15:56:08.000Z
|
2022-03-15T00:33:39.000Z
|
from .schedulers import RedBeatScheduler, RedBeatSchedulerEntry # noqa
| 36
| 71
| 0.847222
| 6
| 72
| 10.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 72
| 1
| 72
| 72
| 0.953125
| 0.055556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
b4a21f4268243f83feff8969da6ab3dcb197c289
| 69
|
py
|
Python
|
horoscofox/signs/sign.py
|
xofoloapp/pyhoroscofox
|
5f1e00e270baa0fda64b71c6d1f91972e5b116ae
|
[
"MIT"
] | 4
|
2018-03-30T14:05:13.000Z
|
2020-08-06T10:41:17.000Z
|
horoscofox/signs/sign.py
|
xofoloapp/pyhoroscofox
|
5f1e00e270baa0fda64b71c6d1f91972e5b116ae
|
[
"MIT"
] | 3
|
2018-10-29T20:39:44.000Z
|
2019-10-19T16:14:03.000Z
|
horoscofox/signs/sign.py
|
xofoloapp/pyhoroscofox
|
5f1e00e270baa0fda64b71c6d1f91972e5b116ae
|
[
"MIT"
] | 1
|
2018-03-29T00:05:41.000Z
|
2018-03-29T00:05:41.000Z
|
class Sign:
    """Base class wrapping a zodiac-sign identifier.

    Stores the given *sign* value verbatim on the instance; subclasses
    presumably supply sign-specific behavior — confirm against callers.
    """

    def __init__(self, sign):
        self.sign = sign

    def __repr__(self):
        # Debug-friendly representation, e.g. Sign('aries').
        return '{}({!r})'.format(type(self).__name__, self.sign)
| 17.25
| 29
| 0.57971
| 9
| 69
| 4
| 0.555556
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.289855
| 69
| 3
| 30
| 23
| 0.734694
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
b4a3e7ff20d469136fa66e6b87728debd58f284d
| 343
|
py
|
Python
|
src/ApisDbInitializer.py
|
ytyaru0/DbInitializer.20180222164330
|
3ae4ea6c62018e356d371e640bdc693a3cb6dacb
|
[
"CC0-1.0"
] | null | null | null |
src/ApisDbInitializer.py
|
ytyaru0/DbInitializer.20180222164330
|
3ae4ea6c62018e356d371e640bdc693a3cb6dacb
|
[
"CC0-1.0"
] | null | null | null |
src/ApisDbInitializer.py
|
ytyaru0/DbInitializer.20180222164330
|
3ae4ea6c62018e356d371e640bdc693a3cb6dacb
|
[
"CC0-1.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import DbInitializer
# Concrete initializer that reuses the base DbInitializer behavior unchanged.
class ApisDbInitializer(DbInitializer.DbInitializer): pass
if __name__ == "__main__":
    # Runtime sanity checks (NOTE: assert statements are stripped under -O).
    assert issubclass(ApisDbInitializer().__class__, DbInitializer.DbInitializer)
    assert isinstance(ApisDbInitializer(), DbInitializer.DbInitializer)
    # Build the database; CreateDb is presumably inherited from
    # DbInitializer — confirm in that module.
    ApisDbInitializer().CreateDb()
| 34.3
| 81
| 0.77551
| 30
| 343
| 8.466667
| 0.633333
| 0.307087
| 0.338583
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003257
| 0.104956
| 343
| 9
| 82
| 38.111111
| 0.824104
| 0.122449
| 0
| 0
| 0
| 0
| 0.026756
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0
| true
| 0.166667
| 0.166667
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
b4d85fee935287a22a4786387ce2df26d7805e46
| 19
|
py
|
Python
|
examples/repr/ex1.py
|
mcorne/python-by-example
|
15339c0909c84b51075587a6a66391100971c033
|
[
"MIT"
] | null | null | null |
examples/repr/ex1.py
|
mcorne/python-by-example
|
15339c0909c84b51075587a6a66391100971c033
|
[
"MIT"
] | null | null | null |
examples/repr/ex1.py
|
mcorne/python-by-example
|
15339c0909c84b51075587a6a66391100971c033
|
[
"MIT"
] | null | null | null |
# Demonstrate repr(): prints the string with quotes, as Python source
# would spell it ('foo').
print('%r' % 'foo')
| 9.5
| 18
| 0.631579
| 3
| 19
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052632
| 19
| 1
| 19
| 19
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
b4dc69d3f8839f0b5d73fdb41e952b3a6695ffaf
| 135
|
py
|
Python
|
preimutils/object_detection/yolo/__init__.py
|
ArianAmani/preimutils
|
d4f79525caae322d94d97febc4654229a2eb7407
|
[
"MIT"
] | null | null | null |
preimutils/object_detection/yolo/__init__.py
|
ArianAmani/preimutils
|
d4f79525caae322d94d97febc4654229a2eb7407
|
[
"MIT"
] | null | null | null |
preimutils/object_detection/yolo/__init__.py
|
ArianAmani/preimutils
|
d4f79525caae322d94d97febc4654229a2eb7407
|
[
"MIT"
] | null | null | null |
from . import coco2yolo
from .img_aug import AMRLImageAug
from . import train_validation_sep
from .validating_data import check_dataset
| 33.75
| 42
| 0.859259
| 19
| 135
| 5.842105
| 0.684211
| 0.18018
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008333
| 0.111111
| 135
| 4
| 42
| 33.75
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b4dff30d936d1f476582c1d0580b73b501514d6e
| 31
|
py
|
Python
|
tests/json_api/__init__.py
|
dailymuse/oz
|
f4fec5078bba3258a15504247394339a100487de
|
[
"BSD-3-Clause"
] | 36
|
2015-01-24T15:38:07.000Z
|
2021-01-26T06:24:43.000Z
|
tests/blinks/__init__.py
|
dailymuse/oz
|
f4fec5078bba3258a15504247394339a100487de
|
[
"BSD-3-Clause"
] | 25
|
2015-03-02T16:44:01.000Z
|
2020-09-25T20:07:06.000Z
|
tests/blinks/__init__.py
|
dailymuse/oz
|
f4fec5078bba3258a15504247394339a100487de
|
[
"BSD-3-Clause"
] | 4
|
2016-01-15T18:27:10.000Z
|
2017-11-15T01:18:55.000Z
|
from .test_middleware import *
| 15.5
| 30
| 0.806452
| 4
| 31
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 1
| 31
| 31
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
b4ebffd1c3b8b455270048e28fb869006a266cef
| 982
|
py
|
Python
|
server/config.py
|
sebwink/deregnet-rest
|
5f6476e4f88659e1f1c654a710e202782873e696
|
[
"BSD-3-Clause"
] | null | null | null |
server/config.py
|
sebwink/deregnet-rest
|
5f6476e4f88659e1f1c654a710e202782873e696
|
[
"BSD-3-Clause"
] | null | null | null |
server/config.py
|
sebwink/deregnet-rest
|
5f6476e4f88659e1f1c654a710e202782873e696
|
[
"BSD-3-Clause"
] | 1
|
2018-01-30T18:38:56.000Z
|
2018-01-30T18:38:56.000Z
|
import os
class Config:
    """Typed accessors for service settings read from environment variables.

    Behavior notes (preserved from the original implementation):
    the str-returning accessors yield the literal string 'None' when the
    variable is unset (str(None)); the int-returning accessors raise
    TypeError in that case (int(None)).
    """

    @classmethod
    def get(cls, envvar):
        """Return the raw value of *envvar*, or None if it is unset."""
        return os.environ.get(envvar)

    @classmethod
    def _get_str(cls, envvar):
        # Shared helper for the string-typed accessors below.
        return str(cls.get(envvar))

    @classmethod
    def _get_int(cls, envvar):
        # Shared helper for the int-typed accessors below.
        return int(cls.get(envvar))

    @classmethod
    def mongo_host(cls):
        return cls._get_str('MONGO_HOST')

    @classmethod
    def mongo_port(cls):
        return cls._get_int('MONGO_PORT')

    @classmethod
    def mongo_user(cls):
        return cls._get_str('MONGO_USER')

    @classmethod
    def mongo_password(cls):
        return cls._get_str('MONGO_PASSWORD')

    @classmethod
    def redis_host(cls):
        return cls._get_str('REDIS_HOST')

    @classmethod
    def redis_port(cls):
        # NOTE: returns a string, unlike mongo_port — kept as-is because
        # existing callers may rely on the string type.
        return cls._get_str('REDIS_PORT')

    @classmethod
    def host(cls):
        return cls._get_str('HOST')

    @classmethod
    def port(cls):
        return cls._get_int('PORT')

    @classmethod
    def debug(cls):
        """Debug is enabled unless DEBUG is exactly the string 'false'."""
        return cls.get('DEBUG') != 'false'

    @classmethod
    def server_backend(cls):
        # Raw value (may be None when unset), matching the original.
        return cls.get('SERVER_BACKEND')
| 19.64
| 45
| 0.59776
| 121
| 982
| 4.735537
| 0.206612
| 0.268761
| 0.125654
| 0.157068
| 0.329843
| 0.329843
| 0
| 0
| 0
| 0
| 0
| 0
| 0.279022
| 982
| 49
| 46
| 20.040816
| 0.809322
| 0
| 0
| 0.297297
| 0
| 0
| 0.09776
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.297297
| false
| 0.054054
| 0.027027
| 0.27027
| 0.675676
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 5
|
3705f430915b1718d1160c478ba83b33ff8460d1
| 14,275
|
py
|
Python
|
tests/deprecated/test_terms.py
|
dmpayton/reqlon
|
69ea152acaed1bf4d5a6219e23e8af46f77fb9ee
|
[
"MIT"
] | null | null | null |
tests/deprecated/test_terms.py
|
dmpayton/reqlon
|
69ea152acaed1bf4d5a6219e23e8af46f77fb9ee
|
[
"MIT"
] | null | null | null |
tests/deprecated/test_terms.py
|
dmpayton/reqlon
|
69ea152acaed1bf4d5a6219e23e8af46f77fb9ee
|
[
"MIT"
] | null | null | null |
import geojson
import reqon.deprecated as reqon
import rethinkdb as r
import unittest
import pytest
from reqon.deprecated import terms
from .utils import ReQONTestMixin
class TermsTests(ReQONTestMixin, unittest.TestCase):
    """Tests for the deprecated ``reqon`` term builders.

    Each test builds the same ReQL query twice -- once through the reqon
    TERMS table (or ``terms`` module directly) and once with the rethinkdb
    driver -- and compares their string representations.

    Bug fix: the file previously defined ``test_get_intersecting`` twice;
    the second definition (which actually exercises ``$get_nearest``)
    silently shadowed the first, so the ``$get_intersecting`` test never
    ran.  The second test is now correctly named ``test_get_nearest``.
    """

    def setUp(self):
        self.reql = r.table('movies')

    # Expand Path

    def test_expand_path(self):
        expanded = terms._expand_path('foo.bar.baz')
        expected = {'foo': {'bar': {'baz': True}}}
        assert expected == expanded

    def test_expand_path_with_array(self):
        expanded = terms._expand_path([1, 2, 3])
        expected = [1, 2, 3]
        assert expanded == expected

    # Get

    def test_get(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$get'](self.reql, '123'))
        reql2 = self.reqlify(lambda: self.reql.get('123'))
        assert str(reql1) == str(reql2)

    def test_get_invalid_type(self):
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.get(self.reql, {"foo": "bar"})
        assert terms.ERRORS['type']['invalid'].format('get') == str(excinfo.value)
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.get(self.reql, [1, 2, {"foo": "bar"}])
        assert terms.ERRORS['type']['invalid'].format('get') == str(excinfo.value)

    # Get All

    def test_get_all(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$get_all'](self.reql, ['123', '456']))
        reql2 = self.reqlify(lambda: self.reql.get_all('123', '456', index='id'))
        assert str(reql1) == str(reql2)

    def test_get_all_indexed(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$get_all'](self.reql, ['rank', ['123', '456']]))
        reql2 = self.reqlify(lambda: self.reql.get_all('123', '456', index='rank'))
        assert str(reql1) == str(reql2)

    # Filter

    def test_filter(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$filter'](self.reql, [
            ['rank', ['$gt', 8]],
            ['age', ['$lt', 6]]
        ]))
        reql2 = self.reqlify(lambda: self.reql.filter(r.row['rank'].gt(8)).filter(r.row['age'].lt(6)))
        assert str(reql1) == str(reql2)

    def test_invalid_filter(self):
        with pytest.raises(reqon.exceptions.InvalidFilterError) as excinfo:
            terms.filter_(self.reql, [{'foo': 'bar'}])
        assert terms.ERRORS['filter']['invalid'].format("[{'foo': 'bar'}]") == str(excinfo.value)

    # Has Fields

    def test_has_fields(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$has_fields'](self.reql, ['title']))
        reql2 = self.reqlify(lambda: self.reql.has_fields('title'))
        assert str(reql1) == str(reql2)

    # With Fields

    def test_with_fields(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$with_fields'](self.reql, ['title']))
        reql2 = self.reqlify(lambda: self.reql.with_fields('title'))
        assert str(reql1) == str(reql2)

    # Order By

    def test_order_by(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$order_by'](self.reql, 'title'))
        reql2 = self.reqlify(lambda: self.reql.order_by('title'))
        assert str(reql1) == str(reql2)

    def test_order_by_indexed(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$order_by'](self.reql, ['$index', 'title']))
        reql2 = self.reqlify(lambda: self.reql.order_by(index='title'))
        assert str(reql1) == str(reql2)

    def test_order_by_ascending(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$order_by'](self.reql, ['rank', '$asc']))
        reql2 = self.reqlify(lambda: self.reql.order_by(r.asc('rank')))
        assert str(reql1) == str(reql2)

    def test_order_by_descending(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$order_by'](self.reql, ['rank', '$desc']))
        reql2 = self.reqlify(lambda: self.reql.order_by(r.desc('rank')))
        assert str(reql1) == str(reql2)

    def test_order_by_asc_indexed(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$order_by'](self.reql, ['$index', 'rank', '$asc']))
        reql2 = self.reqlify(lambda: self.reql.order_by(index=r.asc('rank')))
        assert str(reql1) == str(reql2)

    def test_order_by_desc_indexed(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$order_by'](self.reql, ['$index', 'rank', '$desc']))
        reql2 = self.reqlify(lambda: self.reql.order_by(index=r.desc('rank')))
        assert str(reql1) == str(reql2)

    # Skip

    def test_skip(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$skip'](self.reql, 100))
        reql2 = self.reqlify(lambda: self.reql.skip(100))
        assert str(reql1) == str(reql2)

    def test_invalid_skip(self):
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.skip(self.reql, "4")
        assert terms.ERRORS['type']['int'].format('skip') == str(excinfo.value)
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.skip(self.reql, 1.4)
        assert terms.ERRORS['type']['int'].format('skip') == str(excinfo.value)

    # Limit

    def test_limit(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$limit'](self.reql, 100))
        reql2 = self.reqlify(lambda: self.reql.limit(100))
        assert str(reql1) == str(reql2)

    def test_invalid_limit(self):
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.limit(self.reql, "10")
        assert terms.ERRORS['type']['int'].format('limit') == str(excinfo.value)
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.limit(self.reql, 10.5)
        assert terms.ERRORS['type']['int'].format('limit') == str(excinfo.value)

    # Slice

    def test_slice(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$slice'](self.reql, [10, 20]))
        reql2 = self.reqlify(lambda: self.reql.slice(10, 20))
        assert str(reql1) == str(reql2)

    def test_invalid_slice(self):
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.slice_(self.reql, 10)
        assert terms.ERRORS['type']['invalid'].format('slice_') == str(excinfo.value)
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.slice_(self.reql, [10.4, 10])
        assert terms.ERRORS['type']['invalid'].format('slice_') == str(excinfo.value)
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.slice_(self.reql, ["10", 20])
        assert terms.ERRORS['type']['invalid'].format('slice_') == str(excinfo.value)

    # Nth

    def test_nth(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$nth'](self.reql, 100))
        reql2 = self.reqlify(lambda: self.reql.nth(100))
        assert str(reql1) == str(reql2)

    def test_invalid_nth(self):
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.nth(self.reql, "10")
        assert terms.ERRORS['type']['int'].format('nth') == str(excinfo.value)
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.nth(self.reql, 10.5)
        assert terms.ERRORS['type']['int'].format('nth') == str(excinfo.value)

    # Sample

    def test_sample(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$sample'](self.reql, 10))
        reql2 = self.reqlify(lambda: self.reql.sample(10))
        assert str(reql1) == str(reql2)

    def test_invalid_sample(self):
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.sample(self.reql, "10")
        assert terms.ERRORS['type']['int'].format('sample') == str(excinfo.value)
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.sample(self.reql, 10.5)
        assert terms.ERRORS['type']['int'].format('sample') == str(excinfo.value)

    # Pluck

    def test_pluck(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$pluck'](self.reql, ['title', 'year']))
        reql2 = self.reqlify(lambda: self.reql.pluck('title', 'year'))
        assert str(reql1) == str(reql2)

    def test_invalid_pluck(self):
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.pluck(self.reql, "10")
        assert terms.ERRORS['type']['invalid'].format('pluck') == str(excinfo.value)
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.pluck(self.reql, ["10", 1])
        assert terms.ERRORS['type']['invalid'].format('pluck') == str(excinfo.value)

    # Without

    def test_without(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$without'](self.reql, ['title', 'year']))
        reql2 = self.reqlify(lambda: self.reql.without('title', 'year'))
        assert str(reql1) == str(reql2)

    def test_invalid_without(self):
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.without(self.reql, "10")
        assert terms.ERRORS['type']['invalid'].format('without') == str(excinfo.value)
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.without(self.reql, ["10", 1])
        assert terms.ERRORS['type']['invalid'].format('without') == str(excinfo.value)

    # Group

    def test_group(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$group'](self.reql, 'rating'))
        reql2 = self.reqlify(lambda: self.reql.group('rating'))
        assert str(reql1) == str(reql2)

    def test_group_indexed(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$group'](self.reql, ['$index', 'rating']))
        reql2 = self.reqlify(lambda: self.reql.group(index='rating'))
        assert str(reql1) == str(reql2)

    def test_invalid_group(self):
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.group(self.reql, 1)
        assert terms.ERRORS['type']['invalid'].format('group') == str(excinfo.value)
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.group(self.reql, ["$index", 1])
        assert terms.ERRORS['type']['invalid'].format('group') == str(excinfo.value)

    # Count

    def test_count(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$count'](self.reql))
        reql2 = self.reqlify(lambda: self.reql.count())
        assert str(reql1) == str(reql2)

    def test_count_field(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$count'](self.reql, 'year'))
        reql2 = self.reqlify(lambda: self.reql.count('year'))
        assert str(reql1) == str(reql2)

    # Sum

    def test_sum(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$sum'](self.reql, 'rating'))
        reql2 = self.reqlify(lambda: self.reql.sum('rating'))
        assert str(reql1) == str(reql2)

    def test_invalid_sum(self):
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.sum_(self.reql, 1)
        assert terms.ERRORS['type']['string'].format('sum_') == str(excinfo.value)

    # Avg

    def test_avg(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$avg'](self.reql, 'rating'))
        reql2 = self.reqlify(lambda: self.reql.avg('rating'))
        assert str(reql1) == str(reql2)

    def test_invalid_avg(self):
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.avg(self.reql, 1)
        assert terms.ERRORS['type']['string'].format('avg') == str(excinfo.value)

    # Min

    def test_min(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$min'](self.reql, 'rating'))
        reql2 = self.reqlify(lambda: self.reql.min('rating'))
        assert str(reql1) == str(reql2)

    def test_invalid_min_(self):
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.min_(self.reql, 1)
        assert terms.ERRORS['type']['string'].format('min_') == str(excinfo.value)

    # Max

    def test_max(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$max'](self.reql, 'rating'))
        reql2 = self.reqlify(lambda: self.reql.max('rating'))
        assert str(reql1) == str(reql2)

    def test_invalid_max_(self):
        with pytest.raises(reqon.exceptions.InvalidTypeError) as excinfo:
            terms.max_(self.reql, 1)
        assert terms.ERRORS['type']['string'].format('max_') == str(excinfo.value)

    # Between

    def test_between(self):
        _from = r.time(2016, 1, 1, 0, 0, 0, 'Z')
        _to = r.time(2016, 1, 31, 0, 0, 0, 'Z')
        reql1 = self.reqlify(lambda: reqon.TERMS['$between'](self.reql, ['2016-01-01', '2016-01-31', 'timestamp']))
        reql2 = self.reqlify(lambda: self.reql.between(_from, _to, index='timestamp'))
        assert str(reql1) == str(reql2)

    def test_nil_index(self):
        _from = r.time(2016, 1, 1, 0, 0, 0, 'Z')
        _to = r.time(2016, 1, 31, 0, 0, 0, 'Z')
        reql1 = self.reqlify(lambda: reqon.TERMS['$between'](self.reql, ['2016-01-01', '2016-01-31']))
        reql2 = self.reqlify(lambda: self.reql.between(_from, _to))
        assert str(reql1) == str(reql2)

    def test_between_with_strings(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$between'](self.reql, ['ab', 'ef']))
        reql2 = self.reqlify(lambda: self.reql.between('ab', 'ef'))
        assert str(reql1) == str(reql2)

    def test_between_with_int(self):
        reql1 = self.reqlify(lambda: reqon.TERMS['$between'](self.reql, [1, 2, "foo"]))
        reql2 = self.reqlify(lambda: self.reql.between(1, 2, index="foo"))
        assert str(reql1) == str(reql2)

    # Geo

    def test_get_intersecting(self):
        point = geojson.utils.generate_random('Point')
        reql1 = self.reqlify(lambda: reqon.TERMS['$get_intersecting'](self.reql, ['location', point]))
        reql2 = self.reqlify(lambda: self.reql.get_intersecting(r.point(*point['coordinates']), index='location'))
        assert str(reql1) == str(reql2)

    def test_get_nearest(self):
        # Previously also named test_get_intersecting, which shadowed the
        # test above; renamed so both tests actually run.
        point = geojson.utils.generate_random('Point')
        reql1 = self.reqlify(lambda: reqon.TERMS['$get_nearest'](self.reql, ['location', point]))
        reql2 = self.reqlify(lambda: self.reql.get_nearest(r.point(*point['coordinates']), index='location'))
        assert str(reql1) == str(reql2)
| 36.508951
| 115
| 0.622347
| 1,820
| 14,275
| 4.8
| 0.065934
| 0.083333
| 0.128434
| 0.083104
| 0.855197
| 0.84272
| 0.820284
| 0.732372
| 0.683837
| 0.559524
| 0
| 0.028622
| 0.209457
| 14,275
| 390
| 116
| 36.602564
| 0.745503
| 0.010088
| 0
| 0.338776
| 0
| 0
| 0.077899
| 0
| 0
| 0
| 0
| 0
| 0.240816
| 1
| 0.204082
| false
| 0
| 0.028571
| 0
| 0.236735
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
370a53ceed296e3cc7c390084feb6c7c91c74e36
| 21
|
py
|
Python
|
sk.py
|
timeswarner/hfsk
|
93bea20e117f7866afa78fddb2fce649b5da6d6b
|
[
"Apache-2.0"
] | null | null | null |
sk.py
|
timeswarner/hfsk
|
93bea20e117f7866afa78fddb2fce649b5da6d6b
|
[
"Apache-2.0"
] | null | null | null |
sk.py
|
timeswarner/hfsk
|
93bea20e117f7866afa78fddb2fce649b5da6d6b
|
[
"Apache-2.0"
] | null | null | null |
# Celebratory message; the string is kept verbatim.
message = "终于学会了GitHub!"
print(message)
| 21
| 21
| 0.761905
| 2
| 21
| 8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 21
| 1
| 21
| 21
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
371e5f7293800a264791229d5417583d720f82d9
| 168
|
py
|
Python
|
rna-transcription/rna_transcription.py
|
philcleveland/exercism_python
|
bf0be451bbddf40ccc9967149d7259f4810d9972
|
[
"MIT"
] | null | null | null |
rna-transcription/rna_transcription.py
|
philcleveland/exercism_python
|
bf0be451bbddf40ccc9967149d7259f4810d9972
|
[
"MIT"
] | null | null | null |
rna-transcription/rna_transcription.py
|
philcleveland/exercism_python
|
bf0be451bbddf40ccc9967149d7259f4810d9972
|
[
"MIT"
] | null | null | null |
# Complement lookup: DNA nucleotide -> RNA nucleotide.
dna_to_rna_map = dict(zip("GCTA", "CGAU"))


def to_rna(dna_strand):
    """Return the RNA transcription of *dna_strand*.

    Raises KeyError for any character outside G/C/T/A, matching the
    original lookup behaviour.
    """
    return "".join(dna_to_rna_map[nucleotide] for nucleotide in dna_strand)
| 18.666667
| 63
| 0.511905
| 29
| 168
| 2.655172
| 0.551724
| 0.194805
| 0.207792
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.261905
| 168
| 9
| 63
| 18.666667
| 0.620968
| 0
| 0
| 0
| 0
| 0
| 0.047337
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0
| 0.125
| 0.25
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
2e99f8ae8309ef1a0a4046d8fb418f973e903829
| 77
|
py
|
Python
|
alpha_viergewinnt/inspector/__init__.py
|
wahtak/alpha_viergewinnt
|
569b66e656722387e450f72842ed7fe8c7d1a732
|
[
"MIT"
] | null | null | null |
alpha_viergewinnt/inspector/__init__.py
|
wahtak/alpha_viergewinnt
|
569b66e656722387e450f72842ed7fe8c7d1a732
|
[
"MIT"
] | null | null | null |
alpha_viergewinnt/inspector/__init__.py
|
wahtak/alpha_viergewinnt
|
569b66e656722387e450f72842ed7fe8c7d1a732
|
[
"MIT"
] | null | null | null |
from .interface import set_logger, log
from .value_logger import ValueLogger
| 25.666667
| 38
| 0.844156
| 11
| 77
| 5.727273
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116883
| 77
| 2
| 39
| 38.5
| 0.926471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
2ede38ad7583c8205a6430a05d9b954433f93920
| 409
|
py
|
Python
|
timeline_api/graphql_api/model/group.py
|
Gasper/timelines
|
2a124361d9ce13c3be189ade6f65424d732d8044
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
timeline_api/graphql_api/model/group.py
|
Gasper/timelines
|
2a124361d9ce13c3be189ade6f65424d732d8044
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
timeline_api/graphql_api/model/group.py
|
Gasper/timelines
|
2a124361d9ce13c3be189ade6f65424d732d8044
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
from graphene import ObjectType, String, ID
class Group(ObjectType):
    # GraphQL object type for a timeline group with id/name/category_id
    # fields.  Deliberately no class docstring: graphene uses the docstring
    # as the schema description, so adding one would change the schema.
    id = ID()
    name = String()
    category_id = String()

    @staticmethod
    def resolve_id(group, info):
        # ``group`` appears to be a dict-like record (resolved via .get) --
        # TODO confirm against whatever parent resolver supplies it.
        return group.get('id')

    @staticmethod
    def resolve_name(group, info):
        return group.get('name')

    @staticmethod
    def resolve_category_id(group, info):
        return group.get('category_id')
| 22.722222
| 43
| 0.633252
| 48
| 409
| 5.270833
| 0.333333
| 0.118577
| 0.26087
| 0.237154
| 0.288538
| 0.197628
| 0
| 0
| 0
| 0
| 0
| 0
| 0.259169
| 409
| 18
| 44
| 22.722222
| 0.834984
| 0
| 0
| 0.214286
| 0
| 0
| 0.041463
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.214286
| false
| 0
| 0.071429
| 0.214286
| 0.785714
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
25b55f5372e9a508e921777f0c2558284be7252c
| 25
|
py
|
Python
|
atividade1.py
|
Guilherme068/LogicaDeProgama-o
|
0a3d0b9421e87ff118c577d6700fb8220e97408a
|
[
"MIT"
] | null | null | null |
atividade1.py
|
Guilherme068/LogicaDeProgama-o
|
0a3d0b9421e87ff118c577d6700fb8220e97408a
|
[
"MIT"
] | null | null | null |
atividade1.py
|
Guilherme068/LogicaDeProgama-o
|
0a3d0b9421e87ff118c577d6700fb8220e97408a
|
[
"MIT"
] | null | null | null |
# Greeting for the professor; the string is kept verbatim.
saudacao = "Olá professor"
print(saudacao)
| 8.333333
| 23
| 0.68
| 3
| 25
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 25
| 2
| 24
| 12.5
| 0.809524
| 0
| 0
| 0
| 0
| 0
| 0.541667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
d3348c0f3490fb69859b1ff94ad8c5c0c5a6684e
| 12
|
py
|
Python
|
login.py
|
laao-ren/test_17
|
8d148b4b8f502687ab8cd551749881194f2cd7a7
|
[
"MIT"
] | null | null | null |
login.py
|
laao-ren/test_17
|
8d148b4b8f502687ab8cd551749881194f2cd7a7
|
[
"MIT"
] | null | null | null |
login.py
|
laao-ren/test_17
|
8d148b4b8f502687ab8cd551749881194f2cd7a7
|
[
"MIT"
] | null | null | null |
# Module-level constant used by the login test module.
# The stray bare literal ``123`` that followed this assignment was a no-op
# expression statement and has been removed.
num = 1
| 4
| 7
| 0.583333
| 3
| 12
| 2.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 0.333333
| 12
| 2
| 8
| 6
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d338bd044c65d67400f239c8b3c50cae658c2d46
| 55
|
py
|
Python
|
models/modules/__init__.py
|
jay-z007/neumann-optimizer
|
c931631346a1097d198983684d7c68d91ae82d39
|
[
"MIT"
] | 10
|
2018-10-25T04:37:30.000Z
|
2019-03-08T15:05:27.000Z
|
models/modules/__init__.py
|
jay-z007/neumann-optimizer
|
c931631346a1097d198983684d7c68d91ae82d39
|
[
"MIT"
] | null | null | null |
models/modules/__init__.py
|
jay-z007/neumann-optimizer
|
c931631346a1097d198983684d7c68d91ae82d39
|
[
"MIT"
] | 3
|
2018-05-07T19:08:57.000Z
|
2019-09-04T06:22:11.000Z
|
# Nothing much here except the import of Net.
from .Net import Net
| 13.75
| 32
| 0.781818
| 9
| 55
| 4.777778
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 55
| 3
| 33
| 18.333333
| 0.955556
| 0.563636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d35007f22c9c256c545b0d67c997255f14e7cf0c
| 161
|
py
|
Python
|
LABORATORIO 4/ejercicio 4 laboratorio 4.py
|
msolivera/Phyton
|
1322fa2ff4bb06a17350fefa7e5268c0969e5b53
|
[
"bzip2-1.0.6"
] | null | null | null |
LABORATORIO 4/ejercicio 4 laboratorio 4.py
|
msolivera/Phyton
|
1322fa2ff4bb06a17350fefa7e5268c0969e5b53
|
[
"bzip2-1.0.6"
] | null | null | null |
LABORATORIO 4/ejercicio 4 laboratorio 4.py
|
msolivera/Phyton
|
1322fa2ff4bb06a17350fefa7e5268c0969e5b53
|
[
"bzip2-1.0.6"
] | null | null | null |
def dibujo(base, altura):
    """Print a hollow rectangle of 'x' characters.

    Draws a solid top row of ``base`` x's, ``altura`` hollow middle rows
    ("x", ``base - 2`` spaces, "x"), and a solid bottom row.

    Fix: the original assigned ``dibujo = print(...)``, storing ``None``
    in a local that was never used; the dead assignments are removed.
    """
    print("x" * base)
    for fila in range(altura):
        print("x" + " " * (base - 2) + "x")
    print("x" * base)


dibujo(7, 5)
| 20.125
| 35
| 0.565217
| 25
| 161
| 3.64
| 0.52
| 0.197802
| 0.32967
| 0.351648
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023622
| 0.21118
| 161
| 7
| 36
| 23
| 0.692913
| 0
| 0
| 0.333333
| 0
| 0
| 0.03125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0
| 0
| 0.166667
| 0.5
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
d3a371577e6c4b6f44c467d5cd2e6e8e081929b4
| 215
|
py
|
Python
|
tianshou/utils/__init__.py
|
cm107/tianshou
|
0febf4bc1dc1366d837bab4574664f8116b66819
|
[
"MIT"
] | 1
|
2021-01-14T13:21:47.000Z
|
2021-01-14T13:21:47.000Z
|
tianshou/utils/__init__.py
|
q-learning-trader/tianshou
|
c97aa4065ee8464bd5897bb86f1f81abd8e2cff9
|
[
"MIT"
] | null | null | null |
tianshou/utils/__init__.py
|
q-learning-trader/tianshou
|
c97aa4065ee8464bd5897bb86f1f81abd8e2cff9
|
[
"MIT"
] | 1
|
2020-04-25T13:05:21.000Z
|
2020-04-25T13:05:21.000Z
|
from tianshou.utils.config import tqdm_config
from tianshou.utils.moving_average import MovAvg
from tianshou.utils.log_tools import SummaryWriter
# Public API of tianshou.utils (same names, same order as before).
__all__ = ["MovAvg", "tqdm_config", "SummaryWriter"]
| 21.5
| 50
| 0.767442
| 26
| 215
| 6.038462
| 0.5
| 0.229299
| 0.324841
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148837
| 215
| 9
| 51
| 23.888889
| 0.857924
| 0
| 0
| 0
| 0
| 0
| 0.139535
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.375
| 0
| 0.375
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
6ca6dc1248c77e0e9c391874485bab1139d65f84
| 1,192
|
py
|
Python
|
passax/exceptions.py
|
Auax/passax
|
ee1b58608b3ea30970f285321d0c1821f0452c8b
|
[
"MIT"
] | 1
|
2021-12-11T22:55:56.000Z
|
2021-12-11T22:55:56.000Z
|
passax/exceptions.py
|
Auax/passax
|
ee1b58608b3ea30970f285321d0c1821f0452c8b
|
[
"MIT"
] | null | null | null |
passax/exceptions.py
|
Auax/passax
|
ee1b58608b3ea30970f285321d0c1821f0452c8b
|
[
"MIT"
] | null | null | null |
class Error(Exception):
    """Base class for all passax exceptions.

    The first positional argument, if given, is kept as ``message`` and
    used as the string form; falsy/absent messages render as a single
    space (matching the original behaviour).
    """

    def __init__(self, *args):
        # Keep only the first positional argument as the message.
        self.message = args[0] if args else None

    def __str__(self):
        # None (or any falsy message) renders as a single space.
        return self.message or " "
# Concrete exception types.  Each previous per-class ``__init__`` merely
# re-dispatched to ``Error.__init__`` with the same arguments, so the
# overrides were redundant and have been removed; the inherited
# constructor behaves identically.

class DatabaseError(Error):
    """Generic database failure."""


class DatabaseIsLocked(Error):
    """Database is locked (e.g. by another process)."""


class DatabaseUndefinedTable(Error):
    """A queried table does not exist in the database."""


class DatabaseNotFound(Error):
    """The database file could not be located."""


class OSNotSupported(Error):
    """The current operating system is not supported."""


class BadOS(Error):
    """Unexpected or unrecognised operating system."""


class BrowserNotImplemented(Error):
    """Requested browser has no implementation."""


class MacOSKeychainAccessError(Error):
    """Failure accessing the macOS keychain."""


class LinuxSafeStorageError(Error):
    """Failure accessing Linux Safe Storage."""
| 20.20339
| 53
| 0.634228
| 131
| 1,192
| 5.160305
| 0.206107
| 0.224852
| 0.337278
| 0.221893
| 0.551775
| 0.551775
| 0.551775
| 0.551775
| 0.551775
| 0.497041
| 0
| 0.001109
| 0.243289
| 1,192
| 58
| 54
| 20.551724
| 0.748337
| 0.026007
| 0
| 0.542857
| 0
| 0
| 0.001747
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.314286
| false
| 0
| 0
| 0.028571
| 0.628571
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
6cb67bb6212bee6b0f6e4e3b242cebb3274af909
| 5,333
|
py
|
Python
|
days_n_links.py
|
DanielCaminero87/zoomAutoJoiner
|
7ec6a3f5385d40ddeb07cccbf8272533fb21b402
|
[
"MIT"
] | null | null | null |
days_n_links.py
|
DanielCaminero87/zoomAutoJoiner
|
7ec6a3f5385d40ddeb07cccbf8272533fb21b402
|
[
"MIT"
] | null | null | null |
days_n_links.py
|
DanielCaminero87/zoomAutoJoiner
|
7ec6a3f5385d40ddeb07cccbf8272533fb21b402
|
[
"MIT"
] | null | null | null |
import calendar
import datetime
import webbrowser
from periods import *
# Snapshot of the current time, taken once at module import; the schedule
# functions below compare against this fixed value rather than re-reading
# the clock on each call.
now = datetime.datetime.now()
# --- Zoom link launchers ------------------------------------------------
# Each helper opens its class's Zoom URL in the default browser.  The
# "ZOOM LINK HERE" placeholders are kept verbatim for the user to fill in.

def math():
    """Open the math class Zoom link."""
    webbrowser.open("ZOOM LINK HERE")


def homeroom():
    """Open the homeroom Zoom link."""
    webbrowser.open("ZOOM LINK HERE")


def socailStudies():
    """Open the social-studies Zoom link (name kept as-is for callers)."""
    webbrowser.open("ZOOM LINK HERE")


def music():
    """Open the music class Zoom link."""
    webbrowser.open("ZOOM LINK HERE")


def english():
    """Open the english class Zoom link."""
    webbrowser.open("ZOOM LINK HERE")


def comp():
    """Open the computer class Zoom link."""
    webbrowser.open("ZOOM LINK HERE")
# monday Schedule
def monday():
    """Run Monday's schedule: join the class matching the current time.

    Bug fix: the original compared ``now.strftime("%H")`` -- which is
    zero-padded ("08") -- against single-digit strings like "8", so all
    morning slots never matched.  ``str(now.hour)`` yields the unpadded
    hour.  Minutes stay zero-padded via "%M" to match the table keys.
    """
    hour = str(now.hour)
    minute = now.strftime("%M")
    time1()
    # (hour, minute) -> actions for that slot, run in order.
    slots = {
        ("8", "00"): (homeroom, time2),
        ("8", "10"): (time3,),
        ("8", "30"): (time4,),
        ("9", "15"): (time5,),
        ("10", "00"): (math, time6),
        ("10", "45"): (socailStudies, time7),
        ("11", "30"): (time8,),
        ("12", "00"): (homeroom, time9),
        ("12", "30"): (english, time10),
        ("13", "15"): (music, time11),
        ("14", "00"): (homeroom,),
    }
    actions = slots.get((hour, minute))
    if actions is None:
        # No slot matched: same fall-through behaviour as the original.
        exit()
    for action in actions:
        action()
# tuesday Schedule
def tuesday():
    """Run Tuesday's schedule (like Monday, but no music at 13:15).

    Bug fix: ``now.strftime("%H")`` is zero-padded ("08") and never
    matched single-digit comparisons like "8"; ``str(now.hour)`` is
    unpadded.  Minutes stay zero-padded via "%M" to match the table keys.
    """
    hour = str(now.hour)
    minute = now.strftime("%M")
    time1()
    # (hour, minute) -> actions for that slot, run in order.
    slots = {
        ("8", "00"): (homeroom, time2),
        ("8", "10"): (time3,),
        ("8", "30"): (time4,),
        ("9", "15"): (time5,),
        ("10", "00"): (math, time6),
        ("10", "45"): (socailStudies, time7),
        ("11", "30"): (time8,),
        ("12", "00"): (homeroom, time9),
        ("12", "30"): (english, time10),
        ("13", "15"): (time11,),
        ("14", "00"): (homeroom,),
    }
    actions = slots.get((hour, minute))
    if actions is None:
        exit()
    for action in actions:
        action()
# PASTE IN THE TIME FOR IT TO CHECK FOR THE TIME
# wednessdays Schedule
def wednessday():
    """Run Wednesday's (shortened) schedule.

    Bug fix: ``now.strftime("%H")`` is zero-padded ("08") and never
    matched single-digit comparisons like "8"; ``str(now.hour)`` is
    unpadded.  Minutes stay zero-padded via "%M" to match the table keys.
    """
    hour = str(now.hour)
    minute = now.strftime("%M")
    print(hour, minute)
    time1()
    # (hour, minute) -> actions for that slot, run in order.  The empty
    # tuple at 8:10 mirrors the original's bare ``pass`` branch: the slot
    # matches but nothing runs (and exit() is NOT called).
    slots = {
        ("8", "00"): (homeroom, time2),
        ("8", "10"): (),
        ("8", "30"): (time12,),
        ("9", "20"): (time13,),
        ("10", "05"): (math, time14),
        ("10", "50"): (socailStudies, time15),
        ("11", "35"): (time16,),
        ("12", "05"): (homeroom, time17),
        ("12", "35"): (time18, english),
        ("13", "25"): (music,),
    }
    actions = slots.get((hour, minute))
    if actions is None:
        exit()
    for action in actions:
        action()
# thursday Schedule
def thursday():
    """Run Thursday's schedule (like Monday, but computer class at 13:15).

    Bug fix: ``now.strftime("%H")`` is zero-padded ("08") and never
    matched single-digit comparisons like "8"; ``str(now.hour)`` is
    unpadded.  Minutes stay zero-padded via "%M" to match the table keys.
    """
    hour = str(now.hour)
    minute = now.strftime("%M")
    time1()
    # (hour, minute) -> actions for that slot, run in order.
    slots = {
        ("8", "00"): (homeroom, time2),
        ("8", "10"): (time3,),
        ("8", "30"): (time4,),
        ("9", "15"): (time5,),
        ("10", "00"): (math, time6),
        ("10", "45"): (socailStudies, time7),
        ("11", "30"): (time8,),
        ("12", "00"): (homeroom, time9),
        ("12", "30"): (english, time10),
        ("13", "15"): (comp, time11),
        ("14", "00"): (homeroom,),
    }
    actions = slots.get((hour, minute))
    if actions is None:
        exit()
    for action in actions:
        action()
# fridays Schedule
def friday():
    """Run Friday's schedule (math moved to 8:30, no specials at 13:15).

    Bug fix: ``now.strftime("%H")`` is zero-padded ("08") and never
    matched single-digit comparisons like "8"; ``str(now.hour)`` is
    unpadded.  Minutes stay zero-padded via "%M" to match the table keys.
    """
    hour = str(now.hour)
    minute = now.strftime("%M")
    time1()
    # (hour, minute) -> actions for that slot, run in order.
    slots = {
        ("8", "00"): (homeroom, time2),
        ("8", "10"): (time3,),
        ("8", "30"): (math, time4),
        ("9", "15"): (time5,),
        ("10", "00"): (socailStudies, time6),
        ("10", "45"): (homeroom, time7),
        ("11", "30"): (time8,),
        ("12", "00"): (homeroom, time9),
        ("12", "30"): (english, time10),
        ("13", "15"): (time11,),
        ("14", "00"): (homeroom,),
    }
    actions = slots.get((hour, minute))
    if actions is None:
        exit()
    for action in actions:
        action()
| 21.946502
| 48
| 0.48003
| 605
| 5,333
| 4.231405
| 0.143802
| 0.189844
| 0.098438
| 0.082031
| 0.775
| 0.745313
| 0.71875
| 0.624219
| 0.610938
| 0.610938
| 0
| 0.077784
| 0.363585
| 5,333
| 243
| 49
| 21.946502
| 0.676488
| 0.050628
| 0
| 0.847619
| 0
| 0
| 0.059394
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052381
| false
| 0.1
| 0.019048
| 0
| 0.071429
| 0.004762
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
6ce330b9a0364d6da547ff1f743cf2ef5ad8aa43
| 219
|
py
|
Python
|
adv_finance/sampling/__init__.py
|
cw-jang/adv_finance
|
240ce03e53fc6eead469a1ce7a220510a78c437e
|
[
"BSD-3-Clause"
] | 15
|
2019-05-20T04:28:38.000Z
|
2021-12-11T06:50:52.000Z
|
adv_finance/sampling/__init__.py
|
cw-jang/adv_finance
|
240ce03e53fc6eead469a1ce7a220510a78c437e
|
[
"BSD-3-Clause"
] | null | null | null |
adv_finance/sampling/__init__.py
|
cw-jang/adv_finance
|
240ce03e53fc6eead469a1ce7a220510a78c437e
|
[
"BSD-3-Clause"
] | 2
|
2020-05-16T13:23:30.000Z
|
2020-08-13T22:58:08.000Z
|
from adv_finance.sampling.co_events import get_num_co_events
from adv_finance.sampling.bootstrap import get_ind_matrix, get_avg_uniqueness, seq_bootstrap
from .weight import get_sample_tw, get_sample_w, get_time_decay
| 43.8
| 92
| 0.881279
| 37
| 219
| 4.756757
| 0.567568
| 0.153409
| 0.159091
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077626
| 219
| 5
| 93
| 43.8
| 0.871287
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9f536e3ea2f84d580256dcccde208ac0cf47a651
| 49,890
|
py
|
Python
|
ObitSystem/Obit/python/MosaicUtil.py
|
sarrvesh/Obit
|
e4ce6029e9beb2a8c0316ee81ea710b66b2b7986
|
[
"Linux-OpenIB"
] | 1
|
2020-09-01T05:30:45.000Z
|
2020-09-01T05:30:45.000Z
|
ObitSystem/Obit/python/MosaicUtil.py
|
sarrvesh/Obit
|
e4ce6029e9beb2a8c0316ee81ea710b66b2b7986
|
[
"Linux-OpenIB"
] | null | null | null |
ObitSystem/Obit/python/MosaicUtil.py
|
sarrvesh/Obit
|
e4ce6029e9beb2a8c0316ee81ea710b66b2b7986
|
[
"Linux-OpenIB"
] | 1
|
2021-12-22T14:07:41.000Z
|
2021-12-22T14:07:41.000Z
|
# $Id$
#-----------------------------------------------------------------------
# Copyright (C) 2004-2020
# Associated Universities, Inc. Washington DC, USA.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program; if not, write to the Free
# Software Foundation, Inc., 675 Massachusetts Ave, Cambridge,
# MA 02139, USA.
#
# Correspondence concerning this software should be addressed as follows:
# Internet email: bcotton@nrao.edu.
# Postal address: William Cotton
# National Radio Astronomy Observatory
# 520 Edgemont Road
# Charlottesville, VA 22903-2475 USA
#-----------------------------------------------------------------------
# Python utility package for Mosaicing images by weighting them together
from __future__ import absolute_import
from __future__ import print_function
import Image, ImageDesc, ImageUtil, FArray, InfoList, OErr, History, GPUFInterpolate
import os
from six.moves import range
def getMemUse():
    """Print the peak resident memory usage of this process.

    NOTE(review): the /1.0e6 scaling assumes ``ru_maxrss`` is reported in
    kilobytes (Linux convention) -- on macOS it is bytes; behaviour kept
    identical to the original.
    """
    import resource  # local import: Unix-only module, only needed here
    usage = resource.getrusage(resource.RUSAGE_SELF)
    print("Memory usage %f GByte" % (usage.ru_maxrss / 1.0e6))
    # end getMemUse
def PMakeMaster(template, size, SumWtImage, SumWt2, err):
    """
    Create a pair of images to accumulation of partial products

    Create an image to contain the Sum of the input Images times the
    weights, and another for the sum of the weights squared.
    The descriptive material is from image template

    * template   = Image with position etc, to be copied
    * size       = output image size in pixels, e.g. [200,200]
    * SumWtImage = First output image, must be defined (i.e. files named)
                   but not fully created.
    * SumWt2     = Second output image, like SumWtImage
    * err        = Python Obit Error/message stack

    Raises TypeError if template/SumWtImage/SumWt2 are not Obit Images
    or err is not an OErr.
    """
    ################################################################
    # Checks: validate argument types before touching any files.
    if not Image.PIsA(template):
        print("Actually ",template.__class__)
        raise TypeError("template MUST be a Python Obit Image")
    if not Image.PIsA(SumWtImage):
        print("Actually ",SumWtImage.__class__)
        raise TypeError("SumWtImage MUST be a Python Obit Image")
    if not Image.PIsA(SumWt2):
        print("Actually ",SumWt2.__class__)
        raise TypeError("SumWt2 MUST be a Python Obit Image")
    if not OErr.OErrIsA(err):
        raise TypeError("err MUST be an OErr")
    #
    # Get image info from template (open/read/close around the descriptor
    # fetch; the descriptor is converted to a plain Python dict).
    Image.POpen (template, 1, err)
    Image.PRead (template, err)
    desc = Image.PGetDesc(template)
    descDict = ImageDesc.PGetDict(desc)  # Python dict object
    listDict = template.Desc.List.Dict
    Image.PClose (template, err)
    #OErr.printErrMsg(err, "Error reading input image "+Image.PGetName(template))
    #
    # Create zero filled array for data; both accumulators start at zero.
    outArray = FArray.FArray("Initial array", size)
    #
    # Modify the descriptor for output.
    # NOTE(review): assumes size has at least 2 entries; only the first
    # two axes are resized below.
    naxis = size[0:3]
    # Update reference pixel; integer division keeps the pixel shift an
    # integral number so the sky position is preserved exactly.
    dim = descDict["inaxes"]
    pixOff = [naxis[0]//2-dim[0]//2, naxis[1]//2-dim[1]//2]
    crpix = descDict["crpix"]
    crpix[0] = crpix[0] + pixOff[0]
    crpix[1] = crpix[1] + pixOff[1]
    # Update size
    dim[0] = naxis[0];
    dim[1] = naxis[1]
    #print "debug dim",dim
    descDict["inaxes"] = dim
    descDict["bitpix"] = -32  # output floating
    #
    # Do SumWtImage: install the modified descriptor, then write a zero
    # plane for every plane of the template cube.
    desc = Image.PGetDesc(SumWtImage)
    InfoList.PSetDict(desc.List, listDict)  # Copy list stuff
    ImageDesc.PSetDict(desc, descDict)  # set output descriptor
    # Write output image
    Image.POpen(SumWtImage, 2, err)
    nplane = template.Desc.Dict["inaxes"][2]
    for iplane in range(1,(nplane+1)):
        plane = [iplane,1,1,1,1]
        SumWtImage.PutPlane(outArray, plane, err)
    Image.PClose(SumWtImage, err)
    #OErr.printErrMsg(err, "Error writing image for "+Image.PGetName(SumWtImage))
    #
    # Do SumWt2Image: identical initialization for the weight-squared sum.
    desc = Image.PGetDesc(SumWt2)
    InfoList.PSetDict(desc.List, listDict)  # Copy list stuff
    ImageDesc.PSetDict(desc, descDict)  # set output descriptor
    # Write output image
    Image.POpen(SumWt2, 2, err)
    for iplane in range(1,(nplane+1)):
        plane = [iplane,1,1,1,1]
        SumWt2.PutPlane(outArray, plane, err)
    Image.PClose(SumWt2, err)
    #OErr.printErrMsg(err, "Error writing image for "+Image.PGetName(SumWt2))
    # Write history - sorta: copy the template's history into SumWtImage
    # and time-stamp it (SumWt2 gets no history entry here).
    inHistory = History.History("history", template.List, err)
    outHistory = History.History("history", SumWtImage.List, err)
    # Copy History
    History.PCopy(inHistory, outHistory, err)
    outHistory.Open(History.READWRITE, err)
    outHistory.TimeStamp(" Start Obit PMakeMaster",err)
    outHistory.Close(err)
    # end PMakeMaster
def PWeightImage(inImage, factor, SumWtImage, SumWt2, err, minGain=0.1,
                 iblc=[1,1,1], itrc=[0,0,1], restart=0, hwidth=2, doGPU=False,
                 planeWt=False, OTFRA=None, OTFDec=None, inWtImage=None,
                 maxRMS=None):
    """
    Sum an image onto Weighting accumulators using PB corrections

    Calculate the weights for an image from the primary beam pattern
    And accumulate into the correct locations in the accumulation images.

    * inImage    = Image to be accumulated
    * factor     = Additional multiplication factor, normally 1.0
      <0 => use abs(factor)/RMS of each image plane
    * SumWtImage = First output image, must be defined (i.e. files named)
      but not fully created.
    * SumWt2     = Second output image, like SumWtImage
    * err        = Python Obit Error/message stack
    * minGain    = minimum allowed gain (lower values blanked).
    * iblc       = BLC in plane to start selection
    * itrc       = TRC in plane to end selection
    * restart    = restart channel no. 0-rel
    * hwidth     = half width of interpolation kernel [1-4] default 2
    * doGPU      = If true and GPU enabled, use a GPU for the interpolation.
      NB: routine will fail if GPU is not enabled.
    * planeWt    = if True generate weight image per input plane
    * OTFoffsets = if >1 then make beam using multiple pointing offsets
      "Aussie mode" OTF. must also give OTFRA, OTFDec
    * OTFRA      = Array of RA offsets in deg not corrected for Declination
    * OTFDec     = Array of Declinations offsets in deg, same size as OTFRA
    * inWtImage  = Beam (weight) image to use if not None
      MUST have the same size as inImage
    * maxRMS     = if given, the maximum RMS allowed; noisier planes dropped
    """
    ################################################################
    # Checks
    if not Image.PIsA(inImage):
        print("Actually ",inImage.__class__)
        raise TypeError("inImage MUST be a Python Obit Image")
    if not Image.PIsA(SumWtImage):
        print("Actually ",SumWtImage.__class__)
        raise TypeError("SumWtImage MUST be a Python Obit Image")
    if not Image.PIsA(SumWt2):
        print("Actually ",SumWt2.__class__)
        raise TypeError("SumWt2 MUST be a Python Obit Image")
    if not OErr.OErrIsA(err):
        raise TypeError("err MUST be an OErr")
    #
    haveWtImage = inWtImage != None  # Weight image given
    # Open accumulation files
    Image.POpen(inImage, 1, err)     # python gets confused
    Image.PClose(inImage,err)
    Image.POpen(SumWtImage, 3, err)  # 3 = READWRITE
    Image.POpen(SumWt2, 3, err)
    # Get output descriptor to see how many planes
    outDesc = Image.PGetDesc(SumWtImage)
    outDescDict = ImageDesc.PGetDict(outDesc)
    outNaxis = outDescDict["inaxes"]
    print("Accumulation naxis",outNaxis)
    # Get input descriptor to see how many planes
    inDesc = Image.PGetDesc(inImage)
    inDescDict = ImageDesc.PGetDict(inDesc)
    ndim = inDescDict["naxis"]
    inNaxis = inDescDict["inaxes"]
    finterp = None  # GPU not yet enabled
    # Range of planes (1-rel) selected by iblc/itrc on axis 3
    bpln = max (1,iblc[2]);
    epln = min (inNaxis[2], itrc[2])
    if epln<bpln:
        epln = inNaxis[2]
    npln = epln-bpln+1
    # Test if compatible
    if npln < outNaxis[2]:
        print("input has",npln,"planes selected and output has",outNaxis[2])
        raise RuntimeError("input image has too few planes ")
    if (ndim>0) and (inNaxis[2]>1):  # list of 0-rel planes to loop over
        planes = list(range(bpln+restart-1,bpln+npln-1))
    else:
        planes = [0]
    #
    # Set BLC,TRC
    inImage.List.set("BLC",[iblc[0], iblc[1],1,1,1,1,1])
    inImage.List.set("TRC",[itrc[0], itrc[1],0,0,0,0,0])
    if inWtImage:
        inWtImage.List.set("BLC",[iblc[0], iblc[1],1,1,1,1,1])
        inWtImage.List.set("TRC",[itrc[0], itrc[1],0,0,0,0,0])
        inWtImage.Open(Image.READONLY,err)  # Open/close to update
        inWtImage.Close(err)
    # Work images created lazily on the first pass
    XPixelImage = None; YPixelImage = None; InterpWtImage = None;InterpWt = None
    InterpWtWt = None
    # Loop over planes
    WtImage = None
    for iPlane in planes:
        doPlane = [iPlane+1,1,1,1,1]        # Input plane (1-rel)
        outPlane = [iPlane+2-bpln,1,1,1,1]  # output plane (1-rel)
        if not (iPlane%20):
            print("At plane", iPlane+1,os.times())
            getMemUse()
        # Make weight image if needed, first pass or planeWt
        #if WtImage == None:
        if planeWt or haveWtImage:
            # Get image (read early: plane needed for RMS and/or clone below)
            Image.PGetPlane (inImage, None, doPlane, err)
            OErr.printErrMsg(err, "Error reading image "+str(iPlane)+" for "+Image.PGetName(inImage))
            #
            # Special weighting?
            if factor<0.0:
                RMS = inImage.FArray.RMS
                fact = abs(factor)/RMS
            else:
                fact = factor
        # Initialize the weight machinery: first pass, or every pass when
        # planeWt zaps WtImage at the bottom of the loop.
        if not WtImage:
            WtImage = Image.Image("WeightImage")
            Image.PCloneMem(inImage, WtImage, err)  # memory-resident clone
            if planeWt:
                pln = [iPlane+1,1,1,1,1]            # beam per input plane
            else:
                pln = [max(1,inNaxis[2]//2),1,1,1,1]  # beam from center plane
            #print('if haveWtImage'); getMemUse()
            if haveWtImage:
                # Beam provided, extract relevant plane to a memory resident WtImage
                Image.PGetPlane (inWtImage, None, doPlane, err)
                OErr.printErrMsg(err, "Error reading wt image "+str(iPlane)+" for "+
                                 Image.PGetName(inWtImage))
                # Interpolate to WtImage
                ImageUtil.PInterpolateImage(inWtImage, WtImage, err, \
                                            inPlane=doPlane, hwidth=hwidth, finterp=finterp)
                OErr.printErrMsg(err, "Error interpolating wt plane "+str(doPlane))
            else:
                # Normal or OTF Beam?
                if (OTFRA==None):
                    ImageUtil.PPBImage(inImage, WtImage, err, minGain, outPlane=pln)
                    pass
                else:
                    ImageUtil.POTFBeam (inImage, WtImage, OTFRA, OTFDec, err, minGain, outPlane=pln)
                OErr.printErrMsg(err, "Error making weight image for "+Image.PGetName(inImage))
            # The interpolated versions (on the accumulator grid)
            #print('if not InterpWtImage:'); getMemUse()
            if not InterpWtImage:
                InterpWtImage = Image.Image("InterpWtImage")
                Image.PClone2(inImage, SumWtImage, InterpWtImage, err)
            # input x, y pixels for output (pixel mapping computed once)
            #print('if not XPixelImage'); getMemUse()
            if (not XPixelImage) or (not YPixelImage):
                XPixelImage = Image.Image("XPixelImage")
                YPixelImage = Image.Image("YPixelImage")
                Image.PClone2(inImage, SumWtImage, XPixelImage, err)
                Image.PClone2(inImage, SumWtImage, YPixelImage, err)
                ImageUtil.PGetXYPixels(WtImage, InterpWtImage, XPixelImage, YPixelImage, err)
            # Interpolated weight image
            #print('if not InterWt'); getMemUse()
            if not InterpWt:
                InterpWt = Image.Image("InterpWt")
                Image.PClone2(inImage, SumWtImage, InterpWt, err)
            # Is GPU interpolation requested? (interpolator over WtImage)
            if doGPU:
                finterp = GPUFInterpolate.PCreate("GPUinterp", WtImage.FArray,
                                                  XPixelImage.FArray, YPixelImage.FArray,
                                                  hwidth, err)
                OErr.printErrMsg(err, "Creating GPU FInterpolator")
            ImageUtil.PInterpolateImage(WtImage, InterpWt, err, \
                                        XPix=XPixelImage, YPix=YPixelImage,
                                        hwidth=hwidth, finterp=finterp)
            OErr.printErrMsg(err, "Error interpolating wt*wt "+Image.PGetName(inImage))
            # Interpolated weight image Squared
            #print('if not InterpWtWt'); getMemUse()
            if not InterpWtWt:
                InterpWtWt = Image.Image("InterpWtWt")
                Image.PClone2(inImage, SumWtImage, InterpWtWt, err)
            # Determine alignment: center pixel of input mapped into output
            inDesc = Image.PGetDesc(InterpWtImage)  # get descriptors
            inDescDict = ImageDesc.PGetDict(inDesc)
            outDesc = Image.PGetDesc(SumWtImage)
            outDescDict = ImageDesc.PGetDict(outDesc)
            naxis = inDescDict["inaxes"]  # find input center pixel in output
            pos1 = [int(naxis[0]*0.5+0.5), int(naxis[1]*0.5+0.5)]
            xpos1 = [float(pos1[0]),float(pos1[1])]
            xpos2 = ImageDesc.PCvtPixel (inDesc, xpos1, outDesc, err)
            pos2 = [int(xpos2[0]+0.5), int(xpos2[1]+0.5)]
            # Is GPU interpolation requested? (rebuild over inImage pixels)
            if doGPU:
                del finterp
                finterp = GPUFInterpolate.PCreate("GPUinterp", inImage.FArray,
                                                  XPixelImage.FArray, YPixelImage.FArray,
                                                  hwidth, err)
                OErr.printErrMsg(err, "Creating GPU FInterpolator")
        # End init wt image
        # Special weighting or editing?
        if (factor<0.0) or maxRMS:
            # Get image
            Image.PGetPlane (inImage, None, doPlane, err)
            OErr.printErrMsg(err, "Error reading image "+str(iPlane)+" for "+Image.PGetName(inImage))
            RMS = inImage.FArray.RMS
            # This plane acceptable?
            if maxRMS and ((RMS>maxRMS) or (RMS<=0.0)):
                #print 'drop plane',doPlane[0],'RMS',RMS
                continue   # drop plane: accumulators untouched for outPlane
            if (factor<0.0):
                fact = abs(factor)/RMS
            else:
                fact = factor
            if not (iPlane%20):
                print("Factor",fact, "plane",iPlane,"RMS",RMS)
        else:
            fact = factor
        #print 'do plane',doPlane[0],'RMS',RMS, 'factor',fact
        #print('before Interpolate'); getMemUse()
        # Interpolate image plane onto the accumulator grid
        ImageUtil.PInterpolateImage(inImage, InterpWtImage, err, \
                                    inPlane=doPlane, XPix=XPixelImage, YPix=YPixelImage,
                                    hwidth=hwidth, finterp=finterp)
        OErr.printErrMsg(err, "Error interpolating plane "+str(doPlane))
        #print('after Interpolate'); getMemUse()
        # Interpolated image times beam
        FArray.PMul(InterpWtImage.FArray, InterpWt.FArray, InterpWtImage.FArray)
        #
        # Read accumulation image planes
        Image.PGetPlane(SumWt2, None, outPlane, err)
        Image.PGetPlane(SumWtImage, None, outPlane, err)
        OErr.printErrMsg(err, "Error reading accumulation image ")
        #print('after read old'); getMemUse()
        #
        # Accumulate image*wt with the pos1->pos2 alignment shift
        FArray.PShiftAdd (SumWtImage.FArray, pos2, InterpWtImage.FArray, pos1, fact, SumWtImage.FArray)
        # Square weight image
        FArray.PMul(InterpWt.FArray, InterpWt.FArray, InterpWtWt.FArray)
        # Blank weight wherever image is blank or zero
        FArray.PInClip(InterpWt.FArray, -1.0e-20, 1.0e-20, FArray.PGetBlank())
        # Blank weight squared where image * Wt is blanked
        FArray.PBlank (InterpWtWt.FArray, InterpWt.FArray, InterpWtWt.FArray);
        # Accumulate Wt*Wt
        FArray.PShiftAdd (SumWt2.FArray, pos2, InterpWtWt.FArray,pos1, fact, SumWt2.FArray)
        #print('after math'); getMemUse()
        #
        # Write output
        Image.PPutPlane(SumWt2, None, outPlane, err)
        Image.PPutPlane(SumWtImage, None, outPlane, err)
        OErr.printErrMsg(err, "Error writing accumulation image ")
        # Cleanup: force weight regeneration next plane
        if planeWt:
            WtImage.Zap(err);
            del WtImage,
            WtImage = None;
    # end loop over planes
    # close output
    #Image.PClose(inImage, err)
    Image.PClose(SumWtImage, err)
    Image.PClose(SumWt2, err)
    SumWtImage.FreeBuffer(err); SumWt2.FreeBuffer(err);
    # Scratch work files are destroyed (Zap), not just closed
    XPixelImage.Zap(err); YPixelImage.Zap(err);
    InterpWtImage.Zap(err); InterpWtWt.Zap(err); InterpWt.Zap(err)
    del XPixelImage, YPixelImage, InterpWtImage, InterpWtWt, InterpWt
    if WtImage:
        WtImage.Zap(err); del WtImage; WtImage = None
    if finterp!=None:
        del finterp
    #print('end PWeightImage'); getMemUse()
# end PWeightImage
def PWeightImageEq(inImage, factor, SumWtImage, SumWt2, err, minGain=0.1,
                   iblc=[1,1,1], itrc=[0,0,1], restart=0, hwidth=2, doGPU=False,
                   planeWt=False, OTFRA=None, OTFDec=None, inWtImage=None,
                   maxRMS=None, minAccWt=0.15):
    """
    Sum an image onto Weighting accumulators using PB corrections

    Version for equatorial in/output and no relative rotation.
    Calculate the weights for an image from the primary beam pattern
    And accumulate into the correct locations in the accumulation images.

    * inImage    = Image to be accumulated
    * factor     = Additional multiplication factor, normally 1.0
      <0 => use abs(factor)/RMS of each image plane
    * SumWtImage = First output image, must be defined (i.e. files named)
      but not fully created.
    * SumWt2     = Second output image, like SumWtImage
    * err        = Python Obit Error/message stack
    * minGain    = minimum allowed gain (lower values blanked).
    * iblc       = BLC in plane to start selection
    * itrc       = TRC in plane to end selection
    * restart    = restart channel no. 0-rel
    * hwidth     = half width of interpolation kernel [1-4] default 2
    * doGPU      = If true and GPU enabled, use a GPU for the interpolation.
      NB: routine will fail if GPU is not enabled.
    * planeWt    = if True generate weight image per input plane
    * OTFoffsets = if >1 then make beam using multiple pointing offsets
      "Aussie mode" OTF. must also give OTFRA, OTFDec
    * OTFRA      = Array of RA offsets in deg not corrected for Declination
    * OTFDec     = Array of Declinations offsets in deg, same size as OTFRA
    * inWtImage  = Beam (weight) image to use if not None
      MUST have the same size as inImage
    * maxRMS     = if given, the maximum RMS allowed
    * minAccWt   = min. acceptable max. weight, otherwise ignore whole image
    """
    ################################################################
    # Checks
    if not Image.PIsA(inImage):
        print("Actually ",inImage.__class__)
        raise TypeError("inImage MUST be a Python Obit Image")
    if not Image.PIsA(SumWtImage):
        print("Actually ",SumWtImage.__class__)
        raise TypeError("SumWtImage MUST be a Python Obit Image")
    if not Image.PIsA(SumWt2):
        print("Actually ",SumWt2.__class__)
        raise TypeError("SumWt2 MUST be a Python Obit Image")
    if not OErr.OErrIsA(err):
        raise TypeError("err MUST be an OErr")
    #
    t0 = os.times()[4]  # Initial time (elapsed wall clock)
    haveWtImage = inWtImage != None  # Weight image given
    # Set BLC,TRC
    inImage.List.set("BLC",[iblc[0], iblc[1],1,1,1,1,1])
    inImage.List.set("TRC",[itrc[0], itrc[1],0,0,0,0,0])
    # Open accumulation files
    Image.POpen(inImage, Image.READONLY, err)  # python gets confused
    Image.POpen(SumWtImage, Image.READWRITE, err)
    Image.POpen(SumWt2, Image.READWRITE, err)
    # Get output descriptor to see how many planes
    outDesc = Image.PGetDesc(SumWtImage)
    outDescDict = ImageDesc.PGetDict(outDesc)
    outNaxis = outDescDict["inaxes"]
    print("Accumulation naxis",outNaxis)
    # Get input descriptor to see how many planes
    inDesc = Image.PGetDesc(inImage)
    inDescDict = ImageDesc.PGetDict(inDesc)
    ndim = inDescDict["naxis"]
    inNaxis = inDescDict["inaxes"]
    finterp = None  # GPU not yet enabled
    # Range of planes (1-rel) selected by iblc/itrc on axis 3
    bpln = max (1,iblc[2]);
    epln = min (inNaxis[2], itrc[2])
    if epln<bpln:
        epln = inNaxis[2]
    npln = epln-bpln+1
    # Test if compatible
    if npln < outNaxis[2]:
        print("input has",npln,"planes selected and output has",outNaxis[2])
        raise RuntimeError("input image has too few planes ")
    if (ndim>0) and (inNaxis[2]>1):  # list of 0-rel planes to loop over
        planes = list(range(bpln+restart-1,bpln+npln-1))
    else:
        planes = [0]
    #
    if inWtImage:
        inWtImage.List.set("BLC",[iblc[0], iblc[1],1,1,1,1,1])
        inWtImage.List.set("TRC",[itrc[0], itrc[1],0,0,0,0,0])
        inWtImage.Open(Image.READONLY,err)  # Open/close to update
        inWtImage.Close(err)
    # Work images created lazily as needed
    XPixelImage = None; YPixelImage = None; InterpWtImage = None;InterpWt = None
    InterpWtWt = None; WtImage = None
    # Loop over planes
    for iPlane in planes:
        doPlane = [iPlane+1,1,1,1,1]        # Input plane (1-rel)
        outPlane = [iPlane+2-bpln,1,1,1,1]  # output plane (1-rel)
        if not (iPlane%20):
            print("At plane", iPlane+1,'t=%6.1f sec'%(os.times()[4]-t0))
        # Get image (window re-set each pass before the read)
        inImage.List.set("BLC",[iblc[0], iblc[1],1,1,1,1,1])
        inImage.List.set("TRC",[itrc[0], itrc[1],0,0,0,0,0])
        Image.PGetPlane (inImage, None, doPlane, err)
        OErr.printErrMsg(err, "Error reading image "+str(iPlane)+" for "+Image.PGetName(inImage))
        #
        # Make weight image if needed, first pass or planeWt
        if WtImage == None:
            WtImage = Image.Image("WeightImage")
            Image.PCloneMem(inImage, WtImage, err)  # memory-resident clone
        # The interpolated versions (on the accumulator grid)
        if not InterpWtImage:
            InterpWtImage = Image.Image("InterpWtImage")
            Image.PClone2(inImage, SumWtImage, InterpWtImage, err)
        # input x, y pixels for output
        if (not XPixelImage) or (not YPixelImage):
            XPixelImage = Image.Image("XPixelImage")
            YPixelImage = Image.Image("YPixelImage")
            Image.PClone2(inImage, SumWtImage, XPixelImage, err)
            Image.PClone2(inImage, SumWtImage, YPixelImage, err)
            ImageUtil.PGetXYPixels(WtImage, InterpWtImage, XPixelImage, YPixelImage, err)
        # Special weighting?
        if factor<0.0:
            RMS = inImage.FArray.RMS
            fact = abs(factor)/RMS
        else:
            fact = factor
        if planeWt:
            pln = [iPlane+1,1,1,1,1]              # beam per input plane
        else:
            pln = [max(1,inNaxis[2]//2),1,1,1,1]  # beam from center plane
        if haveWtImage:
            # Beam provided, extract relevant plane to a memory resident WtImage
            # NOTE(review): unlike PWeightImage there is no explicit
            # PGetPlane(inWtImage) here -- presumably PInterpolateImage
            # reads inPlane itself; confirm.
            OErr.printErrMsg(err, "Error reading wt image "+str(iPlane)+" for "+
                             Image.PGetName(inWtImage))
            # Interpolate to WtImage
            ImageUtil.PInterpolateImage(inWtImage, WtImage, err, \
                                        inPlane=doPlane, hwidth=hwidth, finterp=finterp)
            OErr.printErrMsg(err, "Error interpolating wt plane "+str(doPlane))
        elif planeWt or (iPlane==0):
            # Normal or OTF Beam?
            if (OTFRA==None):
                ImageUtil.PPBImage(inImage, WtImage, err, minGain, outPlane=pln)
                pass
            else:
                ImageUtil.POTFBeam (inImage, WtImage, OTFRA, OTFDec, err, minGain, outPlane=pln)
            OErr.printErrMsg(err, "Error making weight image for "+Image.PGetName(inImage))
        # Check maximum weight for first plane: skip whole image if too low
        if iPlane==0:
            pos = [0,0]
            maxWt = FArray.PMax(WtImage.FArray,pos)
            print("Maximum weight",maxWt)
            if maxWt<minAccWt:
                print("Less than minAccWt",minAccWt,"skipping")
                break
        # Interpolated weight image
        if not InterpWt:
            InterpWt = Image.Image("InterpWt")
            Image.PClone2(inImage, SumWtImage, InterpWt, err)
        # Is GPU interpolation requested? (interpolator over WtImage)
        if doGPU:
            finterp = GPUFInterpolate.PCreate("GPUinterp", WtImage.FArray,
                                              XPixelImage.FArray, YPixelImage.FArray,
                                              hwidth, err)
            OErr.printErrMsg(err, "Creating GPU FInterpolator")
        # NOTE(review): next line is a no-op expression (leftover debug?)
        InterpWt.Desc.Dict['inaxes'], WtImage.Desc.Dict['inaxes']
        ImageUtil.PInterpolateImage(WtImage, InterpWt, err, \
                                    XPix=XPixelImage, YPix=YPixelImage,
                                    hwidth=hwidth, finterp=finterp)
        OErr.printErrMsg(err, "Error interpolating wt*wt "+Image.PGetName(inImage))
        # Interpolated weight image Squared
        if not InterpWtWt:
            InterpWtWt = Image.Image("InterpWtWt")
            Image.PClone2(inImage, SumWtImage, InterpWtWt, err)
        # Determine alignment: center pixel of input mapped into output
        inDesc = Image.PGetDesc(InterpWtImage)  # get descriptors
        inDescDict = ImageDesc.PGetDict(inDesc)
        outDesc = Image.PGetDesc(SumWtImage)
        outDescDict = ImageDesc.PGetDict(outDesc)
        naxis = inDescDict["inaxes"]  # find input center pixel in output
        pos1 = [int(naxis[0]*0.5+0.5), int(naxis[1]*0.5+0.5)]
        xpos1 = [float(pos1[0]),float(pos1[1])]
        xpos2 = ImageDesc.PCvtPixel (inDesc, xpos1, outDesc, err)
        pos2 = [int(xpos2[0]+0.5), int(xpos2[1]+0.5)]
        # Is GPU interpolation requested? (rebuild over inImage pixels)
        if doGPU:
            del finterp
            finterp = GPUFInterpolate.PCreate("GPUinterp", inImage.FArray,
                                              XPixelImage.FArray, YPixelImage.FArray,
                                              hwidth, err)
            OErr.printErrMsg(err, "Creating GPU FInterpolator")
        # End init wt image
        # Special weighting or editing?
        if (factor<0.0) or maxRMS:
            # Get image
            Image.PGetPlane (inImage, None, doPlane, err)
            OErr.printErrMsg(err, "Error reading image "+str(iPlane)+" for "+Image.PGetName(inImage))
            RMS = inImage.FArray.RMS
            # This plane acceptable?
            if maxRMS and ((RMS>maxRMS) or (RMS<=0.0)):
                #print 'drop plane',doPlane[0],'RMS',RMS
                continue   # drop plane: accumulators untouched for outPlane
            if (factor<0.0):
                fact = abs(factor)/RMS
            else:
                fact = factor
            if not (iPlane%20):
                print("Factor",fact, "plane",iPlane,"RMS",RMS)
        else:
            fact = factor
        # Interpolate image plane onto the accumulator grid
        ImageUtil.PInterpolateImage(inImage, InterpWtImage, err, \
                                    inPlane=doPlane, XPix=XPixelImage, YPix=YPixelImage,
                                    hwidth=hwidth, finterp=finterp)
        OErr.printErrMsg(err, "Error interpolating plane "+str(doPlane))
        # Interpolated image times beam
        FArray.PMul(InterpWtImage.FArray, InterpWt.FArray, InterpWtImage.FArray)
        #
        # Read accumulation image planes
        Image.PGetPlane(SumWtImage, None, outPlane, err)
        OErr.printErrMsg(err, "Error reading accumulation image ")
        #
        # Accumulate image*wt with the pos1->pos2 alignment shift
        FArray.PShiftAdd (SumWtImage.FArray, pos2, InterpWtImage.FArray, pos1, fact, SumWtImage.FArray)
        Image.PPutPlane(SumWtImage, None, outPlane, err)
        OErr.printErrMsg(err, "Error writing accumulation image ")
        # Square weight image
        Image.PGetPlane(SumWt2, None, outPlane, err)
        FArray.PMul(InterpWt.FArray, InterpWt.FArray, InterpWtWt.FArray)
        # Blank weight wherever image is blank or zero
        FArray.PInClip(InterpWt.FArray, -1.0e-20, 1.0e-20, FArray.PGetBlank())
        # Blank weight squared where image * Wt is blanked
        FArray.PBlank (InterpWtWt.FArray, InterpWt.FArray, InterpWtWt.FArray);
        # Accumulate Wt*Wt
        FArray.PShiftAdd (SumWt2.FArray, pos2, InterpWtWt.FArray,pos1, fact, SumWt2.FArray)
        #
        # Write output
        Image.PPutPlane(SumWt2, None, outPlane, err)
        OErr.printErrMsg(err, "Error writing accumulation image ")
        # Cleanup if doing a weight image per plane (continuum)
        if planeWt:
            del WtImage, XPixelImage, YPixelImage;
            WtImage = None;XPixelImage=None; YPixelImage=None;
    # end loop over planes
    # close output
    Image.PClose(inImage, err)
    Image.PClose(SumWtImage, err)
    Image.PClose(SumWt2, err)
    del XPixelImage, YPixelImage, InterpWtImage, InterpWtWt,
    if WtImage:
        del WtImage; WtImage = None
    if finterp!=None:
        del finterp
# end PWeightImageEq
def PAccumIxWt(im, wt, factor, accum, accumwt, err):
    """
    Accumulate im * wt into accum

    Used to accumulate images which don't need PB corrections
    and have a weight image.

    * im      = image to accumulate
    * wt      = weight image corresponding to accum
    * factor  = Additional multiplication factor, normally 1.0
    * accum   = image into which to accumulate im*wt
    * accumwt = image into which to accumulate wt
    * err     = Python Obit Error/message stack
    """
    ################################################################
    # Checks
    # NOTE(review): accumwt and err are not type-checked here, unlike the
    # sibling routines -- confirm whether that is intentional.
    if not Image.PIsA(im):
        print("Actually ",im.__class__)
        raise TypeError("im MUST be a Python Obit Image")
    if not Image.PIsA(wt):
        print("Actually ",wt.__class__)
        raise TypeError("wt MUST be a Python Obit Image")
    if not Image.PIsA(accum):
        print("Actually ",accum.__class__)
        raise TypeError("accum MUST be a Python Obit Image")
    #
    # Open files
    #Image.POpen(im, 1, err)
    Image.POpen(accum, Image.READWRITE, err)
    Image.POpen(accumwt, Image.READWRITE, err)
    # Get output descriptor to see how many planes
    outDesc = accum.Desc
    outDescDict = outDesc.Dict
    outNaxis = outDescDict["inaxes"]
    print("Accumulation naxis",outNaxis)
    # Get input descriptor to see how many planes
    inDesc = im.Desc
    inDescDict = inDesc.Dict
    ndim = inDescDict["naxis"]
    inNaxis = inDescDict["inaxes"]
    #print "debug input naxis is ",inNaxis
    # Test if compatible
    if inNaxis[2] < outNaxis[2]:
        print("input has",inNaxis[2],"planes and output",outNaxis[2])
        raise RuntimeError("input image has too few planes ")
    if (ndim>0) and (inNaxis[2]>0):  # list of planes to loop over (0-rel)
        planes = list(range(inNaxis[2]))
    else:
        planes = [0]
    #
    # Loop over planes
    for iPlane in planes:
        doPlane = [iPlane+1,1,1,1,1]  # 1-rel plane selector
        # Get image
        Image.PGetPlane (im, None, doPlane, err)
        #OErr.printErrMsg(err, "Error reading image for "+Image.PGetName(im))
        imArray = im.FArray
        # Get Weight
        Image.PGetPlane (wt, None, doPlane, err)
        #OErr.printErrMsg(err, "Error reading image for "+Image.PGetName(wt))
        WtArray = wt.FArray
        #
        # Make image*Wt memory resident image
        ImageWt = Image.Image("ImageXwt")
        Image.PCloneMem(im, ImageWt, err)
        ImageWtArray = ImageWt.FArray
        FArray.PMul(imArray, WtArray, ImageWtArray);
        #
        # Now the interpolated versions to be summed to the accumulation arrays
        # NOTE(review): these work images are recreated every plane; the
        # sibling routines cache them across planes.
        InterpWtImage = Image.Image("InterpWtImage")
        Image.PClone2(im, accum, InterpWtImage, err)
        ImageUtil.PInterpolateImage(ImageWt, InterpWtImage, err)
        #OErr.printErrMsg(err, "Error interpolating image "+Image.PGetName(im))
        InterpWt = Image.Image("InterpWt")
        Image.PClone2(im, accum, InterpWt, err)
        ImageUtil.PInterpolateImage(wt, InterpWt, err)
        #OErr.printErrMsg(err, "Error interpolating wt "+Image.PGetName(im))
        #
        # Read accumulation image plane
        Image.PGetPlane(accum, None, doPlane, err)
        Image.PGetPlane(accumwt, None, doPlane, err)
        #OErr.printErrMsg(err, "Error reading accumulation image ")
        #
        # Determine alignment: center pixel of input mapped into output
        inDesc = InterpWtImage.Desc
        inDescDict = inDesc.Dict
        outDesc = accum.Desc
        outDescDict = outDesc.Dict
        naxis = inDescDict["inaxes"]  # find input center pixel in output
        pos1 = [int(naxis[0]*0.5+0.5), int(naxis[1]*0.5+0.5)]
        xpos1 = [float(pos1[0]),float(pos1[1])]
        xpos2 = ImageDesc.PCvtPixel (inDesc, xpos1, outDesc, err)
        #OErr.printErrMsg(err, "Error converting pixel locations for "+Image.PGetName(im))
        pos2 = [int(xpos2[0]+0.5), int(xpos2[1]+0.5)]
        #
        # Accumulate with the pos1->pos2 alignment shift
        accumArray = accum.FArray
        InterpWtArray = InterpWtImage.FArray
        FArray.PShiftAdd (accumArray, pos2, InterpWtArray, pos1, factor, accumArray)
        accumwtArray = accumwt.FArray
        InterpWtWtArray = InterpWt.FArray
        # Blank weight wherever image is blank or zero
        FArray.PInClip(InterpWtArray, -1.0e-20, 1.0e-20, FArray.PGetBlank())
        FArray.PBlank (InterpWtWtArray, InterpWtArray, InterpWtWtArray);
        FArray.PShiftAdd (accumwtArray, pos2, InterpWtWtArray,pos1, factor, accumwtArray)
        #
        # Write output
        Image.PPutPlane(accum, None, doPlane, err)
        Image.PPutPlane(accumwt, None, doPlane, err)
        #OErr.printErrMsg(err, "Error writing accumulation image ")
        # Cleanup,
        del accumArray, accumwtArray, InterpWtArray, InterpWtWtArray, ImageWtArray, WtArray, imArray
    # end loop over planes
    # close output
    #Image.PClose(im, err)
    Image.PClose(accum, err)
    Image.PClose(accumwt, err)
# End PAccumIxWt
def PNormalizeImage(SumWtImage, SumWt2, outImage, err, minWt=0.1):
    """
    Normalize the weight accumulators into the final mosaic

    Writes outImage = SumWtImage / SumWt2, plane by plane.  Pixels whose
    summed weight (SumWt2) is below minWt are blanked.

    * SumWtImage = accumulated sum of image*weight (first accumulator)
    * SumWt2     = accumulated sum of weight squared, like SumWtImage
    * outImage   = Output image, must be defined.
    * err        = Python Obit Error/message stack
    * minWt      = minimum summed weight (lower values blanked).
    """
    ################################################################
    # Checks
    if not Image.PIsA(outImage):
        print("Actually ",outImage.__class__)
        raise TypeError("outImage MUST be a Python Obit Image")
    if not Image.PIsA(SumWtImage):
        print("Actually ",SumWtImage.__class__)
        raise TypeError("SumWtImage MUST be a Python Obit Image")
    if not Image.PIsA(SumWt2):
        print("Actually ",SumWt2.__class__)
        raise TypeError("SumWt2 MUST be a Python Obit Image")
    if not OErr.OErrIsA(err):
        raise TypeError("err MUST be an OErr")
    #
    # Open files
    Image.POpen(outImage, 2, err)    # 2 = WRITEONLY
    Image.POpen(SumWtImage, 1, err)  # 1 = READONLY
    Image.POpen(SumWt2, 1, err)
    # Get accumulator descriptor to see how many planes
    outDesc = Image.PGetDesc(SumWtImage)
    outDescDict = ImageDesc.PGetDict(outDesc)
    outNaxis = outDescDict["inaxes"]
    print("Accumulation naxis",outNaxis)
    # Get output image descriptor to see how many planes
    inDesc = Image.PGetDesc(outImage)
    # BUGFIX: was ImageDesc.PGetDict(outDesc), which re-read the accumulator
    # descriptor and made the compatibility check below always pass.
    inDescDict = ImageDesc.PGetDict(inDesc)
    ndim = inDescDict["naxis"]
    inNaxis = inDescDict["inaxes"]
    # Test if compatible
    if inNaxis[2] < outNaxis[2]:
        print("input has",inNaxis[2],"planes and output",outNaxis[2])
        raise RuntimeError("input image has too few planes ")
    if (ndim>0) and (inNaxis[2]>0):  # list of planes to loop over (0-rel)
        planes = list(range(inNaxis[2]))
    else:
        planes = [0]
    #
    # Loop over planes
    for iPlane in planes:
        doPlane = [iPlane+1,1,1,1,1]  # 1-rel plane selector
        # Get images
        Image.PGetPlane (SumWtImage, None, doPlane, err)
        Image.PGetPlane (SumWt2, None, doPlane, err)
        OErr.printErrMsg(err, "Error reading images")
        # Clip: blank weights below minWt so the division blanks them
        FArray.PClipBlank (SumWt2.FArray, minWt, 1.0e25)
        # Divide
        FArray.PDiv (SumWtImage.FArray, SumWt2.FArray, outImage.FArray)
        # Write
        Image.PPutPlane(outImage, None, doPlane, err)
        OErr.printErrMsg(err, "Error Writing normalized image ")
    # end loop over planes
    # close output
    Image.PClose(outImage, err)
    Image.PClose(SumWtImage, err)
    Image.PClose(SumWt2, err)
    # Write history - sorta
    inHistory = History.History("history", SumWtImage.List, err)
    outHistory = History.History("history", outImage.List, err)
    # Copy History
    History.PCopy(inHistory, outHistory, err)
    outHistory.Open(History.READWRITE, err)
    outHistory.TimeStamp(" Start Obit PNormalize",err)
    outHistory.Close(err)
# end PNormalizeImage
def PGetOverlap(in1Image, in2Image, err):
    """
    Determine the overlap region in in1Image with in2Image

    Returns (BLC, TRC) as 7-element 1-rel pixel lists giving the overlap
    window in in1Image; only the BLC pixel ([1,1,...],[1,1,...]) if no
    overlap.

    * in1Image = first input image
    * in2Image = second input image, need not be same grid
      but should not be rotated wrt in1Image
    * err      = Python Obit Error/message stack
    """
    ################################################################
    # Checks
    if not Image.PIsA(in1Image):
        # BUGFIX: was misspelled "inI1mage" -> NameError on this path
        print("Actually ",in1Image.__class__)
        raise TypeError("in1Image MUST be a Python Obit Image")
    if not Image.PIsA(in2Image):
        # BUGFIX: was misspelled "in21mage" -> NameError on this path
        print("Actually ",in2Image.__class__)
        raise TypeError("in2Image MUST be a Python Obit Image")
    if not OErr.OErrIsA(err):
        raise TypeError("err MUST be an OErr")
    #
    # Is there overlap?
    if ImageDesc.POverlap(in1Image.Desc, in2Image.Desc, err):
        d1 = in1Image.Desc.Dict
        nx1 = d1['inaxes'][0]
        ny1 = d1['inaxes'][1]
        d2 = in2Image.Desc.Dict
        nx2 = d2['inaxes'][0]
        ny2 = d2['inaxes'][1]
        # Convert the four corners (0,0),(0,ny),(nx,ny),(nx,0) of src
        # into dst pixel coordinates, rounded to nearest integer.
        def _corners(srcDesc, dstDesc, nx, ny):
            out = []
            for xy in ([float(0), float(0)], [float(0), float(ny)],
                       [float(nx), float(ny)], [float(nx), float(0)]):
                p = ImageDesc.PCvtPixel (srcDesc, xy, dstDesc, err)
                out.append([int(p[0]+0.5), int(p[1]+0.5)])
            return out
        # Corners of in1Image in in2Image, 1-rel, and vice versa
        corn1 = _corners(in1Image.Desc, in2Image.Desc, nx1, ny1)
        corn2 = _corners(in2Image.Desc, in1Image.Desc, nx2, ny2)
        #print "DEBUG"
        #print corn1,nx1,ny1
        #print corn2,nx2,ny2
        # 1 entirely inside 2?
        if ((corn1[0][0]>0) and (corn1[0][1]>0) and (corn1[2][0]<=nx2) and (corn1[2][1]<=ny2)):
            print("1 entirely inside 2")
            return ([1,1,1,1,1,1,1], [nx1, ny1, 0,0,0,0,0])
        # 2 entirely inside 1?
        if ((corn2[0][0]>0) and (corn2[0][1]>0) and (corn2[2][0]<=nx1) and (corn2[2][1]<=ny1)):
            blc = [corn2[0][0],corn2[0][1], 1,1,1,1,1]
            trc = [corn2[2][0],corn2[2][1], 0,0,0,0,0]
            print("2 entirely inside 1")
            return(blc,trc)
        # Corner 0 in in2?
        if ((corn1[0][0]>0) and (corn1[0][0]<=nx2) and (corn1[0][1]>0) and (corn1[0][1]<=ny2)):
            blc = [1, 1, 1, 1, 1, 1, 1]
            trc = [min(corn2[2][0],nx1), min(corn2[2][1],ny1), 0,0,0,0,0]
            print("Corner 0 in in2")
            return (blc, trc)
        # Corner 1 in in2?
        if ((corn1[1][0]>0) and (corn1[1][0]<=nx2) and (corn1[1][1]>0) and (corn1[1][1]<=ny2)):
            blc = [1, min(corn2[3][1], ny1), 1, 1, 1, 1, 1]
            trc = [min (corn2[3][0], nx1), ny1, 0,0,0,0,0]
            print("Corner 1 in in2")
            return (blc, trc)
        # Corner 2 in in2?
        if ((corn1[2][0]>0) and (corn1[2][0]<=nx2) and (corn1[2][1]>0) and (corn1[2][1]<=ny2)):
            blc = [max(1, corn2[0][0]), max(1, corn2[0][1]), 1, 1, 1, 1, 1]
            trc = [nx1, ny1, 0,0,0,0,0]
            print("Corner 2 in in2")
            return (blc, trc)
        # Corner 3 in in2?
        if ((corn1[3][0]>0) and (corn1[3][0]<=nx2) and (corn1[3][1]>0) and (corn1[3][1]<=ny2)):
            # CONSISTENCY FIX: blc had only 6 elements, all other returns use 7
            blc = [max(1,corn2[1][0]), 1, 1, 1, 1, 1, 1]
            trc = [nx1, min(corn2[1][1],ny1), 0,0,0,0,0]
            print("Corner 3 in in2")
            return (blc, trc)
        # 2 straddle bottom of 1?
        if ((corn2[0][1]<0.0) and (corn2[1][1]>0.0) and (corn2[0][0]>0.0) and (corn2[3][0]<=nx1)):
            blc = [corn2[0][0], 1,1,1,1,1,1]
            trc = [corn2[2][0], corn2[2][1],0,0,0,0,0]
            print("2 straddles bottom of 1")
            return (blc, trc)
        # 2 straddle top of 1?
        if ((corn2[0][1]<ny1) and (corn2[1][1]>ny1) and (corn2[0][0]>0.0) and (corn2[3][0]<=nx1)):
            # CONSISTENCY FIX: blc had 8 elements, trimmed to the 7-element convention
            blc = [corn2[0][0], corn2[0][1], 1,1,1,1,1]
            trc = [corn2[2][0], ny1,0,0,0,0,0]
            print("2 straddles top of 1")
            return (blc, trc)
        # 2 straddle right edge of 1?
        if ((corn2[0][0]<nx1) and (corn2[3][0]>nx1) and (corn2[0][1]>0.0) and (corn2[1][1]<=ny1)):
            # CONSISTENCY FIX: blc had 8 elements, trimmed to the 7-element convention
            blc = [corn2[0][0], corn2[0][1], 1,1,1,1,1]
            trc = [nx1, corn2[2][1], 0,0,0,0,0]
            print("2 straddles right edge of 1")
            return (blc, trc)
        # 2 straddle left edge of 1?
        if ((corn2[0][0]<0) and (corn2[3][0]>0) and (corn2[0][1]>0.0) and (corn2[1][1]<=ny1)):
            # CONSISTENCY FIX: blc had 8 elements, trimmed to the 7-element convention
            blc = [1, corn2[0][1], 1,1,1,1,1]
            trc = [corn2[2][0], corn2[2][1], 0,0,0,0,0]
            print("2 straddles left edge of 1")
            return (blc, trc)
        # Likely no overlap
        print("Confused, probably no overlap")
        print("corn1", corn1, nx1, ny1)
        print("corn2", corn2, nx2, ny2)
        return ([1,1,1,1,1,1,1], [1,1,0,0,0,0,0])
    else:
        # Default is no overlap
        print("no overlap")
        return ([1,1,1,1,1,1,1], [1,1,0,0,0,0,0])
# end PGetOverlap
def PMaskCube(inImage, Mask, outImage, err):
    """
    Blank inImage where Mask is blanked or 0.0

    Applies a single-plane mask to every plane of the input cube:
    output pixels are blanked wherever the mask is blanked or exactly zero.
    * inImage = input Image cube
    * Mask = 1 plane mask Image
    * outImage = Output Image, must be defined.
    * err = Python Obit Error/message stack
    """
    ################################################################
    # Checks: all image arguments must be Obit Images, err an OErr.
    if not Image.PIsA(outImage):
        print("Actually ",outImage.__class__)
        raise TypeError("outImage MUST be a Python Obit Image")
    if not Image.PIsA(inImage):
        print("Actually ",inImage.__class__)
        raise TypeError("inImage MUST be a Python Obit Image")
    if not Image.PIsA(Mask):
        print("Actually ",Mask.__class__)
        raise TypeError("Mask MUST be a Python Obit Image")
    if not OErr.OErrIsA(err):
        raise TypeError("err MUST be an OErr")
    # Clone output: outImage gets the geometry/header of inImage
    inImage.Clone(outImage, err)
    # Open files
    Image.POpen(outImage,Image.WRITEONLY, err)
    Image.POpen(inImage, Image.READONLY, err)
    Image.POpen(Mask, Image.READONLY, err)
    OErr.printErrMsg(err, "Error opening images")
    # how many planes?
    ndim = inImage.Desc.Dict["naxis"]
    inNaxis = inImage.Desc.Dict["inaxes"]
    # list of planes to loop over (0-rel)
    if (ndim>2) and (inNaxis[2]>0):
        planes = list(range(inNaxis[2]))
    else:
        planes = [0]
    # Read Mask plane — only the first plane of Mask is used for the whole cube
    Image.PGetPlane (Mask, None, [1,1,1,1,1], err)
    OErr.printErrMsg(err, "Error reading mask image")
    # Mask where exactly 0.0: values within +/-1e-25 become fblank,
    # so both blanked and zero mask pixels blank the output.
    FArray.PInClip(Mask.FArray, -1.0e-25, 1.0e-25, FArray.fblank)
    # Loop over planes
    for iPlane in planes:
        doPlane = [iPlane+1,1,1,1,1]
        # Get image plane
        Image.PGetPlane (inImage, None, doPlane, err)
        OErr.printErrMsg(err, "Error reading input image")
        # Make sure compatable (same array geometry as the mask)
        if not FArray.PIsCompatable(inImage.FArray, Mask.FArray):
            raise RuntimeError("inImage and Mask incompatable")
        # Mask where blanked: copy input to output, blanking where mask is blank
        FArray.PBlank (inImage.FArray, Mask.FArray, outImage.FArray)
        # Write
        Image.PPutPlane(outImage, None, doPlane, err)
        OErr.printErrMsg(err, "Error Writing blanked image ")
    # end loop over planes
    # close files
    Image.PClose(outImage, err)
    Image.PClose(inImage, err)
    Image.PClose(Mask, err)
    # Write history
    inHistory = History.History("history", inImage.List, err)
    outHistory = History.History("history", outImage.List, err)
    # Copy History
    History.PCopy(inHistory, outHistory, err)
    outHistory.Open(History.READWRITE, err)
    outHistory.TimeStamp(" Start Obit PMaskCube",err)
    outHistory.Close(err)
# end PMaskCube
def PMaskCube2(inImage, Mask, outImage, err):
    """
    Blank inImage where Mask is blanked or 0.0

    Like PMaskCube, but the mask is itself a cube: plane i of Mask is
    applied to plane i of inImage.
    * inImage = input Image cube
    * Mask = mask Image cube
    * outImage = Output Image, must be defined.
    * err = Python Obit Error/message stack
    """
    ################################################################
    # Checks: all image arguments must be Obit Images, err an OErr.
    if not Image.PIsA(outImage):
        print("Actually ",outImage.__class__)
        raise TypeError("outImage MUST be a Python Obit Image")
    if not Image.PIsA(inImage):
        print("Actually ",inImage.__class__)
        raise TypeError("inImage MUST be a Python Obit Image")
    if not Image.PIsA(Mask):
        print("Actually ",Mask.__class__)
        raise TypeError("Mask MUST be a Python Obit Image")
    if not OErr.OErrIsA(err):
        raise TypeError("err MUST be an OErr")
    # Clone output: outImage gets the geometry/header of inImage
    inImage.Clone(outImage, err)
    # Open files
    Image.POpen(outImage,Image.WRITEONLY, err)
    Image.POpen(inImage, Image.READONLY, err)
    Image.POpen(Mask, Image.READONLY, err)
    OErr.printErrMsg(err, "Error opening images")
    # how many planes?
    ndim = inImage.Desc.Dict["naxis"]
    inNaxis = inImage.Desc.Dict["inaxes"]
    # list of planes to loop over (0-rel)
    if (ndim>2) and (inNaxis[2]>0):
        planes = list(range(inNaxis[2]))
    else:
        planes = [0]
    # Loop over planes, applying the matching mask plane to each.
    for iPlane in planes:
        doPlane = [iPlane+1,1,1,1,1]
        # Read Mask plane
        Image.PGetPlane (Mask, None, doPlane, err)
        OErr.printErrMsg(err, "Error reading mask image")
        # Mask where exactly 0.0: values within +/-1e-25 become fblank,
        # so both blanked and zero mask pixels blank the output.
        FArray.PInClip(Mask.FArray, -1.0e-25, 1.0e-25, FArray.fblank)
        # Get image plane
        Image.PGetPlane (inImage, None, doPlane, err)
        OErr.printErrMsg(err, "Error reading input image")
        # Make sure compatable (same array geometry as the mask)
        if not FArray.PIsCompatable(inImage.FArray, Mask.FArray):
            raise RuntimeError("inImage and Mask incompatable")
        # Mask where blanked: copy input to output, blanking where mask is blank
        FArray.PBlank (inImage.FArray, Mask.FArray, outImage.FArray)
        # Write
        Image.PPutPlane(outImage, None, doPlane, err)
        OErr.printErrMsg(err, "Error Writing blanked image ")
    # end loop over planes
    # close files
    Image.PClose(outImage, err)
    Image.PClose(inImage, err)
    Image.PClose(Mask, err)
    # Write history
    inHistory = History.History("history", inImage.List, err)
    outHistory = History.History("history", outImage.List, err)
    # Copy History
    History.PCopy(inHistory, outHistory, err)
    outHistory.Open(History.READWRITE, err)
    # BUGFIX: the stamp previously said "PMaskCube" (copy-paste); label this
    # routine correctly so the history records which task actually ran.
    outHistory.TimeStamp(" Start Obit PMaskCube2",err)
    outHistory.Close(err)
# end PMaskCube2
| 43.80158
| 103
| 0.59088
| 6,030
| 49,890
| 4.871476
| 0.094362
| 0.011234
| 0.012562
| 0.011983
| 0.779677
| 0.762928
| 0.730315
| 0.717277
| 0.6992
| 0.680851
| 0
| 0.032554
| 0.285769
| 49,890
| 1,138
| 104
| 43.84007
| 0.791822
| 0.249629
| 0
| 0.715505
| 0
| 0
| 0.092155
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012802
| false
| 0.002845
| 0.008535
| 0
| 0.036984
| 0.122333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9f99cd89621edcf6d223c5b18f9a1efa44fee8bd
| 108
|
py
|
Python
|
ingredients_characterization/__init__.py
|
openfoodfacts/off-product-environmental-impact
|
a78958fec1a21f057339184be27cf299d4fe12d1
|
[
"MIT"
] | 3
|
2021-09-07T13:46:25.000Z
|
2022-01-12T14:38:29.000Z
|
ingredients_characterization/__init__.py
|
openfoodfacts/off-product-environmental-impact
|
a78958fec1a21f057339184be27cf299d4fe12d1
|
[
"MIT"
] | 18
|
2021-09-13T16:19:26.000Z
|
2022-03-24T16:22:38.000Z
|
ingredients_characterization/__init__.py
|
openfoodfacts/off-product-environmental-impact
|
a78958fec1a21f057339184be27cf299d4fe12d1
|
[
"MIT"
] | null | null | null |
""" Scripts used to build the background data used by the impact estimation program from external data. """
| 54
| 107
| 0.768519
| 16
| 108
| 5.1875
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 108
| 1
| 108
| 108
| 0.922222
| 0.916667
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4c992e3e2ec0c7e45bbb88386a31869ba359a574
| 14,203
|
py
|
Python
|
bb2cogs/backup/music.py
|
Team-EG/j-bot
|
2e160707d13cc4988f370713fc9f57c7cff3f5bb
|
[
"MIT"
] | 2
|
2020-07-07T01:15:15.000Z
|
2021-08-15T19:49:32.000Z
|
bb2cogs/backup/music.py
|
Team-EG/j-bot
|
2e160707d13cc4988f370713fc9f57c7cff3f5bb
|
[
"MIT"
] | null | null | null |
bb2cogs/backup/music.py
|
Team-EG/j-bot
|
2e160707d13cc4988f370713fc9f57c7cff3f5bb
|
[
"MIT"
] | 1
|
2020-04-08T04:23:10.000Z
|
2020-04-08T04:23:10.000Z
|
import json
import discord
import os
import youtube_dl
import shutil
import time
from threading import Thread
from discord.ext import commands
from discord.utils import get
from discord import FFmpegPCMAudio
class Music(commands.Cog):
    """Guild music player cog: downloads tracks with youtube_dl and plays
    them through FFmpeg.

    Per-guild state lives on disk under ``music/<guild_id>/``:
    ``song.mp3`` is the currently playing track, ``Queue/`` holds queued
    mp3 files named by timestamp, and ``queue.json`` maps those names to
    titles (plus a ``playing`` key for the current track).
    """

    def __init__(self, client):
        self.client = client
        print(f'{__name__} 로드 완료!')  # "loaded" notice

    @commands.command(pass_context=True)
    async def 재생(self, ctx, *, url: str):  # "play"
        global title
        guild_id = ctx.message.guild.id
        voice = get(self.client.voice_clients, guild=ctx.guild)
        if 'list=' in url:
            # Playlists are not supported.
            await ctx.send('이 링크는 재생목록이네요... 재생이 취소되었습니다.')
            return

        def check_queues():
            # Background worker (runs in a Thread): polls the on-disk queue
            # and advances to the next track while the bot stays connected.
            if voice and voice.is_connected():
                time.sleep(1)
                song_exist = os.path.isfile(f"music/{guild_id}/song.mp3")

                def clear_playing():
                    # Drop the 'playing' entry from queue.json (best effort).
                    with open(f"music/{guild_id}/queue.json", 'r') as f:
                        queue_data = json.load(f)
                    queue_data.pop('playing', None)
                    with open(f"music/{guild_id}/queue.json", 'w') as f:
                        json.dump(queue_data, f, indent=4)

                try:
                    path = f"./music/{guild_id}/Queue"
                    file_list = os.listdir(path)
                    file_list_mp3 = [file for file in file_list if file.endswith(".mp3")]
                    # BUGFIX: the original tested `x is None or False`, which is
                    # never true for os.listdir() results, so these queue-drain
                    # branches were dead code. Test emptiness instead.
                    if not file_list_mp3 and not song_exist:
                        # Nothing queued and nothing playing: clear state.
                        clear_playing()
                        return
                    elif not file_list_mp3:
                        # Queue drained; remove the finished track.
                        try:
                            os.remove(f"music/{guild_id}/song.mp3")
                            clear_playing()
                            return
                        except Exception:
                            pass
                    else:
                        try:
                            # Oldest queued file first (timestamp names sort).
                            result = min(file_list_mp3)
                            queue_exists = os.path.isfile(f"music/{guild_id}/Queue/{result}")
                            if queue_exists:
                                try:
                                    os.remove(f"music/{guild_id}/song.mp3")
                                    os.rename(f"music/{guild_id}/Queue/{result}", f"music/{guild_id}/song.mp3")
                                    voice.play(discord.FFmpegPCMAudio(f"music/{guild_id}/song.mp3"))
                                    voice.source = discord.PCMVolumeTransformer(voice.source)
                                    voice.source.volume = 1
                                    # Promote the queued title to 'playing'.
                                    with open(f"music/{guild_id}/queue.json", 'r') as f:
                                        queue_data = json.load(f)
                                    queue_data['playing'] = queue_data[str(result[:-4])]
                                    del queue_data[str(result[:-4])]
                                    with open(f"music/{guild_id}/queue.json", 'w') as f:
                                        json.dump(queue_data, f, indent=4)
                                    check_queues()
                                except Exception:
                                    pass  # best effort; retry on next poll
                        except Exception:
                            pass
                except Exception:
                    pass
                check_queues()  # keep polling while connected
            else:
                # No longer connected: clear the 'playing' marker and stop.
                with open(f"music/{guild_id}/queue.json", 'r') as f:
                    queue_data = json.load(f)
                queue_data.pop('playing', None)
                with open(f"music/{guild_id}/queue.json", 'w') as f:
                    json.dump(queue_data, f, indent=4)
                return

        song_exist = os.path.isfile(f"music/{guild_id}/song.mp3")
        try:
            if song_exist:
                os.remove(f"music/{guild_id}/song.mp3")
        except Exception:
            # File locked by the player: a track is already playing.
            await ctx.send('지금 음악을 재생하는 중이에요. "대기" 명령어를 대신 사용해주세요.')
            return
        try:
            os.mkdir(f"./music/{guild_id}/")
        except Exception:
            pass  # already exists
        try:
            os.mkdir(f"./music/{guild_id}/Queue/")
        except Exception:
            pass  # already exists
        await ctx.send('잠시만 기다려주세요, 준비할께요. (해당 기능은 베타 기능입니다. 봇이 의도대로 작동하지 않을수도 있습니다.)')
        await self.client.change_presence(status=discord.Status.dnd,
                                          activity=discord.Game('저 지금 바빠요! (뮤직 다운로드중)'))
        # Remove any stale download in the working directory.
        song_exist = os.path.isfile("song.mp3")
        try:
            if song_exist:
                os.remove("song.mp3")
        except Exception:
            pass
        ydl_opts = {
            'format': 'bestaudio/best',
            'quiet': True,
            'postprocessors': [{
                'key': 'FFmpegExtractAudio',
                'preferredcodec': 'mp3',
                'preferredquality': '320',
            }],
        }
        if url.startswith("https://") or url.startswith("youtube.com") or url.startswith("youtu.be"):
            try:
                with youtube_dl.YoutubeDL(ydl_opts) as ydl:
                    ydl.download([url])
                    title = ydl.extract_info(url, download=False).get('title', None)
            except Exception as ex:
                await ctx.send(f"음악 다운로드중 오류가 발생했습니다. - {ex}")
        else:
            # Treat the argument as a search query.
            # BUGFIX: the original did `" ".join(url)`, which inserts a space
            # between every character of the query string.
            with youtube_dl.YoutubeDL(ydl_opts) as ydl:
                ydl.download([f"ytsearch1:{url}"])
            title = url
        # youtube_dl writes the mp3 into the CWD; normalize its name.
        for file in os.listdir("./"):
            if file.endswith(".mp3"):
                os.rename(file, "song.mp3")
        shutil.move("song.mp3", f"music/{guild_id}")
        voice.play(discord.FFmpegPCMAudio(f"music/{guild_id}/song.mp3"))
        voice.source = discord.PCMVolumeTransformer(voice.source)
        voice.source.volume = 1
        await ctx.send(f'"{title}"을(를) 재생할께요!')
        with open('botsetup.json', 'r') as f:
            data = json.load(f)
        prefix = data['default prefix']
        await self.client.change_presence(status=discord.Status.online,
                                          activity=discord.Game(f'"{prefix}도움"이라고 말해보세요!'))
        # Start the queue-advancing worker for this playback session.
        background_thread = Thread(target=check_queues)
        background_thread.start()
        # Seed this guild's queue.json from the template and record the title.
        shutil.copy('music/queue.json', f"music/{guild_id}/queue.json")
        with open(f"music/{guild_id}/queue.json", 'r') as f:
            queue_data = json.load(f)
        queue_data['playing'] = title
        with open(f"music/{guild_id}/queue.json", 'w') as f:
            json.dump(queue_data, f, indent=4)

    @commands.command(pass_context=True)
    async def 들어와(self, ctx):  # "join"
        channel = ctx.message.author.voice.channel
        voice = get(self.client.voice_clients, guild=ctx.guild)
        if voice and voice.is_connected():
            await voice.move_to(channel)
        else:
            # Connect/disconnect once first — presumably a workaround for
            # stale voice sessions; then connect for real. TODO confirm.
            voice = await channel.connect()
            await voice.disconnect()
            if voice and voice.is_connected():
                await voice.move_to(channel)
            else:
                voice = await channel.connect()
        await ctx.send("뮤직 채널에 들어왔어요! 어떤 음악을 재생할까요?")

    @commands.command(pass_context=True)
    async def 나가(self, ctx):  # "leave"
        channel = ctx.message.author.voice.channel
        guild_id = ctx.message.guild.id
        voice = get(self.client.voice_clients, guild=ctx.guild)
        # Leaving wipes this guild's entire on-disk music state.
        queue_infile = os.path.isdir(f"./music/{guild_id}")
        if queue_infile is True:
            shutil.rmtree(f"./music/{guild_id}")
        if voice and voice.is_connected():
            await voice.disconnect()
            await ctx.send("네, 지금 나갈께요.")
        else:
            await ctx.send("저 아직 뮤직 채널에 들어오지도 않았어요...")

    @commands.command(pass_context=True)
    async def 일시정지(self, ctx):  # "pause"
        voice = get(self.client.voice_clients, guild=ctx.guild)
        if voice and voice.is_playing():
            voice.pause()
            await ctx.send("음악을 잠깐 멈췄어요.")
        else:
            await ctx.send("지금 아무 음악도 재생하고 있지 않아요.")

    @commands.command(pass_context=True)
    async def 계속재생(self, ctx):  # "resume"
        voice = get(self.client.voice_clients, guild=ctx.guild)
        if voice and voice.is_paused():
            voice.resume()
            await ctx.send("음악을 계속 재생할께요.")
        else:
            await ctx.send("지금 아무 음악도 재생하고 있지 않거나 이미 재생중이에요.")

    @commands.command(pass_context=True)
    async def 멈춰(self, ctx):  # "stop"
        guild_id = ctx.message.guild.id
        voice = get(self.client.voice_clients, guild=ctx.guild)
        # Stopping also deletes every queued file.
        queue_infile = os.path.isdir(f"./music/{guild_id}/Queue")
        if queue_infile is True:
            shutil.rmtree(f"./music/{guild_id}/Queue")
        if voice and voice.is_playing():
            voice.stop()
            await ctx.send("음악을 그만 재생할께요. 모든 대기 리스트가 삭제되었습니다.")
        else:
            await ctx.send("지금 아무 음악도 재생하고 있지 않아요.")

    @commands.command(pass_context=True)
    async def 스킵(self, ctx):  # "skip"
        voice = get(self.client.voice_clients, guild=ctx.guild)
        if voice and voice.is_playing():
            # Stopping lets the background worker pick up the next track.
            voice.stop()
            await ctx.send("이 음악이 마음에 안드세요? 그러면 스킵할께요.")
        else:
            await ctx.send("지금 아무 음악도 재생하고 있지 않아요.")

    @commands.command(pass_context=True)
    async def 다음곡(self, ctx):  # "next track"
        guild_id = ctx.message.guild.id
        voice = get(self.client.voice_clients, guild=ctx.guild)
        queue_infile = os.path.isdir(f"./music/{guild_id}/Queue")
        if queue_infile is False:
            await ctx.send("대기 목록이 비어있습니다.")
            return
        else:
            path = f"./music/{guild_id}/Queue"
            file_list = os.listdir(path)
            file_list_mp3 = [file for file in file_list if file.endswith(".mp3")]
            # Oldest queued file first (timestamp names sort).
            result = min(file_list_mp3)
            song_exist = os.path.isfile(f"music/{guild_id}/song.mp3")
            try:
                if song_exist:
                    os.remove(f"music/{guild_id}/song.mp3")
                os.rename(f"music/{guild_id}/Queue/{result}", f"music/{guild_id}/song.mp3")
                voice.play(discord.FFmpegPCMAudio(f"music/{guild_id}/song.mp3"))
                voice.source = discord.PCMVolumeTransformer(voice.source)
                voice.source.volume = 1
                await ctx.send("다음곡을 재생할께요!")
            except Exception as ex:
                await ctx.send(f'스킵 명령어를 대신 사용해주세요. 오류 - {ex}')
                return

    @commands.command(pass_context=True)
    async def 대기(self, ctx, *, url: str):  # "queue a track"
        global title
        if 'list=' in url:
            # Playlists are not supported.
            await ctx.send('이 링크는 재생목록이네요... 대기 리스트 추가가 취소되었습니다.')
            return
        await ctx.send('잠시만 기다려주세요...')
        await self.client.change_presence(status=discord.Status.dnd,
                                          activity=discord.Game('저 지금 바빠요! (뮤직 다운로드중)'))
        guild_id = ctx.message.guild.id
        ydl_opts = {
            'format': 'bestaudio/best',
            'quiet': True,
            'postprocessors': [{
                'key': 'FFmpegExtractAudio',
                'preferredcodec': 'mp3',
                'preferredquality': '320',
            }],
        }
        if url.startswith("https://") or url.startswith("youtube.com") or url.startswith("youtu.be"):
            try:
                with youtube_dl.YoutubeDL(ydl_opts) as ydl:
                    ydl.download([url])
                    title = ydl.extract_info(url, download=False).get('title', None)
            except Exception as ex:
                await ctx.send(f"음악 다운로드중 오류가 발생했습니다. - {ex}")
        else:
            # BUGFIX: was `" ".join(url)` — spaced out every character of
            # the search query (see 재생 for the same fix).
            with youtube_dl.YoutubeDL(ydl_opts) as ydl:
                ydl.download([f"ytsearch1:{url}"])
            title = url
        # Move the download into this guild's queue, named by timestamp
        # so min() picks the oldest entry first.
        for file in os.listdir("./"):
            if file.endswith(".mp3"):
                currenttime = time.strftime("%Y%m%d%H%M%S")
                os.rename(file, f"{currenttime}.mp3")
        shutil.move(f"{currenttime}.mp3", f"music/{guild_id}/Queue")
        await ctx.send(f"{title}을(를) 대기 리스트에 넣었어요!")
        with open('botsetup.json', 'r') as f:
            data = json.load(f)
        prefix = data['default prefix']
        await self.client.change_presence(status=discord.Status.online,
                                          activity=discord.Game(f'"{prefix}도움"이라고 말해보세요!'))
        # Record the queued title under its timestamp key.
        with open(f"music/{guild_id}/queue.json", 'r') as f:
            queue_data = json.load(f)
        queue_data[str(currenttime)] = title
        with open(f"music/{guild_id}/queue.json", 'w') as f:
            json.dump(queue_data, f, indent=4)

    @commands.command()
    async def 대기리스트(self, ctx):  # "show queue"
        guild_id = ctx.message.guild.id
        with open(f"music/{guild_id}/queue.json", 'r') as f:
            queue_data = json.load(f)
        try:
            # BUGFIX: the 'playing' lookup now sits inside the try so a
            # missing key reports "no queued music" instead of raising an
            # uncaught KeyError (the except below never fired before).
            playing = queue_data['playing']
            embed = discord.Embed(title='대기 리스트', description=f'{ctx.guild.name}', colour=discord.Color.red())
            embed.add_field(name='재생중', value=f'{playing}', inline=False)
            for key in queue_data:
                if key == 'playing':
                    continue
                embed.add_field(name='대기중', value=f'{queue_data[key]}', inline=False)
            await ctx.send(embed=embed)
        except KeyError:
            await ctx.send('대기중인 음악이 없습니다.')
def setup(client):
    """Extension entry point used by discord.py's loader: register the cog."""
    cog = Music(client)
    client.add_cog(cog)
| 38.806011
| 112
| 0.497993
| 1,630
| 14,203
| 4.23681
| 0.157055
| 0.052708
| 0.063713
| 0.075297
| 0.78497
| 0.761512
| 0.738633
| 0.686504
| 0.660006
| 0.638141
| 0
| 0.005726
| 0.3852
| 14,203
| 365
| 113
| 38.912329
| 0.785158
| 0.003028
| 0
| 0.716172
| 0
| 0
| 0.166606
| 0.071341
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009901
| false
| 0.056106
| 0.033003
| 0
| 0.072607
| 0.0033
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
4c9ddf2de5e2404853123535bfde66095e7a1d1b
| 387
|
py
|
Python
|
hw0/install_check.py
|
amypitts01/data440
|
4900a0a78625614d245b2fd064c6ca74e13049a3
|
[
"MIT"
] | 1
|
2020-03-02T12:45:00.000Z
|
2020-03-02T12:45:00.000Z
|
hw0/install_check.py
|
amypitts01/data440
|
4900a0a78625614d245b2fd064c6ca74e13049a3
|
[
"MIT"
] | null | null | null |
hw0/install_check.py
|
amypitts01/data440
|
4900a0a78625614d245b2fd064c6ca74e13049a3
|
[
"MIT"
] | null | null | null |
"""Environment sanity check: print the interpreter and scientific-stack versions."""
import sys

import numpy
import scipy
import sklearn
import matplotlib
import pandas

# Interpreter version first, then each library's own version string,
# in the same order as the imports above.
print(sys.version)
for _lib in (numpy, scipy, sklearn, matplotlib, pandas):
    print(_lib.__version__)
| 29.769231
| 56
| 0.819121
| 51
| 387
| 5.823529
| 0.235294
| 0.323232
| 0.131313
| 0.195286
| 0.188552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108527
| 387
| 13
| 57
| 29.769231
| 0.86087
| 0.343669
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
4cb82a23a7e0c2b20aa9bdae370feb2b0cf356fb
| 201
|
py
|
Python
|
teste.py
|
EduardoFockinkSilva/python-exercises
|
e3fb6ee190f639c9e4e8b2521a8d6605c4f42cbb
|
[
"MIT"
] | null | null | null |
teste.py
|
EduardoFockinkSilva/python-exercises
|
e3fb6ee190f639c9e4e8b2521a8d6605c4f42cbb
|
[
"MIT"
] | null | null | null |
teste.py
|
EduardoFockinkSilva/python-exercises
|
e3fb6ee190f639c9e4e8b2521a8d6605c4f42cbb
|
[
"MIT"
] | null | null | null |
# Simple smoke-test script: print a banner, read an integer, print (num * 5) % 3.
print('==================================')
print(' this is just a test ')
print('==================================')
num = int(input('insert a number: '))
# NOTE(review): raises ValueError on non-numeric input — no validation here.
print((num * 5) % 3)
| 18.272727
| 43
| 0.338308
| 19
| 201
| 3.578947
| 0.684211
| 0.264706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01227
| 0.189055
| 201
| 10
| 44
| 20.1
| 0.404908
| 0.019901
| 0
| 0.4
| 0
| 0
| 0.610256
| 0.348718
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.8
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
4cf8269c10fc7edd7f2eb16be29f68fc9fe8f07d
| 264
|
py
|
Python
|
bspider/master/dao/__init__.py
|
littlebai3618/bspider
|
ff4d003cd0825247db4efe62db95f9245c0a303c
|
[
"BSD-3-Clause"
] | 3
|
2020-06-19T03:52:29.000Z
|
2021-05-21T05:50:46.000Z
|
bspider/master/dao/__init__.py
|
littlebai3618/bspider
|
ff4d003cd0825247db4efe62db95f9245c0a303c
|
[
"BSD-3-Clause"
] | 2
|
2021-03-31T19:39:03.000Z
|
2021-05-12T02:10:26.000Z
|
bspider/master/dao/__init__.py
|
littlebai3618/bspider
|
ff4d003cd0825247db4efe62db95f9245c0a303c
|
[
"BSD-3-Clause"
] | null | null | null |
from .chart_dao import ChartDao
from .code_dao import CodeDao
from .cron_dao import CronDao
from .data_source_dao import DataSourceDao
from .node_dao import NodeDao
from .project_dao import ProjectDao
from .tools_dao import ToolsDao
from .user_dao import UserDao
| 26.4
| 42
| 0.844697
| 41
| 264
| 5.219512
| 0.487805
| 0.336449
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 264
| 9
| 43
| 29.333333
| 0.926407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4cfd46e86fa4211c6094e32b6dbf236a449a9bde
| 49
|
py
|
Python
|
radforest/geometry/cylinder.py
|
njs2168/radforest
|
103a03c7ece0981893216c17619976c76100625d
|
[
"MIT"
] | null | null | null |
radforest/geometry/cylinder.py
|
njs2168/radforest
|
103a03c7ece0981893216c17619976c76100625d
|
[
"MIT"
] | null | null | null |
radforest/geometry/cylinder.py
|
njs2168/radforest
|
103a03c7ece0981893216c17619976c76100625d
|
[
"MIT"
] | null | null | null |
"""Object representing a cylinder in Radiance."""
| 49
| 49
| 0.755102
| 6
| 49
| 6.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102041
| 49
| 1
| 49
| 49
| 0.840909
| 0.877551
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
98055f670634b6ebe600df92f9a9a6ffbbe18413
| 36
|
py
|
Python
|
ifaces/management/__init__.py
|
RossBrunton/RThing
|
9c9b352b93eab4ff1f841cbca9ea3c1a0d6f6929
|
[
"MIT"
] | null | null | null |
ifaces/management/__init__.py
|
RossBrunton/RThing
|
9c9b352b93eab4ff1f841cbca9ea3c1a0d6f6929
|
[
"MIT"
] | null | null | null |
ifaces/management/__init__.py
|
RossBrunton/RThing
|
9c9b352b93eab4ff1f841cbca9ea3c1a0d6f6929
|
[
"MIT"
] | null | null | null |
"""Management modules for ifaces"""
| 18
| 35
| 0.722222
| 4
| 36
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 36
| 1
| 36
| 36
| 0.8125
| 0.805556
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
980cb88101232ce7453bd744e8e51fd6169fb5d9
| 44,580
|
py
|
Python
|
web/transiq/restapi/views/owner.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | null | null | null |
web/transiq/restapi/views/owner.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | 14
|
2020-06-05T23:06:45.000Z
|
2022-03-12T00:00:18.000Z
|
web/transiq/restapi/views/owner.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | null | null | null |
from django.db.models import Q
from django.db.models.expressions import RawSQL
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_page
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import viewsets, status, generics, filters
from rest_framework.pagination import PageNumberPagination
from rest_framework.renderers import TemplateHTMLRenderer, JSONRenderer
from rest_framework.response import Response
from rest_framework.views import APIView
from api import s3util
from api.models import S3Upload
from api.utils import int_or_none
from authentication.models import Profile
from broker.models import Broker
from driver.models import Driver
from fileupload.models import VehicleFile, OwnerFile, DriverFile
from fileupload.views import get_new_serial
from fms.views import validate_url
from owner.models import Vehicle, Owner, Route, FuelCard, FuelCardTransaction, VehicleSummary
from owner.vehicle_util import compare_format
from restapi.filter.owner import OwnerFilter, OwnerVehicleFilter, FuelCardFilter
from restapi.helper_api import generate_username, error_response, success_response, is_fms_user, is_sme_user
from restapi.pagination import CustomeDatatablesPaginator
from restapi.parser.updatawebdata import parse_update_owner_data
from restapi.search import CustomSearch
from restapi.serializers.authentication import UserSerializer, ProfileSerializer
from restapi.serializers.driver import DriverSerializer
from restapi.serializers.owner import VehicleSerializer, OwnerSerializer, RouteSerializer, FuelCardSerializer, \
FuelCardTransactionSerializer, Select2FuelCardSerializer, FMSVehicleSerializer, VehicleSummarySerializer
from restapi.service.fms import parse_vehicle_docs
from restapi.utils import get_or_none
from sme.models import Sme
class RouteViewSet(viewsets.ViewSet):
    """
    API for creation, updation, retrieval of Route
    """

    @staticmethod
    def _envelope(code, state, message, payload):
        # Standard response body shared by every Route endpoint.
        return {
            "status_code": code,
            "status": state,
            "msg": message,
            "data": payload
        }

    def create(self, request, pk=None):
        # Stamp audit fields with the requesting user before validation.
        request.data["created_by"] = self.request.user.username
        request.data["changed_by"] = self.request.user.username
        serializer = RouteSerializer(data=request.data)
        if not serializer.is_valid():
            body = self._envelope(status.HTTP_400_BAD_REQUEST, "Error",
                                  "Route not Created", serializer.errors)
            return Response(data=body, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        body = self._envelope(status.HTTP_201_CREATED, "Success",
                              "Route Created", serializer.data)
        return Response(data=body, status=status.HTTP_201_CREATED)

    def update(self, request, pk=None):
        # Full update: every serializer field must be supplied.
        request.data["changed_by"] = self.request.user.username
        route = get_or_none(Route, id=pk)
        if not isinstance(route, Route):
            return Response({"error": "Route does not exist"}, status=status.HTTP_404_NOT_FOUND)
        serializer = RouteSerializer(instance=route, data=request.data)
        if not serializer.is_valid():
            body = self._envelope(status.HTTP_400_BAD_REQUEST, "Error",
                                  "Route not Updated", serializer.errors)
            return Response(data=body, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        body = self._envelope(status.HTTP_202_ACCEPTED, "Success",
                              "Route Updated", serializer.data)
        return Response(data=body, status=status.HTTP_202_ACCEPTED)

    def partial_update(self, request, pk=None):
        # Partial update: only the supplied fields are validated/changed.
        request.data["changed_by"] = self.request.user.username
        route = get_or_none(Route, id=pk)
        if not isinstance(route, Route):
            return Response({"error": "Route does not exist"}, status=status.HTTP_404_NOT_FOUND)
        serializer = RouteSerializer(instance=route, data=request.data, partial=True)
        if not serializer.is_valid():
            body = self._envelope(status.HTTP_400_BAD_REQUEST, "Error",
                                  "Route not Updated", serializer.errors)
            return Response(data=body, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        body = self._envelope(status.HTTP_202_ACCEPTED, "Success",
                              "Route Updated", serializer.data)
        return Response(data=body, status=status.HTTP_202_ACCEPTED)

    def retrieve(self, request, pk=None):
        # Returns the bare serialized route (no envelope), 404 if unknown.
        route = get_or_none(Route, id=pk)
        if not isinstance(route, Route):
            return Response({"error": "Route does not exist"}, status=status.HTTP_404_NOT_FOUND)
        return Response(RouteSerializer(route).data, status=status.HTTP_200_OK)
class OwnerListView(generics.ListAPIView):
    """Searchable, filterable, paginated list of active (non-deleted) owners
    that have an associated profile."""
    queryset = Owner.objects.exclude(name__profile=None).exclude(deleted=True).order_by('-id')
    serializer_class = OwnerSerializer
    pagination_class = CustomeDatatablesPaginator
    filter_backends = (CustomSearch, filters.OrderingFilter, DjangoFilterBackend)
    filter_class = OwnerFilter
    search_fields = (
        'id', 'name__username', 'name__profile__phone', 'name__profile__name', 'owner_address', 'city__name',
        'vehicle_owner__vehicle_number', 'route__destination__name', 'pan', 'created_by__username', 'created_on',
        'owner_files__serial', 'owner_files__verified'
    )

    def list(self, request, *args, **kwargs):
        """Return the filtered owner list wrapped in the datatables envelope."""
        queryset = self.filter_queryset(self.get_queryset())
        # BUGFIX: msg previously said "Broker List" — copy-pasted from the
        # broker view; this endpoint lists owners.
        # NOTE(review): "Successfull" spelling kept intentionally — existing
        # clients may match on the exact status string.
        data = {"status": "Successfull", "status_code": status.HTTP_200_OK, "msg": "Owner List"}
        page = self.paginate_queryset(queryset)
        if page is not None:
            serializer = self.get_serializer(page, many=True)
            data["data"] = serializer.data
            return self.get_paginated_response(data)
        serializer = self.get_serializer(queryset, many=True)
        data["data"] = serializer.data
        return Response(data)
class OwnerCreatePageView(APIView):
    """Serve the owner-registration HTML page used by the team UI."""
    # HTML template renderer first (default); JSON kept for API clients.
    renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
    template_name = 'team/registrations/register_owner.html'

    def get(self, request):
        # The template needs no context data; just render the form page.
        return Response(status=status.HTTP_200_OK)
class OwnerViewSet(viewsets.ViewSet):
"""
API for creation, updation, retrieval of Owner Vehicle
"""
renderer_classes = (JSONRenderer, TemplateHTMLRenderer)
def create(self, request):
if "created_by" not in request.data:
request.data["created_by"] = self.request.user.username
if "changed_by" not in request.data:
request.data["changed_by"] = self.request.user.username
try:
profile_data = {
"name": request.data["name"],
"phone": request.data["phone"],
"email": request.data.get("email",None),
"contact_person_name": request.data.get("contact_person_name", None),
"contact_person_phone": request.data.get("contact_person_phone", None),
"comment": request.data.get("remarks", None)
}
except KeyError:
return Response({"error": "Key Error"}, status=status.HTTP_400_BAD_REQUEST)
username = generate_username(profile_data["name"], profile_data["phone"])
password = "X@RWERT21RW!@#"
user_serializer = UserSerializer(data={"username": username, "password": password})
if user_serializer.is_valid():
user = user_serializer.save()
profile_data["user"] = user.id
profile_serializer = ProfileSerializer(data=profile_data)
if profile_serializer.is_valid():
profile_serializer.save()
request.data["name"] = user.id
request.data["owner_phone"] = profile_data["phone"]
owner_serializer = OwnerSerializer(data=request.data)
if owner_serializer.is_valid():
owner = owner_serializer.save()
Vehicle.objects.filter(id__in=request.data.get("vehicles_detail", [])).update(owner=owner)
response = {
"status_code": status.HTTP_201_CREATED,
"status": "Success",
"msg": "Owner Created",
"data": owner_serializer.data
}
return Response(data=response, status=status.HTTP_201_CREATED)
user.delete()
response = {
"status_code": status.HTTP_400_BAD_REQUEST,
"status": "Error",
"msg": "Owner not Created",
"data": owner_serializer.errors
}
return Response(data=response, status=status.HTTP_400_BAD_REQUEST)
user.delete()
return Response(profile_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
return Response(user_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def update(self, request, pk=None):
request.data["changed_by"] = self.request.user.username
owner = get_or_none(Owner, id=pk)
if not isinstance(owner, Owner):
return Response({"error": "Owner does not exist"}, status=status.HTTP_404_NOT_FOUND)
owner_serializer = OwnerSerializer(instance=owner, data=request.data)
if owner_serializer.is_valid():
owner_serializer.save()
response = {
"status_code": status.HTTP_202_ACCEPTED,
"status": "Success",
"msg": "Owner Updated",
"data": owner_serializer.data
}
return Response(data=response, status=status.HTTP_202_ACCEPTED)
response = {
"status_code": status.HTTP_400_BAD_REQUEST,
"status": "Error",
"msg": "Owner not Updated",
"data": owner_serializer.errors
}
return Response(data=response, status=status.HTTP_400_BAD_REQUEST)
def partial_update(self, request, pk=None):
    """Partially update an Owner, its linked Profile, and its vehicle assignments.

    parse_update_owner_data() is expected to split request.data into owner
    fields plus 'profile_data' and 'vehicles_detail' keys. Vehicles listed in
    'vehicles_detail' are assigned to this owner; previously owned vehicles
    not listed are released (owner set to None).
    """
    owner = get_or_none(Owner, id=pk)
    if not isinstance(owner, Owner):
        return error_response(status=status.HTTP_400_BAD_REQUEST, msg='Owner Doesnot Exists', data={})
    # The Owner's auth user (owner.name) is the link to its Profile row.
    profile = get_or_none(Profile, user=owner.name)
    parsed_data = parse_update_owner_data(request.data, id=pk)
    parsed_data["changed_by"] = self.request.user.username
    if isinstance(profile, Profile) and parsed_data['profile_data']:
        profile_serializer = ProfileSerializer(instance=profile, data=parsed_data['profile_data'], partial=True)
        if profile_serializer.is_valid():
            profile_serializer.save()
        else:
            return error_response(
                status=status.HTTP_400_BAD_REQUEST, msg='Invalid owner data', data=profile_serializer.errors)
    else:
        # NOTE(review): this branch also fires when the Profile exists but no
        # profile_data was supplied — confirm rejecting such requests is intended.
        return error_response(status=status.HTTP_400_BAD_REQUEST, msg='Profile Doesnot Exists', data={})
    # Reassign the listed vehicles to this owner and release the rest.
    vehicles_id = parsed_data.pop('vehicles_detail')
    Vehicle.objects.filter(id__in=vehicles_id).update(owner=owner)
    Vehicle.objects.exclude(id__in=vehicles_id).filter(owner=owner).update(owner=None)
    owner_serializer = OwnerSerializer(
        instance=owner,
        data=parsed_data,
        partial=True
    )
    if owner_serializer.is_valid():
        owner_serializer.save()
        return success_response(status=status.HTTP_202_ACCEPTED, msg='Owner updated', data=owner_serializer.data)
    return error_response(status=status.HTTP_400_BAD_REQUEST, msg='Something went wrong',
                          data=owner_serializer.errors)
def retrieve(self, request, pk=None):
    """Render a single Owner; 404 when the pk is unknown."""
    owner = get_or_none(Owner, id=pk)
    if isinstance(owner, Owner):
        serialized = OwnerSerializer(owner)
        return Response(serialized.data, template_name='team/registrations/owner-update.html',
                        status=status.HTTP_200_OK)
    return Response({"error": "Owner does not exist"}, status=status.HTTP_404_NOT_FOUND)
class OwnerVehicleListView(generics.ListAPIView):
    """Paginated vehicle listing, scoped by the requesting user's role."""
    queryset = Vehicle.objects.exclude(deleted=True).order_by('-id')
    serializer_class = FMSVehicleSerializer
    pagination_class = CustomeDatatablesPaginator
    filter_backends = (CustomSearch, filters.OrderingFilter, DjangoFilterBackend)
    filter_class = OwnerVehicleFilter
    search_fields = (
        'id', 'owner__name__username', 'driver__name', 'rc_number', 'permit', 'permit_type',
        'vehicle_type__vehicle_type', 'vehicle_capacity', 'body_type', 'vehicle_number', 'vehicle_model', 'created_on')

    def list(self, request, *args, **kwargs):
        """Wrap the serialized results in the project's envelope dict."""
        filtered = self.filter_queryset(self.get_queryset())
        payload = {"status": "Success", "status_code": status.HTTP_200_OK, "msg": "Vehicles List"}
        page = self.paginate_queryset(filtered)
        if page is None:
            payload["data"] = self.get_serializer(filtered, many=True).data
            return Response(payload)
        payload["data"] = self.get_serializer(page, many=True).data
        return self.get_paginated_response(payload)

    def get_serializer_class(self):
        # FMS users get the FMS-specific representation.
        return FMSVehicleSerializer if is_fms_user(user=self.request.user) else VehicleSerializer

    def get_queryset(self):
        """Restrict vehicles to those visible to FMS brokers or SME companies."""
        user = self.request.user
        if is_fms_user(user=user):
            broker = Broker.objects.get(name=user)
            vehicle_ids = broker.broker_vehicle.exclude(deleted=True).values_list('vehicle_id', flat=True)
            return Vehicle.objects.filter(id__in=vehicle_ids).exclude(deleted=True)
        if is_sme_user(user=user):
            from team.models import ManualBooking
            sme = get_or_none(Sme, name=user)
            if not isinstance(sme, Sme):
                return Vehicle.objects.none()
            vehicle_ids = ManualBooking.objects.filter(company=sme).exclude(
                booking_status='cancelled').values_list('vehicle_id', flat=True)
            return Vehicle.objects.filter(id__in=list(set(list(vehicle_ids))))
        return Vehicle.objects.exclude(deleted=True)
class OwnerVehicleSummaryListView(generics.ListAPIView):
    """Vehicle accounting summaries ordered by completed-POD balance, descending."""
    serializer_class = VehicleSummarySerializer
    pagination_class = CustomeDatatablesPaginator
    filter_backends = (CustomSearch, filters.OrderingFilter, DjangoFilterBackend)
    # filter_class = OwnerVehicleFilter
    search_fields = ('id', 'vehicle__vehicle_number')

    def list(self, request, *args, **kwargs):
        """Wrap the serialized results in the project's envelope dict."""
        summaries = self.filter_queryset(self.get_queryset())
        payload = {"status": "Success", "status_code": status.HTTP_200_OK, "msg": "Vehicles List"}
        page = self.paginate_queryset(summaries)
        if page is None:
            payload["data"] = self.get_serializer(summaries, many=True).data
            return Response(payload)
        payload["data"] = self.get_serializer(page, many=True).data
        return self.get_paginated_response(payload)

    def get_queryset(self):
        # Pull the numeric balance out of the JSON accounting_summary column
        # so it can serve as an ORDER BY key.
        return VehicleSummary.objects.exclude(deleted=True).annotate(
            completed_pod_balance_amount=RawSQL(
                "(accounting_summary->>%s)::numeric", ("completed_pod_balance_amount",))
        ).order_by('-completed_pod_balance_amount')
class OwnerVehicleViewSet(viewsets.ViewSet):
    """
    API for creation, updation, retrieval of Owner Vehicle
    """
    renderer_classes = (JSONRenderer, TemplateHTMLRenderer)

    def create(self, request, pk=None):
        """Create a Vehicle from request.data; returns 201 or 400."""
        request.data['vehicle_number'] = compare_format(request.data['vehicle_number'])
        if "created_by" not in request.data:
            request.data["created_by"] = self.request.user.username
        if "changed_by" not in request.data:
            request.data["changed_by"] = self.request.user.username
        # The UI sends 'yes'/'no'; the model field stores a boolean.
        request.data['gps_enabled'] = request.data.get('gps_enabled', None) == 'yes'
        if request.data.get('registration_year', None):
            # Only the year is captured; anchor the date to 1st January.
            request.data['registration_year'] = '01-Jan-{}'.format(request.data['registration_year'].strip())
        vehicle_serializer = VehicleSerializer(data=request.data)
        if vehicle_serializer.is_valid():
            vehicle_serializer.save()
            return success_response(status=status.HTTP_201_CREATED, msg="Vehicle Created",
                                    data=vehicle_serializer.data)
        return error_response(status=status.HTTP_400_BAD_REQUEST, msg="Vehicle not Created",
                              data=vehicle_serializer.errors)

    def update(self, request, pk=None):
        """Fully update a Vehicle; 404 when the pk is unknown."""
        request.data["changed_by"] = self.request.user.username
        vehicle = get_or_none(Vehicle, id=pk)
        if not isinstance(vehicle, Vehicle):
            return Response({"error": "Vehicle does not exist"}, status=status.HTTP_404_NOT_FOUND)
        vehicle_serializers = VehicleSerializer(instance=vehicle, data=request.data)
        if vehicle_serializers.is_valid():
            vehicle_serializers.save()
            response = {
                "status_code": status.HTTP_202_ACCEPTED,
                "status": "Success",
                "msg": "Vehicle Updated",
                "data": vehicle_serializers.data
            }
            return Response(data=response, status=status.HTTP_202_ACCEPTED)
        response = {
            "status_code": status.HTTP_400_BAD_REQUEST,
            "status": "Error",
            "msg": "Vehicle not Updated",
            "data": vehicle_serializers.errors
        }
        return Response(data=response, status=status.HTTP_400_BAD_REQUEST)

    @staticmethod
    def _attach_document(request, data, doc_key, file_model, category, **file_kwargs):
        """Create a ``file_model`` row for the uploaded document under *doc_key*.

        Silently skips when the key is absent, has no URL, or a record with
        the same s3_url/s3_thumb_url already exists (dedup guard). Reuses the
        S3Upload row matching the upload uuid, otherwise creates one from the
        bucket/folder/file metadata.

        file_kwargs carries the FK to the parent record, e.g. ``vehicle=v``,
        ``owner=o`` or ``driver=d``; it is also used to compute the serial.
        """
        doc = data.get(doc_key)
        if not doc or not doc.get('url'):
            return
        # Invalid URLs are stored as None, matching the historical behavior.
        s3_url = doc.get('url') if validate_url(doc.get('url')) else None
        s3_thumb_url = doc.get('thumb_url') if validate_url(doc.get('thumb_url')) else None
        if file_model.objects.filter(Q(s3_url=s3_url) | Q(s3_thumb_url=s3_thumb_url)).exists():
            return
        uploads = S3Upload.objects.filter(uuid=doc.get('uuid', None))
        if uploads.exists():
            s3_upload = uploads.last()
        else:
            s3_upload = s3util.get_or_create_s3_upload(
                bucket=doc.get('bucketName', None),
                folder=doc.get('folderName', None),
                filename=doc.get('fileName', None),
                verified=False,
                is_valid=False,
                uuid=doc.get('uuid', None),
            )
        file_model.objects.create(
            uploaded_by=request.user,
            document_category=category,
            s3_url=s3_url,
            s3_thumb_url=s3_thumb_url,
            serial=get_new_serial(file_model, **file_kwargs),
            s3_upload=s3_upload,
            **file_kwargs
        )

    def fms_update(self, request, pk):
        """FMS partial update of a Vehicle plus its owner/driver data and documents.

        The eight per-document branches of the original implementation were
        identical except for key, model, category and parent FK; they are now
        delegated to _attach_document().
        """
        vehicle_id = pk
        if vehicle_id and vehicle_id.isnumeric():
            vehicle = get_or_none(Vehicle, id=int_or_none(vehicle_id))
            if not isinstance(vehicle, Vehicle):
                return error_response(msg='Vehicle with id=%s does not exist' % vehicle_id,
                                      status=status.HTTP_404_NOT_FOUND, data=None)
        # The UI sends 'yes'/'no'; the model field stores a boolean.
        request.data['gps_enabled'] = request.data.get('gps_enabled', None) == 'yes'
        data = request.data
        parsed_data = parse_vehicle_docs(request.data)
        request.data["changed_by"] = self.request.user.username
        vehicle = get_or_none(Vehicle, id=pk)
        vehicle_serializers = VehicleSerializer(
            instance=vehicle,
            data=parsed_data,
            partial=True
        )
        if vehicle_serializers.is_valid():
            vehicle_serializers.save()
            owner = vehicle.owner if isinstance(vehicle.owner, Owner) else None
            driver = vehicle.driver if isinstance(vehicle.driver, Driver) else None
            if isinstance(owner, Owner) and parsed_data.get('owner_data', None):
                owner_serializer = OwnerSerializer(
                    instance=owner,
                    data=parsed_data.get('owner_data', None),
                    partial=True
                )
                if owner_serializer.is_valid():
                    owner_serializer.save()
            if isinstance(driver, Driver) and parsed_data.get('driver_data', None):
                driver_serializer = DriverSerializer(
                    instance=driver,
                    data=parsed_data.get('driver_data', None),
                    partial=True
                )
                if driver_serializer.is_valid():
                    driver_serializer.save()
            # Attach any uploaded document images to their respective records.
            self._attach_document(request, data, 'rc_doc', VehicleFile, 'REG', vehicle=vehicle)
            self._attach_document(request, data, 'permit_doc', VehicleFile, 'PERM', vehicle=vehicle)
            self._attach_document(request, data, 'insurance_doc', VehicleFile, 'INS', vehicle=vehicle)
            self._attach_document(request, data, 'fitness_doc', VehicleFile, 'FIT', vehicle=vehicle)
            self._attach_document(request, data, 'puc_doc', VehicleFile, 'PUC', vehicle=vehicle)
            if vehicle.owner:
                self._attach_document(request, data, 'owner_pan_doc', OwnerFile, 'PAN', owner=owner)
                self._attach_document(request, data, 'owner_dec_doc', OwnerFile, 'DEC', owner=owner)
            if vehicle.driver:
                self._attach_document(request, data, 'driver_dl_doc', DriverFile, 'DL', driver=driver)
        # Re-fetch so the response reflects the saved state.
        # NOTE(review): as before, a success response is returned even when the
        # vehicle serializer was invalid — confirm this is the intended contract.
        vehicle = get_or_none(Vehicle, id=pk)
        vehicle_serializer = FMSVehicleSerializer(instance=vehicle)
        return success_response(
            msg='vehicle details %s' % ('edited' if vehicle_id else 'saved'),
            data=vehicle_serializer.data, status=status.HTTP_202_ACCEPTED
        )

    def partial_update(self, request, pk=None):
        """Partially update a Vehicle; an omitted 'owner' explicitly clears it."""
        request.data["changed_by"] = self.request.user.username
        if request.data.get('registration_year', None):
            # Only the year is captured; anchor the date to 1st January.
            request.data['registration_year'] = '01-Jan-{}'.format(request.data['registration_year'].strip())
        vehicle = get_or_none(Vehicle, id=pk)
        data = request.data
        if not isinstance(vehicle, Vehicle):
            return Response({"error": "Vehicle does not exist"}, status=status.HTTP_404_NOT_FOUND)
        if 'owner' not in data:
            # Missing owner key means "detach the owner", not "leave unchanged".
            data['owner'] = None
        vehicle_serializers = VehicleSerializer(
            instance=vehicle,
            data=data,
            partial=True
        )
        if vehicle_serializers.is_valid():
            vehicle_serializers.save()
            response = {
                "status_code": status.HTTP_202_ACCEPTED,
                "status": "Success",
                "msg": "Vehicle Updated",
                "data": vehicle_serializers.data
            }
            return Response(data=response, status=status.HTTP_202_ACCEPTED)
        response = {
            "status_code": status.HTTP_400_BAD_REQUEST,
            "status": "Error",
            "msg": "Vehicle not Updated",
            "data": vehicle_serializers.errors
        }
        return Response(data=response, status=status.HTTP_400_BAD_REQUEST)

    def retrieve(self, request, pk=None):
        """Render a single Vehicle; FMS users get the FMS representation."""
        vehicle = get_or_none(Vehicle, id=pk)
        if not isinstance(vehicle, Vehicle):
            return Response({"error": "Vehicle does not exist"}, status=status.HTTP_404_NOT_FOUND)
        vehicle_serializers = VehicleSerializer(vehicle)
        if is_fms_user(request.user):
            vehicle_serializers = FMSVehicleSerializer(vehicle)
        return Response(template_name='team/registrations/update-vehicle.html', data=vehicle_serializers.data,
                        status=status.HTTP_200_OK)
class FuelCardListView(generics.ListAPIView):
    """Paginated listing of non-deleted fuel cards that have a card number."""
    filter_class = FuelCardFilter
    queryset = FuelCard.objects.exclude(deleted=True).exclude(card_number=None).order_by('-id')
    serializer_class = Select2FuelCardSerializer
    pagination_class = CustomeDatatablesPaginator
    filter_backends = (CustomSearch, filters.OrderingFilter, DjangoFilterBackend)
    # filter_class = OwnerVehicleFilter
    search_fields = (
        'id', 'customer_id', 'card_number', 'issue_date', 'expiry_date', 'created_by__username',
        'created_on')

    def list(self, request, *args, **kwargs):
        """Wrap the serialized results in the project's envelope dict."""
        cards = self.filter_queryset(self.get_queryset())
        payload = {"status": "Success", "status_code": status.HTTP_200_OK, "msg": "Fuel Card Summary List"}
        page = self.paginate_queryset(cards)
        if page is None:
            payload["data"] = self.get_serializer(cards, many=True).data
            return Response(payload)
        payload["data"] = self.get_serializer(page, many=True).data
        return self.get_paginated_response(payload)
class FuelCardViewSet(viewsets.ViewSet):
    """
    API for creation, updation, retrieval of Fuel Card
    """

    def create(self, request, pk=None):
        """Create a FuelCard; audit fields are stamped from the requesting user."""
        request.data["created_by"] = self.request.user.username
        request.data["changed_by"] = self.request.user.username
        serializer = FuelCardSerializer(data=request.data)
        if not serializer.is_valid():
            payload = {
                "status_code": status.HTTP_400_BAD_REQUEST,
                "status": "Error",
                "msg": "Fuel Card not Created",
                "data": serializer.errors
            }
            return Response(data=payload, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        payload = {
            "status_code": status.HTTP_201_CREATED,
            "status": "Success",
            "msg": "Fuel Card Created",
            "data": serializer.data
        }
        return Response(data=payload, status=status.HTTP_201_CREATED)

    def update(self, request, pk=None):
        """Fully update a FuelCard; 404 when the pk is unknown."""
        request.data["changed_by"] = self.request.user.username
        fuel_card = get_or_none(FuelCard, id=pk)
        if not isinstance(fuel_card, FuelCard):
            return Response({"error": "Fuel Card does not exist"}, status=status.HTTP_404_NOT_FOUND)
        serializer = FuelCardSerializer(instance=fuel_card, data=request.data)
        if not serializer.is_valid():
            payload = {
                "status_code": status.HTTP_400_BAD_REQUEST,
                "status": "Error",
                "msg": "Fuel Card not Updated",
                "data": serializer.errors
            }
            return Response(data=payload, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        payload = {
            "status_code": status.HTTP_202_ACCEPTED,
            "status": "Success",
            "msg": "Fuel Card Updated",
            "data": serializer.data
        }
        return Response(data=payload, status=status.HTTP_202_ACCEPTED)

    def partial_update(self, request, pk=None):
        """Partially update a FuelCard; 404 when the pk is unknown."""
        request.data["changed_by"] = self.request.user.username
        fuel_card = get_or_none(FuelCard, id=pk)
        if not isinstance(fuel_card, FuelCard):
            return Response({"error": "Fuel Card does not exist"}, status=status.HTTP_404_NOT_FOUND)
        serializer = FuelCardSerializer(
            instance=fuel_card,
            data=request.data,
            partial=True
        )
        if not serializer.is_valid():
            payload = {
                "status_code": status.HTTP_400_BAD_REQUEST,
                "status": "Error",
                "msg": "Fuel Card not Updated",
                "data": serializer.errors
            }
            return Response(data=payload, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        payload = {
            "status_code": status.HTTP_202_ACCEPTED,
            "status": "Success",
            "msg": "Fuel Card Updated",
            "data": serializer.data
        }
        return Response(data=payload, status=status.HTTP_202_ACCEPTED)

    def retrieve(self, request, pk=None):
        """Serialize a single FuelCard; 404 when the pk is unknown."""
        fuel_card = get_or_none(FuelCard, id=pk)
        if isinstance(fuel_card, FuelCard):
            return Response(FuelCardSerializer(fuel_card).data, status=status.HTTP_200_OK)
        return Response({"error": "Fuel Card does not exist"}, status=status.HTTP_404_NOT_FOUND)
class FuelCardTransactionViewSet(viewsets.ViewSet):
    """
    API for creation, updation, retrieval of Owner FuelCardTransaction
    """

    def create(self, request, pk=None):
        """Create a FuelCardTransaction; audit fields stamped from the requesting user."""
        request.data["created_by"] = self.request.user.username
        request.data["changed_by"] = self.request.user.username
        serializer = FuelCardTransactionSerializer(data=request.data)
        if not serializer.is_valid():
            payload = {
                "status_code": status.HTTP_400_BAD_REQUEST,
                "status": "Error",
                "msg": "Fuel Card Transaction not Created",
                "data": serializer.errors
            }
            return Response(data=payload, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        payload = {
            "status_code": status.HTTP_201_CREATED,
            "status": "Success",
            "msg": "Fuel Card Transaction Created",
            "data": serializer.data
        }
        return Response(data=payload, status=status.HTTP_201_CREATED)

    def update(self, request, pk=None):
        """Fully update a FuelCardTransaction; 404 when the pk is unknown."""
        request.data["changed_by"] = self.request.user.username
        txn = get_or_none(FuelCardTransaction, id=pk)
        if not isinstance(txn, FuelCardTransaction):
            return Response({"error": "Fuel Card Transaction does not exist"}, status=status.HTTP_404_NOT_FOUND)
        serializer = FuelCardTransactionSerializer(instance=txn,
                                                   data=request.data)
        if not serializer.is_valid():
            payload = {
                "status_code": status.HTTP_400_BAD_REQUEST,
                "status": "Error",
                "msg": "Fuel Card Transaction not Updated",
                "data": serializer.errors
            }
            return Response(data=payload, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        payload = {
            "status_code": status.HTTP_202_ACCEPTED,
            "status": "Success",
            "msg": "Fuel Card Transaction Updated",
            "data": serializer.data
        }
        return Response(data=payload, status=status.HTTP_202_ACCEPTED)

    def partial_update(self, request, pk=None):
        """Partially update a FuelCardTransaction; 404 when the pk is unknown."""
        request.data["changed_by"] = self.request.user.username
        txn = get_or_none(FuelCardTransaction, id=pk)
        if not isinstance(txn, FuelCardTransaction):
            return Response({"error": "Fuel Card Transaction does not exist"}, status=status.HTTP_404_NOT_FOUND)
        serializer = FuelCardTransactionSerializer(
            instance=txn,
            data=request.data,
            partial=True
        )
        if not serializer.is_valid():
            payload = {
                "status_code": status.HTTP_400_BAD_REQUEST,
                "status": "Error",
                "msg": "Fuel Card Transaction not Updated",
                "data": serializer.errors
            }
            return Response(data=payload, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        payload = {
            "status_code": status.HTTP_202_ACCEPTED,
            "status": "Success",
            "msg": "Fuel Card Transaction Updated",
            "data": serializer.data
        }
        return Response(data=payload, status=status.HTTP_202_ACCEPTED)

    def retrieve(self, request, pk=None):
        """Serialize a single FuelCardTransaction; 404 when the pk is unknown."""
        txn = get_or_none(FuelCardTransaction, id=pk)
        if isinstance(txn, FuelCardTransaction):
            return Response(FuelCardTransactionSerializer(txn).data, status=status.HTTP_200_OK)
        return Response({"error": "Fuel Card Transaction does not exist"}, status=status.HTTP_404_NOT_FOUND)
| 48.194595
| 120
| 0.605563
| 4,919
| 44,580
| 5.250254
| 0.055702
| 0.031596
| 0.046465
| 0.060404
| 0.776504
| 0.758886
| 0.736738
| 0.730543
| 0.703051
| 0.690041
| 0
| 0.011677
| 0.289255
| 44,580
| 924
| 121
| 48.246753
| 0.803409
| 0.007694
| 0
| 0.661389
| 0
| 0
| 0.100702
| 0.007861
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035323
| false
| 0.003654
| 0.040195
| 0.001218
| 0.208283
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
98114e6d41eec55cde7985c1ba024ac766b738b1
| 157
|
py
|
Python
|
src/predict_text.py
|
CMUSTRUDEL/toxicity-detector
|
aec67bf27184bae89aac0f2566aa19c658949d60
|
[
"MIT"
] | 7
|
2020-03-03T18:09:48.000Z
|
2022-03-23T16:55:26.000Z
|
src/predict_text.py
|
CMUSTRUDEL/toxicity-detector
|
aec67bf27184bae89aac0f2566aa19c658949d60
|
[
"MIT"
] | 7
|
2020-02-23T23:11:30.000Z
|
2021-06-02T01:01:30.000Z
|
src/predict_text.py
|
CMUSTRUDEL/toxicity-detector
|
aec67bf27184bae89aac0f2566aa19c658949d60
|
[
"MIT"
] | 6
|
2020-02-21T00:19:57.000Z
|
2022-03-03T23:00:42.000Z
|
import pickle
import suite

# Load the pretrained model once at import time.
# NOTE(review): unpickling executes arbitrary code — only load a trusted
# model file. The file handle is now closed deterministically via `with`
# (the original leaked it).
with open("pretrained_model.p", "rb") as _model_file:
    model = pickle.load(_model_file)


def predict_text(text):
    """Return the suite prediction for *text* using the pretrained model."""
    return suite.get_prediction(text, model)
| 15.7
| 60
| 0.757962
| 23
| 157
| 5.043478
| 0.695652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101911
| 157
| 9
| 61
| 17.444444
| 0.822695
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
e281a98278d5de1e456e8fa1ee344e455f15ec0e
| 266
|
py
|
Python
|
rasa/nlu/training_data/converters/__init__.py
|
sarvex/rasa
|
ac1197b3f80071bb213d4fa66d5d24b4fc01e30e
|
[
"Apache-2.0"
] | 1
|
2021-07-01T15:38:59.000Z
|
2021-07-01T15:38:59.000Z
|
rasa/nlu/training_data/converters/__init__.py
|
sarvex/rasa
|
ac1197b3f80071bb213d4fa66d5d24b4fc01e30e
|
[
"Apache-2.0"
] | 79
|
2021-08-19T09:49:24.000Z
|
2022-03-14T12:10:54.000Z
|
rasa/nlu/training_data/converters/__init__.py
|
sarvex/rasa
|
ac1197b3f80071bb213d4fa66d5d24b4fc01e30e
|
[
"Apache-2.0"
] | 1
|
2021-07-19T12:59:26.000Z
|
2021-07-19T12:59:26.000Z
|
from rasa.nlu.training_data.converters.nlu_markdown_to_yaml_converter import ( # noqa: F401, E501
NLUMarkdownToYamlConverter,
)
from rasa.nlu.training_data.converters.nlg_markdown_to_yaml_converter import ( # noqa: F401, E501
NLGMarkdownToYamlConverter,
)
| 38
| 98
| 0.81203
| 32
| 266
| 6.4375
| 0.53125
| 0.07767
| 0.106796
| 0.184466
| 0.718447
| 0.718447
| 0.398058
| 0.398058
| 0
| 0
| 0
| 0.050847
| 0.112782
| 266
| 6
| 99
| 44.333333
| 0.822034
| 0.12406
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
e286e49a978f1ddfcc5d07d4215d2a1cc7d98993
| 100
|
py
|
Python
|
nagios/exceptions.py
|
sajalshres/nagios-core-api
|
d423a7a0699843546586815a2062251510fde7d9
|
[
"MIT"
] | null | null | null |
nagios/exceptions.py
|
sajalshres/nagios-core-api
|
d423a7a0699843546586815a2062251510fde7d9
|
[
"MIT"
] | null | null | null |
nagios/exceptions.py
|
sajalshres/nagios-core-api
|
d423a7a0699843546586815a2062251510fde7d9
|
[
"MIT"
] | null | null | null |
class NagiosException(Exception):
    """Base exception type for Nagios API errors."""
    pass
class NagiosUnexpectedResultError(Exception):
    """Raised when Nagios returns a result that cannot be interpreted.

    NOTE(review): inherits Exception directly rather than NagiosException —
    confirm whether rooting it at the package base class was intended.
    """
    pass
| 14.285714
| 45
| 0.78
| 8
| 100
| 9.75
| 0.625
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 100
| 6
| 46
| 16.666667
| 0.928571
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
e289edd75a9431695238afd1ac89719c6fbd4724
| 60
|
py
|
Python
|
aiida_phonoxpy/__init__.py
|
atztogo/aiida-phonoxpy
|
ebc806e9057ca5ca47df62773fa3cc4a7fc94ef3
|
[
"MIT"
] | 1
|
2021-12-21T09:49:18.000Z
|
2021-12-21T09:49:18.000Z
|
aiida_phonoxpy/__init__.py
|
atztogo/aiida-phonoxpy
|
ebc806e9057ca5ca47df62773fa3cc4a7fc94ef3
|
[
"MIT"
] | null | null | null |
aiida_phonoxpy/__init__.py
|
atztogo/aiida-phonoxpy
|
ebc806e9057ca5ca47df62773fa3cc4a7fc94ef3
|
[
"MIT"
] | null | null | null |
from aiida_phonoxpy.version import __version__ # noqa F401
| 30
| 59
| 0.833333
| 8
| 60
| 5.625
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057692
| 0.133333
| 60
| 1
| 60
| 60
| 0.807692
| 0.15
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e290c7bcd1f2947d69b1c5aa9a9a59f814018d65
| 261
|
py
|
Python
|
maindatabase/admin.py
|
dragonman164/Database-for-Covid-19-Vaccination-with-API-support
|
b34c29414967fcdf2d9fb34ed815f5c2752694b6
|
[
"MIT"
] | 1
|
2020-12-30T16:31:14.000Z
|
2020-12-30T16:31:14.000Z
|
maindatabase/admin.py
|
rishusingh022/Database-for-Covid-19-Vaccination-with-API-support
|
8d2137f06d10ca620a4fe24ffc34c410b8a78ad6
|
[
"MIT"
] | null | null | null |
maindatabase/admin.py
|
rishusingh022/Database-for-Covid-19-Vaccination-with-API-support
|
8d2137f06d10ca620a4fe24ffc34c410b8a78ad6
|
[
"MIT"
] | 2
|
2020-12-19T19:27:08.000Z
|
2021-01-02T07:43:39.000Z
|
from django.contrib import admin
from .models import Person,report,management,Person_without_Aadhar
# Register your models here.
for _model in (Person, report, management, Person_without_Aadhar):
    admin.site.register(_model)
| 29
| 66
| 0.842912
| 36
| 261
| 6
| 0.416667
| 0.166667
| 0.314815
| 0.212963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068966
| 261
| 8
| 67
| 32.625
| 0.888889
| 0.099617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
e2b0a9bab1cf77221005e8ce82e59446378bbacd
| 101
|
py
|
Python
|
src/ia_markov/__init__.py
|
Abelarm/python-ia-markov
|
8bef5d620b77a0944924263af6042396cf9b768b
|
[
"BSD-2-Clause"
] | null | null | null |
src/ia_markov/__init__.py
|
Abelarm/python-ia-markov
|
8bef5d620b77a0944924263af6042396cf9b768b
|
[
"BSD-2-Clause"
] | null | null | null |
src/ia_markov/__init__.py
|
Abelarm/python-ia-markov
|
8bef5d620b77a0944924263af6042396cf9b768b
|
[
"BSD-2-Clause"
] | null | null | null |
from .markov import MarkovModel # noqa
from .markov import POSMarkov # noqa
__version__ = "0.2.1"
| 20.2
| 39
| 0.732673
| 14
| 101
| 5
| 0.714286
| 0.285714
| 0.457143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036145
| 0.178218
| 101
| 4
| 40
| 25.25
| 0.807229
| 0.089109
| 0
| 0
| 0
| 0
| 0.05618
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.