hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1b9d875d3c8fbdfbff8a8f0a905ae500c016b540
| 202
|
py
|
Python
|
custom_components/hacs/models/frontend.py
|
sob/hass-conf
|
0d01004475b625c431245a1cee66a1c91b7bfb30
|
[
"MIT"
] | 27
|
2018-10-13T10:00:53.000Z
|
2022-02-07T23:33:12.000Z
|
config/custom_components/hacs/models/frontend.py
|
TheGroundZero/home-assistant-config
|
a963e1cb3e2acf7beda2b466b334218ac27ee42f
|
[
"MIT"
] | 33
|
2021-11-22T16:30:43.000Z
|
2022-03-29T18:00:13.000Z
|
config/custom_components/hacs/models/frontend.py
|
TheGroundZero/home-assistant-config
|
a963e1cb3e2acf7beda2b466b334218ac27ee42f
|
[
"MIT"
] | 5
|
2019-06-01T10:27:37.000Z
|
2020-09-18T14:14:56.000Z
|
"""HacsFrontend."""
class HacsFrontend:
"""HacsFrontend."""
version_running: bool = None
version_available: bool = None
version_expected: bool = None
update_pending: bool = False
| 18.363636
| 34
| 0.673267
| 20
| 202
| 6.6
| 0.55
| 0.181818
| 0.227273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.212871
| 202
| 10
| 35
| 20.2
| 0.830189
| 0.133663
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
1bc8c7f134659c12cac6d78670a0c91af4805587
| 130
|
py
|
Python
|
setup.py
|
Mario-Kart-Felix/oxygen
|
ed7548d264282f3c857f22a26cfe969af335b6d0
|
[
"MIT"
] | 1
|
2018-12-18T16:06:17.000Z
|
2018-12-18T16:06:17.000Z
|
setup.py
|
Mario-Kart-Felix/oxygen
|
ed7548d264282f3c857f22a26cfe969af335b6d0
|
[
"MIT"
] | 5
|
2019-08-20T15:08:33.000Z
|
2019-10-24T15:17:39.000Z
|
setup.py
|
Mario-Kart-Felix/oxygen
|
ed7548d264282f3c857f22a26cfe969af335b6d0
|
[
"MIT"
] | 1
|
2021-03-20T04:13:28.000Z
|
2021-03-20T04:13:28.000Z
|
from distutils.core import setup
from Cython.Build import cythonize
setup(
ext_modules = cythonize("*.pyx", annotate=True)
)
| 18.571429
| 51
| 0.753846
| 17
| 130
| 5.705882
| 0.764706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146154
| 130
| 6
| 52
| 21.666667
| 0.873874
| 0
| 0
| 0
| 0
| 0
| 0.038462
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
9403c5a5e889a912eca0732cc3e2ba320870daab
| 169
|
py
|
Python
|
py_tdlib/constructors/push_message_content_sticker.py
|
Mr-TelegramBot/python-tdlib
|
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
|
[
"MIT"
] | 24
|
2018-10-05T13:04:30.000Z
|
2020-05-12T08:45:34.000Z
|
py_tdlib/constructors/push_message_content_sticker.py
|
MrMahdi313/python-tdlib
|
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
|
[
"MIT"
] | 3
|
2019-06-26T07:20:20.000Z
|
2021-05-24T13:06:56.000Z
|
py_tdlib/constructors/push_message_content_sticker.py
|
MrMahdi313/python-tdlib
|
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
|
[
"MIT"
] | 5
|
2018-10-05T14:29:28.000Z
|
2020-08-11T15:04:10.000Z
|
from ..factory import Type
class pushMessageContentSticker(Type):
sticker = None # type: "sticker"
emoji = None # type: "string"
is_pinned = None # type: "Bool"
| 21.125
| 38
| 0.692308
| 20
| 169
| 5.8
| 0.65
| 0.206897
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189349
| 169
| 7
| 39
| 24.142857
| 0.846715
| 0.254438
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
94098b42b608e8d1fd06b53ff50da36e10318b49
| 112
|
py
|
Python
|
Patterns/horizontal table pattern.py
|
lazydinoz/HackFest21
|
84bfbfbb2c75a6511226a87d2e947984db878ba1
|
[
"MIT"
] | 1
|
2021-11-12T10:51:19.000Z
|
2021-11-12T10:51:19.000Z
|
Patterns/horizontal table pattern.py
|
lazydinoz/HackFest21
|
84bfbfbb2c75a6511226a87d2e947984db878ba1
|
[
"MIT"
] | null | null | null |
Patterns/horizontal table pattern.py
|
lazydinoz/HackFest21
|
84bfbfbb2c75a6511226a87d2e947984db878ba1
|
[
"MIT"
] | null | null | null |
rows = 11
for i in range(0, rows):
for j in range(0, i + 1):
print(i * j, end= ' ')
print()
| 11.2
| 30
| 0.455357
| 20
| 112
| 2.55
| 0.55
| 0.27451
| 0.313725
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 0.375
| 112
| 9
| 31
| 12.444444
| 0.657143
| 0
| 0
| 0
| 0
| 0
| 0.009009
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
941cfef536945d9e2a0eabaa26e92042d1fb5c28
| 4,074
|
py
|
Python
|
voltdb/tests/fixtures/generate_data.py
|
jejikenwogu/integrations-core
|
26d90d6e73c135e3bc0c590ec195d459165dc89f
|
[
"BSD-3-Clause"
] | null | null | null |
voltdb/tests/fixtures/generate_data.py
|
jejikenwogu/integrations-core
|
26d90d6e73c135e3bc0c590ec195d459165dc89f
|
[
"BSD-3-Clause"
] | null | null | null |
voltdb/tests/fixtures/generate_data.py
|
jejikenwogu/integrations-core
|
26d90d6e73c135e3bc0c590ec195d459165dc89f
|
[
"BSD-3-Clause"
] | null | null | null |
# Not used by tests, but can be manually run to generate the mock_results.json file.
import json
import random
import string
import time
from math import log2
MAPPING = {
'COMMANDLOG': ['timestamp', 'host_id', 'hostname', 'int', 'int', 'int', 'int', 'int'],
'CPU': ['timestamp', 'host_id', 'hostname', 'percent'],
'EXPORT': [
'timestamp',
'host_id',
'hostname',
'site_id',
'partition_id',
'str',
'str',
'str',
'int',
'int',
'timestamp',
'timestamp',
'int',
'int',
'int',
'str',
],
'GC': ['timestamp', 'host_id', 'hostname', 'int', 'int', 'int', 'int'],
'IDLETIME': ['timestamp', 'host_id', 'hostname', 'site_id', 'int', 'percent', 'int', 'int', 'int', 'int'],
'IMPORT': ['timestamp', 'host_id', 'hostname', 'site_id', 'str', 'str', 'int', 'int', 'int', 'int'],
'INDEX': [
'timestamp',
'host_id',
'hostname',
'site_id',
'partition_id',
'str',
'str',
'str',
'bool',
'bool',
'int',
'int',
],
'IOSTATS': ['timestamp', 'host_id', 'hostname', 'int', 'str', 'int', 'int', 'int', 'int'],
'LATENCY': [
'timestamp',
'host_id',
'hostname',
'int',
'int',
'int',
'int',
'int',
'int',
'int',
'int',
'int',
'int',
],
'MEMORY': [
'timestamp',
'host_id',
'hostname',
'int',
'int',
'int',
'int',
'int',
'int',
'int',
'int',
'int',
'int',
'int',
],
'PROCEDURE': [
'timestamp',
'host_id',
'hostname',
'site_id',
'partition_id',
'str',
'int',
'int',
'int',
'int',
'int',
'int',
'int',
'int',
'int',
'int',
'int',
'int',
'int',
'bool',
],
'PROCEDUREOUTPUT': ['timestamp', 'str', 'int', 'int', 'int', 'int', 'int', 'int'],
'PROCEDUREPROFILE': ['timestamp', 'str', 'int', 'int', 'int', 'int', 'int', 'int', 'int'],
'QUEUE': ['timestamp', 'host_id', 'hostname', 'site_id', 'int', 'int', 'int', 'int'],
'SNAPSHOTSTATUS': [
'timestamp',
'host_id',
'hostname',
'str',
'str',
'str',
'str',
'int',
'int',
'int',
'int',
'int',
'int',
'str',
'str',
],
'TABLE': [
'timestamp',
'host_id',
'hostname',
'site_id',
'partition_id',
'str',
'str',
'int',
'int',
'int',
'int',
'int',
'int',
],
}
def generate_host_id(idx=0):
return str(idx)
def generate_hostname(idx=0):
return "voltdb-host-%d" % idx
def generate_site_id(idx=0):
return idx
def generate_partition_id(idx=0):
return idx
def generate_bool(idx=0):
return idx % 2 == 0
def generate_int(idx=0):
return int(-(idx * 10 + 1) * 100000 * log2(1 - random.random()))
def generate_timestamp(idx=0):
yesterday = time.time() - 24 * 60 * 60
yesterday -= random.random() * 3600
# yesterday is between 24h and 25h ago
yesterday += idx * 3600
return int(yesterday * 1000)
def generate_percent(idx=0):
return random.random()
def generate_str(idx=0):
return ''.join(random.choice(string.ascii_lowercase) for i in range(15)) + "-" + str(idx)
def generate_data(cnt=1):
data = {}
for component in MAPPING.keys():
data[component] = []
for idx in range(cnt):
tmp_data = []
for elem_type in MAPPING[component]:
tmp_data.append(globals()['generate_%s' % elem_type](idx))
data[component].append(tmp_data)
return data
with open('mock_results.json', 'w') as f:
json.dump(generate_data(3), f, indent=4)
| 21
| 110
| 0.453854
| 419
| 4,074
| 4.298329
| 0.23389
| 0.249861
| 0.294836
| 0.299833
| 0.403109
| 0.375347
| 0.360355
| 0.284842
| 0.207107
| 0.162132
| 0
| 0.018983
| 0.353461
| 4,074
| 193
| 111
| 21.108808
| 0.664768
| 0.02921
| 0
| 0.674699
| 1
| 0
| 0.254302
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060241
| false
| 0
| 0.036145
| 0.048193
| 0.156627
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
943e23a1f02b30ea8b9234935869f5081ffec18e
| 284
|
py
|
Python
|
__init__.py
|
edwardyehuang/iSeg
|
256b0f7fdb6e854fe026fa8df41d9a4a55db34d5
|
[
"MIT"
] | 4
|
2021-12-13T09:49:26.000Z
|
2022-02-19T11:16:50.000Z
|
__init__.py
|
edwardyehuang/iSeg
|
256b0f7fdb6e854fe026fa8df41d9a4a55db34d5
|
[
"MIT"
] | 1
|
2021-07-28T10:40:56.000Z
|
2021-08-09T07:14:06.000Z
|
__init__.py
|
edwardyehuang/iSeg
|
256b0f7fdb6e854fe026fa8df41d9a4a55db34d5
|
[
"MIT"
] | null | null | null |
# ================================================================
# MIT License
# Copyright (c) 2021 edwardyehuang (https://github.com/edwardyehuang)
# ================================================================
name = "iseg"
from iseg.core_model import SegBase, SegFoundation
| 35.5
| 69
| 0.408451
| 19
| 284
| 6.052632
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015267
| 0.077465
| 284
| 7
| 70
| 40.571429
| 0.423664
| 0.735915
| 0
| 0
| 0
| 0
| 0.057143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
9448ab98a76b40234a9adc8f35a5a28ad306e871
| 260
|
py
|
Python
|
tornado-book/external_auth/facebook/modules.py
|
avinassh/learning-tornado
|
98b17c40d76720abcf0ab5b33ee1b46cf91d25d0
|
[
"MIT"
] | 2
|
2019-05-12T15:34:05.000Z
|
2019-05-13T14:45:51.000Z
|
tornado-book/external_auth/facebook/modules.py
|
avinassh/learning-tornado
|
98b17c40d76720abcf0ab5b33ee1b46cf91d25d0
|
[
"MIT"
] | null | null | null |
tornado-book/external_auth/facebook/modules.py
|
avinassh/learning-tornado
|
98b17c40d76720abcf0ab5b33ee1b46cf91d25d0
|
[
"MIT"
] | 3
|
2016-12-09T02:13:53.000Z
|
2019-11-02T07:57:58.000Z
|
import tornado.web
from datetime import datetime
class FeedListItem(tornado.web.UIModule):
def render(self, statusItem):
return self.render_string('entry.html', item=statusItem, format=lambda x: datetime.strptime(x,'%Y-%m-%dT%H:%M:%S+0000').strftime('%c'))
| 43.333333
| 137
| 0.753846
| 39
| 260
| 5
| 0.74359
| 0.102564
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016667
| 0.076923
| 260
| 6
| 137
| 43.333333
| 0.795833
| 0
| 0
| 0
| 0
| 0
| 0.130268
| 0.084291
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0.2
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
944e8084dee179f7e645a620761b0014432fac2d
| 153
|
py
|
Python
|
solutions/week-1/variables_2.py
|
bekbolsky/stepik-python
|
91613178ef8401019fab01ad18f10ee84f2f4491
|
[
"MIT"
] | 1
|
2022-02-23T09:05:47.000Z
|
2022-02-23T09:05:47.000Z
|
solutions/week-1/variables_2.py
|
bekbolsky/stepik-python
|
91613178ef8401019fab01ad18f10ee84f2f4491
|
[
"MIT"
] | null | null | null |
solutions/week-1/variables_2.py
|
bekbolsky/stepik-python
|
91613178ef8401019fab01ad18f10ee84f2f4491
|
[
"MIT"
] | null | null | null |
x = int(input())
print(x // 60) # получение целой части от деления, чтобы определить часы
print(x % 60) # остаток от деления, чтобы определить минуты
| 30.6
| 73
| 0.712418
| 23
| 153
| 4.73913
| 0.652174
| 0.110092
| 0.146789
| 0.440367
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032
| 0.183007
| 153
| 4
| 74
| 38.25
| 0.84
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
94538eb943d4bdcb800c4208785e2909bfa3eed9
| 83
|
py
|
Python
|
jrdb/apps.py
|
hankehly/JRDB
|
ad470e867d204ea975f7b98b57881d72fcfb41c7
|
[
"MIT"
] | 1
|
2022-02-19T14:44:34.000Z
|
2022-02-19T14:44:34.000Z
|
jrdb/apps.py
|
hankehly/JRDB
|
ad470e867d204ea975f7b98b57881d72fcfb41c7
|
[
"MIT"
] | null | null | null |
jrdb/apps.py
|
hankehly/JRDB
|
ad470e867d204ea975f7b98b57881d72fcfb41c7
|
[
"MIT"
] | 1
|
2022-02-19T14:46:40.000Z
|
2022-02-19T14:46:40.000Z
|
from django.apps import AppConfig
class JRDBConfig(AppConfig):
name = "jrdb"
| 13.833333
| 33
| 0.73494
| 10
| 83
| 6.1
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.180723
| 83
| 5
| 34
| 16.6
| 0.897059
| 0
| 0
| 0
| 0
| 0
| 0.048193
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
94564ee8227570a788a161c8f434b06c6868aba0
| 76
|
py
|
Python
|
gaia_beet/__init__.py
|
misode/gaia-beet
|
a2eeed5c42b70745144974a7b184d07061ab9aa3
|
[
"MIT"
] | null | null | null |
gaia_beet/__init__.py
|
misode/gaia-beet
|
a2eeed5c42b70745144974a7b184d07061ab9aa3
|
[
"MIT"
] | 3
|
2022-03-21T20:51:24.000Z
|
2022-03-21T20:51:52.000Z
|
gaia_beet/__init__.py
|
misode/gaia-beet
|
a2eeed5c42b70745144974a7b184d07061ab9aa3
|
[
"MIT"
] | null | null | null |
__version__ = "0.5.1"
from .api import *
from .density_functions import *
| 12.666667
| 32
| 0.710526
| 11
| 76
| 4.454545
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 0.171053
| 76
| 5
| 33
| 15.2
| 0.730159
| 0
| 0
| 0
| 0
| 0
| 0.065789
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
945df796d4f7bebb5940931ff0edc9d3236abf2f
| 135
|
py
|
Python
|
tests/test_tok.py
|
coast-team/sqlschm
|
b5d5a9c904bbaaece1cba9ea7251bda91d77b290
|
[
"Apache-2.0"
] | null | null | null |
tests/test_tok.py
|
coast-team/sqlschm
|
b5d5a9c904bbaaece1cba9ea7251bda91d77b290
|
[
"Apache-2.0"
] | null | null | null |
tests/test_tok.py
|
coast-team/sqlschm
|
b5d5a9c904bbaaece1cba9ea7251bda91d77b290
|
[
"Apache-2.0"
] | null | null | null |
from sqlschm import tok
def test_interned_consistent_val():
for val in tok.INTERNED:
assert tok.INTERNED[val].val == val
| 19.285714
| 43
| 0.711111
| 20
| 135
| 4.65
| 0.6
| 0.236559
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.207407
| 135
| 6
| 44
| 22.5
| 0.869159
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
94706aaf057e7b8e13c3ee897611bc42900c45df
| 143
|
py
|
Python
|
after_deploy/models.py
|
dfop02/django-after-deploy
|
32406aea3d83dda7deff548c9f9c91183cb3dcf1
|
[
"MIT"
] | 7
|
2020-02-22T13:13:53.000Z
|
2022-03-26T18:04:44.000Z
|
after_deploy/models.py
|
dfop02/django-after-deploy
|
32406aea3d83dda7deff548c9f9c91183cb3dcf1
|
[
"MIT"
] | null | null | null |
after_deploy/models.py
|
dfop02/django-after-deploy
|
32406aea3d83dda7deff548c9f9c91183cb3dcf1
|
[
"MIT"
] | null | null | null |
from django.db import models
class tasks(models.Model):
code = models.CharField(max_length=32, primary_key=True, null=False, blank=False)
| 28.6
| 85
| 0.769231
| 22
| 143
| 4.909091
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015873
| 0.118881
| 143
| 4
| 86
| 35.75
| 0.84127
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
947a634a8b070ccbf5d5c560f9ed1194ee68117c
| 995
|
py
|
Python
|
openbook_tags/migrations/0003_auto_20190206_1843.py
|
TamaraAbells/okuna-api
|
f87d8e80d2f182c01dbce68155ded0078ee707e4
|
[
"MIT"
] | 164
|
2019-07-29T17:59:06.000Z
|
2022-03-19T21:36:01.000Z
|
openbook_tags/migrations/0003_auto_20190206_1843.py
|
TamaraAbells/okuna-api
|
f87d8e80d2f182c01dbce68155ded0078ee707e4
|
[
"MIT"
] | 188
|
2019-03-16T09:53:25.000Z
|
2019-07-25T14:57:24.000Z
|
openbook_tags/migrations/0003_auto_20190206_1843.py
|
TamaraAbells/okuna-api
|
f87d8e80d2f182c01dbce68155ded0078ee707e4
|
[
"MIT"
] | 80
|
2019-08-03T17:49:08.000Z
|
2022-02-28T16:56:33.000Z
|
# Generated by Django 2.1.5 on 2019-02-06 17:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('openbook_tags', '0002_auto_20190206_1805'),
]
operations = [
migrations.AddField(
model_name='tag',
name='description_en',
field=models.CharField(max_length=64, null=True, verbose_name='description'),
),
migrations.AddField(
model_name='tag',
name='description_es',
field=models.CharField(max_length=64, null=True, verbose_name='description'),
),
migrations.AddField(
model_name='tag',
name='title_en',
field=models.CharField(max_length=64, null=True, verbose_name='title'),
),
migrations.AddField(
model_name='tag',
name='title_es',
field=models.CharField(max_length=64, null=True, verbose_name='title'),
),
]
| 29.264706
| 89
| 0.58593
| 106
| 995
| 5.311321
| 0.40566
| 0.127886
| 0.16341
| 0.191829
| 0.724689
| 0.724689
| 0.724689
| 0.547069
| 0.547069
| 0.547069
| 0
| 0.055319
| 0.291457
| 995
| 33
| 90
| 30.151515
| 0.743262
| 0.045226
| 0
| 0.592593
| 1
| 0
| 0.130802
| 0.024262
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.037037
| 0
| 0.148148
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
8471127acba146ec9e6ec6fa7cf718398c5327e2
| 253
|
py
|
Python
|
py/epm/__init__.py
|
NikiRui/SNEPM
|
9b0735faa5d10c3c3e4fd26636475151f9448b00
|
[
"MIT"
] | 1
|
2018-03-12T09:21:48.000Z
|
2018-03-12T09:21:48.000Z
|
py/epm/__init__.py
|
NikiRui/SNEPM
|
9b0735faa5d10c3c3e4fd26636475151f9448b00
|
[
"MIT"
] | null | null | null |
py/epm/__init__.py
|
NikiRui/SNEPM
|
9b0735faa5d10c3c3e4fd26636475151f9448b00
|
[
"MIT"
] | null | null | null |
#
# See top-level LICENSE.rst file for Copyright information
#
# -*- coding: utf-8 -*-
"""
epm
===
Tools for EPM analysis.
.. _Python: http://python.org
"""
# help with 2to3 support.
from __future__ import absolute_import, division, print_function
| 14.055556
| 64
| 0.695652
| 33
| 253
| 5.121212
| 0.878788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014218
| 0.166008
| 253
| 17
| 65
| 14.882353
| 0.78673
| 0.660079
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 4
|
849a0d0da310dc0a1f60bf1f6cf15c727346766d
| 967
|
py
|
Python
|
test/Python/Compiler/structure.py
|
marbre/mlir-npcomp
|
30adf9e6b0c1e94db38050a9e143f20a5a461d17
|
[
"Apache-2.0"
] | 11
|
2020-03-30T23:31:03.000Z
|
2021-06-16T01:17:58.000Z
|
test/Python/Compiler/structure.py
|
marbre/mlir-npcomp
|
30adf9e6b0c1e94db38050a9e143f20a5a461d17
|
[
"Apache-2.0"
] | 1
|
2020-04-26T21:19:30.000Z
|
2020-04-29T18:22:59.000Z
|
test/Python/Compiler/structure.py
|
marbre/mlir-npcomp
|
30adf9e6b0c1e94db38050a9e143f20a5a461d17
|
[
"Apache-2.0"
] | null | null | null |
# RUN: %PYTHON %s | npcomp-opt -split-input-file | FileCheck %s --dump-input=fail
from npcomp.compiler import test_config
import_global = test_config.create_import_dump_decorator()
# CHECK-LABEL: func @positional_args
# CHECK-SAME: (%arg0: !basicpy.UnknownType, %arg1: !basicpy.UnknownType) -> !basicpy.UnknownType
@import_global
def positional_args(a, b):
# CHECK: basicpy.binary_expr %arg0 "Add" %arg1
return a + b
# CHECK-LABEL: func @pass_no_return
@import_global
def pass_no_return():
# CHECK: %[[NONE:.*]] = basicpy.singleton : !basicpy.NoneType
# CHECK: %[[NONE_CAST:.*]] = basicpy.unknown_cast %[[NONE]] : !basicpy.NoneType -> !basicpy.UnknownType
# CHECK: return %[[NONE_CAST]]
# CHECK-NOT: return
pass
# CHECK-LABEL: func @expr_statement
@import_global
def expr_statement():
# CHECK: basicpy.exec {
# CHECK: %[[V:.*]] = basicpy.binary_expr
# CHECK: basicpy.exec_discard %[[V]]
# CHECK: }
# CHECK: return
a = 1
a + 2
| 26.861111
| 106
| 0.689762
| 126
| 967
| 5.111111
| 0.396825
| 0.074534
| 0.065217
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007335
| 0.154085
| 967
| 35
| 107
| 27.628571
| 0.779951
| 0.677353
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.166667
| 0.416667
| 0.083333
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 4
|
84c8db01533702f0f12f75d86b51d99801e8ba92
| 146
|
py
|
Python
|
taxman/income/abstract_income.py
|
robinmitra/taxman
|
a7afc0b4a1449cd46e90cd3af05f4a5d65a8acbf
|
[
"MIT"
] | 3
|
2019-01-07T13:08:59.000Z
|
2021-01-11T10:34:52.000Z
|
taxman/income/abstract_income.py
|
robinmitra/taxman
|
a7afc0b4a1449cd46e90cd3af05f4a5d65a8acbf
|
[
"MIT"
] | null | null | null |
taxman/income/abstract_income.py
|
robinmitra/taxman
|
a7afc0b4a1449cd46e90cd3af05f4a5d65a8acbf
|
[
"MIT"
] | null | null | null |
import abc
class AbstractIncome(abc.ABC):
@abc.abstractmethod
def get_amount(self):
"""Get income amount for the income type"""
| 18.25
| 51
| 0.678082
| 19
| 146
| 5.157895
| 0.684211
| 0.122449
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.219178
| 146
| 7
| 52
| 20.857143
| 0.859649
| 0.253425
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
84d44de7a484200ae72408fe3fe5f6ac98e16732
| 95
|
py
|
Python
|
quest/__init__.py
|
Fluorescence-Tools/quest
|
e17e5682f7686d1acc1fd8a22bdae33963bc16d6
|
[
"MIT"
] | null | null | null |
quest/__init__.py
|
Fluorescence-Tools/quest
|
e17e5682f7686d1acc1fd8a22bdae33963bc16d6
|
[
"MIT"
] | 1
|
2019-08-14T08:01:26.000Z
|
2019-08-15T22:59:05.000Z
|
quest/__init__.py
|
Fluorescence-Tools/quest
|
e17e5682f7686d1acc1fd8a22bdae33963bc16d6
|
[
"MIT"
] | null | null | null |
import os
import quest.utils as utils
utils.set_search_paths(
os.path.dirname(__file__)
)
| 13.571429
| 29
| 0.768421
| 15
| 95
| 4.466667
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147368
| 95
| 6
| 30
| 15.833333
| 0.82716
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
84dd78e7d33cde171b3beb08515ab72d7ab44385
| 156
|
py
|
Python
|
scrapers/BPC-bournemouth-christchurch-and-poole/councillors.py
|
DemocracyClub/LGSF
|
21c2a049db08575e03db2fb63a8bccc8de0c636b
|
[
"MIT"
] | 4
|
2018-10-17T13:30:08.000Z
|
2021-06-22T13:29:43.000Z
|
scrapers/BPC-bournemouth-christchurch-and-poole/councillors.py
|
DemocracyClub/LGSF
|
21c2a049db08575e03db2fb63a8bccc8de0c636b
|
[
"MIT"
] | 46
|
2018-10-15T13:47:48.000Z
|
2022-03-23T10:26:18.000Z
|
scrapers/BPC-bournemouth-christchurch-and-poole/councillors.py
|
DemocracyClub/LGSF
|
21c2a049db08575e03db2fb63a8bccc8de0c636b
|
[
"MIT"
] | 1
|
2018-10-15T13:36:03.000Z
|
2018-10-15T13:36:03.000Z
|
from lgsf.councillors.scrapers import ModGovCouncillorScraper
class Scraper(ModGovCouncillorScraper):
base_url = "http://democracy.bcpcouncil.gov.uk"
| 26
| 61
| 0.814103
| 16
| 156
| 7.875
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096154
| 156
| 5
| 62
| 31.2
| 0.893617
| 0
| 0
| 0
| 0
| 0
| 0.217949
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
ca0a1988288f4f6bd45a49833d1d336de3137ae1
| 66
|
py
|
Python
|
python/testData/findUsages/ClassUsages.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/findUsages/ClassUsages.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/findUsages/ClassUsages.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
class C<caret>ow:
def __init__(self):
pass
c = Cow()
| 11
| 23
| 0.545455
| 10
| 66
| 3.2
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.318182
| 66
| 5
| 24
| 13.2
| 0.711111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.25
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
ca1ec0b8a4e7f5db178e340d5646c980f299550a
| 218
|
py
|
Python
|
music_from_my_tshirt/context_processors.py
|
craiga/music-from-my-tshirt
|
d6f051771c8d2c16fadde78df35a09fd9d8818aa
|
[
"MIT"
] | null | null | null |
music_from_my_tshirt/context_processors.py
|
craiga/music-from-my-tshirt
|
d6f051771c8d2c16fadde78df35a09fd9d8818aa
|
[
"MIT"
] | 9
|
2020-03-01T16:48:16.000Z
|
2020-04-29T16:20:26.000Z
|
music_from_my_tshirt/context_processors.py
|
craiga/music-from-my-tshirt
|
d6f051771c8d2c16fadde78df35a09fd9d8818aa
|
[
"MIT"
] | null | null | null |
"""Context processors."""
from django.conf import settings
def sentry_dsn(request):
return {"sentry_dsn": settings.SENTRY_DSN}
def canonical_host(request):
return {"canonical_host": settings.ENFORCE_HOST}
| 18.166667
| 52
| 0.747706
| 27
| 218
| 5.814815
| 0.555556
| 0.171975
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133028
| 218
| 11
| 53
| 19.818182
| 0.830688
| 0.087156
| 0
| 0
| 0
| 0
| 0.124352
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
ca28ed8ab2a0560487efe2001d6ce704923d282c
| 10,973
|
py
|
Python
|
pika/exceptions.py
|
hugovk/pika
|
03542ef616a2a849e8bfb0845427f50e741ea0c6
|
[
"BSD-3-Clause"
] | 1
|
2019-08-28T10:10:56.000Z
|
2019-08-28T10:10:56.000Z
|
pika/exceptions.py
|
goupper/pika
|
e2f26db4f41ac7ea6bdc50964a766472460dce4a
|
[
"BSD-3-Clause"
] | null | null | null |
pika/exceptions.py
|
goupper/pika
|
e2f26db4f41ac7ea6bdc50964a766472460dce4a
|
[
"BSD-3-Clause"
] | null | null | null |
"""Pika specific exceptions"""
class AMQPError(Exception):
    """Base class for all AMQP-related errors raised by Pika."""

    def __repr__(self):
        return '{}: An unspecified AMQP error has occurred; {}'.format(
            self.__class__.__name__, self.args)
class AMQPConnectionError(AMQPError):
    """Base class for connection-level AMQP errors."""

    def __repr__(self):
        if len(self.args) == 2:
            # Two args are rendered as a (code) text pair.
            return '%s: (%s) %s' % (self.__class__.__name__,
                                    self.args[0],
                                    self.args[1])
        return '%s: %s' % (self.__class__.__name__, self.args)
class ConnectionOpenAborted(AMQPConnectionError):
    """The client closed the connection while it was still opening."""
class StreamLostError(AMQPConnectionError):
    """The stream (TCP) connection was lost."""
class IncompatibleProtocolError(AMQPConnectionError):
    """The server returned a protocol that this client does not support."""

    def __repr__(self):
        return ('{}: The protocol returned by the server is not '
                'supported: {}'.format(self.__class__.__name__, self.args))
class AuthenticationError(AMQPConnectionError):
    """Server and client failed to agree on an authentication mechanism."""

    def __repr__(self):
        return ('{}: Server and client could not negotiate use of the {} '
                'authentication mechanism'.format(self.__class__.__name__,
                                                  self.args[0]))
class ProbableAuthenticationError(AMQPConnectionError):
    """Disconnected at a stage that suggests a failed authentication."""

    def __repr__(self):
        return ('{}: Client was disconnected at a connection stage '
                'indicating a probable authentication error: {}'.format(
                    self.__class__.__name__, self.args))
class ProbableAccessDeniedError(AMQPConnectionError):
    """Disconnected at a stage that suggests denied access to the vhost."""

    def __repr__(self):
        return ('{}: Client was disconnected at a connection stage '
                'indicating a probable denial of access to the specified '
                'virtual host: {}'.format(self.__class__.__name__, self.args))
class NoFreeChannels(AMQPConnectionError):
    """Every channel number on this connection is already in use."""

    def __repr__(self):
        return '{}: The connection has run out of free channels'.format(
            self.__class__.__name__)
class ConnectionWrongStateError(AMQPConnectionError):
    """Connection is in wrong state for the requested operation."""

    def __repr__(self):
        if not self.args:
            # No details supplied; emit a generic message.
            return ('{}: The connection is in wrong state for the requested '
                    'operation.'.format(self.__class__.__name__))
        return super(ConnectionWrongStateError, self).__repr__()
class ConnectionClosed(AMQPConnectionError):
    """The connection was closed via `Connection.Close`."""

    def __init__(self, reply_code, reply_text):
        """
        :param int reply_code: reply-code from the user's or broker's
            `Connection.Close` method. NEW in v1.0.0
        :param str reply_text: reply-text from the user's or broker's
            `Connection.Close` method; human-readable string corresponding
            to `reply_code`. NEW in v1.0.0
        """
        super(ConnectionClosed, self).__init__(int(reply_code),
                                               str(reply_text))

    def __repr__(self):
        return '%s: (%s) %r' % (self.__class__.__name__,
                                self.reply_code,
                                self.reply_text)

    @property
    def reply_code(self):
        """AMQP reply-code. NEW in v1.0.0
        :rtype: int
        """
        return self.args[0]

    @property
    def reply_text(self):
        """AMQP reply-text. NEW in v1.0.0
        :rtype: str
        """
        return self.args[1]
class ConnectionClosedByBroker(ConnectionClosed):
    """The broker sent `Connection.Close`."""
class ConnectionClosedByClient(ConnectionClosed):
    """The connection was closed at the request of the Pika client."""
class ConnectionBlockedTimeout(AMQPConnectionError):
    """RabbitMQ-specific: timed out while waiting for connection.unblocked."""
class AMQPHeartbeatTimeout(AMQPConnectionError):
    """The connection was dropped because the heartbeat timed out."""
class AMQPChannelError(AMQPError):
    """Base class for channel-level AMQP errors."""

    def __repr__(self):
        return '%s: %r' % (self.__class__.__name__, self.args)
class ChannelWrongStateError(AMQPChannelError):
    """The channel is in the wrong state for the requested operation."""
class ChannelClosed(AMQPChannelError):
    """The channel was closed by the client or by the broker."""

    def __init__(self, reply_code, reply_text):
        """
        :param int reply_code: reply-code from the user's or broker's
            `Channel.Close` method; one of the AMQP-defined Channel Errors.
            NEW in v1.0.0
        :param str reply_text: reply-text from the user's or broker's
            `Channel.Close` method; human-readable string corresponding to
            `reply_code`. NEW in v1.0.0
        """
        super(ChannelClosed, self).__init__(int(reply_code),
                                            str(reply_text))

    def __repr__(self):
        return '%s: (%s) %r' % (self.__class__.__name__,
                                self.reply_code,
                                self.reply_text)

    @property
    def reply_code(self):
        """AMQP reply-code. NEW in v1.0.0
        :rtype: int
        """
        return self.args[0]

    @property
    def reply_text(self):
        """AMQP reply-text. NEW in v1.0.0
        :rtype: str
        """
        return self.args[1]
class ChannelClosedByBroker(ChannelClosed):
    """`Channel.Close` received from the broker; may be passed as the reason
    to a channel's on-closed callback of non-blocking connection adapters,
    or raised by `BlockingConnection`.
    NEW in v1.0.0
    """
class ChannelClosedByClient(ChannelClosed):
    """Channel closed by the client upon receipt of `Channel.CloseOk`; may
    be passed as the reason to a channel's on-closed callback of
    non-blocking connection adapters, but is not raised by
    `BlockingConnection`.
    NEW in v1.0.0
    """
class DuplicateConsumerTag(AMQPChannelError):
    """A consumer with the given tag already exists on this channel."""

    def __repr__(self):
        return ('{}: The consumer tag specified already exists for this '
                'channel: {}'.format(self.__class__.__name__, self.args[0]))
class ConsumerCancelled(AMQPChannelError):
    """The server cancelled the consumer."""

    def __repr__(self):
        return '{}: Server cancelled consumer'.format(
            self.__class__.__name__)
class UnroutableError(AMQPChannelError):
    """Exception containing one or more unroutable messages that the broker
    returned via Basic.Return.
    Used by BlockingChannel.
    In publisher-acknowledgements mode this is raised upon receipt of
    Basic.Ack from the broker; in the event of Basic.Nack from the broker,
    `NackError` is raised instead.
    """

    def __init__(self, messages):
        """
        :param messages: sequence of returned unroutable messages
        :type messages: sequence of `blocking_connection.ReturnedMessage`
            objects
        """
        super(UnroutableError, self).__init__(
            '{} unroutable message(s) returned'.format(len(messages)))
        # Keep the returned messages so callers can inspect them.
        self.messages = messages

    def __repr__(self):
        return '{}: {} unroutable messages returned by broker'.format(
            self.__class__.__name__, len(self.messages))
class NackError(AMQPChannelError):
    """This exception is raised when a message published in
    publisher-acknowledgements mode is Nack'ed by the broker.
    Used by BlockingChannel.
    """

    def __init__(self, messages):
        """
        :param messages: sequence of returned unroutable messages
        :type messages: sequence of `blocking_connection.ReturnedMessage`
            objects
        """
        super(NackError, self).__init__(
            "%s message(s) NACKed" % (len(messages)))
        # Keep the NACKed messages so callers can inspect them.
        self.messages = messages

    def __repr__(self):
        # Bug fix: the message previously said "unroutable messages returned
        # by broker" (copy-pasted from UnroutableError); these messages were
        # NACKed, not returned as unroutable.
        return '%s: %i message(s) NACKed' % (self.__class__.__name__,
                                             len(self.messages))
class InvalidChannelNumber(AMQPError):
    """An invalid channel number was specified."""

    def __repr__(self):
        return '{}: An invalid channel number has been specified: {}'.format(
            self.__class__.__name__, self.args[0])
class ProtocolSyntaxError(AMQPError):
    """Base class for AMQP protocol syntax errors."""

    def __repr__(self):
        return '{}: An unspecified protocol syntax error occurred'.format(
            self.__class__.__name__)
class UnexpectedFrameError(ProtocolSyntaxError):
    """A frame arrived out of sequence."""

    def __repr__(self):
        return '{}: Received a frame out of sequence: {!r}'.format(
            self.__class__.__name__, self.args[0])
class ProtocolVersionMismatch(ProtocolSyntaxError):
    """Client and server AMQP protocol versions did not match."""

    def __repr__(self):
        return '{}: Protocol versions did not match: {!r} vs {!r}'.format(
            self.__class__.__name__, self.args[0], self.args[1])
class BodyTooLongError(ProtocolSyntaxError):
    """More body bytes arrived for a message delivery than were announced."""

    def __repr__(self):
        # %i is kept deliberately: the byte counts are expected to be ints.
        template = ('%s: Received too many bytes for a message delivery: '
                    'Received %i, expected %i')
        return template % (self.__class__.__name__, self.args[0],
                           self.args[1])
class InvalidFrameError(ProtocolSyntaxError):
    """An invalid frame was received."""

    def __repr__(self):
        return '{}: Invalid frame received: {!r}'.format(
            self.__class__.__name__, self.args[0])
class InvalidFieldTypeException(ProtocolSyntaxError):
    """An AMQP field of an unsupported kind was encountered."""

    def __repr__(self):
        return '{}: Unsupported field kind {}'.format(
            self.__class__.__name__, self.args[0])
class UnsupportedAMQPFieldException(ProtocolSyntaxError):
    """A value could not be encoded as an AMQP field."""

    def __repr__(self):
        # NOTE(review): reports the type of args[1] -- presumably the args
        # are (container, offending_value); confirm against the encoder.
        return '{}: Unsupported field kind {}'.format(
            self.__class__.__name__, type(self.args[1]))
class MethodNotImplemented(AMQPError):
    """Raised for AMQP methods that are not implemented."""
class ChannelError(Exception):
    """An unspecified error occurred with the Channel."""

    def __repr__(self):
        return '{}: An unspecified error occurred with the Channel'.format(
            self.__class__.__name__)
class InvalidMinimumFrameSize(ProtocolSyntaxError):
    """DEPRECATED; pika.connection.Parameters.frame_max property setter now
    raises the standard `ValueError` exception when the value is out of
    bounds.
    """

    def __repr__(self):
        return '{}: AMQP Minimum Frame Size is 4096 Bytes'.format(
            self.__class__.__name__)
class InvalidMaximumFrameSize(ProtocolSyntaxError):
    """DEPRECATED; pika.connection.Parameters.frame_max property setter now
    raises the standard `ValueError` exception when the value is out of
    bounds.
    """

    def __repr__(self):
        return '{}: AMQP Maximum Frame Size is 131072 Bytes'.format(
            self.__class__.__name__)
class ReentrancyError(Exception):
    """Raised when the requested operation would result in unsupported
    recursion or reentrancy.
    Used by BlockingConnection/BlockingChannel.
    """
class ShortStringTooLong(AMQPError):
    """An AMQP short string exceeded its 255-byte limit."""

    def __repr__(self):
        # %.300s caps the echoed value so oversized input can't flood logs.
        template = ('%s: AMQP Short String can contain up to 255 bytes: '
                    '%.300s')
        return template % (self.__class__.__name__, self.args[0])
class DuplicateGetOkCallback(ChannelError):
    """basic_get was called again before the previous callback executed."""

    def __repr__(self):
        return ('{}: basic_get can only be called again after the callback '
                'for the previous basic_get is executed'.format(
                    self.__class__.__name__))
| 28.952507
| 80
| 0.628087
| 1,177
| 10,973
| 5.508921
| 0.208156
| 0.040253
| 0.058143
| 0.068168
| 0.551974
| 0.535472
| 0.500771
| 0.429673
| 0.392042
| 0.372455
| 0
| 0.008165
| 0.274492
| 10,973
| 378
| 81
| 29.029101
| 0.806306
| 0.241228
| 0
| 0.5
| 0
| 0
| 0.18414
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.211765
| false
| 0.058824
| 0
| 0.152941
| 0.641176
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 4
|
ca2a83885647c0e4b06a6b68eaaac73ccd7a6588
| 100
|
py
|
Python
|
SRC/Chapter_03-Functions-And-Looping/03_challenge.py
|
archeranimesh/tth-python-basics-3
|
accbc894324d084124ec001817edf4dc3afffa78
|
[
"MIT"
] | null | null | null |
SRC/Chapter_03-Functions-And-Looping/03_challenge.py
|
archeranimesh/tth-python-basics-3
|
accbc894324d084124ec001817edf4dc3afffa78
|
[
"MIT"
] | null | null | null |
SRC/Chapter_03-Functions-And-Looping/03_challenge.py
|
archeranimesh/tth-python-basics-3
|
accbc894324d084124ec001817edf4dc3afffa78
|
[
"MIT"
] | null | null | null |
def square(number):
    """Return *number* raised to the power of two."""
    squared = number ** 2
    return squared
print(square(5))  # Demo: prints 25.
| 11.111111
| 22
| 0.55
| 12
| 100
| 4.583333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028571
| 0.3
| 100
| 8
| 23
| 12.5
| 0.757143
| 0.17
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
ca4129045f4da6ed48004cc93fa82a6bb18566e9
| 2,432
|
gyp
|
Python
|
athena/main/athena_main.gyp
|
7kbird/chrome
|
f56688375530f1003e34c34f441321977c5af3c3
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
athena/main/athena_main.gyp
|
7kbird/chrome
|
f56688375530f1003e34c34f441321977c5af3c3
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
athena/main/athena_main.gyp
|
7kbird/chrome
|
f56688375530f1003e34c34f441321977c5af3c3
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2020-11-04T07:23:37.000Z
|
2020-11-04T07:23:37.000Z
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'targets': [
{
'target_name': 'athena_main_lib',
'type': 'static_library',
'dependencies': [
'../athena.gyp:athena_lib',
'../athena.gyp:athena_content_lib',
'../athena.gyp:athena_content_support_lib',
'../resources/athena_resources.gyp:athena_resources',
        # debug_window.cc depends on this. Remove this once debug_window
# is removed.
'../../ash/ash_resources.gyp:ash_resources',
'../../chromeos/chromeos.gyp:power_manager_proto',
'../../components/components.gyp:component_metrics_proto',
'../../components/components.gyp:history_core_browser',
# infobars_test_support is required to declare some symbols used in the
# search_engines and its dependencies. See crbug.com/386171
# TODO(mukai): declare those symbols for Athena.
'../../components/components.gyp:infobars_test_support',
'../../components/components.gyp:omnibox',
'../../components/components.gyp:search_engines',
'../../skia/skia.gyp:skia',
'../../ui/app_list/app_list.gyp:app_list',
'../../ui/chromeos/ui_chromeos.gyp:ui_chromeos',
'../../ui/native_theme/native_theme.gyp:native_theme',
'../../ui/views/views.gyp:views',
'../../url/url.gyp:url_lib',
],
'include_dirs': [
'../..',
],
'sources': [
'athena_launcher.cc',
'athena_launcher.h',
'debug/debug_window.cc',
'debug/debug_window.h',
'debug/network_selector.cc',
'debug/network_selector.h',
'url_search_provider.cc',
'url_search_provider.h',
'placeholder.cc',
'placeholder.h',
],
},
{
'target_name': 'athena_main',
'type': 'executable',
'dependencies': [
'../../ui/accessibility/accessibility.gyp:ax_gen',
'../resources/athena_resources.gyp:athena_pak',
'../../extensions/shell/app_shell.gyp:app_shell_lib',
'athena_main_lib',
],
'include_dirs': [
'../..',
],
'sources': [
'athena_app_window_controller.cc',
'athena_app_window_controller.h',
'athena_main.cc',
],
}
], # targets
}
| 33.315068
| 79
| 0.594161
| 264
| 2,432
| 5.219697
| 0.409091
| 0.032656
| 0.083454
| 0.029028
| 0.123367
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005933
| 0.237664
| 2,432
| 72
| 80
| 33.777778
| 0.737325
| 0.169408
| 0
| 0.238095
| 0
| 0
| 0.646766
| 0.501493
| 0
| 0
| 0
| 0.013889
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
ca4f85eab1aca1cd671819ea1e33aa6abfc91c48
| 237
|
gyp
|
Python
|
binding.gyp
|
Bruce17/nodejs-cpp-addon
|
11b329bc5705f102c76d17f465205aafe5291b1c
|
[
"MIT"
] | null | null | null |
binding.gyp
|
Bruce17/nodejs-cpp-addon
|
11b329bc5705f102c76d17f465205aafe5291b1c
|
[
"MIT"
] | null | null | null |
binding.gyp
|
Bruce17/nodejs-cpp-addon
|
11b329bc5705f102c76d17f465205aafe5291b1c
|
[
"MIT"
] | null | null | null |
{
"targets": [
{
"target_name": "binding-helloworld",
"sources": [ "src/cpp/binding-helloworld.cc" ]
},
{
"target_name": "binding-fibonacci",
"sources": [ "src/cpp/binding-fibonacci.cc" ]
}
]
}
| 19.75
| 52
| 0.527426
| 21
| 237
| 5.857143
| 0.47619
| 0.162602
| 0.276423
| 0.325203
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.270042
| 237
| 12
| 53
| 19.75
| 0.710983
| 0
| 0
| 0
| 0
| 0
| 0.567227
| 0.239496
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
ca8d9745c220c0d82c87b8a60b0e70a9f13fb54e
| 266
|
py
|
Python
|
pypartitions/__init__.py
|
klocey/partitions
|
0ce57b75007f9608f55b0a835410f0d0c1de5246
|
[
"Unlicense"
] | 3
|
2015-12-27T07:06:23.000Z
|
2020-04-09T19:22:59.000Z
|
pypartitions/__init__.py
|
klocey/partitions
|
0ce57b75007f9608f55b0a835410f0d0c1de5246
|
[
"Unlicense"
] | 6
|
2016-04-28T05:36:37.000Z
|
2021-01-31T22:07:15.000Z
|
pypartitions/__init__.py
|
klocey/partitions
|
0ce57b75007f9608f55b0a835410f0d0c1de5246
|
[
"Unlicense"
] | null | null | null |
"""
pypartitions: Efficient Sampling Algorithims for Integer Partitioning
Reference: Locey, K.J. and D.J. McGlinn. Efficient algorithms for sampling feasible sets
of macroecological patterns. PeerJ. https://peerj.com/preprints/78/
"""
from .pypartitions import *
| 26.6
| 88
| 0.781955
| 33
| 266
| 6.30303
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008584
| 0.12406
| 266
| 10
| 89
| 26.6
| 0.88412
| 0.853383
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
047f2cd50ac08c58c8931db71bdbc881ffba80a6
| 83
|
py
|
Python
|
learnergy/models/__init__.py
|
anukaal/learnergy
|
704fc2b3fcb80df41ed28d750dc4e6475df23315
|
[
"Apache-2.0"
] | 39
|
2020-02-27T00:47:45.000Z
|
2022-03-28T14:57:26.000Z
|
learnergy/models/__init__.py
|
anukaal/learnergy
|
704fc2b3fcb80df41ed28d750dc4e6475df23315
|
[
"Apache-2.0"
] | 5
|
2021-05-11T08:23:37.000Z
|
2022-01-20T12:50:59.000Z
|
learnergy/models/__init__.py
|
anukaal/learnergy
|
704fc2b3fcb80df41ed28d750dc4e6475df23315
|
[
"Apache-2.0"
] | 6
|
2020-04-15T00:23:13.000Z
|
2022-01-29T16:22:05.000Z
|
"""A package contaning subpackages of models for all common learnergy modules.
"""
| 27.666667
| 78
| 0.771084
| 11
| 83
| 5.818182
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144578
| 83
| 2
| 79
| 41.5
| 0.901408
| 0.903614
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
048c7f29024b46f7a834b0c18835c1a64a9b4cdf
| 1,988
|
py
|
Python
|
katas/romanNumeralsToInt/python/00001/romanNumeralsToIntTest.py
|
DerickMathew/kataLog
|
2fa999de9e41aa977dd74da89bc36c6d5703722b
|
[
"MIT"
] | null | null | null |
katas/romanNumeralsToInt/python/00001/romanNumeralsToIntTest.py
|
DerickMathew/kataLog
|
2fa999de9e41aa977dd74da89bc36c6d5703722b
|
[
"MIT"
] | null | null | null |
katas/romanNumeralsToInt/python/00001/romanNumeralsToIntTest.py
|
DerickMathew/kataLog
|
2fa999de9e41aa977dd74da89bc36c6d5703722b
|
[
"MIT"
] | null | null | null |
import unittest
from romanNumeralsToInt import RomanNumeralsToInt
class TestRomanNumeralsToInt(unittest.TestCase):
    """Unit tests for RomanNumeralsToInt.getNum (roman numeral -> int).

    Cleanup: removed the un-Pythonic trailing semicolons.
    """

    def setUp(self):
        # Fresh converter instance for every test.
        self.romanNumeralsToInt = RomanNumeralsToInt()

    def test_IReturns1(self):
        self.assertEqual(1, self.romanNumeralsToInt.getNum('I'))

    def test_VReturns5(self):
        self.assertEqual(5, self.romanNumeralsToInt.getNum('V'))

    def test_XReturns10(self):
        self.assertEqual(10, self.romanNumeralsToInt.getNum('X'))

    def test_LReturns50(self):
        self.assertEqual(50, self.romanNumeralsToInt.getNum('L'))

    def test_CReturns100(self):
        self.assertEqual(100, self.romanNumeralsToInt.getNum('C'))

    def test_DReturns500(self):
        self.assertEqual(500, self.romanNumeralsToInt.getNum('D'))

    def test_MReturns1000(self):
        self.assertEqual(1000, self.romanNumeralsToInt.getNum('M'))

    def test_IIReturns2(self):
        self.assertEqual(2, self.romanNumeralsToInt.getNum('II'))

    def test_IVReturns4(self):
        self.assertEqual(4, self.romanNumeralsToInt.getNum('IV'))

    def test_IXReturns9(self):
        self.assertEqual(9, self.romanNumeralsToInt.getNum('IX'))

    def test_XIXReturns19(self):
        self.assertEqual(19, self.romanNumeralsToInt.getNum('XIX'))

    def test_XLReturns40(self):
        self.assertEqual(40, self.romanNumeralsToInt.getNum('XL'))

    def test_IXLReturns39(self):
        self.assertEqual(39, self.romanNumeralsToInt.getNum('IXL'))

    def test_IVXLReturns34(self):
        self.assertEqual(34, self.romanNumeralsToInt.getNum('IVXL'))

    def test_IVXLCDMReturns344(self):
        # NOTE(review): the method name says 344 but the expected value is
        # 334 -- one of the two is wrong; confirm against the kata spec.
        self.assertEqual(334, self.romanNumeralsToInt.getNum('IVXLCDM'))

    def test_MIVXLCDMReturns1344(self):
        # NOTE(review): name says 1344, expected value is 1334 -- see above.
        self.assertEqual(1334, self.romanNumeralsToInt.getNum('MIVXLCDM'))

    def test_IMMMMReturns3999(self):
        self.assertEqual(3999, self.romanNumeralsToInt.getNum('IMMMM'))
if __name__ == '__main__':
    # Run the test suite when this module is executed directly.
    unittest.main()
| 32.064516
| 75
| 0.70674
| 208
| 1,988
| 6.634615
| 0.322115
| 0.104348
| 0.234058
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045728
| 0.163984
| 1,988
| 61
| 76
| 32.590164
| 0.784597
| 0
| 0
| 0
| 0
| 0
| 0.02666
| 0
| 0
| 0
| 0
| 0
| 0.414634
| 1
| 0.439024
| false
| 0
| 0.04878
| 0
| 0.512195
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
049a4c985d1df8df9dcea1d38f16fdd68583d681
| 16,261
|
py
|
Python
|
boto3_type_annotations_with_docs/boto3_type_annotations/elb/paginator.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 119
|
2018-12-01T18:20:57.000Z
|
2022-02-02T10:31:29.000Z
|
boto3_type_annotations_with_docs/boto3_type_annotations/elb/paginator.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 15
|
2018-11-16T00:16:44.000Z
|
2021-11-13T03:44:18.000Z
|
boto3_type_annotations_with_docs/boto3_type_annotations/elb/paginator.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 11
|
2019-05-06T05:26:51.000Z
|
2021-09-28T15:27:59.000Z
|
from typing import List
from typing import Dict
from botocore.paginate import Paginator
class DescribeAccountLimits(Paginator):
    """Paginator stub for
    `ElasticLoadBalancing.Client.describe_account_limits`."""

    def paginate(self, PaginationConfig: Dict = None) -> Dict:
        """Create an iterator that paginates through responses from
        :py:meth:`ElasticLoadBalancing.Client.describe_account_limits`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/DescribeAccountLimits>`_

        :type PaginationConfig: dict
        :param PaginationConfig: A dictionary that provides parameters to
            control pagination.
            - **MaxItems** *(integer) --* The total number of items to
              return. If more items are available than the value specified
              in max-items, a ``NextToken`` is provided in the output so
              you can resume pagination.
            - **PageSize** *(integer) --* The size of each page.
            - **StartingToken** *(string) --* A token to specify where to
              start paginating; the ``NextToken`` from a previous response.
        :rtype: dict
        :returns: Each page contains ``Limits`` -- a list of
            ``{'Name': string, 'Max': string}`` entries describing Elastic
            Load Balancing resource limits for your AWS account (e.g.
            classic-listeners, classic-load-balancers,
            classic-registered-instances) -- and ``NextToken``, a token to
            resume pagination.
        """
        pass
class DescribeLoadBalancers(Paginator):
    """Paginator stub for
    `ElasticLoadBalancing.Client.describe_load_balancers`."""

    def paginate(self, LoadBalancerNames: List = None,
                 PaginationConfig: Dict = None) -> Dict:
        """Create an iterator that paginates through responses from
        :py:meth:`ElasticLoadBalancing.Client.describe_load_balancers`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/DescribeLoadBalancers>`_

        :type LoadBalancerNames: list
        :param LoadBalancerNames: The names of the load balancers to
            describe (list of strings).
        :type PaginationConfig: dict
        :param PaginationConfig: A dictionary that provides parameters to
            control pagination.
            - **MaxItems** *(integer) --* The total number of items to
              return. If more items are available than the value specified
              in max-items, a ``NextToken`` is provided in the output so
              you can resume pagination.
            - **PageSize** *(integer) --* The size of each page.
            - **StartingToken** *(string) --* A token to specify where to
              start paginating; the ``NextToken`` from a previous response.
        :rtype: dict
        :returns: Each page contains ``LoadBalancerDescriptions`` -- one
            dict per load balancer with its name, DNS name, canonical
            hosted zone name/ID, listener descriptions (protocol, ports,
            SSL certificate, policy names), policies (app-cookie and
            LB-cookie stickiness policies, other policies), backend server
            descriptions, availability zones, subnets, VPC id, instances,
            health-check configuration (target, interval, timeout,
            unhealthy/healthy thresholds), source security group, security
            groups, creation time and scheme -- plus ``NextToken``, a token
            to resume pagination.
        """
        pass
| 54.750842
| 350
| 0.479983
| 1,375
| 16,261
| 5.669091
| 0.243636
| 0.033868
| 0.034638
| 0.020783
| 0.398717
| 0.332521
| 0.274278
| 0.240667
| 0.221681
| 0.203464
| 0
| 0.012969
| 0.43097
| 16,261
| 296
| 351
| 54.935811
| 0.829461
| 0.837833
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0.222222
| 0.333333
| 0
| 0.777778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 4
|
04af9881bc940f1429a5955671bd3fe556f5e016
| 158
|
py
|
Python
|
app/api/model/model.py
|
nccr-itmo/FEDOT.Web
|
9b6f7b66de277ea34d6d5ed621b99a3f938db61b
|
[
"BSD-3-Clause"
] | 23
|
2020-12-24T11:05:01.000Z
|
2022-03-31T20:29:12.000Z
|
app/api/model/model.py
|
nccr-itmo/FedotWeb
|
763fb1f39ad2b69104b6568e6f941c4c67762e34
|
[
"BSD-3-Clause"
] | 42
|
2021-01-11T09:38:31.000Z
|
2022-03-25T17:19:05.000Z
|
app/api/model/model.py
|
nccr-itmo/FedotWeb
|
763fb1f39ad2b69104b6568e6f941c4c67762e34
|
[
"BSD-3-Clause"
] | 5
|
2021-03-31T04:38:31.000Z
|
2022-03-31T20:29:26.000Z
|
# Lightweight value object describing a model entry exposed by the FEDOT.Web API.
from dataclasses import dataclass
@dataclass
class Model:
    # Identifier of the model (string-typed; '0' is a placeholder default).
    model_id: str = '0'
    # Short human-readable name shown in listings.
    label: str = 'model_label'
    # Longer free-text description of the model.
    description: str = 'model description'
| 17.555556
| 42
| 0.696203
| 19
| 158
| 5.684211
| 0.578947
| 0.148148
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008065
| 0.21519
| 158
| 8
| 43
| 19.75
| 0.862903
| 0
| 0
| 0
| 0
| 0
| 0.183544
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.166667
| 0
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
04b89a8bcffbcc59a021546d22fd3c78d08cb876
| 69,904
|
py
|
Python
|
notebooks/analysis/visualizations_revised_manuscript.py
|
UGentBiomath/COVID19-Model
|
ef04f6ec78a3c3dc9217cea68ae5276d159ba0c1
|
[
"MIT"
] | 22
|
2020-04-22T16:42:53.000Z
|
2021-05-06T08:44:02.000Z
|
notebooks/analysis/visualizations_revised_manuscript.py
|
UGentBiomath/COVID19-Model
|
ef04f6ec78a3c3dc9217cea68ae5276d159ba0c1
|
[
"MIT"
] | 90
|
2020-04-17T19:25:52.000Z
|
2022-03-25T12:34:39.000Z
|
notebooks/analysis/visualizations_revised_manuscript.py
|
UGentBiomath/COVID19-Model
|
ef04f6ec78a3c3dc9217cea68ae5276d159ba0c1
|
[
"MIT"
] | 26
|
2020-04-06T06:09:04.000Z
|
2020-11-21T22:40:40.000Z
|
"""
This script visualises the prevention parameters of the first and second COVID-19 waves.
Arguments:
----------
-f:
Filename of samples dictionary to be loaded. Default location is ~/data/interim/model_parameters/COVID19_SEIRD/calibrations/national/
Returns:
--------
Example use:
------------
"""
__author__ = "Tijs Alleman"
__copyright__ = "Copyright (c) 2020 by T.W. Alleman, BIOMATH, Ghent University. All Rights Reserved."
# ----------------------
# Load required packages
# ----------------------
import json
import argparse
import datetime
import random
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.transforms import offset_copy
from covid19model.models import models
from covid19model.data import mobility, sciensano, model_parameters
from covid19model.models.time_dependant_parameter_fncs import ramp_fun
from covid19model.visualization.output import _apply_tick_locator
from covid19model.visualization.utils import colorscale_okabe_ito, moving_avg
# covid 19 specific parameters
# Use the colour-blind-friendly Okabe-Ito palette for all line colours.
plt.rcParams.update({
    "axes.prop_cycle": plt.cycler('color',
                                  list(colorscale_okabe_ito.values())),
})
# -----------------------
# Handle script arguments
# -----------------------
parser = argparse.ArgumentParser()
# -n: number of posterior samples propagated through the model for the fit figures.
parser.add_argument("-n", "--n_samples", help="Number of samples used to visualise model fit", default=100, type=int)
# -k: observation-noise draws per model sample (np.random.poisson in add_poisson,
# despite the word "binomial" in the help text).
parser.add_argument("-k", "--n_draws_per_sample", help="Number of binomial draws per sample drawn used to visualize model fit", default=1, type=int)
args = parser.parse_args()
#################################################
## PART 1: Comparison of total number of cases ##
#################################################
# NOTE(review): df_sciensano is never defined in this script — presumably loaded
# via the sciensano module imported above; confirm before running standalone.
# Smoothed case counts per age band, each rescaled to 100% at 2020-11-21.
youth = moving_avg((df_sciensano['C_0_9']+df_sciensano['C_10_19']).to_frame())
cases_youth_nov21 = youth[youth.index == pd.to_datetime('2020-11-21')].values
cases_youth_rel = moving_avg((df_sciensano['C_0_9']+df_sciensano['C_10_19']).to_frame())/cases_youth_nov21*100
work = moving_avg((df_sciensano['C_20_29']+df_sciensano['C_30_39']+df_sciensano['C_40_49']+df_sciensano['C_50_59']).to_frame())
cases_work_nov21 = work[work.index == pd.to_datetime('2020-11-21')].values
cases_work_rel = work/cases_work_nov21*100
old = moving_avg((df_sciensano['C_60_69']+df_sciensano['C_70_79']+df_sciensano['C_80_89']+df_sciensano['C_90+']).to_frame())
cases_old_nov21 = old[old.index == pd.to_datetime('2020-11-21')].values
cases_old_rel = old/cases_old_nov21*100
fig,ax=plt.subplots(figsize=(12,4.3))
ax.plot(df_sciensano.index, cases_youth_rel, linewidth=1.5, color='black')
ax.plot(df_sciensano.index, cases_work_rel, linewidth=1.5, color='orange')
ax.plot(df_sciensano.index, cases_old_rel, linewidth=1.5, color='blue')
# Shaded spans: school closure periods (Christmas break extension and exam/spring period).
ax.axvspan(pd.to_datetime('2020-11-21'), pd.to_datetime('2020-12-18'), color='black', alpha=0.2)
ax.axvspan(pd.to_datetime('2021-01-09'), pd.to_datetime('2021-02-15'), color='black', alpha=0.2)
ax.set_xlim([pd.to_datetime('2020-11-05'), pd.to_datetime('2021-02-01')])
ax.set_ylim([0,320])
# NOTE(review): label says November 16th but the normalization date above is
# 2020-11-21 — confirm which is intended.
ax.set_ylabel('Relative number of cases as compared\n to November 16th, 2020 (%)')
#ax.set_xticks([pd.to_datetime('2020-11-16'), pd.to_datetime('2020-12-18'), pd.to_datetime('2021-01-04')])
ax.legend(['$[0,20[$','$[20,60[$','$[60,\infty[$'], bbox_to_anchor=(1.05, 1), loc='upper left')
ax = _apply_tick_locator(ax)
ax.set_yticks([0,100,200,300])
ax.grid(False)
plt.tight_layout()
plt.show()
def crosscorr(datax, datay, lag=0):
    """Return the lag-N cross correlation of two series.

    Parameters
    ----------
    datax, datay : pandas.Series objects of equal length
    lag : int, default 0
        Number of periods ``datay`` is shifted forward before correlating.

    Returns
    -------
    float
        Pearson correlation between ``datax`` and the shifted ``datay``.
    """
    shifted = datay.shift(lag)
    return datax.corr(shifted)
# Cross-correlate the relative case curves of the three age bands over lags of
# -15..+7 days to look for lead/lag structure between the groups.
lag_series = range(-15,8)
covariance_youth_work = []
covariance_youth_old = []
covariance_work_old = []
for lag in lag_series:
    covariance_youth_work.append(crosscorr(cases_youth_rel[pd.to_datetime('2020-11-02'):pd.to_datetime('2021-02-01')].squeeze(),cases_work_rel[pd.to_datetime('2020-11-02'):pd.to_datetime('2021-02-01')].squeeze(),lag=lag))
    covariance_youth_old.append(crosscorr(cases_youth_rel[pd.to_datetime('2020-11-02'):pd.to_datetime('2021-02-01')].squeeze(),cases_old_rel[pd.to_datetime('2020-11-02'):pd.to_datetime('2021-02-01')].squeeze(),lag=lag))
    covariance_work_old.append(crosscorr(cases_work_rel[pd.to_datetime('2020-11-02'):pd.to_datetime('2021-02-01')].squeeze(),cases_old_rel[pd.to_datetime('2020-11-02'):pd.to_datetime('2021-02-01')].squeeze(),lag=lag))
covariances = [covariance_youth_work, covariance_youth_old, covariance_work_old]
# For each pair: find the lag with maximum correlation and print whether the
# peak correlation exceeds a rough significance threshold.
for i in range(3):
    n = len(covariances[i])
    k = max(covariances[i])
    idx=np.argmax(covariances[i])
    tau = lag_series[idx]
    # NOTE(review): threshold uses the correlation value k inside the sqrt;
    # the usual rule of thumb is 2/sqrt(n - |lag|) — confirm this is intended.
    sig = 2/np.sqrt(n-abs(k))
    if k >= sig:
        print(tau, k, True)
    else:
        print(tau, k, False)
# Two-panel figure: relative case curves (top) and lag-correlations (bottom).
fig,(ax1,ax2)=plt.subplots(nrows=2,ncols=1,figsize=(15,10))
# First part
ax1.plot(df_sciensano.index, cases_youth_rel, linewidth=1.5, color='black')
ax1.plot(df_sciensano.index, cases_work_rel, linewidth=1.5, color='orange')
ax1.plot(df_sciensano.index, cases_old_rel, linewidth=1.5, color='blue')
ax1.axvspan(pd.to_datetime('2020-11-21'), pd.to_datetime('2020-12-18'), color='black', alpha=0.2)
ax1.axvspan(pd.to_datetime('2021-01-09'), pd.to_datetime('2021-02-15'), color='black', alpha=0.2)
ax1.set_xlim([pd.to_datetime('2020-11-05'), pd.to_datetime('2021-02-01')])
ax1.set_ylim([0,300])
ax1.set_ylabel('Relative number of cases as compared\n to November 16th, 2020 (%)')
#ax.set_xticks([pd.to_datetime('2020-11-16'), pd.to_datetime('2020-12-18'), pd.to_datetime('2021-01-04')])
ax1.legend(['$[0,20[$','$[20,60[$','$[60,\infty[$'], bbox_to_anchor=(1.05, 1), loc='upper left')
ax1 = _apply_tick_locator(ax1)
# Second part
ax2.scatter(lag_series, covariance_youth_work, color='black',alpha=0.6,linestyle='None',facecolors='none', s=30, linewidth=1)
ax2.scatter(lag_series, covariance_youth_old, color='black',alpha=0.6, linestyle='None',facecolors='none', s=30, linewidth=1, marker='s')
ax2.scatter(lag_series, covariance_work_old, color='black',alpha=0.6, linestyle='None',facecolors='none', s=30, linewidth=1, marker='D')
ax2.legend(['$[0,20[$ vs. $[20,60[$', '$[0,20[$ vs. $[60,\infty[$', '$[20,60[$ vs. $[60, \infty[$'], bbox_to_anchor=(1.05, 1), loc='upper left')
ax2.plot(lag_series, covariance_youth_work, color='black', linestyle='--', linewidth=1)
ax2.plot(lag_series, covariance_youth_old, color='black',linestyle='--', linewidth=1)
ax2.plot(lag_series, covariance_work_old, color='black',linestyle='--', linewidth=1)
ax2.axvline(0,linewidth=1, color='black')
ax2.grid(False)
ax2.set_ylabel('lag-$\\tau$ cross correlation (-)')
ax2.set_xlabel('$\\tau$ (days)')
plt.tight_layout()
plt.show()
# Standalone version of the lag-correlation panel only.
fig,ax = plt.subplots(figsize=(15,5))
ax.scatter(lag_series, covariance_youth_work, color='black',alpha=0.6,linestyle='None',facecolors='none', s=30, linewidth=1)
ax.scatter(lag_series, covariance_youth_old, color='black',alpha=0.6, linestyle='None',facecolors='none', s=30, linewidth=1, marker='s')
ax.scatter(lag_series, covariance_work_old, color='black',alpha=0.6, linestyle='None',facecolors='none', s=30, linewidth=1, marker='D')
ax.legend(['$[0,20[$ vs. $[20,60[$', '$[0,20[$ vs. $[60,\infty[$', '$[20,60[$ vs. $[60, \infty[$'], bbox_to_anchor=(1.05, 1), loc='upper left')
ax.plot(lag_series, covariance_youth_work, color='black', linestyle='--', linewidth=1)
ax.plot(lag_series, covariance_youth_old, color='black',linestyle='--', linewidth=1)
ax.plot(lag_series, covariance_work_old, color='black',linestyle='--', linewidth=1)
ax.axvline(0,linewidth=1, color='black')
ax.grid(False)
ax.set_ylabel('lag-$\\tau$ cross correlation (-)')
ax.set_xlabel('$\\tau$ (days)')
plt.tight_layout()
plt.show()
#####################################################
## PART 1: Calibration robustness figure of WAVE 1 ##
#####################################################
# NOTE(review): header says "PART 1" but this is the second analysis section.
n_calibrations = 6   # six calibrations on increasingly long data windows
n_prevention = 3     # prevention parameters shown: home, work, rest
conf_int = 0.05      # two-sided confidence level for the fit envelopes
# -------------------------
# Load samples dictionaries
# -------------------------
# One posterior-samples dictionary per calibration end date.
# BUGFIX: json.load(open(...)) leaked the file handles; use context managers.
_wave1_sample_paths = [
    '../../data/interim/model_parameters/COVID19_SEIRD/calibrations/national/BE_WAVE1_BETA_COMPLIANCE_2021-02-15.json',  # 2020-04-04
    '../../data/interim/model_parameters/COVID19_SEIRD/calibrations/national/BE_WAVE1_BETA_COMPLIANCE_2021-02-13.json',  # 2020-04-15
    '../../data/interim/model_parameters/COVID19_SEIRD/calibrations/national/BE_WAVE1_BETA_COMPLIANCE_2021-02-23.json',  # 2020-05-01
    '../../data/interim/model_parameters/COVID19_SEIRD/calibrations/national/BE_WAVE1_BETA_COMPLIANCE_2021-02-18.json',  # 2020-05-15
    '../../data/interim/model_parameters/COVID19_SEIRD/calibrations/national/BE_WAVE1_BETA_COMPLIANCE_2021-02-21.json',  # 2020-06-01
    '../../data/interim/model_parameters/COVID19_SEIRD/calibrations/national/BE_WAVE1_BETA_COMPLIANCE_2021-02-22.json',  # 2020-07-01
]
samples_dicts = []
for _path in _wave1_sample_paths:
    with open(_path) as _fh:
        samples_dicts.append(json.load(_fh))
warmup = int(samples_dicts[0]['warmup'])
# Start of data collection
start_data = '2020-03-15'
# First datapoint used in inference
start_calibration = '2020-03-15'
# Last datapoint used in inference
end_calibrations = ['2020-04-04', '2020-04-15', '2020-05-01', '2020-05-15', '2020-06-01', '2020-07-01']
# Start- and enddate of plotfit
start_sim = start_calibration
end_sim = '2020-07-14'
# ---------
# Load data
# ---------
# Contact matrices
initN, Nc_home, Nc_work, Nc_schools, Nc_transport, Nc_leisure, Nc_others, Nc_total = model_parameters.get_interaction_matrices(dataset='willem_2012')
Nc_all = {'total': Nc_total, 'home':Nc_home, 'work': Nc_work, 'schools': Nc_schools, 'transport': Nc_transport, 'leisure': Nc_leisure, 'others': Nc_others}
levels = initN.size
# Google Mobility data
df_google = mobility.get_google_mobility_data(update=False)
# ---------------------------------
# Time-dependant parameter function
# ---------------------------------
# Extract build contact matrix function
from covid19model.models.time_dependant_parameter_fncs import make_contact_matrix_function, ramp_fun
contact_matrix_4prev, all_contact, all_contact_no_schools = make_contact_matrix_function(df_google, Nc_all)
def policies_wave1_4prev(t, states, param, l, tau, prev_schools, prev_work, prev_rest, prev_home):
    """Return the time-dependent social contact matrix Nc(t).

    Implements the Belgian policy timeline: pre-pandemic contacts before the
    first lockdown, a compliance ramp of length ``l`` days starting ``tau``
    days after lockdown, and prevention-scaled contacts afterwards with
    schools toggled open/closed per holiday and lockdown period.

    Parameters
    ----------
    t : datetime-like
        Current simulation time.
    states, param :
        Unused in the body; presumably required by the model's
        time-dependent-parameter calling convention — confirm.
    l, tau : float
        Compliance ramp length and delay, in days.
    prev_schools, prev_work, prev_rest, prev_home : float
        Prevention (effectiveness) parameters per contact category.
    """
    # Convert tau and l to date offsets
    tau_days = pd.Timedelta(tau, unit='D')
    l_days = pd.Timedelta(l, unit='D')
    # Define key dates of first wave
    t1 = pd.Timestamp('2020-03-15') # start of lockdown
    t2 = pd.Timestamp('2020-05-15') # gradual re-opening of schools (assume 50% of nominal scenario)
    t3 = pd.Timestamp('2020-07-01') # start of summer holidays
    t4 = pd.Timestamp('2020-09-01') # end of summer holidays
    # Define key dates of second wave
    t5 = pd.Timestamp('2020-10-19') # lockdown (1)
    t6 = pd.Timestamp('2020-11-02') # lockdown (2)
    t7 = pd.Timestamp('2020-11-16') # schools re-open
    t8 = pd.Timestamp('2020-12-18') # Christmas holiday starts
    t9 = pd.Timestamp('2021-01-04') # Christmas holiday ends
    t10 = pd.Timestamp('2021-02-15') # Spring break starts
    t11 = pd.Timestamp('2021-02-21') # Spring break ends
    t12 = pd.Timestamp('2021-04-05') # Easter holiday starts
    t13 = pd.Timestamp('2021-04-18') # Easter holiday ends
    # ------
    # WAVE 1
    # ------
    if t <= t1:
        t = pd.Timestamp(t.date())
        return all_contact(t)
    elif t1 < t <= t1 + tau_days:
        # BUGFIX: was `t1 < t < t1 + tau_days`, which left t == t1 + tau_days
        # unmatched by any wave-1 branch (it fell through to the final else).
        t = pd.Timestamp(t.date())
        return all_contact(t)
    elif t1 + tau_days < t <= t1 + tau_days + l_days:
        # Compliance ramp from pre-pandemic to prevention-scaled contacts.
        t = pd.Timestamp(t.date())
        policy_old = all_contact(t)
        policy_new = contact_matrix_4prev(t, prev_home, prev_schools, prev_work, prev_rest,
                                          school=0)
        return ramp_fun(policy_old, policy_new, t, tau_days, l, t1)
    elif t1 + tau_days + l_days < t <= t2:
        t = pd.Timestamp(t.date())
        return contact_matrix_4prev(t, prev_home, prev_schools, prev_work, prev_rest,
                                    school=0)
    elif t2 < t <= t3:
        t = pd.Timestamp(t.date())
        return contact_matrix_4prev(t, prev_home, prev_schools, prev_work, prev_rest,
                                    school=0)
    elif t3 < t <= t4:
        t = pd.Timestamp(t.date())
        return contact_matrix_4prev(t, prev_home, prev_schools, prev_work, prev_rest,
                                    school=0)
    # ------
    # WAVE 2
    # ------
    elif t4 < t <= t5 + tau_days:
        return contact_matrix_4prev(t, school=1)
    elif t5 + tau_days < t <= t5 + tau_days + l_days:
        policy_old = contact_matrix_4prev(t, school=1)
        # BUGFIX: prev_home was missing here, so prev_schools/prev_work/prev_rest
        # each shifted one positional slot (cf. every other call site).
        policy_new = contact_matrix_4prev(t, prev_home, prev_schools, prev_work, prev_rest,
                                          school=1)
        return ramp_fun(policy_old, policy_new, t, tau_days, l, t5)
    elif t5 + tau_days + l_days < t <= t6:
        return contact_matrix_4prev(t, prev_home, prev_schools, prev_work, prev_rest,
                                    school=1)
    elif t6 < t <= t7:
        return contact_matrix_4prev(t, prev_home, prev_schools, prev_work, prev_rest,
                                    school=0)
    elif t7 < t <= t8:
        return contact_matrix_4prev(t, prev_home, prev_schools, prev_work, prev_rest,
                                    school=1)
    elif t8 < t <= t9:
        return contact_matrix_4prev(t, prev_home, prev_schools, prev_work, prev_rest,
                                    school=0)
    elif t9 < t <= t10:
        return contact_matrix_4prev(t, prev_home, prev_schools, prev_work, prev_rest,
                                    school=1)
    elif t10 < t <= t11:
        return contact_matrix_4prev(t, prev_home, prev_schools, prev_work, prev_rest,
                                    school=0)
    elif t11 < t <= t12:
        return contact_matrix_4prev(t, prev_home, prev_schools, prev_work, prev_rest,
                                    school=1)
    elif t12 < t <= t13:
        return contact_matrix_4prev(t, prev_home, prev_schools, prev_work, prev_rest,
                                    school=0)
    else:
        t = pd.Timestamp(t.date())
        return contact_matrix_4prev(t, prev_home, prev_schools, prev_work, prev_rest,
                                    school=1)
# --------------------
# Initialize the model
# --------------------
# Load the model parameters dictionary
params = model_parameters.get_COVID19_SEIRD_parameters()
# Add the time-dependant parameter function arguments
# Placeholder values; draw_fcn overwrites l/tau/prev_* with posterior samples.
params.update({'l': 21, 'tau': 21, 'prev_schools': 0, 'prev_work': 0.5, 'prev_rest': 0.5, 'prev_home': 0.5})
# Define initial states
# Susceptible population plus one exposed individual per age group (9 groups).
initial_states = {"S": initN, "E": np.ones(9)}
# Initialize model
model = models.COVID19_SEIRD(initial_states, params,
                             time_dependent_parameters={'Nc': policies_wave1_4prev})
def draw_fcn(param_dict, samples_dict):
    """Overwrite ``param_dict`` with one joint posterior draw.

    A single index is drawn uniformly so that all parameters come from the
    same MCMC sample; ``sigma`` is derived so that sigma + omega = 5.2 days.
    """
    # Sample first calibration (transmission parameters)
    idx, param_dict['beta'] = random.choice(list(enumerate(samples_dict['beta'])))
    for key in ('da', 'omega'):
        param_dict[key] = samples_dict[key][idx]
    param_dict['sigma'] = 5.2 - samples_dict['omega'][idx]
    # Sample second calibration (compliance and prevention parameters)
    for key in ('l', 'tau', 'prev_home', 'prev_work', 'prev_rest'):
        param_dict[key] = samples_dict[key][idx]
    return param_dict
# -------------------------------------
# Define necessary function to plot fit
# -------------------------------------
# Lower/upper quantile levels of the two-sided (1 - conf_int) envelope.
LL = conf_int/2
UL = 1-conf_int/2
def add_poisson(state_name, output, n_samples, n_draws_per_sample, UL=1-0.05*0.5, LL=0.05*0.5):
    """Add Poisson observation noise to a simulated model state.

    Parameters
    ----------
    state_name : str
        State variable to extract from ``output`` (e.g. 'H_in').
    output : simulation output (xarray-like)
        Must support ``output[state_name].sum(dim="Nc").values``; the result
        is assumed to be (n_trajectories, n_timesteps) — confirm upstream.
    n_samples : int
        Number of model trajectories in ``output``.
    n_draws_per_sample : int
        Poisson draws generated per trajectory.
    UL, LL : float
        Upper/lower quantile levels of the returned envelope.

    Returns
    -------
    mean, median, LL, UL : np.ndarray
        Per-timestep statistics over all noisy draws.
    """
    trajectories = output[state_name].sum(dim="Nc").values
    n_steps = trajectories.shape[1]
    noisy = np.zeros((n_steps, n_draws_per_sample * n_samples))
    # One Poisson draw block per simulated trajectory
    for k in range(trajectories.shape[0]):
        block = np.random.poisson(np.expand_dims(trajectories[k, :], axis=1),
                                  size=(n_steps, n_draws_per_sample))
        noisy[:, k * n_draws_per_sample:(k + 1) * n_draws_per_sample] = block
    return (np.mean(noisy, axis=1),
            np.median(noisy, axis=1),
            np.quantile(noisy, q=LL, axis=1),
            np.quantile(noisy, q=UL, axis=1))
def plot_fit(ax, state_name, state_label, data_df, time, vector_mean, vector_LL, vector_UL, start_calibration='2020-03-15', end_calibration='2020-07-01', end_sim='2020-09-01'):
    """Plot the simulated envelope and mean against observed data on ``ax``.

    Data inside the calibration window are drawn in black, data after it
    (out-of-sample) in red. Returns the modified axis.
    """
    # Model envelope and mean trajectory
    ax.fill_between(pd.to_datetime(time), vector_LL, vector_UL, alpha=0.30, color='blue')
    ax.plot(time, vector_mean, '--', color='blue', linewidth=1.5)
    # Observations used for calibration (black)
    calib = data_df[start_calibration:end_calibration]
    ax.scatter(calib.index, calib[state_name], color='black', alpha=0.5, linestyle='None', facecolors='none', s=30, linewidth=1)
    # Observations after the calibration window (red)
    post_start = pd.to_datetime(end_calibration) + datetime.timedelta(days=1)
    post = data_df[post_start:end_sim]
    ax.scatter(post.index, post[state_name], color='red', alpha=0.5, linestyle='None', facecolors='none', s=30, linewidth=1)
    ax = _apply_tick_locator(ax)
    ax.set_xlim(start_calibration, end_sim)
    ax.set_ylabel(state_label)
    return ax
# -------------------------------
# Visualize prevention parameters
# -------------------------------
# Method 1: all in on page
# Grid: one row per calibration; first n_prevention columns show posterior
# histograms, the last (wider) column shows the corresponding model fit.
fig,axes= plt.subplots(nrows=n_calibrations,ncols=n_prevention+1, figsize=(13,8.27), gridspec_kw={'width_ratios': [1, 1, 1, 3]})
prevention_labels = ['$\Omega_{home}$ (-)', '$\Omega_{work}$ (-)', '$\Omega_{rest}$ (-)']
prevention_names = ['prev_home', 'prev_work', 'prev_rest']
row_labels = ['(a)', '(b)', '(c)', '(d)', '(e)', '(f)']
pad = 5 # in points
for i in range(n_calibrations):
    print('Simulation no. {} out of {}'.format(i+1,n_calibrations))
    # Propagate n_samples posterior draws through the model for calibration i.
    out = model.sim(end_sim,start_date=start_sim,warmup=warmup,N=args.n_samples,draw_fcn=draw_fcn,samples=samples_dicts[i])
    vector_mean, vector_median, vector_LL, vector_UL = add_poisson('H_in', out, args.n_samples, args.n_draws_per_sample)
    for j in range(n_prevention+1):
        if j != n_prevention:
            # Posterior histogram of prevention parameter j with its mean marked.
            n, bins, patches = axes[i,j].hist(samples_dicts[i][prevention_names[j]], color='blue', bins=15, density=True, alpha=0.6)
            axes[i,j].axvline(np.mean(samples_dicts[i][prevention_names[j]]), ymin=0, ymax=1, linestyle='--', color='black')
            max_n = 1.05*max(n)
            axes[i,j].annotate('$\hat{\mu} = $'+"{:.2f}".format(np.mean(samples_dicts[i][prevention_names[j]])), xy=(np.mean(samples_dicts[i][prevention_names[j]]),max_n),
                               rotation=0,va='bottom', ha='center',annotation_clip=False,fontsize=10)
            if j == 0:
                # Row label '(a)'..'(f)' to the left of the first column.
                axes[i,j].annotate(row_labels[i], xy=(0, 0.5), xytext=(-axes[i,j].yaxis.labelpad - pad, 0),
                                   xycoords=axes[i,j].yaxis.label, textcoords='offset points',
                                   ha='right', va='center')
            axes[i,j].set_xlim([0,1])
            axes[i,j].set_xticks([0.0, 0.5, 1.0])
            axes[i,j].set_yticks([])
            axes[i,j].grid(False)
            if i == n_calibrations-1:
                axes[i,j].set_xlabel(prevention_labels[j])
            axes[i,j].spines['left'].set_visible(False)
        else:
            # Last column: hospitalizations fit for this calibration window.
            axes[i,j] = plot_fit(axes[i,j], 'H_in','$H_{in}$ (-)', df_sciensano, out['time'].values, vector_median, vector_LL, vector_UL, end_calibration=end_calibrations[i], end_sim=end_sim)
            axes[i,j].xaxis.set_major_locator(plt.MaxNLocator(3))
            axes[i,j].set_yticks([0,300, 600])
            axes[i,j].set_ylim([0,700])
plt.tight_layout()
plt.show()
# Keep the last (longest-window) WAVE-1 results for the later combined plots.
model_results_WAVE1 = {'time': out['time'].values, 'vector_mean': vector_mean, 'vector_median': vector_median, 'vector_LL': vector_LL, 'vector_UL': vector_UL}
#####################################
## PART 2: Hospitals vs. R0 figure ##
#####################################
def compute_R0(initN, Nc, samples_dict, model_parameters):
    """Compute the basic reproduction number per age group and overall.

    Parameters
    ----------
    initN : np.ndarray
        Population size per age group.
    Nc : np.ndarray
        Contact matrix; only its row sums (total contacts per age group) are used.
    samples_dict : dict
        Posterior samples with keys 'beta', 'da', 'omega' (equal-length lists).
    model_parameters : dict
        Must contain 'a' (per-age subclinical fraction-like factor — confirm
        its exact meaning against the model definition).

    Returns
    -------
    R0 : np.ndarray, shape (n_ages, n_samples)
        Per-age, per-sample reproduction number.
    R0_overall : float
        Population-weighted R0 averaged over samples.
    """
    N = initN.size
    sample_size = len(samples_dict['beta'])
    R0 = np.zeros([N, sample_size])
    R0_norm = np.zeros([N, sample_size])
    # Hoisted out of the double loop: total contacts per age group (was
    # recomputed N*sample_size times). Also removed the unused R0_age.
    contacts = np.sum(Nc, axis=1)
    for i in range(N):
        for j in range(sample_size):
            R0[i, j] = (model_parameters['a'][i] * samples_dict['da'][j] + samples_dict['omega'][j]) * samples_dict['beta'][j] * contacts[i]
        # Weight each age group by its population share
        R0_norm[i, :] = R0[i, :] * (initN[i] / sum(initN))
    R0_overall = np.mean(np.sum(R0_norm, axis=0))
    return R0, R0_overall
# R0 from the longest WAVE-1 calibration; total contact matrix.
R0, R0_overall = compute_R0(initN, Nc_all['total'], samples_dicts[-1], params)
# NOTE(review): `out` here is reused from the last iteration of the WAVE-1
# figure loop above — confirm that is the intended simulation.
# Cumulative hospitalizations per draw/age group; final-time fractions.
cumsum = out['H_in'].cumsum(dim='time').values
cumsum_mean = np.mean(cumsum[:,:,-1], axis=0)/sum(np.mean(cumsum[:,:,-1],axis=0))
cumsum_LL = cumsum_mean - np.quantile(cumsum[:,:,-1], q = 0.05/2, axis=0)/sum(np.mean(cumsum[:,:,-1],axis=0))
cumsum_UL = np.quantile(cumsum[:,:,-1], q = 1-0.05/2, axis=0)/sum(np.mean(cumsum[:,:,-1],axis=0)) - cumsum_mean
cumsum = (out['H_in'].mean(dim="draws")).cumsum(dim='time').values
fraction = cumsum[:,-1]/sum(cumsum[:,-1])
# Bar chart: per-age R0 (left axis) vs. fraction of hospitalizations (right axis).
fig,ax = plt.subplots(figsize=(12,4))
bars = ('$[0, 10[$', '$[10, 20[$', '$[20, 30[$', '$[30, 40[$', '$[40, 50[$', '$[50, 60[$', '$[60, 70[$', '$[70, 80[$', '$[80, \infty[$')
x_pos = np.arange(len(bars))
#ax.bar(x_pos, np.mean(R0,axis=1), yerr = [np.mean(R0,axis=1) - np.quantile(R0,q=0.05/2,axis=1), np.quantile(R0,q=1-0.05/2,axis=1) - np.mean(R0,axis=1)], width=1, color='b', alpha=0.5, capsize=10)
ax.bar(x_pos, np.mean(R0,axis=1), width=1, color='b', alpha=0.8)
ax.set_ylabel('$R_0$ (-)')
ax.grid(False)
ax2 = ax.twinx()
#ax2.bar(x_pos, cumsum_mean, yerr = [cumsum_LL, cumsum_UL], width=1,color='orange',alpha=0.9,hatch="/", capsize=10)
ax2.bar(x_pos, cumsum_mean, width=1,color='orange',alpha=0.6,hatch="/")
ax2.set_ylabel('Fraction of hospitalizations (-)')
ax2.grid(False)
plt.xticks(x_pos, bars)
plt.tight_layout()
plt.show()
#########################################
## Part 3: Robustness figure of WAVE 2 ##
#########################################
n_prevention = 4   # WAVE 2 adds the schools prevention parameter
conf_int = 0.05    # two-sided confidence level for the fit envelopes
# -------------------------
# Load samples dictionaries
# -------------------------
# One posterior-samples dictionary per WAVE-2 calibration end date.
# BUGFIX: json.load(open(...)) leaked the file handles; use context managers.
_wave2_sample_paths = [
    '../../data/interim/model_parameters/COVID19_SEIRD/calibrations/national/BE_WAVE2_BETA_COMPLIANCE_2021-03-06.json',  # 2020-11-04
    '../../data/interim/model_parameters/COVID19_SEIRD/calibrations/national/BE_WAVE2_BETA_COMPLIANCE_2021-03-05.json',  # 2020-11-16
    '../../data/interim/model_parameters/COVID19_SEIRD/calibrations/national/BE_WAVE2_BETA_COMPLIANCE_2021-03-04.json',  # 2020-12-24
    '../../data/interim/model_parameters/COVID19_SEIRD/calibrations/national/BE_WAVE2_BETA_COMPLIANCE_2021-03-02.json',  # 2021-02-01
]
samples_dicts = []
for _path in _wave2_sample_paths:
    with open(_path) as _fh:
        samples_dicts.append(json.load(_fh))
n_calibrations = len(samples_dicts)
warmup = int(samples_dicts[0]['warmup'])
# Start of data collection
start_data = '2020-03-15'
# First datapoint used in inference
start_calibration = '2020-09-01'
# Last datapoint used in inference
# NOTE(review): first entry '2020-11-06' disagrees with the '2020-11-04'
# comment on the corresponding samples file above — confirm which is correct.
end_calibrations = ['2020-11-06','2020-11-16','2020-12-24','2021-02-01']
# Start- and enddate of plotfit
start_sim = start_calibration
end_sim = '2021-02-14'
# --------------------
# Initialize the model
# --------------------
# Load the model parameters dictionary
params = model_parameters.get_COVID19_SEIRD_parameters()
# Add the time-dependant parameter function arguments
params.update({'l': 21, 'tau': 21, 'prev_schools': 0, 'prev_work': 0.5, 'prev_rest': 0.5, 'prev_home': 0.5})
# Model initial condition on September 1st
warmup = 0  # NOTE(review): overwrites the warmup read from the samples file above
with open('../../data/interim/model_parameters/COVID19_SEIRD/calibrations/national/initial_states_2020-09-01.json', 'r') as fp:
    initial_states = json.load(fp)
# Add vaccination-related states (absent from the stored initial condition)
initial_states.update({
    'VE': np.zeros(9),
    'V': np.zeros(9),
    'V_new': np.zeros(9),
    'alpha': np.zeros(9)
})
#initial_states['ICU_tot'] = initial_states.pop('ICU')
# Initialize model
model = models.COVID19_SEIRD(initial_states, params,
                             time_dependent_parameters={'Nc': policies_wave1_4prev})
# ------------------------
# Define sampling function
# ------------------------
def draw_fcn(param_dict, samples_dict):
    """Overwrite ``param_dict`` with one joint posterior draw (WAVE 2).

    Identical to the WAVE-1 version except it also samples ``prev_schools``.
    A single index keeps all parameters from the same MCMC sample; ``sigma``
    is derived so that sigma + omega = 5.2 days.
    """
    # Sample first calibration (transmission parameters)
    idx, param_dict['beta'] = random.choice(list(enumerate(samples_dict['beta'])))
    for key in ('da', 'omega'):
        param_dict[key] = samples_dict[key][idx]
    param_dict['sigma'] = 5.2 - samples_dict['omega'][idx]
    # Sample second calibration (compliance and prevention parameters)
    for key in ('l', 'tau', 'prev_schools', 'prev_home', 'prev_work', 'prev_rest'):
        param_dict[key] = samples_dict[key][idx]
    return param_dict
# -------------------------------
# Visualize prevention parameters
# -------------------------------
# Method 1: all in on page
# Same layout as the WAVE-1 figure, with an extra schools column.
fig,axes= plt.subplots(nrows=n_calibrations,ncols=n_prevention+1, figsize=(13,8.27), gridspec_kw={'width_ratios': [1, 1, 1, 1, 6]})
prevention_labels = ['$\Omega_{home}$ (-)', '$\Omega_{schools}$ (-)', '$\Omega_{work}$ (-)', '$\Omega_{rest}$ (-)']
prevention_names = ['prev_home', 'prev_schools', 'prev_work', 'prev_rest']
row_labels = ['(a)', '(b)', '(c)', '(d)', '(e)', '(f)']
pad = 5 # in points
for i in range(n_calibrations):
    print('Simulation no. {} out of {}'.format(i+1,n_calibrations))
    # Propagate n_samples posterior draws through the model for calibration i.
    out = model.sim(end_sim,start_date=start_sim,warmup=warmup,N=args.n_samples,draw_fcn=draw_fcn,samples=samples_dicts[i])
    vector_mean, vector_median, vector_LL, vector_UL = add_poisson('H_in', out, args.n_samples, args.n_draws_per_sample)
    for j in range(n_prevention+1):
        if j != n_prevention:
            # Posterior histogram of prevention parameter j with its mean marked.
            n, bins, patches = axes[i,j].hist(samples_dicts[i][prevention_names[j]], color='blue', bins=15, density=True, alpha=0.6)
            axes[i,j].axvline(np.mean(samples_dicts[i][prevention_names[j]]), ymin=0, ymax=1, linestyle='--', color='black')
            max_n = 1.05*max(n)
            axes[i,j].annotate('$\hat{\mu} = $'+"{:.2f}".format(np.mean(samples_dicts[i][prevention_names[j]])), xy=(np.mean(samples_dicts[i][prevention_names[j]]),max_n),
                               rotation=0,va='bottom', ha='center',annotation_clip=False,fontsize=10)
            if j == 0:
                # Row label '(a)'.. to the left of the first column.
                axes[i,j].annotate(row_labels[i], xy=(0, 0.5), xytext=(-axes[i,j].yaxis.labelpad - pad, 0),
                                   xycoords=axes[i,j].yaxis.label, textcoords='offset points',
                                   ha='right', va='center')
            axes[i,j].set_xlim([0,1])
            axes[i,j].set_xticks([0.0, 1.0])
            axes[i,j].set_yticks([])
            axes[i,j].grid(False)
            if i == n_calibrations-1:
                axes[i,j].set_xlabel(prevention_labels[j])
            axes[i,j].spines['left'].set_visible(False)
        else:
            # Last column: hospitalizations fit for this calibration window.
            axes[i,j] = plot_fit(axes[i,j], 'H_in','$H_{in}$ (-)', df_sciensano, out['time'].values, vector_median, vector_LL, vector_UL, start_calibration = start_calibration, end_calibration=end_calibrations[i], end_sim=end_sim)
            axes[i,j].xaxis.set_major_locator(plt.MaxNLocator(3))
            axes[i,j].set_yticks([0,250, 500, 750])
            axes[i,j].set_ylim([0,850])
plt.tight_layout()
plt.show()
# Keep the last WAVE-2 results and collect both waves for later use.
model_results_WAVE2 = {'time': out['time'].values, 'vector_mean': vector_mean, 'vector_median': vector_median, 'vector_LL': vector_LL, 'vector_UL': vector_UL}
model_results = [model_results_WAVE1, model_results_WAVE2]
#################################################################
## Part 4: Comparing the maximal dataset prevention parameters ##
#################################################################
# Longest-window calibrations of each wave.
# NOTE(review): json.load(open(...)) leaves the file handles unclosed.
samples_dict_WAVE1 = json.load(open('../../data/interim/model_parameters/COVID19_SEIRD/calibrations/national/BE_WAVE1_BETA_COMPLIANCE_2021-02-22.json'))
samples_dict_WAVE2 = json.load(open('../../data/interim/model_parameters/COVID19_SEIRD/calibrations/national/BE_WAVE2_BETA_COMPLIANCE_2021-03-02.json'))
labels = ['$\Omega_{schools}$','$\Omega_{work}$', '$\Omega_{rest}$', '$\Omega_{home}$']
keys = ['prev_schools','prev_work','prev_rest','prev_home']
# One panel per prevention parameter, overlaying the WAVE-1 (blue) and
# WAVE-2 (black) posteriors. prev_schools exists only for WAVE 2.
fig,axes = plt.subplots(1,4,figsize=(12,4))
for idx,ax in enumerate(axes):
    if idx != 0:
        (n1, bins, patches) = ax.hist(samples_dict_WAVE1[keys[idx]],bins=15,color='blue',alpha=0.4, density=True)
        (n2, bins, patches) =ax.hist(samples_dict_WAVE2[keys[idx]],bins=15,color='black',alpha=0.4, density=True)
        max_n = max([max(n1),max(n2)])*1.10
        ax.axvline(np.mean(samples_dict_WAVE1[keys[idx]]),ls=':',ymin=0,ymax=1,color='blue')
        ax.axvline(np.mean(samples_dict_WAVE2[keys[idx]]),ls=':',ymin=0,ymax=1,color='black')
        if idx ==1:
            # NOTE(review): prints a single mu for both waves using the WAVE-1
            # mean only — assumes the work means coincide; confirm.
            ax.annotate('$\mu_1 = \mu_2 = $'+"{:.2f}".format(np.mean(samples_dict_WAVE1[keys[idx]])), xy=(np.mean(samples_dict_WAVE1[keys[idx]]),max_n),
                        rotation=90,va='bottom', ha='center',annotation_clip=False,fontsize=12)
        else:
            ax.annotate('$\mu_1 = $'+"{:.2f}".format(np.mean(samples_dict_WAVE1[keys[idx]])), xy=(np.mean(samples_dict_WAVE1[keys[idx]]),max_n),
                        rotation=90,va='bottom', ha='center',annotation_clip=False,fontsize=12)
            ax.annotate('$\mu_2 = $'+"{:.2f}".format(np.mean(samples_dict_WAVE2[keys[idx]])), xy=(np.mean(samples_dict_WAVE2[keys[idx]]),max_n),
                        rotation=90,va='bottom', ha='center',annotation_clip=False,fontsize=12)
        ax.set_xlabel(labels[idx])
        ax.set_yticks([])
        ax.spines['left'].set_visible(False)
    else:
        # First panel: prev_schools is only calibrated in WAVE 2.
        ax.hist(samples_dict_WAVE2['prev_schools'],bins=15,color='black',alpha=0.6, density=True)
        ax.set_xlabel('$\Omega_{schools}$')
        ax.set_yticks([])
        ax.spines['left'].set_visible(False)
    ax.set_xlim([0,1])
    ax.xaxis.grid(False)
    ax.yaxis.grid(False)
plt.tight_layout()
plt.show()
################################################################
## Part 5: Relative contributions of each contact: both waves ##
################################################################
# --------------------------------
# Re-define function to compute R0
# --------------------------------
def compute_R0(initN, Nc, samples_dict, model_parameters):
    """Compute the reproduction number per age group and its population-weighted mean.

    For every age group i and posterior sample j:
        R0[i,j] = (a[i]*da[j] + omega[j]) * beta[j] * Nc[i,j]

    Parameters
    ----------
    initN : np.ndarray
        Population size per age group, length N.
    Nc : np.ndarray
        Number of contacts per age group and per sample, shape (N, sample_size).
    samples_dict : dict
        Posterior samples; must contain equal-length sequences 'beta', 'da'
        and 'omega'.
    model_parameters : dict
        Model parameters; must contain 'a', a length-N sequence.

    Returns
    -------
    R0 : np.ndarray
        Reproduction number per age group and sample, shape (N, sample_size).
    R0_mean : np.ndarray
        Population-weighted mean reproduction number per sample,
        shape (sample_size,).
    """
    N = initN.size
    sample_size = len(samples_dict['beta'])
    # Vectorized over both axes: age groups as rows, posterior samples as columns.
    # (Replaces an O(N*sample_size) Python double loop with identical results;
    # the previous R0_age = np.mean(R0, axis=1) was never returned and is dropped.)
    a = np.asarray(model_parameters['a']).reshape(N, 1)
    da = np.asarray(samples_dict['da']).reshape(1, sample_size)
    omega = np.asarray(samples_dict['omega']).reshape(1, sample_size)
    beta = np.asarray(samples_dict['beta']).reshape(1, sample_size)
    R0 = (a * da + omega) * beta * np.asarray(Nc)
    # Weight every age group by its population share before summing over ages
    weights = (initN / np.sum(initN)).reshape(N, 1)
    R0_mean = np.sum(R0 * weights, axis=0)
    return R0, R0_mean
# -----------------------
# Pre-allocate dataframes
# -----------------------
# Column MultiIndex: level 0 = wave ('1' or '2'), level 1 = statistic
# (mean / lower limit LL / upper limit UL for work, schools, rest, home, total)
index=df_google.index
columns = [['1','1','1','1','1','1','1','1','1','1','1','1','1','1','1','2','2','2','2','2','2','2','2','2','2','2','2','2','2','2'],['work_mean','work_LL','work_UL','schools_mean','schools_LL','schools_UL','rest_mean','rest_LL','rest_UL',
           'home_mean','home_LL','home_UL','total_mean','total_LL','total_UL','work_mean','work_LL','work_UL','schools_mean','schools_LL','schools_UL',
           'rest_mean','rest_LL','rest_UL','home_mean','home_LL','home_UL','total_mean','total_LL','total_UL']]
tuples = list(zip(*columns))
columns = pd.MultiIndex.from_tuples(tuples, names=["WAVE", "Type"])
# One row per Google-mobility date, 30 columns (2 waves x 15 statistics)
data = np.zeros([len(df_google.index),30])
df_rel = pd.DataFrame(data=data, index=df_google.index, columns=columns)   # relative contact contributions
df_abs = pd.DataFrame(data=data, index=df_google.index, columns=columns)   # absolute contact numbers
df_Re = pd.DataFrame(data=data, index=df_google.index, columns=columns)    # effective reproduction numbers
# Per-wave inputs: posterior samples and lockdown start dates
samples_dicts = [samples_dict_WAVE1, samples_dict_WAVE2]
start_dates =[pd.to_datetime('2020-03-15'), pd.to_datetime('2020-10-19')]
waves=["1", "2"]
# Main loop: for each wave, compute the absolute contacts, relative contact
# shares and effective reproduction numbers per contact location over time.
for j,samples_dict in enumerate(samples_dicts):
    print('\n WAVE: ' + str(j)+'\n')
    # ---------------
    # Rest prevention
    # ---------------
    # "Rest" bundles leisure, transport and other contacts, each scaled with the
    # corresponding Google mobility reduction (df_google values are % change).
    print('Rest\n')
    data_rest = np.zeros([len(df_google.index.values), len(samples_dict['prev_rest'])])
    Re_rest = np.zeros([len(df_google.index.values), len(samples_dict['prev_rest'])])
    for idx, date in enumerate(df_google.index):
        # tau: mean delay before measures take effect; l: mean compliance ramp-in (days)
        tau = np.mean(samples_dict['tau'])
        l = np.mean(samples_dict['l'])
        tau_days = pd.Timedelta(tau, unit='D')
        l_days = pd.Timedelta(l, unit='D')
        date_start = start_dates[j]
        if date <= date_start + tau_days:
            # Before measures take effect: mobility-scaled contacts, no prevention
            data_rest[idx,:] = 0.01*(100+df_google['retail_recreation'][date])* (np.sum(np.mean(Nc_leisure,axis=0)))\
                + 0.01*(100+df_google['transport'][date])* (np.sum(np.mean(Nc_transport,axis=0)))\
                + 0.01*(100+df_google['grocery'][date])* (np.sum(np.mean(Nc_others,axis=0)))*np.ones(len(samples_dict['prev_rest']))
            contacts = np.expand_dims(0.01*(100+df_google['retail_recreation'][date])* (np.sum(Nc_leisure,axis=1))\
                + 0.01*(100+df_google['transport'][date])* (np.sum(Nc_transport,axis=1))\
                + 0.01*(100+df_google['grocery'][date])* (np.sum(Nc_others,axis=1)),axis=1)*np.ones([1,len(samples_dict['prev_rest'])])
            R0, Re_rest[idx,:] = compute_R0(initN, contacts, samples_dict, params)
        elif date_start + tau_days < date <= date_start + tau_days + l_days:
            # Ramp-in window: linear interpolation between no prevention (old)
            # and the sampled prevention parameter (new)
            old = 0.01*(100+df_google['retail_recreation'][date])* (np.sum(np.mean(Nc_leisure,axis=0)))\
                + 0.01*(100+df_google['transport'][date])* (np.sum(np.mean(Nc_transport,axis=0)))\
                + 0.01*(100+df_google['grocery'][date])* (np.sum(np.mean(Nc_others,axis=0)))*np.ones(len(samples_dict['prev_rest']))
            new = (0.01*(100+df_google['retail_recreation'][date])* (np.sum(np.mean(Nc_leisure,axis=0)))\
                + 0.01*(100+df_google['transport'][date])* (np.sum(np.mean(Nc_transport,axis=0)))\
                + 0.01*(100+df_google['grocery'][date])* (np.sum(np.mean(Nc_others,axis=0)))\
                )*np.array(samples_dict['prev_rest'])
            data_rest[idx,:]= old + (new-old)/l * (date-date_start-tau_days)/pd.Timedelta('1D')
            old_contacts = np.expand_dims(0.01*(100+df_google['retail_recreation'][date])* (np.sum(Nc_leisure,axis=1))\
                + 0.01*(100+df_google['transport'][date])* (np.sum(Nc_transport,axis=1))\
                + 0.01*(100+df_google['grocery'][date])* (np.sum(Nc_others,axis=1)),axis=1)*np.ones([1,len(samples_dict['prev_rest'])])
            new_contacts = np.expand_dims(0.01*(100+df_google['retail_recreation'][date])* (np.sum(Nc_leisure,axis=1))\
                + 0.01*(100+df_google['transport'][date])* (np.sum(Nc_transport,axis=1))\
                + 0.01*(100+df_google['grocery'][date])* (np.sum(Nc_others,axis=1)),axis=1)*np.array(samples_dict['prev_rest'])
            contacts = old_contacts + (new_contacts-old_contacts)/l * (date-date_start-tau_days)/pd.Timedelta('1D')
            R0, Re_rest[idx,:] = compute_R0(initN, contacts, samples_dict, params)
        else:
            # Fully ramped-in: prevention parameter applied to mobility-scaled contacts
            data_rest[idx,:] = (0.01*(100+df_google['retail_recreation'][date])* (np.sum(np.mean(Nc_leisure,axis=0)))\
                + 0.01*(100+df_google['transport'][date])* (np.sum(np.mean(Nc_transport,axis=0)))\
                + 0.01*(100+df_google['grocery'][date])* (np.sum(np.mean(Nc_others,axis=0)))\
                )*np.array(samples_dict['prev_rest'])
            contacts = np.expand_dims(0.01*(100+df_google['retail_recreation'][date])* (np.sum(Nc_leisure,axis=1))\
                + 0.01*(100+df_google['transport'][date])* (np.sum(Nc_transport,axis=1))\
                + 0.01*(100+df_google['grocery'][date])* (np.sum(Nc_others,axis=1)),axis=1)*np.array(samples_dict['prev_rest'])
            R0, Re_rest[idx,:] = compute_R0(initN, contacts, samples_dict, params)
    # Mean and 95% credible interval over the posterior samples
    Re_rest_mean = np.mean(Re_rest,axis=1)
    Re_rest_LL = np.quantile(Re_rest,q=0.05/2,axis=1)
    Re_rest_UL = np.quantile(Re_rest,q=1-0.05/2,axis=1)
    # ---------------
    # Work prevention
    # ---------------
    # Same three-phase logic as "rest", using the 'work' mobility stream and Nc_work
    print('Work\n')
    data_work = np.zeros([len(df_google.index.values), len(samples_dict['prev_work'])])
    Re_work = np.zeros([len(df_google.index.values), len(samples_dict['prev_work'])])
    for idx, date in enumerate(df_google.index):
        tau = np.mean(samples_dict['tau'])
        l = np.mean(samples_dict['l'])
        tau_days = pd.Timedelta(tau, unit='D')
        l_days = pd.Timedelta(l, unit='D')
        date_start = start_dates[j]
        if date <= date_start + tau_days:
            data_work[idx,:] = 0.01*(100+df_google['work'][date])* (np.sum(np.mean(Nc_work,axis=0)))*np.ones(len(samples_dict['prev_work']))
            contacts = np.expand_dims(0.01*(100+df_google['work'][date])* (np.sum(Nc_work,axis=1)),axis=1)*np.ones([1,len(samples_dict['prev_work'])])
            R0, Re_work[idx,:] = compute_R0(initN, contacts, samples_dict, params)
        elif date_start + tau_days < date <= date_start + tau_days + l_days:
            old = 0.01*(100+df_google['work'][date])* (np.sum(np.mean(Nc_work,axis=0)))*np.ones(len(samples_dict['prev_work']))
            new = 0.01*(100+df_google['work'][date])* (np.sum(np.mean(Nc_work,axis=0)))*np.array(samples_dict['prev_work'])
            data_work[idx,:] = old + (new-old)/l * (date-date_start-tau_days)/pd.Timedelta('1D')
            old_contacts = np.expand_dims(0.01*(100+df_google['work'][date])*(np.sum(Nc_work,axis=1)),axis=1)*np.ones([1,len(samples_dict['prev_work'])])
            new_contacts = np.expand_dims(0.01*(100+df_google['work'][date])* (np.sum(Nc_work,axis=1)),axis=1)*np.array(samples_dict['prev_work'])
            contacts = old_contacts + (new_contacts-old_contacts)/l * (date-date_start-tau_days)/pd.Timedelta('1D')
            R0, Re_work[idx,:] = compute_R0(initN, contacts, samples_dict, params)
        else:
            data_work[idx,:] = (0.01*(100+df_google['work'][date])* (np.sum(np.mean(Nc_work,axis=0))))*np.array(samples_dict['prev_work'])
            contacts = np.expand_dims(0.01*(100+df_google['work'][date])* (np.sum(Nc_work,axis=1)),axis=1)*np.array(samples_dict['prev_work'])
            R0, Re_work[idx,:] = compute_R0(initN, contacts, samples_dict, params)
    Re_work_mean = np.mean(Re_work,axis=1)
    Re_work_LL = np.quantile(Re_work, q=0.05/2, axis=1)
    Re_work_UL = np.quantile(Re_work, q=1-0.05/2, axis=1)
    # ----------------
    # Home prevention
    # ----------------
    # Home contacts are not mobility-scaled; only the prevention parameter applies
    print('Home\n')
    data_home = np.zeros([len(df_google['work'].values),len(samples_dict['prev_home'])])
    Re_home = np.zeros([len(df_google['work'].values),len(samples_dict['prev_home'])])
    for idx, date in enumerate(df_google.index):
        tau = np.mean(samples_dict['tau'])
        l = np.mean(samples_dict['l'])
        tau_days = pd.Timedelta(tau, unit='D')
        l_days = pd.Timedelta(l, unit='D')
        date_start = start_dates[j]
        if date <= date_start + tau_days:
            data_home[idx,:] = np.sum(np.mean(Nc_home,axis=0))*np.ones(len(samples_dict['prev_home']))
            # NOTE(review): np.ones(len(...)) here vs np.ones([1,len(...)]) elsewhere —
            # broadcasting against the (N,1) expand_dims result gives the same (N,S) array
            contacts = np.expand_dims((np.sum(Nc_home,axis=1)),axis=1)*np.ones(len(samples_dict['prev_home']))
            R0, Re_home[idx,:] = compute_R0(initN, contacts, samples_dict, params)
        elif date_start + tau_days < date <= date_start + tau_days + l_days:
            old = np.sum(np.mean(Nc_home,axis=0))*np.ones(len(samples_dict['prev_home']))
            new = np.sum(np.mean(Nc_home,axis=0))*np.array(samples_dict['prev_home'])
            data_home[idx,:] = old + (new-old)/l * (date-date_start-tau_days)/pd.Timedelta('1D')
            old_contacts = np.expand_dims(np.sum(Nc_home,axis=1),axis=1)*np.ones([1,len(samples_dict['prev_home'])])
            new_contacts = np.expand_dims((np.sum(Nc_home,axis=1)),axis=1)*np.array(samples_dict['prev_home'])
            contacts = old_contacts + (new_contacts-old_contacts)/l * (date-date_start-tau_days)/pd.Timedelta('1D')
            R0, Re_home[idx,:] = compute_R0(initN, contacts, samples_dict, params)
        else:
            data_home[idx,:] = np.sum(np.mean(Nc_home,axis=0))*np.array(samples_dict['prev_home'])
            contacts = np.expand_dims((np.sum(Nc_home,axis=1)),axis=1)*np.array(samples_dict['prev_home'])
            R0, Re_home[idx,:] = compute_R0(initN, contacts, samples_dict, params)
    Re_home_mean = np.mean(Re_home,axis=1)
    Re_home_LL = np.quantile(Re_home, q=0.05/2, axis=1)
    Re_home_UL = np.quantile(Re_home, q=1-0.05/2, axis=1)
    # ------------------
    # School prevention
    # ------------------
    # Schools are toggled open (factor 1) or closed (factor 0) per calendar period.
    # Wave 1 had no sampled 'prev_schools', so 'prev_work' is used to size the
    # sample dimension — presumably just as a sample-count placeholder; TODO confirm.
    if j == 0:
        print('School\n')
        data_schools = np.zeros([len(df_google.index.values), len(samples_dict['prev_work'])])
        Re_schools = np.zeros([len(df_google.index.values), len(samples_dict['prev_work'])])
        for idx, date in enumerate(df_google.index):
            tau = np.mean(samples_dict['tau'])
            l = np.mean(samples_dict['l'])
            tau_days = pd.Timedelta(tau, unit='D')
            l_days = pd.Timedelta(l, unit='D')
            date_start = start_dates[j]
            if date <= date_start + tau_days:
                # Schools fully open before the measures
                data_schools[idx,:] = 1*(np.sum(np.mean(Nc_schools,axis=0)))*np.ones(len(samples_dict['prev_work']))
                contacts = 1*np.expand_dims(np.sum(Nc_schools,axis=1),axis=1)*np.ones([1,len(samples_dict['prev_home'])])
                R0, Re_schools[idx,:] = compute_R0(initN, contacts, samples_dict, params)
            elif date_start + tau_days < date <= date_start + tau_days + l_days:
                # Ramp from open (1) to closed (0)
                old = 1*(np.sum(np.mean(Nc_schools,axis=0)))*np.ones(len(samples_dict['prev_work']))
                new = 0* (np.sum(np.mean(Nc_schools,axis=0)))*np.array(samples_dict['prev_work'])
                data_schools[idx,:] = old + (new-old)/l * (date-date_start-tau_days)/pd.Timedelta('1D')
                old_contacts = 1*np.expand_dims(np.sum(Nc_schools,axis=1),axis=1)*np.ones([1,len(samples_dict['prev_work'])])
                new_contacts = 0*np.expand_dims(np.sum(Nc_schools,axis=1),axis=1)*np.ones([1,len(samples_dict['prev_work'])])
                contacts = old_contacts + (new_contacts-old_contacts)/l * (date-date_start-tau_days)/pd.Timedelta('1D')
                R0, Re_schools[idx,:] = compute_R0(initN, contacts, samples_dict, params)
            elif date_start + tau_days + l_days < date <= pd.to_datetime('2020-09-01'):
                # Schools closed until the September 2020 reopening
                data_schools[idx,:] = 0* (np.sum(np.mean(Nc_schools,axis=0)))*np.array(samples_dict['prev_work'])
                contacts = 0*np.expand_dims(np.sum(Nc_schools,axis=1),axis=1)*np.ones([1,len(samples_dict['prev_home'])])
                R0, Re_schools[idx,:] = compute_R0(initN, contacts, samples_dict, params)
            else:
                data_schools[idx,:] = 1 * (np.sum(np.mean(Nc_schools,axis=0)))*np.array(samples_dict['prev_work']) # This is wrong, but is never used
                contacts = 1*np.expand_dims(np.sum(Nc_schools,axis=1),axis=1)*np.ones([1,len(samples_dict['prev_home'])])
                R0, Re_schools[idx,:] = compute_R0(initN, contacts, samples_dict, params)
    elif j == 1:
        # Wave 2: alternating school closures/openings follow the Belgian school
        # calendar (autumn lockdown, Christmas holidays, spring break)
        print('School\n')
        data_schools = np.zeros([len(df_google.index.values), len(samples_dict['prev_schools'])])
        Re_schools = np.zeros([len(df_google.index.values), len(samples_dict['prev_work'])])
        for idx, date in enumerate(df_google.index):
            tau = np.mean(samples_dict['tau'])
            l = np.mean(samples_dict['l'])
            tau_days = pd.Timedelta(tau, unit='D')
            l_days = pd.Timedelta(l, unit='D')
            date_start = start_dates[j]
            if date <= date_start + tau_days:
                data_schools[idx,:] = 1*(np.sum(np.mean(Nc_schools,axis=0)))*np.ones(len(samples_dict['prev_schools']))
                contacts = 1*np.expand_dims(np.sum(Nc_schools,axis=1),axis=1)*np.ones([1,len(samples_dict['prev_schools'])])
                R0, Re_schools[idx,:] = compute_R0(initN, contacts, samples_dict, params)
            elif date_start + tau_days < date <= date_start + tau_days + l_days:
                old = 1*(np.sum(np.mean(Nc_schools,axis=0)))*np.ones(len(samples_dict['prev_schools']))
                new = 0* (np.sum(np.mean(Nc_schools,axis=0)))*np.array(samples_dict['prev_schools'])
                data_schools[idx,:] = old + (new-old)/l * (date-date_start-tau_days)/pd.Timedelta('1D')
                old_contacts = 1*np.expand_dims(np.sum(Nc_schools,axis=1),axis=1)*np.ones([1,len(samples_dict['prev_schools'])])
                new_contacts = 0*np.expand_dims(np.sum(Nc_schools,axis=1),axis=1)*np.ones([1,len(samples_dict['prev_schools'])])
                contacts = old_contacts + (new_contacts-old_contacts)/l * (date-date_start-tau_days)/pd.Timedelta('1D')
                R0, Re_schools[idx,:] = compute_R0(initN, contacts, samples_dict, params)
            elif date_start + tau_days + l_days < date <= pd.to_datetime('2020-11-16'):
                # Closed during the autumn lockdown / extended break
                data_schools[idx,:] = 0* (np.sum(np.mean(Nc_schools,axis=0)))*np.array(samples_dict['prev_schools'])
                contacts = 0*np.expand_dims(np.sum(Nc_schools,axis=1),axis=1)*np.ones([1,len(samples_dict['prev_schools'])])
                R0, Re_schools[idx,:] = compute_R0(initN, contacts, samples_dict, params)
            elif pd.to_datetime('2020-11-16') < date <= pd.to_datetime('2020-12-18'):
                # Reopened until the Christmas holidays
                data_schools[idx,:] = 1* (np.sum(np.mean(Nc_schools,axis=0)))*np.array(samples_dict['prev_schools'])
                contacts = 1*np.expand_dims(np.sum(Nc_schools,axis=1),axis=1)*np.ones([1,len(samples_dict['prev_schools'])])
                R0, Re_schools[idx,:] = compute_R0(initN, contacts, samples_dict, params)
            elif pd.to_datetime('2020-12-18') < date <= pd.to_datetime('2021-01-04'):
                # Christmas holidays
                # NOTE(review): the extra 'tmp' binding looks accidental; it is harmless
                data_schools[idx,:] = 0* (np.sum(np.mean(Nc_schools,axis=0)))*np.array(samples_dict['prev_schools'])
                contacts = tmp = 0*np.expand_dims(np.sum(Nc_schools,axis=1),axis=1)*np.ones([1,len(samples_dict['prev_schools'])])
                R0, Re_schools[idx,:] = compute_R0(initN, contacts, samples_dict, params)
            elif pd.to_datetime('2021-01-04') < date <= pd.to_datetime('2021-02-15'):
                data_schools[idx,:] = 1* (np.sum(np.mean(Nc_schools,axis=0)))*np.array(samples_dict['prev_schools'])
                contacts = 1*np.expand_dims(np.sum(Nc_schools,axis=1),axis=1)*np.ones([1,len(samples_dict['prev_schools'])])
                R0, Re_schools[idx,:] = compute_R0(initN, contacts, samples_dict, params)
            elif pd.to_datetime('2021-02-15') < date <= pd.to_datetime('2021-02-21'):
                # Spring break
                data_schools[idx,:] = 0* (np.sum(np.mean(Nc_schools,axis=0)))*np.array(samples_dict['prev_schools'])
                contacts = 0*np.expand_dims(np.sum(Nc_schools,axis=1),axis=1)*np.ones([1,len(samples_dict['prev_schools'])])
                R0, Re_schools[idx,:] = compute_R0(initN, contacts, samples_dict, params)
            else:
                data_schools[idx,:] = 1* (np.sum(np.mean(Nc_schools,axis=0)))*np.array(samples_dict['prev_schools'])
                contacts = 1*np.expand_dims(np.sum(Nc_schools,axis=1),axis=1)*np.ones([1,len(samples_dict['prev_schools'])])
                R0, Re_schools[idx,:] = compute_R0(initN, contacts, samples_dict, params)
    Re_schools_mean = np.mean(Re_schools,axis=1)
    Re_schools_LL = np.quantile(Re_schools, q=0.05/2, axis=1)
    Re_schools_UL = np.quantile(Re_schools, q=1-0.05/2, axis=1)
    # -----
    # Total
    # -----
    # Totals are the sums over the four contact locations
    data_total = data_rest + data_work + data_home + data_schools
    Re_total = Re_rest + Re_work + Re_home + Re_schools
    Re_total_mean = np.mean(Re_total,axis=1)
    Re_total_LL = np.quantile(Re_total, q=0.05/2, axis=1)
    Re_total_UL = np.quantile(Re_total, q=1-0.05/2, axis=1)
    # -----------------------
    # Absolute contributions
    # -----------------------
    # NOTE(review): the copy loop below is an element-wise identity copy of the
    # data_* arrays; LL and UL are quantile levels assumed to be defined earlier
    # in the script — TODO confirm
    abs_rest = np.zeros(data_rest.shape)
    abs_work = np.zeros(data_rest.shape)
    abs_home = np.zeros(data_rest.shape)
    abs_schools = np.zeros(data_schools.shape)
    abs_total = data_total
    for i in range(data_rest.shape[0]):
        abs_rest[i,:] = data_rest[i,:]
        abs_work[i,:] = data_work[i,:]
        abs_home[i,:] = data_home[i,:]
        abs_schools[i,:] = data_schools[i,:]
    abs_schools_mean = np.mean(abs_schools,axis=1)
    abs_schools_LL = np.quantile(abs_schools,LL,axis=1)
    abs_schools_UL = np.quantile(abs_schools,UL,axis=1)
    abs_rest_mean = np.mean(abs_rest,axis=1)
    abs_rest_LL = np.quantile(abs_rest,LL,axis=1)
    abs_rest_UL = np.quantile(abs_rest,UL,axis=1)
    abs_work_mean = np.mean(abs_work,axis=1)
    abs_work_LL = np.quantile(abs_work,LL,axis=1)
    abs_work_UL = np.quantile(abs_work,UL,axis=1)
    abs_home_mean = np.mean(abs_home,axis=1)
    abs_home_LL = np.quantile(abs_home,LL,axis=1)
    abs_home_UL = np.quantile(abs_home,UL,axis=1)
    abs_total_mean = np.mean(abs_total,axis=1)
    abs_total_LL = np.quantile(abs_total,LL,axis=1)
    abs_total_UL = np.quantile(abs_total,UL,axis=1)
    # -----------------------
    # Relative contributions
    # -----------------------
    # Each location's share of the daily total number of contacts
    rel_rest = np.zeros(data_rest.shape)
    rel_work = np.zeros(data_rest.shape)
    rel_home = np.zeros(data_rest.shape)
    rel_schools = np.zeros(data_schools.shape)
    rel_total = np.zeros(data_schools.shape)
    for i in range(data_rest.shape[0]):
        total = data_schools[i,:] + data_rest[i,:] + data_work[i,:] + data_home[i,:]
        rel_rest[i,:] = data_rest[i,:]/total
        rel_work[i,:] = data_work[i,:]/total
        rel_home[i,:] = data_home[i,:]/total
        rel_schools[i,:] = data_schools[i,:]/total
        rel_total[i,:] = total/total
    rel_schools_mean = np.mean(rel_schools,axis=1)
    rel_schools_LL = np.quantile(rel_schools,LL,axis=1)
    rel_schools_UL = np.quantile(rel_schools,UL,axis=1)
    rel_rest_mean = np.mean(rel_rest,axis=1)
    rel_rest_LL = np.quantile(rel_rest,LL,axis=1)
    rel_rest_UL = np.quantile(rel_rest,UL,axis=1)
    rel_work_mean = np.mean(rel_work,axis=1)
    rel_work_LL = np.quantile(rel_work,LL,axis=1)
    rel_work_UL = np.quantile(rel_work,UL,axis=1)
    rel_home_mean = np.mean(rel_home,axis=1)
    rel_home_LL = np.quantile(rel_home,LL,axis=1)
    rel_home_UL = np.quantile(rel_home,UL,axis=1)
    rel_total_mean = np.mean(rel_total,axis=1)
    rel_total_LL = np.quantile(rel_total,LL,axis=1)
    rel_total_UL = np.quantile(rel_total,UL,axis=1)
    # ---------------------
    # Append to dataframe
    # ---------------------
    df_rel[waves[j],"work_mean"] = rel_work_mean
    df_rel[waves[j],"work_LL"] = rel_work_LL
    df_rel[waves[j],"work_UL"] = rel_work_UL
    df_rel[waves[j], "rest_mean"] = rel_rest_mean
    df_rel[waves[j], "rest_LL"] = rel_rest_LL
    df_rel[waves[j], "rest_UL"] = rel_rest_UL
    df_rel[waves[j], "home_mean"] = rel_home_mean
    df_rel[waves[j], "home_LL"] = rel_home_LL
    df_rel[waves[j], "home_UL"] = rel_home_UL
    df_rel[waves[j],"schools_mean"] = rel_schools_mean
    df_rel[waves[j],"schools_LL"] = rel_schools_LL
    df_rel[waves[j],"schools_UL"] = rel_schools_UL
    df_rel[waves[j],"total_mean"] = rel_total_mean
    df_rel[waves[j],"total_LL"] = rel_total_LL
    df_rel[waves[j],"total_UL"] = rel_total_UL
    # NOTE(review): copying df_rel/df_Re here and restoring them below appears
    # redundant (copy then immediate re-assignment) — TODO confirm intent
    copy1 = df_rel.copy(deep=True)
    df_Re[waves[j],"work_mean"] = Re_work_mean
    df_Re[waves[j],"work_LL"] = Re_work_LL
    df_Re[waves[j],"work_UL"] = Re_work_UL
    df_Re[waves[j], "rest_mean"] = Re_rest_mean
    df_Re[waves[j],"rest_LL"] = Re_rest_LL
    df_Re[waves[j],"rest_UL"] = Re_rest_UL
    df_Re[waves[j], "home_mean"] = Re_home_mean
    df_Re[waves[j], "home_LL"] = Re_home_LL
    df_Re[waves[j], "home_UL"] = Re_home_UL
    df_Re[waves[j],"schools_mean"] = Re_schools_mean
    df_Re[waves[j],"schools_LL"] = Re_schools_LL
    df_Re[waves[j],"schools_UL"] = Re_schools_UL
    df_Re[waves[j],"total_mean"] = Re_total_mean
    df_Re[waves[j],"total_LL"] = Re_total_LL
    df_Re[waves[j],"total_UL"] = Re_total_UL
    copy2 = df_Re.copy(deep=True)
    df_abs[waves[j],"work_mean"] = abs_work_mean
    df_abs[waves[j],"work_LL"] = abs_work_LL
    df_abs[waves[j],"work_UL"] = abs_work_UL
    df_abs[waves[j], "rest_mean"] = abs_rest_mean
    df_abs[waves[j], "rest_LL"] = abs_rest_LL
    df_abs[waves[j], "rest_UL"] = abs_rest_UL
    df_abs[waves[j], "home_mean"] = abs_home_mean
    df_abs[waves[j], "home_LL"] = abs_home_LL
    df_abs[waves[j], "home_UL"] = abs_home_UL
    df_abs[waves[j],"schools_mean"] = abs_schools_mean
    df_abs[waves[j],"schools_LL"] = abs_schools_LL
    df_abs[waves[j],"schools_UL"] = abs_schools_UL
    df_abs[waves[j],"total_mean"] = abs_total_mean
    df_abs[waves[j],"total_LL"] = abs_total_LL
    df_abs[waves[j],"total_UL"] = abs_total_UL
    df_rel = copy1
    df_Re = copy2
#df_abs.to_excel('test.xlsx', sheet_name='Absolute contacts')
#df_rel.to_excel('test.xlsx', sheet_name='Relative contacts')
#df_Re.to_excel('test.xlsx', sheet_name='Effective reproduction number')
# Print period averages of total contacts and Re (LL/mean/UL):
# wave-1 lockdown (2020-03-22 .. 2020-05-04)
print(np.mean(df_abs["1","total_mean"][pd.to_datetime('2020-03-22'):pd.to_datetime('2020-05-04')]))
print(np.mean(df_Re["1","total_LL"][pd.to_datetime('2020-03-22'):pd.to_datetime('2020-05-04')]),
      np.mean(df_Re["1","total_mean"][pd.to_datetime('2020-03-22'):pd.to_datetime('2020-05-04')]),
      np.mean(df_Re["1","total_UL"][pd.to_datetime('2020-03-22'):pd.to_datetime('2020-05-04')]))
# summer relaxation (2020-06-01 .. 2020-07-01)
print(np.mean(df_abs["1","total_mean"][pd.to_datetime('2020-06-01'):pd.to_datetime('2020-07-01')]))
print(np.mean(df_Re["1","total_LL"][pd.to_datetime('2020-06-01'):pd.to_datetime('2020-07-01')]),
      np.mean(df_Re["1","total_mean"][pd.to_datetime('2020-06-01'):pd.to_datetime('2020-07-01')]),
      np.mean(df_Re["1","total_UL"][pd.to_datetime('2020-06-01'):pd.to_datetime('2020-07-01')]))
# wave-2 lockdown with schools closed (2020-10-25 .. 2020-11-16)
print(np.mean(df_abs["2","total_mean"][pd.to_datetime('2020-10-25'):pd.to_datetime('2020-11-16')]))
print(np.mean(df_Re["2","total_LL"][pd.to_datetime('2020-10-25'):pd.to_datetime('2020-11-16')]),
      np.mean(df_Re["2","total_mean"][pd.to_datetime('2020-10-25'):pd.to_datetime('2020-11-16')]),
      np.mean(df_Re["2","total_UL"][pd.to_datetime('2020-10-25'):pd.to_datetime('2020-11-16')]))
# wave-2 lockdown with schools reopened (2020-11-16 .. 2020-12-18)
print(np.mean(df_abs["2","total_mean"][pd.to_datetime('2020-11-16'):pd.to_datetime('2020-12-18')]))
print(np.mean(df_Re["2","total_LL"][pd.to_datetime('2020-11-16'):pd.to_datetime('2020-12-18')]),
      np.mean(df_Re["2","total_mean"][pd.to_datetime('2020-11-16'):pd.to_datetime('2020-12-18')]),
      np.mean(df_Re["2","total_UL"][pd.to_datetime('2020-11-16'):pd.to_datetime('2020-12-18')]))
# ----------------------------
# Plot absolute contributions
# ----------------------------
# Two rows: top = wave 1, bottom = wave 2; shaded span = pre-lockdown period
xlims = [[pd.to_datetime('2020-03-01'), pd.to_datetime('2020-07-14')],[pd.to_datetime('2020-09-01'), pd.to_datetime('2021-02-01')]]
no_lockdown = [[pd.to_datetime('2020-03-01'), pd.to_datetime('2020-03-15')],[pd.to_datetime('2020-09-01'), pd.to_datetime('2020-10-19')]]
fig,axes=plt.subplots(nrows=2,ncols=1,figsize=(12,7))
for idx,ax in enumerate(axes):
    # Left axis: mean absolute contacts per location
    ax.plot(df_abs.index, df_abs[waves[idx],"rest_mean"], color='blue', linewidth=2)
    ax.plot(df_abs.index, df_abs[waves[idx],"work_mean"], color='red', linewidth=2)
    ax.plot(df_abs.index, df_abs[waves[idx],"home_mean"], color='green', linewidth=2)
    ax.plot(df_abs.index, df_abs[waves[idx],"schools_mean"], color='orange', linewidth=2)
    ax.plot(df_abs.index, df_abs[waves[idx],"total_mean"], color='black', linewidth=1.5)
    ax.xaxis.grid(False)
    ax.yaxis.grid(False)
    ax.set_ylabel('Absolute contacts (-)')
    if idx == 0:
        # One shared legend, placed outside the top panel
        ax.legend(['leisure','work','home','schools','total'], bbox_to_anchor=(1.20, 1), loc='upper left')
    ax.set_xlim(xlims[idx])
    ax.axvspan(no_lockdown[idx][0], no_lockdown[idx][1], alpha=0.2, color='black')
    # Right axis: observed and simulated new hospitalisations for context
    ax2 = ax.twinx()
    time = model_results[idx]['time']
    vector_mean = model_results[idx]['vector_mean']
    vector_LL = model_results[idx]['vector_LL']
    vector_UL = model_results[idx]['vector_UL']
    ax2.scatter(df_sciensano.index,df_sciensano['H_in'],color='black',alpha=0.6,linestyle='None',facecolors='none', s=30, linewidth=1)
    ax2.plot(time,vector_mean,'--', color='black', linewidth=1.5)
    ax2.fill_between(time,vector_LL, vector_UL,alpha=0.20, color = 'black')
    ax2.xaxis.grid(False)
    ax2.yaxis.grid(False)
    ax2.set_xlim(xlims[idx])
    ax2.set_ylabel('New hospitalisations (-)')
    ax = _apply_tick_locator(ax)
    ax2 = _apply_tick_locator(ax2)
plt.tight_layout()
plt.show()
plt.close()
# ----------------------------
# Plot relative contributions
# ----------------------------
# Same layout as the absolute plot, but showing each location's share of contacts
fig,axes=plt.subplots(nrows=2,ncols=1,figsize=(12,7))
for idx,ax in enumerate(axes):
    ax.plot(df_rel.index, df_rel[waves[idx],"rest_mean"], color='blue', linewidth=1.5)
    ax.plot(df_rel.index, df_rel[waves[idx],"work_mean"], color='red', linewidth=1.5)
    ax.plot(df_rel.index, df_rel[waves[idx],"home_mean"], color='green', linewidth=1.5)
    ax.plot(df_rel.index, df_rel[waves[idx],"schools_mean"], color='orange', linewidth=1.5)
    ax.xaxis.grid(False)
    ax.yaxis.grid(False)
    ax.set_ylabel('Relative contacts (-)')
    if idx == 0:
        ax.legend(['leisure','work','home','schools'], bbox_to_anchor=(1.20, 1), loc='upper left')
    ax.set_xlim(xlims[idx])
    ax.axvspan(no_lockdown[idx][0], no_lockdown[idx][1], alpha=0.2, color='black')
    # Shares are fractions; cap the axis just above the largest expected share
    ax.set_yticks([0,0.25,0.50,0.75])
    ax.set_ylim([0,0.85])
    # Right axis: observed and simulated new hospitalisations for context
    ax2 = ax.twinx()
    time = model_results[idx]['time']
    vector_mean = model_results[idx]['vector_mean']
    vector_LL = model_results[idx]['vector_LL']
    vector_UL = model_results[idx]['vector_UL']
    ax2.scatter(df_sciensano.index,df_sciensano['H_in'],color='black',alpha=0.6,linestyle='None',facecolors='none', s=30, linewidth=1)
    ax2.plot(time,vector_mean,'--', color='black', linewidth=1.5)
    ax2.fill_between(time,vector_LL, vector_UL,alpha=0.20, color = 'black')
    ax2.xaxis.grid(False)
    ax2.yaxis.grid(False)
    ax2.set_xlim(xlims[idx])
    ax2.set_ylabel('New hospitalisations (-)')
    ax = _apply_tick_locator(ax)
    ax2 = _apply_tick_locator(ax2)
plt.tight_layout()
plt.show()
plt.close()
# --------------------------------------------
# Plot relative contributions and cluster data
# --------------------------------------------
# Perform calculation
# Normalize Sciensano cluster counts by the population at risk in each setting,
# then express each setting as a share of the normalized total
df_clusters = pd.read_csv('../../data/interim/sciensano/clusters.csv')
population_total = 11539326
population_schools = 2344395
population_work = 4893800 #https://stat.nbb.be/Index.aspx?DataSetCode=POPULA&lang=nl
home_rel = df_clusters['family']/population_total
work_rel = df_clusters['work']/population_work
schools_rel = df_clusters['schools']/population_schools
others_rel = df_clusters['others']/population_total
normalizer = df_clusters['family']/population_total + df_clusters['work']/population_work + df_clusters['schools']/population_schools + df_clusters['others']/population_total
df_clusters['family_rel'] = df_clusters['family']/population_total/normalizer
df_clusters['work_rel'] = df_clusters['work']/population_work/normalizer
df_clusters['schools_rel'] = df_clusters['schools']/population_schools/normalizer
df_clusters['others_rel'] = df_clusters['others']/population_total/normalizer
# Plot each weekly observation at the midpoint of its reporting week
df_clusters['midpoint_week'] = pd.to_datetime(df_clusters['startdate_week'])+(pd.to_datetime(df_clusters['enddate_week'])-pd.to_datetime(df_clusters['startdate_week']))/2
# Make plot
fig,ax = plt.subplots(figsize=(12,5))
# Cluster data
ax.plot(df_clusters['midpoint_week'], df_clusters['others_rel'], '--',color='blue',linewidth=1.5)
ax.plot(df_clusters['midpoint_week'], df_clusters['work_rel'],'--', color='red',linewidth=1.5)
ax.plot(df_clusters['midpoint_week'], df_clusters['family_rel'],'--',color='green',linewidth=1.5)
ax.plot(df_clusters['midpoint_week'], df_clusters['schools_rel'],'--', color='orange',linewidth=1.5)
# Model relative share
#ax.plot(df_rel.index, df_rel['2',"rest_mean"], color='blue', linewidth=1.5)
#ax.plot(df_rel.index, df_rel['2',"work_mean"], color='red', linewidth=1.5)
#ax.plot(df_rel.index, df_rel['2',"home_mean"], color='green', linewidth=1.5)
#ax.plot(df_rel.index, df_rel['2',"schools_mean"], color='orange', linewidth=1.5)
ax.legend(['others','work','home','schools'], bbox_to_anchor=(1.10, 1), loc='upper left')
ax.scatter(df_clusters['midpoint_week'], df_clusters['others_rel'], color='blue')
ax.scatter(df_clusters['midpoint_week'], df_clusters['work_rel'], color='red')
ax.scatter(df_clusters['midpoint_week'], df_clusters['family_rel'],color='green')
ax.scatter(df_clusters['midpoint_week'], df_clusters['schools_rel'], color='orange')
# Shading of no lockdown zone
ax.axvspan('2020-09-01', '2020-10-19', alpha=0.2, color='black')
# Other style options
ax.set_ylabel('Normalized share of clusters (-)')
ax.grid(False)
ax = _apply_tick_locator(ax)
ax.set_ylim([0,0.80])
ax.set_yticks([0,0.25,0.50,0.75])
# Right axis: wave-2 hospitalisation data and model fit for context
ax2 = ax.twinx()
time = model_results[1]['time']
vector_mean = model_results[1]['vector_mean']
vector_LL = model_results[1]['vector_LL']
vector_UL = model_results[1]['vector_UL']
ax2.scatter(df_sciensano.index,df_sciensano['H_in'],color='black',alpha=0.6,linestyle='None',facecolors='none', s=30, linewidth=1)
ax2.plot(time,vector_mean,'--', color='black', linewidth=1.5)
ax2.fill_between(time,vector_LL, vector_UL,alpha=0.20, color = 'black')
# NOTE(review): this scatter duplicates the one three lines above — presumably accidental
ax2.scatter(df_sciensano.index,df_sciensano['H_in'],color='black',alpha=0.6,linestyle='None',facecolors='none', s=30, linewidth=1)
ax2.set_xlim(['2020-09-01', '2021-02-20'])
ax2.set_ylabel('New hospitalisations (-)')
ax2.grid(False)
ax2 = _apply_tick_locator(ax2)
plt.tight_layout()
plt.show()
# ------------------------------
# Plot Reproduction numbers (1)
# ------------------------------
xlims = [[pd.to_datetime('2020-03-01'), pd.to_datetime('2020-07-14')],[pd.to_datetime('2020-09-01'), pd.to_datetime('2021-02-01')]]
no_lockdown = [[pd.to_datetime('2020-03-01'), pd.to_datetime('2020-03-15')],[pd.to_datetime('2020-09-01'), pd.to_datetime('2020-10-19')]]
fig,axes=plt.subplots(nrows=2,ncols=1,figsize=(12,7))
# One subplot per wave; idx selects the wave in df_Re / model_results.
for idx,ax in enumerate(axes):
    # Draw the five mean lines first: ax.legend() below is called with
    # labels only, and matplotlib assigns those labels to artists in
    # creation order. Interleaving plot/fill_between (as this section
    # originally did) attaches the labels to the shaded bands instead of
    # the lines; this matches the corrected ordering used in the
    # "Reproduction numbers (2)" section further down.
    ax.plot(df_Re.index, df_Re[waves[idx],"rest_mean"], color='blue', linewidth=1.5)
    ax.plot(df_Re.index, df_Re[waves[idx],"work_mean"], color='red', linewidth=1.5)
    ax.plot(df_Re.index, df_Re[waves[idx],"home_mean"], color='green', linewidth=1.5)
    ax.plot(df_Re.index, df_Re[waves[idx],"schools_mean"], color='orange', linewidth=1.5)
    ax.plot(df_Re.index, df_Re[waves[idx],"total_mean"], color='black', linewidth=1.5)
    # Epidemic threshold R_e = 1.
    ax.axhline(y=1.0, color='black', linestyle='--',linewidth=1.5)
    # Uncertainty bands (lower/upper limits) drawn after the lines.
    ax.fill_between(df_Re.index, df_Re[waves[idx], "rest_LL"], df_Re[waves[idx], "rest_UL"], color='blue', alpha=0.2)
    ax.fill_between(df_Re.index, df_Re[waves[idx], "work_LL"], df_Re[waves[idx], "work_UL"], color='red', alpha=0.2)
    ax.fill_between(df_Re.index, df_Re[waves[idx], "home_LL"], df_Re[waves[idx], "home_UL"], color='green', alpha=0.2)
    ax.fill_between(df_Re.index, df_Re[waves[idx], "schools_LL"], df_Re[waves[idx], "schools_UL"], color='orange', alpha=0.2)
    ax.fill_between(df_Re.index, df_Re[waves[idx], "total_LL"], df_Re[waves[idx], "total_UL"], color='black', alpha=0.2)
    ax.xaxis.grid(False)
    ax.yaxis.grid(False)
    ax.set_ylabel('$R_{e}$ (-)')
    if idx == 0:
        ax.legend(['leisure','work','home','schools', 'total'], bbox_to_anchor=(1.20, 1), loc='upper left')
    ax.set_xlim(xlims[idx])
    ax.axvspan(no_lockdown[idx][0], no_lockdown[idx][1], alpha=0.2, color='black')
    ax.set_yticks([0,1,2,3])
    ax.set_ylim([0,4.5])
    # Secondary axis: model fit for this wave vs. new hospitalisations.
    ax2 = ax.twinx()
    time = model_results[idx]['time']
    vector_mean = model_results[idx]['vector_mean']
    vector_LL = model_results[idx]['vector_LL']
    vector_UL = model_results[idx]['vector_UL']
    ax2.scatter(df_sciensano.index,df_sciensano['H_in'],color='black',alpha=0.6,linestyle='None',facecolors='none', s=30, linewidth=1)
    ax2.plot(time,vector_mean,'--', color='black', linewidth=1.5)
    ax2.fill_between(time,vector_LL, vector_UL,alpha=0.20, color = 'black')
    ax2.xaxis.grid(False)
    ax2.yaxis.grid(False)
    ax2.set_xlim(xlims[idx])
    ax2.set_ylabel('New hospitalisations (-)')
    ax = _apply_tick_locator(ax)
    ax2 = _apply_tick_locator(ax2)
plt.tight_layout()
plt.show()
plt.close()
# ------------------------------
# Plot Reproduction numbers (2)
# ------------------------------
# Variant of the previous figure: the mean lines are drawn before the
# uncertainty bands (so the legend labels land on the lines) and the
# y-axis is clipped to [0, 2.5].
xlims = [[pd.to_datetime('2020-03-01'), pd.to_datetime('2020-07-14')],[pd.to_datetime('2020-09-01'), pd.to_datetime('2021-02-01')]]
no_lockdown = [[pd.to_datetime('2020-03-01'), pd.to_datetime('2020-03-15')],[pd.to_datetime('2020-09-01'), pd.to_datetime('2020-10-19')]]
fig,axes=plt.subplots(nrows=2,ncols=1,figsize=(12,7))
# One subplot per wave; idx selects the wave in df_Re / model_results.
for idx,ax in enumerate(axes):
    # Mean effective reproduction number per contact setting.
    ax.plot(df_Re.index, df_Re[waves[idx],"rest_mean"], color='blue', linewidth=1.5)
    ax.plot(df_Re.index, df_Re[waves[idx],"work_mean"], color='red', linewidth=1.5)
    ax.plot(df_Re.index, df_Re[waves[idx],"home_mean"], color='green', linewidth=1.5)
    ax.plot(df_Re.index, df_Re[waves[idx],"schools_mean"], color='orange', linewidth=1.5)
    ax.plot(df_Re.index, df_Re[waves[idx],"total_mean"], color='black', linewidth=1.5)
    # Epidemic threshold R_e = 1.
    ax.axhline(y=1.0, color='black', linestyle='--',linewidth=1.5)
    # Uncertainty bands (lower/upper limits).
    ax.fill_between(df_Re.index, df_Re[waves[idx], "rest_LL"], df_Re[waves[idx], "rest_UL"], color='blue', alpha=0.2)
    ax.fill_between(df_Re.index, df_Re[waves[idx], "work_LL"], df_Re[waves[idx], "work_UL"], color='red', alpha=0.2)
    ax.fill_between(df_Re.index, df_Re[waves[idx], "home_LL"], df_Re[waves[idx], "home_UL"], color='green', alpha=0.2)
    ax.fill_between(df_Re.index, df_Re[waves[idx], "schools_LL"], df_Re[waves[idx], "schools_UL"], color='orange', alpha=0.2)
    ax.fill_between(df_Re.index, df_Re[waves[idx], "total_LL"], df_Re[waves[idx], "total_UL"], color='black', alpha=0.2)
    ax.xaxis.grid(False)
    ax.yaxis.grid(False)
    ax.set_ylabel('$R_{e}$ (-)')
    if idx == 0:
        ax.legend(['leisure','work','home','schools', 'total'], bbox_to_anchor=(1.20, 1), loc='upper left')
    ax.set_xlim(xlims[idx])
    # Shade the period without lockdown measures.
    ax.axvspan(no_lockdown[idx][0], no_lockdown[idx][1], alpha=0.2, color='black')
    ax.set_yticks([0,1,2])
    ax.set_ylim([0,2.5])
    # Secondary axis: model fit for this wave vs. new hospitalisations.
    ax2 = ax.twinx()
    time = model_results[idx]['time']
    vector_mean = model_results[idx]['vector_mean']
    vector_LL = model_results[idx]['vector_LL']
    vector_UL = model_results[idx]['vector_UL']
    ax2.scatter(df_sciensano.index,df_sciensano['H_in'],color='black',alpha=0.6,linestyle='None',facecolors='none', s=30, linewidth=1)
    ax2.plot(time,vector_mean,'--', color='black', linewidth=1.5)
    ax2.fill_between(time,vector_LL, vector_UL,alpha=0.20, color = 'black')
    ax2.xaxis.grid(False)
    ax2.yaxis.grid(False)
    ax = _apply_tick_locator(ax)
    ax2 = _apply_tick_locator(ax2)
plt.tight_layout()
plt.show()
plt.close()
# -------------------------------------------------------
# Plot relative and reproduction number on one figure (1)
# -------------------------------------------------------
# 2x2 grid: rows are waves (idx), columns the two quantities
# (jdx == 0: relative contacts from df_rel; jdx == 1: R_e from df_Re).
# NOTE(review): xlims / no_lockdown / waves are reused from the
# sections above -- this block depends on them being defined.
dfs = [df_rel, df_Re]
fig,axes=plt.subplots(nrows=2,ncols=2, figsize=(18,8.27))
for idx,ax_row in enumerate(axes):
    for jdx, ax in enumerate(ax_row):
        # Mean curves per contact setting for this wave/quantity.
        ax.plot(dfs[jdx].index, dfs[jdx][waves[idx],"rest_mean"], color='blue', linewidth=1.5)
        ax.plot(dfs[jdx].index, dfs[jdx][waves[idx],"work_mean"], color='red', linewidth=1.5)
        ax.plot(dfs[jdx].index, dfs[jdx][waves[idx],"home_mean"], color='green', linewidth=1.5)
        ax.plot(dfs[jdx].index, dfs[jdx][waves[idx],"schools_mean"], color='orange', linewidth=1.5)
        ax.plot(dfs[jdx].index, dfs[jdx][waves[idx],"total_mean"], color='black', linewidth=1.5)
        if jdx == 0:
            # Left column: relative contact fractions.
            ax.set_ylabel('Relative contacts (-)')
            ax.set_xlim(xlims[idx])
            ax.axvspan(no_lockdown[idx][0], no_lockdown[idx][1], alpha=0.2, color='black')
            ax.set_yticks([0,0.25,0.50,0.75])
            ax.set_ylim([0,0.85])
        if jdx == 1:
            # Right column: reproduction numbers with the R_e = 1 line.
            ax.axhline(y=1.0, color='black', linestyle='--',linewidth=1.5)
            ax.set_ylabel('$R_{e}$ (-)')
            if idx == 0:
                # Legend only on the top-right panel.
                ax.legend(['leisure','work','home','schools', 'total'], bbox_to_anchor=(1.20, 1), loc='upper left')
            ax.set_xlim(xlims[idx])
            ax.axvspan(no_lockdown[idx][0], no_lockdown[idx][1], alpha=0.2, color='black')
            ax.set_yticks([0,1,2])
            ax.set_ylim([0,2.5])
        ax.xaxis.grid(False)
        ax.yaxis.grid(False)
        # Secondary axis: model fit + hospitalisation data for the wave.
        # NOTE(review): this overlay is added in BOTH columns; confirm it
        # is intended for the left (relative contacts) column as well.
        ax2 = ax.twinx()
        time = model_results[idx]['time']
        vector_mean = model_results[idx]['vector_mean']
        vector_LL = model_results[idx]['vector_LL']
        vector_UL = model_results[idx]['vector_UL']
        ax2.scatter(df_sciensano.index,df_sciensano['H_in'],color='black',alpha=0.6,linestyle='None',facecolors='none', s=30, linewidth=1)
        ax2.plot(time,vector_mean,'--', color='black', linewidth=1.5)
        ax2.fill_between(time,vector_LL, vector_UL,alpha=0.20, color = 'black')
        ax2.xaxis.grid(False)
        ax2.yaxis.grid(False)
        ax2.set_xlim(xlims[idx])
        ax2.set_ylabel('New hospitalisations (-)')
        ax = _apply_tick_locator(ax)
        ax2 = _apply_tick_locator(ax2)
plt.tight_layout()
plt.show()
plt.close()
| 50.655072
| 265
| 0.636773
| 10,762
| 69,904
| 3.913864
| 0.052871
| 0.036561
| 0.030484
| 0.030009
| 0.799506
| 0.754161
| 0.722632
| 0.700791
| 0.683958
| 0.670473
| 0
| 0.051821
| 0.154726
| 69,904
| 1,380
| 266
| 50.655072
| 0.661031
| 0.082456
| 0
| 0.48318
| 0
| 0.001019
| 0.141755
| 0.023499
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008155
| false
| 0
| 0.014271
| 0
| 0.046891
| 0.018349
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
04e34c2e2998f9009f105234d37b39d1092b9400
| 2,272
|
py
|
Python
|
krysztalki/workDir/tests/test matrices/test_matrices_like_2_0yy.py
|
woblob/Crystal_Symmetry
|
be2984b4487d6075986ef60822a347d0b0e6b885
|
[
"MIT"
] | null | null | null |
krysztalki/workDir/tests/test matrices/test_matrices_like_2_0yy.py
|
woblob/Crystal_Symmetry
|
be2984b4487d6075986ef60822a347d0b0e6b885
|
[
"MIT"
] | null | null | null |
krysztalki/workDir/tests/test matrices/test_matrices_like_2_0yy.py
|
woblob/Crystal_Symmetry
|
be2984b4487d6075986ef60822a347d0b0e6b885
|
[
"MIT"
] | null | null | null |
import matrices_new_extended as mne
import numpy as np
import sympy as sp
from equality_check import Point
# Symbolic coordinates of a generic position, shared by all tests below.
x, y, z = sp.symbols("x y z")
# Homogeneous base point [x, y, z, 1] that the symmetry matrices act on.
Point.base_point = np.array([x, y, z, 1])
class Test_Axis_2_0yy:
    """Tests for the two-fold rotation matrices about [0yy]-type axes.

    Every test applies one symmetry matrix from ``matrices_new_extended``
    to the symbolic base point and compares the image against the
    expected homogeneous coordinates.
    """

    def _verify(self, matrix, coords):
        # Shared assertion: transform the base point and compare.
        assert Point.calculate(matrix) == Point(coords)

    def test_matrix_2_0yy(self):
        self._verify(mne._matrix_2_0yy, [-x, z, y, 1])

    def test_matrix_2_qyy(self):
        self._verify(mne._matrix_2_qyy, [1 - x, z, y, 1])

    def test_matrix_2_1_0qyy_0qq(self):
        self._verify(mne._matrix_2_1_0qyy_0qq, [-x, 1 + z, y, 1])

    def test_matrix_2_0qyy_0qq(self):
        self._verify(mne._matrix_2_0qyy_0qq, [-x, z, 1 + y, 1])

    def test_matrix_2_1_qyy_0hh(self):
        self._verify(mne._matrix_2_1_qyy_0hh, [1 - x, 1 + z, 1 + y, 1])

    def test_matrix_2_1_3omqyy_0hh(self):
        self._verify(mne._matrix_2_1_3omqyy_0hh, [1.5 - x, 0.5 + z, 1.5 + y, 1])

    def test_matrix_2_1_oqyy_0hh(self):
        self._verify(mne._matrix_2_1_oqyy_0hh, [0.5 - x, 1.5 + z, 0.5 + y, 1])

    def test_matrix_2_1_oyy_0qq(self):
        self._verify(mne._matrix_2_1_oyy_0qq, [0.5 - x, 0.5 + z, 0.5 + y, 1])

    def test_matrix_2_1_oyy_03q3q(self):
        self._verify(mne._matrix_2_1_oyy_03q3q, [0.5 - x, 1.5 + z, 1.5 + y, 1])

    def test_matrix_2_1_3ohmyy_0mqq(self):
        self._verify(mne._matrix_2_1_3ohmyy_0mqq, [1.5 - x, 0.5 + z, 1.5 + y, 1])

    def test_matrix_2_1_3oqyy_0hh(self):
        self._verify(mne._matrix_2_1_3oqyy_0hh, [1.5 - x, 1.5 + z, 0.5 + y, 1])
| 34.424242
| 65
| 0.651849
| 359
| 2,272
| 3.827298
| 0.128134
| 0.112082
| 0.093159
| 0.112082
| 0.856623
| 0.759825
| 0.708879
| 0.663755
| 0.578603
| 0.328967
| 0
| 0.073437
| 0.232835
| 2,272
| 65
| 66
| 34.953846
| 0.714859
| 0
| 0
| 0.254902
| 0
| 0
| 0.002201
| 0
| 0
| 0
| 0
| 0
| 0.215686
| 1
| 0.215686
| false
| 0
| 0.078431
| 0
| 0.313725
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
04e78da809870931dead3f10430e92ccabd67973
| 129
|
py
|
Python
|
django/config/__init__.py
|
PiochU19/amazon-price-tracker
|
93a321d5799ee2b0b02487fea5577698a6da8aa3
|
[
"MIT"
] | null | null | null |
django/config/__init__.py
|
PiochU19/amazon-price-tracker
|
93a321d5799ee2b0b02487fea5577698a6da8aa3
|
[
"MIT"
] | 1
|
2021-06-10T22:05:12.000Z
|
2021-06-10T22:05:12.000Z
|
django/config/__init__.py
|
PiochU19/amazon_price_tracker
|
93a321d5799ee2b0b02487fea5577698a6da8aa3
|
[
"MIT"
] | 1
|
2022-01-09T03:23:19.000Z
|
2022-01-09T03:23:19.000Z
|
from __future__ import absolute_import, unicode_literals
from config.celery import app as celery_app
__all__ = ("celery_app",)
| 21.5
| 56
| 0.813953
| 18
| 129
| 5.166667
| 0.611111
| 0.193548
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.124031
| 129
| 5
| 57
| 25.8
| 0.823009
| 0
| 0
| 0
| 0
| 0
| 0.077519
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
b6da848b86206d5255d9112ae53bae70f5a0bfe4
| 170
|
py
|
Python
|
password_validation.py
|
PaulinaKomorek/UAM
|
8995ca20d91e7c1ab4e59dbaa40cc02c4b7d9287
|
[
"MIT"
] | null | null | null |
password_validation.py
|
PaulinaKomorek/UAM
|
8995ca20d91e7c1ab4e59dbaa40cc02c4b7d9287
|
[
"MIT"
] | null | null | null |
password_validation.py
|
PaulinaKomorek/UAM
|
8995ca20d91e7c1ab4e59dbaa40cc02c4b7d9287
|
[
"MIT"
] | null | null | null |
def validate_password(password: str) -> bool:
    """Return True if ``password`` is valid, False otherwise.

    A valid password is at least 8 characters long and contains at
    least one decimal digit.
    """
    # any() short-circuits at the first digit, mirroring the original
    # early return; the length guard keeps the original reject-first order.
    return len(password) >= 8 and any(ch.isdigit() for ch in password)
| 24.285714
| 37
| 0.594118
| 22
| 170
| 4.545455
| 0.636364
| 0.22
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008772
| 0.329412
| 170
| 7
| 38
| 24.285714
| 0.868421
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0.428571
| 0
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
b6e00ecbddc0eeec136e5f6ba63d77ff7fbec6c7
| 8,552
|
py
|
Python
|
tests/test_operations.py
|
mbloem-Steelcase/traces
|
144c7bf4667e7c4554b84e78d7626200437b6c4a
|
[
"MIT"
] | 3
|
2018-06-07T01:55:00.000Z
|
2018-11-12T14:38:34.000Z
|
tests/test_operations.py
|
nachereshata/traces
|
05f2f4d09a442ae6ce5439d2bbb59d7b63c687b7
|
[
"MIT"
] | null | null | null |
tests/test_operations.py
|
nachereshata/traces
|
05f2f4d09a442ae6ce5439d2bbb59d7b63c687b7
|
[
"MIT"
] | null | null | null |
import datetime
import nose
from traces import TimeSeries
from traces.decorators import ignorant, strict
def test_scalar_ops():
    """Scalar operations (multiply, to_bool, threshold) on a TimeSeries:
    the default applies before the domain, exact values inside it, and
    the last value persists after it."""
    a = TimeSeries()
    a.set(datetime.datetime(2015, 3, 1), 1)
    a.set(datetime.datetime(2015, 3, 2), 0)
    a.set(datetime.datetime(2015, 3, 3), 3)
    a.set(datetime.datetime(2015, 3, 4), 2)
    ts_half = a.multiply(0.5)
    ts_bool = a.to_bool(invert=False)
    ts_threshold = a.threshold(value=1.1)
    # test before domain, should give default value
    # (use `is None` for singleton comparison, per PEP 8 -- the original
    # mixed `is None` and `== None` here)
    assert ts_half[datetime.datetime(2015, 2, 24)] is None
    assert ts_bool[datetime.datetime(2015, 2, 24)] is None
    assert ts_threshold[datetime.datetime(2015, 2, 24)] is None
    # test values throughout series
    assert ts_half[datetime.datetime(2015, 3, 1, 6)] == 0.5
    assert ts_bool[datetime.datetime(2015, 3, 1, 6)] == True
    assert ts_threshold[datetime.datetime(2015, 3, 1, 6)] == False
    assert ts_half[datetime.datetime(2015, 3, 2, 6)] == 0
    assert ts_bool[datetime.datetime(2015, 3, 2, 6)] == False
    assert ts_threshold[datetime.datetime(2015, 3, 2, 6)] == False
    assert ts_half[datetime.datetime(2015, 3, 3, 6)] == 1.5
    assert ts_bool[datetime.datetime(2015, 3, 3, 6)] == True
    assert ts_threshold[datetime.datetime(2015, 3, 3, 6)] == True
    # test after domain, should give last value
    assert ts_half[datetime.datetime(2015, 3, 4, 18)] == 1
    assert ts_bool[datetime.datetime(2015, 3, 4, 18)] == True
    assert ts_threshold[datetime.datetime(2015, 3, 4, 18)] == True
def test_sum():
    """Merging three binary TimeSeries with an ignorant ``sum`` yields
    the pointwise count of series that are 1 at each instant."""
    series_points = [
        [(datetime.datetime(2015, 3, 1), 1),
         (datetime.datetime(2015, 3, 2), 0),
         (datetime.datetime(2015, 3, 3), 1),
         (datetime.datetime(2015, 3, 4), 0)],
        [(datetime.datetime(2015, 3, 1), 0),
         (datetime.datetime(2015, 3, 1, 12), 1),
         (datetime.datetime(2015, 3, 2), 0),
         (datetime.datetime(2015, 3, 2, 12), 1),
         (datetime.datetime(2015, 3, 3), 0)],
        [(datetime.datetime(2015, 3, 1), 0),
         (datetime.datetime(2015, 3, 1, 18), 1),
         (datetime.datetime(2015, 3, 5), 0)],
    ]
    a, b, c = (TimeSeries() for _ in series_points)
    for series, points in zip((a, b, c), series_points):
        for when, value in points:
            series.set(when, value)
    ts_sum = TimeSeries.merge([a, b, c], operation=ignorant(sum))
    # (query time, expected sum): the first entry is before the domain
    # and should give the default; the last two are after the domain and
    # should give the last value.
    expectations = [
        (datetime.datetime(2015, 2, 24), 0),
        (datetime.datetime(2015, 3, 1), 1),
        (datetime.datetime(2015, 3, 1, 6), 1),
        (datetime.datetime(2015, 3, 1, 12), 2),
        (datetime.datetime(2015, 3, 1, 13), 2),
        (datetime.datetime(2015, 3, 1, 17), 2),
        (datetime.datetime(2015, 3, 1, 18), 3),
        (datetime.datetime(2015, 3, 1, 19), 3),
        (datetime.datetime(2015, 3, 3), 2),
        (datetime.datetime(2015, 3, 4), 1),
        (datetime.datetime(2015, 3, 4, 18), 1),
        (datetime.datetime(2015, 3, 5), 0),
        (datetime.datetime(2015, 3, 6), 0),
    ]
    for when, expected in expectations:
        assert ts_sum[when] == expected
    assert 0 + a + b == a + b
def example_dictlike():
    """Demonstrate dict-style access on a TimeSeries: overwriting keys,
    bulk insertion of pseudo-random points, lookup, and iteration."""
    # These modules were used but never imported anywhere in the file;
    # import them locally so the example is runnable.
    import math
    import random
    import sys
    # test overwriting keys
    l = TimeSeries()
    l[datetime.datetime(2010, 1, 1)] = 5
    l[datetime.datetime(2010, 1, 2)] = 4
    l[datetime.datetime(2010, 1, 3)] = 3
    l[datetime.datetime(2010, 1, 7)] = 2
    l[datetime.datetime(2010, 1, 4)] = 1
    l[datetime.datetime(2010, 1, 4)] = 10
    l[datetime.datetime(2010, 1, 4)] = 5
    l[datetime.datetime(2010, 1, 1)] = 1
    l[datetime.datetime(2010, 1, 7)] = 1.2
    l[datetime.datetime(2010, 1, 8)] = 1.3
    l[datetime.datetime(2010, 1, 12)] = 1.3
    # do some wackiness with a bunch of points
    dt = datetime.datetime(2010, 1, 12)
    for i in range(1000):
        dt += datetime.timedelta(hours=random.random())
        l[dt] = math.sin(i / float(math.pi))
    dt -= datetime.timedelta(hours=500)
    dt -= datetime.timedelta(minutes=30)
    for i in range(1000):
        dt += datetime.timedelta(hours=random.random())
        l[dt] = math.cos(i / float(math.pi))
    # what does this get?
    # (was the Python-2 statement `print >> sys.stderr, ...`, which is a
    # SyntaxError under the Python 3 print() calls used elsewhere here)
    print(l[datetime.datetime(2010, 1, 3, 23, 59, 59)], file=sys.stderr)
    # output the time series
    for i, j in l:
        print(i.isoformat(), j)
def example_mean():
    """Print a square-wave TimeSeries and its windowed means over
    25-hour windows, then the window boundaries with fixed y-values
    (presumably plot bounds for an external plotting step -- TODO
    confirm against the consumer of this output)."""
    l = TimeSeries()
    l[datetime.datetime(2010, 1, 1)] = 0
    l[datetime.datetime(2010, 1, 3, 10)] = 1
    l[datetime.datetime(2010, 1, 5)] = 0
    l[datetime.datetime(2010, 1, 8)] = 1
    l[datetime.datetime(2010, 1, 17)] = 0
    l[datetime.datetime(2010, 1, 19)] = 1
    l[datetime.datetime(2010, 1, 23)] = 0
    l[datetime.datetime(2010, 1, 26)] = 1
    l[datetime.datetime(2010, 1, 28)] = 0
    l[datetime.datetime(2010, 1, 31)] = 1
    l[datetime.datetime(2010, 2, 5)] = 0
    # Raw series, rescaled for display.
    for time, value in l:
        print(time.isoformat(), 0.1 * value + 1.1)
    print('')
    # Windowed means over consecutive 25-hour windows.
    timestep = {'hours': 25}
    start = datetime.datetime(2010, 1, 1)
    while start <= datetime.datetime(2010, 2, 5):
        end = start + datetime.timedelta(**timestep)
        print(start.isoformat(), l.mean(start, end))
        start = end
    print('')
    # Same window boundaries with constant values -0.2 and 1.2.
    start = datetime.datetime(2010, 1, 1)
    while start <= datetime.datetime(2010, 2, 5):
        end = start + datetime.timedelta(**timestep)
        print(start.isoformat(), -0.2)
        print(start.isoformat(), 1.2)
        start = end
def example_arrow():
    """Same windowed-mean example as example_mean, but keyed with
    arrow.Arrow timestamps and iterated via span_range.

    NOTE(review): neither ``arrow`` nor ``span_range`` is imported in
    this file's visible imports -- confirm where they come from.
    """
    l = TimeSeries()
    l[arrow.Arrow(2010, 1, 1)] = 0
    l[arrow.Arrow(2010, 1, 3, 10)] = 1
    l[arrow.Arrow(2010, 1, 5)] = 0
    l[arrow.Arrow(2010, 1, 8)] = 1
    l[arrow.Arrow(2010, 1, 17)] = 0
    l[arrow.Arrow(2010, 1, 19)] = 1
    l[arrow.Arrow(2010, 1, 23)] = 0
    l[arrow.Arrow(2010, 1, 26)] = 1
    l[arrow.Arrow(2010, 1, 28)] = 0
    l[arrow.Arrow(2010, 1, 31)] = 1
    l[arrow.Arrow(2010, 2, 5)] = 0
    # Raw series, rescaled for display.
    for time, value in l:
        print(time.naive.isoformat(), 0.1 * value + 1.1)
    print('')
    start = arrow.Arrow(2010, 1, 1)
    end = arrow.Arrow(2010, 2, 5)
    unit = {'hours': 25}
    # Windowed means over 25-hour spans.
    for start, end in span_range(start, end, unit):
        print(start.naive.isoformat(), l.mean(start, end))
    print('')
    # NOTE(review): ``start``/``end`` here are the leftover loop
    # variables from the previous loop, so this second pass covers a
    # different range than the first -- confirm this is intended.
    for start, end in span_range(start, end, unit):
        print(start.naive.isoformat(), -0.2)
        print(start.naive.isoformat(), 1.2)
def example_sum():
    """Print three overlapping binary TimeSeries as step intervals and
    then their merged (summed) series."""
    a = TimeSeries()
    a.set(datetime.datetime(2015, 3, 1), 1)
    a.set(datetime.datetime(2015, 3, 2), 0)
    a.set(datetime.datetime(2015, 3, 3), 1)
    a.set(datetime.datetime(2015, 3, 5), 0)
    a.set(datetime.datetime(2015, 3, 6), 0)
    b = TimeSeries()
    b.set(datetime.datetime(2015, 3, 1), 0)
    b.set(datetime.datetime(2015, 3, 2, 12), 1)
    b.set(datetime.datetime(2015, 3, 3, 13, 13), 0)
    b.set(datetime.datetime(2015, 3, 4), 1)
    b.set(datetime.datetime(2015, 3, 5), 0)
    b.set(datetime.datetime(2015, 3, 5, 12), 1)
    b.set(datetime.datetime(2015, 3, 5, 19), 0)
    c = TimeSeries()
    c.set(datetime.datetime(2015, 3, 1, 17), 0)
    c.set(datetime.datetime(2015, 3, 1, 21), 1)
    c.set(datetime.datetime(2015, 3, 2, 13, 13), 0)
    c.set(datetime.datetime(2015, 3, 4, 18), 1)
    c.set(datetime.datetime(2015, 3, 5, 4), 0)
    # output the three time series
    for i, ts in enumerate([a, b, c]):
        # Intervals where the value is 1, then intervals where it is 0.
        for (t0, v0), (t1, v1) in ts.iterintervals(1):
            print(t0.isoformat(), i)
            print(t1.isoformat(), i)
        print('')
        for (t0, v0), (t1, v1) in ts.iterintervals(0):
            print(t0.isoformat(), i)
            print(t1.isoformat(), i)
        print('')
    # output the sum
    # for dt, i in sum([a, b, c]):
    #     print dt.isoformat(), i
    #     print ''
    for dt, i in TimeSeries.merge([a, b, c], operation=sum):
        print(dt.isoformat(), i)
def test_interpolation():
    """Linear interpolation between the knots (0, 0) and (1, 2): exact
    at the knots, interpolated between them, default (None) before the
    domain, last value after it; unknown schemes raise ValueError."""
    ts = TimeSeries(data=[(0, 0), (1, 2)])
    assert ts.get(0, interpolate='linear') == 0
    assert ts.get(0.25, interpolate='linear') == 0.5
    assert ts.get(0.5, interpolate='linear') == 1.0
    assert ts.get(0.75, interpolate='linear') == 1.5
    assert ts.get(1, interpolate='linear') == 2
    # before the domain the default (None) applies; compare with
    # `is None`, not `== None`, per PEP 8
    assert ts.get(-1, interpolate='linear') is None
    assert ts.get(2, interpolate='linear') == 2
    nose.tools.assert_raises(ValueError, ts.get, 0.5, 'spline')
def test_default():
    """The ``default`` of a TimeSeries.operation result follows the
    explicitly passed ``default`` argument (None when omitted)."""
    ts = TimeSeries(data=[(0, 0), (1, 2)])
    ts_no_default = ts.operation(ts, lambda a, b: a + b)
    # `is None`, not `== None`, for singleton comparison (PEP 8)
    assert ts_no_default.default is None
    ts_default = ts.operation(ts, lambda a, b: a + b, default=1)
    assert ts_default.default == 1
    ts_none = ts.operation(ts, lambda a, b: a + b, default=None)
    assert ts_none.default is None
| 32.150376
| 69
| 0.605589
| 1,367
| 8,552
| 3.749817
| 0.099488
| 0.277799
| 0.238002
| 0.233515
| 0.785408
| 0.74366
| 0.587983
| 0.496684
| 0.390948
| 0.22181
| 0
| 0.130992
| 0.224275
| 8,552
| 265
| 70
| 32.271698
| 0.641694
| 0.052385
| 0
| 0.265957
| 0
| 0
| 0.007171
| 0
| 0
| 0
| 0
| 0
| 0.212766
| 1
| 0.042553
| false
| 0
| 0.021277
| 0
| 0.06383
| 0.111702
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b6e737545d82e1ee3fcf7414405300b4fa918159
| 133
|
py
|
Python
|
hammers/__init__.py
|
ChameleonCloud/bag-o-hammers
|
0faaf9b21aceb155dc7da2ea92cf77af815c11e7
|
[
"Apache-2.0"
] | null | null | null |
hammers/__init__.py
|
ChameleonCloud/bag-o-hammers
|
0faaf9b21aceb155dc7da2ea92cf77af815c11e7
|
[
"Apache-2.0"
] | 8
|
2018-05-24T01:07:27.000Z
|
2021-09-01T18:02:29.000Z
|
hammers/__init__.py
|
ChameleonCloud/bag-o-hammers
|
0faaf9b21aceb155dc7da2ea92cf77af815c11e7
|
[
"Apache-2.0"
] | 2
|
2016-12-07T01:12:41.000Z
|
2018-08-17T16:57:54.000Z
|
# coding: utf-8
from .mycnf import *
from .mysqlshim import *
from .mysqlargs import *
from . import query
__version__ = '0.3.1'
| 12.090909
| 24
| 0.691729
| 19
| 133
| 4.631579
| 0.684211
| 0.340909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037383
| 0.195489
| 133
| 10
| 25
| 13.3
| 0.785047
| 0.097744
| 0
| 0
| 0
| 0
| 0.042373
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
b6ea53e8dc2b68ce566334442d0e5e69dd6893f3
| 1,308
|
py
|
Python
|
devilry/devilry_authenticate/urls.py
|
devilry/devilry-django
|
9ae28e462dfa4cfee966ebacbca04ade9627e715
|
[
"BSD-3-Clause"
] | 29
|
2015-01-18T22:56:23.000Z
|
2020-11-10T21:28:27.000Z
|
devilry/devilry_authenticate/urls.py
|
devilry/devilry-django
|
9ae28e462dfa4cfee966ebacbca04ade9627e715
|
[
"BSD-3-Clause"
] | 786
|
2015-01-06T16:10:18.000Z
|
2022-03-16T11:10:50.000Z
|
devilry/devilry_authenticate/urls.py
|
devilry/devilry-django
|
9ae28e462dfa4cfee966ebacbca04ade9627e715
|
[
"BSD-3-Clause"
] | 15
|
2015-04-06T06:18:43.000Z
|
2021-02-24T12:28:30.000Z
|
from django.urls import path, include
from cradmin_legacy.apps.cradmin_authenticate.views import logout
from devilry.devilry_authenticate.views import CustomLoginView, allauth_views
# Authentication routes: cradmin-legacy login/logout plus django-allauth
# login/logout views, with the remaining allauth URLs mounted under
# 'allauth/'.
urlpatterns = [
    path('login', CustomLoginView.as_view(), name='cradmin-authenticate-login'),
    path('logout', logout.cradmin_logoutview, name='cradmin-authenticate-logout'),
    path('allauth/login/',
         allauth_views.AllauthLoginView.as_view(),
         name='account_login'),
    path('allauth/logout/',
         allauth_views.AllauthLogoutView.as_view(),
         name='account_logout'),
    path('allauth/', include('allauth.urls')),
]
# from django.conf.urls import url, include
# from cradmin_legacy.apps.cradmin_authenticate.views import logout
# from devilry.devilry_authenticate.views import CustomLoginView, allauth_views
# urlpatterns = [
# url(r'^login$', CustomLoginView.as_view(), name='cradmin-authenticate-login'),
# url(r'^logout$', logout.cradmin_logoutview, name='cradmin-authenticate-logout'),
# url(r"^allauth/login/$",
# allauth_views.AllauthLoginView.as_view(),
# name="account_login"),
# url(r"^allauth/logout/$",
# allauth_views.AllauthLogoutView.as_view(),
# name="account_logout"),
# url(r'^allauth/', include('allauth.urls')),
# ]
| 36.333333
| 86
| 0.710245
| 146
| 1,308
| 6.19863
| 0.184932
| 0.125967
| 0.066298
| 0.075138
| 0.844199
| 0.844199
| 0.844199
| 0.844199
| 0.596685
| 0.596685
| 0
| 0
| 0.144495
| 1,308
| 35
| 87
| 37.371429
| 0.808758
| 0.490826
| 0
| 0
| 0
| 0
| 0.215054
| 0.081413
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.214286
| 0
| 0.214286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
8e17b2d7d92cf7a018f3f3b6d132d6b1d67d0393
| 22
|
py
|
Python
|
solutions/03-dask-dataframe-non-cancelled.py
|
xeroquark/dask-tutorial
|
0061ae7e0e18d0a75bdd67dc9c2a551ab174e057
|
[
"BSD-3-Clause"
] | 331
|
2019-01-26T21:11:45.000Z
|
2022-03-02T11:35:16.000Z
|
solutions/03-dask-dataframe-non-cancelled.py
|
xeroquark/dask-tutorial
|
0061ae7e0e18d0a75bdd67dc9c2a551ab174e057
|
[
"BSD-3-Clause"
] | 5
|
2019-11-15T02:00:26.000Z
|
2021-01-06T04:26:40.000Z
|
solutions/03-dask-dataframe-non-cancelled.py
|
xeroquark/dask-tutorial
|
0061ae7e0e18d0a75bdd67dc9c2a551ab174e057
|
[
"BSD-3-Clause"
] | 88
|
2019-01-25T16:53:47.000Z
|
2022-03-03T00:05:08.000Z
|
# Number of rows whose Cancelled flag is False (boolean-mask filter).
len(df[~df.Cancelled])
| 22
| 22
| 0.727273
| 4
| 22
| 4
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 22
| 1
| 22
| 22
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
8e31a0fcb2929be25b4708b6022423ac371a4bee
| 4,319
|
py
|
Python
|
pybossa_lists/FishingVesselsV2_HighConfidenceStudents_20160314/downloadjsons.py
|
GlobalFishingWatch/vessel-lists
|
4fb241f275d7327dc311db89bf86ef43d7ae8870
|
[
"Apache-2.0"
] | 2
|
2017-09-16T10:29:34.000Z
|
2019-01-23T17:58:21.000Z
|
pybossa_lists/FishingVesselsV2_HighConfidenceStudents_20160314/downloadjsons.py
|
GlobalFishingWatch/vessel-lists
|
4fb241f275d7327dc311db89bf86ef43d7ae8870
|
[
"Apache-2.0"
] | 16
|
2016-06-21T10:20:11.000Z
|
2016-07-07T16:03:02.000Z
|
pybossa_lists/FishingVesselsV2_HighConfidenceStudents_20160314/downloadjsons.py
|
GlobalFishingWatch/vessel-lists
|
4fb241f275d7327dc311db89bf86ef43d7ae8870
|
[
"Apache-2.0"
] | 1
|
2021-05-25T08:33:32.000Z
|
2021-05-25T08:33:32.000Z
|
#! /usr/bin/python
"""Download PyBossa task-run JSON for a fixed batch of task ids.

For each task id, fetch the task runs from the crowd.globalfishingwatch.org
API and write the raw response body to jsons/<task_id>.json.
"""
try:
    from urllib2 import urlopen  # Python 2 (original runtime)
except ImportError:
    from urllib.request import urlopen  # Python 3 fallback
import json

# Active batch: two consecutive runs of ids (4108-4249 and 4539-4595).
# NOTE(review): the original ended the literal list at 4595 and then had a
# commented-out remainder (4596-4972) whose continuation lines were bare
# tokens -- a syntax error. The dead remainder is removed so the file parses.
task_ids = list(range(4108, 4250)) + list(range(4539, 4596))

for t in task_ids:
    url = 'http://crowd.globalfishingwatch.org/api/taskrun?task_id=' + str(t)
    response = urlopen(url)
    try:
        body = response.read()
    finally:
        response.close()  # don't leak the connection if read() fails
    # 'wb' writes the payload exactly as received on both Python 2 and 3.
    with open("jsons/" + str(t) + ".json", 'wb') as f:
        f.write(body)
| 269.9375
| 4,044
| 0.575365
| 618
| 4,319
| 4.016181
| 0.978964
| 0.005641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.740289
| 0.278768
| 4,319
| 15
| 4,045
| 287.933333
| 0.056501
| 0.615189
| 0
| 0
| 0
| 0
| 0.040964
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.222222
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
8e37450e97eaff17123f1141e0cf93222341403a
| 177
|
py
|
Python
|
python/random_list.py
|
twofist/stalin-sort
|
46d9434bde96daf6c5c957b60b928fc9eb6e4006
|
[
"MIT"
] | 1,140
|
2018-10-30T13:03:09.000Z
|
2022-03-29T22:41:24.000Z
|
python/random_list.py
|
twofist/stalin-sort
|
46d9434bde96daf6c5c957b60b928fc9eb6e4006
|
[
"MIT"
] | 77
|
2018-10-30T13:20:15.000Z
|
2021-11-06T03:44:55.000Z
|
python/random_list.py
|
twofist/stalin-sort
|
46d9434bde96daf6c5c957b60b928fc9eb6e4006
|
[
"MIT"
] | 245
|
2018-10-30T13:10:53.000Z
|
2022-03-14T08:13:56.000Z
|
import random


def random_list_maker(length, low=0, high=100):
    """Return a list of `length` random integers drawn uniformly from [low, high].

    Args:
        length: number of integers to generate.
        low: inclusive lower bound (default 0, matching the original behavior).
        high: inclusive upper bound (default 100, matching the original behavior).

    Returns:
        A new list of `length` ints.
    """
    # random.randint is inclusive on both ends.
    return [random.randint(low, high) for _ in range(length)]
| 19.666667
| 50
| 0.666667
| 25
| 177
| 4.52
| 0.6
| 0.353982
| 0.283186
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 0.237288
| 177
| 9
| 51
| 19.666667
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.166667
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
8e461b890feaaddd81ab6392702392e0f0ab82a5
| 221
|
py
|
Python
|
cmfg/__init__.py
|
mlares/CBR_CrossCorr
|
f3599aed997e003d2d838ba5ad345d2f783a5bda
|
[
"MIT"
] | null | null | null |
cmfg/__init__.py
|
mlares/CBR_CrossCorr
|
f3599aed997e003d2d838ba5ad345d2f783a5bda
|
[
"MIT"
] | null | null | null |
cmfg/__init__.py
|
mlares/CBR_CrossCorr
|
f3599aed997e003d2d838ba5ad345d2f783a5bda
|
[
"MIT"
] | null | null | null |
# =========================================================
# DOCS
# =========================================================
"""Software for the study of Cosmic Microwave Foregrounds (CMFG)
"""

# Package version string, exposed for packaging tools and callers.
__version__ = "0.0.1"
| 22.1
| 64
| 0.312217
| 14
| 221
| 4.642857
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015075
| 0.099548
| 221
| 9
| 65
| 24.555556
| 0.311558
| 0.828054
| 0
| 0
| 0
| 0
| 0.178571
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
f3ef277f5055494ea5e815e534881b76e2f5adbe
| 332
|
py
|
Python
|
exercises/Desafio024.py
|
zThiago15/Curso-em-Video
|
ef25e0497edb79bdfbe71fde485f4dafc0d2a0e6
|
[
"MIT"
] | null | null | null |
exercises/Desafio024.py
|
zThiago15/Curso-em-Video
|
ef25e0497edb79bdfbe71fde485f4dafc0d2a0e6
|
[
"MIT"
] | null | null | null |
exercises/Desafio024.py
|
zThiago15/Curso-em-Video
|
ef25e0497edb79bdfbe71fde485f4dafc0d2a0e6
|
[
"MIT"
] | 1
|
2021-07-24T21:39:26.000Z
|
2021-07-24T21:39:26.000Z
|
# Desafio 24: read a city name and report whether it starts with "SANTO",
# using two equivalent checks on the first word of the input.
print('\033[30mEx: Digite um nome de uma cidade e mostre se ela começa com SANTO.')
print('\033[1;31m=-'*20)

cid = str(input('\033[1;34mDigite o nome da cidade em que você nasceu: '))
div = cid.split()

# Compare the first word against "Santo" in two different ways.
primeiro_metodo = 'Santo' in div[0].title()
segundo_metodo = div[0].upper() == 'SANTO'
print('1° método: Começa com Santo?', primeiro_metodo)
print('2° método: Começa com SANTO?', segundo_metodo)
| 47.428571
| 83
| 0.665663
| 61
| 332
| 3.655738
| 0.622951
| 0.121076
| 0.188341
| 0.143498
| 0.188341
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08042
| 0.138554
| 332
| 7
| 84
| 47.428571
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0.618619
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
6d0e8dfaf95b112c12f0f9455ac922c1ec26e651
| 100
|
py
|
Python
|
src/megarest/__init__.py
|
aakash-sahai/megarest
|
fa3b1dbb6e36dc1c45689436e4bcc84a492185e1
|
[
"MIT"
] | null | null | null |
src/megarest/__init__.py
|
aakash-sahai/megarest
|
fa3b1dbb6e36dc1c45689436e4bcc84a492185e1
|
[
"MIT"
] | null | null | null |
src/megarest/__init__.py
|
aakash-sahai/megarest
|
fa3b1dbb6e36dc1c45689436e4bcc84a492185e1
|
[
"MIT"
] | null | null | null |
# Package surface: re-export the application and API entry points so callers
# can import them from this package directly.
from app import MegaRestApp
from api import MegaRestAPI

# Explicit public API of the package.
__all__ = [ 'MegaRestApp', 'MegaRestAPI' ]
| 20
| 42
| 0.77
| 11
| 100
| 6.636364
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 100
| 4
| 43
| 25
| 0.858824
| 0
| 0
| 0
| 0
| 0
| 0.22
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
6d4c6a0376f1af9497358078985f1a5d44005af9
| 115
|
py
|
Python
|
tryCal.py
|
diallog/GCPpy
|
dabd55ece1c12c1a390a228cd04cb7eb110e564b
|
[
"Unlicense"
] | null | null | null |
tryCal.py
|
diallog/GCPpy
|
dabd55ece1c12c1a390a228cd04cb7eb110e564b
|
[
"Unlicense"
] | null | null | null |
tryCal.py
|
diallog/GCPpy
|
dabd55ece1c12c1a390a228cd04cb7eb110e564b
|
[
"Unlicense"
] | null | null | null |
#!/usr/bin/env python3
"""Print the January 2020 calendar with weeks starting on Sunday."""
import calendar

# Switch the module-wide convention to Sunday-first before rendering.
calendar.setfirstweekday(calendar.SUNDAY)
print(calendar.month(2020, 1))
| 16.428571
| 43
| 0.782609
| 15
| 115
| 6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056075
| 0.069565
| 115
| 6
| 44
| 19.166667
| 0.785047
| 0.182609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
6d5b358806daeac1d711a019e32d0fd9b2e7663a
| 1,830
|
py
|
Python
|
Planeacion vuelo.py
|
ogarcia1704/Planeacion-del-vuelo-de-Dron
|
dc8a4ae818463c5450a5dfb8045c8361a8dfab71
|
[
"MIT"
] | null | null | null |
Planeacion vuelo.py
|
ogarcia1704/Planeacion-del-vuelo-de-Dron
|
dc8a4ae818463c5450a5dfb8045c8361a8dfab71
|
[
"MIT"
] | null | null | null |
Planeacion vuelo.py
|
ogarcia1704/Planeacion-del-vuelo-de-Dron
|
dc8a4ae818463c5450a5dfb8045c8361a8dfab71
|
[
"MIT"
] | null | null | null |
Python 3.7.3 (v3.7.3:ef4ec6ed12, Mar 25 2019, 21:26:53) [MSC v.1916 32 bit (Intel)] on win32
Type "help", "copyright", "credits" or "license()" for more information.
>>> from exif import Image
>>> with open('C:\\Users\\oswal\\Pictures\\oswaldo.jpg', 'rb') as image_file:
... my_image = Image(image_file)
...
>>> dir(my_image)
['_exif_ifd_pointer', '_gps_ifd_pointer', '_interoperability_ifd_Pointer', '_segments', 'cfa_pattern', 'color_space', 'components_configuration', 'compressed_bits_per_pixel', 'compression', 'contrast', 'custom_rendered', 'datetime', 'datetime_digitized', 'datetime_original', 'digital_zoom_ratio', 'exif_version', 'exposure_bias_value', 'exposure_mode', 'exposure_program', 'exposure_time', 'f_number', 'file_source', 'flash', 'flashpix_version', 'focal_length', 'focal_length_in_35mm_film', 'gain_control', 'get', 'get_file', 'gps_altitude', 'gps_altitude_ref', 'gps_datestamp', 'gps_latitude', 'gps_latitude_ref', 'gps_longitude', 'gps_longitude_ref', 'gps_map_datum', 'gps_satellites', 'gps_timestamp', 'gps_version_id', 'jpeg_interchange_format', 'jpeg_interchange_format_length', 'light_source', 'make', 'maker_note', 'max_aperture_value', 'metering_mode', 'model', 'orientation', 'photographic_sensitivity', 'pixel_x_dimension', 'pixel_y_dimension', 'resolution_unit', 'saturation', 'scene_capture_type', 'scene_type', 'sensing_method', 'sensitivity_type', 'sharpness', 'software', 'subject_distance_range', 'subsec_time', 'subsec_time_digitized', 'subsec_time_original', 'user_comment', 'white_balance', 'x_resolution', 'y_and_c_positioning', 'y_resolution']
>>> import pandas as pd
>>> data = pd.DataFrame(dir(my_image))
>>> datatoexcel = pd.ExcelWriter("CaracteristicasImagen.xlsx",engine='xlsxwriter')
>>> data.to_excel(datatoexcel, sheet_name='Sheet1')
>>> datatoexcel.save()
| 130.714286
| 1,266
| 0.747541
| 234
| 1,830
| 5.440171
| 0.649573
| 0.016496
| 0.015711
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019585
| 0.079235
| 1,830
| 13
| 1,267
| 140.769231
| 0.735905
| 0
| 0
| 0
| 0
| 0
| 0.605944
| 0.158503
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.153846
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
6d6009f3d376ae23ba85fcfcc4f39f5a5a547c02
| 298
|
py
|
Python
|
TUTORIAIS-FLASK/AULA_1/Chapter_1/app/routes.py
|
rodrigo-schmidt-lucchesi-85/FLASK
|
6607695ef0e298f84c32ba7aaf3f164a33cc3081
|
[
"MIT"
] | null | null | null |
TUTORIAIS-FLASK/AULA_1/Chapter_1/app/routes.py
|
rodrigo-schmidt-lucchesi-85/FLASK
|
6607695ef0e298f84c32ba7aaf3f164a33cc3081
|
[
"MIT"
] | null | null | null |
TUTORIAIS-FLASK/AULA_1/Chapter_1/app/routes.py
|
rodrigo-schmidt-lucchesi-85/FLASK
|
6607695ef0e298f84c32ba7aaf3f164a33cc3081
|
[
"MIT"
] | null | null | null |
from app import app  # the Flask application instance defined by the project
# The same view serves both URLs: Flask dispatches '/' and '/index' to index().
@app.route('/')  # Defining two endpoints for the view function index
@app.route('/index')
def index():
    """Return a plain-text greeting for GET / and GET /index."""
    return "Hello, World!"  # Response body sent for the GET request
| 33.111111
| 80
| 0.624161
| 38
| 298
| 4.894737
| 0.657895
| 0.086022
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.291946
| 298
| 8
| 81
| 37.25
| 0.881517
| 0.5
| 0
| 0
| 0
| 0
| 0.138889
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.2
| 0.2
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
ed9e8890fc8f7ed6a4fa787b1c512b52f4aa635c
| 183
|
py
|
Python
|
etl/core/exceptions.py
|
cloud-cds/cds-stack
|
d68a1654d4f604369a071f784cdb5c42fc855d6e
|
[
"Apache-2.0"
] | 6
|
2018-06-27T00:09:55.000Z
|
2019-03-07T14:06:53.000Z
|
etl/core/exceptions.py
|
cloud-cds/cds-stack
|
d68a1654d4f604369a071f784cdb5c42fc855d6e
|
[
"Apache-2.0"
] | 3
|
2021-03-31T18:37:46.000Z
|
2021-06-01T21:49:41.000Z
|
etl/core/exceptions.py
|
cloud-cds/cds-stack
|
d68a1654d4f604369a071f784cdb5c42fc855d6e
|
[
"Apache-2.0"
] | 3
|
2020-01-24T16:40:49.000Z
|
2021-09-30T02:28:55.000Z
|
class TransformError(Exception):
    """Raised when an ETL transform step fails.

    Attributes:
        func_name: name of the transform function that failed.
        reason: human-readable description of the failure.
        context: optional extra detail about where the failure happened.
    """

    def __init__(self, func_name, reason, context=''):
        # Record the failure site, cause, and optional surrounding context.
        self.func_name = func_name
        self.reason = reason
        self.context = context
| 30.5
| 54
| 0.661202
| 21
| 183
| 5.428571
| 0.47619
| 0.210526
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.240437
| 183
| 5
| 55
| 36.6
| 0.820144
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
6125884e22c320d881fb43189a225962fcb5d631
| 75
|
py
|
Python
|
contest/abc082/A.py
|
mola1129/atcoder
|
1d3b18cb92d0ba18c41172f49bfcd0dd8d29f9db
|
[
"MIT"
] | null | null | null |
contest/abc082/A.py
|
mola1129/atcoder
|
1d3b18cb92d0ba18c41172f49bfcd0dd8d29f9db
|
[
"MIT"
] | null | null | null |
contest/abc082/A.py
|
mola1129/atcoder
|
1d3b18cb92d0ba18c41172f49bfcd0dd8d29f9db
|
[
"MIT"
] | null | null | null |
import math

# AtCoder ABC082 A: read two integers and print the ceiling of their average.
x, y = (int(tok) for tok in input().split())
print(math.ceil((x + y) / 2))
| 18.75
| 32
| 0.586667
| 14
| 75
| 3.142857
| 0.785714
| 0.090909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015873
| 0.16
| 75
| 3
| 33
| 25
| 0.68254
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
b62ad9c23d56e513d274d33fa06fd3cad0b2d19c
| 4,980
|
py
|
Python
|
figure_generation/Figure1/figure1.py
|
ramachandran-lab/multiancestry_enrichment
|
59f9eea4dbfbff6754224a9188730ebe393a3c18
|
[
"CC0-1.0"
] | 1
|
2022-03-31T18:22:52.000Z
|
2022-03-31T18:22:52.000Z
|
figure_generation/Figure1/figure1.py
|
ramachandran-lab/multiancestry_enrichment
|
59f9eea4dbfbff6754224a9188730ebe393a3c18
|
[
"CC0-1.0"
] | null | null | null |
figure_generation/Figure1/figure1.py
|
ramachandran-lab/multiancestry_enrichment
|
59f9eea4dbfbff6754224a9188730ebe393a3c18
|
[
"CC0-1.0"
] | null | null | null |
import pandas as pd
import numpy as np
import sys
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib.patches import Circle, Wedge, Polygon
from matplotlib.collections import PatchCollection
from collections import Counter

# Trait-category -> plot aesthetics.
color_dict = {'Anthropometric':'#1b9e77','Blood pressure':'#d95f02','Hematological':'#e7298a','Metabolic':'#7570b3','Kidney':'#66a61e','Other biochemical':'#e6ab02'}
marker_dict = {'Anthropometric':'*','Blood pressure':'p','Hematological':'^','Metabolic':'s','Kidney':'d','Other biochemical':'8'}

# Map each trait to its category (trait_categories.txt has Trait/Category columns).
category_dict = pd.read_csv('trait_categories.txt', sep='\t')
category_dict = {r['Trait']: r['Category'] for i, r in category_dict.iterrows()}
trait_order = ['Basophil','Eosinophil','Neutrophil','Monocyte','Lymphocyte','WBC','RBC','MCV','Hemoglobin','Hematocrit','MCH','MCHC','PLC','DBP','SBP','CRP','EGFR','Urate','HBA1C','LDL','HDL','Triglyceride','Cholesterol','BMI','Height']


def _scatter_panel(csv_path, out_pdf, index_traits=False, rewrite_csv=False,
                   ylim=(0, 0.2), xlim=(0, 0.2),
                   ticks=(0, 0.05, 0.10, 0.15, 0.20)):
    """Draw one Figure 1 panel: per-trait Variants-vs-Genes scatter.

    The four original panels were near-identical copy/paste blocks; this
    helper keeps each panel's quirks behind flags.

    Args:
        csv_path: tab-separated proportions file with a 'Traits' column.
        out_pdf: output filename passed to plt.savefig.
        index_traits: re-index on 'Traits' before plotting (panel 1a only).
        rewrite_csv: write Traits/Variants/Genes back to csv_path (panel 1d only).
        ylim, xlim: axis limits.
        ticks: tick positions for both axes, or None to keep defaults (1d).
    """
    proportions = pd.read_csv(csv_path, sep='\t', index_col='Traits')
    fig, ax = plt.subplots(figsize=(4, 4), nrows=1, ncols=1)
    proportions = proportions.reset_index()
    # Resolve each trait to its category, then to a color and a marker.
    proportions['color'] = proportions['Traits'].map(category_dict).map(color_dict)
    proportions['shape'] = proportions['Traits'].map(category_dict).map(marker_dict)
    if rewrite_csv:
        proportions[['Traits', 'Variants', 'Genes']].to_csv(csv_path, sep='\t', index=False)
    # y = x reference line in axes coordinates.
    ax.plot([0, 1], [0, 1], transform=ax.transAxes, color='black', linestyle='--')
    if index_traits:
        proportions = proportions.set_index('Traits')
    for _, row in proportions.iterrows():
        ax.scatter(row['Variants'], row['Genes'], color=row['color'], marker=row['shape'])
        # Star markers on each axis at the per-row values (as in the original).
        plt.plot(np.mean(row['Variants']), 0, marker='*', color='black', markersize=14)
        plt.plot(0, np.mean(row['Genes']), marker='*', color='black', markersize=14)
    ax.set_ylim(list(ylim))
    ax.set_xlim(list(xlim))
    if ticks is not None:
        ax.set_xticks(list(ticks))
        ax.set_yticks(list(ticks))
    plt.savefig(out_pdf)


# 1a
_scatter_panel('scale_proportions.txt', 'Figure1a.pdf', index_traits=True)
# 1b
_scatter_panel('scale_proportions_alt.txt', 'Figure1b.pdf')
# 1c
_scatter_panel('scale_proportions_alt_nothresh.txt', 'Figure1c.pdf')
# 1d: rewrites its own input CSV, wider x-range, default ticks.
_scatter_panel('scale_proportions_nominal.txt', 'Figure1d.pdf', rewrite_csv=True,
               ylim=(0, 0.1), xlim=(0, 1), ticks=None)
| 39.84
| 236
| 0.688755
| 752
| 4,980
| 4.476064
| 0.192819
| 0.023767
| 0.064171
| 0.066548
| 0.740939
| 0.740939
| 0.725193
| 0.70202
| 0.683007
| 0.683007
| 0
| 0.047504
| 0.082731
| 4,980
| 124
| 237
| 40.16129
| 0.689361
| 0.030924
| 0
| 0.682927
| 0
| 0
| 0.210275
| 0.028702
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.097561
| 0
| 0.097561
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b62d46c9baee5c2a08f68c8a4c528b4329baf0e2
| 245
|
py
|
Python
|
src/get_the_dog.py
|
lipegomes/python-and-apis
|
0fd0b1daacfce5566e933651a3d4b0b5805c4e60
|
[
"MIT"
] | null | null | null |
src/get_the_dog.py
|
lipegomes/python-and-apis
|
0fd0b1daacfce5566e933651a3d4b0b5805c4e60
|
[
"MIT"
] | null | null | null |
src/get_the_dog.py
|
lipegomes/python-and-apis
|
0fd0b1daacfce5566e933651a3d4b0b5805c4e60
|
[
"MIT"
] | null | null | null |
import requests

# Fetch breed #1 from TheDogAPI and print the content type plus a few fields.
response = requests.get("https://api.thedogapi.com/v1/breeds/1")

print(f"{response.headers.get('Content-Type')}\n")

# Parse the JSON body once and reuse it for each field.
breed = response.json()
print(f"{breed}\n")
print(f"{breed['origin']}\n")
print(f"{breed['name']}\n")
| 20.416667
| 64
| 0.673469
| 37
| 245
| 4.459459
| 0.540541
| 0.145455
| 0.339394
| 0.272727
| 0.345455
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008658
| 0.057143
| 245
| 11
| 65
| 22.272727
| 0.705628
| 0
| 0
| 0
| 0
| 0
| 0.620408
| 0.391837
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0.666667
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
b63666b409e624a77a0870bfa7b85e297dc79910
| 1,205
|
py
|
Python
|
vbpp/tf_utils.py
|
zcmail/vbpp_for_keystroke_dynamics
|
db03e7ab1a42479f35f8744c465c3825fca1ed04
|
[
"Apache-2.0"
] | 1
|
2020-10-09T15:08:38.000Z
|
2020-10-09T15:08:38.000Z
|
vbpp/tf_utils.py
|
zcmail/vbpp_for_keystroke_dynamics
|
db03e7ab1a42479f35f8744c465c3825fca1ed04
|
[
"Apache-2.0"
] | null | null | null |
vbpp/tf_utils.py
|
zcmail/vbpp_for_keystroke_dynamics
|
db03e7ab1a42479f35f8744c465c3825fca1ed04
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (C) PROWLER.io 2017
#
# Licensed under the Apache License, Version 2.0
"""
Prototype Code! This code may not be fully tested, or in other ways fit-for-purpose.
Use at your own risk!
"""
import tensorflow as tf


def tf_squeeze_1d(A):
    """Flatten tensor `A` into a rank-1 tensor."""
    return tf.reshape(A, (-1,))  # TODO should check that it's got the same length as before


def tf_len(A):
    """Return the size of `A`'s leading dimension as a scalar tensor."""
    return tf.shape(A)[0]


def tf_vec_dot(v1, v2):  # dot product of two tensors
    """
    Calculate the dot product between v1 and v2, regardless of shapes, as long
    as there is at most one dimension with a length > 1 in each vector.
    """
    # turn into flat vectors:
    v1 = tf.squeeze(v1)  # squeeze removes the size-1 dimensions
    v2 = tf.squeeze(v2)
    # XXX assert v1.ndims == 1
    # XXX assert v2.ndims == 1
    return tf.reduce_sum(tf.multiply(v1, v2))


def tf_vec_mat_vec_mul(v1, M, v2):  # computes v1^T * M * v2
    """
    Calculate the bilinear form v1^T M v2, where
    v1 and v2 are vectors of length N and M is a N x N matrix.
    """
    # XXX assert tf.squeeze(v1).ndims == 1
    # XXX assert tf.squeeze(v2).ndims == 1
    v2 = tf.reshape(v2, [-1, 1])  # turn into a column vector
    M_dot_v2 = tf.matmul(M, v2)
    return tf_vec_dot(v1, M_dot_v2)
| 27.386364
| 92
| 0.643154
| 214
| 1,205
| 3.546729
| 0.481308
| 0.059289
| 0.023715
| 0.02635
| 0.044796
| 0
| 0
| 0
| 0
| 0
| 0
| 0.049614
| 0.247303
| 1,205
| 43
| 93
| 28.023256
| 0.787211
| 0.581743
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023256
| 0
| 1
| 0.307692
| false
| 0
| 0.076923
| 0.153846
| 0.692308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
b647a59928a09fd7113e5191d4a56199d01ba334
| 30
|
py
|
Python
|
tests/util/__init__.py
|
Vaskivskyi/asusrouter
|
de45d3e40fefc4851a15613b4789184a40ca0da4
|
[
"Apache-2.0"
] | null | null | null |
tests/util/__init__.py
|
Vaskivskyi/asusrouter
|
de45d3e40fefc4851a15613b4789184a40ca0da4
|
[
"Apache-2.0"
] | null | null | null |
tests/util/__init__.py
|
Vaskivskyi/asusrouter
|
de45d3e40fefc4851a15613b4789184a40ca0da4
|
[
"Apache-2.0"
] | null | null | null |
"""Tests for the utilities"""
| 15
| 29
| 0.666667
| 4
| 30
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 30
| 1
| 30
| 30
| 0.769231
| 0.766667
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b64a2653a4c6ae96e971ec78fa3d8b5cef71254d
| 1,333
|
py
|
Python
|
study_roadmaps/python_sample_examples/pytorch/2_update_mode/evaluate.py
|
Shreyashwaghe/monk_v1
|
4ee4d9483e8ffac9b73a41f3c378e5abf5fc799b
|
[
"Apache-2.0"
] | 7
|
2020-07-26T08:37:29.000Z
|
2020-10-30T10:23:11.000Z
|
study_roadmaps/python_sample_examples/pytorch/2_update_mode/evaluate.py
|
mursalfk/monk_v1
|
62f34a52f242772186ffff7e56764e958fbcd920
|
[
"Apache-2.0"
] | 9
|
2020-01-28T21:40:39.000Z
|
2022-02-10T01:24:06.000Z
|
study_roadmaps/python_sample_examples/pytorch/2_update_mode/evaluate.py
|
mursalfk/monk_v1
|
62f34a52f242772186ffff7e56764e958fbcd920
|
[
"Apache-2.0"
] | 1
|
2020-10-07T12:57:44.000Z
|
2020-10-07T12:57:44.000Z
|
import os
import sys
sys.path.append("../../../monk/")
import psutil
from pytorch_prototype import prototype


def _evaluate(dataset_path, path_to_csv=None):
    """Run eval-infer on sample-project-1/sample-experiment-1 for one dataset.

    Args:
        dataset_path: root folder of the evaluation dataset.
        path_to_csv: optional label CSV (for CSV-style datasets).

    Returns:
        (accuracy, class_based_accuracy) as returned by prototype.Evaluate().
    """
    ptf = prototype(verbose=1)
    ptf.Prototype("sample-project-1", "sample-experiment-1", eval_infer=True)
    if path_to_csv is None:
        ptf.Dataset_Params(dataset_path=dataset_path)
    else:
        ptf.Dataset_Params(dataset_path=dataset_path, path_to_csv=path_to_csv)
    ptf.Dataset()
    return ptf.Evaluate()


# Foldered - Train Dataset
accuracy, class_based_accuracy = _evaluate(
    "../../../monk/system_check_tests/datasets/dataset_cats_dogs_eval")

# CSV - Train Dataset
accuracy, class_based_accuracy = _evaluate(
    "../../../monk/system_check_tests/datasets/dataset_csv_id/train",
    path_to_csv="../../../monk/system_check_tests/datasets/dataset_csv_id/train.csv")
| 27.770833
| 143
| 0.443361
| 108
| 1,333
| 5.231481
| 0.333333
| 0.084956
| 0.079646
| 0.106195
| 0.79469
| 0.79469
| 0.79469
| 0.79469
| 0.628319
| 0.530973
| 0
| 0.004796
| 0.061515
| 1,333
| 47
| 144
| 28.361702
| 0.446843
| 0.033758
| 0
| 0.5
| 0
| 0
| 0.363636
| 0.252964
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b65c81531f1c4ac275db965ffe34b2739000b995
| 790
|
py
|
Python
|
backend/naki/naki/schemas/digital_item.py
|
iimcz/emod
|
432094c020247597a94e95f76cc524c20b68b685
|
[
"MIT"
] | null | null | null |
backend/naki/naki/schemas/digital_item.py
|
iimcz/emod
|
432094c020247597a94e95f76cc524c20b68b685
|
[
"MIT"
] | 6
|
2021-03-08T23:32:15.000Z
|
2022-02-26T08:11:38.000Z
|
backend/naki/naki/schemas/digital_item.py
|
iimcz/emod
|
432094c020247597a94e95f76cc524c20b68b685
|
[
"MIT"
] | null | null | null |
import colander
from naki.schemas.metadata import MetadataSequenceSchema
from naki.schemas.link import LinkSequenceSchema


class StringSequenceSchema(colander.SequenceSchema):
    """Sequence of plain strings (used for group id lists below)."""
    value = colander.SchemaNode(colander.String())


class DigitalItemSchema(colander.MappingSchema):
    """Colander schema for one digital-item payload."""
    mime = colander.SchemaNode(colander.String())  # required MIME type
    created = colander.SchemaNode(colander.String(), missing='')  # creation timestamp as text; optional
    description = colander.SchemaNode(colander.String(), missing='')  # optional free-text description
    id_user = colander.SchemaNode(colander.String(), missing='Unknown')  # owning user; defaults to 'Unknown'
    # NOTE(review): default 0 (an int) on a String node is inconsistent with
    # the other fields' '' defaults -- confirm intended.
    rights = colander.SchemaNode(colander.String(), missing=0)
    src = colander.SchemaNode(colander.String(), missing='')  # source path/URL; optional
    metadata = MetadataSequenceSchema()  # required metadata entries
    links = LinkSequenceSchema(missing=[])  # optional related links
    group_ids = StringSequenceSchema(missing=[])  # optional group id strings
| 41.578947
| 71
| 0.764557
| 74
| 790
| 8.135135
| 0.405405
| 0.209302
| 0.302326
| 0.372093
| 0.32392
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001433
| 0.116456
| 790
| 18
| 72
| 43.888889
| 0.861032
| 0
| 0
| 0
| 0
| 0
| 0.008872
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
b68c8a9573133720d27b86e6b5ad4865424f3624
| 3,160
|
py
|
Python
|
Consuelo.py
|
digo-smithh/Consuelo-chatbot
|
9724fb421715da305fc3b9baa7ddb32c5fe0cc29
|
[
"MIT"
] | 1
|
2020-10-13T01:04:46.000Z
|
2020-10-13T01:04:46.000Z
|
Consuelo.py
|
digo-smithh/Consuelo-chatbot-Python
|
9724fb421715da305fc3b9baa7ddb32c5fe0cc29
|
[
"MIT"
] | null | null | null |
Consuelo.py
|
digo-smithh/Consuelo-chatbot-Python
|
9724fb421715da305fc3b9baa7ddb32c5fe0cc29
|
[
"MIT"
] | null | null | null |
"""Consuelo: a small trilingual (pt/en/es) ChatterBot with a PySimpleGUI front end."""
from chatterbot import ChatBot
from chatterbot.trainers import ChatterBotCorpusTrainer
import PySimpleGUI as sg
import time  # NOTE(review): unused in this script; kept in case other project code expects it
import os
from random import randrange, choice

# Build the bot and train it on the three corpora it answers in.
bot = ChatBot('Bot')
trainer = ChatterBotCorpusTrainer(bot)
for _corpus in ('chatterbot.corpus.portuguese',
                'chatterbot.corpus.english',
                'chatterbot.corpus.spanish'):
    trainer.train(_corpus)

sg.theme('BlueMono')

# Resolve bundled assets relative to this file.  os.path.join is portable,
# unlike the original hard-coded backslash separators ("...\img.png").
_ASSET_DIR = os.path.dirname(os.path.realpath(__file__))

layout = [[sg.Text(' Hola, eu sou Consuelo Jiménez! Habla que eu te escuto!')],
          [sg.Image(filename=os.path.join(_ASSET_DIR, 'img.png'), key='image'),
           sg.Input(key='msg', focus=True, size=(37, 20)),
           sg.Button("Enviar", change_submits=True, bind_return_key=True)],
          [sg.Output(size=(50, 20))]]
window = sg.Window(' Consuelo Jiménez', layout, icon=os.path.join(_ASSET_DIR, 'ico.ico'))

# Canned replies: for an empty message, and for goodbyes.
vazio = ['pode falar.', 'fala que eu te escuto', 'Digass', 'Estoy aqui', 'I will always wait for you.', 'Fale! Não tenha medo!', 'Não vai falar nada? Então vai embora e para de encher o saco', 'que', 'to te vendooooo']
despedida = ['tchau', 'hasta la vista baby', 'bjinhos', 'já vai?', 'ai que grosseria', 'bj', 'xoxo', 'bye', 'adiós']

# Normalized (stripped, lower-cased) farewell phrases.  A single set-membership
# test replaces the original 40-clause `or` chain, which also re-computed
# message.strip().lower() for every comparison.
FAREWELLS = {
    'tchau', 'tchau!', 'tchau.', 'tchauu', 'tchauzinho', 'tchauzinho!',
    'bjs', 'bj', 'bjss', 'bjus', 'bjuus', 'bjsss', 'bjssss', 'bjo', 'bjoo',
    'ate', 'até mais!', 'ate mais', 'ate!', 'to indo', 'vou embora!', 'vou embora',
    'bye', 'bye bye', 'bye-bye!', 'goodbye', 'goodbye!', 'goodbye.',
    'adiós.', 'adiós', 'adios', 'adios.', 'xoxo', 'flw', 'flww',
    'hasta la vista baby!', 'hasta la vista baby', 'hasta la vista', 'hasta la vista!',
}

while True:
    button, values = window.Read()
    # Fix: the original raised TypeError when the user closed the window,
    # because `values` is None on the close event.
    if button in (sg.WIN_CLOSED, None):
        break
    message = values['msg']
    print(f"Você: {message}")
    window.FindElement('msg').Update('')
    if message == '':
        # random.choice is clearer than indexing with the magic randrange(9)
        # and cannot drift out of sync if the list length changes.
        print(f'Consuelo: {choice(vazio)}')
    elif message.strip().lower() in FAREWELLS:
        print(f'Consuelo: {choice(despedida)}! E você que feche o programa.')
    else:
        reply = bot.get_response(message)
        print('Consuelo:', reply)
window.close()
| 79
| 1,606
| 0.656013
| 433
| 3,160
| 4.759815
| 0.332564
| 0.232897
| 0.329937
| 0.359534
| 0.418244
| 0.416788
| 0.402232
| 0.372149
| 0.341097
| 0.184862
| 0
| 0.003631
| 0.128481
| 3,160
| 39
| 1,607
| 81.025641
| 0.744735
| 0
| 0
| 0
| 0
| 0
| 0.298101
| 0.076266
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0.133333
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b68da884d0cdda8f224288bb67367622b8a00154
| 64,280
|
py
|
Python
|
dev/Tools/Python/2.7.13/mac/Python.framework/Versions/2.7/lib/python2.7/site-packages/pyxb/bundles/dc/raw/dcterms.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 8
|
2019-10-07T16:33:47.000Z
|
2020-12-07T03:59:58.000Z
|
dev/Tools/Python/2.7.13/mac/Python.framework/Versions/2.7/lib/python2.7/site-packages/pyxb/bundles/dc/raw/dcterms.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | null | null | null |
dev/Tools/Python/2.7.13/mac/Python.framework/Versions/2.7/lib/python2.7/site-packages/pyxb/bundles/dc/raw/dcterms.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 5
|
2020-08-27T20:44:18.000Z
|
2021-08-21T22:54:11.000Z
|
# ./pyxb/bundles/dc/raw/dcterms.py
# -*- coding: utf-8 -*-
# PyXB bindings for NM:62e52a6e1b0d23522982e9c2905e5cb67ad01951
# Generated 2014-10-19 06:25:02.925349 by PyXB version 1.2.4 using Python 2.7.3.final.0
# Namespace http://purl.org/dc/terms/
from __future__ import unicode_literals
import pyxb
import pyxb.binding
import pyxb.binding.saxer
import io
import pyxb.utils.utility
import pyxb.utils.domutils
import sys
import pyxb.utils.six as _six
# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier('urn:uuid:91a2ca00-5782-11e4-8790-c8600024e903')
# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.4'
# Generated bindings are not compatible across PyXB versions
if pyxb.__version__ != _PyXBVersion:
    raise pyxb.PyXBVersionError(_PyXBVersion)
# Import bindings for namespaces imported into schema
import pyxb.bundles.dc.dc
import pyxb.binding.datatypes
import pyxb.binding.xml_
import pyxb.bundles.dc.dcmitype
# NOTE: All namespace declarations are reserved within the binding
# Register the Dublin Core *terms* namespace bound by this module, and alias
# the already-bound DC *elements* namespace for reuse by the bindings below.
Namespace = pyxb.namespace.NamespaceForURI('http://purl.org/dc/terms/', create_if_missing=True)
Namespace.configureCategories(['typeBinding', 'elementBinding'])
_Namespace_dc = pyxb.bundles.dc.dc.Namespace
_Namespace_dc.configureCategories(['typeBinding', 'elementBinding'])
def CreateFromDocument (xml_text, default_namespace=None, location_base=None):
    """Parse *xml_text* and return the binding instance for its document element.

    @param xml_text An XML document: either data (Python 2 str / Python 3
    bytes) or text (Python 2 unicode / Python 3 str) in the
    L{pyxb._InputEncoding} encoding.
    @keyword default_namespace The L{pyxb.Namespace} used where the document
    declares no default namespace; falls back to this module's namespace when
    C{None}.
    @keyword location_base Object recorded as the base of every
    L{pyxb.utils.utility.Location} attached to parse events and objects
    (typically the URI the document came from).
    """
    # When SAX is not the active XML style, delegate to the DOM-based path.
    if pyxb.XMLStyle_saxer != pyxb._XMLStyle:
        dom = pyxb.utils.domutils.StringToDOM(xml_text)
        return CreateFromDOM(dom.documentElement, default_namespace=default_namespace)
    if default_namespace is None:
        default_namespace = Namespace.fallbackNamespace()
    parser = pyxb.binding.saxer.make_parser(fallback_namespace=default_namespace, location_base=location_base)
    content_handler = parser.getContentHandler()
    # SAX consumes bytes; encode text input once at this boundary.
    payload = xml_text
    if isinstance(payload, _six.text_type):
        payload = payload.encode(pyxb._InputEncoding)
    parser.parse(io.BytesIO(payload))
    return content_handler.rootObject()
def CreateFromDOM (node, default_namespace=None):
    """Build a binding instance from an already-parsed DOM *node*.

    The node's tag must correspond to an element declaration in this module.
    @deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}."""
    ns = Namespace.fallbackNamespace() if default_namespace is None else default_namespace
    return pyxb.binding.basis.element.AnyCreateFromDOM(node, ns)
# Atomic simple type: [anonymous]
# Anonymous restriction of xs:string; simple content type of the LCSH binding below.
class STD_ANON (pyxb.binding.datatypes.string):
    """An atomic simple type."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 144, 8)
    _Documentation = None
STD_ANON._InitializeFacetMap()
# Atomic simple type: [anonymous]
# Anonymous restriction of xs:string; simple content type of the MESH binding below.
class STD_ANON_ (pyxb.binding.datatypes.string):
    """An atomic simple type."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 155, 8)
    _Documentation = None
STD_ANON_._InitializeFacetMap()
# Atomic simple type: [anonymous]
# Anonymous restriction of xs:string; simple content type of the DDC binding below.
class STD_ANON_2 (pyxb.binding.datatypes.string):
    """An atomic simple type."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 166, 8)
    _Documentation = None
STD_ANON_2._InitializeFacetMap()
# Atomic simple type: [anonymous]
# Anonymous restriction of xs:string; simple content type of the LCC binding below.
class STD_ANON_3 (pyxb.binding.datatypes.string):
    """An atomic simple type."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 177, 8)
    _Documentation = None
STD_ANON_3._InitializeFacetMap()
# Atomic simple type: [anonymous]
# Anonymous restriction of xs:string; simple content type of the UDC binding below.
class STD_ANON_4 (pyxb.binding.datatypes.string):
    """An atomic simple type."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 188, 8)
    _Documentation = None
STD_ANON_4._InitializeFacetMap()
# Atomic simple type: [anonymous]
# Anonymous restriction of xs:string; simple content type of the Period binding below.
class STD_ANON_5 (pyxb.binding.datatypes.string):
    """An atomic simple type."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 199, 8)
    _Documentation = None
STD_ANON_5._InitializeFacetMap()
# Union simple type: [anonymous]
# superclasses pyxb.binding.datatypes.anySimpleType
# Union of the XML Schema date types (gYear, gYearMonth, date, dateTime);
# simple content type of the W3CDTF binding below.
class STD_ANON_6 (pyxb.binding.basis.STD_union):
    """Simple type that is a union of pyxb.binding.datatypes.gYear, pyxb.binding.datatypes.gYearMonth, pyxb.binding.datatypes.date, pyxb.binding.datatypes.dateTime."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 210, 8)
    _Documentation = None
    _MemberTypes = ( pyxb.binding.datatypes.gYear, pyxb.binding.datatypes.gYearMonth, pyxb.binding.datatypes.date, pyxb.binding.datatypes.dateTime, )
# Facets are attached after the class exists because they reference the class itself.
STD_ANON_6._CF_pattern = pyxb.binding.facets.CF_pattern()
STD_ANON_6._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=STD_ANON_6)
STD_ANON_6._InitializeFacetMap(STD_ANON_6._CF_pattern,
   STD_ANON_6._CF_enumeration)
# Atomic simple type: [anonymous]
# Anonymous restriction of xs:string; simple content type of the IMT binding below.
class STD_ANON_7 (pyxb.binding.datatypes.string):
    """An atomic simple type."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 232, 8)
    _Documentation = None
STD_ANON_7._InitializeFacetMap()
# Atomic simple type: [anonymous]
# Anonymous restriction of xs:anyURI; simple content type of the URI binding below.
class STD_ANON_8 (pyxb.binding.datatypes.anyURI):
    """An atomic simple type."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 243, 8)
    _Documentation = None
STD_ANON_8._InitializeFacetMap()
# Atomic simple type: [anonymous]
# Anonymous restriction of xs:string; simple content type of the ISO639-2 binding below.
class STD_ANON_9 (pyxb.binding.datatypes.string):
    """An atomic simple type."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 254, 8)
    _Documentation = None
STD_ANON_9._InitializeFacetMap()
# Atomic simple type: [anonymous]
# Anonymous restriction of xs:string; simple content type of the ISO639-3 binding below.
class STD_ANON_10 (pyxb.binding.datatypes.string):
    """An atomic simple type."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 265, 8)
    _Documentation = None
STD_ANON_10._InitializeFacetMap()
# Atomic simple type: [anonymous]
# Anonymous restriction of xs:language; simple content type of the RFC1766 binding below.
class STD_ANON_11 (pyxb.binding.datatypes.language):
    """An atomic simple type."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 276, 8)
    _Documentation = None
STD_ANON_11._InitializeFacetMap()
# Atomic simple type: [anonymous]
# Anonymous restriction of xs:language; its consuming complex type is defined
# later in this module (not visible in this excerpt).
class STD_ANON_12 (pyxb.binding.datatypes.language):
    """An atomic simple type."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 287, 8)
    _Documentation = None
STD_ANON_12._InitializeFacetMap()
# Atomic simple type: [anonymous]
# Anonymous restriction of xs:language; its consuming complex type is defined
# later in this module (not visible in this excerpt).
class STD_ANON_13 (pyxb.binding.datatypes.language):
    """An atomic simple type."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 298, 8)
    _Documentation = None
STD_ANON_13._InitializeFacetMap()
# Atomic simple type: [anonymous]
# Anonymous restriction of xs:string; its consuming complex type is defined
# later in this module (not visible in this excerpt).
class STD_ANON_14 (pyxb.binding.datatypes.string):
    """An atomic simple type."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 309, 8)
    _Documentation = None
STD_ANON_14._InitializeFacetMap()
# Atomic simple type: [anonymous]
# Anonymous restriction of xs:string; its consuming complex type is defined
# later in this module (not visible in this excerpt).
class STD_ANON_15 (pyxb.binding.datatypes.string):
    """An atomic simple type."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 320, 8)
    _Documentation = None
STD_ANON_15._InitializeFacetMap()
# Atomic simple type: [anonymous]
# Anonymous restriction of xs:string; its consuming complex type is defined
# later in this module (not visible in this excerpt).
class STD_ANON_16 (pyxb.binding.datatypes.string):
    """An atomic simple type."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 331, 8)
    _Documentation = None
STD_ANON_16._InitializeFacetMap()
# Atomic simple type: [anonymous]
# Anonymous restriction of xs:string; its consuming complex type is defined
# later in this module (not visible in this excerpt).
class STD_ANON_17 (pyxb.binding.datatypes.string):
    """An atomic simple type."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 342, 8)
    _Documentation = None
STD_ANON_17._InitializeFacetMap()
# Union simple type: [anonymous]
# superclasses pyxb.bundles.dc.dcmitype.DCMIType
# Union over the dcmitype anonymous enumeration; simple content type of the
# DCMIType binding below.  The class attributes added after the definition
# mirror the member type's enumeration constants for convenient access.
class STD_ANON_18 (pyxb.binding.basis.STD_union):
    """Simple type that is a union of pyxb.bundles.dc.dcmitype.STD_ANON."""
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 221, 8)
    _Documentation = None
    _MemberTypes = ( pyxb.bundles.dc.dcmitype.STD_ANON, )
STD_ANON_18.Collection = 'Collection'              # originally pyxb.bundles.dc.dcmitype.STD_ANON.Collection
STD_ANON_18.Dataset = 'Dataset'                    # originally pyxb.bundles.dc.dcmitype.STD_ANON.Dataset
STD_ANON_18.Event = 'Event'                        # originally pyxb.bundles.dc.dcmitype.STD_ANON.Event
STD_ANON_18.Image = 'Image'                        # originally pyxb.bundles.dc.dcmitype.STD_ANON.Image
STD_ANON_18.MovingImage = 'MovingImage'            # originally pyxb.bundles.dc.dcmitype.STD_ANON.MovingImage
STD_ANON_18.StillImage = 'StillImage'              # originally pyxb.bundles.dc.dcmitype.STD_ANON.StillImage
STD_ANON_18.InteractiveResource = 'InteractiveResource'# originally pyxb.bundles.dc.dcmitype.STD_ANON.InteractiveResource
STD_ANON_18.Service = 'Service'                    # originally pyxb.bundles.dc.dcmitype.STD_ANON.Service
STD_ANON_18.Software = 'Software'                  # originally pyxb.bundles.dc.dcmitype.STD_ANON.Software
STD_ANON_18.Sound = 'Sound'                        # originally pyxb.bundles.dc.dcmitype.STD_ANON.Sound
STD_ANON_18.Text = 'Text'                          # originally pyxb.bundles.dc.dcmitype.STD_ANON.Text
STD_ANON_18.PhysicalObject = 'PhysicalObject'      # originally pyxb.bundles.dc.dcmitype.STD_ANON.PhysicalObject
STD_ANON_18._InitializeFacetMap()
# Complex type {http://purl.org/dc/terms/}elementOrRefinementContainer with content type ELEMENT_ONLY
class elementOrRefinementContainer (pyxb.binding.basis.complexTypeDefinition):
    """
    This is included as a convenience for schema authors who need to define a root
    or container element for all of the DC elements and element refinements.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'elementOrRefinementContainer')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 368, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://purl.org/dc/elements/1.1/}any uses Python identifier any
    # Declaration of the {dc}any child element (the True flag marks it as
    # allowing multiple occurrences).
    __any = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(_Namespace_dc, 'any'), 'any', '__httppurl_orgdcterms_elementOrRefinementContainer_httppurl_orgdcelements1_1any', True, pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dc.xsd', 70, 2), )
    # Public accessor for the child element declared above.
    any = property(__any.value, __any.set, None, None)
    _ElementMap.update({
        __any.name() : __any
    })
    _AttributeMap.update({
    })
# Register the binding so name-based lookups in the namespace resolve to this class.
Namespace.addCategoryObject('typeBinding', 'elementOrRefinementContainer', elementOrRefinementContainer)
# Complex type {http://purl.org/dc/terms/}LCSH with content type SIMPLE
# SimpleLiteral refinement whose simple content is STD_ANON; the inherited
# xml:lang attribute is prohibited here (AttributeUse(..., prohibited=True)).
class LCSH (pyxb.bundles.dc.dc.SimpleLiteral):
    """Complex type {http://purl.org/dc/terms/}LCSH with content type SIMPLE"""
    _TypeDefinition = STD_ANON
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'LCSH')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 141, 2)
    # Inherit the parent's element/attribute maps by copy so local overrides
    # do not mutate SimpleLiteral's own maps.
    _ElementMap = pyxb.bundles.dc.dc.SimpleLiteral._ElementMap.copy()
    _AttributeMap = pyxb.bundles.dc.dc.SimpleLiteral._AttributeMap.copy()
    # Base type is pyxb.bundles.dc.dc.SimpleLiteral
    # Attribute lang is restricted from parent
    # Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
    __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httppurl_orgdcelements1_1_SimpleLiteral_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang, prohibited=True)
    __lang._DeclarationLocation = None
    __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 147, 8)
    lang = property(__lang.value, __lang.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lang.name() : __lang
    })
Namespace.addCategoryObject('typeBinding', 'LCSH', LCSH)
# Complex type {http://purl.org/dc/terms/}MESH with content type SIMPLE
# SimpleLiteral refinement whose simple content is STD_ANON_; the inherited
# xml:lang attribute is prohibited here (AttributeUse(..., prohibited=True)).
class MESH (pyxb.bundles.dc.dc.SimpleLiteral):
    """Complex type {http://purl.org/dc/terms/}MESH with content type SIMPLE"""
    _TypeDefinition = STD_ANON_
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'MESH')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 152, 2)
    # Copy the parent's maps so local overrides do not mutate SimpleLiteral's.
    _ElementMap = pyxb.bundles.dc.dc.SimpleLiteral._ElementMap.copy()
    _AttributeMap = pyxb.bundles.dc.dc.SimpleLiteral._AttributeMap.copy()
    # Base type is pyxb.bundles.dc.dc.SimpleLiteral
    # Attribute lang is restricted from parent
    # Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
    __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httppurl_orgdcelements1_1_SimpleLiteral_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang, prohibited=True)
    __lang._DeclarationLocation = None
    __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 158, 8)
    lang = property(__lang.value, __lang.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lang.name() : __lang
    })
Namespace.addCategoryObject('typeBinding', 'MESH', MESH)
# Complex type {http://purl.org/dc/terms/}DDC with content type SIMPLE
# SimpleLiteral refinement whose simple content is STD_ANON_2; the inherited
# xml:lang attribute is prohibited here (AttributeUse(..., prohibited=True)).
class DDC (pyxb.bundles.dc.dc.SimpleLiteral):
    """Complex type {http://purl.org/dc/terms/}DDC with content type SIMPLE"""
    _TypeDefinition = STD_ANON_2
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'DDC')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 163, 2)
    # Copy the parent's maps so local overrides do not mutate SimpleLiteral's.
    _ElementMap = pyxb.bundles.dc.dc.SimpleLiteral._ElementMap.copy()
    _AttributeMap = pyxb.bundles.dc.dc.SimpleLiteral._AttributeMap.copy()
    # Base type is pyxb.bundles.dc.dc.SimpleLiteral
    # Attribute lang is restricted from parent
    # Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
    __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httppurl_orgdcelements1_1_SimpleLiteral_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang, prohibited=True)
    __lang._DeclarationLocation = None
    __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 169, 8)
    lang = property(__lang.value, __lang.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lang.name() : __lang
    })
Namespace.addCategoryObject('typeBinding', 'DDC', DDC)
# Complex type {http://purl.org/dc/terms/}LCC with content type SIMPLE
# SimpleLiteral refinement whose simple content is STD_ANON_3; the inherited
# xml:lang attribute is prohibited here (AttributeUse(..., prohibited=True)).
class LCC (pyxb.bundles.dc.dc.SimpleLiteral):
    """Complex type {http://purl.org/dc/terms/}LCC with content type SIMPLE"""
    _TypeDefinition = STD_ANON_3
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'LCC')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 174, 2)
    # Copy the parent's maps so local overrides do not mutate SimpleLiteral's.
    _ElementMap = pyxb.bundles.dc.dc.SimpleLiteral._ElementMap.copy()
    _AttributeMap = pyxb.bundles.dc.dc.SimpleLiteral._AttributeMap.copy()
    # Base type is pyxb.bundles.dc.dc.SimpleLiteral
    # Attribute lang is restricted from parent
    # Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
    __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httppurl_orgdcelements1_1_SimpleLiteral_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang, prohibited=True)
    __lang._DeclarationLocation = None
    __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 180, 8)
    lang = property(__lang.value, __lang.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lang.name() : __lang
    })
Namespace.addCategoryObject('typeBinding', 'LCC', LCC)
# Complex type {http://purl.org/dc/terms/}UDC with content type SIMPLE
# SimpleLiteral refinement whose simple content is STD_ANON_4; the inherited
# xml:lang attribute is prohibited here (AttributeUse(..., prohibited=True)).
class UDC (pyxb.bundles.dc.dc.SimpleLiteral):
    """Complex type {http://purl.org/dc/terms/}UDC with content type SIMPLE"""
    _TypeDefinition = STD_ANON_4
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'UDC')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 185, 2)
    # Copy the parent's maps so local overrides do not mutate SimpleLiteral's.
    _ElementMap = pyxb.bundles.dc.dc.SimpleLiteral._ElementMap.copy()
    _AttributeMap = pyxb.bundles.dc.dc.SimpleLiteral._AttributeMap.copy()
    # Base type is pyxb.bundles.dc.dc.SimpleLiteral
    # Attribute lang is restricted from parent
    # Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
    __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httppurl_orgdcelements1_1_SimpleLiteral_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang, prohibited=True)
    __lang._DeclarationLocation = None
    __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 191, 8)
    lang = property(__lang.value, __lang.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lang.name() : __lang
    })
Namespace.addCategoryObject('typeBinding', 'UDC', UDC)
# Complex type {http://purl.org/dc/terms/}Period with content type SIMPLE
# SimpleLiteral refinement whose simple content is STD_ANON_5; the inherited
# xml:lang attribute is prohibited here (AttributeUse(..., prohibited=True)).
class Period (pyxb.bundles.dc.dc.SimpleLiteral):
    """Complex type {http://purl.org/dc/terms/}Period with content type SIMPLE"""
    _TypeDefinition = STD_ANON_5
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'Period')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 196, 2)
    # Copy the parent's maps so local overrides do not mutate SimpleLiteral's.
    _ElementMap = pyxb.bundles.dc.dc.SimpleLiteral._ElementMap.copy()
    _AttributeMap = pyxb.bundles.dc.dc.SimpleLiteral._AttributeMap.copy()
    # Base type is pyxb.bundles.dc.dc.SimpleLiteral
    # Attribute lang is restricted from parent
    # Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
    __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httppurl_orgdcelements1_1_SimpleLiteral_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang, prohibited=True)
    __lang._DeclarationLocation = None
    __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 202, 8)
    lang = property(__lang.value, __lang.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lang.name() : __lang
    })
Namespace.addCategoryObject('typeBinding', 'Period', Period)
# Complex type {http://purl.org/dc/terms/}W3CDTF with content type SIMPLE
# SimpleLiteral refinement whose simple content is the date-type union
# STD_ANON_6; the inherited xml:lang attribute is prohibited here
# (AttributeUse(..., prohibited=True)).
class W3CDTF (pyxb.bundles.dc.dc.SimpleLiteral):
    """Complex type {http://purl.org/dc/terms/}W3CDTF with content type SIMPLE"""
    _TypeDefinition = STD_ANON_6
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'W3CDTF')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 207, 2)
    # Copy the parent's maps so local overrides do not mutate SimpleLiteral's.
    _ElementMap = pyxb.bundles.dc.dc.SimpleLiteral._ElementMap.copy()
    _AttributeMap = pyxb.bundles.dc.dc.SimpleLiteral._AttributeMap.copy()
    # Base type is pyxb.bundles.dc.dc.SimpleLiteral
    # Attribute lang is restricted from parent
    # Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
    __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httppurl_orgdcelements1_1_SimpleLiteral_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang, prohibited=True)
    __lang._DeclarationLocation = None
    __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 213, 8)
    lang = property(__lang.value, __lang.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lang.name() : __lang
    })
Namespace.addCategoryObject('typeBinding', 'W3CDTF', W3CDTF)
# Complex type {http://purl.org/dc/terms/}DCMIType with content type SIMPLE
# SimpleLiteral refinement whose simple content is the vocabulary union
# STD_ANON_18; the inherited xml:lang attribute is prohibited here
# (AttributeUse(..., prohibited=True)).
class DCMIType (pyxb.bundles.dc.dc.SimpleLiteral):
    """Complex type {http://purl.org/dc/terms/}DCMIType with content type SIMPLE"""
    _TypeDefinition = STD_ANON_18
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'DCMIType')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 218, 2)
    # Copy the parent's maps so local overrides do not mutate SimpleLiteral's.
    _ElementMap = pyxb.bundles.dc.dc.SimpleLiteral._ElementMap.copy()
    _AttributeMap = pyxb.bundles.dc.dc.SimpleLiteral._AttributeMap.copy()
    # Base type is pyxb.bundles.dc.dc.SimpleLiteral
    # Attribute lang is restricted from parent
    # Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
    __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httppurl_orgdcelements1_1_SimpleLiteral_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang, prohibited=True)
    __lang._DeclarationLocation = None
    __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 224, 8)
    lang = property(__lang.value, __lang.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lang.name() : __lang
    })
Namespace.addCategoryObject('typeBinding', 'DCMIType', DCMIType)
# Complex type {http://purl.org/dc/terms/}IMT with content type SIMPLE
# SimpleLiteral refinement whose simple content is STD_ANON_7; the inherited
# xml:lang attribute is prohibited here (AttributeUse(..., prohibited=True)).
class IMT (pyxb.bundles.dc.dc.SimpleLiteral):
    """Complex type {http://purl.org/dc/terms/}IMT with content type SIMPLE"""
    _TypeDefinition = STD_ANON_7
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'IMT')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 229, 2)
    # Copy the parent's maps so local overrides do not mutate SimpleLiteral's.
    _ElementMap = pyxb.bundles.dc.dc.SimpleLiteral._ElementMap.copy()
    _AttributeMap = pyxb.bundles.dc.dc.SimpleLiteral._AttributeMap.copy()
    # Base type is pyxb.bundles.dc.dc.SimpleLiteral
    # Attribute lang is restricted from parent
    # Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
    __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httppurl_orgdcelements1_1_SimpleLiteral_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang, prohibited=True)
    __lang._DeclarationLocation = None
    __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 235, 8)
    lang = property(__lang.value, __lang.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lang.name() : __lang
    })
Namespace.addCategoryObject('typeBinding', 'IMT', IMT)
# Complex type {http://purl.org/dc/terms/}URI with content type SIMPLE
# SimpleLiteral refinement whose simple content is the anyURI restriction
# STD_ANON_8; the inherited xml:lang attribute is prohibited here
# (AttributeUse(..., prohibited=True)).
class URI (pyxb.bundles.dc.dc.SimpleLiteral):
    """Complex type {http://purl.org/dc/terms/}URI with content type SIMPLE"""
    _TypeDefinition = STD_ANON_8
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'URI')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 240, 2)
    # Copy the parent's maps so local overrides do not mutate SimpleLiteral's.
    _ElementMap = pyxb.bundles.dc.dc.SimpleLiteral._ElementMap.copy()
    _AttributeMap = pyxb.bundles.dc.dc.SimpleLiteral._AttributeMap.copy()
    # Base type is pyxb.bundles.dc.dc.SimpleLiteral
    # Attribute lang is restricted from parent
    # Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
    __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httppurl_orgdcelements1_1_SimpleLiteral_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang, prohibited=True)
    __lang._DeclarationLocation = None
    __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 246, 8)
    lang = property(__lang.value, __lang.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lang.name() : __lang
    })
Namespace.addCategoryObject('typeBinding', 'URI', URI)
# Complex type {http://purl.org/dc/terms/}ISO639-2 with content type SIMPLE
# SimpleLiteral refinement whose simple content is STD_ANON_9; the inherited
# xml:lang attribute is prohibited here (AttributeUse(..., prohibited=True)).
# Python identifier replaces the '-' in the XML name with '_'.
class ISO639_2 (pyxb.bundles.dc.dc.SimpleLiteral):
    """Complex type {http://purl.org/dc/terms/}ISO639-2 with content type SIMPLE"""
    _TypeDefinition = STD_ANON_9
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ISO639-2')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 251, 2)
    # Copy the parent's maps so local overrides do not mutate SimpleLiteral's.
    _ElementMap = pyxb.bundles.dc.dc.SimpleLiteral._ElementMap.copy()
    _AttributeMap = pyxb.bundles.dc.dc.SimpleLiteral._AttributeMap.copy()
    # Base type is pyxb.bundles.dc.dc.SimpleLiteral
    # Attribute lang is restricted from parent
    # Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
    __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httppurl_orgdcelements1_1_SimpleLiteral_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang, prohibited=True)
    __lang._DeclarationLocation = None
    __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 257, 8)
    lang = property(__lang.value, __lang.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lang.name() : __lang
    })
Namespace.addCategoryObject('typeBinding', 'ISO639-2', ISO639_2)
# Complex type {http://purl.org/dc/terms/}ISO639-3 with content type SIMPLE
# SimpleLiteral refinement whose simple content is STD_ANON_10; the inherited
# xml:lang attribute is prohibited here (AttributeUse(..., prohibited=True)).
# Python identifier replaces the '-' in the XML name with '_'.
class ISO639_3 (pyxb.bundles.dc.dc.SimpleLiteral):
    """Complex type {http://purl.org/dc/terms/}ISO639-3 with content type SIMPLE"""
    _TypeDefinition = STD_ANON_10
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ISO639-3')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 262, 2)
    # Copy the parent's maps so local overrides do not mutate SimpleLiteral's.
    _ElementMap = pyxb.bundles.dc.dc.SimpleLiteral._ElementMap.copy()
    _AttributeMap = pyxb.bundles.dc.dc.SimpleLiteral._AttributeMap.copy()
    # Base type is pyxb.bundles.dc.dc.SimpleLiteral
    # Attribute lang is restricted from parent
    # Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
    __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httppurl_orgdcelements1_1_SimpleLiteral_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang, prohibited=True)
    __lang._DeclarationLocation = None
    __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 268, 8)
    lang = property(__lang.value, __lang.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lang.name() : __lang
    })
Namespace.addCategoryObject('typeBinding', 'ISO639-3', ISO639_3)
# Complex type {http://purl.org/dc/terms/}RFC1766 with content type SIMPLE
class RFC1766 (pyxb.bundles.dc.dc.SimpleLiteral):
    """Complex type {http://purl.org/dc/terms/}RFC1766 with content type SIMPLE

    Binding for the dcterms RFC1766 type: a SimpleLiteral restriction whose
    simple content is STD_ANON_11 and whose xml:lang attribute is prohibited.
    """
    _TypeDefinition = STD_ANON_11
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'RFC1766')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 273, 2)
    _ElementMap = pyxb.bundles.dc.dc.SimpleLiteral._ElementMap.copy()
    _AttributeMap = pyxb.bundles.dc.dc.SimpleLiteral._AttributeMap.copy()
    # Base type is pyxb.bundles.dc.dc.SimpleLiteral
    # Attribute lang is restricted from parent
    # Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
    __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httppurl_orgdcelements1_1_SimpleLiteral_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang, prohibited=True)
    __lang._DeclarationLocation = None
    __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 279, 8)
    lang = property(__lang.value, __lang.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lang.name() : __lang
    })
Namespace.addCategoryObject('typeBinding', 'RFC1766', RFC1766)
# Complex type {http://purl.org/dc/terms/}RFC3066 with content type SIMPLE
class RFC3066 (pyxb.bundles.dc.dc.SimpleLiteral):
    """Complex type {http://purl.org/dc/terms/}RFC3066 with content type SIMPLE

    Binding for the dcterms RFC3066 type: a SimpleLiteral restriction whose
    simple content is STD_ANON_12 and whose xml:lang attribute is prohibited.
    """
    _TypeDefinition = STD_ANON_12
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'RFC3066')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 284, 2)
    _ElementMap = pyxb.bundles.dc.dc.SimpleLiteral._ElementMap.copy()
    _AttributeMap = pyxb.bundles.dc.dc.SimpleLiteral._AttributeMap.copy()
    # Base type is pyxb.bundles.dc.dc.SimpleLiteral
    # Attribute lang is restricted from parent
    # Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
    __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httppurl_orgdcelements1_1_SimpleLiteral_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang, prohibited=True)
    __lang._DeclarationLocation = None
    __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 290, 8)
    lang = property(__lang.value, __lang.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lang.name() : __lang
    })
Namespace.addCategoryObject('typeBinding', 'RFC3066', RFC3066)
# Complex type {http://purl.org/dc/terms/}RFC4646 with content type SIMPLE
class RFC4646 (pyxb.bundles.dc.dc.SimpleLiteral):
    """Complex type {http://purl.org/dc/terms/}RFC4646 with content type SIMPLE

    Binding for the dcterms RFC4646 type: a SimpleLiteral restriction whose
    simple content is STD_ANON_13 and whose xml:lang attribute is prohibited.
    """
    _TypeDefinition = STD_ANON_13
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'RFC4646')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 295, 2)
    _ElementMap = pyxb.bundles.dc.dc.SimpleLiteral._ElementMap.copy()
    _AttributeMap = pyxb.bundles.dc.dc.SimpleLiteral._AttributeMap.copy()
    # Base type is pyxb.bundles.dc.dc.SimpleLiteral
    # Attribute lang is restricted from parent
    # Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
    __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httppurl_orgdcelements1_1_SimpleLiteral_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang, prohibited=True)
    __lang._DeclarationLocation = None
    __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 301, 8)
    lang = property(__lang.value, __lang.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lang.name() : __lang
    })
Namespace.addCategoryObject('typeBinding', 'RFC4646', RFC4646)
# Complex type {http://purl.org/dc/terms/}Point with content type SIMPLE
class Point (pyxb.bundles.dc.dc.SimpleLiteral):
    """Complex type {http://purl.org/dc/terms/}Point with content type SIMPLE

    Binding for the dcterms Point type: a SimpleLiteral restriction whose
    simple content is STD_ANON_14 and whose xml:lang attribute is prohibited.
    """
    _TypeDefinition = STD_ANON_14
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'Point')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 306, 2)
    _ElementMap = pyxb.bundles.dc.dc.SimpleLiteral._ElementMap.copy()
    _AttributeMap = pyxb.bundles.dc.dc.SimpleLiteral._AttributeMap.copy()
    # Base type is pyxb.bundles.dc.dc.SimpleLiteral
    # Attribute lang is restricted from parent
    # Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
    __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httppurl_orgdcelements1_1_SimpleLiteral_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang, prohibited=True)
    __lang._DeclarationLocation = None
    __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 312, 8)
    lang = property(__lang.value, __lang.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lang.name() : __lang
    })
Namespace.addCategoryObject('typeBinding', 'Point', Point)
# Complex type {http://purl.org/dc/terms/}ISO3166 with content type SIMPLE
class ISO3166 (pyxb.bundles.dc.dc.SimpleLiteral):
    """Complex type {http://purl.org/dc/terms/}ISO3166 with content type SIMPLE

    Binding for the dcterms ISO3166 type: a SimpleLiteral restriction whose
    simple content is STD_ANON_15 and whose xml:lang attribute is prohibited.
    """
    _TypeDefinition = STD_ANON_15
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ISO3166')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 317, 2)
    _ElementMap = pyxb.bundles.dc.dc.SimpleLiteral._ElementMap.copy()
    _AttributeMap = pyxb.bundles.dc.dc.SimpleLiteral._AttributeMap.copy()
    # Base type is pyxb.bundles.dc.dc.SimpleLiteral
    # Attribute lang is restricted from parent
    # Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
    __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httppurl_orgdcelements1_1_SimpleLiteral_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang, prohibited=True)
    __lang._DeclarationLocation = None
    __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 323, 8)
    lang = property(__lang.value, __lang.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lang.name() : __lang
    })
Namespace.addCategoryObject('typeBinding', 'ISO3166', ISO3166)
# Complex type {http://purl.org/dc/terms/}Box with content type SIMPLE
class Box (pyxb.bundles.dc.dc.SimpleLiteral):
    """Complex type {http://purl.org/dc/terms/}Box with content type SIMPLE

    Binding for the dcterms Box type: a SimpleLiteral restriction whose
    simple content is STD_ANON_16 and whose xml:lang attribute is prohibited.
    """
    _TypeDefinition = STD_ANON_16
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'Box')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 328, 2)
    _ElementMap = pyxb.bundles.dc.dc.SimpleLiteral._ElementMap.copy()
    _AttributeMap = pyxb.bundles.dc.dc.SimpleLiteral._AttributeMap.copy()
    # Base type is pyxb.bundles.dc.dc.SimpleLiteral
    # Attribute lang is restricted from parent
    # Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
    __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httppurl_orgdcelements1_1_SimpleLiteral_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang, prohibited=True)
    __lang._DeclarationLocation = None
    __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 334, 8)
    lang = property(__lang.value, __lang.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lang.name() : __lang
    })
Namespace.addCategoryObject('typeBinding', 'Box', Box)
# Complex type {http://purl.org/dc/terms/}TGN with content type SIMPLE
class TGN (pyxb.bundles.dc.dc.SimpleLiteral):
    """Complex type {http://purl.org/dc/terms/}TGN with content type SIMPLE

    Binding for the dcterms TGN type: a SimpleLiteral restriction whose
    simple content is STD_ANON_17 and whose xml:lang attribute is prohibited.
    """
    _TypeDefinition = STD_ANON_17
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'TGN')
    _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 339, 2)
    _ElementMap = pyxb.bundles.dc.dc.SimpleLiteral._ElementMap.copy()
    _AttributeMap = pyxb.bundles.dc.dc.SimpleLiteral._AttributeMap.copy()
    # Base type is pyxb.bundles.dc.dc.SimpleLiteral
    # Attribute lang is restricted from parent
    # Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
    __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httppurl_orgdcelements1_1_SimpleLiteral_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang, prohibited=True)
    __lang._DeclarationLocation = None
    __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 345, 8)
    lang = property(__lang.value, __lang.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __lang.name() : __lang
    })
Namespace.addCategoryObject('typeBinding', 'TGN', TGN)
# -- Module-level element bindings -------------------------------------------
# Each pair below declares a global element of type xs:anyType from the
# dcterms schema and registers it under the namespace's 'elementBinding'
# category.  The Location arguments record where each declaration appears in
# the generator's copy of dcterms.xsd.  Names such as `type`, `format` and
# `license` intentionally mirror the schema and shadow Python builtins at
# module scope; they are part of the generated public API and must not be
# renamed.

# The fifteen core Dublin Core elements.
title = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'title'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 75, 3))
Namespace.addCategoryObject('elementBinding', title.name().localName(), title)
creator = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'creator'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 76, 3))
Namespace.addCategoryObject('elementBinding', creator.name().localName(), creator)
subject = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'subject'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 77, 3))
Namespace.addCategoryObject('elementBinding', subject.name().localName(), subject)
description = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'description'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 78, 3))
Namespace.addCategoryObject('elementBinding', description.name().localName(), description)
publisher = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'publisher'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 79, 3))
Namespace.addCategoryObject('elementBinding', publisher.name().localName(), publisher)
contributor = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'contributor'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 80, 3))
Namespace.addCategoryObject('elementBinding', contributor.name().localName(), contributor)
date = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'date'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 81, 3))
Namespace.addCategoryObject('elementBinding', date.name().localName(), date)
type = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'type'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 82, 3))
Namespace.addCategoryObject('elementBinding', type.name().localName(), type)
format = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'format'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 83, 3))
Namespace.addCategoryObject('elementBinding', format.name().localName(), format)
identifier = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'identifier'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 84, 3))
Namespace.addCategoryObject('elementBinding', identifier.name().localName(), identifier)
source = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'source'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 85, 3))
Namespace.addCategoryObject('elementBinding', source.name().localName(), source)
language = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'language'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 86, 3))
Namespace.addCategoryObject('elementBinding', language.name().localName(), language)
relation = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'relation'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 87, 3))
Namespace.addCategoryObject('elementBinding', relation.name().localName(), relation)
coverage = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'coverage'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 88, 3))
Namespace.addCategoryObject('elementBinding', coverage.name().localName(), coverage)
rights = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'rights'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 89, 3))
Namespace.addCategoryObject('elementBinding', rights.name().localName(), rights)
# Refinement terms defined by the dcterms schema.
alternative = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'alternative'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 91, 3))
Namespace.addCategoryObject('elementBinding', alternative.name().localName(), alternative)
tableOfContents = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'tableOfContents'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 93, 3))
Namespace.addCategoryObject('elementBinding', tableOfContents.name().localName(), tableOfContents)
abstract = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'abstract'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 94, 3))
Namespace.addCategoryObject('elementBinding', abstract.name().localName(), abstract)
created = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'created'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 96, 3))
Namespace.addCategoryObject('elementBinding', created.name().localName(), created)
valid = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'valid'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 97, 3))
Namespace.addCategoryObject('elementBinding', valid.name().localName(), valid)
available = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'available'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 98, 3))
Namespace.addCategoryObject('elementBinding', available.name().localName(), available)
issued = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'issued'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 99, 3))
Namespace.addCategoryObject('elementBinding', issued.name().localName(), issued)
modified = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'modified'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 100, 3))
Namespace.addCategoryObject('elementBinding', modified.name().localName(), modified)
dateAccepted = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'dateAccepted'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 101, 3))
Namespace.addCategoryObject('elementBinding', dateAccepted.name().localName(), dateAccepted)
dateCopyrighted = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'dateCopyrighted'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 102, 3))
Namespace.addCategoryObject('elementBinding', dateCopyrighted.name().localName(), dateCopyrighted)
dateSubmitted = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'dateSubmitted'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 103, 3))
Namespace.addCategoryObject('elementBinding', dateSubmitted.name().localName(), dateSubmitted)
extent = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'extent'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 105, 3))
Namespace.addCategoryObject('elementBinding', extent.name().localName(), extent)
medium = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'medium'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 106, 3))
Namespace.addCategoryObject('elementBinding', medium.name().localName(), medium)
isVersionOf = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'isVersionOf'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 108, 3))
Namespace.addCategoryObject('elementBinding', isVersionOf.name().localName(), isVersionOf)
hasVersion = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'hasVersion'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 109, 3))
Namespace.addCategoryObject('elementBinding', hasVersion.name().localName(), hasVersion)
isReplacedBy = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'isReplacedBy'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 110, 3))
Namespace.addCategoryObject('elementBinding', isReplacedBy.name().localName(), isReplacedBy)
replaces = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'replaces'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 111, 3))
Namespace.addCategoryObject('elementBinding', replaces.name().localName(), replaces)
isRequiredBy = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'isRequiredBy'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 112, 3))
Namespace.addCategoryObject('elementBinding', isRequiredBy.name().localName(), isRequiredBy)
requires = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'requires'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 113, 3))
Namespace.addCategoryObject('elementBinding', requires.name().localName(), requires)
isPartOf = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'isPartOf'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 114, 3))
Namespace.addCategoryObject('elementBinding', isPartOf.name().localName(), isPartOf)
hasPart = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'hasPart'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 115, 3))
Namespace.addCategoryObject('elementBinding', hasPart.name().localName(), hasPart)
isReferencedBy = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'isReferencedBy'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 116, 3))
Namespace.addCategoryObject('elementBinding', isReferencedBy.name().localName(), isReferencedBy)
references = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'references'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 117, 3))
Namespace.addCategoryObject('elementBinding', references.name().localName(), references)
isFormatOf = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'isFormatOf'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 118, 3))
Namespace.addCategoryObject('elementBinding', isFormatOf.name().localName(), isFormatOf)
hasFormat = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'hasFormat'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 119, 3))
Namespace.addCategoryObject('elementBinding', hasFormat.name().localName(), hasFormat)
conformsTo = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'conformsTo'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 120, 3))
Namespace.addCategoryObject('elementBinding', conformsTo.name().localName(), conformsTo)
spatial = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'spatial'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 122, 3))
Namespace.addCategoryObject('elementBinding', spatial.name().localName(), spatial)
temporal = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'temporal'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 123, 3))
Namespace.addCategoryObject('elementBinding', temporal.name().localName(), temporal)
audience = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'audience'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 125, 3))
Namespace.addCategoryObject('elementBinding', audience.name().localName(), audience)
accrualMethod = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'accrualMethod'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 126, 3))
Namespace.addCategoryObject('elementBinding', accrualMethod.name().localName(), accrualMethod)
accrualPeriodicity = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'accrualPeriodicity'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 127, 3))
Namespace.addCategoryObject('elementBinding', accrualPeriodicity.name().localName(), accrualPeriodicity)
accrualPolicy = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'accrualPolicy'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 128, 3))
Namespace.addCategoryObject('elementBinding', accrualPolicy.name().localName(), accrualPolicy)
instructionalMethod = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'instructionalMethod'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 129, 3))
Namespace.addCategoryObject('elementBinding', instructionalMethod.name().localName(), instructionalMethod)
provenance = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'provenance'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 130, 3))
Namespace.addCategoryObject('elementBinding', provenance.name().localName(), provenance)
rightsHolder = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'rightsHolder'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 131, 3))
Namespace.addCategoryObject('elementBinding', rightsHolder.name().localName(), rightsHolder)
mediator = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'mediator'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 133, 3))
Namespace.addCategoryObject('elementBinding', mediator.name().localName(), mediator)
educationLevel = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'educationLevel'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 134, 3))
Namespace.addCategoryObject('elementBinding', educationLevel.name().localName(), educationLevel)
accessRights = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'accessRights'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 136, 3))
Namespace.addCategoryObject('elementBinding', accessRights.name().localName(), accessRights)
license = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'license'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 137, 3))
Namespace.addCategoryObject('elementBinding', license.name().localName(), license)
bibliographicCitation = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'bibliographicCitation'), pyxb.binding.datatypes.anyType, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 139, 3))
Namespace.addCategoryObject('elementBinding', bibliographicCitation.name().localName(), bibliographicCitation)
# Register the {http://purl.org/dc/elements/1.1/}any element as a member of
# the elementOrRefinementContainer content model (declared in dc.xsd).
elementOrRefinementContainer._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(_Namespace_dc, 'any'), pyxb.bundles.dc.dc.SimpleLiteral, abstract=pyxb.binding.datatypes.boolean(1), scope=elementOrRefinementContainer, location=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dc.xsd', 70, 2)))
def _BuildAutomaton ():
    """Build the finite-automaton content model for elementOrRefinementContainer.

    The model is a single optional-repeatable element: {dc}any with
    occurrence (0, unbounded), expressed as one state guarded by a counter
    condition.  Returns the :class:`pyxb.utils.fac.Automaton`.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton
    del _BuildAutomaton
    import pyxb.utils.fac as fac

    counters = set()
    # Counter for the (min=0, max=unbounded) occurrence of the sole element.
    cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 362, 4))
    counters.add(cc_0)
    states = []
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(elementOrRefinementContainer._UseForTag(pyxb.namespace.ExpandedName(_Namespace_dc, 'any')), pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/dc/schemas/dcterms.xsd', 363, 1))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # Self-transition: the element may repeat, incrementing the counter.
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    st_0._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
elementOrRefinementContainer._Automaton = _BuildAutomaton()
# Substitution-group wiring (PyXB-generated).
# First: each dcterms element substitutes for the legacy Dublin Core
# /elements/1.1/ element of the same name.
title._setSubstitutionGroup(pyxb.bundles.dc.dc.title)
creator._setSubstitutionGroup(pyxb.bundles.dc.dc.creator)
subject._setSubstitutionGroup(pyxb.bundles.dc.dc.subject)
description._setSubstitutionGroup(pyxb.bundles.dc.dc.description)
publisher._setSubstitutionGroup(pyxb.bundles.dc.dc.publisher)
contributor._setSubstitutionGroup(pyxb.bundles.dc.dc.contributor)
date._setSubstitutionGroup(pyxb.bundles.dc.dc.date)
type._setSubstitutionGroup(pyxb.bundles.dc.dc.type)
format._setSubstitutionGroup(pyxb.bundles.dc.dc.format)
identifier._setSubstitutionGroup(pyxb.bundles.dc.dc.identifier)
source._setSubstitutionGroup(pyxb.bundles.dc.dc.source)
language._setSubstitutionGroup(pyxb.bundles.dc.dc.language)
relation._setSubstitutionGroup(pyxb.bundles.dc.dc.relation)
coverage._setSubstitutionGroup(pyxb.bundles.dc.dc.coverage)
rights._setSubstitutionGroup(pyxb.bundles.dc.dc.rights)
# Second: each refinement element substitutes for the (more general)
# dcterms element it refines, e.g. 'created' is a kind of 'date'.
alternative._setSubstitutionGroup(title)
tableOfContents._setSubstitutionGroup(description)
abstract._setSubstitutionGroup(description)
created._setSubstitutionGroup(date)
valid._setSubstitutionGroup(date)
available._setSubstitutionGroup(date)
issued._setSubstitutionGroup(date)
modified._setSubstitutionGroup(date)
dateAccepted._setSubstitutionGroup(date)
dateCopyrighted._setSubstitutionGroup(date)
dateSubmitted._setSubstitutionGroup(date)
extent._setSubstitutionGroup(format)
medium._setSubstitutionGroup(format)
isVersionOf._setSubstitutionGroup(relation)
hasVersion._setSubstitutionGroup(relation)
isReplacedBy._setSubstitutionGroup(relation)
replaces._setSubstitutionGroup(relation)
isRequiredBy._setSubstitutionGroup(relation)
requires._setSubstitutionGroup(relation)
isPartOf._setSubstitutionGroup(relation)
hasPart._setSubstitutionGroup(relation)
isReferencedBy._setSubstitutionGroup(relation)
references._setSubstitutionGroup(relation)
isFormatOf._setSubstitutionGroup(relation)
hasFormat._setSubstitutionGroup(relation)
conformsTo._setSubstitutionGroup(relation)
spatial._setSubstitutionGroup(coverage)
temporal._setSubstitutionGroup(coverage)
# Elements with no /elements/1.1/ counterpart substitute for the abstract
# {dc}any head element.
audience._setSubstitutionGroup(pyxb.bundles.dc.dc.any)
accrualMethod._setSubstitutionGroup(pyxb.bundles.dc.dc.any)
accrualPeriodicity._setSubstitutionGroup(pyxb.bundles.dc.dc.any)
accrualPolicy._setSubstitutionGroup(pyxb.bundles.dc.dc.any)
instructionalMethod._setSubstitutionGroup(pyxb.bundles.dc.dc.any)
provenance._setSubstitutionGroup(pyxb.bundles.dc.dc.any)
rightsHolder._setSubstitutionGroup(pyxb.bundles.dc.dc.any)
mediator._setSubstitutionGroup(audience)
educationLevel._setSubstitutionGroup(audience)
accessRights._setSubstitutionGroup(rights)
license._setSubstitutionGroup(rights)
bibliographicCitation._setSubstitutionGroup(identifier)
| 53.880972
| 345
| 0.764795
| 7,926
| 64,280
| 6.053873
| 0.063714
| 0.053873
| 0.063668
| 0.059021
| 0.755372
| 0.731301
| 0.722297
| 0.698081
| 0.616656
| 0.613905
| 0
| 0.028053
| 0.103298
| 64,280
| 1,192
| 346
| 53.926175
| 0.804407
| 0.151587
| 0
| 0.396254
| 1
| 0.168588
| 0.214953
| 0.177191
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004323
| false
| 0
| 0.020173
| 0
| 0.430836
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
fce4091cc7e0cd3f3139c23809c116b185f16330
| 33
|
py
|
Python
|
custom_components/__init__.py
|
Velines/hass_datelist_countdown
|
892fda10d8d83b39b02699a840e3a30d018c0cf6
|
[
"MIT"
] | null | null | null |
custom_components/__init__.py
|
Velines/hass_datelist_countdown
|
892fda10d8d83b39b02699a840e3a30d018c0cf6
|
[
"MIT"
] | null | null | null |
custom_components/__init__.py
|
Velines/hass_datelist_countdown
|
892fda10d8d83b39b02699a840e3a30d018c0cf6
|
[
"MIT"
] | null | null | null |
"""datelist_countdown module."""
| 16.5
| 32
| 0.727273
| 3
| 33
| 7.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 33
| 1
| 33
| 33
| 0.741935
| 0.787879
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
fceecd3497190e29715e529b227a0087f5159d2a
| 204
|
py
|
Python
|
pandas_utility/__init__.py
|
mmphego/pandas_utility
|
edb1679bef7d0517ea37b0163cec77e68d2599af
|
[
"MIT"
] | 5
|
2019-08-13T22:07:17.000Z
|
2020-07-14T06:43:10.000Z
|
pandas_utility/__init__.py
|
mmphego/pandas_utility
|
edb1679bef7d0517ea37b0163cec77e68d2599af
|
[
"MIT"
] | null | null | null |
pandas_utility/__init__.py
|
mmphego/pandas_utility
|
edb1679bef7d0517ea37b0163cec77e68d2599af
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Top-level package for Pandas Utility."""

__author__ = """Mpho Mphego"""
__email__ = "mpho112@gmail.com"

# Re-export the main class at package level.
# Fix: the noqa directive must name the full flake8 code ("F401",
# unused import); the bare "401" form is ignored by flake8.
from pandas_utility.pandas_utility import PandasUtilities  # noqa: F401
| 22.666667
| 69
| 0.710784
| 25
| 204
| 5.4
| 0.84
| 0.288889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.039548
| 0.132353
| 204
| 8
| 70
| 25.5
| 0.723164
| 0.338235
| 0
| 0
| 0
| 0
| 0.21875
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
1e126ec2604ca327dc5cc99cbf66088d8862e4f1
| 90
|
py
|
Python
|
shell.py
|
truebit/ObjectPath
|
4f13d3fba79d46cfdac5974a6eed71712a4a7ac5
|
[
"MIT"
] | 327
|
2015-01-02T13:20:39.000Z
|
2022-03-28T11:30:25.000Z
|
shell.py
|
truebit/ObjectPath
|
4f13d3fba79d46cfdac5974a6eed71712a4a7ac5
|
[
"MIT"
] | 71
|
2015-02-03T08:22:58.000Z
|
2021-06-20T07:01:51.000Z
|
shell.py
|
truebit/ObjectPath
|
4f13d3fba79d46cfdac5974a6eed71712a4a7ac5
|
[
"MIT"
] | 89
|
2015-02-10T01:02:42.000Z
|
2021-08-09T07:17:33.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Thin launcher for the ObjectPath interactive shell."""
from objectpath import shell

# Guard the entry point so importing this module does not start the
# interactive shell as a side effect (original ran shell.main() unconditionally).
if __name__ == "__main__":
    shell.main()
| 12.857143
| 28
| 0.655556
| 13
| 90
| 4.538462
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013158
| 0.155556
| 90
| 6
| 29
| 15
| 0.763158
| 0.466667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
1e1d86d92373331aa1328e926df8c462cedba838
| 117
|
py
|
Python
|
code/abc160_c_03.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | 3
|
2019-08-16T16:55:48.000Z
|
2021-04-11T10:21:40.000Z
|
code/abc160_c_03.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | null | null | null |
code/abc160_c_03.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | null | null | null |
# Houses on a circular road of circumference k; visit all n of them with the
# shortest walk.  The optimal route skips exactly one gap — the largest one.
k, n = (int(v) for v in input().split())
houses = [int(v) for v in input().split()]
# Close the circle: the first house again, one full lap ahead.
houses.append(houses[0] + k)
widest_gap = max(b - a for a, b in zip(houses, houses[1:]))
print(k - widest_gap)
| 29.25
| 43
| 0.589744
| 29
| 117
| 2.37931
| 0.551724
| 0.173913
| 0.318841
| 0.463768
| 0.492754
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018182
| 0.059829
| 117
| 4
| 43
| 29.25
| 0.609091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
1e5909320bb1555eb5792e3107a5c94458cb233e
| 61
|
py
|
Python
|
atcoder/other/chokudai_s001_g.py
|
knuu/competitive-programming
|
16bc68fdaedd6f96ae24310d697585ca8836ab6e
|
[
"MIT"
] | 1
|
2018-11-12T15:18:55.000Z
|
2018-11-12T15:18:55.000Z
|
atcoder/other/chokudai_s001_g.py
|
knuu/competitive-programming
|
16bc68fdaedd6f96ae24310d697585ca8836ab6e
|
[
"MIT"
] | null | null | null |
atcoder/other/chokudai_s001_g.py
|
knuu/competitive-programming
|
16bc68fdaedd6f96ae24310d697585ca8836ab6e
|
[
"MIT"
] | null | null | null |
# First input line (a count) is irrelevant to the computation — discard it.
input()
# Glue the whitespace-separated digits of the second line into one big
# integer and reduce it modulo 1e9+7.
digits = ''.join(input().split())
print(int(digits) % (10 ** 9 + 7))
| 20.333333
| 52
| 0.508197
| 9
| 61
| 3.444444
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075472
| 0.131148
| 61
| 2
| 53
| 30.5
| 0.509434
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
1e5e0a37fab256057dd1bc0ca2807de8311fbb4b
| 186
|
py
|
Python
|
staticforms/__init__.py
|
unocongafas/staticforms
|
d16d298fcb4b18f2a8658ba40c1ee14e17dd8305
|
[
"Apache-2.0"
] | null | null | null |
staticforms/__init__.py
|
unocongafas/staticforms
|
d16d298fcb4b18f2a8658ba40c1ee14e17dd8305
|
[
"Apache-2.0"
] | null | null | null |
staticforms/__init__.py
|
unocongafas/staticforms
|
d16d298fcb4b18f2a8658ba40c1ee14e17dd8305
|
[
"Apache-2.0"
] | null | null | null |
"""Static forms."""
from starlette.applications import Starlette
from .middlewares import middleware
from .handlers import routes
app = Starlette(routes=routes, middleware=middleware)
| 23.25
| 53
| 0.806452
| 21
| 186
| 7.142857
| 0.52381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107527
| 186
| 7
| 54
| 26.571429
| 0.903614
| 0.069892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
1eb9c525ff6619c20cdd3c9935ef546d390f475f
| 420
|
py
|
Python
|
src/vbr/errors.py
|
JoshuaUrrutia/python-vbr-1
|
a287031abd2a53db04631cce31e962f40705127b
|
[
"BSD-3-Clause"
] | null | null | null |
src/vbr/errors.py
|
JoshuaUrrutia/python-vbr-1
|
a287031abd2a53db04631cce31e962f40705127b
|
[
"BSD-3-Clause"
] | null | null | null |
src/vbr/errors.py
|
JoshuaUrrutia/python-vbr-1
|
a287031abd2a53db04631cce31e962f40705127b
|
[
"BSD-3-Clause"
] | null | null | null |
class GenericRecordError(Exception):
    """Base class for errors raised while working with VBR records."""
    pass
class ValidationError(GenericRecordError):
    """A record failed validation."""
    pass
class TableNotSupported(GenericRecordError):
    """The requested table is not supported."""
    pass
class DuplicateSignature(GenericRecordError):
    """A record with the same signature already exists."""
    pass
class GenericVBRError(Exception):
    """Base class for general VBR errors not tied to a specific record."""
    pass
class ConnectionFailedError(GenericVBRError):
    """Connecting to the backing service failed."""
    pass
class RecordNotFoundError(GenericVBRError):
    """The requested record could not be found."""
    pass
class NotUniqueError(GenericVBRError):
    """An operation that requires uniqueness matched more than one record."""
    pass
| 13.548387
| 45
| 0.77619
| 32
| 420
| 10.1875
| 0.34375
| 0.193252
| 0.248466
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 420
| 30
| 46
| 14
| 0.931429
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 0
| 0
| 1
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
1eddfd1e6373f6c685269009ae6d057699e2c1fe
| 92
|
py
|
Python
|
PyOpdb/self_PyOpdb/tools/__init__.py
|
GodInLove/OPDB
|
d5d9c9ce5239037dcc57abba6377abbfccec32d1
|
[
"Apache-2.0"
] | 1
|
2017-09-24T15:59:31.000Z
|
2017-09-24T15:59:31.000Z
|
PyOpdb/self_PyOpdb/tools/__init__.py
|
GodInLove/OPDB
|
d5d9c9ce5239037dcc57abba6377abbfccec32d1
|
[
"Apache-2.0"
] | null | null | null |
PyOpdb/self_PyOpdb/tools/__init__.py
|
GodInLove/OPDB
|
d5d9c9ce5239037dcc57abba6377abbfccec32d1
|
[
"Apache-2.0"
] | null | null | null |
__author__ = "yd.liu"
from .fastq import srr_n_to_fastq
from .operon import operon_predict
| 18.4
| 34
| 0.804348
| 15
| 92
| 4.4
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 92
| 4
| 35
| 23
| 0.825
| 0
| 0
| 0
| 0
| 0
| 0.065217
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
949a37a92fc08d4b9a657f5e899a036e9e5a1f06
| 413
|
py
|
Python
|
PyTradier/utils.py
|
zlopez101/PyTradier
|
83397cf38bd636c471993b57fb71a12885affcb7
|
[
"MIT"
] | 1
|
2021-04-30T23:59:20.000Z
|
2021-04-30T23:59:20.000Z
|
PyTradier/utils.py
|
zlopez101/PyTradier
|
83397cf38bd636c471993b57fb71a12885affcb7
|
[
"MIT"
] | 7
|
2021-05-08T00:47:59.000Z
|
2021-05-12T01:45:37.000Z
|
PyTradier/utils.py
|
zlopez101/PyTradier
|
83397cf38bd636c471993b57fb71a12885affcb7
|
[
"MIT"
] | null | null | null |
import requests
import pprint
from functools import wraps
from PyTradier.exceptions import *
def printer(response: requests.Response):
    """Pretty-print the interesting parts of an HTTP response.

    Dumps the status code, headers, raw body text and parsed JSON body,
    in that order — a quick aid when testing endpoints.

    :param response: the response from API call
    :type response: Response
    """
    for piece in (response.status_code, response.headers,
                  response.text, response.json()):
        pprint.pprint(piece)
| 22.944444
| 47
| 0.743341
| 49
| 413
| 6.244898
| 0.530612
| 0.156863
| 0.261438
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171913
| 413
| 17
| 48
| 24.294118
| 0.894737
| 0.25908
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.444444
| 0
| 0.555556
| 0.666667
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 4
|
949cfad7e19b1c359695164abec5f858885ddf25
| 214
|
py
|
Python
|
apps/kindeditor/models.py
|
dryprojects/MyBlog
|
ec04ba2bc658e96cddeb1d4766047ca8e89ff656
|
[
"BSD-3-Clause"
] | 2
|
2021-08-17T13:29:21.000Z
|
2021-09-04T05:00:01.000Z
|
apps/kindeditor/models.py
|
dryprojects/MyBlog
|
ec04ba2bc658e96cddeb1d4766047ca8e89ff656
|
[
"BSD-3-Clause"
] | 1
|
2020-07-16T11:22:32.000Z
|
2020-07-16T11:22:32.000Z
|
apps/kindeditor/models.py
|
dryprojects/MyBlog
|
ec04ba2bc658e96cddeb1d4766047ca8e89ff656
|
[
"BSD-3-Clause"
] | 1
|
2020-09-18T10:41:59.000Z
|
2020-09-18T10:41:59.000Z
|
from django.db import models
# Create your models here.
class ImageUpload(models.Model):
    # Image uploaded via the KindEditor rich-text widget; files land under
    # MEDIA_ROOT/kindeditor/images/<year>/<month>.  The verbose_name is
    # Chinese for "image uploaded by kindeditor" — left untranslated because
    # it is a runtime string shown in the Django admin.
    imgFile = models.ImageField(verbose_name='kindeditor上传的图片', upload_to='kindeditor/images/%Y/%m', max_length=200)
| 30.571429
| 117
| 0.752336
| 28
| 214
| 5.642857
| 0.892857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016129
| 0.130841
| 214
| 6
| 118
| 35.666667
| 0.833333
| 0.11215
| 0
| 0
| 0
| 0
| 0.208791
| 0.126374
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
94bdb9ae075cdf2432a6ba928ec85ebee311d8b9
| 163
|
py
|
Python
|
portfolio/myportfolio/admin.py
|
annaadhiambo/Portfolio
|
02befd352c64ed9198f44d87e550912401f8bca9
|
[
"MIT"
] | null | null | null |
portfolio/myportfolio/admin.py
|
annaadhiambo/Portfolio
|
02befd352c64ed9198f44d87e550912401f8bca9
|
[
"MIT"
] | 5
|
2020-06-05T20:44:18.000Z
|
2021-09-22T18:31:13.000Z
|
portfolio/myportfolio/admin.py
|
annaadhiambo/Portfolio
|
02befd352c64ed9198f44d87e550912401f8bca9
|
[
"MIT"
] | 2
|
2020-06-16T03:51:45.000Z
|
2020-07-06T14:07:42.000Z
|
from django.contrib import admin
from .models import *
# Register your models here.
# Make the Project model manageable through the Django admin.
admin.site.register(Project)
# Replace the default "Django administration" header on the admin site.
admin.site.site_header = 'My Portfolio Dashboard'
| 27.166667
| 49
| 0.797546
| 23
| 163
| 5.608696
| 0.652174
| 0.139535
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116564
| 163
| 6
| 49
| 27.166667
| 0.895833
| 0.159509
| 0
| 0
| 0
| 0
| 0.161765
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
94c27edb62d2ba453bd2d3a92a92bcaab2aeddfb
| 101
|
py
|
Python
|
holobot/discord/sdk/models/message.py
|
rexor12/holobot
|
89b7b416403d13ccfeee117ef942426b08d3651d
|
[
"MIT"
] | 1
|
2021-05-24T00:17:46.000Z
|
2021-05-24T00:17:46.000Z
|
holobot/discord/sdk/models/message.py
|
rexor12/holobot
|
89b7b416403d13ccfeee117ef942426b08d3651d
|
[
"MIT"
] | 41
|
2021-03-24T22:50:09.000Z
|
2021-12-17T12:15:13.000Z
|
holobot/discord/sdk/models/message.py
|
rexor12/holobot
|
89b7b416403d13ccfeee117ef942426b08d3651d
|
[
"MIT"
] | null | null | null |
from dataclasses import dataclass
@dataclass
class Message:
    """Lightweight reference to a chat message: the channel it lives in and its own ID."""
    # channel_id: identifier of the channel containing the message.
    # message_id: identifier of the message itself.
    channel_id: str
    message_id: str
| 14.428571
| 33
| 0.762376
| 13
| 101
| 5.769231
| 0.692308
| 0.133333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.19802
| 101
| 6
| 34
| 16.833333
| 0.925926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.2
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
94eb02a33e45ee6db4fc431874a190b615b235a7
| 3,843
|
py
|
Python
|
vkontakte_api/api.py
|
mcfoton/django-vkontakte-api
|
14f1be79452e448e1b2b53fd89ae577131e35898
|
[
"BSD-3-Clause"
] | null | null | null |
vkontakte_api/api.py
|
mcfoton/django-vkontakte-api
|
14f1be79452e448e1b2b53fd89ae577131e35898
|
[
"BSD-3-Clause"
] | null | null | null |
vkontakte_api/api.py
|
mcfoton/django-vkontakte-api
|
14f1be79452e448e1b2b53fd89ae577131e35898
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from django.conf import settings
from oauth_tokens.api import ApiAbstractBase, Singleton
from oauth_tokens.models import AccessToken
from vkontakte import VKError as VkontakteError, API
__all__ = ['api_call', 'VkontakteError']
class VkontakteApi(ApiAbstractBase):
    """Vkontakte (VK) API client built on oauth_tokens' ApiAbstractBase.

    Resolves access tokens either from Django settings or from the stored
    AccessToken pool, and maps VK error codes to recovery strategies:
    token refresh/rotation, sleep-and-retry, or the manual validation flow.
    """
    # NOTE: Python-2 style metaclass hook; on Python 3 this attribute has
    # no effect (a ``metaclass=`` keyword would be required instead).
    __metaclass__ = Singleton

    provider = 'vkontakte'
    error_class = VkontakteError
    # Per-request timeout in seconds, overridable via settings.
    request_timeout = getattr(settings, 'VKONTAKTE_API_REQUEST_TIMEOUT', 1)

    def get_consistent_token(self):
        """Return the single token pinned in settings, or None to use the token pool."""
        return getattr(settings, 'VKONTAKTE_API_ACCESS_TOKEN', None)

    def get_tokens(self, **kwargs):
        """Return all active stored tokens for this provider."""
        return AccessToken.objects.filter_active_tokens_of_provider(self.provider, **kwargs)

    def get_api(self, **kwargs):
        """Build a vkontakte.API client around a freshly resolved token."""
        return API(token=self.get_token(**kwargs))

    def get_api_response(self, *args, **kwargs):
        """Perform the actual API request for the currently selected method."""
        return self.api.get(self.method, timeout=self.request_timeout, *args, **kwargs)

    def handle_error_code_5(self, e, *args, **kwargs):
        # Error 5 (authorization failed): refresh tokens, then retry.
        self.logger.info("Updating vkontakte access token, recursion count: %d" % self.recursion_count)
        self.update_tokens()
        return self.repeat_call(*args, **kwargs)

    def handle_error_code_6(self, e, *args, **kwargs):
        # try access_token by another user
        self.logger.info(
            "Vkontakte error 'Too many requests per second' on method: %s, recursion count: %d" % (self.method, self.recursion_count))
        self.used_access_tokens += [self.api.token]
        return self.repeat_call(*args, **kwargs)

    def handle_error_code_9(self, e, *args, **kwargs):
        # Error 9 (flood control): retire this token and retry after a pause.
        self.logger.warning("Vkontakte flood control registered while executing method %s with params %s, \
recursion count: %d" % (self.method, kwargs, self.recursion_count))
        self.used_access_tokens += [self.api.token]
        return self.sleep_repeat_call(*args, **kwargs)

    def handle_error_code_10(self, e, *args, **kwargs):
        # Error 10 (internal server error): simply wait and retry.
        self.logger.warning("Internal server error: Database problems, try later. Error registered while executing \
method %s with params %s, recursion count: %d" % (self.method, kwargs, self.recursion_count))
        return self.sleep_repeat_call(*args, **kwargs)

    def handle_error_code_17(self, e, *args, **kwargs):
        # Validation required: please open redirect_uri in browser
        # TODO: cover with tests
        self.logger.warning("Request error: %s. Error registered while executing \
method %s with params %s, recursion count: %d" % (e, self.method, kwargs, self.recursion_count))
        # Replay the validation form using the mobile VK frontend with the
        # credentials of the user who owns the current token.
        user = AccessToken.objects.get(access_token=self.api.token).user_credentials
        auth_request = AccessToken.objects.get_token_for_user('vkontakte', user).auth_request
        auth_request.form_action_domain = 'https://m.vk.com'
        response = auth_request.session.get(e.redirect_uri)
        try:
            method, action, data = auth_request.get_form_data_from_content(response.content)
        except Exception:
            # Fix: was a bare `except:`, which also swallowed SystemExit /
            # KeyboardInterrupt.  Narrowed to Exception; behavior otherwise
            # unchanged — any parse failure is reported via the raise below.
            raise Exception("There is no any form in response: %s" % response.content)
        data = {'code': auth_request.additional}
        response = getattr(auth_request.session, method)(url=action, headers=auth_request.headers, data=data)
        if 'success' not in response.url:
            raise Exception("Wrong response. Can not handle VK error 17. response: %s" % response.content)
        return self.sleep_repeat_call(*args, **kwargs)

    def handle_error_code_500(self, e, *args, **kwargs):
        # strange HTTP error appears sometimes
        return self.sleep_repeat_call(*args, **kwargs)

    def handle_error_code_501(self, e, *args, **kwargs):
        # strange HTTP error appears sometimes
        return self.sleep_repeat_call(*args, **kwargs)
def api_call(*args, **kwargs):
    """Module-level convenience wrapper: perform a call via the VkontakteApi singleton."""
    return VkontakteApi().call(*args, **kwargs)
| 43.670455
| 134
| 0.691127
| 493
| 3,843
| 5.190669
| 0.269777
| 0.07034
| 0.049238
| 0.051973
| 0.370457
| 0.370457
| 0.326299
| 0.30129
| 0.30129
| 0.30129
| 0
| 0.005516
| 0.198022
| 3,843
| 87
| 135
| 44.172414
| 0.824789
| 0.054124
| 0
| 0.152542
| 0
| 0
| 0.095645
| 0.01516
| 0
| 0
| 0
| 0.011494
| 0
| 1
| 0.20339
| false
| 0
| 0.067797
| 0.101695
| 0.559322
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
a203b565deba6165f07cc6bcf4f7fb4721ab0c7a
| 85
|
py
|
Python
|
outlier_project/envs/__init__.py
|
bmmlab/Distributional-RL-Tail-Risk
|
cb53bf473810e7a63a5e0602af67ff2aa67637e5
|
[
"MIT"
] | null | null | null |
outlier_project/envs/__init__.py
|
bmmlab/Distributional-RL-Tail-Risk
|
cb53bf473810e7a63a5e0602af67ff2aa67637e5
|
[
"MIT"
] | null | null | null |
outlier_project/envs/__init__.py
|
bmmlab/Distributional-RL-Tail-Risk
|
cb53bf473810e7a63a5e0602af67ff2aa67637e5
|
[
"MIT"
] | 1
|
2021-11-06T09:37:59.000Z
|
2021-11-06T09:37:59.000Z
|
# -*- coding:utf-8 -*-
from outlier_project.envs.leptokurtosis import LeptokurticEnv
| 28.333333
| 61
| 0.776471
| 10
| 85
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012987
| 0.094118
| 85
| 2
| 62
| 42.5
| 0.831169
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
bfab2be98882cd049ab7e24d77adfad40074f10f
| 62
|
py
|
Python
|
code/abc130_a_03.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | 3
|
2019-08-16T16:55:48.000Z
|
2021-04-11T10:21:40.000Z
|
code/abc130_a_03.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | null | null | null |
code/abc130_a_03.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | null | null | null |
# Read two integers and print "0" when the first is below the second,
# otherwise "10".
x, a = (int(tok) for tok in input().split())
if x < a:
    print("0")
else:
    print("10")
| 31
| 32
| 0.548387
| 13
| 62
| 2.615385
| 0.846154
| 0.117647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0.177419
| 62
| 2
| 33
| 31
| 0.607843
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
bfc7559d64d1a2e8f79300840ba156f64d583011
| 81
|
py
|
Python
|
modules/2.79/bpy/types/CompositorNodeFlip.py
|
cmbasnett/fake-bpy-module
|
acb8b0f102751a9563e5b5e5c7cd69a4e8aa2a55
|
[
"MIT"
] | null | null | null |
modules/2.79/bpy/types/CompositorNodeFlip.py
|
cmbasnett/fake-bpy-module
|
acb8b0f102751a9563e5b5e5c7cd69a4e8aa2a55
|
[
"MIT"
] | null | null | null |
modules/2.79/bpy/types/CompositorNodeFlip.py
|
cmbasnett/fake-bpy-module
|
acb8b0f102751a9563e5b5e5c7cd69a4e8aa2a55
|
[
"MIT"
] | null | null | null |
class CompositorNodeFlip:
    """API stub for Blender's CompositorNodeFlip compositor node (fake-bpy-module)."""
    # Flip-axis setting; always None in this stub.
    axis = None
    def update(self):
        # Intentionally empty: the stub only mirrors the real API's signature.
        pass
| 9
| 25
| 0.592593
| 8
| 81
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.345679
| 81
| 8
| 26
| 10.125
| 0.90566
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
44d2bac4226a1cac28e625cd3a7c8d9e154ce538
| 1,154
|
py
|
Python
|
testing_school/math_app/forms.py
|
vv31415926/training_school
|
4839a6bd05b1fc3616a92bd879c80dae58e02aa4
|
[
"MIT"
] | null | null | null |
testing_school/math_app/forms.py
|
vv31415926/training_school
|
4839a6bd05b1fc3616a92bd879c80dae58e02aa4
|
[
"MIT"
] | null | null | null |
testing_school/math_app/forms.py
|
vv31415926/training_school
|
4839a6bd05b1fc3616a92bd879c80dae58e02aa4
|
[
"MIT"
] | null | null | null |
from django import forms
from .models import *
class TaskForm( forms.ModelForm ):
    """ModelForm for editing an existing Task."""
    #test = forms.BooleanField()
    #question = forms.CharField( widget=forms.Textarea(attrs={'cols': 60, 'rows': 3}))
    class Meta:
        model=Task
        fields=['numtask', 'variant', 'question', 'comment', 'numclass','level','theme','img']
        # Render the question as a small multi-line textarea instead of a single-line input.
        widgets = { 'question': forms.Textarea(attrs={'cols': 35, 'rows':5}) }
class NewTaskForm( forms.ModelForm ):
    """ModelForm for creating a Task.

    NOTE(review): currently field-for-field identical to TaskForm — consider
    consolidating if they are not expected to diverge.
    """
    class Meta:
        model=Task
        fields=['numtask', 'variant', 'question', 'comment', 'numclass','level','theme','img']
        widgets = { 'question': forms.Textarea(attrs={'cols': 35, 'rows':5}) }
class NewVersionForm( forms.ModelForm ):
    """ModelForm for creating an answer Version (answer text + correctness flag)."""
    class Meta:
        model=Version
        fields=['answer', 'correct']
        #fields = ['npp', 'answer', 'correct', 'task']
        #widgets = {'task': forms.Textarea(attrs={'cols': 35, 'rows': 5})}
class VersionForm( forms.ModelForm ):
    """ModelForm for editing an existing answer Version."""
    class Meta:
        model=Version
        fields=['answer', 'correct' ]
class TaskUsersForm( forms.Form ):
    """Plain (non-model) form with a single free-text 'question' field."""
    question = forms.CharField( widget=forms.Textarea(attrs={'cols': 60, 'rows': 3}) )
| 34.969697
| 94
| 0.607452
| 124
| 1,154
| 5.653226
| 0.33871
| 0.092725
| 0.128388
| 0.156919
| 0.736091
| 0.71612
| 0.71612
| 0.71612
| 0.667618
| 0.513552
| 0
| 0.016411
| 0.207972
| 1,154
| 32
| 95
| 36.0625
| 0.750547
| 0.188908
| 0
| 0.636364
| 0
| 0
| 0.178112
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.545455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
44dc3aa81669f6ee82ce7c263f2faede5348f903
| 92
|
py
|
Python
|
2015/01/sp/graphic_config.py
|
nprapps/graphics-archive
|
97b0ef326b46a959df930f5522d325e537f7a655
|
[
"FSFAP"
] | 14
|
2015-05-08T13:41:51.000Z
|
2021-02-24T12:34:55.000Z
|
graphic_templates/table/graphic_config.py
|
tophtucker/dailygraphics
|
abc8fa7fb0e4d15800bb3edcf2c864fe98f40197
|
[
"MIT"
] | null | null | null |
graphic_templates/table/graphic_config.py
|
tophtucker/dailygraphics
|
abc8fa7fb0e4d15800bb3edcf2c864fe98f40197
|
[
"MIT"
] | 7
|
2015-04-04T04:45:54.000Z
|
2021-02-18T11:12:48.000Z
|
#!/usr/bin/env python
# Key of the Google Doc this graphic pulls its copy text from.
COPY_GOOGLE_DOC_KEY = '1ciRc--h8HuBpQzMebVygC4x_y9dvKxp6OA45ccRrIX4'
| 23
| 68
| 0.826087
| 11
| 92
| 6.545455
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 0.065217
| 92
| 3
| 69
| 30.666667
| 0.744186
| 0.217391
| 0
| 0
| 0
| 0
| 0.619718
| 0.619718
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
44dc50b2d392fc02424a77fb43250d2cb16dd5f3
| 320
|
py
|
Python
|
bridge/library_v2.py
|
rlelito/DesignPatterns
|
4e59442a10c1407ed4d9cdceea790263c30223b3
|
[
"MIT"
] | null | null | null |
bridge/library_v2.py
|
rlelito/DesignPatterns
|
4e59442a10c1407ed4d9cdceea790263c30223b3
|
[
"MIT"
] | null | null | null |
bridge/library_v2.py
|
rlelito/DesignPatterns
|
4e59442a10c1407ed4d9cdceea790263c30223b3
|
[
"MIT"
] | null | null | null |
from library import Library
from gl2 import GL2
class LibraryV2(Library):
    """Bridge-pattern implementor: the Library drawing interface backed by GL2."""
    @staticmethod
    def draw_line(x1: float, x2: float, y1: float, y2: float) -> None:
        # Delegates to GL2's line primitive; note the argument order is
        # (x1, x2, y1, y2), matching this method's own signature.
        GL2.drawing_line(x1, x2, y1, y2)
    @staticmethod
    def draw_circle(x: float, y: float, r: float) -> None:
        # Delegates to GL2's circle primitive (center x, y and radius r).
        GL2.drawing_circle(x, y, r)
| 24.615385
| 70
| 0.65
| 48
| 320
| 4.25
| 0.4375
| 0.147059
| 0.186275
| 0.186275
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052846
| 0.23125
| 320
| 12
| 71
| 26.666667
| 0.776423
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.222222
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
44ed9b2e1614232c3586ba073cb70ee7b341d76d
| 474
|
py
|
Python
|
2059.py
|
heltonricardo/URI
|
160cca22d94aa667177c9ebf2a1c9864c5e55b41
|
[
"MIT"
] | 6
|
2021-04-13T00:33:43.000Z
|
2022-02-10T10:23:59.000Z
|
2059.py
|
heltonricardo/URI
|
160cca22d94aa667177c9ebf2a1c9864c5e55b41
|
[
"MIT"
] | null | null | null |
2059.py
|
heltonricardo/URI
|
160cca22d94aa667177c9ebf2a1c9864c5e55b41
|
[
"MIT"
] | 3
|
2021-03-23T18:42:24.000Z
|
2022-02-10T10:24:07.000Z
|
# Reads five integers (p, j1, j2, r, a) from one space-separated line and
# prints which player wins.  r/a are special-move flags; when neither is
# set the winner is decided by the parity of j1 + j2 against the guess p.
p, j1, j2, r, a = (int(tok) for tok in input().split(' '))

ganha = 'Jogador {} ganha!'.format
if r == 0 and a == 0:
    par = (j1 + j2) % 2 == 0
    if par and p == 1:
        print(ganha(1))
    elif not par and p == 0:
        print(ganha(1))
    else:
        print(ganha(2))
elif r == 1 and a == 0:
    print(ganha(1))
elif r == 0 and a == 1:
    print(ganha(1))
elif r == 1 and a == 1:
    print(ganha(2))
| 33.857143
| 65
| 0.556962
| 89
| 474
| 2.966292
| 0.247191
| 0.272727
| 0.19697
| 0.272727
| 0.556818
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0.084239
| 0.223629
| 474
| 13
| 66
| 36.461538
| 0.633152
| 0
| 0
| 0
| 0
| 0
| 0.204641
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.461538
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
785a7d92375ec8281caa7c8a056a6f6d5872fcac
| 92
|
py
|
Python
|
ptest/exception.py
|
KarlGong/ptest
|
a17e82ff64465daeadcc8b73a44650a4044901d4
|
[
"Apache-2.0"
] | 65
|
2015-05-22T12:35:35.000Z
|
2022-01-12T10:14:32.000Z
|
ptest/exception.py
|
KarlGong/ptest
|
a17e82ff64465daeadcc8b73a44650a4044901d4
|
[
"Apache-2.0"
] | 3
|
2019-07-24T23:21:08.000Z
|
2020-10-28T12:14:50.000Z
|
ptest/exception.py
|
KarlGong/ptest
|
a17e82ff64465daeadcc8b73a44650a4044901d4
|
[
"Apache-2.0"
] | 10
|
2017-06-26T06:05:03.000Z
|
2021-12-08T07:50:10.000Z
|
class PTestException(Exception):
    """Root of the ptest exception hierarchy."""
    pass
class ScreenshotError(PTestException):
    """Raised when capturing a screenshot fails."""
    pass
| 13.142857
| 38
| 0.76087
| 8
| 92
| 8.75
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 92
| 6
| 39
| 15.333333
| 0.921053
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
786460ac1f59a78a91fdf6a8c5a09493ffeaa066
| 844
|
py
|
Python
|
scripts/get-python-version.py
|
YuanyuanNi/azure-cli
|
63844964374858bfacd209bfe1b69eb456bd64ca
|
[
"MIT"
] | 3,287
|
2016-07-26T17:34:33.000Z
|
2022-03-31T09:52:13.000Z
|
scripts/get-python-version.py
|
YuanyuanNi/azure-cli
|
63844964374858bfacd209bfe1b69eb456bd64ca
|
[
"MIT"
] | 19,206
|
2016-07-26T07:04:42.000Z
|
2022-03-31T23:57:09.000Z
|
scripts/get-python-version.py
|
YuanyuanNi/azure-cli
|
63844964374858bfacd209bfe1b69eb456bd64ca
|
[
"MIT"
] | 2,575
|
2016-07-26T06:44:40.000Z
|
2022-03-31T22:56:06.000Z
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
###################################################################################################
# This script returns a short string describing the major version of python being used.
#
# While it should be versatile, it was written with the intention that it will help resolve which
# requirements.txt to pull concrete-dependencies from.
###################################################################################################
import sys

# Emit a short tag such as "py2"/"py3" naming the interpreter's major
# version, e.g. so callers can pick the matching requirements file.
major_version = sys.version_info[0]
print('py{}'.format(major_version))
| 52.75
| 99
| 0.420616
| 69
| 844
| 5.130435
| 0.855072
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0013
| 0.088863
| 844
| 15
| 100
| 56.266667
| 0.459038
| 0.67654
| 0
| 0
| 0
| 0
| 0.060606
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 4
|
7872bc600d7a111056017033e8f1ce65b62f17dd
| 149
|
py
|
Python
|
inference/test/test_load.py
|
knowledgevis/wsi_infer_web
|
f5201e767a502fde6f17dd17a70aabcd388d5cc2
|
[
"Apache-2.0"
] | null | null | null |
inference/test/test_load.py
|
knowledgevis/wsi_infer_web
|
f5201e767a502fde6f17dd17a70aabcd388d5cc2
|
[
"Apache-2.0"
] | null | null | null |
inference/test/test_load.py
|
knowledgevis/wsi_infer_web
|
f5201e767a502fde6f17dd17a70aabcd388d5cc2
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from girder.plugin import loadedPlugins
# Smoke test: once Girder has loaded its plugins, 'vxapi' must be among them.
@pytest.mark.plugin('vxapi')
def test_import(server):
    """Assert the 'vxapi' plugin is registered in Girder's loaded-plugin list."""
    assert 'vxapi' in loadedPlugins()
| 16.555556
| 39
| 0.765101
| 19
| 149
| 5.947368
| 0.684211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134228
| 149
| 8
| 40
| 18.625
| 0.875969
| 0
| 0
| 0
| 0
| 0
| 0.067114
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.2
| false
| 0
| 0.6
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
15545f6881d16dff764312e0311b84cb85fb5dda
| 272
|
py
|
Python
|
app/classes/filters.py
|
robjporter/PYTHON-APIServer-1
|
57df8e8189834504b3f473993ae12586ec32d5c9
|
[
"MIT"
] | null | null | null |
app/classes/filters.py
|
robjporter/PYTHON-APIServer-1
|
57df8e8189834504b3f473993ae12586ec32d5c9
|
[
"MIT"
] | null | null | null |
app/classes/filters.py
|
robjporter/PYTHON-APIServer-1
|
57df8e8189834504b3f473993ae12586ec32d5c9
|
[
"MIT"
] | null | null | null |
def reverse_filter(s):
    """Template filter: return the sequence *s* in reverse order."""
    return s[::-1]


def string_trim_upper(value):
    """Strip surrounding whitespace from *value* and upper-case the result."""
    stripped = value.strip()
    return stripped.upper()


def string_trim_lower(value):
    """Strip surrounding whitespace from *value* and lower-case the result."""
    stripped = value.strip()
    return stripped.lower()


def datetimeformat(value, format='%H:%M / %d-%m-%Y'):
    """Render the datetime-like *value* via strftime with *format*.

    Default layout is 'HH:MM / DD-MM-YYYY'.
    """
    return value.strftime(format)
| 20.923077
| 55
| 0.647059
| 38
| 272
| 4.5
| 0.5
| 0.192982
| 0.152047
| 0.245614
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004525
| 0.1875
| 272
| 12
| 56
| 22.666667
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0.059041
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
155dc4dff676e63bf0854d45520b57b5a8ed59e1
| 117
|
py
|
Python
|
run.py
|
avagut/mheshimiwa-api
|
b0a22be36646352e255516954655823319c7cd42
|
[
"MIT"
] | null | null | null |
run.py
|
avagut/mheshimiwa-api
|
b0a22be36646352e255516954655823319c7cd42
|
[
"MIT"
] | null | null | null |
run.py
|
avagut/mheshimiwa-api
|
b0a22be36646352e255516954655823319c7cd42
|
[
"MIT"
] | null | null | null |
"""Create a master runner for the system."""
from api_files.app import app
if __name__ == "__main__":
app.run()
| 19.5
| 44
| 0.683761
| 18
| 117
| 3.944444
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179487
| 117
| 6
| 45
| 19.5
| 0.739583
| 0.324786
| 0
| 0
| 0
| 0
| 0.108108
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
156aec56355b0ba336c7468f9709b543c2bf80d6
| 233
|
py
|
Python
|
problems/extract_the_domain_name_from_a_url.py
|
stereoabuse/codewars
|
d6437afaef38c3601903891b8b9cb0f84c108c54
|
[
"MIT"
] | null | null | null |
problems/extract_the_domain_name_from_a_url.py
|
stereoabuse/codewars
|
d6437afaef38c3601903891b8b9cb0f84c108c54
|
[
"MIT"
] | null | null | null |
problems/extract_the_domain_name_from_a_url.py
|
stereoabuse/codewars
|
d6437afaef38c3601903891b8b9cb0f84c108c54
|
[
"MIT"
] | null | null | null |
## Extract the domain name from a URL
## 5 kyu
## https://www.codewars.com//kata/514a024011ea4fb54200004b
import re
def domain_name(url):
    """Return the bare domain of *url*, stripping scheme and leading 'www.'.

    E.g. 'http://www.example.com/path' -> 'example'.
    """
    # Case-insensitive match; group 2 captures the first label after any
    # scheme / 'www.' prefixes.
    match = re.match(r'(www\.|http[s]?://)*([a-z0-9-]*)\..*$', url, re.I)
    return match.group(2)
| 25.888889
| 81
| 0.609442
| 36
| 233
| 3.916667
| 0.777778
| 0.141844
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112821
| 0.16309
| 233
| 9
| 81
| 25.888889
| 0.610256
| 0.420601
| 0
| 0
| 0
| 0
| 0.308333
| 0.308333
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 4
|
1571985a11cfb1549bf107a161dd9cf89e45b31e
| 433
|
py
|
Python
|
botc/commands/abilities/tb/__init__.py
|
Xinverse/BOTC-Bot
|
1932c649c81a5a1eab735d7abdee0761c2853940
|
[
"MIT"
] | 1
|
2020-06-21T17:20:17.000Z
|
2020-06-21T17:20:17.000Z
|
botc/commands/abilities/tb/__init__.py
|
BlueLenz/Blood-on-the-Clocktower-Storyteller-Discord-Bot
|
1932c649c81a5a1eab735d7abdee0761c2853940
|
[
"MIT"
] | 1
|
2020-07-07T03:47:44.000Z
|
2020-07-07T03:47:44.000Z
|
botc/commands/abilities/tb/__init__.py
|
BlueLenz/Blood-on-the-Clocktower-Storyteller-Discord-Bot
|
1932c649c81a5a1eab735d7abdee0761c2853940
|
[
"MIT"
] | 1
|
2022-02-18T00:42:19.000Z
|
2022-02-18T00:42:19.000Z
|
from .kill import Kill
from .learn import Learn
from .poison import Poison
from .protect import Protect
from .read import Read
from .serve import Serve
from .slay import Slay
def setup(client):
    """Register every ability cog from this package on *client*."""
    # Same registration order as before: Kill .. Slay.
    for ability_cog in (Kill, Learn, Poison, Protect, Read, Serve, Slay):
        client.add_cog(ability_cog(client))
| 25.470588
| 35
| 0.743649
| 66
| 433
| 4.772727
| 0.212121
| 0.266667
| 0.333333
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152425
| 433
| 16
| 36
| 27.0625
| 0.858311
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.466667
| 0
| 0.533333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
1598de54b414b8cfe48948e709b732f0bd8e1fb7
| 141
|
py
|
Python
|
front-end/python/dictionary.py
|
chuducthang77/Summer_code
|
48e0b507c8b4eebcb1eb641b11db8c0eced46c7d
|
[
"MIT"
] | 1
|
2021-05-17T11:50:20.000Z
|
2021-05-17T11:50:20.000Z
|
front-end/python/dictionary.py
|
chuducthang77/Summer_code
|
48e0b507c8b4eebcb1eb641b11db8c0eced46c7d
|
[
"MIT"
] | null | null | null |
front-end/python/dictionary.py
|
chuducthang77/Summer_code
|
48e0b507c8b4eebcb1eb641b11db8c0eced46c7d
|
[
"MIT"
] | null | null | null |
# Demo of dict.get(): present key returns its value, missing key the default.
dictionary = {
    "CSS": "101",
    "Python": ["101", "201", "301"]
}
css_course = dictionary.get("CSS", None)
html_course = dictionary.get("HTML", None)
print(css_course)
print(html_course)
| 23.5
| 35
| 0.58156
| 17
| 141
| 4.823529
| 0.588235
| 0.365854
| 0.439024
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 0.148936
| 141
| 6
| 36
| 23.5
| 0.583333
| 0
| 0
| 0
| 0
| 0
| 0.197183
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
15a6a7d8bcabeff9f632073ce95646d59e1471da
| 130
|
py
|
Python
|
sort/abstract_sort.py
|
vasili-byl/algorithms
|
4e37609ab9b724e140cfec4b01495a0952d28724
|
[
"MIT"
] | 1
|
2020-05-02T13:40:10.000Z
|
2020-05-02T13:40:10.000Z
|
sort/abstract_sort.py
|
vasili-byl/algorithms
|
4e37609ab9b724e140cfec4b01495a0952d28724
|
[
"MIT"
] | null | null | null |
sort/abstract_sort.py
|
vasili-byl/algorithms
|
4e37609ab9b724e140cfec4b01495a0952d28724
|
[
"MIT"
] | null | null | null |
class Sort(object):
    """Abstract base for sorting strategies.

    Subclasses implement ``__call__`` to sort ``array`` between
    ``left_bound`` and ``right_bound`` (inclusive/exclusive semantics are
    defined by the concrete subclass -- not visible here).
    """
    def __call__(self, array, left_bound, right_bound):
        """Sort *array* within the given bounds; must be overridden.

        Raises
        ------
        NotImplementedError
            Always, in this base class. NotImplementedError subclasses
            RuntimeError, so existing callers catching RuntimeError
            keep working.
        """
        raise NotImplementedError("Not implemented yet!")
| 32.5
| 55
| 0.707692
| 17
| 130
| 5.058824
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184615
| 130
| 3
| 56
| 43.333333
| 0.811321
| 0
| 0
| 0
| 0
| 0
| 0.176923
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
ec594e54cffa4a807267a88cf9a2a82205f8294d
| 160
|
py
|
Python
|
tests_here/test_open_browser.py
|
workflowsconnectortest/test_add_user_2_repo
|
f72ab43ad309f34347afdc4d93b3f93afe5f1737
|
[
"Apache-2.0"
] | null | null | null |
tests_here/test_open_browser.py
|
workflowsconnectortest/test_add_user_2_repo
|
f72ab43ad309f34347afdc4d93b3f93afe5f1737
|
[
"Apache-2.0"
] | 33
|
2020-11-21T14:04:27.000Z
|
2021-04-12T16:46:23.000Z
|
tests_here/test_open_browser.py
|
workflowsconnectortest/test_add_user_2_repo
|
f72ab43ad309f34347afdc4d93b3f93afe5f1737
|
[
"Apache-2.0"
] | null | null | null |
""" TEST """
import pytest
@pytest.mark.test
def test_open_browser(driver_session):
"""
TEST
:param driver_session:
:return:
"""
pass
| 12.307692
| 38
| 0.60625
| 18
| 160
| 5.166667
| 0.666667
| 0.27957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25625
| 160
| 12
| 39
| 13.333333
| 0.781513
| 0.25625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
ec5b114aa276d4fe5d81757d74aa88f55e87c536
| 35
|
py
|
Python
|
python-packages/order_utils/test/__init__.py
|
bryan-liu-nova/ZRXFork
|
f29578ea7e37bf3b6cea9180df7452dbdb6fd788
|
[
"Apache-2.0"
] | 1,075
|
2018-03-04T13:18:52.000Z
|
2022-03-29T06:33:59.000Z
|
python-packages/order_utils/test/__init__.py
|
bryan-liu-nova/ZRXFork
|
f29578ea7e37bf3b6cea9180df7452dbdb6fd788
|
[
"Apache-2.0"
] | 1,873
|
2018-03-03T14:37:53.000Z
|
2021-06-26T03:02:12.000Z
|
python-packages/order_utils/test/__init__.py
|
bryan-liu-nova/ZRXFork
|
f29578ea7e37bf3b6cea9180df7452dbdb6fd788
|
[
"Apache-2.0"
] | 500
|
2018-03-03T20:39:43.000Z
|
2022-03-21T21:01:55.000Z
|
"""Tests of zero_x.order_utils."""
| 17.5
| 34
| 0.685714
| 6
| 35
| 3.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 35
| 1
| 35
| 35
| 0.6875
| 0.8
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
01a0085fa56bd63c42d92735e96ec1cd1e61e998
| 113
|
py
|
Python
|
sksurgerybard/__init__.py
|
SciKit-Surgery/scikit-surgerybard
|
4ac2ea28acb150437361c9abd53db3e3bba6d803
|
[
"BSD-3-Clause"
] | 1
|
2021-06-30T15:55:21.000Z
|
2021-06-30T15:55:21.000Z
|
sksurgerybard/__init__.py
|
UCL/scikit-surgerybard
|
7ebe4d15d3d3fa67218424c9f737a9e8d93bfbf3
|
[
"BSD-3-Clause"
] | 68
|
2020-04-30T07:29:33.000Z
|
2022-01-20T09:47:54.000Z
|
sksurgerybard/__init__.py
|
SciKit-Surgery/scikit-surgerybard
|
4ac2ea28acb150437361c9abd53db3e3bba6d803
|
[
"BSD-3-Clause"
] | 1
|
2021-06-30T15:55:48.000Z
|
2021-06-30T15:55:48.000Z
|
# coding=utf-8
"""scikit-surgerybard package root: exposes the installed version string."""
from . import _version
# Resolved at import time; get_versions() looks like the versioneer API --
# confirm against the generated _version module.
__version__ = _version.get_versions()['version']
| 18.833333
| 48
| 0.734513
| 13
| 113
| 5.846154
| 0.769231
| 0.368421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009804
| 0.097345
| 113
| 5
| 49
| 22.6
| 0.735294
| 0.283186
| 0
| 0
| 0
| 0
| 0.093333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
01a3f9b3ca481b3b5647bf3654dc83a547551fa3
| 205
|
py
|
Python
|
Company_Based_Questions/Goldman Sachs/Check If two arrays with same length are equal.py
|
Satyam-Bhalla/Competitive-Coding
|
5814f5f60572f1e76495efe751b94bf4d2845198
|
[
"MIT"
] | 1
|
2021-12-09T10:36:48.000Z
|
2021-12-09T10:36:48.000Z
|
Company_Based_Questions/Goldman Sachs/Check If two arrays with same length are equal.py
|
Satyam-Bhalla/Competitive-Coding
|
5814f5f60572f1e76495efe751b94bf4d2845198
|
[
"MIT"
] | null | null | null |
Company_Based_Questions/Goldman Sachs/Check If two arrays with same length are equal.py
|
Satyam-Bhalla/Competitive-Coding
|
5814f5f60572f1e76495efe751b94bf4d2845198
|
[
"MIT"
] | null | null | null |
# For each test case: read a length line (unused beyond consuming input),
# then two integer lists; print 1 when they are equal as sets, else 0.
test_cases = int(input())
for _ in range(test_cases):
    _length = int(input())  # declared length; not otherwise needed
    first = set(map(int, input().split()))
    second = set(map(int, input().split()))
    print(1 if first == second else 0)
| 22.777778
| 44
| 0.497561
| 32
| 205
| 3.15625
| 0.5625
| 0.316832
| 0.19802
| 0.257426
| 0.455446
| 0.455446
| 0
| 0
| 0
| 0
| 0
| 0.040268
| 0.273171
| 205
| 9
| 45
| 22.777778
| 0.637584
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.222222
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
01a805d2061ecb1ff063d3a34d0252b34d06a9e6
| 114
|
py
|
Python
|
src/qdmdealer/dataset.py
|
CaoRX/sdd
|
4aa6612a195f579d6bef26a3e5cd9f1ce2c66e7c
|
[
"MIT"
] | null | null | null |
src/qdmdealer/dataset.py
|
CaoRX/sdd
|
4aa6612a195f579d6bef26a3e5cd9f1ce2c66e7c
|
[
"MIT"
] | null | null | null |
src/qdmdealer/dataset.py
|
CaoRX/sdd
|
4aa6612a195f579d6bef26a3e5cd9f1ce2c66e7c
|
[
"MIT"
] | null | null | null |
import qdmdealer.settingLoader as settingLoader
# Print the loaded settings when run as a script; importing has no effect.
if __name__ == '__main__':
    print(settingLoader.loadSetting())
| 28.5
| 47
| 0.789474
| 11
| 114
| 7.454545
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114035
| 114
| 4
| 48
| 28.5
| 0.811881
| 0
| 0
| 0
| 0
| 0
| 0.069565
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
01e205f1f3ae63b7f86c90a692e2672bd4818afd
| 168
|
py
|
Python
|
python/pr3.py
|
Surya-06/My-Solutions
|
58f7c7f1a0a3a28f25eff22d03a09cb2fbcf7806
|
[
"MIT"
] | null | null | null |
python/pr3.py
|
Surya-06/My-Solutions
|
58f7c7f1a0a3a28f25eff22d03a09cb2fbcf7806
|
[
"MIT"
] | null | null | null |
python/pr3.py
|
Surya-06/My-Solutions
|
58f7c7f1a0a3a28f25eff22d03a09cb2fbcf7806
|
[
"MIT"
] | null | null | null |
# NOTE: Python 2 source (print statement below); will not run under Python 3.
import mechanize
# BeautifulSoup is imported but never used in this snippet.
from bs4 import BeautifulSoup
# Fetch a Wikipedia article through a stateful mechanize browser and dump it.
br = mechanize.Browser()
response = br.open("https://en.wikipedia.org/wiki/Simha_(film)")
print response.text
| 18.666667
| 65
| 0.732143
| 22
| 168
| 5.545455
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006993
| 0.14881
| 168
| 8
| 66
| 21
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0.2625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.4
| null | null | 0.2
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
01f1bcd751ad568d22d189a0f876460dc8800ab4
| 339
|
py
|
Python
|
sionna/fec/__init__.py
|
NVlabs/sionna
|
488e6c3ff6ff2b3313d0ca0f94e4247b8dd6ff35
|
[
"Apache-2.0"
] | 163
|
2022-03-22T19:47:47.000Z
|
2022-03-31T23:56:45.000Z
|
sionna/fec/__init__.py
|
Maryammhsnv/sionna
|
527d0f7866b379afffad34a6bef7ed3bf6f33ad2
|
[
"Apache-2.0"
] | 2
|
2022-03-24T12:43:07.000Z
|
2022-03-29T07:17:16.000Z
|
sionna/fec/__init__.py
|
Maryammhsnv/sionna
|
527d0f7866b379afffad34a6bef7ed3bf6f33ad2
|
[
"Apache-2.0"
] | 19
|
2022-03-23T02:31:22.000Z
|
2022-03-30T06:35:12.000Z
|
#
# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
"""FEC sub-package of the Sionna library"""
from . import ldpc
from . import polar
from . import conv
from . import crc
from . import scrambling
from . import interleaving
from . import utils
| 24.214286
| 103
| 0.755162
| 46
| 339
| 5.565217
| 0.717391
| 0.273438
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034843
| 0.153392
| 339
| 13
| 104
| 26.076923
| 0.857143
| 0.519174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
bf096468668eb0ed6554f281ce1ce49d63e79f21
| 2,803
|
py
|
Python
|
magni/imaging/dictionaries/_visualisations.py
|
SIP-AAU/Magni
|
6328dc98a273506f433af52e6bd394754a844550
|
[
"BSD-2-Clause"
] | 42
|
2015-02-09T10:17:26.000Z
|
2021-12-21T09:38:04.000Z
|
magni/imaging/dictionaries/_visualisations.py
|
SIP-AAU/Magni
|
6328dc98a273506f433af52e6bd394754a844550
|
[
"BSD-2-Clause"
] | 3
|
2015-03-20T12:00:40.000Z
|
2015-03-20T12:01:16.000Z
|
magni/imaging/dictionaries/_visualisations.py
|
SIP-AAU/Magni
|
6328dc98a273506f433af52e6bd394754a844550
|
[
"BSD-2-Clause"
] | 14
|
2015-04-28T03:08:32.000Z
|
2021-07-24T13:29:24.000Z
|
"""
..
Copyright (c) 2014-2017, Magni developers.
All rights reserved.
See LICENSE.rst for further information.
Module providing functionality for visualising dictionary coefficients.
Routine listings
----------------
visualise_DCT(shape)
Function for visualising DCT coefficients.
visualise_DFT(shape)
Function for visualising DFT coefficients.
"""
from __future__ import division
import numpy as np
from magni.imaging import vec2mat as _vec2mat
def visualise_DCT(shape):
    """Return utilities for visualising DCT coefficients.

    Parameters
    ----------
    shape : tuple
        The (height, width) of the 2D DCT being visualised.

    Returns
    -------
    display_coefficients : Function
        Maps a coefficient vector to log10 of its absolute values,
        reshaped to a (height, width) matrix.
    axes_extent : tuple
        (abcissa_min, abcissa_max, ordinate_min, ordinate_max) of the
        2D coefficient plane.
    """
    height, width = shape

    def display_coefficients(x):
        # Reshape the coefficient vector, then show log-magnitudes.
        coefficient_matrix = _vec2mat(x, (height, width))
        return np.log10(np.abs(coefficient_matrix))

    return display_coefficients, (0, width - 1, height - 1, 0)
def visualise_DFT(shape):
    """Return utilities for visualising DFT coefficients.

    Parameters
    ----------
    shape : tuple
        The (height, width) of the 2D DFT being visualised.

    Returns
    -------
    display_coefficients : Function
        Maps a coefficient vector to log10 of its absolute values,
        reshaped to (height, width), fftshifted, and flipped up/down.
    axes_extent : tuple
        (abcissa_min, abcissa_max, ordinate_min, ordinate_max) of the
        2D coefficient plane, centred on the zero frequency.
    """
    height, width = shape

    def display_coefficients(x):
        coefficient_matrix = _vec2mat(x, (height, width))
        log_magnitudes = np.log10(np.abs(coefficient_matrix))
        # Centre the zero frequency, then flip for display orientation.
        return np.flipud(np.fft.fftshift(log_magnitudes))

    axes_extent = (-(width // 2), (width - 1) // 2,
                   -(height // 2), (height - 1) // 2)
    return display_coefficients, axes_extent
| 25.715596
| 78
| 0.677132
| 367
| 2,803
| 5.076294
| 0.27248
| 0.021471
| 0.0365
| 0.060118
| 0.754697
| 0.680623
| 0.680623
| 0.680623
| 0.680623
| 0.680623
| 0
| 0.016997
| 0.244381
| 2,803
| 108
| 79
| 25.953704
| 0.862606
| 0.702818
| 0
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.1875
| 0.125
| 0.6875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
1726192b846dc0fe37fbbd26f7947d5c82bed2e8
| 85
|
py
|
Python
|
eight/three_fred.py
|
frrad/eopi
|
ff5d1c40c721edd16480a98e07fb36f47f2416bf
|
[
"MIT"
] | null | null | null |
eight/three_fred.py
|
frrad/eopi
|
ff5d1c40c721edd16480a98e07fb36f47f2416bf
|
[
"MIT"
] | 7
|
2018-06-04T16:28:49.000Z
|
2018-07-09T01:35:24.000Z
|
eight/three_fred.py
|
frrad/eopi
|
ff5d1c40c721edd16480a98e07fb36f47f2416bf
|
[
"MIT"
] | null | null | null |
def ans(x):
    """Return False when len(x) is exactly 2 or 10, True otherwise."""
    return len(x) not in (2, 10)
| 17
| 35
| 0.505882
| 15
| 85
| 2.866667
| 0.733333
| 0.186047
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054545
| 0.352941
| 85
| 4
| 36
| 21.25
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
173d07d75692e2a3fd44ed162047266be76ba02f
| 44
|
py
|
Python
|
Solutions/problem 10718/python.py
|
Egnima/baekjoon_solution
|
d1da3c70bcfa34a93e049cc2a40fbfe394e7d4c3
|
[
"MIT"
] | null | null | null |
Solutions/problem 10718/python.py
|
Egnima/baekjoon_solution
|
d1da3c70bcfa34a93e049cc2a40fbfe394e7d4c3
|
[
"MIT"
] | null | null | null |
Solutions/problem 10718/python.py
|
Egnima/baekjoon_solution
|
d1da3c70bcfa34a93e049cc2a40fbfe394e7d4c3
|
[
"MIT"
] | null | null | null |
# Print the cheer line exactly twice.
for _ in range(2):
    print('강한친구 대한육군')
| 22
| 22
| 0.590909
| 9
| 44
| 2.888889
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0.227273
| 44
| 2
| 22
| 22
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
1763ea5d77a020976c710a23f2b98ceaa4147cd5
| 1,101
|
py
|
Python
|
thinkific/bundles.py
|
OmniPro-Group/thinkific-python
|
7aeb4b86ad5357f6a078c270416d68b311d107b6
|
[
"MIT"
] | 2
|
2019-12-30T13:47:02.000Z
|
2021-07-03T07:21:37.000Z
|
thinkific/bundles.py
|
OmniPro-Group/thinkific-python
|
7aeb4b86ad5357f6a078c270416d68b311d107b6
|
[
"MIT"
] | 2
|
2021-09-08T10:22:52.000Z
|
2021-09-14T13:39:17.000Z
|
thinkific/bundles.py
|
OmniPro-Group/thinkific-python
|
7aeb4b86ad5357f6a078c270416d68b311d107b6
|
[
"MIT"
] | 3
|
2021-05-28T10:46:34.000Z
|
2022-01-26T03:42:27.000Z
|
from .client import Client
class Bundles:
    """Thin wrapper over the Thinkific ``bundles`` REST endpoints.

    Every method delegates to ``self.__client.request`` and returns
    whatever the client returns for the HTTP call.
    """
    def __init__(self, client):
        # Name-mangled to _Bundles__client; the shared HTTP client.
        self.__client = client
    def retrieve_bundle(self, id: int):
        """GET a single bundle by numeric ``id`` (shadows the builtin)."""
        # NOTE(review): some paths below have a leading '/' and some do not
        # ('/bundles/%s' vs 'bundles/%s/courses') -- confirm the client
        # normalises both forms before unifying them.
        return self.__client.request('get', '/bundles/%s' % id)
    def retrieve_courses_in_bundle(self, id: int, page: int = None,
                                   limit: int = None):
        """GET the courses contained in bundle ``id``, paginated."""
        return self.__client.request('get', 'bundles/%s/courses' % id, params={
            "page": page,
            "limit": limit
        })
    def create_enrollment_in_bundle(self, bundle_id: int, values: dict):
        """POST ``values`` for bundle ``bundle_id``.

        Method name says enrollment but the path targets ``/courses`` --
        confirm against the Thinkific API before relying on either.
        """
        return self.__client.request('post', '/bundles/%s/courses' % bundle_id, data=values)
    def get_enrollments_in_bundle(self, bundle_id: int, page: int = None,
                                  limit: int = None):
        """GET the enrollments of bundle ``bundle_id``, paginated."""
        return self.__client.request('get', 'bundles/%s/enrollments' % bundle_id, params={
            "page": page,
            "limit": limit
        })
    def update_enrollments_in_bundle(self, bundle_id: int, values: dict):
        """PUT ``values`` to update enrollments in ``bundle_id``."""
        return self.__client.request('put', 'bundles/%s/enrollments' % bundle_id, data=values)
| 36.7
| 94
| 0.59673
| 131
| 1,101
| 4.748092
| 0.236641
| 0.11254
| 0.128617
| 0.184887
| 0.662379
| 0.59164
| 0.59164
| 0.379421
| 0.379421
| 0.379421
| 0
| 0
| 0.275204
| 1,101
| 29
| 95
| 37.965517
| 0.779449
| 0
| 0
| 0.363636
| 0
| 0
| 0.114441
| 0.039964
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0
| 0.045455
| 0.227273
| 0.590909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
176b2af669424bcd81acd498dc6ee517c52f48cc
| 127
|
py
|
Python
|
Viewer/cleanupJSON.py
|
WorldViews/Spirals
|
7efe2d145298a53c9d79db73b79428efff1e0908
|
[
"MIT"
] | null | null | null |
Viewer/cleanupJSON.py
|
WorldViews/Spirals
|
7efe2d145298a53c9d79db73b79428efff1e0908
|
[
"MIT"
] | 2
|
2021-02-08T20:23:17.000Z
|
2021-04-30T20:42:00.000Z
|
Viewer/cleanupJSON.py
|
WorldViews/Spirals
|
7efe2d145298a53c9d79db73b79428efff1e0908
|
[
"MIT"
] | 2
|
2016-05-26T20:24:00.000Z
|
2019-08-01T03:11:49.000Z
|
import json

# Load the source document and rewrite it pretty-printed with sorted keys.
# Uses open() with context managers instead of the Python-2-only ``file()``
# builtin (removed in Python 3), and guarantees both handles are closed.
with open("PAL_porter_0.json") as source:
    obj = json.load(source)
with open("PAL_porter.json", "w") as destination:
    json.dump(obj, destination, indent=4, sort_keys=True)
| 21.166667
| 69
| 0.716535
| 23
| 127
| 3.782609
| 0.652174
| 0.16092
| 0.298851
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017241
| 0.086614
| 127
| 5
| 70
| 25.4
| 0.732759
| 0
| 0
| 0
| 0
| 0
| 0.261905
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
1772795acde0374d5d479129fde7ae536c3601ec
| 327
|
py
|
Python
|
app/test.py
|
jerry0chu/Experiment
|
18c70cf00328f8b80c9be1a324ed51c83bd16028
|
[
"MIT"
] | null | null | null |
app/test.py
|
jerry0chu/Experiment
|
18c70cf00328f8b80c9be1a324ed51c83bd16028
|
[
"MIT"
] | null | null | null |
app/test.py
|
jerry0chu/Experiment
|
18c70cf00328f8b80c9be1a324ed51c83bd16028
|
[
"MIT"
] | null | null | null |
from app.lab.handle.handleExpData import handleUploadExpData
# Absolute Windows paths to sample spreadsheets -- machine-specific; these
# only resolve on the original author's machine.
path0 = 'D:\\CodeSpace\\PySpace\\Experiment\\app\\static\\upload\\data.xlsx'
path1='D:\\CodeSpace\\PySpace\\Experiment\\app\\static\\upload\\my.xls'
#print(path1)
# Exercise the upload handler on both the .xlsx and .xls samples; the
# second argument (1) looks like a mode/sheet flag -- confirm in handler.
handleUploadExpData(path0,1)
print('-----------------------------------')
handleUploadExpData(path1,1)
| 40.875
| 76
| 0.691131
| 36
| 327
| 6.277778
| 0.583333
| 0.212389
| 0.150442
| 0.238938
| 0.371681
| 0.371681
| 0.371681
| 0
| 0
| 0
| 0
| 0.022222
| 0.036697
| 327
| 8
| 77
| 40.875
| 0.695238
| 0.036697
| 0
| 0
| 0
| 0
| 0.520635
| 0.520635
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0.166667
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
bded2696f6ac6099d15730ef64ca9489b378507a
| 71
|
py
|
Python
|
receparser/__init__.py
|
stagira13/receparser
|
99524468b5aa41e4e6a5ea900c9873472a06297a
|
[
"MIT"
] | 7
|
2019-11-29T04:55:18.000Z
|
2021-06-05T15:10:29.000Z
|
receparser/__init__.py
|
stagira13/receparser
|
99524468b5aa41e4e6a5ea900c9873472a06297a
|
[
"MIT"
] | 1
|
2019-11-22T05:00:34.000Z
|
2019-11-25T01:18:50.000Z
|
receparser/__init__.py
|
stagira13/receparser
|
99524468b5aa41e4e6a5ea900c9873472a06297a
|
[
"MIT"
] | null | null | null |
""" ReceParser"""
from receparser.core import (
MonthlyRece, Rece
)
| 11.833333
| 29
| 0.690141
| 7
| 71
| 7
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169014
| 71
| 5
| 30
| 14.2
| 0.830508
| 0.140845
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
bdef69edbb8d8fc73748e174288ac8eda71b56b8
| 187
|
py
|
Python
|
brick_wall_build/tests/build_scripts/annotation_misuse_2.py
|
IndeedTokyo-BrickWall/brick-wall-build
|
b3c38f671e1913e6607285cd3e8f2b5240fd4aa7
|
[
"MIT"
] | null | null | null |
brick_wall_build/tests/build_scripts/annotation_misuse_2.py
|
IndeedTokyo-BrickWall/brick-wall-build
|
b3c38f671e1913e6607285cd3e8f2b5240fd4aa7
|
[
"MIT"
] | null | null | null |
brick_wall_build/tests/build_scripts/annotation_misuse_2.py
|
IndeedTokyo-BrickWall/brick-wall-build
|
b3c38f671e1913e6607285cd3e8f2b5240fd4aa7
|
[
"MIT"
] | null | null | null |
from brick_wall_build import task
# Deliberate-misuse fixture (see the file path: annotation_misuse_2.py):
# ``android`` depends on ``html``, which is a plain function rather than a
# @task, so the build tool is expected to reject this script. Do not "fix".
@task()
def clean():
    pass
# Should be marked as task.
def html():
    pass
# References a non task.
@task(clean,html)
def android():
    pass
| 12.466667
| 33
| 0.652406
| 28
| 187
| 4.285714
| 0.642857
| 0.133333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.240642
| 187
| 14
| 34
| 13.357143
| 0.84507
| 0.256684
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.111111
| 0
| 0.444444
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
bdfddcb4da4e24ee1539b0b2ab902bf1539d81cb
| 149
|
py
|
Python
|
{{cookiecutter.repo_name}}/tasks.py
|
pylabs/cookiecutter-pyramid-starter
|
d5dbf20ee2d29080eefad277f4488c6607e1a846
|
[
"MIT"
] | null | null | null |
{{cookiecutter.repo_name}}/tasks.py
|
pylabs/cookiecutter-pyramid-starter
|
d5dbf20ee2d29080eefad277f4488c6607e1a846
|
[
"MIT"
] | null | null | null |
{{cookiecutter.repo_name}}/tasks.py
|
pylabs/cookiecutter-pyramid-starter
|
d5dbf20ee2d29080eefad277f4488c6607e1a846
|
[
"MIT"
] | null | null | null |
from invoke import task
@task
def test(c):
    """Run the project's test suite with pytest."""
    c.run('pytest')
@task
def test_coverage(c):
    """Run pytest with coverage for the generated package.

    ``{{ cookiecutter.repo_name }}`` is a template placeholder rendered
    by cookiecutter when the project is generated -- leave it verbatim.
    """
    c.run('pytest --cov={{ cookiecutter.repo_name }}')
| 13.545455
| 54
| 0.651007
| 23
| 149
| 4.130435
| 0.608696
| 0.147368
| 0.231579
| 0.231579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.174497
| 149
| 10
| 55
| 14.9
| 0.772358
| 0
| 0
| 0.285714
| 0
| 0
| 0.315436
| 0.147651
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0
| 0.428571
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.