hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
066369768f8c69face2d04f572a4ed8e788a0565 | 30 | py | Python | {{cookiecutter.project_slug}}/{{cookiecutter.project_root}}/api/__init__.py | justinekizhak/python-fastapi-baseline | 253f506e7b7f617065f0346f0a7d6180a7eca415 | [
"MIT"
] | null | null | null | {{cookiecutter.project_slug}}/{{cookiecutter.project_root}}/api/__init__.py | justinekizhak/python-fastapi-baseline | 253f506e7b7f617065f0346f0a7d6180a7eca415 | [
"MIT"
] | null | null | null | {{cookiecutter.project_slug}}/{{cookiecutter.project_root}}/api/__init__.py | justinekizhak/python-fastapi-baseline | 253f506e7b7f617065f0346f0a7d6180a7eca415 | [
"MIT"
] | null | null | null | from .v1 import api as api_v1
| 15 | 29 | 0.766667 | 7 | 30 | 3.142857 | 0.714286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.083333 | 0.2 | 30 | 1 | 30 | 30 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
0691757b4299307cb5ed5ccd84a3e29221b0e316 | 220 | py | Python | masonite/helpers/view_helpers.py | NinjasCL-labs/masonite-core | 14053038891bd19f96303463bbe153b0c4819271 | [
"MIT"
] | null | null | null | masonite/helpers/view_helpers.py | NinjasCL-labs/masonite-core | 14053038891bd19f96303463bbe153b0c4819271 | [
"MIT"
] | null | null | null | masonite/helpers/view_helpers.py | NinjasCL-labs/masonite-core | 14053038891bd19f96303463bbe153b0c4819271 | [
"MIT"
] | null | null | null | def set_request_method(method_type):
return "<input type='hidden' name='__method' value='{0}'>".format(method_type)
def back(location):
return "<input type='hidden' name='__back' value='{0}'>".format(location)
| 31.428571 | 82 | 0.7 | 30 | 220 | 4.866667 | 0.466667 | 0.136986 | 0.205479 | 0.287671 | 0.342466 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010204 | 0.109091 | 220 | 6 | 83 | 36.666667 | 0.734694 | 0 | 0 | 0 | 0 | 0 | 0.436364 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 6 |
2353d0314607faf1dbed7145d2f81b15d6294204 | 13,901 | py | Python | tests/commands.py | JakubTesarek/dbgr | fc55cee5d5a69f3fa691579bc7d2627f51cbca03 | [
"Apache-2.0"
] | 8 | 2019-05-23T19:45:46.000Z | 2021-02-08T17:21:21.000Z | tests/commands.py | JakubTesarek/dbgr | fc55cee5d5a69f3fa691579bc7d2627f51cbca03 | [
"Apache-2.0"
] | 86 | 2019-05-13T14:20:20.000Z | 2019-06-19T11:48:59.000Z | tests/commands.py | JakubTesarek/dbgr | fc55cee5d5a69f3fa691579bc7d2627f51cbca03 | [
"Apache-2.0"
] | 1 | 2021-02-08T17:21:22.000Z | 2021-02-08T17:21:22.000Z | import pytest
from argparse import Namespace
from dbgr import meta
from tests.conftest import escape_ansi, attrdict, mock_request
from dbgr import commands
from dbgr.commands import (
argument_parser, interactive_command, request_command, list_command,
environments_command, version_command, prepare_and_execute_request
)
@pytest.mark.parametrize('args', [
['-v'], ['--version']
])
def test_version_command(args):
res = argument_parser().parse_args(args)
assert isinstance(res, Namespace)
assert res.version == True
@pytest.mark.parametrize('args', [
['-v'], ['--version']
])
def test_version_command(args):
res = argument_parser().parse_args(args)
assert isinstance(res, Namespace)
assert res.version == True
@pytest.mark.parametrize('args, namespace', [
(['i'], {'use_defaults': False, 'env': 'default'}),
(['interactive'], {'use_defaults': False, 'env': 'default'}),
(['int'], {'use_defaults': False, 'env': 'default'}),
(['i', '-d'], {'use_defaults': True, 'env': 'default'}),
(['i', '--use-defaults'], {'use_defaults': True, 'env': 'default'}),
(['i', '-e', 'test'], {'use_defaults': False, 'env': 'test'}),
(['i', '--env', 'test'], {'use_defaults': False, 'env': 'test'}),
(['i', '--env', 'test', '-d'], {'use_defaults': True, 'env': 'test'})
])
def test_interactive_command(args, namespace):
res = argument_parser().parse_args(args)
assert isinstance(res, Namespace)
assert res.func == interactive_command
for key, value in namespace.items():
assert getattr(res, key) == value
@pytest.mark.parametrize('args, namespace', [
(['r', 'test'], {
'request': 'test',
'use_defaults': False,
'env': 'default',
'arguments': []
}),
(['req', 'test'], {
'request': 'test',
'use_defaults': False,
'env': 'default',
'arguments': []
}),
(['request', 'test'], {
'request': 'test',
'use_defaults': False,
'env': 'default',
'arguments': []
}),
(['request', 'test', '-d'], {
'request': 'test',
'use_defaults': True,
'env': 'default',
'arguments': []
}),
(['request', 'test', '--use-defaults'], {
'request': 'test',
'use_defaults': True,
'env': 'default',
'arguments': []
}),
(['request', 'test', '-e', 'test2'], {
'request': 'test',
'use_defaults': False,
'env': 'test2',
'arguments': []
}),
(['request', 'test', '--env', 'test2'], {
'request': 'test',
'use_defaults': False,
'env': 'test2',
'arguments': []
}),
(['request', 'test', '-a', 'x=1', '-a', 'y=2'], {
'request': 'test',
'use_defaults': False,
'env': 'default',
'arguments': ['x=1', 'y=2']
}),
(['request', 'test', '--arg', 'x=1', '--arg', 'y=2'], {
'request': 'test',
'use_defaults': False,
'env': 'default',
'arguments': ['x=1', 'y=2']
}),
(['request', 'test', '--use-defaults', '-a', 'x=1', '-a', 'y=2'], {
'request': 'test',
'use_defaults': True,
'env': 'default',
'arguments': ['x=1', 'y=2']
}),
(['request', 'test', '-e', 'test2', '-a', 'x=1', '-a', 'y=2'], {
'request': 'test',
'use_defaults': False,
'env': 'test2',
'arguments': ['x=1', 'y=2']
})
])
def test_request_command(args, namespace):
res = argument_parser().parse_args(args)
assert isinstance(res, Namespace)
assert res.func == request_command
for key, value in namespace.items():
assert getattr(res, key) == value
@pytest.mark.parametrize('args, namespace', [
(['l'], {'module': None}),
(['list'], {'module': None}),
(['list', 'module'], {'module': 'module'}),
(['list', 'module:request'], {'module': 'module:request'}),
(['list', ':request'], {'module': ':request'})
])
def test_list_command(args, namespace):
res = argument_parser().parse_args(args)
assert isinstance(res, Namespace)
assert res.func == list_command
for key, value in namespace.items():
assert getattr(res, key) == value
@pytest.mark.parametrize('args', [
['e'], ['envs'], ['list-environments']
])
def test_environments_command(args):
res = argument_parser().parse_args(args)
assert isinstance(res, Namespace)
assert res.func == environments_command
def test_version_command(capsys):
version_command()
captured = capsys.readouterr()
assert escape_ansi(captured.out) == f'{meta.__version__}\n'
@pytest.mark.asyncio
async def test_environments_command_list_environments(capsys, monkeypatch):
monkeypatch.setattr(commands, 'get_environments', lambda: ['default', 'another'])
await environments_command(attrdict({'environment': None}))
captured = capsys.readouterr()
assert escape_ansi(captured.out) == '- default\n- another\n'
@pytest.mark.asyncio
async def test_environments_command_list_environment_variables(capsys, monkeypatch):
class MockedEnvironment:
def __init__(self, env):
self.data = {
'section1': [
('attr1', 'value1'),
('attr2', 'value2')
],
'section2': [
('attr1', 'value1')
]
}
def items(self, section):
return self.data[section]
def sections(self):
return self.data.keys()
monkeypatch.setattr(commands, 'Environment', MockedEnvironment)
await environments_command(attrdict({'environment': 'default'}))
captured = capsys.readouterr()
assert escape_ansi(captured.out) == '''section1
- attr1: value1
- attr2: value2
section2
- attr1: value1
'''
@pytest.mark.asyncio
async def test_interactive_command(capsys, monkeypatch):
async def mocked_prepare_execute(req, arg):
print(req)
inputs = ['req1', 'req2', '\x03']
def mocked_input(prompt):
i = inputs.pop(0)
if i == '\x03':
raise SystemExit()
return i
monkeypatch.setattr('builtins.input', mocked_input)
monkeypatch.setattr(commands, 'prepare_and_execute_request', mocked_prepare_execute)
with pytest.raises(SystemExit):
await interactive_command({})
captured = capsys.readouterr()
assert escape_ansi(captured.out) == '''Dbgr interactive mode; press ^C to exit.
req1
req2
'''
@pytest.mark.asyncio
async def test_interactive_command_strips_whitespaces(capsys, monkeypatch):
async def mocked_prepare_execute(req, arg):
assert req == 'request'
print(req)
inputs = ['request ', '\x03']
def mocked_input(prompt):
i = inputs.pop(0)
if i == '\x03':
raise SystemExit()
return i
monkeypatch.setattr('builtins.input', mocked_input)
monkeypatch.setattr(commands, 'prepare_and_execute_request', mocked_prepare_execute)
with pytest.raises(SystemExit):
await interactive_command({})
captured = capsys.readouterr()
assert escape_ansi(captured.out) == '''Dbgr interactive mode; press ^C to exit.
request
'''
@pytest.mark.asyncio
async def test_list_command_all(monkeypatch, capsys):
requests = {
'module1': {
'req1': mock_request(name='req1', module='module1'),
'req2': mock_request(name='req2', module='module1')
},
'module2': {
'req1': mock_request(name='req1', module='module2'),
'req3': mock_request(name='req3', module='module2')
}
}
monkeypatch.setattr(commands, 'get_requests', lambda: requests)
await list_command(attrdict({'module': None}))
captured = capsys.readouterr()
assert escape_ansi(captured.out) == '''module1:
- req1
- req2
module2:
- req1
- req3
'''
@pytest.mark.asyncio
async def test_list_command_filter_module(monkeypatch, capsys):
requests = {
'module1': {
'req1': mock_request(name='req1', module='module1'),
'req2': mock_request(name='req2', module='module1')
},
'module2': {
'req1': mock_request(name='req1', module='module2'),
'req3': mock_request(name='req3', module='module2')
}
}
monkeypatch.setattr(commands, 'get_requests', lambda: requests)
await list_command(attrdict({'module': 'module1'}))
captured = capsys.readouterr()
assert escape_ansi(captured.out) == '''module1:
- req1
- req2
'''
@pytest.mark.asyncio
async def test_list_command_filter_module_request(monkeypatch, capsys):
requests = {
'module1': {
'req1': mock_request(name='req1', module='module1'),
'req2': mock_request(name='req2', module='module1')
},
'module2': {
'req1': mock_request(name='req1', module='module2'),
'req3': mock_request(name='req3', module='module2')
}
}
monkeypatch.setattr(commands, 'get_requests', lambda: requests)
await list_command(attrdict({'module': 'module1:req1'}))
captured = capsys.readouterr()
assert escape_ansi(captured.out) == '''module1:
- req1
'''
@pytest.mark.asyncio
async def test_list_command_no_requests(monkeypatch, capsys):
monkeypatch.setattr(commands, 'get_requests', lambda: {})
await list_command(attrdict({'module': None}))
captured = capsys.readouterr()
assert escape_ansi(captured.out) == 'No requests found.\n'
@pytest.mark.asyncio
async def test_list_command_module_not_found(monkeypatch, capsys):
requests = {
'module': {'req': mock_request(name='req', module='module')}
}
monkeypatch.setattr(commands, 'get_requests', lambda: requests)
await list_command(attrdict({'module': 'module_404'}))
captured = capsys.readouterr()
assert escape_ansi(captured.out) == 'Module "module_404" does not exist.\n'
@pytest.mark.asyncio
async def test_list_command_request_not_found_in_any_module(monkeypatch, capsys):
requests = {
'module': {'req': mock_request(name='req', module='module')}
}
monkeypatch.setattr(commands, 'get_requests', lambda: requests)
await list_command(attrdict({'module': ':request_404'}))
captured = capsys.readouterr()
assert escape_ansi(captured.out) == 'Request "request_404" does not exist in any module.\n'
@pytest.mark.asyncio
async def test_list_command_module_and_request_not_found(monkeypatch, capsys):
requests = {
'module1': {'req1': mock_request(name='req1', module='module1')},
'module2': {'req2': mock_request(name='req2', module='module2')}
}
monkeypatch.setattr(commands, 'get_requests', lambda: requests)
await list_command(attrdict({'module': 'module1:request2'}))
captured = capsys.readouterr()
assert escape_ansi(captured.out) == 'Request "request2" does not exist in module "module1".\n'
@pytest.mark.asyncio
async def test_list_command_request(monkeypatch, capsys):
requests = {
'module1': {
'req1': mock_request(name='req1', module='module1'),
'req2': mock_request(name='req2', module='module1')
},
'module2': {
'req1': mock_request(name='req1', module='module2'),
'req3': mock_request(name='req3', module='module2')
}
}
monkeypatch.setattr(commands, 'get_requests', lambda: requests)
await list_command(attrdict({'module': ':req1'}))
captured = capsys.readouterr()
assert escape_ansi(captured.out) == '''module1:
- req1
module2:
- req1
'''
@pytest.mark.asyncio
async def test_request_command(monkeypatch):
async def mocked_prepare_execute(req, args):
mocked_prepare_execute.called = True
monkeypatch.setattr(commands, 'prepare_and_execute_request', mocked_prepare_execute)
await request_command(attrdict({'request': 'req1'}))
assert mocked_prepare_execute.called
@pytest.mark.asyncio
async def test_prepare_and_execute_request(monkeypatch):
async def mocked_execute_request(request, use_defaults, **arguments):
assert request == 'module:request'
assert use_defaults == True
assert arguments == {
'arg1': 'value1',
'arg2': 'value2'
}
monkeypatch.setattr(commands, 'execute_request', mocked_execute_request)
monkeypatch.setattr(commands, 'init_environment', lambda _: None)
await prepare_and_execute_request(
'module:request', attrdict({
'arguments': ['arg1=value1', 'arg2=value2'],
'use_defaults': True,
'env': 'default'
})
)
@pytest.mark.asyncio
async def test_prepare_and_execute_request_exception(capsys, monkeypatch):
async def mocked_execute_request(request, use_defaults, **arguments):
raise Exception('It is broken')
monkeypatch.setattr(commands, 'execute_request', mocked_execute_request)
monkeypatch.setattr(commands, 'init_environment', lambda _: None)
await prepare_and_execute_request('module:request', attrdict(
{'arguments': [], 'use_defaults': True, 'env': 'default' }
))
captured = capsys.readouterr()
assert escape_ansi(captured.out) == 'It is broken\n'
@pytest.mark.asyncio
async def test_prepare_and_execute_request_assertion_error(capsys, monkeypatch):
async def mocked_execute_request(request, use_defaults, **arguments):
assert 1 == 2
monkeypatch.setattr(commands, 'execute_request', mocked_execute_request)
monkeypatch.setattr(commands, 'init_environment', lambda _: None)
await prepare_and_execute_request('module:request', attrdict(
{'arguments': [], 'use_defaults': True, 'env': 'default' }
))
lines = escape_ansi(capsys.readouterr().out).splitlines()
assert len(lines) == 2
assert lines[0].startswith('Assertion error in ')
assert lines[1] == 'assert 1 == 2'
| 32.327907 | 98 | 0.621754 | 1,492 | 13,901 | 5.613941 | 0.099196 | 0.038085 | 0.035817 | 0.042025 | 0.813037 | 0.778773 | 0.763372 | 0.754298 | 0.71872 | 0.641476 | 0 | 0.015276 | 0.213582 | 13,901 | 429 | 99 | 32.403263 | 0.750915 | 0 | 0 | 0.617886 | 0 | 0 | 0.203511 | 0.005827 | 0 | 0 | 0 | 0 | 0.105691 | 1 | 0.03252 | false | 0 | 0.01626 | 0.00542 | 0.062331 | 0.00542 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
236d2b2c6f485ddb64c189738e14c9c53f7ac333 | 79 | py | Python | DiagnosisFunctions/tools/imshow.py | vstenby/DiagnosisCorrection02456 | 4139f499b6a339d068e5c95811eca64806c532be | [
"MIT"
] | null | null | null | DiagnosisFunctions/tools/imshow.py | vstenby/DiagnosisCorrection02456 | 4139f499b6a339d068e5c95811eca64806c532be | [
"MIT"
] | null | null | null | DiagnosisFunctions/tools/imshow.py | vstenby/DiagnosisCorrection02456 | 4139f499b6a339d068e5c95811eca64806c532be | [
"MIT"
] | null | null | null | import matplotlib.pyplot as plt
def imshow(x):
plt.imshow(x.permute(1,2,0)) | 26.333333 | 32 | 0.721519 | 15 | 79 | 3.8 | 0.8 | 0.245614 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.043478 | 0.126582 | 79 | 3 | 32 | 26.333333 | 0.782609 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
88c4f48c054948ffe6f7ee8d121e09eec691c417 | 104,736 | py | Python | ndex2/tests/scratch_learning.py | diamond0411/ndex2-client | bac1c13c1b9f3fd485e3e070f4f0506baf42e4cc | [
"BSD-3-Clause"
] | 5 | 2017-12-01T20:19:46.000Z | 2022-03-06T19:22:36.000Z | ndex2/tests/scratch_learning.py | diamond0411/ndex2-client | bac1c13c1b9f3fd485e3e070f4f0506baf42e4cc | [
"BSD-3-Clause"
] | 77 | 2017-11-20T17:53:38.000Z | 2022-01-26T23:43:34.000Z | ndex2/tests/scratch_learning.py | diamond0411/ndex2-client | bac1c13c1b9f3fd485e3e070f4f0506baf42e4cc | [
"BSD-3-Clause"
] | 5 | 2017-11-20T17:03:34.000Z | 2021-04-26T20:40:20.000Z | from bases import Bases
def run_test():
n_mer_length = 4
#pattern_to_number('ACGTGCA')
nda_seq = 'TACATTGTAAAAGCCGCAACTATGTGTCGAATTCAAGGCTTACTACTGACAGTAAGGTTCCTTAGCCACCATTAGAGCTCTGTCCTTTATGGGAAGTCTGGAGCCGCCACCACACTTCAGGACTCATTCCAGATCCATTAAATGACTTTGGTGTTGCAGAAGTTGCTGTTAACCTGAGATTGACATTTTGTGAACATGTCTGTCCTAACGAGCAGGGGGTTAACGACACCGCCTCCGGGTACAAATCAACGCACGTTATCGCCCACACCTCCCATATGCCCTTCGCCTTAGGGCTTGCAGGGGAGGGCTCGAAACCACAGAAGCACTTCTCAACCTTAGTGTCTGCGACTCTAGCGTTAAAGACCTGTGGTACATCTACGGGAAGCCACAGCGAATATCTGATCGAAATGCCTCTTCGTGATAACCTCACCTGGCATTAAAATATATCACGTTATTCCGGTAATAATACATTGGCAAGTGTTCGCGAGACTGTCAACTTCGGGATAATTTCCGGCGTACTTGCAACTAAGGCAAGTCTGTCTGTATTAAATAGTTCGGCCGCGTTCGGGCGGAACGGTGAGAGGTTAGTATTTCCAGTCCCCGTGTCGCTACAGAATGCGACCTAGGCTTGTAACCGCCACATCCTTATCCCCACGCGCAAAACAAGGCCCGCCATAAGAGTAGAAGGCCTACATGGATCCGGGACGTGAAGGGGCCCAGGGGACGGGCGTTATCGAAAAGTCTCCCGTTGCGCGCTGACACGCTTCTCAACAATTGGACGTTACTTTGGTTAAGGGTGATGCTTCTGAGGCCGCTGCGAGCATAACTCGACCAACCGGGTTACGTAACGGTTTACTATTGCAACTCGAAACTCCACGTGAGACGGCACTCGCATGCGATCTGCTCGTAGCTCCGATGGCGTTCGCGGGCATCTAACAGGGCAAATTAAGTGACTTTCAGCTTTTGACGGTGTTTTGGCGATTGGCTTTAGCTCCCTGTATCGTTTGTGTCGCCGCGTTGCCATCATGACCAGCCGTGCTTGAAGGCTGAAGTGAGCTAACCACATGCCTAGGCACCGTTAACGCGGAGCCGGCTCATGAATGTTGAGTATGCTCCCTGTAGCCTTCGGGTCTTAGTGTGTGAGGAGAAGCCGTCGGATACTAAAATAGACGAAAGACATTACGCGATAATAGCGGGCCACTGAGAATGCGCGAACGGGTCTACACGAGTCCAGAGCGCGCATATGAGAACAATGCCCTCGAACTCCTCAACCAGACGAGTTGTAACCAAGTTACAGAGCAGTGCTCGGTCGACATCAGACACGCTTACATCTGGAGCGGACGGGAGATCCGTATTCACTCGATCAATACCTTTGATGACAAATGATATTCCCTATATCCCTGACGGTCGGACGGCACCGTATGACTGCAGTAGATCCGTGAGGTGGCTCATAAGGTTACAGTGGCAGCCCTATTACCGTATAAGCGATGTATATAATAGTTGTCCGCTCTGACGGAAGGACGGTACTTCATTATCGCAGTTTAGAGCGATTTAATCAAGTCCTTGCGGGTTAAAGGCTCCTAAGGGTGTTAGTTGCGATGCCCCTAACTCGCTTAAGGAACTCTCAACGCGGACTTCAAACGCTCTAGCGCGCCAAATTAATCGAAAATCGGACTCGTGCATTGGTTTAACCCACCTCGCTGCGCCATAGACTTGAAGCAGTAAAGAGCACCACGTCTCTTACACCCCAACTGTCCACCGAAGCTGGCCGCTGATCCCGGGACGAGGGGCTGGCCAGGGTACAGACACCTCAAGGAATGATAAGAATGGCTGAGTTACGGATCTACAATACCACGATATACAGACCAATTGTATACGAAAATGTCGGAAAGGTAATTGCTCCGTACGAGGCCAAATTCTAGGCGGTACATCGCACAGTTGAACGAAGTAGAGCTTTCATGATAATATACAAGGGATAACTTGGGTGGG
AGAGCTTCTATAATCCTCCCGGCAGGGGTTATCTGAGCAAGTTCCAGCCATAGCTGCTCTGAGGCCTTATGATTACACTCGGAAATAGCACTAAGGTACGCATACTCACGTCGGAAAGTTGATTGTGCATATGTGTACATGGACTCTTGGATGCTGAATCGCGCCCATGAGTCCTATCCAAATAGGGGGAACGTCTGTCCAACAGAACACACCCTCCACGTTCAATTACCTAAAGACTAATTTTATTCGCACAATTCGCGAACACTCGCTAAAAGAGATGAATGCTCTCATCCTAATTAACTGTCAGTGATCGCTCTTGGAGATAGAGTGATATTGCCGGTCAATTGGGCCTATTTTCACTGTTTTGGCTACGCTTAGAACAAGGTTCTCCGTGCGTTTAGGTATGCTAGTGTGTAATATTGCGGTAGTCTGCGGCAACCGACCACATACTTCCTATGATTGGCGCACCCTAGCTCTCTATTACAATGAGCAGATGGGAACCCTGGAACACGGGAAAGTGATACAGCTCGCACAATCAGCTCGTGCCTGTAAGTAGGCGCCGTAAGATACGCACACCCAATGTCCGGGTAGTAAAGACCCTTGACGAACCAGCACATGCCATCAACGCCAATGCTCGGGTCACACAACGACCGTATAGCGTAAGTTTCCAGAAAAACGCTGGGGTCCATTGGTGAGGCAGGCACTTAGGTGGTGTGTTGCCGCTACCCCCCACCTCCCTGGTCATGATGTTGTTGCGATTACGAGTAGTCTAACCACAAGCATATGGAAGGAACAAGATAGATAACAACCACCATATGACGCGTTGTCCTGATACAGTACCCTGTGCATTTATCTTTACGTCGATTCGCTTTTCAGTCCAGAAATGTGTAGTATCCCAATGTACGTACAGAGTTCTCTCCATTCACAACCTCTCTGTCCCGCGACCGCCTAAATCTTCACGTCCGAAGTTGGTTTCCGTTTTTGCGCAGGTGTTTGCGTCATAAATTGGCCTTCACCACAACGAAGGAGTTTTGTCGTAAATCTCCTAATAGTGAATCCAAGGACGAGAGGGAGTGCAACCTTTATGCAATATAGAGACCAGCATGGGGGATCAAGTGGTTTGCAAGCTTCTCACTTCAGGTTAGTACGCGTATCAGGCATTGACGACATCATTCCCCCAAATGCCAGTATATGGCCGTGAAGGATCCGGGTATCCTTGCCGTTGCTCACAGGGGCAGTCCATCAGTTCGCGTTTGTAACATCGCTAACGAAAGGCAAAGCTCTTTTGCAGTAGTCCGGGAGTGTGAGCACGAGTTAAGCTCGAATTATACTTCGCCTGTTGTCATGCACGACGGGAGATCAGGTGATATTGATCGATGCTCCTCATAATGTCTCTTTACAAGGTTCCGACTAATGCCTCTACCAAGCTTCTTAATCCCGATCCACCAGCACAACTGGAATATAACTGGCTCGATCAGGACTCCTTGTCGGCGCGTCGCGCAGAGAAGGGCATAATTTAGGCAGGTACGAAAACCGACTAAGAGGAGCCAATTACGAAGGGCATGTATGGATACGCAATTAGCTGCGACCCTCAGGCTACGTCCGTCAGGCGCCGGTGATTCTTATATGGGTGTTCTTGGGTAAATAATGCCCTTCCATCTCTCGTCATCGAGACCGTGGCGGGGAATGTACCAATTCTCACTCCACGCAGGCACATTGAGCGCAGCTGTGCCCTCCACCACTGGCCCTACTGGGACACGCCCATGCTTCTGTGTAGTTTACAACAAGGTCATCGAGCACCATGCTGTCTAGTACTAGATCCGAGTGGGAACGTGTACGCTAAGACATTTGTTCGAGAACGGAAAGAAGCCTTCAACCCTACCCAATAAGGGTCCTCATATCGATGGTTGGCTGGGGCGCTCTACCTCTATTGTTTGCACTACACTAGATAAGTCGGAAGAGGAAGTGGCTTGAGAGTCACTGAAATACGATCCACCGCT
AGATAGTCGCGGCGCAATGCGCCTTCATGGGGAGACACTTCAGTCGGCAACTGCGCGGGGCCGAATGCGTCTGCTGAAGCACGCGTACAGTCGTTGCACCCTCGGGGGTTCCATAGTCCGGAGGTCACCCAAGTGCGCAACAGAACGTTATGTGATCGGTTTTCTCCCTAACCGGGCCCGGGTCCTAATCCAGCCTATGACGCTGGTTATAGAGCACTGGAGATCACAAGCCGCTGTCTGCGAGACCAAGTTGAGGTGTCTGAACAAAAGCGCAAACCCTAAAGTTCTGACTTATTAGTCGATCTAGCACTTGCGAATATTGTTATACTATGTGACTGTCTGGTTGTACCGGCTGACAAGTTTGAACTCTCGCCATCATAGAAGGACTTTTTCGAACGCTCGGTAACCTCGAGCGAATTGGCAGGTGGTACATCACGTATGTAACCTCTCCCCCTAGTAGTGTGTGAGCATATTGGAGTGGTGCTAATGCTGGTCGGTGCCCGGCCTGCAATCCACGCTCCGAACTGTCGACGATTTCAGGCCGTGTTTCCTACTCGTAATTCGCTGTCCGTAAAACAGTCGGCACCGGGGGTCATAACACTACTGGGTGTGCCCATTTCCATTGTCGACGCAAACTCAAAAGTGTATCTGTGTCTTTTCTTGCTATAGCCACTACTGCGCTTTGGTGAATACCGTGACGGACTCATGCGGACCTAAAATAAATCTGGGTCTTCAGGGGACGGTCCAAAAAAGTGTTATTTGTGTAGTGAATTTTATGAATGAAACCACGACCAGCAGAATGATCACTAGAGAATGCATAGACGTATTTCAACGGTGGGTTGCGATCGCTACCCTCACGACATAAGTACCGGTACAACCACACTGTAGAACACGCCTAAGGCGTGCTTGCTGGACTCAGGAGTGCTCCACGCTCCTATTTGCGGTACTGCTTGTGGACGACCCGTTGACTAGGAGGTAATTATTTTTGTCTAACCAACCGCTATCCATAAATGCGAAGCGATCAGTGGAACTGTTCAGATGCGGCCGGTTTGTGCCAAACTCAGAGTGAGTTCCAAGACATGAGTGATACATAGAGGTTACGTGGCGTGGAGCGCGATAAGAGTTGGGTAGCGACGCTAACAGCAAGCAAGTCCTATCCGTCGGAGTCGTACATCCAACTACCTCTTCAATTTGGAGGGAAGAGGTGACACAAAGAACAACTTGCCTCAGTCAATGACACCGCGCGTCATGGGTAGAGTCATGGGTATGCTCTAATTGTACACAAGTTCACAGCCTCGTGACGATGGTTATAAGTTACTCTTGATTGAAGGACATTCAAGCGCTAGTTCTCGGGCAATCCGCGTGGCAGACTCCTGGGGCATACTTCGCCGCCATAATTGAATAGAGGAGACCTTCCCTCGAGCCTAGGGCACCCAGGAACAGCTTTTAGATGCAAGTCCATAGTGACACATCCGCTCGTTCTAGTTGGCGCTGCTCACTTTTAATGAAATAGTCGGATGCGATATTGGATAACAAGGAATGAGGGGCCGTTTCCAGGCCGTTCGGTGTGGCGTGTCTGGTACTGTACTTCAGTTTTTATTTTAATGGTTCACGTTAAGACGACAAACGGTGAAGGATCGCAATCCAGCGACACGTGACACAGTAGATTCCACACGAGCACACACCTATCGAAATTTCTAGCAACGCTCTAATGCGCGTGGGCAATTCGACCAGCGAGCTAGCACGCTGAGGACCCCATGAGGCCGGACTTCCTCTGGAGATTATACTGGGTCGTCGCATACCATAGGAGAATCCATGGAATGGTTAGGAGTCAGAAATAACGGCCGCTTCACCCGGCATGACTACATTGCCGTTACGGCTCTTCTTGACTTAGAGTATCGCAGTTCTATAGCGATCTTGCCTCGATACTTCTGTCTTATAGGAAGCTGACGCGATCCTTACAAGATTAGAAGACGACAGAAACCGCATCCTGTCATATT
ATCTAAAGCTTGATGGCTCCGACTCCTTGTAAGCCCCAACCTTGATAAAACGGCGTTAAGGACTTTAACGATAGTTTTCCATGTCACTCCTGAGGTGCACACATTTACTAAAACCCGACCTCTTTGCGCCGCGTATCCTACTTACCGAGTACAACGGTTTCCGTAGGGATGAAGGAGTTATCGGGGCCGAGCTCAATCATATGTCCAGCACGGTCTCTTGTCCAAGGTCGCGGACCATTTGAGTTTCTGATGGGTTCCCTTTAGCGTACATCCGCCAGTACTGCCACTGCAGGCTAACATAGTACACACGCTACTGAGTAATCACATGTGTAGCGGTTGTATACCGGCCTACAGCGCAATTACGGTCCGCTGTTCGAGCTATATTCGGTCGGCACTACATCACCGCAGCCAGATCGCCCGGACCAAGAGTCGCGGTGATTGGACACCTTCTACACATAGCGTCAAGTCAAGGTTCGAGAAACATCATCAGCCGTGCTGAGCAAGACCCGGTCCCGGGCCATTGGAATATCAGGAAGTCCGGTCGATCCGGGGTTTAGATCCTAATCACCCCAGGAGTGAGGTTAGTCAGGTAGGCTTTTAAGCATACATGATGCATTCGCGAATCCTCCTTCCACTATAGGTAAGGACCTTCACAGACAGCAAGCGGAAGTAGACTACTTGCGAAAAGGGTTGCTACGTCGTGAAACACAAGAGTTGTACATCAGTCTCCCACTGACAGCCGGTCATGACAATCAAGCTTGGTTAGGAGCTACCGAACCATAATGAGGTGTGGGGGTGAGAGTACTTCGGAAGTAATGTTCTAACGTGGGAACGTTCCATTAGTTTCAGGCTCCTCCGGTGGAACGAACTCACTTTTCCGAGCCGCACCAAAATAACAGCTATCCACATTAGCAGAGTTCTCCGACAAGTTATCGCGGAGACCCGAGGTACCGGGTGAGTCCACCAGGGTGTTGTACCCTTGTCATTAGGGGGAAACGCACCGCTTTTGGATCCGCTTGTTAATTGCAAAGGTGAATTTCGAGCTCGTCTGCATTATCCCATCGCGTAGCGTGTACACTCTTAGACGTTCACCCTAGCTCTCCTGCGTCGTGATAATGTTGAAAGTGTGATTTTACGCTCTCCTCTATTACACATGAAGCTTTCTGTCCGAGAGGTAGGCAAGATCCACACCGACATCTCAGCGTTATGACGCTCGGGGGTGCAGGATTGTCTGCTCCTAATCCTGCCATTGCACTTCTGGTATTACCTTCCAAAAAACCGATCGCTGTGGCGGTATATTGGCTATTAGATCCGCGAATTACTACGGCTGAGCCGATTGCTCGAGGTACTGTGTGGATGGCTAATATAAATAAAAGAGGTGTGGACAGGGGACTGGGTGACCGGTTATAAAATAGGATTCTCTAGGAGGGCTGTCAATAAGTGAGAACCCTAAATAAATATGGCTACCTTCTTAATGCTTTAGATTGCTAAGGTGGAGCTGCTTCATGATTTACAGTTGCGGGTGCCTACCTACCACATGGATCCCTTTGGATATTCACCCTGCGGGAGTGTTACTGTCCCGTAAACGCTCACCGACTTCGGAGTAAATTAAACTACGACATGGCGGTGTGGAAAGAAGCGTCCATGACACTTTAGATTCACGGTGTCCCGGGGAATGGTTCACGTAAGAAGTAGCAGAACGTTATCGGGAGTGGAACAGCACACCGGTATTATCCTTCGTATTGTAACCGAGCTCACTATATATAACTCTGTTTACACAGCAAAGAGCTTAAGTTAGAACCAGTTGCTGATTCAGCGCGGCTACCGTTAAATTAGTGGCCACAATCATCGAGTAGATAAGGGGGAATGACTGCACACAATAAGAGTGGTAGCCGAATTCTCTCCGTGAGACCATTTGATAAGGAGATTAATGACGTTTCGATATTCTACGAGTTGAGCGGGGATAAGGTCCAAGATCAAAGCCGGGTTTGATCGTAATC
TTAGAGCAACCACGCGACTTTACGCGCAGTTGACGAATTACCTTAAGGAGGAAATGTGGGTGACGGGGGCGCGTTATCATACCGAAGATCGTCCGTATCTCCGACGTATTCAGCGAGCGAGGCTAGATCGCGCGAACAGATGTCTTCCATCGTCGCCCGCTTCCCCCCTCTAGGCTACTAGAGTTAATTCTGAGTGTCCGTCGCGAGATTGGTATGGCGAGACAATCGTGACACTGAATCCGCTGAATTCGTTACCGGCCGCGCGTGAGCAATAGTCTATATCGGGTACGATAATAAGATTGGACTGTAATACATTGCCAAGGAAAGTAGAGTCGCGTGTTTCCGGAACTCATTGGATGGCTTCTTGTTTAGTTTGGCCTATAGACGAAGGCTCTTGGCCAAGCCCGTTAAGCGGGGGGCTTCTACACGTAAACCTATCAATAATTAACTGAAGTGAAGCGGAATACAAACGGTACTGTTACATTACTTATCGACGTTAAGAGATCATACAGCTAGTGGGGATGCCCATTAACAATAAGTCACATTGCTGAAGACCGGAAACCGGCCTCACCGGGTTACCGTCCGGTAATTCGGACACGTATTCTGTACCTTCACTGGGGGCCCATTCTCTAAGGAACCTGTAATAACGGTACTGCCCTCGCTTCATTTACTGCAACTGGGTTAAAAGCCATATGATAGAATTGACTACAGGTGTATTCGTTTCAGTGCCCAATAGCTATTGGATGGCTTCGGGATGATACCTAGTCGTAAACGATATGTTTAGCGGAGTTCTACGGTTTCTTTATCAGGTGTAAGAATCATATGGCTGTTCAGGGGCCGCCGGACTACACCCGCGACAAAGGCCTTCGGTAAAAGAATATGATTATGGCGTCGATCCACGGCTCTAGCTAAGCGATCGCCGCCCTGGACTCGGTAGCGGTGATTAATATGGATCATGGTCTAGTTGGAACGCGGATTATCCGTCCTTTGGTATGCCATCCGCCAGAAGGGATGCACTGCGTACCACACCTGCATCTCTAGGCTAAGTAAGTTAGAATACTTACAGCCCGAAGGCGTGTGGCTAAGTTGTTAGTCCTGCACACATGGCCCGCGAGGTTTTTGCGCACGACACTCCAGTGCTAAGAGCCTAATATAACCTCTCACAGCTGGCGGAAGCCAGCACGAGCGTAGTGGACTAAAGCATTACGACAGCTGAAAGAATATGCCTGTGCACGCCGGTGCCTCACTTATTCTCTCTAACGTTGAAATTCCAACCGATTAGAGTTGCACGTAGTTACACAAACATGCGCGTTCATCGCAAACGCCTGACGCGGTAAAAGGCGCGTGAACGCCCATTGCATTACTGATAGGTCGATCTTAACATAATGTCTCGAAGCCAGCCGATTACCGAAGACACGCCCCGGCCCCGTAAACAGATAAACACCTCTGCCGCACTTAGAATTTGGCGGGGATCAGCAACTGTAAGGATTTAAATTTCCACGAGCCCTCTGTTTTTTACTTCAATTAATGGAGTTCAAATGCCCTAAAGTGGTGGTCGGCACTCCTATCCCGAGTGACTAGTAGCGTACCACCGATCTGTCTCAAGCTTTGCCCCGTTTTCCGGGCTAAGTAACCGAAGCGGGAACAGACTTAGAGCTGAGACGGATGACTCTAAGGAACCATGGATTTTGGCTGTTATACCAACACGCTTGGCTTAGGCTCAAATCTTGGCCCTGCGCCTTCTACTGATCAGACCTGGTGGATGTCGACCAAAAATTCCACCCTGCATTGAGCTATGTCTACACCTCGGCTCATACGTACGCCCGTTTAAGTACAATGATTTACCTATTTTCAATGACCTTGAAAAAAAGTCAGCGGTGGTCTGTTAGTACACCCGCATGACTTTACAGCGGGCATCCGTGTTACCCCCAAACTCCTCTATGACTCAAACTCTGTGCCCCTCACCCTAACGGACGGCGAGGGCGCGCTTCTGACAAAGT
TTGATGAGGTTACATACCACTAGAGGTACCTCATTCCTTGCTCTTCAAACGTACTTTAACGCTTTGCGTCAAGCCCGCAGTTTGTGCGGCGCAGCCAAGGGTGGCTTGTCGTGAGGGCGCGATCAAGGCGTCAACCAGACCTAGAAACTAAATTTGTGAGAACCGGCAAGTACTGAGAGATACATATATAAGCATTTTCTGAGAATGAAGCCGATTCGTGATAAACTGCACTGAGGTTTATAAAGTCGCGTTGCCGAGCGTTTGTAACCGCACCGCTAGCAGAGTTGCCGTCGGTATAACGTTACCCTCAAGTCGTTTGGAGGGTCGAACCTGCAGCAGTTGTCATGAACGACGCACCTGCTTTCAAATGAGATCTGGTTAGGGGAACGTTGAGTAGCGCCCGATGCGCGAGTCCGGCGGATTGAATGTATATCTCAAGCGCGGCCTCGCATTGGGTTTCCGTTTTTAATATGCAGACGTGAATCGGACGGTACCGTCTTGATTTGGTACCACAAAACCGGAGTTTTGGAGGTCTACAGCATGAATACACGCTTCAGAGGCTGGACAATAGAGAGGGAACCTTGGATTAAGAGCTACTTAAACAACGGGGCATAGCAATCGCAGAGTTCTCCGAAGGTGTAAGATTACAGCTTATTTAGGCACCCCAGTGTTGGGTGGGCTATGGCTATGAGGAACTCGTTAGTCGTGTGATTGTATCAGGATGGTCCAGCATCAGGTGGTCTAACGTCATCCGGCCCTCATTCCTGTCGACTCACATCCGTCGCCCCGTTAATGCTTGAGTAGAGGTTATACTGGTCTCGCGCATTTAAAAGCGTTAAACCATAGGATTACATCTGTGCGCGCTCCCGCTACAATCCCTCAACAGTCATTCTGGTGTGCCTCGAATTCCTGGTTGAGCTCTTCGAGTGCACAAATATGTTCCCATACCCACCGCGACCAATGTAGCCAAGGGGTAGCGGTAGTCCCCCCTGACTGCGCAACAAAGCGGCGACACTGCTGACGGTCAACGTAGGACATACGGAGCGGCTGCTAAACTATAAGGATTATTAATGAGGCTAGAAGAAACGATAACATGTGGTAAATGTATCGTAAGAAACGTGACAGACGCTCAGAGGGATCATTCCAATGGCCGGGAGGACGAGGTGAACGGATAAACAAAACAGTGCGTTTTGTATACCCAGTATATAGGGGCAGGTTGCCGCCGGCCATGATGGACGTGAGGACAGTATCGTTCGGGCGCGAGCTTATATCCATTATGGATACACACTACATGGTTGACCCATTCGGTCAGGCTCGAACATTCTGAAGAGATGGATTTCTACTATGAGTTGATCCGCTGGAAGAACAAAGAGTCGGTCTATCGCTACGTCGCTTGGAGAACATGACCCAAAATCGGCGCCCTTAAAGGTATGACAACGAGCGCGCTAGGTACCGATTGATTGTCTACCGAGCGTGTACATGTCTGGAGAGATGATCTAGGAATGCCACGGCCTTAGACCGTGAAACCAATAGACAGGACAATTCGCTATTTACGGGGACGGGAATCGCTTATCTTTGCGGGGTGGCACACTAGTGGAACAGACTATATCCATCAGCTTACGCGATTGCATAGGAGGACAGAACAAGAACCCCGAATAGCGTCCGTATAGGCATGCCTGGCCCAGACTTAGGACAAATGCATACGTAATTCTTCCGCTATGCACCACTACATAAAGCCTGTTACGATTAGACTAGCAAGTGGGTTCACTTCGTTTGTGTTAGTCAACGCTCAGCGGTTGCGTCGATTAAGCCGCAAAGCAGTCTCCATAGTCCTTTAGTATGGTCGAGAGCGATACGCAGATTGAGGTATTTCCGATCGGGCCACTCTCCGTTAATTGAGCAGACAACTCCTGTCACAAAAGAGAATACGAACAGTATAGAACTACTTCGGCATTTAACAAATCACGTGCTTTCGTTTTATGATCGATTAACTCTAAAGG
CCTGTCGCTTAAGTTCCAGGATAATGTGTATAGCCCAACGCTTCGGAGTCAGCCTAGGGGGCGATGAGAAACGGGTGAGAAGGCTGCTACTGACCCGGACTACAAGCCCGTGTGCCCGATGTGCGACGGAATTCGTCAGCACGGTGGTAGGAGACCTCGTAAGACAAGACGAGGAACCAGCGGCTAGTACATGGAGTAGAGCCCGAGTACTAATACCGTTAACCGCGACACGCATTGATCCGCCAGGCGTCACGCTGCTACGCACGTTCCCCGCTCTAACCCTCTATGAAGCCAGTACAGCTATGAGCTTGCAGCCTTTTAATCATCTGAAGATGTCCGTACCTGCCTACACACCCGAGCCGCCCCAAACCGGATTGATTAGAGGAGGAATGAGTCCCCTTGCATTGAAGCACGCTGCCAGGGCGTTGTACGTATTTGGCGACAGACGCAAGACCACTTTGTAAGATGTGGGTGCTCTTAGGTGGTGCATTCCAAGTACACACGACCGCATATGAGGCAGGCCAGTGTTACTTTTGTTACGGGGCGAACGACGGTTTGTCTGCAATTGTGGGGGTCTTATACTTCCCATACAGTCCAGAGACCAGACTGAATGACCATTACATTGTAATCGGATTGTGGGTATCCTGAAAAGCCAACTGGGCTAGGAGACATACACATTATTAGCTCAACAGTGGGGTATCCCGGGGCCCCGGACTTAAGAAGGCCCTAGGCCGAAAGCCTAGCGTCGAATACGGTTACGTTATAACACCCGACGAAACTATTGCTGACCCTTCGGGTCGCGCATCTCATCCGGGCGACAGATAGGATAAACACAATTCCACGTACATCACCAGTATGGTAGTAAGCTGTCACGACCTTTAAGGCATTTTTAAGTCTGTCAACTGATGGGCTCGTCTTAGTAGAACAAAGTGAGGAGGGGACTCAAAAAGTGTATGAACGCCTAGCGCGCATCGAACACCATGTCAATGGACGACTATGCTCTTGGAGCGTCCAAGTTATGCTTGAATTTTCCACACGGTCTATAATTGCAGGAAGGGGAGCGCAAAATGTTGGTGTTGAGTACCACCCCCCCTTTTATTGTGCAGCCAGCCTCAAACCCTAGATCTTGTGGGGAGGTTAATGGCTTGGTACCGTGCCCTCTGGGCGGACCATTAATTCGCTGCCCCGGACGAATAGAGGAGACAACGATGAGTCGTTTTGCACTAGACACGTGGGGGGTACCCGTCGTAGTAGCTGAGGTTAGTGATACGCACCACGTACGCAATGCACAAACTGCTGACCAGCCCACGACTAGTCGATACGGAGCCTTTGTGCAGCGGTACTGCCTCCCTTGGGGTACCATGCGGTAAATAATCGCGTCTGTATGATTTTAGTCCTCACAGCGGATCACGATGACTTTCAGTACTCATTGCATAGCATCTTACAACATAGTTGTCCTTGGTGCAAGTATGACGCGAGTCTTATGGCAGATCCCGTCCCGGGGTGTCCCTTCTAGGACAAACAAAACTCATTGGCTATTCCCAGTGGGCAAGCGTCAAATCGTCTCGTCCTACGAGTTGAAACATCTGGTAGGTTATCGAGAAGGGCCTGGATCACCTCATTGGGTATTGAGCCGAATGGGGTGTAGATATCAGCTCGGTCTGGTAAAGGGCCTAACACATCTCCTATTGTCATCGACGATGTCAGAGCATATGTAAGCATAACAGCTGATAAGCATTCTGACGCTAAAGCAATCTAGTTCAAGGTAGACGTCTACATTGCTAAGGTTGAAAATGACTGCATTTTCCCGCCGTGATTCTGAACGCCGAGTCCTGTTTCCACCAACCAACGCGGTACCTCACTGGGCTTCATTAGAGCGCTACTACGAAGGTTCTCTCAGCGCCATCAAGACCTAAAGACACCTGGTCGATGCCACGAAGATGTCACCAGAGAGTAGTAGGTCTGTAGCGTCACTGTATAGCTATCGGAGCGATTCCCTT
GGGAGTCCACGTTACCATCATGGACGCCCCTTTCTATTTCGGGACCCGATGCGAAAGATTGCTATGATCGGATTCGACTGCTGGAATCAGCGGTGGACTTCTGACTTCTAGTTAGCCAGACGGGGAATGATGTACGCCCTATTTCTAGATCTGCACGGTCACCAACGACGAATTACTACAGTGCCGATGGCGGACGTCAAGGCGATCCGTCCATCGCTCTGGGCACACAGTCGTTCAAGCCCTCGGCTTGTTAATGACGGAGATATCGCAGGCGGATGCGGACTCGATTCACCAACCCCGAAAAAAGGCCATCGAGCACAACACATCGCTAACTAACTTGTCACAAACATGATACTGAACCTTGCAGTCGGTTAAAGGCGAAAAATCCGTTGAGCTAAGGTGGCCGCCGTTGAGTAGGCAGGTCTCGGAATTTTCTCACGGGTTCACCGCCAACCCTACAGGCTGTAAATGCCAAGCTGTAGCAGCACCCTGCTACTAGTTAGAGCGATCGGTACACACCCGATGCGGGTCAATTGATTGTCTACTTAAGTGTTGAGACCCCGTTCTTGAGGCGCTCGTGACCCCCCTACCCTTTATTTGAGAATATAAGTACAGTAGGAAATAAACCCCCCATCATGTTATTGATTACTACTCGATGACTTTGGCTGATGACCAAATAACGCTTTTCATAAGACCTGGTGGCCACAAGGTTGGAAAAGAGACAGTTTGGTATACACGAAACTTGATGTGAGCATCTCAGCCACGAGGGGATCGTTCATACGATCGACCCATCTAATTGGACAGGCCCCCGTCGTTTCGTGATTCCGCGCCCAGGGAGGGTGTTGGCATCGGGCAAAATCGCTTTGGTGGAAGATGGTGCCTACCTCGGACCGCTAATAAGCTGGTTAGTTATAACAGGTCGGTGACAGGGCGCAATTCCCTCAAAAGGACATCCATGGGATCGGGTAGTGCTATTACATAAGGGTTGGCGCTTCAGATACAGTAGTGTAGTGGCGCCCATGCTGCGGTCCGCTCGTCTAAGAGATCACTCCGATTCCATACCAGTTTGTACTAGGGGCTTCGGCTCGCATTGTTTTCCAATTTGTTGGCTCCACCCTTCTGCAGCTGGGGTTAGGTCGCCCATAGGCAAGTAAGGAAGATCCCAAATGACGGGTGAACCCTCTTGTGACAATCCTGTACGGCTTATATTACGCAAACACCCACACTGTATAGGGGGATCCATCCGACTATTTCACTGCGACACGGTTGACATCGCCGTAATGACTTCCGGTTCCTATTTTCATGTTTCTACGATGCGAAGTACCCGGGAAGAGAAAGCATCGGATGGTTCTATCTGCGCGAATTCCTAGGTTGGTTCTGAATGCTCAAACTAGGAGAAGGCTTTGAAGCTTCTTAACCTTGATCCGTTTTCTGTATACATTTTCACGTGAGGCGTCTCGAGTCGCCTAGAAGATCATCTCCTGGAATTAACAGCTAAGCTATAATCCAAACCGGGGGCAGTACCCTAATACCTCGTAAAACGGGACTTCACTGCAGCGAACGGAGACTTCCGCGAGCCAGAGGTTCGATTATACTGCAGGTAATCTGGCGAAATTGCGAGCAACCGCGACGAAGATTTCCCACAACAAGGAGCCCAATTAAAGACTACTCGGGCAATTAACATCTATCGGATAGGCAAGAGCAATGATGTTAAACTAGCCCGAGAAGTGCACACTAAAACTCTCGCCCAGACCCGGACGGAGCTCAACAACCGGAATCGTCAGGTAGTGGCGATGGTCGATTTCAGGCAGTTGGTGATGAAGCTTATAACGGCAACAAGGAGACCCGGCACAGGCCAATAACATAGCGGGGGCACAAGAAATAGAGCTTATAGAATCGTGGTGCCGCAGCTTTCCGACGTTACTTCTTGGATTCACCAGAACGTGGCGCATGCTCATACTGGCAGATAACTTACCTACGAAGTCTCTAAGATAGTTCGG
ACTAAGTGATGGGGGTGCACCGTGCCGTGAAGGTGAGGAAATGTCCACCGCCCCGCTACCGCGGCATGTTACAAGGAACCGCAACAGTCCTTCTGTGTCCTTAGGGTGGGATCTCAAACGCTCATTCAACGTGACTTAACAAGGGCAACGAGTCATCTCGGCGGAGCGCTGTTCGTGGCTGCCATTTGTAGCGTATTACAAAAAATAGGGTCGTTATCATTCGGGTTCGGGAGCACACGCTGATCGCGGAGACGTGTGAGCTGGTATAGTGCAGTTGCCACAGGGCGTGTGCGGGAAACAACGGCTATGTCAACATGTCAACTCTGACGTCTGACCCTGTCACCAGCAAAAGGAGAGAGTAACACGTACCGCCATTATCTTGGCTGAGGCCTAGCGAACATAGAGCTCAAGCGGCAACGACCCATGCAACTAGGCAAACACATAATTTAGTAGTGGCGCGGCACTGTCTGACTGCCTTCTTTTCATCTAGAAGACTGTTCCGTGAATATAGGGAATTCTGACATTTAACGTGCCGTTGTTGCGAACGGCGCTGGTTCCGAAAGGTCTTGCCGCTCGCCTCATGCAAATAGTATTGGTCAAGACCGACAGCAACAGAGCCCCTAGTGCCATATCTACAACCCGCTCAATAAGGTGTGTACGGACAACGTCATGATCATCGTCGCTTTGTTCATGATCGAGTGGCGTACTCGCGCAATATAGTTCCATGTGCTTCAGTACGCGCAGGGACTGTTTTCATCGATCGCCAACGGGGCAATGGAGTCTAGAAAGTTAGCCGACAGTTGGTCTAGCACTGGGACTATATTACACGCTACCAAAATCATGTCTAATTACATGCTCAGGATCTGAGGTCGAGGTATGGGATAATTAGGGACTACCGGGCAGCTGCCAGGGGACTTTTGCTGAGTCACACCTTAGCGCGGAGAGCTCCTGGTACAAGGTAACTTGCGGGGTATGTACAGCGAGAATATAGTTAGGAGGATACTAGACGGGCCGGGCATGCGGTACCCCCGGCCTCAGGCTCGCAGCCAGCGACTTTCCGTAAAGAGTTTCAGAATAAGTTAACGGTAACAATGAGACAGCCACTACTCACCGCGTCTAACCCGTGCTCTGGAACTAAGCGGTGGCGATATGATCGGGAGCACAATTTATTACGGCATAGCTGTGTGAACGGCATTATGTCCGTACGCCGGGATAATTTGCCGTGGCATGTGAAGCCGCACTAACGCCCGGCTAGGCGTCTATCGCGTCAGAAACGTACCGGATAACACTGTCTGTGCTCCGCACTTGGATGAGTCCTCGGGACGGCGAAAGAGATTGATTAGTCCGGTCAGGGTGGTTCTTGCGTATGTTCCTACGCCCCTAACATATTTTTAGCAGAATACGCCTTGATGATAAACACCCAATAATCTGAGCCGCGCCTCGACCCTATTTTATACTTTGATATCTACGAGTTTGCAGAGTTGCTTCGGAGTTGCGGCATAGGATTAGCGAAGTGGAGCGGGAGCTGCCTTACACCGCTGCCCGATAATCCTGAGCCTGAACGGGGACGTGGTAACGGAAGAGGTCAGTGAAGCTGACCCTCCATTGGGACAAATTTGAACGTCTCTCAGGTCGCACAATTCTTACTGTCCCTTCTCTAGGCCTATAGGCATGTTTGAATCCTATGCGGTTGATGCATTGTGGGCCTTAATACACAGGGATTTTCCTGAATAACATATACAGAATGAGTTTAGTCTAGCGCGAGATAGACCAGTTCTTCGCCTAAGATCCTATCTGTTTTAATGTCTCCACCGTCGACGGCATGCGAAAATTACGACTTGCTCGGTTCAATCGTTCCACACCCTCTAGAGGGAATGCCTGATTGGCCTTAGATTTTTATTTGACGCTAACAGCCTTGGAGCCATAGAGCCGCCCATCTCGGGCCCGAACAGATTAAATTCGACTTAAATGGCAGATTGGGATAGTCTGAGACGTGAGTT
TTGAGCAAGCAACTCGGCGCTCGTGAAAGAGAGTCGTCTCGGACGCGGATGATGATTTAGCTCGGGGGGAGGGGAGCTAAGCCAGTGATCCCGCTCCGATTCCTAAGACAAATGTAAACGAACCGTTTTTAGTTGCGAGTTCTTACAAACTACAAACACCTGTCAGGCCGGGGACAGCCCTAACCCAACTCCGAACGATCCCCGCGCCCGCGGACGACCTCTTCGACGCATTAAAGTGAACAGAAAACATTAAGAATCGATCAGTCACTTAGCTGAAGTAAGCACCTTAATAAGCGATAACGATAGGAATTGGCGTGTCTACGACCTCCTTTACGAATGTAGTCATCTGCGTCGACACGAAGGTCAAGGTTAATTCTGCCCTACACGAGATCTAATCCAGACAAAGTCAGCACACGTCGGCCTTTAATGATCCCGACAAAAGACCACATAAACGATTGATCTACATGTACTCGACTGGGTAGGAATGATACAATGTGATGTTGTAATACATATTCTACCACCAGTACTGCTTCCGGTTGGTGCGTTAGACGGGCAACACTATGAGGGTTTTTAGATGAGTGTGTCCAACGTTGTGCATCAACTGTCTATTGGTTATAAGTGAACACAAGCGATCCTCATTAGGACGGAAGGACTGCCCCAAGGGAGGAGATAGTTTTCCCACTTAATGAGGTTATCATTACCATAGCTAGGGGGGTCCGACGCTGCGGAGCGAACTTGACAACCGACAATTCCACGTACTTCCATTTGAGTCTTATACAAATTAGAGGTTAGTAGGGTTTTAAGGGGCATGGGGTCCAGTCCCCGATGACTACTGCCCAACTTGCAGTTACTACCGAGGTAAGTGGGCGGGACTTTCAACCAGGGTTCTGAACCTTGGAGGGTTGGTGTGAAGGATGTTTTTGATTGCAACTTAGGCAGCAAGGTTATACAGCCTTCCTCCCTAGTCTACAGGATCGAATATCGGAGCTTCGGGGCAGAGCCTCTCAAGAGTTCGTTGTCAAGGCACGTTACGTGATAGTCTATCGCAAGGCCAGCACCAGCAGCGACGTCTTCGTCACGTAATAGCACTCGTTCCTCGGTGTCGGCGTGCAGTGGGACTAGCGAGCACATATTTTACGCGGTGGTTAGGATTTCAAGCCAGAATTTGGCAGTTGTCATCCAGGTGAACTTGCTAGGGACAATGGTCTGATCTTGGCTGCTGGAATGGATAGTACACCACGTACAGGGACACATAGCGGGTCGCCCGGAGGCGGCTCGAACGCTCTTTTGGAGCTGGTCGGGACGGGCATGTCGTACCGCTGTCTATACGTTGCTGTATCGTTACGCGTAGCGGGGGGCTTAGGCCTGCTCGTGGAATGGGAAACTATGGGCGCCAACTGACATGAACCAAAAGTCCGCACTGATAATAGAAGTTTTCTTAGCGTAACAAATAACTTCATGCTACGCGTGAATCTTTTTGGTCTTGGGGGCAGCGAGTTGAGCTAAGTCTACAATGTGTAGTCATGTGATGTGGAAGTGAATTGTGTCGCTCCGACCGCAAATTCACAACTGGGACGCATCTAACTCTCGCCTCTGTTGTGTAGAAGAGAACGAACAATGCGTGGCACCGTGCCTACTATGCAACATGTTGGGTCGGAGACCGTGGAGGATGCCTGAAGATCACTGTTGTGCGAAGTGACTTGCATGTTTAAAACCATAAAACTACACAAAATCTCAGTCTCCCCGCCAGTGGTCGCAGTCGAGGTATTAGACGATGGTGAATTGCCTCTACTAGGATAAGATGCTGGCCGTACTCATTGGCAAGCGGAGTGTGTGAAATCCTACGGAGTCGGTTAAGTCAGAGGTGGTGGCCAGACTCCACGGACGCTATGATAGTGTGGTTATTAAGTGTCATGGTGTAGCGGTTGCGAGAGTCACCTCGAAGAGTATTTCATGTCGGAACCTCGGGCCTACGGTTGGGCGTCTACTAACAGAGCAAC
TGCCACTAGAATTGTGCACTTCTCATTGACATTTTCGGAGCTAAAAAGCCGAATACTACGGCCTAGGAAAGTTGTCGAAAACCAATTAGAGTAGATTGGGACCAGAAGCTTATGTGTGAATGAGTCAACGGATTGGTATACGATGCCCCAACAATGGGAATCAAACACGCCATCCAAGGAGTCGGAACGGGGAGAAGCAGATTCGTACATATCGCACGGAAGTATGGTTCGTACGCCCACAGTCTGGACGTGAGACTCCGGACGCGGTCAACAAGCGGCCGGGGGACTTTGGGTTGGTTTACAGGCTTGCCTCCATCCCGGAGCAGATTTTACTTTTCCCAAGGGTCTTACGTGTAACACCTTTATAGCATTTTGACGCCTCTTAATGTATTACATGGCCATTCGTCTGAGACTCTGTTCGTCGTAAATGACGTACTTTAGTAACTGAACATAAAATCAGTTGAAATGTCTTGGTCTACACCGCACGAATACCAGTAGTAGTCCTCACAAACATTACACGAGTCTATTTGGAGATGAGGACGTTCACTGGTAATGGGAGCGGAGCATTCTCGACAACATATGGCCGTTAAATTAGTCATTCGTGGCGTCTGCTTCATTGTGTAACATCCTACACCAGGGGCCCAGAAAATCAGGTAGAGAGAGATATACTTTGTGAGACGGGCTAATCTGTTAGAAAACTCAGCGCTCGTACTATCTGAAGCTGGAGCCTGTCCTCTGCATAAAACGTAGCGTGCCATAGGCGCTCCTAGGTCTGCCGCACGCGCCAGAGTCAGTGAATTACGTATGCCTGGCGGCATTGGGAGAGACCTTCCAGTACACGGTCGGACTAAACTACTAGTCAAGTGAAGCTGCGCGGTGGTGCAACATCCCTAGCACACGGGGCCCTGTCTATCATTGTGGAGCCATAACAATAGAGAGGGCTCTGGCTGTGGTCGGTTTCGCTTGCATGGGCTGCGTGTCTTCAAGCAATGGCGCAAGATTAATTGTTTCAGCACTTGTATCCGACCGATTGCACGCTCTGTACCCATATGATAGACATGCAATTTAGTGGTCAACACGAACATAGAGGGCCAGACTGTATCATAAGCAGCCTGGTCACGTATTTACTGCACTGTCGCCGCAGCCTTTTAACCTTTGCGCTTAGGAACAGTGACCGCTATAGTTTCGGAAATTGCCCAAACTAGGTCTAGAACTCACCTAAAGCGTGTATTTCACTCGTTTGAATATCAGCTTGACGCCAATAAGATGCTCCGCTACTGTGGTTAAGTTCTGTTAAAAGAGTGGCCGTCTGTCTTTTCTACCCCCATTTGCCATGATCGCGGGGTCTCCCGCAGCGTGCCTCTTAACGCTTCGGTAGCCGTGGGTCCATCAACACTAGTATAGAAGCCGGGCGCCCCGACAATTGCGTCGTTGGAAGTACGTACGGTCGCGAGTTCTAGTTCCATTCTAAGCTGGGGCAAGTTCAGCGATGCCTTACACTGTGCCTATCTGAGACCAATCTCGTCAATTCCAATGACCCTCGGAAACCGTAAACTACTTAGACTTACGGGGCCCGTAACTGGATCGAGTCTGCAGGTAAACGTGGCTACCTGTAGCTACCCTACGACGGATCAAATTTTAACAAGCACAATACACTTTGTGGCACATGCACGATTCTCTTGAGATCAAGAGTTTACTGCGGTCGTAACCCCCCACGAGAGAAATACCATTGAGCGATTCCTAAATTGCTTGCAGGGCCTGCCAAAACGCGCCTGCAGTTATCGCTCTCGCTGTGATTAGCGATACTCATTGAGACATCTTTGGGACACCGTTCGGGTCTAAAGTAAGCCAGGTCCCAAAAGGGCTAGAAATGCCTACCGGATGATTTTCGTATAATCACCTAGCTCTGTCAGGTAGGCCGGAATGCCAGCGGGGATTTGTGAAAGACAACAAAGTATGAAAATATGATCTAGGTGAGAGTAGCGACATCTCCAGCTCAT
CGTTGGTTCCCGGGGAATCTTCGAAGCTGTATTGACGCCTTACGGTTGAATATTGCGTGGCGCAACACCGCATCCGACTGACAAAGGCTATGGTATTTCTCAGTTAATCCCATCTTGCGCCGCTAGGTAGGCTGGGCTAGTGCCCTCCTTCCGGTAGTACTAAAGCTAACCTCGACTAGGTTCCGGCGGCATGAATGATGCGGGAGTGTTCCATATCGTCCTCGCATGACTCTAGTATCATATTACACGCCCTAAGTAGACCTGCTAATTATCGGACTTGACTAGCCTAGGCTGCTGACCCGAAAAACTGAGATGTATAGTGATGGCTTGCTACACTTGCTTCCAATATTCGATTAATTTTTACGATGTGCCCATAAGATACACCATAGCGCTTACTGGGGACTCGATGTCTTTGGCGGTCACTCATCCTAATACCCACTATTTCAAAGAAGAGCTCCATTCTTATCCGGTACTCGAGACACTGCCTAGCTCATACTCAATCACTCGATCGACAATAGATGAATAGATAGCCGGGCTGGCTATTGCAAACGAGTGGAGGACACAAGGAAAAGAGTGTATACAAACCAATTCACCCGTGTTGCTATGAGTAAGAAACTTGATACTGCGCCATGAACAGTTGGTAATTATCCTCCGGGACACTGGCCTCATGTCAACCAGCCAGTAACGCACCCCGCGATTATATTCAAGGTGTAATACCCTTTGGATTTTGGCTGTGGCCGCGCACACTTCAAGGTCACTGTAGGGCACACTCGTTAAAACCGTTCTATCAGGTTTATCAGCGACGCTCCCACAGGTCGACCGTACCTCGCAGATTTGTAGGCTTAACGTGACTTACTATCCGTAGTGTATATCCTAAGTACACGACAGAAATCGGCCCCACGCGGGTCTCCAACACGGTCGTCTATACCTAGTCCATCGTTTACCGGCTTGATGTGTAGCAATCTTCACTGAAGGTGTAAGCGAGCCCTCAAGATCCGAGTTGACAGGCAGATGCGCTACTAACGTTCCCGGGTCAGACCAAGCCGTTAGGACGCTTCTCAACGTAATACTGCGAAACAGCTCGGGTTTTACGACGTCCATATATAGCGATAGGCAGAGGTGGCCGTGGGATCCTCCTAATCGGCGTTGCAACGATTAGAGGCATCAACGAATAAAACTGTGTGAATAAATAGCATATCAGATTGAAGAAGCTGGTCGCTCTCTTTGCCGGTTAGGATAGACGGTTCTATAATCGCGCTGCGGGCCGGACGTTCGGGACCGGGTACCTTAGGGATCGTTCTAGCTGCCAAAATCTGGGTCGTTGCAGTTCATAAAGCATAACCACGAATTGCTGTCTTCGCAAAGACGGGCTCCACCGGAGAGCCAGGCACTTGTAATGGCGCAGAATAAAGCATCAAGGACATATTCCCGTTAGGTATACGTACTCTCCTTCGTAGAGAACTCATGTGGTCGCTCACCTCGACCTCCGGACGGTCATCAGTCCAATTTCCCCTTTTCGTGACTCGCGCTATTAGATGATGCCATGGTATTACGTCTCTGTACCGCGCCACGTTCTGAGATCGCGGTTGATACAGTACAAGCCACCGGTCGGGCCATATGCTACCATTGCCCAGTCCACTGCCGCTAATAAAATGTCTTTAAAGCCTAATGCACTCTAGGCCCTGAAATTATGGCTGCTAGGACCGTATTTTCTATATAGTTCGGAAAGAGATCACGTACTTGACACGGGAACCTTATTTACAATCCTTCAAACATTAACAGAGACCTTACTCGGAGCGTCGAACGAGAACCGCACAGTATTCGACTACTGAGCACCTTTGCGACGTACGCAGATGACGGCGGAGGAACAAAACGTAGTTTCAAATGCATTGAATTTTCTGAGGACCACAATCGTCGAGTTTTGGAATCGAGGTGATAGTGGCGTCACTGGCCTCCCCAGGGACTATTTCCGATGTATGGTCGTATTGAATCTTATTATC
AGCGGCGTCGGGATGACCGTATCAGGCATCCGATGGGGGGATTCGTAAGTCGTCAAAAGATGAGATTAAAGGAAAATCAGGGCAGCGACCTCCGTCTCCGGACAGTGAGGGGACCTTCACACACCACGTGGAATGAATGATCCCAGATACGAAGTCAAATAGTGGAGATACCCCCGCATACCCACAAAATATGCTAATTGAACGATTCTACATAATACTATAGGCGTACTCACCGGCCACGGTTAAAAGATATTGGTCAGCGTTTCACATCTTTGAACGTCACAAGCACGACGATAGATTGGGCCTTTGTTGTAGAGCTAAGGTTTCGACTGAGAAAAAACGGAACAACGAGCGGCGCTACAAAAAACCCTCGAAAAAATGTGAGTCTTAATTTGCCGTCTTATTGTACTAAAGGTAGAACCGCGATCTTCCGATGACAGCCTTATAATCCGATCATGAGTGTTGATCCACTAGACATCAAGATGACGACAGGCCTTTGCGAGAGAATAAAGCATGGAACATGAGTGCTTCGACTCTCTGTTTGTCTAGACAGCTTGCGCTGCATCCTCTGAGACAGATGGCGCACAGATGCAACCCGCGGAAATCGCGCGTGCTAAGTGAAGAGGTCTACGCCACGATGCTCTGCGAGTATTATCTAGGGCGATAACTAAACGCTACTACTAGCGAACTGTACCGTCAGATTGGCACAAGTGCGCAGTGGCCCTTGTGCCAAGCGTGTATGCAACTAGTTGAGATTAGTAAACACTACGGGCAGTTCACCTTAGAGGAGATCCCGGTACTTCCGTCCACCGGGATACTTAGCACACGGTCTGAGTTATCGAACTGGACTAACACTTAAAAGCGGGCTCATGCGTCGGTCAAGGTCGGGTGAGACATCATCCACAATTTAAGTTTCCCTTGTAACGTATTAATGGCAACGTTCAATCTCAGTACGTATCCATTGCATAAGTGGTCCATAAGCTCGACATATGAGGAAGTAGCCGTACTTCGGCCTATCACTTTGATATGTGAAAAAAACTGTTCTAGCATGAAAAAAGACTCACTCTTCCTCAGTACGCTAGCGTACCCCAATTTGGAAGTTCCCATGGTTATTCCTGGAAACTCTCTTTGCGAAACCCAATCACTGTTTGCTGAATGATGCGGCACATATTAGCACCACGATTCCTGACCGACTCTGCGATTGGAAACGCTAGTATAGTATTGATTCTGAAGATTACTCCCACTACTCGAGCAAATTGCGCTGAGCCTGTCCTGGTCCGCACATACCTTATCTATGCCCGTATATGGCAGTCCACAGTTTGGTGTACCTTCCATGCCCGTACCTATCCGGCAGCGTTGTCTTTCCCTATAGGTTTGTTAATATAGCGCACCCACCGAAACGCCAATCACCCCCGTGAGTTGCTATCATGATTAATCCCTCAGCACCGCTGAACTATCGCGAGGCAGCGACTCCGAACTTCATAGACAATAGGCACGCTGTCATTGCAGTGCCAGGTACCAAGATTCGTAAAAATTGTTGCTCATTGTTACGACGAACGACATAAAAAATGTCCTACGGGTTGTCTTTTGAGTGGCATCCTTATACGCTTGCAGTCGTAGTCCGCAGGTCCAACCGGAGAAAGAGGTACCTAGACAGTATCGTATATAATTGCGAACCCTCGCATGATAAGGGATGGAGCGGTGAGAATTGACCATTAAAAACCGGGCATAGCACTACCTATTAGTAGTACGAATTCCAATTTATCTCTGGTCAGTCAAAAGACAGTTTTCTCTAGGTTAGTCAGGTCTACTTAGGTTTGGACTCTATGGCTTATTCGTGAAAAGAGGGATGTCACTTGATCTTCTATGATGGTGCACCTTACGGTAATAATACGCAATGAACACCGGAACGGGACGCAGCAAGGCAAACTGCGAATCAACGTAGCACCGGGTTTCTACCAAACAGTAACCCGTGCGGCAAGCTCTAGGATAGTGCGGAC
CCATGAACGAAAGGAATATTGTCTAGCAGCCTTAGGGTTGGTGTGTTCACGGCAACGTCTTAGGAAGCGGCGCACCGATGCCACATAATTCCCCGTAGTCTGGATACTAGGGTTTGGGATTATCCCATCGTAGCAGGGTGTCTGCGTTGCGACTAAGCGTCAATCTCCACCGCCTGGGGTACTTCGTCCTCCTGTTACTTGGACAGATAATTGGGTCATAGAGTTTCGAGCCCAATCGATTTGGGTTGGTGATTTGCCTCGGCGCAGATGCTAGATAAAAACGGTTATTGGCGATATCCTTCAGCATCCTATGGGGGGCGATAGGTTAACTAAGGTCCCGTGTACGGAAGTGATATTGTGTGTACTTCAATCGTTGGCGGTCAAATTGGGAGACGGGGCTGGGCGGAATTCATCGGGGTTTAACGGGGCGTGCATTAACGCTTTGTGATACTCTACCGGAAGGTCTCTTCACCCTATCTGGGCTATCACTAAAATATACTTACGTTCGGGCCTCTTATTGACGAGGTAAATATATGCGGCGCGCTAGGTTCCCTCGTGCTCAGACTTAGACGGCACCCACACGCCACATGTGCTGGTTGTTGCACTAAAGGAAGCGTGAGGGCACAAGCAAGCAGCGCGGTGCTGTACGAACGTGATAAATTAATCGTGGAGTAGCGGCACCTCCGCGCCGGGCTCGGTATCATGGACTATTGGAAGGCACCAGGAGACGCGTTCACATAGGCCGCCCGCTAACGATGCCTGAGACTGCACCCGTCAAAACAAGACACGGGCTATATGTGTTCCGCGACCTCTAATTCGGGTTGCAACACGAAGTCAGCTAGTATCAGGTGCAGGAAAATCGTGTTAACATAGCTAAACTAAAGGCAGACTCATGCCCATCCTACAGTATGACTTCATTATCCTGGCAGATGTAAAAGTCGGAGATGGAATTGCGAGTGAGAACAGAGCGTTAAGAGCAAACTCGTACACCTCTAAGGATACCCTGTCTAGGTAGTCGGGGGGTCTAACTTTGCCGATTTCAGACGTCCGTAGATGTAGTTAATGATAGAAGCTTTAACTGGCCGTGACTTTACCGCGAGCACATAAGTTGTCATCCGGAATACACCATTAAGGCGCTTTCAAAACTGTTTGTTGCATCCCGGAAATGTTTGTTTTAACGAGCTCTCCCTAGTTGCCGTAACACTCCAGACTCCCATCGGGACCGAGTTTCCGTACCGGACTATTCCAGCCCAGTTGATACGTGTGAGGAGATGCATTGGTGGCTAGTAACCATCATTAAAAATAATTGGTAGGCTCACACGCACTGAACGCATTTTTACTAAAACACCATGCGGCCCACCCTAATCATTGGAGAAATCGAACGAAACTATGCCTAGCGTTTTGCAGCCATTACCCTTAACACGCTTAACATGCATTATTAAGTGTGAGTTTGTCGTTAGGACGTATGGTGATTTTCCTGGCTGGCCTGTTTAACGACGACTATCCCAAGTATAGTCGTTTTCGTATAATGTTTCGACGCCGAGGAGTGTGGCTCTGGATATTAGCGGCGTGCTTGGTCTCAGTTCCATGTGCATTTGTCTCCTGTATCATACTTGGCTCACAATTCAGCCCGATCCCTCTACAATGCAACAACTGGCACTCGGCGCTCGTATACATAGCAGCCGACTTCCTGCGTCCCCCGAACACCGCTAAACCCGGGAGACGCAAAGGAGTCAATTATGTGCAAACAGGGGTCTATAGGGTGGCCACAGCGGACAAGTTCAGATCGACGGGATATAGGACTGCCAGATGTGTTGCGAAGTACAAGACGGCACTGTTTGCATTGGTCCAGCCTAAATGCATGGAAAAAACATAATGTAAACTCCAATCTCGCTTCTTTAGTCTTTCATAGGTCTAAGGGATAAGTCATATATATTGCTATTGAGGGGCTGTCTTACGCCCGACGTTATATTTGACAACCTAGCTTTCAAGGCCTGCCG
AAAAATAATGGATATGATGGAGGCGAAGAATCTGTGAAAACTTTTAGCCAGGCTGCGCTCCATAGGGCTGTACTCACACGAACGACGTGTGTCTCGCACTGATGAACGCTCCTGACGCGATGGCCATGTCTTATAACCGAGACTGAAGCACACCGCCTTTATCGCCGTCTCAGAATTCCCCGTTAGCGAGCAGAATACGTCGCCCGCGCTTTGGAATTCAATGCGCGAAATTACCCCTCCATAATCCCACGTGGGTCCAAGCCGTTCAGGCAATGCGAAAAACCCGGTTACAAGAGACTGTGCAAAATCAAGGGTCGCCAGAACGTCGACGTGTACGGGATAACCGACGTTAGATGTTTGACACATTGCGGTAGCCCGCCAGAGCGAGACAGCATCGCACCACTCGGGAGGAGGCTAATGGTTTCCGCCCCACCGAGCCTAAACTCCGTAACATCACCAGTACCGATCATATGTAATTACGTTTTAGAATCTAGTAGCACTCGTGTAAAGGTCCACAAGAGTAGGCGACTGAGATGTTCGAGTATTAATACAGCCGTCAGAACGACACTCTAGCTGGCTAGCAAATCGGCTATTTTACCCCCTAGTCCCTATGGAAAATCCTGCTCGCAGGCAACCCGTTTGGAGATCAATCCCAATCTGAAAGTGTTATTAAAACCACATCACTAAACACGTCTTAGGCGAGAATATTAAATGACGTAGGGGCTACTCCATTAGATCCGTATCCGGTACTCACTCCAGGATATAGACGAAACATGACCGACTGTTCGGTTGCCCGGTATGGCGGTGCGGCGGGTTGGTGGATTACTCGCGGACGCGGGAAGTCGAACGAGGGGGGAAGACAAGTTCCGCGCAATAAACAGCCGTGCGCAAAAAAACCCAGGTTAGGGTCCACGCGACGTTTCTTGACCTCGGCACCCCAAGCCTCCGGAGTCCCGTCCACAACGGACTGGGACACGATAGAGAGTCACGTCCCTGTGATATCTAGTACAATCGCCGGTAACATCTTCGGCGACCACGTGTTCTGAGAGCTTGTGATCACATGCTGTTTGCCGTGGACCACAAGAGCAACCACTATGCCCCCAGGATATCGCAGTGGCACGTATGCTCTTCTTAGTGTTATCATACCCACATTGTCCGTCATAAGCGTTACGCTTGAGTCTCTGGGTTAACTCTCAGAACGGGCACTGTCTGACTTGGAGGACAAATCCAGAACACTTATCTTCTCTGTACGTAGTCCAGTGGCTGGAGGGTCGAGACAGCCCCTCAGTATTTAATTGTTTAACAGTATTACGTGTTTCCTGATTCACATATCTCACATCTCTCACATTCACGCACTTAAGCTATGTTTAACGTGATTTCGAAGGTGGTCGGGGGAATCAGAGACCTTCTTACACCAGAGGTGTCGGCGCCTTATAATAATTTGTCCTGCTGACGAGCGCTCCTAATTGCGCATGGTAAGGAGCGAGCGATTATGGTCCGAAAATCTAATCGCGCACAACTCGTTGGCCGCTCTGCTTAGAATCGCCTTTCCAATGGAAGTTAGTTAAGATTACAGTCCCTAGGGCGCATCCTCCTTAGTCGCTGCGCTAAGATCGCATTGATTATTGGAGCGGCACCTGTAAGAAACGCCCCCATCGCTATAACTACCGTTTACTGCGCTGGGATGCACCACACCATATTATCGGCCCCCTTTTCCACTACAGGGAAGCGTACCAAGGACTTGGTTTCGACGCTTCGGGTAGTCCCCGTCGAGCTGGCTAGTAATCTAGGCAGTCTTGGCCGGTATGGCTTGACGGTACACAGGATCACGCCACGCCTAAGGTCCGCAACCTACATCTAAAGCACGCCTCTACTGCTCCGCGGACACTTTGGGAAGATCGAATGTACTTCCGCCGCTCGGGTGTACAAATACTGGGACCCTTACTCAGGACGATTGCGGCATTCAGGTCGCTTGGGAACGGGCGTCCTGCTTTGGCAAG
TCACCGCGGGCGGTCTAGTCCGCAAGGCCATGACCAGATCTTAATTGGCGAACGGCGCGCACTTGGCTCTGATTGACTCGCCACGGACGTTACCACGGGAGTAGTCACATTTATCGCCCCTTCCATGTATCCTAGGACTCGCGGCCCGACGATAGCAAGTTGTCGGCCAGACCGCAAAGCAACGCTGGGGCCGTGGTGGCATTGTTCGAAGCCTCCCCAGGAAGGCTGGATACGGTCGACAGACTAAGATCGAACTTGCTGCGAGTTGACGCTTATCCGCCTGAATCGGTTACCGCGACACCTACAGGTATCATCATAGAATGGAAGAGTCTGATCTACACCAACTTTTGCCTTACTCCGGTATCAACGGCCGGTAGCCAACCCGGCTGCCAAGTGGCCATTAATAGCAAGGGATCTACACATCCATAGAGGCCGGCACAACATGAAGTGACCACCTCACATCGGTCGATTCATGGGTCTGTCTTGGGCCATCGTGCCTAAACAAGAATGGGACAACGTAGATGGAAAATAGTCATGGAGTTTTCCGATACATCGTTAGCGTGCGCTGACATTTAATCAAGCGCGGCTCCCGACGATAACGGCGACGAGAGGGAAAGATCCCGTCCTTCAAGGAGGCGATCTTCAGCTTCCTTAGGTTGAAATGTTCCTTAATATATCCTGTCGTGTAATAGTCCCAGATGCGTACCTCGGGCTCTTGCATCCAAAACTGGCACTTGAAGGCACGTTACAAGGGCACACTCCGGGCAGCCCAGAGGGATGTCTCGTGGGGTGGGATGGAAGCGTATAGTTAGGGAACAAGGCGGCTGTCACTAAAGTAGCGATTAGATTAGGTTGGTTGAAACACTGTTATAAACGGATGGGTCTCGGAGACTCTCGTCATCTTGCCCGCTCAATATGGTACCGTAGGAATAGCCATGTACCTCAGGGCGACGAGTAGTTTAGAATTATACCCGCTACGGTCGTGGATTAATGTGTATAAGGCGACACGTTAATCCCGGCGGGCCGCCTTTTGCAAATGTTTGCTATCGAGACTTGTTACCACGACCCGGTGCAGATAGAATGAGACATAGTTCGACCTTGGCAACTCTGCAATCACATGAGTTAGCTCTACCCCAGGGCATTTGCAACACTATTTCTTCATCTCAGCTGGGCATCAATTACTCTGGTTTGCCCCCCGCAATAAATTACTGATTGACAAAGTAAACCTGGGAAGCCCAAAAGAGATGTATTGTCGTATATACCCGCGTGACCGGGCGGCCGCCACCATATAGTGCAGAGAAAGGTGGCATGGGATGACATAATTCTCTTCGAGCAAACACTGGGAGCGAATAGCTGAGTGGCTCAAGGATTCCGGGACGGCCGTATGAACGCAATTAACGAATAATAGACCACACCGCGAATGGCTTACGCGGTCTGCACTCGTAATTGTGACTAGAATGAGTGACTCCTCATCCGAATGCAATATCAATAACGGAAGAGTGAGTCCGCGACATAACCCTCCCTTGCGGAGTATTTAAGGCCTCTGTCAAGAATTAGAAGATACTATCCCCGCCATACCATTAATCCCTTCGCACTCTTAGGTCCTGCTAAGATTTTCCTTCAAGCCTGGCCGAGACTAACTGGAATACGGGCCCCCAATTTTACTAATTATAGTGGGCTAAACTAGGGGAAGCGGATCCCACGTTGCGCAGAACAGAGCCGACGAGTCGGTTCGGAGGTATATTAGTACTGGACCGCCGAAAGGGACCGCGTTGTTTTCCATCCTTCGCACGCCTCCTTTCCACTGGGTGCCGAGGATCGTTCTGTTTGAAGGTAACAAGCTGACATCTTCCCCCGAACGGTGATCGACCGTTTTACCCCCCCGATACAAGCGCGCATCGAGGGCATTGTAGATCCCAATTTGTGTTTGTCGACCTGGTTGACCCGATGTCATTGACGGTCGCTACTCATAATAGAGGCGAAAATTCAAGTGCTATGAG
GGCACTTTTGGAGGCTACCGCCTCACTGCGCGACCAAAATGGGCTCTCGGACCTGAAGTCTTACTTTGGTCTATAGTTCGGCCTGTGTAACTTGAGGTGGTCGTTGGGCAAGGCTGGATCATTAGTCTCAGGCAGGGCCCGACCTTAGACGATAGAGTGTTAGTTCCTGGTTACATACCAATGCGCTATCCTGTGGTCGAATTCTTACGGCCGGATTCCAGCCAGGAAGCTCGATACCACACGATCACTCTGCATTCACCTGTAAGGCAAGCCTGTACCATTAAACGGTCCGAGCTGGATAACCTCCACGGCGTAAGAGCACCGCCCTGCAGGCACGCTTGTTTCCGAGGCGGGGAATGGTACTAAGTATTCAGCGGAAACCCGACCACAACCGCGGATGCGTCTGCATACACCGGAATGTGCCCAACTCATGTCCAAAATTGTGTACCTGGAGTCTGTGTGCGTCTAAGTCAATTCGAGCAAAACGAATGGCGACCCCAAGATCACTTAATAAGATCAGTTCGCACGTAACTTTAGAAAAGACGTGTGAGACATTTTCTTCCACCTTTTGCAGATTGGTCGCACTCACTCCTACTCCAGGAGGGGTATCGCACCACAAGCCCGCATATACCGTTGCTGTCTGTCTCGTAACTCTGGTGTCGCGATAGGAAAACAATCGAGAGTCCTACGCGCTGCTCCGAAGTCATGAGATGGATGATCGGTTCAAAGTGCTAGGTAACAAACAGAGAACTTTCAGTGTAAGGATACCGCTTTTGTGCGCCACCGCCGCTATATTCCTAGCTATTGGTGCCGATGTTTATGGCCTCCCCAGTCTTCGAATTGGAATACGTCCAACTCGAAACAGCGGTTGCCAAGGCACGAGTACAACTTTGATGTTGATATGTGATGACAAGCTGGAGATGCCTCTATCCAATAAACCTTTCGTTGCCGTCTCATAATATTATTTCTCTATGGTTATAAACTTATTTATACTTCTACATTGTGGACCAAGTAAGTTCTAATCACGGGCGTTTTCGAACAGAAATCCCAAAGCTCCATGAAACCATCACAACTGGAACCCATGCATGGTCACATCCCGGGGAACCTCCCGTCGCGATTGAGGCATTGACAGACTAAGTATCCGTTCAAGTATTGCAGACACACTGCATACTAGGTAGTAAGGACTCGCAGTTATTCGTACGTATTGTAAAGTAGGAGTAGGCTTTTCTAGGATTGCGTTAGCCCCCACATGACGTTAAGTGGCTGCCAACGGATCTCGGACATGCGGCCATACACTACGAGAACAGCTCAACTACGAAGGTAATCTTATCCGAAAAGGTTTACGGATGTGGAACATCTCGTTAACAGCTGTAAAGGGGCGCTCAATTACGCGGAACGGGCGCCCTAGCAACATTTTCACGTCTATGGACAGGTTGAAATATCGACTAAATTGTTGGTCATCGTGGGGATGTCATAAATCACAAATGTCCGTTAACTCTCCTTATATGACCCCGGCCACCCCCGCAATCAAACCCTGGCTGGACCGCGTTTTCCTAGAACTGACTATCGAAAACAAAGATCCACATCCCTACGGCGGTACCGGACCAGAATCAATTCGTTCGGTATTTATCCGTAAATTAAGGCTATGTACAATTGCTAAACCGATTTCAGCCACGAGTCTGTAAGCTGTCGTGAGACTCCCAGGCCTCGTCTGGACCTCACGGCTCTTACTTGTAGACCGATCTAGAATAAAGTCACCTGCCCAGGGTGTTGCACACTCGAACGAGACGCAGAGATAGGTATACCCCAGACTGAATCCGTACAAAGCTTGATGTGTAAAGCACACTGAGGTGTCGACCGATGCTGTTGCTCATATCTTTGCCAGTTATACTTTTACTATGCCGGCGACCAAAGCTTTCAAGCAGATGTCCGGACCGTCATGTAGAGAGTATCTGTGCTTCACTCCCGCTGGGATTCCATTGACTAGCGGCACTAAGGAA
ATCCCCGGGTCAAATCAGCGACTGAGGACTAGCCCTCGAGCCCACTTAGCTGTACATCAAACCATAGGCCGATCAGTCAGTGTCGGATGACTGTCTGCTGGAGGATAATGAGTAGTGTTCGATTGTTGACGCGGCGTACCGCGGATGGATCACCAAGTTTAAACCGGGCCCTAGGTTGAGTGTTTGGATGATGGCCTCTTGCCACAGCAATGACGGTCTAAGCGCTTAAGGAGAACTTTCGCTCCACGAGGCACCCTCCTAATTTCCCGAACACCAACTACCCATTACGGCTGTGGACATTGGGAGTCTGTACCATGCGCGATAATGTGGCAAGTAATAGTCGTGTCGTTTCGTTTAGCGCCCGTCTATCTGGACTTCCCGAATCATAAATCCAGCACATAACAATGAAGAGTTCGCGTACCGCATAGAAACACTCCTATTAAACGAGATATGTGCTTTACCTCCCTCGTTAATGAAGACGAGAAACGTCGCCACGCCTTTTGATTTACCGTACAAATGTGGGTTTCTATGGCGCTTACACGCAACGGAACAAAAAACATTTTGGGTTCATAGAGTAACCGCCCGGTCACGTAGCCCTGGCGGAACTACCATTAAGAAAGGAGTAGTGATAACGTCTGTCGACCCACGTCGGAAATCTGTCTTATGGTCAAGCAATGCAGCAGTCCATGCCGAGGTGTGTACCACTGCACGACAGCCCCCCCAAGTTGACTAGGCTAGTAAGTAACCCGTTGTTGTTCCACTGTAGCTGATGCTTTTCCGGACTAGAACCAATGGTGACCGGGTTACCGTCAAGCCGACCAGCTGGCCACCGGCCGGGGGGATCTGGTCGCTCGTGCTACCGGGAACCTGAGAGCCCGGCATATAACAGAACACCCGGAGCCTCAGCGCGCCCATGGCGGTCCTACAATACTCTGAGCTTCGTGGTTGGCTGGGATATATACGGGGCATTTGGCAGGGAACGGTCCGTAGGTAATCGTCTCTACTGGCACGCACCATTACGTCAATTGTTCAGCGAATGAAGAGTAGATCGGCGGCCAAGAGACGTGATCTCGCAGGTTTGCTACAATCTTCTAGCGCGAGTTAAAATCTCTGCACCTACAGATCACCGAATTTACATTGAGCACCTAGGCAAGCGGATGGGCTAACATGTCATGCACCCTTGTCGAGGAGCCAAACGCGTCCGGGATAACCCATTGACTTTCTCTGTAAAAATGTGATAAGCGCACGATACAAGTGGCGAATGTTCATCTAGCCTGCGTGCTATCCTACCACAAAACCTTCGTCCCTCTCCCTATACTTAGAGCGTGGGCTCAACCTTGTCCTAGGGTTCATGTGATCGCCGTAAAGCTAAAACCCTCAGGGTACTGACAGGAGTGCAGCTGAAAGCCTGCTGTCTTTACTTGTGCAATGCACCTCGCAACTAAAAATCGACCGCATGATAAGCGGCATAGGGAAGTTGTCGAGCTTTTTTCCACCACCGCTAGCGGCGTCTGGCTATAGCATTCCCGTTGTAGTGTGGTCAGGGCACGAACAGGCAACCGATCAGTACGATAGATTCGATAACGGCGAATAGATCGGTTGACTTTTGTGCTCCATTGAGCGTATATTAAATGCGCTCCGCTCATTCGTTATCACGCAGTCGCTATTCTAAAAAGATAGGTAGTGGGTTAGAATATAAATAGTTACTTCAAAGTTTGATCCTGGACAGCAAATTCGAACTGTATAGTTAAAATGTATTCAGATGATACAAGAGGTTAGAGCTTTTGCAGAGGTCCCTATTTCCCGCGTTTTCTCACTCGTACCTGAAACTCAATCTTGGGGCCCTGACTCTATACAACTTGGTATGCAAAATCGTTACGCGTTGTACCAAGCGTCGTTACCATTAGGCCGCATGCCAGAGACATTACGCCCGAGATTTTATACAGCCTAGCCGCGCCGCGCTCGGTGGTAGCTAGGGGTATGTGGGGGTTACCTGGAAT
AACATTCGCGGAAATCTGAACGTACCGAACGGAGCAGGGTTCTGAACTTCGGCTTGCAGATGACAGATCCTGACACAGTGAACGTACAGCAGCGTGATCGGGCTAGCGCGGACTAACGCCTTCATGTACACCACCTTGTGAATTTTTTTAGCACTACGAGAAACGCTATGACATTCAAAGGAAATCGGGATCGAATGGCGGATCCAGTATCATCATACCTAGTCGAGTGTTACTGCGTACCGTCCACCTCAAACCGACGGCATAGGCCTACGTTAGAACCTTACTACGTGGCTGCTGTGGGACCGGGGGCTAGACGTTCATAGGTCATCTCGGTGCATCCCTCGTCCATGTTACCGGTATTGTGGCTTTCGTTTGCCCAGAACCTTTCCCCAGGTGGTCGGATAGACTGATCTCGATCCAGTTCACATTTGGGATATCTCAGCAGGTGCGTGGCCATATCTAAACATGACAACATACAGCGTGACCTTAATGGTACCCGGGTGCGATCAACGAGTAGCCTGCTCCGTATCAGTTATGCCAGCGGGAAATCCCTGCCACCACGTGGGAAGTTTCCCAGTCGTAACTGTGTAGTGCAACGGTACGAATTAAGTTCCGTTGGGGAAATAACGAGCATCTTCGTCAGAAATCCCCAAAAGGCGGCTTAATAATGTGTAATTCTCATATCTTCACTGAGTGACAGGTGGGGACGAAACCTATGGCTATCCGGATCATGACGTATTTCCCCGCGCTAGCGGGCTCGCTTCTTCAATACGGTTGAGCTAGGCCAGGTAGCCCCAGGCTCAGAATCGCTATACAACACGTACTACACTCCTTATAAGTGCAGTCTCCTTTCTTGTACGGTGCAACAAGGGTGATTCAACGTATGATTTATCTCTGCGTAGCCGGAAGCTTTTAGTTTCTGCTCCATGCAAGTTGAGACGGCAACTTGTGGGGCTATGGTTTCTCCTGACGAGCTTGGCGACCTTGGCGCCGAATTTGTCCGTTGGTACTGGGCGGCAGGCCAATGTTAGATGCGATGCAATACCTCCTCGTAGGTACACCGCCAAGACGTGAGTTAAAACAGCAATCAAACTGCTTAAGGTTCGTCCTGAAATTGCGATATAGTATATACGCCGTATGATGCGGGAGTCGACATAAGTGAGCCTCACGATTCTTCACTTTCGCCGTGCACGAGGTCATTAGGTCGATGTCCATCTTTGCGGCAAGATCCATTGCTCGGCCCTCGTCTATGCGATAGGCTGATCTGAAAGCAAGCCGAGTCCATTCTGTACATAAGGGGAGTATAATTGGCGCTAACCCTCCCTTCTGGCGCCTATCGTCAGCATGCTCCCGACTTCGTAGTCGCCCGTTTTAGACAGATAGAACTACGTCCTTCGAGGGGCTTCGTGCTCTCAGATTCTACAGAAGAACAACGGCCTCAGAGTGAGCGCGCCGTAATGTGTCATGTGTGTGGAGAAAAAATGTATTGGTCGATACTGACAGATCAGAGATCGAGACAAGATTTAGTCATTCACGCAAGCTGATCGTATCCCGTAGGCCCCAGAGCCGTTTCTAGGTGGGTGGACGCTTCCCCGAAGTTTTAACAAGCATGAGATGGGTACTGTTAGATTTAACCGGCCCACCGGGATGGGACAGCTGCGCGTGACCGTGCTAGAGCTACTGCGGGCCTAAAGCCATCCGCTTCGGCTAGGCGCGCAACGGCGGGACAGCTTATGAGTATGCCCCGATCAGCAAGATCAATGAATCTGTTGCACCCGCCGTTTGACTCCAGATCGTTGACGGGTTAGGGGTGGGAAACTGTAAGACTTAGGCGCGTGGAGAAACCGGGCCTGGGCATACTAGAGTTTACTCGGTGCTCCCTTTACGGGCACTCGTATGAGATAGATCGGAGTAGACCCGCTCACTACAAGATATTTCCCCACATAAAGCGGGAAGACCGAGACGCATGTGTGCGCACGCACTCAGCTGTAAAGACCTCA
GTGGTGACGGGACAGGGGACACTACGCAGAATTCATTGCACATACCCCATCTTGTCACCTGTAGAGCTTCCAACTGCCCCGCCCAAACAGCTTACCAGCCATGGGCATTCAGGATCTTTGTTTACCGCCAGTCAGGCTCCCACCGGTCGGGTGTCGGCCATAAGGCAACTGTATAGAAAACTTTGATCGGTCGACGCATATCAGTTTGTATTCTCCCATGGCATGTGCCCGGATGGGTGTAATCACCCAAAATAGGGTCATGTACCGTCCTCTCCCCTATGGAACGACTGCTTGGATAGGCATTCCATGGTGCTAGCTTCCGTATCGTCACGTACCATTACTGCACTGCGCCCCATATACGGTTTTAGCTTCTATTCCTCGAGGTTAAAGCGCGGCCGGGTCAAATATCCCTACAAAACGCTTATTTGATAGCGCTAGATGTTTACCTACGCGAGTCTTTAGGGTTCCCAGCGGTAAATCGCAAGGACAGTGGACTGGCACACGCTCGCGCAGTCTGAATTGATATATTAGCAAGATGTCGTCTTATGCTTAGCCTATAGCACCCTCTGTCAGATGTGGCGCTGTAAACGGACGGGCCGGCTAACAGGATGCCGACGTGACTGAAAGTGGTTAGTCTGTCCCGCATAGGACAATGGTATCTGAGACCAGATAACGACCTGTCCCACCTGCCCATCGTAGGCGCTCCGACTGGAATGACGAGTAAGGCAACGCTATACTATACACGCCCAAAGCAACCAGTTTGACTTACCCGTATAGCATGAGAAGACACTTCATTTTGCCGCCCGCTGGCCTTATGGGAGATGTGACGTGCTACCGACCGACTCCGTCTCGGTGAATCCTGGTCCAGTTCACAGTGCGATTAACGGGTCGGAAACTTGGGGGACTCCAATGGGGTTGCCTCCTGGTTGTTGTAACGAACTTAGATGCTGGGTATCTTACATGTATCATAGCCCGCGGACTCTGCTGTGACCGTTTGCATTTGTCGTAGAGCTTCATGCCTCCATTCAGAGCTCTCGAACGTATCTTATTATCGCCTTCACTATCGCCAATGCGCGCAAACAAAGACGATGCAGCCACTCCCCTGACTATGTGTATTTTTCCCATCCGAATCTTAATTACGATGAACGTTCCTCGTAGAAGAACTTTACGTGGGAATCCGACGCAAGGACTTCGCCAATGGGGCGCGACCACCTCTTGTGGCACATGCTGGTTCGACTCTAATAGCATTAAACCGGTGACTAATGTTATAGCCGGAGCCAGCTGCTAATTTCGGACGTATCAGGCAAGGGGGGAGAGAGGAAGTGGGCAAGGACAGTTCTAGAAATAAAAGTATGGTCGCTGCGGGATCCTTGGTTACAGTGACAAAGACAAACGTGTTTGACCGGACCGGTCATATTATTGGTGTAGCCCGTCTGCAAACCGCGCTAGGGTGCATCAACCGCACTCTAGGCGACAACGGGGTGATTGGCGGTCATCACAGAACATTTACGGATGTATTCCGTAGGACTTAGCTGACACGAGTTCCTATTAATGGTGGGCGCTATGCTACGTGTCAAAGTTAAGAAAGGCCAGAAACCGTACATAGAACATAATGGAACTCGCACAATCATCCATCACTTAGCTTCGTTGATTTGGTCTTTGGACCTTCGAGTACCTTGCCCGCACAACCGTCGTTGGCATAACAAGGTTATGCCACAAAATGTCGCCAAGAAAGGGTACTTCTATGTACCGGAACTTCCTTCAGGTGATGCCCTAATACTTGCAGCTGAATCGTACCGCTAAAGTCGATAAAGTGTGCTCCATTTACAAATCCTGCAATAGCCTTGTGTAACTGCCTGCAAGTCTATCCATCTTTTTCGCTATGCGAGCTGTGTTGAAATTTACGCTATATTGGGTTTTCTTGGATGCGGATTTAACACAAAATATCACAAGCGACCCTCCATAGTCCGGTGCGGGTCCGGTTGGTTAAAAAACTAGTG
AACTACATGTGAGTCTGCCTATGCCTTAGACTAGGGCGGAGCTAAGTAACTGATGTTTTGTGTCGGTCCCGTGCTACCGCTGCGCGTTCAGCGCTTATAACGCGTTTGCTAATAGTTAGTTCGTAGTGGCAGTTGTTTACGAGGCCCCGTCAGTTGCTTGATATCACAGCGAGACATACAGGCCGCAGCAGTGCTTCAATTGTGACTTATATCGGAAGTGAAGCCCGCCTAGCGGGTCCTTTTGATACGTGTCGCCCCGAAAGTGAATCCTCTATGGAAGCTTATGGAACATCCCCTTTCTCCCGCTAGGTACCCGACTCTGCTCGCAAGACGATTATCTCTCGTAAGAAAGGCCTTTGGACAGGATTGGCTGGAGATTCGAGGTCGAGCTCCTATCGGTAAGCAGTGTAATGGCCGTGCCATTAGAAGGAAAGGCACCGGCGGTTCTTTATTCTCTTGGCGCCAGTGTTATAGGACGCGTTGTCCACGCTTTGAATTATGGGGGTCGCCTCCGTAGCGAATGTTGTGCGACCCAATGGGGAACCTTAATCATTAGGGAAGTCCGGCCCGACCGACTCTGTTTACCTCTTGTGAGCAGTTGCGATGATTTCAGGCACAATCGTCACGCTCTCGTGAACGCTTCGGTTCGAGCCCTGGACTTCCCCTTTGGGATGTCTCTTGCGCCCGGTTTGGGTGAGACGATGCGATTTCCGGGAGAAGTTGGACGTATGATAAGTAGAGCGAGCCTCAGAATTAACGGGGTAGGGTGTCTACCAGATTGAGGTTTCCTCTGAGATCGGCGTATCGCTCTCCAATCATAAGGAGTTACTGTTCCCTCGGTACCCTAATAACCGCAAGGGGAGCACCTGGCAGTATTCAAGGTGGGCATCCTGACCGCTTGCTTCGTGTCTATCACACCGGCCCCGACGCCCGAAGCACGAGGTAGTTTTCGTTACCTTACATGATTGTGCCGGCCCTGCAAGATTGATTCGCTTTCTTTCAGCAATAAGCTTACCTCCGGATAGCTTGTTTACGGTGTATAGGAACGTGTTTATCCTAGAACACTCCTCATGGTACACGAATTGAACGGTTGACGGAATCAACAGTCTAAAGGACTAGTCCATGAGGAACGCCGATTGGACTACTTCTAGTTAGGCTGGGTTCGAACTTTCCAAAGCCGACCTAATCTTGTTATACCGCCCGGTCCTTTTTATTTCGGCATGACGAGCCGCGGTGCGCCGCAACCTTATCTGGCATAAGTAGGTGCCTCTCGTCCCTGTAGTTATGAGAACATTAAGAAGACCTGTCCCACTAGACCAATTGTTGCGCATGTGCGCAGTTCCAGTACTCCATGACCCAGAGTCCAAGCTGACGTCTGAACCGATCAACAAACGACCCTTGTACTGGCGAGATATATAACAGCCTCCATAGGTGCTTAATCACACATGGGCTTCTGTTATTAACTACACACACTACGTAAAGATAATTTGACGGGGGGGTGGCTCAGGCAAATCCTCAACAGTCCCGTGAAGCATTGCTCGCCGCGATCAAACATGAGAGCCGCGAGTTCATCTCGCGAGTATATGACCGTCCAAGGCTCGGTGTCTAGGGACGGCACGTGTACCGTCCGTTTGATGTAGGCTTTACTACGGACAGAGAATCCTGTACCGAGTTTCTCGATATAAGCATCCTGAATTTCGGAGATCACAGGCGTCCACCACCTCCCGCACGCTACGTGACTTTGTATCGGAGTTTAACTACGTACTCCGGGAGCGGACCGTTTACTGAGACATATTCGGAGTAGCGGTCTATATTAACTCGGGTGCAAGTGTAGGTAAGTACCGAAAAACCGAGGCGTGCTTCGGTCCTGCTGGGCTGAGCGCTATTCAGCAAGCATTTCCAGACTCCGCCTGATATCTTATGACCCAGATAGCTGGGATAACGAAACAAACCAGGCGCCGTCTCTTCTCACGACAAAACTGCACCGAGTCGAAAACGGT
CTGCCTGTCTGTTCCCACTGAAGTCTCGTGACTTCGAATTATAAGCACAGCTTGAGCGTTTAATTGCTTTCTCCTATAGACGTCTGCATTACGTTCAGGGTCTCAAGTTGCTAGGGGGCGTCCACCCCGTTATCTGGGCTTGTAAGGGACAGCTCGACTTACAGCAGTTCACAGGAGCCAAGTTTGATGGCAGGTCTTAGTACAGCTTATGCCGCCTTCTGTATTTAGGAATGCGTATCAGGTAGACTAACTTTGTAGCAATACGCCGCTTAACCAATCTCACTGGACCCAACATGTGTCTAGACTTAACCTCGTAGCGCAGCCTGTACCGTTCGAGTCTATGAGGGCTGAAGCATTACCGCCGTAATCATCGCGAATGGTCCAACATAAAGTGTACCCCTCCGCGGCATATTATGAAATACTGGACTAGATATGGGAGGGATGTACGTTTGCTCCGGAAATCGACCTTAAAGGCACACCAGTCCGGATGGTTGTAAGCCCAAATTTCCTCCGCGACTCCGTCTGAATTGTTCATGGGCCTAGAGCGACATACCTGGTTGGTTAGCGGCGGGCGTTTCACAACTTCTTCAACTTTATCCTGAGCGACTAATCGGATAGGCGACTCTCTGTGAAAGAATACACGCGGCGTATTTCTATACTAGACTCTAAAGGCGGAGTAGTTACGCCATCAGTTCCGTGCAGAGATACTTTACAGCATGACTAAAAATAATTCTAATTTTTTGTGATGTCCTTCGATACTTGGGGCGGGGGGGAGGCAAAATGAGAGTATGGTCTGCAACCTGTTGGTGAGATAGGTCTGAAATATAAGGCTACTGAATACACGCCCGCCCGCCGGACGAGATGGTGGTACCGCATGAGGTTGATATTGTAACGTATACAATACGTAAGGAGTGAATGTGCTGACTTAAGACCTGGGCATAAGCACACTTAGTGGGGCCTTGGATTTAAAACGTTAAAGGCAGCCGCACTGAATGGTCCCTCGAAGTTTCAATTTCAGACCTCGTCGTAAAGAGTCGGATACCAAACCAATGCTAGCAATCTCCATAGGTGTCACGAAGCTGGGTCACATGATCTACTCACTTTGCACAGCTGGAGAGCGGCCTGCCCCAAAGCGCGTCATTGAGCAAGCCTAGATCAACAGCCCCGCGTTCCCCCATCGATGGAATTAGAGAGGTCCCGCCCTGTCGACAAGCCAACTCTCAGGAGATCCCAAACGGCGTAAAGGAGGTGCGGACTAAGGCTAGTACGAATCTTCAGCTTAGTTAGTTGTAATTCAAACTCCAGAGGGCCGGCTCAGAGACGCCCCTGAGAGTAATAATATTGGAAACGCGATGACAAGCGTCTGCAATCATGTGACTATTCATGATTTATGGCGTTGTGGAGGGATATGACCTAAGCAACCGGGAGGGAGCTAAATGGAGTATTGAGCACTCATGAGGGTGGAAGAATGACTCGAACCAGAGCCCGGAGGCAAGCGGCGGGGGTAGGTTAAGAAAGATATCTTACAGGCCCGGTAGTTACGTACTTGGTTGAGGACAGTTGGGACCGTATCGCACAATGGGCTCGATTGGTGCGATTGATCATGCGGTCCTTGGTTCAAAATGGCGCTCTTTGCTCAGTCGCGCTATACATCCTGGCACGGCTCTCGCCGGCCCTGCGGCCCTATTCTGCAAGGATTTTGATACTTCGTGTCTTATGCGGTACATGCCCGGATATCCCACGTCTTACTGACTTTGGGAGGTGGAGACAATATAAGCTAAGTGGACCACCCTCACTTAACCACGACGGATAGTCGCTACTACGACAACCCTGCAACCCCTCGGGGAGCGGAGAAGAGGTGCTATCGCATCTCGTCGACAGCTTAGATAGTGTGCATGCCGTGTCCTCCCGGCTTTTCCAACCGAAGACGACCGAGGCTGCTGACCCGATCGTCGGACAGCAGCCGATAAGTATACACAGGCGTCCTGCCCACTCTTACA
AGAATAACCGAATCGGATCCGTTCTCGTTGAACGGAGTAGATACGTTCAGGAAGATGTGAACATTGCTACGATGCTCTTGCTCGATGGCAACAATCAGGTTTGTGGCCGTTATTCCAAACCACAAAACCAACCCGTTGGTTAACGTAAATCGTCAGCTGTTCCTGCGCCCTGAGAACGGTGCGAGGTATGTCACGTGGACCAAACAGTCTGGGCGAAGCAGTTGTGTCCGAATCAACAGAGGAATAGGACTTAGACTTGCGAGCCCTAGTGGTACTCAATGGAAAGAAAATTTGATTATACACCGCTGGTGAGCGTAGCATCGACTCATCACTGTCCCTTTGTGCGAGTCCATACGCATGGAGTAATAAAGCCTACGAATAGCGTTCGCACAGCCCCGTTCCTATTTAGATCTTTTTTGGATCCCAGCAAAACGCCAATTACGCTCCTGGTGTCTAATAAGCCGCAAGTTCGTAAAATTAATCCGGAGTTCAAAATATCATCTCTGTAGAAGGTGCTTTAGTATGCGAGTCTGCGCTATGAGTAGCTACTAGTATATCATCAGTGTACCTTCAATCTACAGCAGAAGCTTCCGACTCCTAAGAGGTCGGTAGCTCGATATTCCGCCGGCTTTTGTGCACGTCAGATATGAGCAACTGTGGACTACGACTGCTCACTACTCATAGTGAATTCTCGCTGCAGGTCGTGATCGCGAACAGGTGTGTTTCATTCCACGCAGTGAAGTAGGGTTGGAACTTAAACTGATTAGTAGAGCTGATTGTGCGAGCCTGCGTTCTACCCCAGCTTCCGTTTAGCGTTTCTGCCATCAGGCCATACGTGGTTCGGCCCACGTTGGCTGTGGACAAGAGCACTGAAACGCCGTTCACCTAAGCCTCGCAATACAATAAAACTTTTTTTGGTGCTCTCCATCAGTTGGTGTCCTGTTGGCTGGGAGTTGGGATTGACCATCTCCAACCGACTACTTGACCCAGCCTGGGTGACCAGCACCATGGACAATAGCAGGACTACCTCAAGCACCCGTGATTATACCAAAGTATTTCTGAGGGACGGAAAGAAGGCCGCGACCGGTGTGAGGTTTATTACTTAACTGGCCAAGGTAACTTATCTGCGCCTGTGCAGAGACACGAGTCAAACGTACACACCCGTTGAGAATTCCTTATTAATCCCTTAGACTATACCTAGGGATAGTATTAGAGAAGGATTATTTGCACTCTGCTAAGGTGAAGCTAATTCATGGCGACTACGGTAAAGTGACAGGCTGGTAGATACCTTCGGGACTTGCGTGTGCGCGGTTCTACCCGAGGAATCCTCTGGGCGGTCGACAGAAAGTCGTCCTGATATGATTTCCGGGTACTGTTAAGTAAGCGGCGGTAGGGGGCAATTAACCTCGACTCGGGAGCTCAATCAAGTAACCCGGGGAAAGGACGCTGGACGTTTTAGAGAGCAGATACGCCGTCCTCGCATACCTTTCCAGTCTGTGTCGCGTTGGGTCACCCGCACGATTAGCGGTGTGAGCGAGCCAACCGCATCTCGCCTCTCATAGAGCCCACAGAAGAATCGAATTCTCTGGAGGCTGGCAAAGTGATGAATATGGGGTGTTAGCAGAGGAAGTGCGTCAGGCGTGTCGAACATTAATAGGTTCGGCGGAACTCATTCAACCGATGCTACCCATGTACCCCCTGACTACCGTTCCACCCAACCCGCCGCAACGGCGGTGGCAGACTTTCGCGTAAAGATGGTCGTGCCTGCCAAGCCAGCCTACGGCACAACTCAAGGGCTACTTTGCTTGTTTTATGTTATGATGTCTGTGCTGCACTTTATATCGATATACAGGTCTCCTGTTTAATCTCCGTAATCCCGGTCCCTCTTGAGAGGGTACGTATCGGAGACGATCGGGTAGGTTTTGTGACATAGCTACTCGGGGCTCAGGAATGCCTGCGACTTTAATTATTCATTCATAGCTCAGCGTTTAGGGGTTACC
GAGGTAACTGGGATCCGGCGCGGAAGTACCGGCAAACTAGGGAAAGTACGAAGAGTGTTTCAGTGCGAATTCATGTCAACGTCGGCAACTAACGCAAACCGTAACATCGGTCACTCCAGTATGTGGCTCGGAATATTGCTCAGGCGTGGTAGAGGACTGGAGCGCTCGAGCGTCGCCGCGGAATCTTTTAGACCAATTAGAGTTGTTGGTGTACGAGTTGTTTAATTGACCGGAACGTCATATCAGGGATAGTACTACCCCAACGGTAGAATCAGCAGTGGAGCATACGACCTGGGCCTCCCAACAACGAATCCTAGGAATGGCGGTGGCCTAGAGTGACAAGCCGGCGCATACGCAAATCATTTGTCGGTAGGGTTTAAGAGCTCTCTGCCGCTTGAAAGGAGTAATGGATTACCGATATGCTTTCCAGTTGCGCTCGGCACGCGGTTTTGCTGGTACCGACCCTTGGATGGCGAACTTAGTCGAGTGTGTAAGATTCATTAGCAAATTGTACTTTGGAATTGCCCTGAATCTCAGTCAAATAGGCATAGTGCTGGCTACGATGAGGCCTTCCAGAGTCTGGAGGCTGGGCTTTAACCGGTGAGTCGACAACTAAACGCCCCGCGATTCAAGCGGTCCTAGTGTCGCAGTAGCGCACAAGAGGATGGCCCGTTCATTCCGTCATACGCCCATTCCTAGGTCGCAACGTAGAGTTACATATACCCCCGCGAGACGTGTCGGGTGCGATCCGAGATTTTCCTGTAATTACCGGCATAAAAAGAGCCTATGGAGACCGTCGTTCACAAGATAGACTGGACCGTAAACTCCAAAACCCTAAATAGATGAGTCGACTTAGTGGCCACAGGATGGCTAGGTACAATACGCGTGCCATAGTCTCAGTTGCAGGTGATAGATGAACCGGAGACCCGTGTTTTGGTCAAACACGTCCGGCCGGTTCGCGGGTCCGATCTCAGTATGTTCGGAGGTATAGAGCTTTCTGACTTAACGCTTCCCAGAAATAAACGTACCTTTACGCAGTAGCTAAACTGAACTTTATAACCCACCCCTATTTCGATTCACTCGGCCCACGAGACTTCGGTTACCAAATGTCCATCAACAGCGTGGTGCCGTAGGGTCGTTACGTTAGGACGCTGGGCAAGATACGAGTGACTGAGCGGTCAATGCGGACTCTTGGTGTGCTGGTGTCATTACATACCGATGGTTTACAATGTCCAGGGCGCTCATCCGCCACAGCAAGTACATCAGACAGGCAAATGTTGCTGCCTCTGCGGAATTTTCCGACCCCTTCGTGCGGCCCGTTTCGAGACGTGAACACATAACGCTGCGACATCGCGAACACAGCCTCTAGAGGCTTGGAACGATTAAATGTTCAGGTGCAACAGATGGTAATTACCTGGAGCGGTCGATACGCCTCTCCCTGCGGAAAAGCAAGCTAGACAGAATGAACCTGCGACTGAATTTCTGCGATTGGGACGCAACAAGCCCTTCAGGACGTTGCATCCTAGGAGCCGCTCCACCGACGTCCTTATCCATTTTATATTTCCTTTGCTGCTTGTACCGTAGCACAAAAACAGCTCTAAGGCTTAAGTTCCCCAGCGATCTGGCGCGCCTCTTCTTCATAAATATCCAAATTCATCCGAGTAGCTTATGCAAATGGGTCACTCCGAGTAACCTCAGGGGAAAAGTTCTCGGCTTATCGTCCCGGCCTTTTGGCCGCGTGAACCAGAGCGTAGCCCGCCGCTGAAAGGAAACCGCCTGTCTCATTTAGATTCCCCTTACTGAGATACGGACTGAAGCAGTTTCCGCCCAAAACATTCTTTTTCGGGACAGAAAACATAGAGGGAGGAGTGACCAGGTCGCACTCAGATGAATTGCGTTCGTCATCGGGCAGGGGTGGCAGGAGATCGTGTTGTTAACGGCGGTCTTGGTACGCTTACGGTTTAGGTGAGGCTATTCATGTCGAGTATAGCGGGACTGTG
AGGCCGTGATCTAACGTGCGATTTGGTTGTCTTTGTAAATGACGTATGGTCGGACCATACCTTGTCTTAACGCCGGCGGCGCGCGCGTAACAACCTTTTGCGACAAGTGTGCTTACTAGCAAGGAATCGCACAACCGAAAAAGGAAGAAACGTCGGCGATCCTTGTACGTCCTCGAATGAGCTATCCTTGCTTTCACTGACCTCTACAGGTGCTGAAGTAAACTGCCTAACGCTTGCGAACCCCGCAAGGGGTTAGGGATAACTCTAAGCAAGGACTTAGCGGTACGTAATGATGACCATACATTTAATCAGATATTTCGTCGGATACCGGGAACACACAGGACGAAAGTTATGTCATGAATACTGGAGCCCTTGTCGACGTGCAGGCTATACCTGTTCTCTTTCTATTGGCCCGTGTTCGGGTTGCGATTCGACATTGTTATCAGAGCCATATAAGCCAAAATCGGGTTGCGCACTGCCATCTTTGTGCTGCATGACGAAAACCTATACGCATATCCGGTCTTAGAAGAGCGACGGAATAGTGTAGCGGGTTGTTAACCTTCGACCCAGTGGTCTCATAGAAAAATAGGATAGAGTCGCTGATGTCCTTAGTGCATCCTTGATTAATAAGGTCGTCTTGTCATTGGAAGCTGCGGTGACTCTATATGCCCAGTAGCGTGTCCACTTACGATTGGTCACTCTGCAGCGCAATCCAAGTCTGCGACTAACCTTCTAGTCCAGACTTGCTGGGCTTGTTCACAGTCAGGTTTAGGCGGCGTCCCCGCGACTGACCTTAGCCTACCGGACTTGACGATGTAATGCTCTTTCTAAGCTCCCAGTCGGGCCCAATTAATATGATAATTAGATCGCGATGTACTCCACGAGACAAAGTACCAACTCTGTTTCGGCGTACCGTAAGTGCAGGACGACCCAGGGGAGCTGGGCTATGCGAAGCTGCATACTGACAATTATGTTGAACGTGTCTTGATGACGCCGCAGGAAGCGATATTCATATAGTACTGAACAATTTAGTTCATTAACTCACTCCGGCATTGTAGACTAGGAGGAGACCGTAGTTGCTAGAGTTTGTAGTGCCCCAACTACACCTTCGACTGCGCAGCCCATTTTGGGGCATAGAACCCATCCAACAACGCGAAAACGTCATAAGCTACACTATGCCAAGACATGCGGTTCGTAGCTCACGGCGTACGAAACTTGAACAAGGTGCAATCGTTAGCCGAATATAGAGGGTTTGGTACTGCGCGTTATACAGTTGCGACAGCGCCGAGCGACTGCATTCGGGGTTTTTCCGTGGTAGGCTGTCAACTGGGGTTGGTGAACCGTAGTAAACTCGGTCTCCTGATCATCTTCATATCAGGGGACTACAACCACCTGACGGATTAAGGTAACACTGTGTTTTATTCCGCCGGCCTGCGCTGATTGCCTTCGGAAAACTGCAGATCAGTATATAGCAACCGGTATAGCCCATCAAGGGCTACAAGGGCGTGTGCCCATGGCAAATCCAATCAAAGAGTAGTTGTCGTGTTAGGATAACATCAATTCGAGACCCCAAGCAAGGTAGGACAGTCCACCCTCTATGAAGCCGGGTTTCCTAATACCGCAGAAAAGGACCGGTCTGGAAGCATATCGACGCTACGTTGCAAGCATCAATCCTCGGGAGAGGAAGCAAATCTCGGTGTGAGTAGTGCAACGCACGTGGGCGTTCCCGTTCACACTTTTTCTGCCTTCGAACATTGTATACGGGTAGTGAGCTAGCAAGATTAGTACAATGCTCCCAGAGGTTTGAAATTGGATCCCAGCCACCGTTCTAGCATCGCTATATTACAGCAGTCATTCCGAGAACGCTAGGGGAGGACGCATGTCGCAATACCATGCGGTTCATGCTTCTCGAATACAGTCCCCAAGGCATTGGGCTGTGAATTGATGAAACACGACACTTGGCGTGTTGTGCTGGAGGTAGGTCCGCCCTAACCACTGAA
AATGCCCCTCAGACAGGTGCAACTGTTCGGCTTATTGCCCCCACGGTCTCAGCTTATACCAGCAAGGCTCGTTGAACCCGAGGGAATACAGATTAGCTAGGCGCGAACTTAGTTTCGAGCGTGGTCACGAGACGGAAATAGAATGCCTCGCTACTGGGGCGGCTCTGGTAAGTAATCACCGAATTGTCGAACTAGAATACAATACGACCGTACTCGGTCAGAACGCGTAGGTTTGCCACATCTCTAAAAGCTCTTTAGCTCGAAGAATGAGACGGCCTCTGGTCACTAGAAGCGACTTGTGGATCATCCCGTTCGAAGTCCCGTCGTAATTCTCTAAAGAGTTGGATCCTTTATTCGCGTGTGCGGCCACAGAAACCGAGATGGAGGGGACAATAACACGACCTGGTAACGCGGAGATCTAAGAGCGAGTTAATCAGGCGGCAGGCGCATTAGATGTTGGTCCTCTGGGTGGACTTATTCTGTAAAGAGCACGCCGCCGTATCACTGCTATTCCTACATGCCCTATAGCGTAGAATGCGACCAAAATAACTAGGTTGTGAAGCGATAGCATCGGAAATGAAACTGTGACAAACGATTGAACCGTAGGCCTTCCGTGCCGGGCCAGAACAGGCTTTGAGCTACGGTTATAGATACAGATTCCATGCCCAACATGGAATGACCAATCGCGGCGTATTCTGGAGACGTTTCTCCAGAGCCACGCAGCAGTTGGTAACCAGCAAGTGTCAAACGTACCCATATCCTGAGGATATAGTGGCTACTGCGCTCCGGCGCGGCTGTCCTTGGCCCTCCGGAAGCACGGAGAGTCCTACTTGAGCTAGTCAAGGCTGAGTAGATCGGTTGCACCTCAGCGGCGCGGAATGGGAAAGAATTCTCGGTTACTCGCTCTGTAGTTTTTTCGCCACGAGAAATCGTCTTCGTGTGCTAAGGGTGCTGATTGGAATATGCTGCCGATAGACCGTGCAACTAATTATTTCAGCCCATGAGAAACTGGTAGGTATGAGATTTAGCTAATTTAGTATCATTAGCAACGCCTGCTATCGGTATCGACGATACCTACGGAGCTGCGAGACCGTAACTGAATGTTTGCATTCCACCCTTAATTTGCCGATTCCGCTGAATCTTAGCGAAACGCACGCACACGCTATGCAGACGACGCTGCTCTAGCCAGGACCTCGACTCTTTGACGCAGAATGACCCTTAAGCTGCATACAGAGCTCCGGAAATCGACAAGGATTTCTCTGCCTTTCTGCCCCACCAACATGGGGTTGCGTATCAGCCATCACACGAGCTGACTTTGTATGTGCCATAATGATAGGAGTCCGGCCTCTAGCATTGGGTTCTAACTTCCCACCATGAAACCGTAATCCAAATCGGAATAATTGATAAAACGCCTTAAAGCCGGTCAGTTCCACAAGAAATGTTTCGACCACCGGGGCACCAGGGACGCGGCCCCTTCAGCGGCTTACCGAAAAAGCATCAAGCACGTACGGCTCAAGGGAGAGTGTCCAATCACCTATGACGGTGATAGCCTCCCAGGATCCCTTCTGCCCCGATAAGCTGCTACTAAGGAATCGGCGAAGCCAAAGCAAGTGATTTGGGGGAGACAACTGAGTTCGGGCCCTATCATTGTCTGCAGATAGCTTGGTGCTAATCCCCATACGAAGCTCGAGCTAATAGACTCATCAGGTACAACCCCCTCCGGGCCACAAGAGCTAGAACCGGAGCACTCCCCCATGATGGGAAACAATAGAAGTGGTAGACTTTACGAACCCAAAAACCTTGACGAAATTTAGTGCCGACTAGCTTGGTGCGCTTAATCTTACGGTAGCTTTGTGACTAACTAACGGGACTGGAAGAATCGTTCCTATAATATCCGCGATACTTACGGTCCTCCGCAAATCCTTCGCGAACAACCGAGAACGCAGGTACTGTTTCGATGGAACTCGTTAGTTAGCAAATCAACCGTATATGCAAGAACA
ACCAGTCATCTCGGAACGTATTGCTAAGCTAGCTTCATCCACTAACTCGCCCGTCAACTTACACCAAGTTTAAAACACTCCGCAAGACGCGTCTTAAACGTAGTAGCAGCGCCTGATCAATACGATTTAAGCTTAAACAACTAATAGACTACTCCTGTAACTGAGAGCCCTGTGTCTCAGGTAAAGCACCCATCTTAAACGGATGTAACTGACTTTTTACAGCGGCGGCTGATTAGGAATGCGATGTCGCGGACAGACTACTAGAAGCCAAAGTCTAGCAGCCCGCACGTTCAGCGATCTTGGGCTTCCCCTGTTTGACGACGCTCTCGCAACAGATAACCAAAAACCGCCTGCACCGTTCCAGAACACCCTTATAGTAACGGCAGACGTCCTTGCCGAACAGAGAACTCTTAGCCTCACCGGTGCAACTTCGACCTGTCATAATGCAACTATGGACCTGTGAATACCGTCTCGAAGGTCTCTCCTTAACACCGAGAAGAATTTAGACTTCGGTTTGTGCCACGCCTGAGTACTGTACATTGAAGCACGATTGCAAGCTTGCGCTCGCGGTTACTTCCTGAAAACTTACGGTGCCCGTTGGGTTAGCAGTCCGCCTCCAGACCGCCTGGATAGCGAGTAGTCTCCCCTGTTAAAATGTTATCTCCAGGTAATCTATTTTCTCACGATTTAATATGCACTAGACTCTACCAACAAAAGGGAACCGGACGACTCGCAGGAAGCTTAAACCCCGTTTAGTGGTTCATCGCTTCAGAGCGGCTGAAGTCGTCAGGCCTACTTAAGACGCCTACAGTGATTTGCAGAGGGCTACCAAGAAACCTTGATTGTATGGACACGATTCACCCGGATAGATCTATCTCTTCCTCGAGAACTAAGGACTCGGGGGGCACCATATTAATGCTAAGGTGAGGTTTAACTCCTATGTGATGAGAGCGGTACGAGCTGTTCAGATATAGGCGGGAAGCCGGTGAACTGGGGCCACGCAGCTGTTCTCACAAAGGCGACACGCTCTGATCCGTGGCATGCTTGTCGAATCGGCCAATATCCGCTCTATCAGTGTTCCTAACTGGCGGAACAACAGTAGCACAAAGTAAACCGTTCAACAACGTTCCCCACGACTTTACGGTGGCGAGCTCAATTGAGTTCCGCGGCATTAAAAAACTCCCACACACGGCCGGCTGTTTCTAACGCACCGGCCTCCTGTTTTCTTAGCCGCTATGAGTTTTATCCCCGCGAAGCGCGTGCATATGACATCGAGCAGATTGTTCTAAGAACAGAAGGTAGTCCAAGACACTGGCCTATTGAACCAGGACCCACTGAATGTATCGTGCAATCTAAAGTTGATACAGGCGGGGGCTAGAAGTCTTAAGAATGATTAAATCGTTCCACATTACATGAGCAAATATGTTTAGGGAACACTAGCAACCAAGTATGCCGCCTCCACGAACAGAACTATAGTCCCTATCAATGGTCCAGCATGTCATACTTGACTGGACATCTAGCATCGTCACCATACAAAGCGCCAAGGTGTTTCTGGGGGACGTATTGTGGAAGGCGTCGCCTGTGGTAGATCATAGGATTCAATGTGCCCAGCGCATACACGACCGCCTCACGGGCGGCGAACCCGGGGTGAAATTGCATTCGAGCTCCAGAAGCAGCAAGCTGAATCGTCCATTTGAAGAAGAAAGACACTCGCAGCTTAACCTTCAGCGGCACTTCTTAGGGCCAAAGCTGGGTAGTTGGAATCAAGGTTTCGAGGCATCCAGAGTAGTCCAAGTTGCATTATTAACGGCAGCCCAACTATGTCTAGGCTATATGGTCAGGATTCCGTTTGTAGAGTCACAAGGCTCGCCGAGCCTCAAAGTACACCGTTGTTATTGGATGGACTTCAAGTCCACACGACATCCCTGGTAGACGGCCGGGCGATAGAGGAACTCTCCAGCGTCCGAGACCCAAACTGAGCGAACCTGGGCACAATAACT
ATGTCGATCAGGTGTAGTCCGTGATGAGGGGGGAAGGGTGCGTGTGTGTTTTTAGGCTCTAGAAAATCGTTGATGCTTATGTGTGCGACCAAGGGAATGCTGGATCACCATCCCAGAAGGTAATTCACGTCCCCCAGCATTTACGTATTCAGAGGATCGAGAAAGTGTCTGGAACAAAGCAACGCTAGCGTTAATGGAAAAGACTCTTGTACTGCAGTAGCCTGCGATGAAATGCTCTTCCTGCCTGGTGCGGCCAAACTGAGTGGTGCACCTGTAGACCGTTAGGTGAAAGTAAGATCCAGTTTCACGCTCCGCCTCCAATGCATCGGCCATTGCATCAGCGACACACCCGAAAAAGATCAAAGGTCAAGGGGGCAACCAAACGAGCTGGCGCAGAGATGGCTTAATCTCATTTGCCGCCTGGACCACCACCCTTGTCCATCAAATGCCTAACCTTTGGCAGGTGACGGTTTATTGGATATACAGTTCTTGGTCGGGCAATTAGCTATATAAAAGTAACCAGATCCATATGGAGATTCTCACTGAACGGCCGTAACACACCCTGCTGCTCAAGTATATCGAGGGAGGTCTGCCTCGCGGCTCTCGTTCCCTATTGGACGCCCCGCAAAATAAGGCTTAATCCTACCGTAAGAATTAATATCAGACTCTTTGACTCTGAGCTGCCCTGGCCGCCGGCCCGCACGAATACGACAGTGCTTTACAAATCTTTCATCGTCAGACAGCGGTCCCTGACAACGGGGTACCAGAACCTAACAGCTATTTGTGTTACTCACAAAAACTAAAACTCTTCTCTCCGACTTAGTTTCAGGGAGCAACTATGGCTCGGGTAAAGGTTAAATGATTCCATATACCGTACTGGTGGATCACGTGAGGGCCCAAGCGCTGGCCAATTCGTCCTCCGTGTGAGTCGTGGCTTTCATCGACGTAACCAGACGCATATCTTAACTTAGTAACCGGCCGGACTTACAGGCAGGCAGGGTAGTGGTAAACGAAAAAGGCGCGTAGAAGCAACGCGTGGCAGTAGGGTGATACGTCTCTGCCTAATCCTATGATCGAGTCGGCTCAACCGGGCTCAGATGCTGTTGCCGTCGTACGCGTACCTTAAAACAGACACGAAAAATGCCGTATCTCAGTCCTCCCCTCCCGGACGACTTTTGCGCAAGTCGATCGGCATGATATATAATAAACTCGTTTACTTCTTCTTACGAATGTCCTGCTTCTTAACAGGGCTAAGTGTCCCGACCGGCAATCTTGAGAGCTGGTGCGCCACCCTTATAGGCCTAGCGTTTGTTGGCTCTACCCCGCGCCGATCACCTATAGGGTGATCTCCGACCTCTCCTATGTAAAGACGCCCATCATCTATCTGGAGCGCAGGTTCACGCTCGAACGTAAACAAGATATATGCGAATGTCCAAACCTGTCTGGCGCGGAGCCCGCAAGATGAGATCCGTCGGAGTTTACCGTGATGGTAATCCCCTAGAGCGGAAACGGCCACTGGGTAACTTGTTATCGTCTTCGGACCGTTCAAAAGGAAAACAGCACCGCCTCATCGTATAGCTCGATCGCGCACTGTCTGTCCATTTGATCCCGAGCTCGTACTCGGCTTCCTTCAGCTGAGATTCTCGGGATAACGATTGGTTAGCCGGCGGGACTTAGCAATCACGCGGTTGACAAGACTGGAGTCTTGCAAGGATGGGATAGAGTTAACGGTGTTCGTGCTACGCTCAATGCAATAAGGGCATCAAAAGTGCCATATAGGCCCAATTCTAACATAGTTTGAAGCTGCTGCATGGGGCGGCAGAAGGAGCGCTTAGCATTGGGGACAGTCGTGGTTGTCCCACCGCATGGAAGTGATCAACTCATACTTTCTAAATCATTCGCAACGAGATTCTTTTGAGCTACATTTGGGAACCGGGAATAATTTAAACCTTAAGTACGCCACGCCACTCGGCGAAAGTACACGCAGCTCCCCGTTCGAT
ATAATGGCAGGGGAGTTGGAATTTCTCCGGACGAACGGGATTTTGTAATCAGTGTAATGGGCAATTGCCGGCGGCATAATGCTGCGTATAGCTACTTGGTCCGCGGATTGGCATCAGAAACCGGCAGCGGGGCCGAGTACACTAGAAAGTACAAGGTGCATAGTAACTTAAGTATGTTGCAAGTATATACGGCACCAGCGCACTGTCTTCTTTAGATGTGCGTTGGGGTCCACCGACTGACCCCAGATTACTGAATTCAAGCCTCAGCTGACGCTCCATACCAAGTCCCGGATCCTAAACTATAGGGACGCTATCTGTTACTATCATGATCGAGGGGGCCGATATCCCCCGGTCCTAAATACGCTTCCTCAGGACTCTCTTTCTTAGTTTTTACGTGGACAGAGAACTGTGCGAAAAGCATACTAGGGTTCTGTCTATGACGTCATATTTTCTGCTTTCGAGCGGCCCAGGCTGTGACCCAAACATACTAAGACCCCGGATGGGCACCACATCGATCCATGGTGATAGGGTTCACAAACGGTGGCTCTTTTCAAGTTCTGTACTATTTCCAAGGCACACCCCTCAGACACCCACTTTGCGTTTGTACACAAGCGCAAAAGCGAAATTATGCGGCCAATCATGGTGGGGGAATATCCAGTATTCTTAAACACCAGGAAATTCCCACGTTCTGCTCACCAGCGAAGCCTAAACTGGACTGGTACCTAAGCAATACGGCTTTCCGCTGAAATCTTAAAGATTCTACAGCTCCGGTTGCTCTGCGATATCAAAGCCAGTGGCGGGGCCCTTATTCTTTTGGGGTTGACCGATGGCTATCATGCGACACGTGGAGCACTTGGTTAATTGGTGACAAACTATACTGTAAGTCCCCAACGAAGACCCAAGCTCTCGAGATAACTTTGTGGGTCAGCCGATTAGGGTGGGTAAACTCTCTCGGTTAGGTCGTAATATGGATAATGAGTTTCGTAAAATTAGAGCGCCTGTGGTCGACGGACCCGGCACATGTTTGTCAGTACCTCCTACGCGTTGTCGTGCCTCTCGAGAGTCCACCATTTAGTCTCTGTGCCCACTGACTGTGCTAGCAGGTGGGTAAAGGGGTAGGCAGGAGTCTATCTAAACTGGCGTCTCCGTTAACCGAACCTTGGTGGACATTTCTGGATTTCGCACCCTGTCACAGAGTCCTCCCCGATGTCTGGTGCCTTTCTTGTCGCTTCCTCGCTTGACCAACTAGGCGTAGATCCCCGTCATTTGGTATAATTACGCCTCGGTCTTCCGGCCTGTCGAGTTTTTAAGCATTAATCATATTGAACAATCGCCAACGTGTCCTAGCTCTGCAAGCACCGTAGCTGCTTGAAACATCGTCTTGCCGGTAAGTGCTCCTGACGGCATCCCGAGCTATACTCCTTTGGATCAAGGACTATTCGAGAGCAGGGAATAGAGTCTTTCGCAACTTGCACAGCCAATCGGGGAGGTGAATGGTCCGGGAGCTTGGCTCTGGAGACTATTTTGAGTGTTTCAGGAGATTTACTAAGCAGCTAGAATTATGAGGAGATTCCCTTGTGTACCTTGCACTCCGACCATTACACTGGATACACAGGCGAACTCTCGACCGCCAAGTTCGTTTTATCCGCAAGAAAACGTAGCGTTTGAAGAGGACATAATCATAGCCTGTTGGCATGGTGCACAATTGAAACTTGCTAAGCCTATCCGTACGTCTCTATTTAATTTGAGGGGACACGAAGGACGTGAGGATAGGCAGATGAGGCCTGCCGTCGGCGACGGAGAATCGATCTGGATAGAACGAGTAATTATTGTGTATGTACACCCTCATCACAGAGTGGTCGGACCTAAAGGTTCAGGGTTGCAATGTTTCTGGGCAGTATGGTACATATGGATCTCTCAATGTCTTACCGCTCACCACAGGTCATGGAGCCCCCAGGACAAGCTGACCGCCTTCCTTCCACTCATCGTGTTTTGAGGAC
GGGGAGTACGTTGGGTGGCCACTGCGTTAAGTCAACAAGTGAGATCTCAAACTTTTATGTAGCTACGTTGGTCCGACCAGAAGCCAGCTGCACTCCAATCGGACTATAGACGTAAGCCCAAGCAACCATGTGTTACTGCAGTTAAATGTGCATGCATGGGCCCTGACATTTCCTCGTCTCGGTATGTGCCGGCAGATAGAATCCTCAACCGAATTGAGCAGCAAATGACTCGTTCTCGAACTGAATCTACACGTTATCCAAGGCTCGATTCATTTCTCTGGGAATCATCACCCTATCCGCCGTACTTTGGCTCACGCATTAGGTGTCCCTGCAACTAGAATCGTGCACGCCCGCACCGTAGCTCAGCGGCCGCCCTACCAAACGTGATATGGCCCCCTAGACGTGCATGTTGGACTGTCCAGAATTGGCAAAAAACAGTTTGGCACCCAGAAGATCGTGTTTTTCGGACTTAACTTTATGCTTACTGTATGTTCCTCTGCCGCGTTCTATAACTAACATGAGGCAAAAGGTTCACTGCCGCACATCCCATTCGGTATATGTGGACTCCGCAAGGCATGGTGCATCGTCCTTGTTAGTCGAATGATCGTGGCTTTTCCTCGGCGTCTGCAGCAAGACCTTACCTCCATACCGAAGAGAAAGCAGGCTTTAATGTATCGCGTGAGGCAAACGTATCGATGGTGTGCTGAAGGAGGTAACAAAGGGGAGTAGAATTCACAACCTTGCCCTACGTTAGCACCTTCGGGTAATAATAGAATAAGACTACATCGTGCATACAGTTTACCACATCAGCACCGAACCGACGTAGCGCCTGCTAACATGCTTACTCAATTAGGGATACCGCAATCGCTAGTGACGTGCACCTTCGCGGAGCTAGAATCAGTGTATCTGGTTAGCGGTTTATTGGACCAGACCCGCTGGAAGGGCGCGCTCTTAGACGCAGCTCAGGTGTATCTGGGGCTTCGCATATCACTCAAGCTACATCGGAGTTCATCAAATTATACTGCCCAACTGTATGTGCTGCGCGCGACACTAGTGCTCGTTTGAGCCAGCGACACTGTATTACGGCCCACCTTTCACCAGGTTTGGGCACTTGCCTACCCACAATGTTGCCCGCTAGAATGCTCCATAATAATGCAGCAACACTTTAACAGTCTCACAGGTCTATGCGTGACGCCGGGTTCGGGGTTGATGAGTGTATGAAGGCATCACTGCTGATTAGGGGCATTTAAATACTACAGTTGATTATGGGATAGTGTATGAAAACTGGTCAATTACGACTGATAAACATGTCACACTACTGCGATATGTTTGTAAGAGTCATATATGGTTAAGGGAGAGGCGGGTCGCTGACGTATTTGGCTATGTAGTACCTTGTTTGCATGCCGACATGTTTAAATCGGAAGGTGGACAGGAATGCGTCAATTGGGGCCAGATCAGATCCCCGCCTACGAACGTGTCCGCACCACTTGCTTAGACAGGGTCGGATTGTCATGACCAAGTACGTTCCACTCAATGGGGGGACCTATCCACACACTTATCACCAAACGAAGGGGTATCAAGCAAGACACAATGGTTGACGCGAGTTCGGTCGACTAGGAACGATAAAGGCCAGGGCAGATCAGGACCCAAAAATATATAACTTTTCGTCAAAGTGGGTGTCCCAAGGCTGGACGGGAATCCACCTTCCATGGTTAGAATAGCGGTCGCACGGAAGGTCTTGTATTCTAGACTGGCGGGGTCACCTTATTTGTATCATTAAGCATGTATGACGCGAAGACCAATCTAATCCCGGCCCCACCCAACAGCGAGGTCCTGAAAGGGGCTTTAAACATCGCTTGTACATCTTAAGAGTACGCACACGTCGTTATCCCGTGGTGTGACAGCGCAGAAGATATATGTAGCGCGTCTTATCTGAAGCATCCAGGTCCCAATGGTGTAAACTGTTGGCGGGGCGAAACTCCCGATATGAGGCTTTCGAC
TGACGAGGAATATTCAAGCAAAGATGCGAAGAGCACTAATTCAGGCGCTTTCACGCGCCCCTTACACCCACTAATTTTTCCACCCTTGGAGTTTAGTGCACATCCCTAACGAATTCATCACCCACCGCATAACGGGCGTCGTCAATCACAACTGTTAGGGTCGGACAAAGTTACACTCTGAGTGGCTCTGCTTGGTTAGTACATCACAATGACCCTCTTAGGATTGAACGGCCTCTTAGCTCAGGTCATACAACCCCCTGATGCTAGCGGTGGGTAATTCACACTCGTAAGGGTGGAATCTAGTAAGAAATTCGAACAAAAGGGGCAGGCGCCAGGATTGGAGGTCCCGTGACAAAGCCATATTCGTGGCGCTACAGGCTGGGCGGCAACTGATAAGCCAGTTGCGTCACTCATCCTTCCACTCCTGAAGAACTTTGAACGCTCGACTTGTTACTTACTCAGAAATTAAATAGGTGATTTGCGCAGAGCACCGTCGCGCGAGGTACGATGACGAAGGGTCAACACTGCCCTTGCAAGTAATTAACCGCTAGGGCCCAGTACTCGCCAACATCGGGCTTACTTGAGGTAACCTGACACCAGAAGTGGGTCAATTTGGTTGCTCAAGCCATCAGATCCGCTCGTGCCGACGTCTGGCCAGCGCGATAAGTAGGGCTGATAGTCCATATGGGTAATGGATTTAGAAGTACAGAGAGTGTGCAGACTTTATTTTTACCATTTGGTCACACCGATCCGCTGTTGCTATGACAGATCGATCTCCTCGACAGCACTTTGGCTTGCCAGTCTAATGGGTGACTGGGGTCTTTTCGTCGAGGTGTACTCAGCTAGTACGCATCGTCTACGACCATTGTCTTGAGCCCTTGGCAGGATGACAGAAAGCATAACATATTTTCACGGCTCTACCGAAACAAACAAACAGGGGTGCTGTCTCCGTCTAGCCCCGGCGTAAGCGGGAAGGCAGCATCGTCAAAATGGCGTTTGAGCTGATATCTAAGCTCTTGCAGTTTACGTGGTCCAGTCCTTGTGTGAGATTTAAATTAGATGGGGCTAAGTTGCTACTTGCAAATACCTTGGGTTGCTTTGACCCCAGTGTATCCCGGTACCCAACTTACTGACATGGCGCCGCCAGTATGCCCCGCGCGGTCCTGGGCACATTAGGCGTCAATGCGTAAGGCGTGGAGTTTACTGCAAATCGATACGGTATCCCTGGACCTCGGCTTCTAATGTTCCAATCGAGGAGAGCTGTAGCTGGCATCCTTTGCTTAAAAGCAGCTAATCTGCAGCGCATGACGCGATGTCTTAATTTCCTACTAAGATTCTATTGCGCCATAGCTTGTCAATGGTCCCGGACAGTATAAAACAGTACTCTCTAGAACTAGACCGGTAGGGCCCTCATGGCGATATGACCGTTCTCTATTACAAATATCTTTTTCGTGACTTTCTATGGGGTTCTTAGGGTGTCAGACACGAGCTTGAGACGATGAGCCAAAAAATACATGTAAACTCAAGTGAGTCGGTCTATTAATGTCCGTCGTTGCAAACTAATTCCGTCGGCCCCGCGGGGGATATCATCTCATCTTTATTGTGGCTGTAAGGCAATTTCCGGTCCCTTGTCACAGGCTGTTCTCCCGGTCGCCGCGCGATGTAGTTGGGACCCCCTACTTACGTTGAGAATCTTAGCTAGCAGACGTAAGTTACGCTGACTGGTGCGCTTAACAATCCCTTTTCCGGGTAAGATCGTCTCGTCGTGGCCCTCCTGCTCCAACTCCCGTCTTTGGGGGCGATGCTCGTGTCGAAGGGGCTTCGTCTTGCATGCACCAACAGGATCTCGCGGGCCGCAATATCTAACAAATTGACCAATTTCGTTAAGAGCTAACATCCCCACGACAGCAAATGAGTATGTTTAGGAGGACCTGCTACCGTCGGAATCACATTTCCAAGTATCCGAATATTGGAGGAAAAGGGTAAGAACTCCGTCCGAC
GTGCGATGATGTCGCACCGCGTTAGTCCTAGGATTTTGAAACTCTGTCATTTTTTTAGGCTAAAACTTAGTGCATCCCGCAGGATCAAATTTGAGAGTGGATCAGAAGGAAGCGTGGAATCAACGCTACTCATGTTGGTCGTGGGCACCATTAAGCAGAAGCATGCCGATATTCAATAATGCCATAAAAGCGCGTGAGAATTGAGCTCCTTTAATTCACTGTCACTAAGGTGCGAGCATGGTATTGCCCGAAAGCACACAAGGTCCAACGCACGGGAATCCGGAGGATAGAATAGCATACAAAAACGCTTTGGTACTTCATGGTGCCCTGTTGGGGCTCTGAAAATGAAGAACCAGCATGCATACTAACTGGTCGGCCGTCTGCGAACTTCGCCTTCTCGGACTGCAGCGCCACACGTTTAGTTGAAGGCTTACTATAGGCTCGTACATACGGACGATTTAAATCCATCCACGTTTGATTTGATGTGACTCGACTTGCAGATATTCAGGGAAATGCCTTTTCGAAACCACCATTTGACCACATACACATTTCTATGCAAAAAACGCTATTGAGCGTTCCCGTGCATATTTCACCTGGCCCAACACGGTAAGAACTTCGCCCAGACTTCGCCGCCATATAAAGGGTGTACACGTGACTAATTAAAAAAAAAGGAACTCGAGGGAGAAATCCGCAGTAGGGCACCCAAATCCGCTCGCGTAGAAGGTTTCTAACGACGACTCCATCCATGCCCACAACGAACTCTGTCTCCGGATCCATCGGCTCTCCGTAAATAGATCTGGTTAAAGTTCCCGAGACATACCGCCCTGCAAGCGACCGCGAGAGAAACATCTCCCCAACTGAGACGGAATGCCTTACTGGGTTAAATTTAAACGTTCAAAAACCTGTTCAAGGTGAGATAGCGGCGGTAAGCTAGTGAACAGGTTCCACAACTTGGGTTATGCTAGCTTATTCATTGGGCCTCAGATAGCGGAATGGTAATCCTGTACACCCGACCGGCACTTACATCGCGACCCATCTGGGTAGGAATACGGGACCGTCTTGGGTATAGGGGGACCCAGAACGCACCCGAGCAGGGAGAGTAGCCACTTAGGTAGTCGGGTGATCATCTGGACGACTCCAAAACGGATTGCGGTGGTGGAGGGTTACCGTTATCGCAAGCTAGCAGAGGCCTTCCGTCTCGATTAAAAAAGCTCCACTACACCTTGTTACCATAAGGAGACGTTCGGCTACTCCCCGTGCGCCATAGCAAGTATTTACTCCTAAAGTGAGGGGACTGCCCCCACGCAAAATATTATGGATACCTCGGCCGCGGGTAGAATGACTTATGACTTGGTCGTTCACCATTATAAAGTAGTTATGGGGTGGATCTGTTTATTATCACAATCCTGAGGTGGAGCTAGGTCAAACCGCCAAATATGGTATTATCACGCGCAAGTCGAACAAATTCCAGGCGGATATCATCTCGAGCACAGACGGCACAACATTATTACCCCCTCTGAGTATACTCCCACGTGAGGTAGTGCCTAGACTACTATCCATGAAGATGCCGATGGCGCCAGCTTATTGGACGCCGAGCGGGAATATACTGCGACAAGGTGGCACCTAGCCCGACGACGCTAGGAAAAGCCGCCCATTATTGACCGAAGCGCTCACCGATTCGTCACGCGCACGCCGGTGGGCCTCAATTATATTATAATCCGACTAAAACTACGTAGATTGGCTGGACGGCTGATCCGCGAGAGGTGCACTTCTTACAGTAACTATGTGGCAGACGCGTTAAATTAGGGCACCTGGATCTGACGAAAGGCAGTTAAAATCTGACGAAACCGTCCCACAGTTAATCAAGGGTCGGGATATCAGGAGTAGAATCTGGCTTGTACCACTCCTTTTTACGGGCCGGAGCCGATTTGAGAGCGCTAGGCACCCTACCGACTCCTTAACCCGACATCTTAAATGCGACAACGTACTAAACTTGGATA
GCGACCACGGCCCTAGTCCGTGCTAACTTGGTTTTTCTAGGCCACGACTGCTGGATGCCTGGCCCTCTAGAAAACACGGTGTTGCTCCACAGAGCCACGCAGAGCATTGCACGACAGAGCAGTTTAACTAATTTGTAATAGAGCGCATGTATCCGATCGCAACATCACAGGCTTAAGGCTATTCGCCATGTTCTCGTGCCCTCCGCGAACTTCGGAATCTCGGTCAGCGCACGTACAGTCTCATTAGCTGGTAAGACTGTACAGGACACGCCGAAGAGCCCCTCGAACCGCATGAACTAGCCCGCTATTTGGATGGTAGGCCCACATTCTCTAAACGGGGCGTGTAAAAGCTCTTCATTTATCATATTAGGGTACCATGATTTACTTCGTCAACGCTAGTACGTTATGAGGTTTCACGGGGTATATCTGGTATAAGGTAGCTGACTGATAAATTTGGTACATGCCTTGTAGACTCAGACTTAGTCATTATCCGCGAGTGTAAATGTCTACGTGGCGCACGACCAGAGGCCGGGCGTGACGTTAGTGTAGAGAGTCCCGCACCCCTCTGACGGCCCCGCTATTAACTTTGTTTATTTGAGTGTGGGATACGAAACTAAAGGTGGCAAGAGTAAAGATATAGAAGTAGAAAACTCTGTGGTATCGAGACATACAGGACAGGCAGTCCCGCTGCTGGAGCGTCGCCTGCCCTGGGTGTGCGAAGATCTAGGGTGGTAAACAAAAAATCCCAGTCTTAGCCCTCATGCTTTTGTGAGGAGCTATTGCCTTCAGTGCGCTCATAGACCTCTCGAATGCACAACATCGTCTGGTGTGACGCTGGCATTTAACCAAACGTATATTCGTTCGGCCTATCAGCCACAAAGCAGGTTTCCATGAAGCACTCAAGGTCAAGGCTCTTTAAAGGTACTTCTTCTCCACAAGGGTATGATAGTTACCCCCTGCGAGAGTTGGGGGAGTCTTCCGGTCTGGCTGCCAGGACCGTAACGGCATGCGGTTCTGATGGTACCGCACACACAGAGTTCGAACGGGCATGCTAGTACGAGGAGCCTCCCCAGTGCCCTTCTATCCCTGAGGGTCGGCGCAGTGTGCTGATCAATCGATGTATCCCTTGCAATAAACGGGGGAGTCCTTTCGAATATAGCGCGAACATAATGGCATATCGCGTGTCGCTCACTAGTTTGCTTGAAACGTCTTTGAGGCATACGACGGCGGGAATCAACAATATACATTCAATCAGGTTCGACAACACTCCACTCGCGTTGATAAACCGTCATCGCTCGAATGTAATCATGCGGCGGACGGTCCATAGACCATTGGTGGGCCCAATTGATGAAAACAGATATCTGATCCTGCTCGACGCGATTAACGCCGACATTATCTAGAAAGGACCGCAACGACCACTCTACCCCTGTAGCACGTGAGGAGTGGTCAAGGTTCGATTCACCTGTCCTATGGCCTGATGTTGCCCTGATCCCCGGAACCTGTTTAAGATTGGTGTCCGCTAAAGTGGAAGCTTCGACACTATATCGGAGGTTATAACCCCACGGGCTGGAAAGGTGCATGAGGGACGTCGACTCCATAAGTATAGTCGCTGATCCGACAGCATTTTGCGAAGTTGCTGTAATCTGTAGCGACACGGCGCATAACGTCATTAATACGTCTGCACGCGTTACTGGATCCCTTGTTATCCAACGAGTAGTAGGCGAGGACGAGCTTCCTTAATAATAACGACAACGGCATATCTGGCTTTTGTGAATTAAACTAGGAGGAAAATACCGCCTAGTACAGCCTTGCCAGAAGTAGTAAGTAGAGGTAGTACCGGAACTGGGCTTTCCATGAACAGGATACTCCTCAGCCCGACGTGTTCAGGGAATGTACTGTGTCTGACGTTTTCGTCCCCTGTGTTTAACTTACCATCACCGACAGCTAGGAGTTAGTGCGGCAAATTTATAAAATGTAGGAAGGCGAAGTAACACTGAGGT
CCACTTTTAAGCTACCATCCAGAGGAGCTTCCAATCGTTCCCATCCTCGCCAAGCTGCGATTTTAGTCGACAAAACCATTCCAGAAGTACACGCACGACGTGTGATCAAGGAGAACTTTCAATCGTGTTCAAAGTAAGCGGACGAATGTCCGATGGAGTAGTGCGACCCTCCCCAGCTCAGCTGTCGCACACGAATTATGACGTGTACATCAGCGACTGACCTAATGCTGTGCGGTCGTACGGGGCCAAAAGTCAGGGGCACGAGGACTGCAGTGTACGAATAATTCCCAGGCACTACTTGGCACCCCCAAAAAAGTGACCATCCCAAGTAGTTGGATGACTTGACGGAAACTTGCCACGTTTCCCTCGATGCGCGAACGAATAGGCTCTGTTTGAACGTCAAGCTCAATCTACCACTATAGGTAATCGTATCTGGGTGCTACATAAGCCCCGGCTCCGTAGTTCAGGAATAATGCAGTGCACACCTATAACCCTTAGCATAAACGCATCCGGAGACACAATAATTCCTCGATCCCAATATAGATCGAGCGCTAGTGTACCTGTCACTCTAAGAATGTCTATTCGCTTTTGACTGTTACACCTACACAAGATACGAGGCGGAAATGGAGCATAATCGTCTCATAGATATGTCGCCAGGAGTGCATGGCTCAAGCTTTCTATTATCCCTGTGTCAGCTGCTAGGGAGAGTATCGCTTGTGAGTATGTTCGGGGAAGGTGACCAAGGCACTTCAAACGGGCAGCGGATAAACGGAACCGAACTCAGCGGTGAATCCGAAAAGCTACTGACTTGGATAGTATTTAGGCTATAGTCGCGTCATTCTTCTTGCATCCAGTATCACCGTGAAGGGAGTCACAAGCACCAAAGTCACGAATGTACTAGATAGTCGGGATGACGGGTAAATCACCCAGTAGGGTGAAATCTGGACTGCGACCAAGAGATCCGCTGACGGCCGGAAGTGGGTCACGGCTCCCCAGGTTGTCCCGCCCTGGTGGGCCTTGTAGTAAGCCTTTGCCAAAGGGCTTGCAAGCTCTCCAGCTCCACATTACATCTCCAAGATATTAACCCTTTCATTAGTTAGACCCCTAAGCTCGCATGAAGCTCAATCTCGAGCTAGCTGCGTCACAGCAATCACACAGCTGGAAGTTCCCGCTTACAACACAATTGGTCCCGGACACAGTTTCATTCGCACGCTAACACATGAGTTCGTCATCGAGCATTCGGACCATTTTTGTATAAGGAATGGGTTATTGAAGAGCGGGTATAGCCGCGTAATCGTGCGAAGTCGAGACATATTTTATAGCGGTAAGGGTACTCCGATAACATCATCATACCCATACCTGCTCTCTCACGGATGCGTTCCGGCGATTTCGTCCTGTTGGTTCTCGTGCGCCCCCGCTTGCTAAATGGTAGCAACTGCGCTTGAAGTTCTAACCAGTCTAGGCGCAGTTGTTGGGGAATGACCAACGTAGAACGCTCTTAGGTCCAACTTAGACTTCTTGACACTGCTCGATATTCAAAGGGGATCTCTGGCGATAATGCTACGTACTCCCACGTAGGGAGCAAATAAGCCCGATGCGAAGGGCCGTATCGGGCGCTCCTAAATTTTGCTGGACAGTGTCAGAGAGCCGCCTTCCTCGTGCCAAACCTATTGACGTGTGTGTCTCTATGTCGCGATACAGCGTCCGATGTATTCGCGTATGTAGTTCAATGAGGCTCGTCTTGAGCGTCGAGAAAAGTCCAATGGGCTTCTGTCTATAGACGATGCAATGTCTTTCCGTAGTCGAAGTCTATTCTACGGGAACATTTTCATATTCCGGCGATGTCTAGCGTGGATCGCTACTTTCACCTTATGCATAGCGTCCACCGTCGCGACTGCCCGCATGGCTAACTTCTCAATATGTTGCTAAATCTGACTAGGGACACCAGCATATATGGCGATGTGTTAGAGCCGACATTATGAGCATGGCAAATCTCTGAA
TTTCCCGGCCCCACCAACATGCTACGGCGATCATTGGATTTTACCAGGAAGCAGCCAAGTCTGAGATTATATCGTCTCGTCTACATCTACCATCTCGAGTAATAAAGGCTACAGGTTGTAGACCCTCCGACCGGTCTGGCTGGACATATATGACTCAGTGCCCCTTCTCCTAGGTCAACCCCTTTATATAGAACTTGTTCCCGAGGAACCTGGATATCAACGGGTCTTTACACATACTTAATCCGAGACAAGGCGGCACCATGTAGAGACGAGACTTAGGAAGGGTCACAACCCTCAAAATCCATGGCAAATAGATAGCGTAACCTGCTGTTCGGGACCCGATCGGAGCCCCAATAATCTCAAGATGGTCTTCATACTTGCTCCAGAATCTGGTCCCCACACTGGTTCATAACGGCTCTGACGGACGTACGAAAACTATCATCCACGCTGTTAAGAACCCATTAATCTAAGTGATGCTATCAGTTCCGCTGTGCATAAGACGTATTTATCTTCACTGAGATCGACATGAACTCAGACTGCTCGGCCCCTGTTGGAGTTGTCGCTTGCACTTCTGATTTTGGTACTGATCGGGCAGCGGCAGGTATGTAATTTGCATTACTGCCCTCAAGAAGGTGGGTACGTGGTTTAAACGATTATCTAGATTGGTTAATAGGGTTACTCAGATGTTTGCATGCTGTAACGTTTCCGGAACGCCGAGTGATATAGATTTGCAAAATCGGCGATGGTAAATGCCTGATGGGAGGCACTTACACACACGGAGCCGCCAGCCGAGGTGTGCCCAACTTCCTTCGCCCCTTCTCATTATGCTGCCGCCAATGGTGGGTACTTCAGTTCTGCATGCCCATCAGCCTCCCCACTGAATTAAACGATTAGTCTTCCTGACAAATAGTGTATTGCCCGCTATGCCAGCCGGCTCACATGGGGAAGTACGACTACATTTGTCGGTTGTATTTCGTTTTTTCACTAACACGATATATTCAACGTCGCGAGCCATTAGGCAAGCATTGGCCAGTTGCCAACGGCGGCCAAAAACAGGAACCAGCTAGTGCCATGTTAGTGTTCACAGGGCACTGTGAAATAAGGCTTGCGAGACCGAAGGGTCGATACATAGACTCACCATGAAAACACGGGTTAAATCATTTGTGCCTATGCTACCGGAGCCGAGGTTGTTGGTTCCGACTTGAGCAGTCGGCGGGAACGTACACTGTCTACTGGAGCCTTTACGCGAAGATCTGTTTACCACATCGCTTATTTCCCACATGGGATTCGCTGTTGGCGATACGCTGCATCTGGGAGCAATTACGATAGCGCTAGATGTATACTAGCCCGTGAACACATCTTCGGAAGATCACAACTACTTCAGACTAGCGCGAACACTTATTGACAGACATTTCATCTTGTGCCGGCATCTACTAATAAAGAGCAAGATGAGGGCTAATTAATAGGAATGTGGGTGTATTGACTGTCGGGAGTACACCAACAAGCTACTGTCAATTTGCCTCAGAAGGGACCCCACAATAACAAGCACCTAGATAGCGATCGACTGCACGGCGGTGTATTTCTATTAAAAGCAATAGCGTTGGGATCGTGCAATCTAAAGAGTATTTCAATGTACTCAGATAGGGTTTTGGTCACCGGCGTAGTTAGCAGATTTGTCCACGTGACCTTGGGGTCTTGGGCCCTGGAATAGAAAGTCATTTGCACTGTTCGATAGTCCGTATCAAAAGAAAGCTCCTGGGGAAATATCTACGAGAGAGCCAAAATTCATGAACTAGTAATATTATAAGTACCCAGGCTTCCTCTGTGTGCGAACCGTTTCAAAGCATGTCGACAACACTTTCAGCCCGGGTATGCTTGGCTTTGAAGACACGACGCAGAGTATTGAACGACAAAAAACATACAAGAGCTTAGTTCGGTCAAGGATCCCCACATACGGTGTAGGGAATTATCCCCAGCTCCTAGTGTCCCTTTTGTGAGAC
GATTTGTACAAAAACGTCATTGAACGCCAAATGGTGATGGCGCATCCGGCCCTATGACATCAGTCACTGGGATAGGTGTGCAGTAGCCGGAAGGATAGACCACTAAGGAGCGACGTTTACACCCTCCCTAGGGGCGTGGCTGTTTCTACATCGCCCCCACTGAAACTCGAAAAAGGGCCAGTTTTGCTTCCTACTTTAACTATCAGCCATGGTCTTGCCCCCATTTACAACCACGACGGTTTTCTGATTCCTTGTTTACTTGATACCTTGTCGGCCTCATGCAATTTAAATCAATGTCTTAGGGGTTAAGTGGCTTAAGGAAGATCAAAAACTCCGCTGACAGACGTTAAGCCCTGTAGACCTACCTCATGATCGGGTGTTTATATGCGTACTTACAAAACATTCAATTGGCTGACTCCGTTCTCCCTAGCAGCTTTTCCGGCCCATCTGGCCCCCAGACCTACATTGCGTCGGCGCTAACACGAATTCCCCCACTTTGACCTTAGTCAGCCTCGCTATAAATGAGCCTCTATGTCATTGCGACAACTGGGGAAAGCCTCACCTCGAACTCTAAACTGTGAGGGGTTAGTGATCGCACGCTCCTACCAGAGCGGTGGCGAAATTCCAACAACAGTATAAACGATGTGGCACGCTTATGGGATGATATGAGCGATCGCGTTTGGGGCCTTGGTGTTAGCCTTACGATGCAGGTTGGACCTAACCCGATTGTCTTGAGACATAAGTGGTGGATTGCTGACACTATAACAAGCACGGGTGGCCGTGCCATAATTACGAATCACGCAATATGCTGCTATGTCCGCGTATCCACATGATATTAGTTGCTTTGGAAGTTTGCGGTCGTTACACGTTTTGAGATAACTCCGTCAGACCAAGTCTTACATTAGGTTCGGCGCTAACGCGCCGACTAGGCGTTGACTCTGCTACTATGCATCATCCACGACCTGGAGTGAGGGACAGATTTTTCGAACTGAAGTTGATAGGTCCCATGTCACGGGAGGTATGCACCATCTGCAACACAGCCCGACTCCATCGTGGTGAAGCCCCTCAAAGCACTACGAACGTGCACGGGTTTGGCGTCATAAGTAACACTTTCGGAATACGGACAAACCTAATCCCAGTGCGGAGGTTCGATTGGCATATAACCGTCGTCGAGTTTATCGTACCCTAGACCCCGACCCCCGGGTTCACCTCGGTGTAGTGCACACCAGGCGTGACTCAGGTTATCGATGGGATAACCGCGTCTCTTTGACTGCGAGGATTGATTATTTACACTCTGGATGGTTGACATTGTCGGCTAGGTACTTCGGGATATACTGGAATCCAGTGTCACCTTGACATAGCGGCCATGAGGCTCCGCACGCCCAGAAGGCTCGGGCGCCGTCTTACGCCAATATGGTTCATGATGCTCGCGCAATCCAGCGTCACAGCGGTGACTTGGACGTTAAAGGCATCCAACTCACAGATTACTATAAATACAGAAGTTGAACCTCACCCTCCGTATCATAATTAGTCCTCCAATCCCGATTATCGCATATTGCTTTTCGCCTATATTCCATCAAGGAGCTTATTGTCGCTCAGAAATAATAGTTGTCGACTTCGGGTTGCCGTATCCAGCTCCGTCCAGCAAAATGAATGCATCGGCTCTTGCCTTCTCTGTGTGTTGAGCTGGGTCCACCACTTTTTGACCATCACTGTTGTACACAACGAGAGAGTAAAAGGGGCACACGAGACCTCGTTCACATTAGTCGCTTCTTTTCAATTATGGGTAGAATAGTGGTCATACGAACAGTTGAAGTCTAAGAATCAAGCCTCCCGGGGTGGAATGCGTGGGACGCCGACAGTTGTTACCCCCGTTAGCGGGTAAGCACCCCTCTGGAATCGGCAAATTACGTAGCGATCGGTCGTGCGCGCCTTACGTAACGATTATACTTTGTACGCGACGTATTCTTACCAGTTCCCCATCTTGTACTGCCGGAGGG
GAAAGTGAAGAATATTCAAACGCCCGGCACGGGAGTCTAAAAGGCCAACCAACACTCAACCAGCGCTGCATTGGTACAATCTTTTGCACGCTGAAGCACTGGACGGATGTCTGATGCGCGAATCTAGTACACTCACTTGAAACCGTCACTCTATAGAACCCGCATTTAATATACTTAGCGTTTGAACGTCGGGTATCCCGCGGCGTTCATGAGCAACACTGGGCGACCGTAGCTTTTAATGTTCGTCGGATGCGACAGTTAGTAGTGGTCGAATGATGTGATGCACGGCGCCCAGAGGTAACGACGGTTAAACGCCTTTTGTGAATGGTCAATAATACTAGTAAGAGCTCTAGCCATATGTACGTAATGGCATACGTGACGCCCGATCATGACCTGCCACGGCCGGCTTCGCGTAAGGGAACATCATTAAATCATGTCTTCTTCACCCCCGTGACATTGCCTCACTGTCTATTTTCAAATCCCTATCGGTTTTAATAGTGGCGGGTGGACGTAGCTTGCAACACGAGGTACTTGCAGCGTCGCGTGTCGTCTTTGGGTCATTCATAGGTTGCTATTGGCCAATTTTTTCCTGCAAAGGCTACCATAGGGCACAGGCTTAGGTGTCCAGGTTGGGCACTTTTTGGCGCTCCAGAACCGCTACCATGGAAAAATCCTTTATTCTGAAGGAGCTAAAGCCAGCCTAATTTTTGAGCTACATCTCCATATTCATACCTTACATTTACGACGTTTGATGCCAAGGGCTGCACTGCACCCGAAACATCTACCAGGTTCACGTTGGTTCCACATTCGATGGACTCCTTGTCTGCGAGAGAAAACGTCTTGCCACGCGACCTGAAGACTCTAACCGGAAAACTTCTTTCCCGGTTCTCGTTGGGCGTAATGGGGAATTCGCTTGGCACCTGGACCGTCTCGCAAGGGAGTTACTGACGGCCAGTCTTCTACAGCACCCCAAATGGAGCGTCCTGGACAGCCAACGTGATACTAGGTTCCTAGCTGGTACGGCACTGGCGATCGTTTGATGCGCTTTAGCATCGCACGCGTGTGCCATGTCCATAAATGACTGGCGAGTTAACATGGACATGTGATTGAAAGTGGGACAGCGGAGTTGAGCTTACCAACCCGAGTGTGACTCAGTTCACTGTTACCGGTACGTGAGCAGCAAGGTAGCAATTATTTGTTTCTAGTCAGCAGAAACGTACCTAAAAGTACTGCGGCTCAGAGCTGGGTCCTTTGCGTCCCGTGATGTCACGGTGCTTCGTATAACCACAGTGGTTCTAGCTTGTCTTAGTCTCGGTACACGAAACCATGTCCATGAGCGGGGGATTAAGTGCGATTTAGGACAGAGTTCACTGTGTCTAATTGGACGTTCATATACAGCCTCTGCACACAATCATGTGCTCGCTCATTTGTTATCAAGGGCACCCAATTAGTTGAAGTCGCGCGTTGAGAGTTTCCAGTGCCGAGGTCTGTATTATGGCTCCATATCGAGGTGATCTATAGATGTTGCGGTCGAGTATGTGTCGGTAAGACTAACAAATTCAGCTGAGCAGCGCAGATAAGGGAGGGTATACCCAAGGGAGGTCTGTGGTCGACTTGTGCTCGGACTCATATAGTATGAAAAGCCACGTAACTGCCCGACGTAGGAACTACGTTTGCGAAGAGAGATAGAGTACGATCGCCAATATGGCTCTCGGGCAGTCAGACTGCATGTCGTGTACTCCTGCCCAGACGATGGGGGTTCGCATGAAGCAGGCTAGACATAGATACTCGGTTCAGCAATATCTAGCGAGCCGAGACACACACTGAATGTCCGTGCGGTGCCGCCTTTCAACCCAATTAACAGCAATTCACACCTTTGAAAGCAACCTTATTTTAAATGGGACGCCTTTAAAGCTGTGCATTGATTCGCATGATATGATTTCTGTATTTCTCCTCATGGTGACAGAATGCTGCTTTTGTCACATACGTCGTAGGCTTGA
TTTTACGATTCACCCACTGAACAGACAACACTCACCCATGAAATCTTCAGTTGCCTTCAAGCATCGGGCCGGCTACGAGGTAGCTCGGGCCCAAGGCCCAATCACAAGTTCAAGCGACAGGGATTAACGTTCTAGCACCCATAGTGACGCATCCCTAAAATAGACACCGCCTGTAGACCCCTAACCGCAGCAATGATTAGCCCGTCCTTCCGGTATAAATTCGTTCCAGGGATGACACGTCTAGTTTGCATCCACACAGGCTGAGACGTGATCTGACATGCGAAATCCCAGCAGCAACAGAGTGCGACAATGGTGATAAGAGTTGCCACGCTACTCCAAATTCCAGTCATTCAGAACCACCACCTGAGTCCGATCGCGGGGATGGTACACAACGATAGGTCATGAGGCCTGGCTTTTGTACCGCTACTTAAGAGATATTGTCACCCTCTACCTCGAGCGGTACTTATCCTCGTAGGGACGCCCCTGGCGGTGTGACTTGTACGTCTACCAGCGGCAATCGCTAATGTCCTCGGCCCCTCCCGTCCTGTCCCCCCTCGGGTCATCTGAATCGAACAACAAGCACTAATATTATCGAGTCATTACATCATGACATCGTCAGCATAGTTACGTTCAGCCGATACTCTTAGCAATCTTCACTTGAGCTCGGCTACCCCCTAAACCTATTCACCGGCTAAGAGTACCGGCTTAATTCAGGTCACTAATTCGTTGGCCGCTCGTGCTATAGCCTTTGGACCTCACAGCGGTAGCGACTGCCAGCGTCTTCTCATAAGATTAAATGGGTGCACGTAAAGGGCTCGAGCAGTCTATTCACCTGCTGAGCACGCAAGTGCCTTTATAAGCCTATGCAGACAGTAAGCTCGTGGTATCCTACCTATCACGCACGCTGAGGTGTCCACATTCGAACACCCGGCTAGGTAATGAAGTGCCGTCCTCATGATCCGATTGTCTATGATAGACGTGAACCCCAGCGCACCGCAGTATTCAAGGAATTTTCGGTTGAATACAGGTGAACAATGTAATCAGGTGAATACGCTGAAGCCCGGAAACACCGTCTTGACTGGCGCCTGGGCCCATCTACGTATCGGAATAGCCGTCGGTCTATTTCCTCGGGATCGGAATAAGTCCTACGACGGGTTCCTTGTGTTCAGATTCCGCATTCTCTCACTGGCTTAGCGTGCTCGAAAAGCCGAAGAACTGAGCCCGACTACCGTTTAGTCGTTCGTGAGGCCAACGTATCCGGCCCACTCCGTCAGTTAGTTCTAGCAAGAGACATTATGAGTCGTTATTGATGTTGAAGACTATTCGTAGCGCACTCACAAGCTATCGTGCTACCTATTCTGTAAGATCGAGGTACAGTCTTAAGTAGCTTGATTTATCATCGAGGATCTATCCTCAAGGACTTTCTGCGGCGCAGGTAGTTAGGATAGGAATAAAATAAGCTTTAGGCTATTATCCTCTGCCACGGGCGAGATGGAAAACAATTGTCCGGGCCAACAAGGAACATTCCAGTCCTGTGAGCCCCTATGATCAACACAAGAAATGGTTCCCTATTACCAACACGCCCGTGTCATTAATCGAATCTCGGTAAGACATGGAGAAACGGTAAGCCGCAGCGGGGTCTGCTCAAAAGCCATTCTATTGTGAGTAGGCCTACGTTCCCCTCTGCACACATTATTAATTAGGCAGACACGCGGCGAGTGAGAATGTTATGAGATTTTTTAGCGTTCCTACCTTCAAGGCCAAAACTTACAGCGGCCCAGCGTTATAAAACTTCATCATTGGCGGACATTTAATCTATTTACTTGCGTTCGGTGCTTAGTTGGCCCGACGGAGTTAAAGGCCCGCCGTCTCCTCGGAGCGTCGTCAGAAAGGCGCCTAGCAAAAGTTGGGGAACCGATCCTTCGACACGCTAGATTAACTGGAGCACTTGAATCGGCTAAGTGCAGCCTATCACAACTTCTGTCTCATCTCGTTTCTGA
TTTATCCATCTTGTCGACTCCATACGCTCACGATGATTATTCCCAGTGCTCGTAGGACACGTATTAGGCTAGCTGCGCGGCGCCTAACATGCTCAACTCAAGTGGCCACATATCATCCATACTCTAACAACGACCGGCATTCGGCGAGCGTTATAGATACGCCTAATTCTATGGTCGTCACCAGCGTCGTACTGTTGTACCAACATCGTTTGCCCGCAGGACGTTCCCAGTCTCTAGTCACATGTTATTAATGGCCAAGCTGTACCGGGGATTTCCCAACTCTTTTCTATTTCGACGCCAGCGCTGATACCTAGCTGTGTATCTCGACGCCTAACATGATAAACGGACCGGAATCTTGGCGCCTCGCCTGTCGTAGGGAATGTCCATGCGCAACCGGAATCAGAAGGGCCTTGGCGCACGTCCCTATCGTAAGAATACCTTATAACCGTCTGTTGTGCAGGTTGAGGGCCAAACTCACTGGGCCAAACGGGCCGTATGTAGGATGACGTAGCGGTTATACATCTAGCGTCTCATGTGCCCGTACATAAATAGATCGCATAATCTTACGACGCTGCCTCCTGGCTGTTGCCCTCGGCCAGAACATTCACACGTAAGGCGTTGGAGATTTAACATAGCGACATAAGAATTCCCGCAAACCCGGCTATCGGTAGTTAACAGTCTGGTAACCTATTAAGAGGCAGTTCAGCCTATGAATTGCGGTAACACCCGGGATCGCCACCGGAGCGAATAGTCACGGCGTCTAACGTGGCGAATTGGTAACTACAAATTGCAGGTCGCCTGTTGAGGGCATGTTCGATATTTCTTAGTAATTGAGAATAGAAGTCCTAGCTGGGGGTCAACACCCGGAATTCCGTCGCTCAGGCATCAGCGTCCTTATAGTAGCCACAATATACTGCAGCCGAAGCGGAGATTTTGCCCGCTACAACTATCTCATCGTGGACTCAGGAGTTTAGACAAGCCCCCCCGACCACCCAGATCATGAAGCCTGGGTGTGCGCGATATTAGTACTCGTTCCAATCAAAAGGATAAAATGAAAAGTCGAAGAGCCGCGCGTATTCGTCTCACTCCGTGAGTGGCCGGCCGCATTCAAGACATGTAATCACACTCGGGACCATACCAGTAAACATGGTGCGTCAGAGAGGACACTCTGGCGTGCACTCAACGTCGGGCTGCACCTGATTCGCGGCTGGATTCGGCCAGAGTAGTATTATTCCTTGCCGATGGGGCGTAGATTGTGAAAGACGTGCGGTGTTCTGGGCGTCTCTTCGCCCGGTGATTTGCGTTCGTTCCTGTAATCTGCCTCGTAGATAGAGTATGACTGCGGGTGACGCAGTTTCTTTCGTTGGGCCTGCTATGCCCGAACAGTCTGACCCGAAAATCTATGCTCGGCGGGATCTTGGACATAGGGTCACCACGAAAGGTCGCAAATCCAAGAGTGGCCAGTTGCACGAGTTCAGAGGGATTTCTGAGGTCCCATTCCGCGAGTGCATGTTACTACCGGGTGAATTGAATCAGACGACCTGCTCCATCGTGTCTTATGGGTCATCAGCGCAATGTTGATGATTAGGTAAAACTTTCTTGTCCTTCCGATGATAGTCCTTCGCTGGGGATGAGTTTCTTGATTTAGAACCAATGTTACTAATGGAAAGACTGGCCCAGCCCATCGCCGGATTCCGGAAAGTTACCAGTCATTCGAGTAATCATGCGGATGGATGACTGCTCCCCCGTTACGGCTCTGTTGGCTCTGTTGGATCTACAAAAGCAACGCCGAATTACACTCATCCTTAGCAAACTTAGTCCATACGTACCTGTGGTGTGCGCTGCCCATCCAACGACGTGCAGGTATACAAGCTTAACTACGCTGGAAGGTTTGCAGGGCCTGGTACATTGAGCCCAATGACATTGAGAGTACGTTAGATCAGATAAGAAAGTGTGCAGTGTGACATTATTCCTTAGGTACCCCTGGCAGCGCTGAGACA
TCCTCGACGGCCTCTCGATGTTTTCTGGTTTTGCAGAGTAAAGCGATTGGTTTTACCGAGAGCGCGTAGCCTTCCTATCCCACTCATTTCCCTGCATCCTGGGTGAGACAGGCTTTCCCCTTTTGAGATACTTGCATCGACTAATATGGGATCAGCAGCATGCCCTCCAAGTCTGCACCTTCCAACAGATTCCCTAGACTAAGTCTAGGCCTATCATGAGAGTTCTAGTATATAAGCGCTTACGTTAGAACAGACCCCGCTTGTGATGACCCCCCACTCAAAACAATCACGGTGTATAAACCTAACCACCGACCGACCGCTAGTCACATGTACTATGGGGGGTACCTCCAGCCAACTTTCCTGACATAAGTTGCAGGGTCCTTGCCTACATGTTGTCGTACCCTGCTATAAAATCACCCCCCATGAAAGAAGTAAACCAACAGCATGCCTCATACTTACGTGGACTCGTTTGTACCCTCATTTTCGTTGGACATACGGGCAGCAACGCCTGAACTAGCAGGTAGCGCGCGGCATGATATACCTGCAGGTGCGTATAACAGCTCCATCAGGCGCCTTCCTGCCAGCGCCACCCGCCAACGCGATCGAGGCGATGGTCACACAAAGGTTGTTTTCAGATCTAATTGCTGAAGACAATATGTAATTCCCAGGCCACCGAGTTCAGGGTTATTAGGGCCTAGACCGGCGCATAGATTGTATGGGGACATGTGTTGAGGCTCCGTTGTGTACCAGGAGCAGGGGCGCGGCCCCCTATGCTTGAAGCATTACCTCGAAAATCTAGATCCGAGAAGCCAAACAAAGTACAACCCTGAATAGCCACAATCGGGGCCAGCGGGCGGGATTACCGACCCTGCTCTTTATGTCGTGCGTCCGGCATTCCGTAATCGCAGGGTCAAGCGAGGGAGCCCGCCAGCGGCGCACATACGCTGACCTCAGGTAGCCACGATTTTCGCCTCTCGGCGTAAGGATATTAGCCTTCTGCATCCGTGAGCCAAACAGTCGAAGGGCGGGGGGTCATCCAACGAACCGAAGTGTGCGTTGCGCTATTTTCACCCTAAATTTCAAGGGAACTTATTGGGTCTCTATCTGCGTACAGATTAGGTATATCGACGAGAGCTTTGCGAAGTGCACCGCAAACGTCACGTTCACTGGCACGGGCAGTAGTTTGTCAGCTCAAGTGGCCGCAACTCAACTTTACATCGATCTTCCTACTACGATTTGCTCACTTGGAAGCCAGGTTCGTAGCTAAAAACCGATTCGTCAAGTTTCGAATTAGTGGCTAGTTAGATATAATCTGATGGGTCTGTGGACGTTGTTTGATAAGGCTAAATGAGAAATAAGACTCCATCTCCCGAAATTCACATAGCTAGGAGTGGATTGAACTTCTTCGTGACACATTGATTGTTGCTTCGCGGACTGTGGCGGCTGTCATAGCGTTTCAAGATCCGCCTGAGATACTGACTCATACAGGCAGTCCTTGCCCCATCATGACACTAAGACCTTGCTATAGTGTCTTACCAAGAGCTCTCGGGACGTTGGCGGCCCGCATCACGGAGAGGAACTGAGTGACGACGGCAAGTTCTGGTCGTGTATCTGGAGGTCGTTACTAAGGCTCAAGGCTTTTGCATAAGTAATACTTTGAGCTACGGAGCCATTCCATCTGGTCGCCGGCAATGCCTTTCTTTCGCGAAGGCGTGTGTCGGGCTAACAGCTTTAGCCTTCTTACGATCCCTATTCCAAGTCCCCAAACCGCTGTGATTGGGAACCCATGCCTGTTGGATACTAGATATATGGAGCAGTGACTCTCATAGTCCCGGCTTGCTCCTTCCCGAACGGGGAAGAGTCTGCCACAGCAACCACTGAACTCACATGACGATCTGGACATTCCACACACCAGTATCTCGTCTATCGTAAGGTACCGTTAAGGGAAACGTGCGTCAGCACTCGAACGAAATTACGAAGCTCCCAACAGCACCTATTCG
TGCTCAAAATTGGTACCCGTAATGGGTTTGGACTCACCTTATATGCAGCGTTAGTCCCGATACATGGACGGTTTCCCCTCTACAGTTCGTCTGATTACAAGGTGGGCAGCCCCGTACGCGGTGGTCATTTCTATACGGAATCAGGCTATTGTGCCACATAAAGTAGTCGAATCTGACCGGCACTGGCGCGGATCGTGAGCGCCACTCTGCGTACCTCCACGCATATATATGATGATTTACTGTGGCCTGATTGAGCCGTACCCTCCGTCTTCTTCACTTAACTAGTATGGTAAGGCAATACACACGCATGTGATGCCATGGGTCTTTGCTGTCATAGAGGCGGTGGTGTCCATAACATTCGAGTAGGGTACGATCGTCCGGATATCCGGCAAACGTCGGGCTCGCCTGACGGCTGGGCCTTAAGAGATTTTTTATCCGAGGATTTTCCCGTGCCGGTGAAATTTGCGATTATTTATATTCTCCCAATAGTATTCCTTACTGTCACGTCAAATGGGCGTTACAACACTGTGGCATGAAGTACGAAAGATAATTTCCCAATGCATTCATACGGCAGGACAGTCGGGAGTCGATCAGGTTCAACAATTATACATCGACCGGACGCAGTCCCGGCCTATAACTCTAATCCTTAAGAGAGATTAGCCAGATCCTCCTCGGCTGGCAAACGTCAGTCGCGTGGGCAACGATTTCAATTTTTAGATGCCCAGCTGCTCGTCACCCCGCGGTACGTACTAGCTACGTAATCTCGAACATCTGAGGCATGCTGAATATTGGCTGCGCGGTCGACCATGCCGCTTGAACTGGGTCACCATCAACTTTACGTGGTCGTTCATGTCGGGAAACGGTACTCTTAGTAGAGCGAATGTTGGTTGTGAACTTTTCTAGACATTGTGGTCTGCAGGTTTGGTAAAGTGCCCACAAGACATTAAATGCATGTACCCAAGCGAAAGACGACCTCGCACAGTGATCGTATTCCCTCGATCAGACTTTGGTTCGTTGAGAGAGCTAACCAACTTGTTTTTACAAAATAAAAATCCATCTTCATGGATTGGTAAGGCACAATATTTACGTACTCTCACTACACTTGTTGAATATCTAGTCCACTTGAGGATGCTCCAGTACGCTCGCGGACACTATCGCCTTTTCCACAGTCTGCCACGAATAGGAGGATAAACTCATGAGAAGCAGGATCATCACGAGAGAGTCCAACGTCTCGGGACAGCGGACTGTGACTCCTCTAACACGTCTGATCCCTCATCTATGGGTGTAACGGAGGTTTTCGTGTCGTGTACCAACCAAGCAGTGGTAAACCGTTGAACCAGTTGTCTGGGAGATCACATCCATTACATCGGAATACTGTTTAGTGCGTACCGAAACGGTGGCCTCATTCACCGTGGGGGGCGTAGCGGCAGTTTTCCCTGGGCTGACTGGCTCTTTACAGCCAAGGTACCTCAAGGCACGCAACGGTGAGAGTTCATTCAGATATTGCATGTCAGCCGGTCACGTTTTCGGCCCTTATAGCGTGTCGTAATCTCGGTACCCTACGATCATGTGACTCAACGACACGTATGGCTCACGGGTAGAACACCGTAGGCCACTCTCCACTCAATAGGCCGTCCCCTCGACGAGCTGTTCCTATGACCCGGCCTCTTTTTATTTTGCATCTTTTGATAAAAAAGTACTTACTTGGACTATACATTTGACATCAAGATGCTCCTCGGACCGAGACGACGCTTCGAAACTTTTAATCCAGCGGAAATTTTCTATGGCGTAACCTCTCTGAGTCATGTCGGGTGTCTTATCCAACGTTCATGATATTTCCCGATATAGGCATCGGGTGGGTTATGGTGATGGTCATTGAAAAGTGCGAAGTGAATCGAAAGAGCCGTAGCAGTACAATAGTTTCTACGAGTATGGCGGGGTCCCAATCGGCGCTCCGATAGAGCTTAGGCAACAGTCTTTTGTACCCCATCGTTGATCAA
AGTGAACAAGGATCGTCTGAGTTGATACTGACACTGTGTTGGACCCAATAGTCTGTGGCGCGGTTATTCCTCTAGCAATAGATCGGCTCGATGGTAAATCATTTACCCGTGGCTCTCGGCAGTTACAGGAGAGGATAGCGTCATTCTGAATTGCCCTGTTGGTGTGACCAGCCGCCCGTGTCGGGGATGTGCACACCTGTCTCCCGCTCTATGTAAGTGGGCGGTGGATGTCCCTTTAAGCCCAACCCGACTCGATGTTGAGTGGAATCGCAACCTCACACGCCTTGATACCGCTAGACAAGCTCAGCTGTGTCTAACGGCTGCTCTACAGTTATGTGGCGCATCCCCACAAAGGCCACCCCCCTCTGGAGTGTCTGGCCCCGGCCATTACGAGACTGGTTATCTGACGTAGCGTTGCCAACGGTTTAATTAGACGCTAGAGTTCCAGCCTCTCGGCGGGCGTTTGCGAAGTTGGCTAGCGTTAAACGGAGCAAACTTTTCCCCAATGCTACCGTTTCTGCGTCAGAAGCCGGTCCTCGTGAAATGGGCATCAATTGTGCTTGCTCACGCCAGAGATCGCGGGGCTCCGTCTGTCGACTCGGTTTACGATTAAGGTCAGCGGTTGAATTAGCATCGTCCTCAACGCCTGACGGTTGTGCGACCGGTTCCCAGCGATTCAGATTCGAACTACTTAGACAAGAAGAATGTTCGAGCAACTAAGAGACTTGGTCCCGACATAAAGGGCACGTTGAGTTGCGGCCCGGGAGCCTTTACTATATTAGACACGGAACTGGGTTGTGCGCTAACGGGCGATCATTCATCCGGAACCCATGTAACGGCCTATCAATAACGCAAGGACGACATCTGCAATTGTTTTCATCATGAACCCAGCGCGAAATTTCGTAGTGCCGACATCCCACCCTTTTTGAGGCCTAGCCCGGTTGTCATAGACCGAGGAGAAATCGCAAGTACCACAATACCATAACACAGCAGTCATGGATGGAAAAAGCACGGCAAAGAGAACATAGGTCATTCGATCCAGATGCCAGCGCATCCCCAAAGTTTTCAATGGGCGATCGGATCTTTGTGGATATGCAGGGAACAAGACAGTTTCAGTAGGATCCTGAAAATCGTAGTAAACTCTGGTTAGGTTTCCACTAGTCGGACTCTTTTCGGCCCCGTAACATTAATGAGATTCCAGCCAATCCACGTGAGAGGTCGCCTCACGACGAGGAGTAGATTGACTGCCAAGGAGCAGTCGAATCAGTTCGTTGCTGTTTATTCAGGCCTGCTGGGATATACATGGACACCGTCACTGCACGCTTTTCGGCGCACACATGATTTCCTAGTCGAATCTCTCGGTGGTACCACCCCAGTCCGGCGGTTTCATGTACTTACGGTTTTAGTCGAATGGACTTATTTGGCGATTAGTAATCATTGGTTAAGTGGAGTAACGTTTGAACGTTCCTTCCGTTTATGAGGCTCCTGTCTCGGACCCTTACGACCAAACTCTTGTTTTCAACTTTTATGAGTTACTGGATGTTCTGACAAGAATTGAGCCTCCACCGGATTTACATAGCCTTCGGATTCGCCGGCCAGCGAAACAGACAATTTGGGACGCAAGTTGAAAATCGGACAGTGGCTGGCAAGAAAAGTGCAATTGCTAGCTCTAAAAAACCTGCCTAGTTATGGTGTCGTATAATAACTTCTGGTGATGACCGGGCATGACGAGCATTGAATTCCGTAGAGTCTAGAAAGTCCGAAATAAGTTCACGGCATGAAGCGAACCTAGTCACACCAGGGATAGGAGAAATTGCTCCGGTTGTGCTCGACCGACCATCTCGCACTAGGCCGCGGCTATCATTCATCCAGTACCCCGGTCTTCTCATATACCGATTTTTGGTTCACCGTAACAACTTTTTAATCAAAGCGTTACGAGAGATTATCGTCAGTTTGAAATTGAGTAGCTGAAGGCAAGTGCGCTACAATATCTAAGTTGC
TCTATTAGAGTGATCGACGGTACGGACAACTAGTGGCTGTTTCCGCAGGGTATAGTGTGGGGAACACTAAAAATTTCAGGTACGTTTGGCGTTACCAGACTCGCCTTGTACCACACGCAAGACCGCATTAATCTAGACGGAAGTGAAGTACAAGGTAGTCTAGTTGGGGGGCTGGCACTTAACATTGGATTTCGCGTATTGCGTTCGTTTGTTGATTGACACGCCCCAAATGGATAAGGACAGTTTGGTCTCAGATCATAGTAGCGTAAACACGGAGTAGACTCGGAGGTCGGCTGACTTGGGTTCCTTTGTTAGTTACTCACCTAGTAACGCGAAGTTAACCGCCGAAGCGGGAAAGGAGCGGGACTGCCACCGGAACGATTGCTGTGTGCCCACTTACGAAGATGAACCATGCCGCAAGTCCTAAGCTCGTGAGGTGCGCCGCTCACTGGAACTCAGGCACAATTTATACGCTTTTCGTTAGATTTGCGGTATTCGACACCTGATATGCATAGGATTAAAACACGGACTAGCTTACTGTAGCGCAATAGGAACTGGAGAGTCCCGACTTGCACCGATATGTCCGTAAATGAGTCAAAAAGATGGTGGAACTTCTGCGTGCTTGAAGACCTAGCAGGTATTTCCTATCCACTATGGTTGGTTAGCACCTTTAGATAGTACGCAATGACCACTGCGCTTCGTACAAGTACACTCTACTTCGGTTTGGTCCAATAAATCTATATCGGGGAGGCCTACAACATGGAAGGCCGCAGGAGTCTAGCTGTCCGGACCTTCCCCGTCTGTCAGGGCCCAGCTCTCTTTGGGGGAGTGCAAGCTCTCGTGGCATGCAGCTATTTCAGAAACTAAGCCCTGGGCCGGACCAGAAATGTTGCGGCGCCACGTTAGCTCTCTAATTGCCATGCGTAGAGGGATGACTCGTCTTTTGGATGCCCTGTCTAAGTCATCACCGCTGTGCGTGCCTTTATGCTAGCAAAGTATGACCACCGCAATGTCACGTAAGATTGATGTACGCGTTCGCGCAAGATCCAATTAACGATATGCAAGGCGGAGGTGGCTCCCCGGCACTTTCCTCTTGATTTGGGCACCTTACTTGCCGAGTCAAATCGAGTCCTTTGGAAGCTTCCGCCGAGAGTGACTAGGAGTCCTTCGGCATCTGCTCCAATGGCCCAGGAGTAAGACGAAGTCCAAAGTGCAAACTAGGCTCCGGGTAGTGTAACCCGGTTCTGAATTATGCTTAATCTTTTCCTACGATATGTTTTGTCGAGTAACGTGATCATTGCCTACACGGCTTATCGCATACGTCGTAATTCACCTTGGCCATCCCTATATCGAGCCCTACAGTGTCAATAACGTTAGTGCCTTAGGAGGCTTAGCTATCAACCACTGTCGTCGCGTACGAAGCGGCGGCCCCAGGTCTTATCATGCCGGCACCTTTGACCACCGCATTAACTGCCCGGCGCTCTAATCCTAGGTAAGTGGGGCTATCGGAGACGATGTTAGGGCCTCGCAGCTTGACGCCGTGATTCTGCGGGTGCGTATTATGCGCTGTTTGGCCAACACACTCCTAATGTTTAACAGCCGCTGTCGGGGTGATGGATAGCCACAATTCGGGTTTACGGGCGAACCTTAGAAGCTTACATTATCACCTGTTAAGGGTGAAGCTGGCAGAGCAAGCCGTCTTACTATCCTGAGCTGGTACTCACTCAGAATGTTAACGGCATTTAGTCCTACCTGAACAATCTCGATACTAGACTTTTGTGCCTACAGGGCATCGGCTCACAGATTTAACCGCTCACCCAAAGTGTCACGATATCTCCTGGAAAGATCATTGTAAACTTCATTTACGGCCTTCAATTATTACATGCAGACTGTGAATACTATGCGTGAGTAATACACATTGGCAAGGACCTTCGCATCACACTCAGAAGATCGATGAGGGGGAACCAACGCGGCTAAAGTAAGCCGACCTAACACATTCA
CGCTGTCAGAGCGTACTGAGATGAGTCCTTCGCACTCACACAGGGTGCATTCTCTGGCCCTTGTCCTTCGAAGCTCGTCATTCCGAGATCATGCGCATTTCACCATCCTTCACAACTCGAAAGACTTACACTTTAACCTCGACATGTGGACTAGCCCAGAACCATAATCTCCCTCACCGGCCCGCAGAGGAGTCAGGATACCCGTCACCGGCTTGTAGGATCGAATCCGTTACATGTTTAACGCCGGCTCCAAGCAGCGTTGATACGAGCGGACAACGCCGGTACGTTCAGCCGCGTAGCTGCCTGGCGCATTTCCGTTGGTGCTCTTCAACAGTTACGCCCGTGGGACACTATGCCGATTATACCGCAGCATTCTATTCGAAACAGCGGCCAATACCGTCAGTATCTGAGAGTCTCGGCTTTATGGCTGGGACAAGTAATCCTTGCCACTGAAGAGTGACCGTTGATACCTAGGGACAGTCGCGTTTTCCTCATGAGACTGACTGAATAACTATGTGGTGCAAATCGGTTATATATCAACGAGTGCTGTTTCCAACCCTCCCAAGGTAGGTTGCCGGTAGACGAAGGAGCATAGCGCTGGGCCCTGGCAAACGTCATTCTATACTAGCCTCTCCTCCTAAAGATCGGCTAAGCTTCAGGTTGTAGACACTAATCAGCATGACTGGCCGGTTCTGGATGAGGTGGGCGCTAACTCTGTTTGTATCTCAAAAGTGGACGTATTTATGTCAAGTGACCTGGCTTATCTCGGACGGGAAGGTCGGCTCATAAGACCATCATGCGAGTTACCGGTCGTATTCTTGGTGAATAAAATCTGCTTGGAAATCTTATGCTTTATTGCAACTTTCGGGATTGTCGGTAGAATCCTCTTCCACCGATTCGTGCTCATTAATTGCATTTGTACAGGTCAAATTTGGTGAAACACACACGGTTGCCTAAAGTTTATAGTCTCAATGGCTCGTCCGCCCGAATCCTCTACTTATTACTCCCTGACCTCGCCACATGTAAAAGTGGCCGACGCGAGGCCTGGCCATATCAATAAACAACAACAGTAGGCATTGAACTACAAGTTATTAGCATGTGCTGATTCGGTGGCGTACTGCACATCCGTAAATGGTAATTCTTCCCGCATCAACCCTCTCTTCGCACGTCAAGATGCTTATCTCACTGACACATCACTCAGGATCGCACTTCTCATACTCTGCCAGAGGAGGGGACCACACAGATTATCTCATAGCATCCCACGCCCCGGCTGCGTGGACGGCTCTCGTCACAGGCTTTCCGACTAAGTAGCTTACGTAAGCTGCTTTCATCCCTATCGCTCGATCTCACTCCGTTGAATGCGCCTTCATATTATCCGGGGCCGCTTCGTAACTCATTCGGTCACTCAAGCAGGTGATATTTTACTCGCGGGAGCTGTAGTGTCGTATGCCAAGCCAGCGCTGGTAATAATCCCGTCCAGGGCCTCCTCGGAAGTCCTCTAAGCGCCTAGGTTATCATTCTGGTGCCATTCTGAAGTATGGTGATTGCAATCCACCCCCGACGGAATTGTTTGTCGTGGACGAAACCAGCGCACCTCCACCTCTTGCTCAGGGTCTGTAAACCTCGTACGTGCCGTTCGACGCCCATATGGTCGTGGGACTTCGGTTCATTGTCCATAACCCTGCCGAACCGTTTCGAGACCAGGATGGAAAAAAAACCTTTTGGAATAGGGTGCGTCAAATTAACTGGTGCTGTACGTGAGGAGATAAACCTGCATTTTAGTGCGTGATCTGAACCGACAAGTGCTTGTTAGCAGACACAAGGTGCTTGCCGGAGCCTCCTAACCTTCTGTATGCCGAGAATGGACAGTGGTAGCGATCTACTACAATACCCTGCACGTCTATGTTAGTGTATGTAGATAGCTGTAACTATTGGCTTCACCGTACTTCTGGTCTGTAGTACATGCCGACCGCTGTCATTGCTCGCCTCATTTTAACTGA
GATCTGCGACCTTTAACTTGCGTGCAGATCCAGCGGGCGCCGGAGTATTGTCTTTGAGCCTGAACTGGAATACGCTTCGGCTAAGGCACATACTGCTTTCCGTTGTCAGGCAAAACAGTGACCTAGCTGCCTTGTTAGTTACGATCGCGAGGGAGATGTCCCCGAAGCAGTGAGTATTGGCATAGAATTCGTGTACGCGGTCGTCAAACTTCATTTCCGGGGGAATTAAGAGCGGGCTTAGGCAAAAGTCATGTTTACGCACTGTTTTCGTATCGCCTTGCACAAAGCGGCCAATCTTCTCGAAATGTACCCATGCACGCCTTACCTTTCGCTACTACTGATCATCATGGACTCTACCGCATGTCATCGTTTCCAGGGGAACTTCTCGTGGAATACCGAAAAAACGTACGCAATTTTTTTTATTTTTCATAACTTTTTAAGCCTATCTTTGGGTGGCAGCACTATGACTGTACTTCAGTTTGCATATGATGCTGCCTTTAGAGTACTGTTATGGCGCGGCCTTCATATCGAAACATATCGCTTGCTACATCCCATTGTAGGTCAGAAGCAGGTACTTTGACGGGACCGAAATCGGGCTTATAACCGACTCTATGCCAGATCTATTCACCCAAATCGCGCGGATTCATTTGTATAAACAGTGTTCCATGCGGAAGTTAAGCGAAGTGTCCCTCCTTTTAAGTTCTACCTCGGTTGCCATGTGTTCACAGTTTAGCCGCGTCCTCGGACATTCGGTGAGTTATACGCGTTAGGCGGGTGAGGCAACTGAAAGGCTTGCTGGCGACCGGATTCCTTCGCAGATTTCCGAAAGTTTCAGTGGTTCCCGTGCTCTTGGTCGCTCTAGAAGGAACCGTCCACTCGTTCGGTCTTAAAAATAAGTACAACCTGATAGGCGCTGCTTGTGAATTCTCTTGAAAGCAGGTTAGTCGTTGGCGCTCTCGGCACGAGGCCGAGGCGGCTCAACTCCGACGCCATATACGAACCTAGAGGAAGGCGACATGCGCGTTTCTGATTTGTCCCATGGTCGTATAAATAAAGTCTATTACGGGGTCGTTTTGCGAACCTCGTATGCACCTGGTTAGGGCTGATCCTGCTGGAGCAACAATTGCCAGCAGGCTACCTCAATGAGAATACTTGAAGACGGCCTCTTGTTCCTAGACCGGATGCCCCTCCAAAATGTATCCGGCCAAGCTTCGTGTCACACGACGTTGCCGAGTTCAGGATCTCGGGTTATAATTTACAGCCTCCAGAAGCTCGCCAGATAATATTTGCGGTGCTAGCAGTACTGCGATACGAAGAAACCTCCCGTCATATGTACTCTTTAAACCTACTATATTACGGTACATGTGTCTCTGATAGCATCAGGCTCTCACATCTTCTGTCAAATCTCTTGGCGCACCAGCTAAAGGCGGCTGGCGGCGGGCTCCAGGCGCGATTAGCTAACAGGTATTCGAACGCCCAACACAGCAGTTCGTAAAAGCGATCGGATCTTGCGACACGGTGGCGCGACGCGCATGGGGCGAAAATTTAACCTATGAGCTACGTAGTCGATGCTCCCTGTGTGGAATACGGCTACGCGGCACATGGGTCGAGGTAGCCTTATAAGCCTACCTTCACCCACCTAGATGGGGGGATATCCATTCGTCGGTTCGGGGGCGAAGGATGTCTCGTTTACGCGCCTATTCAGTATGGTGTTGCCTCGCGGTGAATTGGGGTTAATCCAAATATACTCATACCCAGGGAGAATAAGTGTCAATCAATGCGTGTAGTTATAATTGTGCTAGCGTGTTGCCGCCCCTCCACTGGCGCTTTGATCCAACTCGCAGCTTGGGGTGCCGTCAGTGAGCGGACGGGACTATAACAAATAGCAGCTCGCTATTGTACCCAAACCCGGTCGTCTAAGATATGCACCCCCACAGCGTGCAAAGTATATGGCAAGTCAACTACATAAGGGGACAAATATACTCGCTGCCGACTGAATC
TTGGGAGCACAGTCTCACACCCGACAGAGTCGCACAGATTACACATGGTGGAAAATGTCCTCCTAGAGACCTACATTAATCTGTCAAACGTAGTTAAGTCGATGCTTCAACTTAGGTCACTTGGAGTGCCCTGTCTCTTTATCTCTACTAAACTCGTCGTACCGGACACCAGTGTAGGTTAGGAGGTAGAGGTGGCAACCAGAAATCGGGCATACGGATTTGGCCTTTGTTTAGCAAAGAGGTAAGCGCAGTGCTGCGGCGAAAACAACGCTGCTCTTCATGTTAGTGGCGCCGACAATGCGTCGTAATTCTCCTCCAAGTTCGAGTTGGCGCTGCCCGACGACTTTGCGAAGATACCCTTGGAGAGATTCCTAATGCCTCGTCGCCATTCACGAATGAGCGGACCTCTCGTTCCGAAGTGAGCGGTGTTCTTATGCACGTCAGCTGGCTTACAAATTGCGCATGTTCACTCGCTGAACTCCATTGAGCATTCTGGCCACATCCCTAGAGCCTAGTGATGTGTATGCCATGACATTAGGTCGTGCTGCGCCTAGACTGACCCGAGGCGGTCCTAGTGGTATTTTCTTGGCACCTCTATCACAACGGGCGGAACGACCAGTGCGGAGGACAAGAGCGTTAGGAACATATCTACGCCCGTGTTTCGCTAGTTTGAAGAGTTGGCCCAATGCCGGATAGCTCATCTTTTAGCAGTCGGGGGGTGGGATTGGGGCGGACGCCTTAAGAAATCTCATGTACCTGATGCATAAGAGATCTTATGCGGGTACAAAGACCATTGAAAATATTGAATGGAGTGTTATCCGTAGCTCGATAATTCCTACGTAACTCCGGACGCGGAGGCGTGAGTTCATCGGGTAAGAGTATGCCTCTAAAGCATTCTGCGTATTTCGCTCTCCAGGTGGCCTGCGAACCTGCCCTCGAAAGAATCGCCAAAACACCTATACCTCGCCCAGTGGATAACCGTACCTGCGAGACCAGCACAACATACACTGTACACTTACTGTAAACCTATCGCTAACCCAGTAAAGGAACGCACGGCTCGTCAGAGAATTTGGCTGAGACAACTGAAGTTGCAACAACTTTCCGAAGTACGAGCAACTTATGATCCTCTGGCCAGGCAATAAATCCTCCGAGTGCTCCTCTCACTGTGCGTTTGCATCGTCCTAAGTAGTCCCAAACGATAGCTTACGATCTATTTAGCACAGAGTAGACCTTGCCCTGTCTTCGCTCAGTCTAACGACCCGCATTGAGCTGCAAGCAAACTCTACCTGTGCGTCCAAACTGTATTTCAGCGCATTCATGAGTTCTATATTTCTCAAATTTAGCACGGCCCACCATGGACTTAATTAGCTGGGTCTGTAGTTGGCCAAGCATTATCGGGCTCCATGGTCCGGCTAGTGATGGGCGGTGGACAACGAAACTCCCCTTCTGATCGCGGCCCTGATACAAACGGTCTGCGGGGCAGTTTCCCCTCTGGCGAGATTCTAGCGTGAATAACCTCGTATACTGACACACAAAATCTTTGATAGGAACTTTCGCCCACGAGGGCAGCGTAGCGGTTCCTTTACTGAAAGCGTTGAACTACAAGCACCGCCCACCTTCGTCGAAGTCAAGTACCACCCCGGAATTTCCACTCATATCCACCGACTTCTACGGTGGGGCCAGTGGTCACCTCTCGGAGCAAACTGAAGTTATAAAACAGCTCGCTTATACTCGAAGATGTTAGCTCTAACACCCTCTACTACACCACCTACCTCAATGTTAGCCTTAGGATAGCATGGTGCTACAAAGGTTTCGATATAGTCGTCCGTAGCTGATGATTGCTTTTAACTGCTGACAGTTATGGTCTGATCCGACGGATACGAGTAGGCTGAGACGCCAGTAACAGTATTCGAACTCTGTGCGGGAATTTAGACTTTGTTGACTCTACGTACTGCAGGTGAATGGTCTGCGCGTAGTTATGGTCGAGGCCTACTGACACA
GGAGAGCTGTATAACCTCCAGGTCTAGGACAAATCCTCGGTTTCCCAGAGGTTGCGTTAAACGAGTGAGAGTCCGGTGGCGAGGCGTTGTTAGGCCTTGATGGGTCCATTACTTACACACGTAGTACTTCGAGTAGCAAACAGCGCACGGGCCGCCAGTAGCAAGTGAACCGGGGGGACCAACGACGGATGTAGTTTGTTTGAATACAGATTGCTGCTGGCATTGCGTTTGGGGTATCCCATCCTCCCTTTAACGTATCCCATGGACCGTCAATTGGATAATGTTTAGCTCCAACGTAACATTACGTCTGCTGCTCAACAGTACTGCAATTTGAGAAATTTGAGACCCGATAAGTCCGAAATTTAATTGCACCAACCCTTCCGACAAGAGAGGAGAGGCGGGGATCATATCACACCGTCTTGAGGCAGTGATAGTTGGCCTCTACCGTTTGGCATTCTACTCCCCGTTGAAAGGTTTTTATGAAGCATGATTGAGCAGGCTAACCTGCAGAAGACTACACGGACATCTGGTTTACAGATCAATTAAACGCCGAACAGATGAAACTTAGTCGGTATGAACAGATTTTGTAGAATCCGAGAAGACGCCTTGTATCTGAAACAGCTGCGGCCGGACCGTATTCTCCCTTAGGGGAAGAAAGTTCGCCTTCGCATAACGCAAGGGTCGAGCCTCAAGGATACACCATCAAAACGGATACAGCCGTAGGTTGCCCGCCACTGTGGACAATTCTCCTCCATAAGGATTCAAATAACAGAATTCCATAGGTGTCCAGATGCCAGCCTTCAAGAGAATGTACTATGTGCTCATACATTCATCTGGCGTTTATTGTATACTACGGTAGGCTTTGAATGAGCCGTAGACTTATTTGTCCCGGGTGAAGGGGTAAGCGAATCTAATCACCACACAAGTCCCTGGTCCCGATATATCTATAAGGACGAAAAGTTTGTAGACCAGCCGTAGAGAGACGGTAAGTAGGAATTCTCTGCATGTTCTAGGTCCCTCTGACGTTTCTACAGCCGTGGAAGGCAGTACCATATGCGACCCAAATTCAACCGTGCATTGTTGGTGTACGACAAACTCATGTCGGACGACCTCGCTCACCAGAAGGGCTTAAATATTTAGGGCCACTACTACGGCTAGTCCGGTATTCGGCCGGCCGCGACGTTCGGCGACCGCGGAACAGGAAGTTAATTTTGTAGTATCTGACCCTGAGACTGGTGTTCTGAATGATCTTAGTCACAGTTGGTCATCAGTCTAACCTATGAGGCGTTTTTATATGGATTGTGAGCCTGCTGTAGTATTCACGAAACAAGGTCGGAGCCTCGACTTTCCGCAGACCACGGGCGCAAAAAACTGAATCTTTCGGACTACCAACAGGTAACCTTCGTAATCGCAGAGTCATGGTACCCATCACTTGCGAGTCAACCGTAGATCCTTTCTCTAGCTACTACGTGGTAGTTTTACCGCTTACCGGCCGCTAACGGGGAACATGACTGCACTCATTAGCGTTGACTGTAACTGGCTAGTGCCGCTCAGGGAGCGTAGATCTCATAATGAGAAGAGATGCATATATTACATCTTGGTAAGCTAAGGTTCCTAGGCTAATATGGAGATAATCCCTACCGAGAACTAACCAGAACATGATAGCCGTGATCAAACTCCAGGAAGTCTGAACAGATTTGGCAGCGCGCGGTGACTTTCCATCGCGCCGGGGGGCGAGCAGCCACGTATCAACCGTTCACTTTGTTAACATACAACTCGACCCAGCAGTGATTACACGTTCTATTCATACTACTACCCCCAGTCGATATGGCTTCCTCTGCAGCATCTCCACCTCGCGACATTTAGTTCCGAGAGTCGAATCATCTTGACACCCCTCCCCTGCGCTCCTCGATCCTGCTGAGGAGGTGGGCGCCGTTTGCACCTTTGTAGAGTGCACCGGAGTCAGACTTCGGCGTTGCAACTTGGTCCTCCACCGAGCT
CTTCTGAAATATCAGAGCGCGGCTGTCATATAATTTTACGTGCTCTTTTCTTTAGTACTCTGACCTTGAAAAGATCCTGGTCATAGCCAGTCCGCAAAGCTAGGCCGACTACCGTGTTCTCTAATAAATACCGCTGAATCACGCTAGCAACAGCTCCCTTTTCCGTCATTTTCGCCATCCTTACCTAGCAAGTCAGCTGATACAGGAGATGCCTATCGCAAGAAAAGGTAACGTAGGAAAGGGAGTAAAAATTTTAGAAGAAAGCGCACCTTCGGAGCGATAATCCTCTATCTGGATACTCCACGAAACAAATGCGCCCTGATTTATCTGGGCTACCCATATGACGGCTTTTGGTGCCCCTATTGGGGGGACGCAACAACTCTGAGAAGCTCGGGAATGAATCCCTGTCCCGTAGGTCACTAATCTGGCCCGAAACTAATGAGTGGCCCTGTTGTTCCCCACGACGAATCGCGTACGACTCGATTAGTGTTGGGCTAAGGGGTCGCATCTGCACGCGCCGCTATTGTCGCTTCAATACTCTAGTAAGGAACCGATGTGCGGCGGGGTGGGCATAGGTCCAGCTCTGCCTACCCTCCTCGCTGGGGGTTACTAGGTGCGGCGCGTGTGGATAATTCCGTGACAACCATGAAAATGAGAGGTTTTTACAAGAATCGAGGGTTAGCGCAGCTTTCTCTTTAGTGTCACCGCCAAGACCAACCCGTGCGTCTTTGACATGCGTTGTGCCGTTCATGGCAAATACGCCCGTGCAGCTATCAGCTCTAACGACACTCGACCAACATTCGCAGCTCAAACTCGGAAAACCTCGCTCTGAACTATGTGGGGCAGTCTTGATTGAGCCGTAATCTTTCGCCAAAACCGGTGCTAGCACGGCAGTACTCAGCCGAAGGCCCGATTCCAGCAACTGAACTCTACTAGAAGAGATGGAGATCTGTTCTATCATACCAGGCGGTTATTGGTTGCAGATGCTAGGGCTACTCATCATTTCCACTCTTGCACTTAAGGACCGCGTCGTAAGTCGAAGTAATCCGTCCGGGACTAAGAGCACTGCTGGATTGGCAATAACATTGCTACTATAGGCCCCGGCAATATATGTTCTCGATTGTATTAGCGTAGGTGACAACCATTCTCCGCTTTATCGCGCGAGGGGGGAAACCTCTGTCGGCGATGTACACGGACCTATTAATCCTCTTAGAAAGTTTAGCTGTAGGTTATTCATAAATCTTAGTCGACTGAACGTCACACAGGGGCAGGTACGCAGTCCACATTTTATTTAGACTTCAATAAGAGCGAGGGGCGGGGGCGCTCTTGACAGTGGATTAACTTAACTTAATGCTTAACAACGGCCACTCAGGGGGGAGGCTTTTATCGCGAGCGGTACCAAAAGTTCGAGGTGTTTGGCTGAGACCGTTTCCAGATACGGAGTCTGGTACAAGGACGCGGTCCCTGGAGTCGACCCGTGGAAAGCCTTAATTGGGTGCTCATTTCGGTGGGAACCTCCGCGCCTGATGTTCGGGTGTAGGCCCTCGGATGAGTGCTGCTCGCACCATAGCCCCGGTGCCGACAAACGTATCGTGCATTACAAAAATCGCTAGAACCAGACGTGTCTTAGACGGGGCTGGGGACATGCTTTCCTTGACGAATTACGTCGAACCCGCAAGTAGTAACGTCTTCGTAAAACATCGCCCCGGTTGGACACCCCCTATGAGTAAGTCCAGCTTGTTATTATTACAAACAGATGGCTCGGGGGTAGAAGGTGCTCCTTTCCCCTAGCGGAAAGATGTGGGACGCGTTTTATATTGCCAAACAACTGCCGGGTGTAGTTGCAACCTGCCACTCTGTACTATGCCATGCTCAGCTGATCTATCGGTTTGCGTCAAAGCTAAGACCGGTTGAGTACCCCGTAACAATTGAGAAGCTAAAGCGTCCGCCACTCCACCGTGACGAGCGTAGAGAACGGCCAAGACTGGCCCACGTGTTGG
AATCAGAAAGCGTCACCGCTGCAACCTAGACCTACGGTCATAGAACGAGAAGCAACGCGTCAAAGCGCATATCGTAGTCTTGACTCAAGACGTTGTTAAGCCGGGCGTGTCTTAGCTTTATTTATTTAGGATGTGGCCCTATTCGTACAGTGGCTGTCACCATCTTCTGGGGATGACTGGCAATGGTTTGTTCGTCAAGCGTCGGTTGTCCCAGATTCCAAGCAGTGTTTGTTTAAAATCTTTTACTGTGATTACTAGAAGGAGGGATATTCACGAAAGCCGCTCATCCTACGGTAAGGCCCAATGGCCGTAGACCCAATCATATAGAAAACAGCTGATGTCCCCGTCGGAGATAGACAACACACTAGTGTATTATATAGCCTTGGAGTCTCGCGCGAATTAACATAATCTCCCTATGGAAATGATGTCGAGATCTTTTTACATACGGATAGCGTGCAACCTGAGATCTCAACTTTAAATTACTCTACGTCTACAATGCCCCCCAAAGTAGGTCAGGATTATCCATCTCTAGCGGGCAGGGGAACTTGCAAATCGCGATATAGGGCAGGGAGCCCGTTATGCGCGCGCCTAATGTAAGTTGGTCAGGTGCTAACTATGCCTAATTATCGAGGCGTACTTGCATTATTAACTTTTTGCCTCTTCCGCGGAGTTCAATCATGCAGAAAGAATCCGGTCGGGTGTCAGTCTTGAGACGAAAAGTGGCGGAGTACATCTACCCCACGGACTGGGTGTCTTTAAAGTGTCCAGATAATATTTCGGGGCAGCCACTTTGTACTACAGGGTTATCGTGACGTGCGCTGAATTATATGTTACGCCGCAACACTCCAACCATCCGGGACCGTAGGTACGACAGCTTCAGTAACGTCACTGAACGGTGTGGACTTGCTGTTTGGAGTGAAAGGCAGAACTAATCATGGAAGAGAGGCCGCACGCGAGTAGACTGAATTACTTCCCCCAGAGTGGACGTGGGATCGCCTTATAGCCGGTCCGTAAATAATTACGTAATGCCCGGGCGTTGCTTGCATAAAATATTTTCCGTAGAACTTTCTTTTACGAGCCGGCACCCCTCTGAGAATGCCTGCGTTCGGCACGCGAGTCTATTGAAACTAATTGCAGTGTCCCATGCCTTTAGCAAGGGGCTGACCAGATCGCCCGGCCGTATTGGCTAACGATCAAGTCGTCGAAGCCCCCAGAAGCGGTCTGTAGGTGTAGTTTCGTTGTATCTTCAGGGCGAGTCTCGGGGCCTTCTGCGCTCGTGATGGTACCAGGTTCCACATTATGTGAATATGGGGGGTTATTGGAGGAGGTTTCACTGCCTACGACTTTTTTGGGCTGTGGTGAATCAACCTGTATTTCACGGGCTCTCCAATCATGTCTGGATTCACTCCCGTAGACCTGTCCTAGTGGCTGGTTGGAGCACTTAAGGGGTTAAAAGAGGGCGAGTTTCACAGGCACGGCATTTAGGTCGCAAGGTGCATTTCCACACTGTCTAAGGGCAGTCCGCAGAGCAAGCCGACGGCCAACTCGATTTCGCGTCGTGTGAATAGGTCGATACGTGCAGGCTCACTGACTGCGCCAGGTACTGTTCTGTCTGCAGTTCCTGAGAGAAAGACCCGGCCCCCAATCACCATGGGGAGTAGGAGGTCAGGAGCCTACTCAGGCATGTGAACCCTGCCTACGAAACACCTTCGAAAGTCTCAAGGAAGAACCGTGGACGGCGTCATGGTCCGTACGCCATAGTTAGGGGAGTTATGGTCATACCACGTGATTCGTCACGGATGATGGATCTTGCATCTCAATGAATGCAAAGTCTGAGAGAAAATGGTGTATACCGCACAGGCTACCGATGGTATGAACGAGCTTATGTAGAATGCCCATTCGCAAGGTCCAGGGGTTAAAATGTAGTCGTCTCTATGGCGTAGCTAAGGCCAGCGGTGACTCTCGGAATACGGTCCTAATGCCTACTTATGCAGTTGTAG
TGCCGCAAAATTTTCAAGCGAGATTTACCCCCCCAAAATAACGATGAGTGCCGTCGTACTATGGGACCGAGGAGCCCACTGCTGCATTGCTTCTCTTTGAGGCATTGACACTCTGGTCCCTTATAGTATCCATGAAGGAACGCTAGGCTTAGTGTGAAGGAATCTTCTACCCCCAACGGCAAAAACTTACTGCGCTCCATGGATTCACGGAGGAGAACGGTAATAGTGTATGGGCTGGAGTTTTGCGAAAATATAAATACCTAGTGCGTGAACCGTATGTCCTAATTGGAGATCCCCGATCGACGCGAACTGAAAGGCTACTAGAAGTTGTGAGACAAATACATAGTCTGTTGTATAGGGTCACTACGCATATATCGTCGACGTTTAGTTCGGGTCTCAAGCAACGGGTCCAACATCCACCTTAAGCAGGCTAGCGGGCGAGACGCCCTGAATATTCCTTGACCAGTCAGTTCAAGGCGCTCTAATTTAAGCGGTAGTAGAACACCCGTATCCTGGCAACTTAAGGTTCTCAAGCAACCGATATCCCCACAGCCCACCCTCCCATCGGATGCGCGGAACATCCCTGTGGAGGTTGGTCAAGAATTAATTCAACTGTATAGCGCTAGTGGAGTATCCACGATGACATAGGGGTAAAATGCCCCGTTCAAAGTTGTCTACCTGCGCGTTGCTTGCCTTATTCTAAGCGATCGTCCCTCCTTCACGACACGCCATCTGTTCTAGAGACGTACATAGAGGGGGAGGGTGCATCCGAGGTACGCATAGCATATCTTGCACGGTAATACGCGCGGCCACCTCGGTTCATCAGCTTAACGTCCAAGAGGTCATATTACGATTCAGCTGTTCCACTTTTATTAGTGCAAGGACAACGATCCGGTTAAGTTGATGCCGTGTCGGTACTAAGCTGTAAAGCTTCCTTATTGTCATATGGCCATGTCTCGCGCACTTAAGTGATTTGAACTATTGGGATTTCCTTATACGGTAACAGTGCACAGTGTTCTCTCGTTCAATAGTACACTTTTTAGAACTTGGCTTGCTAGT'
# --- Scratch inputs and ad-hoc driver calls for the exercises below ---
p = 'GTGCTGAATCC'
q = 'GACTCCCAATA'
pattern = 'GAGGCAA'
text = 'TAGTAGGTGCAGGTTGTGGTATAGCGCTTCAAATCTTATTTTATGAGGACTTAGCTCCATGTGAGTCAAGAGTTAACTTATCTCGTCGTTCGGGGATTTTGGGAAGTGGTCAGCCCATCGTCGGCAACACTATCAACTGCATCATACGTGTGCGTACACAGAGTTCCTAGACCCACCGTTCCTTGCTCGCTTAACTTAAGCACCTATCCACGCGACCGGAGGCACGACTTTATTTGGGCTGTTCGCCCTCCAATTTTAGTAGCATACGGTCCTGACTGTGGAGGCAATCTCCTGTGTGGTGGGGGTTTCAGTCATGCTCCACCAAAGCAATGCGATTTTGAGTGGGATGACGCGTCAAATACCAGTCTGTATGCTACTATAATCCT'
# NOTE(review): approximate_pattern_match is defined further down this file;
# if execution really proceeds top-to-bottom this call raises NameError —
# confirm the function is not also defined earlier (before this chunk).
print(approximate_pattern_match(pattern, text, 2))
#print(hamming_distance(p, q))
#skew(3, nda_seq)
#print(pattern_match(nda_seq, 'CGC'))
#rev_comp = reverse_complement('GCTAGCT')
#pattern_count(nda_seq, 'CGCG')
#computing_frequencies(nda_seq, 2)
# print(kmers[-1])
# print(kmers)
# anon_ndex = nc.Ndex2("http://public.ndexbio.org")
# query_result_cx = anon_ndex.get_neighborhood('c9243cce-2d32-11e8-b939-0ac135e8bacf', 'XRN1')
def hamming_distance(p, q):
    """Return the Hamming distance between equal-length strings p and q.

    Iterates over the positions of p, so q must be at least as long as p
    (a shorter q raises IndexError, matching the original behavior).
    """
    # Idiomatic replacement for the manual counter loop.
    return sum(1 for i in range(len(p)) if p[i] != q[i])
def approximate_pattern_match(pattern, text, d):
    """Return space-separated start indices where pattern occurs in text
    with at most d mismatches (Hamming distance <= d)."""
    window = len(pattern)
    found_match = [
        str(start)
        for start in range(len(text) - window + 1)
        if hamming_distance(text[start:start + window], pattern) <= d
    ]
    print('approx pattern match count is %s' % str(len(found_match)))
    return ' '.join(found_match)
skew_list = []  # module-level cache of the running G-C skew, filled by init_skew


def init_skew(dna_seq):
    """Populate the module-level skew_list with the running G-C skew of
    dna_seq (skew[i] = #G - #C over the first i bases) and print the
    positions where the skew is minimal."""
    # Bug fix: removed the unused local `count` from the original.
    skew_count = 0
    skew_list.append(skew_count)
    for bp in dna_seq:
        if bp == 'G':
            skew_count += 1
        elif bp == 'C':
            skew_count -= 1
        skew_list.append(skew_count)
    print(skew_list)
    print('---------------')
    # Report every position achieving the minimum skew.
    min_list = min(skew_list)
    for idx, val in enumerate(skew_list):
        if val == min_list:
            print(idx, val)


def skew(k, dna_seq):
    """Return the G-C skew after the first k bases of dna_seq.

    Computes (and caches) the whole skew profile on first use; later calls
    reuse the cache and ignore dna_seq.
    """
    if len(skew_list) < 1:
        init_skew(dna_seq)
    return skew_list[k]
def computing_frequencies(text, k):
    """Return the k-mer frequency array of text: a list of length 4**k whose
    j-th entry counts occurrences of the j-th k-mer in lexicographic
    (A<C<G<T) order.

    Bug fixes vs. the original:
    - the array was allocated with 4**k - 1 slots, so the largest index
      (all-T k-mer) raised IndexError;
    - the loop stopped at len(text) - k and dropped the final k-mer;
    - the function never returned the array;
    - the index came from pattern_to_number, which returned a digit *string*,
      making freq_array[j] a TypeError — the base-4 index is computed
      inline here instead.
    """
    base_to_digit = {'A': 0, 'C': 1, 'G': 2, 'T': 3}
    freq_array = [0] * (4 ** k)
    for i in range(len(text) - k + 1):
        index = 0
        for c in text[i:i + k]:
            index = index * 4 + base_to_digit[c]
        freq_array[index] += 1
    return freq_array
def pattern_to_number(pattern):
    """Convert a DNA k-mer to its base-4 integer index (A=0, C=1, G=2, T=3).

    Bug fixes vs. the original:
    - it returned the base-4 digits as a *string* ('GT' -> '23'), which broke
      callers using the result as a list index; it now returns an int;
    - removed dead experiment code (`octal`, and `Bases()`, which is
      undefined in this file and raised NameError on every call).
    """
    base_pair_to_number = {
        'A': 0,
        'C': 1,
        'G': 2,
        'T': 3
    }
    number = 0
    for c in pattern:
        number = number * 4 + base_pair_to_number[c]
    return number
def find_clumps(text, k, L, t):
    """Return the k-mers that form (L, t)-clumps in text: k-mers appearing
    at least t times inside some window of length L.

    Bug fixes vs. the original:
    - the dict comprehension shadowed the parameter `k`; renamed the loop
      variables;
    - counting via len(pattern_match(...).split(' ')) reported 1 for an
      empty match string (''.split(' ') == ['']), so t == 1 could match a
      window with zero occurrences; pattern_count is used instead;
    - removed the per-iteration debug print of the window index.
    """
    kmer_dict = get_kmer_dict(text, k)
    # Only k-mers with at least t total occurrences can possibly clump.
    candidates = {kmer: count for kmer, count in kmer_dict.items() if count >= t}
    kmer_clumps_found = []
    print('%s total iterations' % str(len(text) - (L - 1)))
    for kmer in candidates:
        for i in range(0, len(text) - (L - 1)):
            if pattern_count(text[i:i + L], kmer) >= t:
                kmer_clumps_found.append(kmer)
                break
    return kmer_clumps_found
def pattern_count(text, pattern):
    """Count the (possibly overlapping) occurrences of pattern in text."""
    window = len(pattern)
    return sum(
        1
        for start in range(len(text) - window + 1)
        if text[start:start + window] == pattern
    )
def get_kmer_dict(text, k):
    """Map every k-mer occurring in text to its number of (overlapping)
    occurrences.

    Improvements vs. the original: removed the unused local
    `max_occurences`, and replaced the get/None branching with the
    dict.get(default) idiom.
    """
    kmer_dict = {}
    for i in range(len(text) - k + 1):
        kmer = text[i:i + k]
        kmer_dict[kmer] = kmer_dict.get(kmer, 0) + 1
    return kmer_dict
def frequent_words(text, k):
    """Return the list of most frequent k-mers in text.

    Raises ValueError (via max on an empty sequence) when text has no
    k-mer, matching the original.

    Bug fixes vs. the original:
    - the second loop reused `k` as the loop variable, shadowing the k-mer
      length parameter;
    - removed the dead `kmers` list (built and never used) and the per-item
      debug print.
    """
    kmer_dict = get_kmer_dict(text, k)
    max_count = max(kmer_dict.values())
    return [kmer for kmer, count in kmer_dict.items() if count == max_count]
def reverse_complement(text):
    """Return the reverse complement of a DNA string (A<->T, C<->G)."""
    comp_map = {
        'A': 'T',
        'T': 'A',
        'C': 'G',
        'G': 'C'
    }
    return ''.join(comp_map.get(base) for base in reversed(text))
def pattern_match(text, pattern):
    """Return the space-separated start indices of every (possibly
    overlapping) occurrence of pattern in text."""
    k = len(pattern)
    positions = [
        str(start)
        for start in range(len(text) - k + 1)
        if text[start:start + k] == pattern
    ]
    return ' '.join(positions)
# NOTE(review): run_test is not defined anywhere in this file — this call
# raises NameError; confirm where it was meant to come from.
run_test()
# Disabled driver code for earlier exercises.
if False:
    with open('E_coli.txt', 'r') as ech:
        e_coli_text = ech.read()
        found_clumps = find_clumps(e_coli_text, 9, 500, 3)
        print(' '.join(found_clumps))
    with open('vibrio_cholerae.txt', 'r') as vch:
        vib_chol_text = vch.read()
        found_pattern = pattern_match(vib_chol_text, 'ATGATCAAG')
        print(found_pattern)
    # NOTE(review): n_mer_length is undefined in this file; this branch
    # would fail if ever enabled. nda_seq is presumably the long DNA string
    # assigned near the top of the file — confirm.
    rev_comp = reverse_complement(nda_seq)
    pattern_count(nda_seq, 'GTTAATAGT')
    frequent_words(nda_seq, 13)
    # Manual k-mer histogram, printed in descending count order.
    kmers = []
    kmer_dict = {}
    for i in range(0, len(nda_seq) - (n_mer_length - 1)):
        kmers.append(nda_seq[i: i + n_mer_length])
        value = kmer_dict.get(nda_seq[i: i + n_mer_length])
        if value is not None:
            kmer_dict[nda_seq[i: i + n_mer_length]] += 1
        else:
            kmer_dict[nda_seq[i: i + n_mer_length]] = 1
    for w in sorted(kmer_dict, key=kmer_dict.get, reverse=True):
        print(w, kmer_dict[w])
| 471.783784 | 99,064 | 0.978498 | 788 | 104,736 | 129.838832 | 0.19797 | 0.001798 | 0.000586 | 0.001075 | 0.007555 | 0.005473 | 0.005327 | 0.002874 | 0.002678 | 0.002013 | 0 | 0.000794 | 0.014159 | 104,736 | 221 | 99,065 | 473.918552 | 0.990102 | 0.004172 | 0 | 0.16 | 0 | 0 | 0.95519 | 0.953483 | 0 | 1 | 0 | 0 | 0 | 1 | 0.086667 | false | 0 | 0.006667 | 0 | 0.16 | 0.073333 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
88cf1bba5e1ebccd4e87928594526895f9589a0b | 88 | py | Python | module_user/main.py | nheske/learn-python | 0f430ffa232103419669cec78202e9f2e74f7f6c | [
"MIT"
] | null | null | null | module_user/main.py | nheske/learn-python | 0f430ffa232103419669cec78202e9f2e74f7f6c | [
"MIT"
] | null | null | null | module_user/main.py | nheske/learn-python | 0f430ffa232103419669cec78202e9f2e74f7f6c | [
"MIT"
] | null | null | null | from module_folder.module_file import say_hello
# Demo: print a greeting directly, then one produced by the imported helper.
print("Hello World")
print(say_hello())
88eb30fccdc7cdfea58ca93bd7aa362f916194d4 | 47 | py | Python | pyxb/bundles/opengis/waterml.py | eLBati/pyxb | 14737c23a125fd12c954823ad64fc4497816fae3 | [
"Apache-2.0"
] | 123 | 2015-01-12T06:43:22.000Z | 2022-03-20T18:06:46.000Z | pyxb/bundles/opengis/waterml.py | eLBati/pyxb | 14737c23a125fd12c954823ad64fc4497816fae3 | [
"Apache-2.0"
] | 103 | 2015-01-08T18:35:57.000Z | 2022-01-18T01:44:14.000Z | pyxb/bundles/opengis/waterml.py | eLBati/pyxb | 14737c23a125fd12c954823ad64fc4497816fae3 | [
"Apache-2.0"
] | 54 | 2015-02-15T17:12:00.000Z | 2022-03-07T23:02:32.000Z | from pyxb.bundles.opengis.raw.waterml import *
| 23.5 | 46 | 0.808511 | 7 | 47 | 5.428571 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.085106 | 47 | 1 | 47 | 47 | 0.883721 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
88fde84a086b8b30fe142d3316d31fd074e9eefa | 37 | py | Python | src/encode_program/__init__.py | DenverCoder1/godel-number-to-code | 1f4fc3d5eba97ca45411302a67db79c77e44e19d | [
"MIT"
] | null | null | null | src/encode_program/__init__.py | DenverCoder1/godel-number-to-code | 1f4fc3d5eba97ca45411302a67db79c77e44e19d | [
"MIT"
] | null | null | null | src/encode_program/__init__.py | DenverCoder1/godel-number-to-code | 1f4fc3d5eba97ca45411302a67db79c77e44e19d | [
"MIT"
] | 1 | 2022-01-18T19:51:33.000Z | 2022-01-18T19:51:33.000Z | from .encode_program import * # noqa
| 18.5 | 36 | 0.756757 | 5 | 37 | 5.4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.162162 | 37 | 1 | 37 | 37 | 0.870968 | 0.108108 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
000a75e33e5de0363a6612c36d68d6fc9ce723a2 | 116 | py | Python | Spotkanie 5/fn_01.py | abixadamj/lekcja-enter-przyklady | 4f23ee32a139e955f992b727ad86c6effb87a6d6 | [
"MIT"
] | null | null | null | Spotkanie 5/fn_01.py | abixadamj/lekcja-enter-przyklady | 4f23ee32a139e955f992b727ad86c6effb87a6d6 | [
"MIT"
] | null | null | null | Spotkanie 5/fn_01.py | abixadamj/lekcja-enter-przyklady | 4f23ee32a139e955f992b727ad86c6effb87a6d6 | [
"MIT"
def my_function():
    """Print a three-line demo message (text intentionally in Polish)."""
    for line in ("1szy wiersz...", "--------", "2gi wiersz."):
        print(line)
# my_function()
| 14.5 | 27 | 0.508621 | 12 | 116 | 4.75 | 0.583333 | 0.350877 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.022472 | 0.232759 | 116 | 7 | 28 | 16.571429 | 0.617978 | 0.112069 | 0 | 0 | 0 | 0 | 0.326733 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0 | 0 | 0.25 | 0.75 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 6 |
00247dee86fb2681cc968c933c65459f08ea1749 | 86 | py | Python | tests/unit/peapods/runtimes/remote/jinad/yamls/dummy.py | Rohitpandit021/jina | f3db4d5e480375d8dc3bceda814ac1963dee76d7 | [
"Apache-2.0"
] | 15,179 | 2020-04-28T10:23:56.000Z | 2022-03-31T14:35:25.000Z | tests/unit/peapods/runtimes/remote/jinad/yamls/dummy.py | Rohitpandit021/jina | f3db4d5e480375d8dc3bceda814ac1963dee76d7 | [
"Apache-2.0"
] | 3,912 | 2020-04-28T13:01:29.000Z | 2022-03-31T14:36:46.000Z | tests/unit/peapods/runtimes/remote/jinad/yamls/dummy.py | Rohitpandit021/jina | f3db4d5e480375d8dc3bceda814ac1963dee76d7 | [
"Apache-2.0"
] | 1,955 | 2020-04-28T10:50:49.000Z | 2022-03-31T12:28:34.000Z | from jina.executors import BaseExecutor
class DummyExecutor(BaseExecutor):
    """Minimal executor adding no behavior beyond BaseExecutor.

    Used as a fixture by the jinad remote-runtime tests (see file path).
    """
    pass
| 14.333333 | 39 | 0.802326 | 9 | 86 | 7.666667 | 0.888889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.151163 | 86 | 5 | 40 | 17.2 | 0.945205 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 6 |
cc631f41fc2adfb1a0fc82ec42bf0174a333458a | 16,145 | py | Python | tests/subnet/test_subnet.py | mor120/ibmcloud-python-sdk | a828c67dd47a12e743774632b923985768e3e21b | [
"Apache-2.0"
] | 2 | 2021-05-31T23:41:34.000Z | 2021-09-08T13:49:30.000Z | tests/subnet/test_subnet.py | mor120/ibmcloud-python-sdk | a828c67dd47a12e743774632b923985768e3e21b | [
"Apache-2.0"
] | 5 | 2021-07-22T01:31:37.000Z | 2022-03-13T08:29:54.000Z | tests/subnet/test_subnet.py | mor120/ibmcloud-python-sdk | a828c67dd47a12e743774632b923985768e3e21b | [
"Apache-2.0"
] | 4 | 2021-06-11T15:38:47.000Z | 2022-02-27T16:03:27.000Z | from unittest import TestCase
from mock import patch
from ibmcloud_python_sdk.resource.resource_group import ResourceGroup
from ibmcloud_python_sdk.vpc.subnet import Subnet
from ibmcloud_python_sdk.vpc.vpc import Vpc
from ibmcloud_python_sdk.vpc.acl import Acl
from ibmcloud_python_sdk.vpc.gateway import Gateway
from tests.common import get_headers, get_one, qw, qw_not_found, qw_exception, \
qw_api_error, qw_delete_code_204, qw_delete_code_400
class SubnetTestCase(TestCase):
    """Unit tests for ibmcloud_python_sdk.vpc.subnet.Subnet.

    Each test patches the SDK's HTTP query wrapper (``qw``) — and, where
    needed, the dependent resource lookups — with canned responses imported
    from tests.common, then asserts on the payload the Subnet API returns.
    """

    def setUp(self):
        # Canned 'subnets' fixture plus a patched auth token for every test.
        self.type = 'subnets'
        self.content = get_one(self.type)
        self.subnet = Subnet()
        self.patcher = patch('ibmcloud_python_sdk.auth.get_token', get_headers)
        self.patcher.start()

    def tearDown(self):
        self.patcher.stop()

    # The five helpers below are patch *targets* (installed via
    # @patch.object), not test methods — they are called as plain functions,
    # which is why their first parameter is `path`, not `self`.
    def get_subnet_network_acl(path, vpc):
        content = get_one('subnets')
        return {'id': content['data']['network_acl']['id']}

    def get_subnet_public_gateway(path, vpc):
        content = get_one('subnets')
        return {'id': content['data']['public_gateway']['id']}

    def get_resource_group(path, group):
        data = get_one('subnets')
        return {'id': data['data']['resource_group']['id']}

    def get_vpc(path, vpc):
        data = get_one('subnets')
        return {'id': data['data']['vpc']['id']}

    def get_subnet(path, vpc):
        data = get_one('subnets')
        return {'id': data['data']['id']}

    # --- happy-path lookups ---
    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    def test_get_subnets(self):
        response = self.subnet.get_subnets()
        self.assertEqual(response['total_count'], 2)

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    def test_get_subnet_by_id(self):
        response = self.subnet.get_subnet_by_id(self.content['data']['id'])
        self.assertEqual(response['id'], self.content['data']['id'])

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    def test_get_subnet_by_name(self):
        response = self.subnet.get_subnet_by_name(self.content['data']['name'])
        self.assertEqual(response['name'], self.content['data']['name'])

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    def test_get_subnet_with_id(self):
        response = self.subnet.get_subnet(self.content['data']['id'])
        self.assertEqual(response['id'], self.content['data']['id'])

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    def test_get_subnet_with_name(self):
        response = self.subnet.get_subnet(self.content['data']['name'])
        self.assertEqual(response['name'], self.content['data']['name'])

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    def test_get_subnet_network_acl(self):
        response = self.subnet.get_subnet_network_acl(
            self.content['data']['id'])
        self.assertEqual(response['network_acl']['name'],
                         self.content['data']['network_acl']['name'])

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    def test_get_subnet_public_gateway(self):
        response = self.subnet.get_subnet_public_gateway(
            self.content['data']['id'])
        self.assertEqual(response['public_gateway']['name'],
                         self.content['data']['public_gateway']['name'])

    # --- not-found and API-error paths ---
    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw_not_found)
    def test_get_subnet_not_found(self):
        response = self.subnet.get_subnet('wrong_subnet_name')
        self.assertEqual(response['errors'][0]['code'], 'not_found')

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw_not_found)
    def test_get_subnet_network_acl_not_found(self):
        response = self.subnet.get_subnet_network_acl('wrong_subnet_name')
        self.assertEqual(response['errors'][0]['code'], 'not_found')

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw_not_found)
    def test_get_subnet_public_gateway_not_found(self):
        response = self.subnet.get_subnet_public_gateway('wrong_subnet_name')
        self.assertEqual(response['errors'][0]['code'], 'not_found')

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw_api_error)
    def test_get_subnet_api_error(self):
        response = self.subnet.get_subnet(self.content['data']['id'])
        self.assertEqual(response['errors'][0]['code'], 'unpredictable_error')

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw_api_error)
    def test_get_subnet_network_acl_api_error(self):
        response = self.subnet.get_subnet_network_acl(self.content['data']['id'])
        self.assertEqual(response['errors'][0]['code'], 'unpredictable_error')

    # --- exception propagation from the transport layer ---
    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw_exception)
    def test_get_subnets_exception(self):
        with self.assertRaises(Exception):
            self.subnet.get_subnets()

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw_exception)
    def test_get_subnet_by_id_exception(self):
        with self.assertRaises(Exception):
            self.subnet.get_subnet_by_id(self.content['data']['id'])

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw_exception)
    def test_get_subnet_by_name_exception(self):
        with self.assertRaises(Exception):
            self.subnet.get_subnet_by_name(self.content['data']['name'])

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw_exception)
    def test_get_subnet_public_gateway_exception(self):
        with self.assertRaises(Exception):
            self.subnet.get_subnet_public_gateway(self.content['data']['id'])

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw_exception)
    def test_get_subnet_network_acl_exception(self):
        with self.assertRaises(Exception):
            self.subnet.get_subnet_network_acl(self.content['data']['id'])

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw_exception)
    @patch.object(Vpc, 'get_vpc', get_vpc)
    def test_create_subnet_exception(self):
        with self.assertRaises(Exception):
            self.subnet.create_subnet(vpc=self.content['data']['vpc']['name'])

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw_exception)
    @patch.object(Acl, 'get_network_acl', get_subnet_network_acl)
    @patch.object(Subnet, 'get_subnet', get_subnet)
    def test_attach_network_acl_exception(self):
        with self.assertRaises(Exception):
            self.subnet.attach_network_acl(
                subnet=self.content['data']['id'],
                network_acl=self.content['data']['network_acl']['name'])

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw_exception)
    @patch.object(Gateway, 'get_public_gateway', get_subnet_public_gateway)
    @patch.object(Subnet, 'get_subnet', get_subnet)
    def test_attach_public_gateway_exception(self):
        with self.assertRaises(Exception):
            self.subnet.attach_public_gateway(
                subnet=self.content['data']['id'],
                public_gateway=self.content['data']['public_gateway']['name'])

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw_exception)
    @patch.object(Subnet, 'get_subnet', get_subnet)
    def test_detach_public_gateway_exception(self):
        with self.assertRaises(Exception):
            self.subnet.detach_public_gateway(self.content['data']['id'])

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw_exception)
    @patch.object(Subnet, 'get_subnet', get_subnet)
    def test_delete_subnet_exception(self):
        with self.assertRaises(Exception):
            self.subnet.delete_subnet(self.content['data']['id'])

    # --- subnet creation ---
    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    @patch.object(ResourceGroup, 'get_resource_group', get_resource_group)
    @patch.object(Subnet, 'get_subnet_network_acl', get_subnet_network_acl)
    @patch.object(Subnet, 'get_subnet_public_gateway', get_subnet_public_gateway)
    @patch.object(Vpc, 'get_vpc', get_vpc)
    def test_create_subnet(self):
        response = self.subnet.create_subnet(
            name=self.content['data']['name'],
            total_ipv4_address_count=256,
            resource_group=self.content['data']['resource_group']['name'],
            network_acl=self.content['data']['network_acl']['name'],
            public_gateway=self.content['data']['public_gateway']['name'],
            routing_table=self.content['data']['routing_table']['name'],
            zone=self.content['data']['zone']['name'],
            vpc=self.content['data']['vpc']['name'])
        self.assertEqual(response['subnets'][0]['id'], self.content['data']['id'])

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    @patch.object(ResourceGroup, 'get_resource_group', qw_not_found)
    def test_create_subnet_resource_group_not_found(self):
        response = self.subnet.create_subnet(
            name=self.content['data']['name'],
            total_ipv4_address_count=256,
            resource_group='not_found',
            network_acl=self.content['data']['network_acl']['name'],
            public_gateway=self.content['data']['public_gateway']['name'],
            routing_table=self.content['data']['routing_table']['name'],
            zone=self.content['data']['zone']['name'],
            vpc=self.content['data']['vpc']['name'])
        self.assertEqual(response['errors'][0]['code'], 'not_found')

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    @patch.object(ResourceGroup, 'get_resource_group', get_resource_group)
    @patch.object(Subnet, 'get_subnet_network_acl', qw_not_found)
    @patch.object(Subnet, 'get_subnet_public_gateway', get_subnet_public_gateway)
    @patch.object(Vpc, 'get_vpc', get_vpc)
    def test_create_subnet_network_acl_not_found(self):
        response = self.subnet.create_subnet(
            name=self.content['data']['name'],
            total_ipv4_address_count=256,
            resource_group=self.content['data']['resource_group']['name'],
            network_acl='not_found',
            public_gateway=self.content['data']['public_gateway']['name'],
            routing_table=self.content['data']['routing_table']['name'],
            zone=self.content['data']['zone']['name'],
            vpc=self.content['data']['vpc']['name'])
        self.assertEqual(response['errors'][0]['code'], 'not_found')

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    @patch.object(ResourceGroup, 'get_resource_group', get_resource_group)
    @patch.object(Subnet, 'get_subnet_network_acl', get_subnet_network_acl)
    @patch.object(Subnet, 'get_subnet_public_gateway', qw_not_found)
    @patch.object(Vpc, 'get_vpc', get_vpc)
    def test_create_subnet_public_gateway_not_found(self):
        response = self.subnet.create_subnet(
            name=self.content['data']['name'],
            total_ipv4_address_count=256,
            resource_group=self.content['data']['resource_group']['name'],
            network_acl=self.content['data']['network_acl']['name'],
            public_gateway='not_found',
            routing_table=self.content['data']['routing_table']['name'],
            zone=self.content['data']['zone']['name'],
            vpc=self.content['data']['vpc']['name'])
        self.assertEqual(response['errors'][0]['code'], 'not_found')

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    @patch.object(ResourceGroup, 'get_resource_group', get_resource_group)
    @patch.object(Subnet, 'get_subnet_network_acl', get_subnet_network_acl)
    @patch.object(Subnet, 'get_subnet_public_gateway', get_subnet_public_gateway)
    @patch.object(Vpc, 'get_vpc', qw_not_found)
    def test_create_subnet_vpc_not_found(self):
        response = self.subnet.create_subnet(
            name=self.content['data']['name'],
            total_ipv4_address_count=256,
            resource_group=self.content['data']['resource_group']['name'],
            network_acl=self.content['data']['network_acl']['name'],
            public_gateway=self.content['data']['public_gateway']['name'],
            routing_table=self.content['data']['routing_table']['name'],
            zone=self.content['data']['zone']['name'],
            vpc='not_found')
        self.assertEqual(response['errors'][0]['code'], 'not_found')

    # --- attach / detach operations ---
    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    @patch.object(Acl, 'get_network_acl', get_subnet_network_acl)
    def test_attach_network_acl(self):
        response = self.subnet.attach_network_acl(
            subnet=self.content['data']['name'],
            network_acl=self.content['data']['network_acl']['name'])
        self.assertEqual(response['id'], self.content['data']['id'])

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    @patch.object(Acl, 'get_network_acl', qw_not_found)
    def test_attach_network_acl_not_found(self):
        response = self.subnet.attach_network_acl(
            subnet=self.content['data']['name'],
            network_acl='not_found')
        self.assertEqual(response['errors'][0]['code'], 'not_found')

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    @patch.object(Acl, 'get_network_acl', get_subnet_network_acl)
    @patch.object(Subnet, 'get_subnet', qw_not_found)
    def test_attach_network_acl_subnet_not_found(self):
        response = self.subnet.attach_network_acl(
            subnet='not_found',
            network_acl=self.content['data']['network_acl']['name'])
        self.assertEqual(response['errors'][0]['code'], 'not_found')

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    @patch.object(Gateway, 'get_public_gateway', get_subnet_public_gateway)
    def test_attach_public_gateway(self):
        response = self.subnet.attach_public_gateway(
            subnet=self.content['data']['name'],
            public_gateway='my-public-gateway')
        self.assertEqual(response['id'], self.content['data']['id'])

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    @patch.object(Gateway, 'get_public_gateway', qw_not_found)
    def test_attach_public_gateway_not_found(self):
        response = self.subnet.attach_public_gateway(
            subnet=self.content['data']['name'],
            public_gateway='not_found')
        self.assertEqual(response['errors'][0]['code'], 'not_found')

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    @patch.object(Gateway, 'get_public_gateway', get_subnet_public_gateway)
    @patch.object(Subnet, 'get_subnet', qw_not_found)
    def test_attach_public_gateway_subnet_not_found(self):
        response = self.subnet.attach_public_gateway(
            subnet='not_found',
            public_gateway=self.content['data']['public_gateway']['name'])
        self.assertEqual(response['errors'][0]['code'], 'not_found')

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw_delete_code_204)
    @patch.object(Subnet, 'get_subnet', get_subnet)
    def test_detach_public_gateway(self):
        response = self.subnet.detach_public_gateway(
            self.content['data']['id'])
        self.assertEqual(response["status"], 'deleted')

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    @patch.object(Subnet, 'get_subnet', qw_not_found)
    def test_detach_public_gateway_not_found(self):
        response = self.subnet.detach_public_gateway('not_found')
        self.assertEqual(response['errors'][0]['code'], 'not_found')

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw_delete_code_400)
    @patch.object(Subnet, 'get_subnet', get_subnet)
    def test_detach_public_gateway_bad_request(self):
        response = self.subnet.detach_public_gateway(self.content['data']['id'])
        self.assertEqual(response, 'bad_request')

    # --- deletion ---
    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw_delete_code_204)
    @patch.object(Subnet, 'get_subnet', get_subnet)
    def test_delete_subnet(self):
        response = self.subnet.delete_subnet(
            self.content['data']['id'])
        self.assertEqual(response["status"], 'deleted')

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw)
    @patch.object(Subnet, 'get_subnet', qw_not_found)
    def test_delete_subnet_not_found(self):
        response = self.subnet.delete_subnet('not_found')
        self.assertEqual(response['errors'][0]['code'], 'not_found')

    @patch('ibmcloud_python_sdk.vpc.subnet.qw', qw_delete_code_400)
    @patch.object(Subnet, 'get_subnet', get_subnet)
    def test_delete_subnet_bad_request(self):
        response = self.subnet.delete_subnet(self.content['data']['id'])
        self.assertEqual(response, 'bad_request')
| 47.625369 | 82 | 0.679343 | 2,079 | 16,145 | 4.965368 | 0.03848 | 0.063644 | 0.101715 | 0.083309 | 0.927831 | 0.909329 | 0.897607 | 0.884433 | 0.853143 | 0.812264 | 0 | 0.004101 | 0.16934 | 16,145 | 338 | 83 | 47.766272 | 0.76564 | 0 | 0 | 0.612457 | 0 | 0 | 0.213565 | 0.093465 | 0 | 0 | 0 | 0 | 0.134948 | 1 | 0.15917 | false | 0 | 0.027682 | 0 | 0.207612 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
cc697ffcbd007c817de72988b47636fe98a4b46b | 22 | py | Python | anotherbot.py | singh1114/slackbot | b15fe4051cc7a7281b160454664355f1a6e04eca | [
"MIT"
] | 1 | 2017-11-20T05:03:02.000Z | 2017-11-20T05:03:02.000Z | anotherbot.py | singh1114/slackbot | b15fe4051cc7a7281b160454664355f1a6e04eca | [
"MIT"
] | 1 | 2021-06-01T21:59:47.000Z | 2021-06-01T21:59:47.000Z | anotherbot.py | singh1114/slackbot | b15fe4051cc7a7281b160454664355f1a6e04eca | [
"MIT"
] | null | null | null | import bot
bot.Bot() | 7.333333 | 10 | 0.681818 | 4 | 22 | 3.75 | 0.5 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.181818 | 22 | 3 | 11 | 7.333333 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
cc8ca6509d254ea6f9537bfa273f11f11d0390c6 | 250 | py | Python | tests/utils.py | GoodManWEN/dynamic_salt | 152416a918ef68b2f272d200664a224161fcb4cc | [
"MIT"
] | null | null | null | tests/utils.py | GoodManWEN/dynamic_salt | 152416a918ef68b2f272d200664a224161fcb4cc | [
"MIT"
] | null | null | null | tests/utils.py | GoodManWEN/dynamic_salt | 152416a918ef68b2f272d200664a224161fcb4cc | [
"MIT"
] | null | null | null | import random
def rand_char_generator():
char = f"{str(random.randint(1000000000,9999999999))}salt"
char = f"{chr(random.randint(1,127))}{char}{chr(random.randint(0xFF00,0xFFEF))}{chr(random.randint(0x4E00,0x9FA5))}".encode()
return char | 41.666667 | 129 | 0.72 | 35 | 250 | 5.085714 | 0.6 | 0.292135 | 0.269663 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.154867 | 0.096 | 250 | 6 | 130 | 41.666667 | 0.632743 | 0 | 0 | 0 | 0 | 0.2 | 0.613546 | 0.613546 | 0 | 0 | 0.095618 | 0 | 0 | 1 | 0.2 | false | 0 | 0.2 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 6 |
4e1a729ad7ad946d29063cd1a66d289b6f40dbe0 | 201 | py | Python | piroommonitor/sensors/__init__.py | ascense/piroommonitor | ca8f4c0a9b4bcfc8611553a9d4b8fe08dae5ebcf | [
"MIT"
] | null | null | null | piroommonitor/sensors/__init__.py | ascense/piroommonitor | ca8f4c0a9b4bcfc8611553a9d4b8fe08dae5ebcf | [
"MIT"
] | null | null | null | piroommonitor/sensors/__init__.py | ascense/piroommonitor | ca8f4c0a9b4bcfc8611553a9d4b8fe08dae5ebcf | [
"MIT"
] | null | null | null | from ._bmp180 import Bmp180
from ._bme680 import Bme680
from ._si7021 import Si7021
from ._tcs34725 import Tcs34725
from ._tsl2561 import Tsl2561
from ._i2c_utils import get_online_i2c_devices
| 25.125 | 47 | 0.810945 | 28 | 201 | 5.464286 | 0.428571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.236686 | 0.159204 | 201 | 7 | 48 | 28.714286 | 0.668639 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
9d8ed34e9e94fbd09392275532e65a98b7b25c0a | 20,575 | py | Python | domainbed/model_selection.py | bismex/DomainBed | 27335e6ba24a946fedd2c52b13e39df132a89008 | [
"MIT"
] | null | null | null | domainbed/model_selection.py | bismex/DomainBed | 27335e6ba24a946fedd2c52b13e39df132a89008 | [
"MIT"
] | null | null | null | domainbed/model_selection.py | bismex/DomainBed | 27335e6ba24a946fedd2c52b13e39df132a89008 | [
"MIT"
] | 1 | 2022-03-11T11:09:12.000Z | 2022-03-11T11:09:12.000Z | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import itertools
import numpy as np
def get_test_records(records):
    """Given records with a common test env, get the test records (i.e. the
    records with *only* that single test env and no other test envs)"""
    def _has_single_test_env(record):
        return len(record['args']['test_envs']) == 1
    return records.filter(_has_single_test_env)
def get_single_dg_records(records):
    """Keep only runs whose test_envs list covers all environments but one
    (the single-source DG setting)."""
    # Count environments by scanning the first record for 'env<i>_in_acc' keys.
    num_env = sum(
        1 for key in records[0] if 'env' in key and '_in_acc' in key
    )
    return records.filter(lambda r: len(r['args']['test_envs']) == num_env - 1)
def check_holdout_fraction(records):
    """Drop records that were run with no held-out validation split."""
    def _has_holdout(record):
        return record['args']['holdout_fraction'] != 0.0
    return records.filter(_has_holdout)
class SelectionMethod:
    """Abstract class whose subclasses implement strategies for model
    selection across hparams and timesteps."""

    def __init__(self):
        # Static-only API: instantiation is deliberately forbidden.
        raise TypeError

    @classmethod
    def run_acc(self, run_records):
        """
        Given records from a run, return a {val_acc, test_acc} dict representing
        the best val-acc and corresponding test-acc for that run.
        """
        raise NotImplementedError

    @classmethod
    def hparams_accs(self, records):
        """
        Given all records from a single (dataset, algorithm, test env) pair,
        return a sorted list of (run_acc, records) tuples.
        """
        # One run per hparams_seed; score each run via run_acc, drop
        # unscorable runs (None), then sort with best val_acc first
        # ([::-1] reverses the ascending sort).
        return (records.group('args.hparams_seed')
            .map(lambda _, run_records:
                (
                    self.run_acc(run_records),
                    run_records
                )
            ).filter(lambda x: x[0] is not None)
            .sorted(key=lambda x: x[0]['val_acc'])[::-1]
        )

    @classmethod
    def sweep_acc(self, records):
        """
        Given all records from a single (dataset, algorithm, test env) pair,
        return the mean test acc of the k runs with the top val accs.
        """
        _hparams_accs = self.hparams_accs(records)
        if len(_hparams_accs):
            # Best-val run's test accuracy.
            return _hparams_accs[0][0]['test_acc']
        else:
            return None
class SDGOracleLastMethod(SelectionMethod):
    """Single-source DG oracle selection: score each run by the accuracy at
    its *last* recorded step, evaluated on the test env's own splits."""
    name = "[SDG]Test-last"

    @classmethod
    def run_acc(self, run_records):
        run_records = get_single_dg_records(run_records)
        if not len(run_records):
            return None
        val_acc = []
        test_acc = []
        for j in range(len(run_records)):
            # Collect the out-split (val) and in-split (test) accuracy keys
            # for every test environment of this record.
            val_env_keys = []
            test_env_keys = []
            for i in itertools.count():
                if f'env{i}_in_acc' not in run_records[j]:
                    break
                if i in run_records[j]['args']['test_envs']:
                    val_env_keys.append(f'env{i}_out_acc')
                    test_env_keys.append(f'env{i}_in_acc')
            try:
                val_acc.append(np.mean([run_records[j][key] for key in val_env_keys]))
            except KeyError:
                # Bug fix: was a bare `except:`. When holdout_fraction == 0.0
                # the env*_out_acc keys do not exist (KeyError); fall back to
                # the in-split accuracy. Other errors now propagate.
                val_acc.append(np.mean([run_records[j][key] for key in test_env_keys]))
            test_acc.append(np.mean([run_records[j][key] for key in test_env_keys]))
        return {
            'val_acc': val_acc[-1],
            'test_acc': test_acc[-1]
        }
class SDGOracleLastMethodEACH(SelectionMethod):
    """Like SDGOracleLastMethod, but test_acc is the per-test-env accuracy
    *list* of the last step rather than its mean."""
    name = "[SDG]Test-last"

    @classmethod
    def run_acc(self, run_records):
        run_records = get_single_dg_records(run_records)
        if not len(run_records):
            return None
        val_acc = []
        test_acc = []
        for j in range(len(run_records)):
            # Collect the out-split (val) and in-split (test) accuracy keys
            # for every test environment of this record.
            val_env_keys = []
            test_env_keys = []
            for i in itertools.count():
                if f'env{i}_in_acc' not in run_records[j]:
                    break
                if i in run_records[j]['args']['test_envs']:
                    val_env_keys.append(f'env{i}_out_acc')
                    test_env_keys.append(f'env{i}_in_acc')
            try:
                val_acc.append(np.mean([run_records[j][key] for key in val_env_keys]))
            except KeyError:
                # Bug fix: was a bare `except:`. When holdout_fraction == 0.0
                # the env*_out_acc keys do not exist (KeyError); fall back to
                # the in-split accuracy. Other errors now propagate.
                val_acc.append(np.mean([run_records[j][key] for key in test_env_keys]))
            # EACH variant: keep the per-env accuracies, not their mean.
            test_acc.append([run_records[j][key] for key in test_env_keys])
        return {
            'val_acc': val_acc[-1],
            'test_acc': test_acc[-1]
        }
class SDGOracleMaxMethod(SelectionMethod):
    """Single-DG oracle selection: report the *maximum* accuracy over all
    recorded steps (an oracle upper bound with early stopping)."""
    name = "[SDG]Test-max"

    @classmethod
    def run_acc(cls, run_records):
        """Return the max {'val_acc', 'test_acc'} over records, or None.

        Note: val_acc and test_acc are maximized independently, so they may
        come from different steps.
        """
        run_records = get_single_dg_records(run_records)
        if not len(run_records):
            return None
        val_acc = []
        test_acc = []
        for record in run_records:
            val_env_keys = []   # test envs, "out" split
            test_env_keys = []  # test envs, "in" split
            for i in itertools.count():
                if f'env{i}_in_acc' not in record:
                    break
                if i in record['args']['test_envs']:
                    val_env_keys.append(f'env{i}_out_acc')
                    test_env_keys.append(f'env{i}_in_acc')
            try:
                val_acc.append(np.mean([record[key] for key in val_env_keys]))
            except KeyError:
                # holdout_fraction == 0.0: the test-env "out" split does not
                # exist, so use the "in" accuracies as the validation score.
                val_acc.append(np.mean([record[key] for key in test_env_keys]))
            test_acc.append(np.mean([record[key] for key in test_env_keys]))
        return {
            'val_acc': max(val_acc),
            'test_acc': max(test_acc),
        }
class SDGOracleMaxMethodEACH(SelectionMethod):
    """Like SDGOracleMaxMethod, but test_acc returns the full per-record
    lists of per-test-env accuracies instead of a single maximum."""
    name = "[SDG]Test-max"

    @classmethod
    def run_acc(cls, run_records):
        """Return {'val_acc': max over records, 'test_acc': list per record}.

        test_acc is a list (one entry per record) of lists of per-test-env
        "in" accuracies.
        """
        run_records = get_single_dg_records(run_records)
        if not len(run_records):
            return None
        val_acc = []
        test_acc = []
        for record in run_records:
            val_env_keys = []   # test envs, "out" split
            test_env_keys = []  # test envs, "in" split
            for i in itertools.count():
                if f'env{i}_in_acc' not in record:
                    break
                if i in record['args']['test_envs']:
                    val_env_keys.append(f'env{i}_out_acc')
                    test_env_keys.append(f'env{i}_in_acc')
            try:
                val_acc.append(np.mean([record[key] for key in val_env_keys]))
            except KeyError:
                # holdout_fraction == 0.0: the test-env "out" split does not
                # exist, so use the "in" accuracies as the validation score.
                val_acc.append(np.mean([record[key] for key in test_env_keys]))
            test_acc.append([record[key] for key in test_env_keys])
        return {
            'val_acc': max(val_acc),
            'test_acc': test_acc,
        }
class SDGIIDAccuracySelectionMethod50(SelectionMethod):
    """SDG selection scoring each record by a 50/50 blend of the training
    domains' held-out ("out") accuracy and their training ("in") accuracy."""
    name = "[SDG]Train50"

    @classmethod
    def _step_acc(cls, record):
        """Return {'val_acc', 'test_acc'} for a single record."""
        train_env_keys = []  # non-test envs, "in" split
        val_env_keys = []    # non-test envs, "out" split
        test_env_keys = []   # test envs, "in" split
        for i in itertools.count():
            if f'env{i}_in_acc' not in record:
                break
            if i in record['args']['test_envs']:
                test_env_keys.append(f'env{i}_in_acc')
            else:
                train_env_keys.append(f'env{i}_in_acc')
                val_env_keys.append(f'env{i}_out_acc')
        try:
            train_acc = np.mean([record[key] for key in train_env_keys])
        except KeyError:
            print('exception: training-domain val')
            train_acc = 0
        try:
            # "out" keys may be missing when holdout_fraction == 0.0.
            val_acc = np.mean([record[key] for key in val_env_keys])
        except KeyError:
            print('exception: training-domain val')
            val_acc = 0
        ratio_val = 0.5  # weight of the "out" accuracy in the blended score
        val_acc = ratio_val * val_acc + (1 - ratio_val) * train_acc
        test_acc = np.mean([record[key] for key in test_env_keys])
        return {
            'val_acc': val_acc,
            'test_acc': test_acc,
        }

    @classmethod
    def run_acc(cls, run_records):
        """Restrict to single-DG records and argmax the blended score."""
        run_records = get_single_dg_records(run_records)
        if not len(run_records):
            return None
        return run_records.map(cls._step_acc).argmax('val_acc')
class SDGIIDAccuracySelectionMethod75(SelectionMethod):
    """SDG selection scoring each record by a 75/25 blend of the training
    domains' held-out ("out") accuracy and their training ("in") accuracy."""
    name = "[SDG]Train75"

    @classmethod
    def _step_acc(cls, record):
        """Return {'val_acc', 'test_acc'} for a single record."""
        train_env_keys = []  # non-test envs, "in" split
        val_env_keys = []    # non-test envs, "out" split
        test_env_keys = []   # test envs, "in" split
        for i in itertools.count():
            if f'env{i}_in_acc' not in record:
                break
            if i in record['args']['test_envs']:
                test_env_keys.append(f'env{i}_in_acc')
            else:
                train_env_keys.append(f'env{i}_in_acc')
                val_env_keys.append(f'env{i}_out_acc')
        try:
            train_acc = np.mean([record[key] for key in train_env_keys])
        except KeyError:
            print('exception: training-domain val')
            train_acc = 0
        try:
            # "out" keys may be missing when holdout_fraction == 0.0.
            val_acc = np.mean([record[key] for key in val_env_keys])
        except KeyError:
            print('exception: training-domain val')
            val_acc = 0
        ratio_val = 0.75  # weight of the "out" accuracy in the blended score
        val_acc = ratio_val * val_acc + (1 - ratio_val) * train_acc
        test_acc = np.mean([record[key] for key in test_env_keys])
        return {
            'val_acc': val_acc,
            'test_acc': test_acc,
        }

    @classmethod
    def run_acc(cls, run_records):
        """Restrict to single-DG records and argmax the blended score."""
        run_records = get_single_dg_records(run_records)
        if not len(run_records):
            return None
        return run_records.map(cls._step_acc).argmax('val_acc')
class SDGIIDAccuracySelectionMethod25(SelectionMethod):
    """SDG selection scoring each record by a 25/75 blend of the training
    domains' held-out ("out") accuracy and their training ("in") accuracy."""
    name = "[SDG]Train25"

    @classmethod
    def _step_acc(cls, record):
        """Return {'val_acc', 'test_acc'} for a single record."""
        train_env_keys = []  # non-test envs, "in" split
        val_env_keys = []    # non-test envs, "out" split
        test_env_keys = []   # test envs, "in" split
        for i in itertools.count():
            if f'env{i}_in_acc' not in record:
                break
            if i in record['args']['test_envs']:
                test_env_keys.append(f'env{i}_in_acc')
            else:
                train_env_keys.append(f'env{i}_in_acc')
                val_env_keys.append(f'env{i}_out_acc')
        try:
            train_acc = np.mean([record[key] for key in train_env_keys])
        except KeyError:
            print('exception: training-domain val')
            train_acc = 0
        try:
            # "out" keys may be missing when holdout_fraction == 0.0.
            val_acc = np.mean([record[key] for key in val_env_keys])
        except KeyError:
            print('exception: training-domain val')
            val_acc = 0
        ratio_val = 0.25  # weight of the "out" accuracy in the blended score
        val_acc = ratio_val * val_acc + (1 - ratio_val) * train_acc
        test_acc = np.mean([record[key] for key in test_env_keys])
        return {
            'val_acc': val_acc,
            'test_acc': test_acc,
        }

    @classmethod
    def run_acc(cls, run_records):
        """Restrict to single-DG records and argmax the blended score."""
        run_records = get_single_dg_records(run_records)
        if not len(run_records):
            return None
        return run_records.map(cls._step_acc).argmax('val_acc')
class SDGIIDAccuracySelectionMethod(SelectionMethod):
    """SDG selection: pick the record with the best mean accuracy on the
    training domains' held-out ("out") splits."""
    name = "[SDG]Train"

    @classmethod
    def _step_acc(cls, record):
        """Return {'val_acc', 'test_acc'} for a single record."""
        val_env_keys = []   # non-test envs, "out" split
        test_env_keys = []  # test envs, "in" split
        for i in itertools.count():
            if f'env{i}_in_acc' not in record:
                break
            if i in record['args']['test_envs']:
                test_env_keys.append(f'env{i}_in_acc')
            else:
                val_env_keys.append(f'env{i}_out_acc')
        try:
            val_acc = np.mean([record[key] for key in val_env_keys])
        except KeyError:
            # "out" keys may be missing when holdout_fraction == 0.0; this
            # record then has no usable validation signal.
            print('exception: training-domain val')
            val_acc = 0
        test_acc = np.mean([record[key] for key in test_env_keys])
        return {
            'val_acc': val_acc,
            'test_acc': test_acc,
        }

    @classmethod
    def run_acc(cls, run_records):
        """Restrict to single-DG records and argmax the validation score."""
        run_records = get_single_dg_records(run_records)
        if not len(run_records):
            return None
        return run_records.map(cls._step_acc).argmax('val_acc')
class SDGIIDAccuracySelectionMethodEACH(SelectionMethod):
    """Like SDGIIDAccuracySelectionMethod, but test_acc keeps the per-test-env
    accuracies as a list instead of their mean."""
    name = "[SDG]Train"

    @classmethod
    def _step_acc(cls, record):
        """Return {'val_acc', 'test_acc'} for a single record.

        test_acc is a list of per-test-env "in" accuracies.
        """
        val_env_keys = []   # non-test envs, "out" split
        test_env_keys = []  # test envs, "in" split
        for i in itertools.count():
            if f'env{i}_in_acc' not in record:
                break
            if i in record['args']['test_envs']:
                test_env_keys.append(f'env{i}_in_acc')
            else:
                val_env_keys.append(f'env{i}_out_acc')
        try:
            val_acc = np.mean([record[key] for key in val_env_keys])
        except KeyError:
            # "out" keys may be missing when holdout_fraction == 0.0; this
            # record then has no usable validation signal.
            print('exception: training-domain val')
            val_acc = 0
        test_acc = [record[key] for key in test_env_keys]
        return {
            'val_acc': val_acc,
            'test_acc': test_acc,
        }

    @classmethod
    def run_acc(cls, run_records):
        """Restrict to single-DG records and argmax the validation score."""
        run_records = get_single_dg_records(run_records)
        if not len(run_records):
            return None
        return run_records.map(cls._step_acc).argmax('val_acc')
class SDGIIDAccuracySelectionMethodWhole(SelectionMethod):
    """SDG selection where the reported test accuracy is the holdout-weighted
    mean of the test envs' "in" and "out" splits (i.e. the whole test env)."""
    name = "[SDG]Trainwhole"

    @classmethod
    def _step_acc(cls, record):
        """Return {'val_acc', 'test_acc'} for a single record."""
        holdout = record['args']['holdout_fraction']
        test_envs = record['args']['test_envs']
        val_env_keys = []       # non-test envs, "out" split
        test_env_keys_in = []   # test envs, "in" split (weight 1 - holdout)
        test_env_keys_out = []  # test envs, "out" split (weight holdout)
        for i in itertools.count():
            if f'env{i}_in_acc' not in record:
                break
            if i in test_envs:
                test_env_keys_in.append(f'env{i}_in_acc')
                test_env_keys_out.append(f'env{i}_out_acc')
            else:
                val_env_keys.append(f'env{i}_out_acc')
        val_acc = np.mean([record[key] for key in val_env_keys])
        test_acc_in = np.mean([record[key] for key in test_env_keys_in])
        test_acc_out = np.mean([record[key] for key in test_env_keys_out])
        return {
            'val_acc': val_acc,
            'test_acc': test_acc_out * holdout + test_acc_in * (1 - holdout),
        }

    @classmethod
    def run_acc(cls, run_records):
        """Keep single-DG records with nonzero holdout, argmax val_acc."""
        run_records = get_single_dg_records(run_records)
        run_records = check_holdout_fraction(run_records)  # not 0.0
        if not len(run_records):
            return None
        return run_records.map(cls._step_acc).argmax('val_acc')
class OracleSelectionMethod(SelectionMethod):
    """Selection on the test-domain validation split (an oracle).

    Instead of taking the argmax of test_out_acc over all checkpoints, this
    keeps the last checkpoint of the run, i.e. no early stopping.
    """
    name = "test-domain-validation-set-(oracle)"

    @classmethod
    def run_acc(cls, run_records):
        """Return {'val_acc', 'test_acc'} of the last checkpoint, or None."""
        run_records = run_records.filter(
            lambda r: len(r['args']['test_envs']) == 1)
        if not len(run_records):
            return None
        test_env = run_records[0]['args']['test_envs'][0]
        # Last checkpoint by training step.
        last = run_records.sorted(lambda r: r['step'])[-1]
        return {
            'val_acc': last[f'env{test_env}_out_acc'],
            'test_acc': last[f'env{test_env}_in_acc'],
        }
class IIDAccuracySelectionMethod(SelectionMethod):
    """Picks argmax(mean(env_out_acc for env in train_envs))."""
    name = "training-domain-validation-set"

    @classmethod
    def _step_acc(cls, record):
        """Given a single record, return a {val_acc, test_acc} dict."""
        test_env = record['args']['test_envs'][0]
        val_accs = []
        for i in itertools.count():
            out_key = f'env{i}_out_acc'
            if out_key not in record:
                break
            if i != test_env:
                val_accs.append(record[out_key])
        return {
            'val_acc': np.mean(val_accs),
            'test_acc': record[f'env{test_env}_in_acc'],
        }

    @classmethod
    def run_acc(cls, run_records):
        """Score each single-test-env record and keep the best val_acc."""
        test_records = get_test_records(run_records)
        if not len(test_records):
            return None
        return test_records.map(cls._step_acc).argmax('val_acc')
class LeaveOneOutSelectionMethod(SelectionMethod):
    """Picks (hparams, step) by leave-one-out cross validation."""
    name = "leave-one-domain-out-cross-validation"

    @classmethod
    def _step_acc(self, records):
        """Return the {val_acc, test_acc} for a group of records corresponding
        to a single step."""
        # Exactly one record in the group should hold out only the target env.
        test_records = get_test_records(records)
        if len(test_records) != 1:
            return None
        test_env = test_records[0]['args']['test_envs'][0]
        # Count the environments reported in these records.
        n_envs = 0
        for i in itertools.count():
            if f'env{i}_out_acc' not in records[0]:
                break
            n_envs += 1
        # val_accs[k] holds the accuracy when env k is the held-out validation
        # domain; -1 is a sentinel meaning "no such run found".
        val_accs = np.zeros(n_envs) - 1
        for r in records.filter(lambda r: len(r['args']['test_envs']) == 2):
            # The validation env is the test env that is not the target env.
            val_env = (set(r['args']['test_envs']) - set([test_env])).pop()
            val_accs[val_env] = r['env{}_in_acc'.format(val_env)]
        # Drop the slot belonging to the target test env itself.
        val_accs = list(val_accs[:test_env]) + list(val_accs[test_env+1:])
        if any([v==-1 for v in val_accs]):
            # At least one (target, val) pair is missing: step is unusable.
            return None
        val_acc = np.sum(val_accs) / (n_envs-1)
        return {
            'val_acc': val_acc,
            'test_acc': test_records[0]['env{}_in_acc'.format(test_env)]
        }

    @classmethod
    def run_acc(self, records):
        # Score each training step's group of records, ignore unusable steps,
        # and keep the step with the best cross-validated accuracy.
        step_accs = records.group('step').map(lambda step, step_records:
            self._step_acc(step_records)
        ).filter_not_none()
        if len(step_accs):
            return step_accs.argmax('val_acc')
        else:
            return None
| 35.352234 | 130 | 0.567971 | 2,780 | 20,575 | 3.924101 | 0.06259 | 0.093501 | 0.039325 | 0.031259 | 0.773581 | 0.745806 | 0.723164 | 0.708681 | 0.706023 | 0.689797 | 0 | 0.007142 | 0.319514 | 20,575 | 581 | 131 | 35.413081 | 0.772016 | 0.14955 | 0 | 0.734597 | 0 | 0 | 0.101992 | 0.005908 | 0 | 0 | 0 | 0 | 0 | 1 | 0.066351 | false | 0 | 0.004739 | 0.00237 | 0.234597 | 0.018957 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
9de7305db0e3cfe4e91bb4b1be997409bdb95287 | 3,723 | py | Python | functional_tests/test_pagination.py | Shemahmforash/thisdayinmusic.net | 435b2af0dc1511b8d6e502e7065e7189c79b716c | [
"MIT"
] | 1 | 2018-10-04T15:40:24.000Z | 2018-10-04T15:40:24.000Z | functional_tests/test_pagination.py | Shemahmforash/thisdayinmusic.net | 435b2af0dc1511b8d6e502e7065e7189c79b716c | [
"MIT"
] | 76 | 2019-06-02T09:43:35.000Z | 2022-02-10T10:32:32.000Z | functional_tests/test_pagination.py | Shemahmforash/thisdayinmusic.net | 435b2af0dc1511b8d6e502e7065e7189c79b716c | [
"MIT"
] | null | null | null | import requests_mock
from django.conf import settings
from functional_tests.selenium_test_case import SeleniumTestCase
class PaginationTest(SeleniumTestCase):
    """Browser tests for the pagination widget, with the events API mocked.

    Every test serves an empty events page (total=59, 15 results per page)
    at a given offset and checks which page number the widget marks active.
    """

    @staticmethod
    def _mock_events(m, url_suffix, offset):
        """Register a mocked API response for the given endpoint and offset."""
        m.get(settings.API_BASE_ADDRESS + url_suffix, json={
            "response": {
                "status": {
                    "version": 0.1,
                    "code": 0,
                    "status": "Success"
                },
                "events": [
                ],
                "pagination": {
                    "total": 59,
                    "offset": offset,
                    "results": 15
                }
            }
        }, status_code=200)

    def _assert_active_page(self, page_number):
        """Assert the pagination widget exists and marks `page_number` active."""
        pagination = self.browser.find_element_by_class_name('pagination')
        self.assertIsNotNone(pagination)
        active_page = pagination.find_element_by_class_name('active').text
        self.assertIn('%d\n(current)' % page_number, active_page)

    @requests_mock.Mocker()
    def test_main_page_presents_pagination(self, m):
        self._mock_events(m, '/event/', 0)
        self.browser.get(self.live_server_url)
        self._assert_active_page(1)

    @requests_mock.Mocker()
    def test_events_page_presents_pagination(self, m):
        self._mock_events(m, '/event/', 0)
        self.browser.get('%s/events/May/02' %
                         self.live_server_url)
        self._assert_active_page(1)

    @requests_mock.Mocker()
    def test_main_page_has_correct_page_selected(self, m):
        self._mock_events(m, '/event/?offset=15', 15)
        self.browser.get(self.live_server_url + '?page=2')
        self._assert_active_page(2)

    @requests_mock.Mocker()
    def test_events_page_has_correct_page_selected(self, m):
        self._mock_events(m, '/event/?offset=30', 30)
        self.browser.get('%s/events/May/02?page=3' %
                         self.live_server_url)
        self._assert_active_page(3)
| 31.550847 | 74 | 0.486167 | 339 | 3,723 | 5.079646 | 0.19469 | 0.081301 | 0.060395 | 0.083624 | 0.897793 | 0.897793 | 0.897793 | 0.87863 | 0.87863 | 0.87863 | 0 | 0.026738 | 0.39726 | 3,723 | 117 | 75 | 31.820513 | 0.740642 | 0 | 0 | 0.693878 | 0 | 0 | 0.132689 | 0.006178 | 0 | 0 | 0 | 0 | 0.081633 | 1 | 0.040816 | false | 0 | 0.030612 | 0 | 0.081633 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
d176d9dd24027d3d6d0b31edaff59dc17e6ff58e | 221 | py | Python | materials/db/__init__.py | hicsail/materials | eb1770787193141cbc9a9e89f6be33ed7de05828 | [
"MIT"
] | 1 | 2015-12-04T19:53:55.000Z | 2015-12-04T19:53:55.000Z | materials/db/__init__.py | hicsail/materials | eb1770787193141cbc9a9e89f6be33ed7de05828 | [
"MIT"
] | null | null | null | materials/db/__init__.py | hicsail/materials | eb1770787193141cbc9a9e89f6be33ed7de05828 | [
"MIT"
] | null | null | null | from .tables import Base, Component, Mixture, Ref, Measurement, Listing, Property
from .utils import get_or_create
__all__ = ['Base', 'Component', 'Mixture', 'Ref', 'Measurement', 'Listing', 'Property', 'get_or_create']
| 44.2 | 104 | 0.733032 | 27 | 221 | 5.703704 | 0.555556 | 0.168831 | 0.25974 | 0.298701 | 0.636364 | 0.636364 | 0.636364 | 0 | 0 | 0 | 0 | 0 | 0.113122 | 221 | 4 | 105 | 55.25 | 0.785714 | 0 | 0 | 0 | 0 | 0 | 0.280543 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
d18565c068d46ebec64a46add9f49a2d5f937455 | 351 | py | Python | pycord/models/__init__.py | henry232323/pycord | 93b4c0b38f1036f3d36af0a5c00e62a15c312841 | [
"MIT"
] | null | null | null | pycord/models/__init__.py | henry232323/pycord | 93b4c0b38f1036f3d36af0a5c00e62a15c312841 | [
"MIT"
] | null | null | null | pycord/models/__init__.py | henry232323/pycord | 93b4c0b38f1036f3d36af0a5c00e62a15c312841 | [
"MIT"
] | null | null | null | from .channel import Channel, TextChannel, VoiceChannel, CategoryChannel, DMChannel, DMGroupChannel, GUILD_CHANNELS, \
DM_CHANNELS, TEXTCHANNEL, VOICECHANNEL, CATEGORYCHANNEL, DMCHANNEL, GROUPDMCHANNEL
from .embed import Embed
from .guild import Guild
from .message import Message
from .role import Role
from .user import User, ClientUser, Member
| 43.875 | 118 | 0.817664 | 40 | 351 | 7.125 | 0.45 | 0.161404 | 0.266667 | 0.329825 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.122507 | 351 | 7 | 119 | 50.142857 | 0.925325 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.857143 | 0 | 0.857143 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
d19fbf09eb8873027b840b7ed88526a72695fdff | 47 | py | Python | sapextractor/utils/table_count/__init__.py | aarkue/sap-meta-explorer | 613bf657bbaa72a3781a84664e5de7626516532f | [
"Apache-2.0"
] | 2 | 2021-02-10T08:09:35.000Z | 2021-05-21T06:25:34.000Z | sapextractor/utils/table_count/__init__.py | aarkue/sap-meta-explorer | 613bf657bbaa72a3781a84664e5de7626516532f | [
"Apache-2.0"
] | null | null | null | sapextractor/utils/table_count/__init__.py | aarkue/sap-meta-explorer | 613bf657bbaa72a3781a84664e5de7626516532f | [
"Apache-2.0"
] | 3 | 2021-11-22T13:27:00.000Z | 2022-03-16T22:08:51.000Z | from sapextractor.utils.table_count import get
| 23.5 | 46 | 0.87234 | 7 | 47 | 5.714286 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.085106 | 47 | 1 | 47 | 47 | 0.930233 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
d1a29601894d683e2bcc6ef49a71a0d52966e8ed | 1,331 | py | Python | tests/test_loadtxt.py | gumazon/loadtxt | 32017309f894214d26c2e56f85486eb70368085b | [
"MIT"
] | null | null | null | tests/test_loadtxt.py | gumazon/loadtxt | 32017309f894214d26c2e56f85486eb70368085b | [
"MIT"
] | null | null | null | tests/test_loadtxt.py | gumazon/loadtxt | 32017309f894214d26c2e56f85486eb70368085b | [
"MIT"
] | null | null | null | import shlex
import subprocess
from loadtxt.loadtxt import loadtxt
def test_main_txt():
    """CLI smoke test: passing literal text to the loadtxt script echoes it
    back on stdout."""
    expected = 'Same thing over and over and expecting different results.'
    # NOTE(review): machine-specific absolute path -- this test cannot run on
    # another host; consider deriving the script path from __file__.
    cmd = 'python /Users/smikhail/Public/libs/loadtxt/loadtxt/loadtxt "Same thing over and over and expecting different results."'
    actual = subprocess.run(shlex.split(cmd), check=True, stdout=subprocess.PIPE).stdout.decode().strip()
    assert actual == expected
def test_main_file():
    """CLI smoke test: passing a file path prints that file's contents."""
    expected = 'Insanity is doing the same thing over and over and expecting different results.'
    # NOTE(review): machine-specific absolute paths -- this test cannot run on
    # another host; consider deriving both paths from __file__.
    cmd = 'python /Users/smikhail/Public/libs/loadtxt/loadtxt/loadtxt "/Users/smikhail/Public/libs/loadtxt/loadtxt/tests/text.txt"'
    actual = subprocess.run(shlex.split(cmd), check=True, stdout=subprocess.PIPE).stdout.decode().strip()
    assert actual == expected
def test_loadtxt():
    """loadtxt() on a plain (non-path) string returns the string unchanged."""
    expected = 'Note that None as a type hint is a special case and is replaced.'.strip()
    # presumably the imported `loadtxt` is a module/namespace exposing a
    # `loadtxt` callable -- verify against loadtxt/loadtxt.py
    actual = loadtxt.loadtxt('Note that None as a type hint is a special case and is replaced.').strip()
    assert actual == expected
def test_loadtxt_file():
    """loadtxt() on a file path returns that file's contents."""
    expected = 'Insanity is doing the same thing over and over and expecting different results.'.strip()
    # NOTE(review): machine-specific absolute path -- this test cannot run on
    # another host; consider deriving the fixture path from __file__.
    actual = loadtxt.loadtxt('/Users/smikhail/Public/libs/loadtxt/loadtxt/tests/text.txt').strip()
    assert actual == expected
| 41.59375 | 131 | 0.740796 | 186 | 1,331 | 5.263441 | 0.268817 | 0.128703 | 0.053115 | 0.065373 | 0.848825 | 0.848825 | 0.848825 | 0.806946 | 0.806946 | 0.757916 | 0 | 0 | 0.151766 | 1,331 | 31 | 132 | 42.935484 | 0.867139 | 0 | 0 | 0.285714 | 0 | 0.095238 | 0.479699 | 0.165414 | 0 | 0 | 0 | 0 | 0.190476 | 1 | 0.190476 | false | 0 | 0.142857 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
06096213e59d04d6c7315f4fac511470ed5b8a76 | 42 | py | Python | vhinny/common/__init__.py | vhinny-investing/api | 8439e759cee6069411d19cc4f15d6a4ded919d75 | [
"MIT"
] | null | null | null | vhinny/common/__init__.py | vhinny-investing/api | 8439e759cee6069411d19cc4f15d6a4ded919d75 | [
"MIT"
] | 1 | 2020-06-01T04:09:40.000Z | 2020-06-01T04:09:40.000Z | vhinny/common/__init__.py | vhinny-investing/api | 8439e759cee6069411d19cc4f15d6a4ded919d75 | [
"MIT"
] | null | null | null | from vhinny.common.base import BaseHelpers | 42 | 42 | 0.880952 | 6 | 42 | 6.166667 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.071429 | 42 | 1 | 42 | 42 | 0.948718 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
ae4d42ecc1338481a1d8420e7b92931e230a431f | 26 | py | Python | blogger/comments/urls.py | hanumanthareddy/blogger | e8352b77a342a9a2e535106743762a8b87cb7675 | [
"MIT"
] | null | null | null | blogger/comments/urls.py | hanumanthareddy/blogger | e8352b77a342a9a2e535106743762a8b87cb7675 | [
"MIT"
] | 9 | 2019-12-04T22:58:36.000Z | 2022-02-10T08:59:32.000Z | blogger/comments/urls.py | hanumanthareddy/blogger | e8352b77a342a9a2e535106743762a8b87cb7675 | [
"MIT"
] | null | null | null | from .views import render
| 13 | 25 | 0.807692 | 4 | 26 | 5.25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.153846 | 26 | 1 | 26 | 26 | 0.954545 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
ae896338f62104733f03a1d858fc11e3db0ab0d0 | 1,654 | py | Python | tests/test_async_fakenames.py | Artem-Efremov/async-requests | 13dc5b21727b47d5be1e6a7bb0118e1d094282b5 | [
"MIT"
] | null | null | null | tests/test_async_fakenames.py | Artem-Efremov/async-requests | 13dc5b21727b47d5be1e6a7bb0118e1d094282b5 | [
"MIT"
] | 1 | 2021-06-01T23:49:49.000Z | 2021-06-01T23:49:49.000Z | tests/test_async_fakenames.py | Artem-Efremov/async-requests | 13dc5b21727b47d5be1e6a7bb0118e1d094282b5 | [
"MIT"
] | null | null | null | import sys
import json
import pytest
import requests
import urllib3
from pytest_localserver import plugin
import async_fakenames as app
import models
httpsserver = plugin.httpsserver
def test_fetch_fakename(httpsserver):
    """fetch_fakename() parses the 'name' field of the JSON the API serves.

    `httpsserver` is the pytest-localserver HTTPS fixture; it serves the
    canned namefake.com-style payload below in place of the real API.
    """
    content = r'{"name":"Mrs. Neva Hessel","address":"042 Connelly Estates Suite 908\nNorth Kennedy, CT 15663","latitude":-14.154932000000002,"longitude":-76.588392,"maiden_name":"Bosco","birth_data":"1982-10-24","phone_h":"(229)290-7046x023","phone_w":"1-322-955-7206","email_u":"doyle.hettinger","email_d":"bankomatt.ru","username":"mkessler","password":"|+Nwz\"Cym0fuPFBw","domain":"stehr.info","useragent":"Mozilla\/5.0 (compatible; MSIE 10.0; Windows NT 6.0; Trident\/4.0)","ipv4":"36.238.20.204","macaddress":"AC:0D:86:6B:53:35","plasticcard":"5348836060551923","cardexpir":"07\/19","bonus":10,"company":"Rippin, Ruecker and Ankunding","color":"silver","uuid":"30c8b076-0553-3b67-980b-ea24c814cb02","height":171,"weight":94.2,"blood":"A\u2212","eye":"Amber","hair":"Straight, Black","pict":"9female","url":"https:\/\/api.namefake.com\/english-united-states\/female\/fd0a926717460996a50cbebbb4871273","sport":"Sailing","ipv4_url":"\/\/myip-address.com\/ip-lookup\/36.238.20.204","email_url":"\/\/emailfake.com\/bankomatt.ru\/doyle.hettinger","domain_url":"\/\/myip-address.com\/ip-lookup\/stehr.info"}'
    headers = {'content-type': 'text/html; charset=UTF-8'}
    httpsserver.serve_content(content, headers=headers)
    with requests.Session() as session:
        # The local test server uses a self-signed certificate, so TLS
        # verification must be disabled for this session.
        session.verify = False
        name_obj = app.fetch_fakename(session, httpsserver.url)
        assert json.loads(content).get('name') == name_obj.fullname
| 66.16 | 1,107 | 0.721886 | 228 | 1,654 | 5.162281 | 0.714912 | 0.02209 | 0.011895 | 0.016992 | 0.042481 | 0.042481 | 0 | 0 | 0 | 0 | 0 | 0.122316 | 0.070738 | 1,654 | 24 | 1,108 | 68.916667 | 0.643461 | 0 | 0 | 0 | 0 | 0.058824 | 0.683192 | 0.585852 | 0 | 0 | 0 | 0 | 0.058824 | 1 | 0.058824 | false | 0.058824 | 0.470588 | 0 | 0.529412 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 6 |
ae97b40815b55d15a7f55cd48139d05d8de90fec | 100 | py | Python | terrascript/vsphere/__init__.py | vutsalsinghal/python-terrascript | 3b9fb5ad77453d330fb0cd03524154a342c5d5dc | [
"BSD-2-Clause"
] | null | null | null | terrascript/vsphere/__init__.py | vutsalsinghal/python-terrascript | 3b9fb5ad77453d330fb0cd03524154a342c5d5dc | [
"BSD-2-Clause"
] | null | null | null | terrascript/vsphere/__init__.py | vutsalsinghal/python-terrascript | 3b9fb5ad77453d330fb0cd03524154a342c5d5dc | [
"BSD-2-Clause"
] | null | null | null | # terrascript/vsphere/__init__.py
import terrascript
class vsphere(terrascript.Provider):
pass | 16.666667 | 36 | 0.8 | 11 | 100 | 6.909091 | 0.727273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.12 | 100 | 6 | 37 | 16.666667 | 0.863636 | 0.31 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 6 |
8823b7a5e45860c38092e10084c2e9088dadaba0 | 73 | py | Python | shipyard2/rules/py/startup/build.py | clchiou/garage | 446ff34f86cdbd114b09b643da44988cf5d027a3 | [
"MIT"
] | 3 | 2016-01-04T06:28:52.000Z | 2020-09-20T13:18:40.000Z | shipyard2/rules/py/startup/build.py | clchiou/garage | 446ff34f86cdbd114b09b643da44988cf5d027a3 | [
"MIT"
] | null | null | null | shipyard2/rules/py/startup/build.py | clchiou/garage | 446ff34f86cdbd114b09b643da44988cf5d027a3 | [
"MIT"
] | null | null | null | import shipyard2.rules.pythons
shipyard2.rules.pythons.define_package()
| 18.25 | 40 | 0.849315 | 9 | 73 | 6.777778 | 0.666667 | 0.459016 | 0.688525 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.028986 | 0.054795 | 73 | 3 | 41 | 24.333333 | 0.855072 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
8860340b5fac58ade3b31d3e1166d969181afa52 | 164 | py | Python | jrecomments/management/commands/scrape.py | ChristianJStarr/jrecomments | 2c4d72e8848d639e3b1004e58b2cc8e0889ad23d | [
"MIT"
] | null | null | null | jrecomments/management/commands/scrape.py | ChristianJStarr/jrecomments | 2c4d72e8848d639e3b1004e58b2cc8e0889ad23d | [
"MIT"
] | null | null | null | jrecomments/management/commands/scrape.py | ChristianJStarr/jrecomments | 2c4d72e8848d639e3b1004e58b2cc8e0889ad23d | [
"MIT"
] | null | null | null | from django.core.management import BaseCommand
from jrecomments.youtube import youtube_pull_comments
class Command(BaseCommand):
youtube_pull_comments(50000) | 23.428571 | 53 | 0.847561 | 20 | 164 | 6.75 | 0.65 | 0.162963 | 0.281481 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.034014 | 0.103659 | 164 | 7 | 54 | 23.428571 | 0.884354 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
88a308415550ed2734c0300680964f0d53e970d2 | 199 | py | Python | Codewars/8kyu/basic-making-six-toast/Python/test.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | 7 | 2017-09-20T16:40:39.000Z | 2021-08-31T18:15:08.000Z | Codewars/8kyu/basic-making-six-toast/Python/test.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | null | null | null | Codewars/8kyu/basic-making-six-toast/Python/test.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | null | null | null | # Python - 3.6.0
# Codewars test fixture exercising six_toast(). The expected values are
# consistent with six_toast(n) == abs(n - 6), but confirm against the kata.
Test.describe('Basic tests')
Test.it('Testing for Basic Functionality')
Test.assert_equals(six_toast(15), 9)
Test.assert_equals(six_toast(6), 0)
Test.assert_equals(six_toast(3), 3)
| 24.875 | 42 | 0.753769 | 35 | 199 | 4.114286 | 0.514286 | 0.208333 | 0.333333 | 0.395833 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0.054945 | 0.085427 | 199 | 7 | 43 | 28.428571 | 0.736264 | 0.070352 | 0 | 0 | 0 | 0 | 0.229508 | 0 | 0 | 0 | 0 | 0 | 0.6 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
ee31f093742c7afea3b952a81ddf584810a1adb9 | 166 | py | Python | dbgz/__init__.py | filipinascimento/dbgz | a3b10e89c78377c00978da0a876f5ad8e8416794 | [
"BSD-3-Clause"
] | null | null | null | dbgz/__init__.py | filipinascimento/dbgz | a3b10e89c78377c00978da0a876f5ad8e8416794 | [
"BSD-3-Clause"
] | null | null | null | dbgz/__init__.py | filipinascimento/dbgz | a3b10e89c78377c00978da0a876f5ad8e8416794 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/python
# -*- coding: <utf-8> -*-
from .dbgz import DBGZWriter
from .dbgz import DBGZReader
from .dbgz import readIndicesDictionary
__version__ = "0.3.4"
| 18.444444 | 39 | 0.716867 | 22 | 166 | 5.227273 | 0.727273 | 0.208696 | 0.365217 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.027972 | 0.138554 | 166 | 8 | 40 | 20.75 | 0.776224 | 0.240964 | 0 | 0 | 0 | 0 | 0.04065 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.75 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
ee79d8d25ed3cdc5ae52f0b7f8a7c864edf77dd5 | 10,341 | py | Python | scr/analytic/mathieu.py | zmoitier/Scattering_BIE_QPAX | a4f9660f570c618f7a14585ab943b0f16c712632 | [
"MIT"
] | null | null | null | scr/analytic/mathieu.py | zmoitier/Scattering_BIE_QPAX | a4f9660f570c618f7a14585ab943b0f16c712632 | [
"MIT"
] | null | null | null | scr/analytic/mathieu.py | zmoitier/Scattering_BIE_QPAX | a4f9660f570c618f7a14585ab943b0f16c712632 | [
"MIT"
] | null | null | null | """ Mathieu functions
Author: Zoïs Moitier
Karlsruhe Institute of Technology, Germany
"""
from numpy import degrees  # scipy's angular Mathieu routines take degrees
from numpy import cos, cosh
from scipy.special import (
mathieu_a,
mathieu_b,
mathieu_cem,
mathieu_modcem1,
mathieu_modcem2,
mathieu_modsem1,
mathieu_modsem2,
mathieu_sem,
)
def ce(m, q, η, *, p=0):
    """Even angular Mathieu function ceₘ(q, η).

    Parameters
    ----------
    m : array_like
        integer order of the Mathieu function
    q : array_like
        positive parameter in the Mathieu differential equation
    η : array_like
        angular coordinate in the elliptic coordinates
    p : 0, 1, or 2 (default 0)
        0 for the function, 1 for the first derivative, and 2 for the
        second derivative

    Returns
    -------
    array_like
        value of ceₘ(q, η), ceₘ´(q, η), or ceₘ´´(q, η)

    Raises
    ------
    ValueError
        if p is not 0, 1, or 2
    """
    # scipy returns the pair (value, first derivative) and expects the
    # angle in degrees.
    if p in (0, 1):
        return mathieu_cem(m, q, degrees(η))[p]
    if p == 2:
        # Mathieu's equation gives ceₘ´´ = (2q·cos(2η) − aₘ)·ceₘ.
        value = mathieu_cem(m, q, degrees(η))[0]
        return ((2 * q) * cos(2 * η) - mathieu_a(m, q)) * value
    raise ValueError("The value p must be 0, 1, or 2.")
def se(m, q, η, *, p=0):
    """Odd angular Mathieu function seₘ(q, η).

    Parameters
    ----------
    m : array_like
        integer order of the Mathieu function
    q : array_like
        positive parameter in the Mathieu differential equation
    η : array_like
        angular coordinate in the elliptic coordinates
    p : 0, 1, or 2 (default 0)
        0 for the function, 1 for the first derivative, and 2 for the
        second derivative

    Returns
    -------
    array_like
        value of seₘ(q, η), seₘ´(q, η), or seₘ´´(q, η)

    Raises
    ------
    ValueError
        if p is not 0, 1, or 2
    """
    # scipy returns the pair (value, first derivative) and expects the
    # angle in degrees.
    if p in (0, 1):
        return mathieu_sem(m, q, degrees(η))[p]
    if p == 2:
        # Mathieu's equation gives seₘ´´ = (2q·cos(2η) − bₘ)·seₘ.
        value = mathieu_sem(m, q, degrees(η))[0]
        return ((2 * q) * cos(2 * η) - mathieu_b(m, q)) * value
    raise ValueError("The value p must be 0, 1, or 2.")
def Mce1(m, q, ξ, *, p=0):
    """Even radial Mathieu function of the first kind Mce⁽¹⁾ₘ(q, ξ).

    Parameters
    ----------
    m : array_like
        integer order of the Mathieu function
    q : array_like
        positive parameter in the Mathieu differential equation
    ξ : array_like
        radial coordinate in the elliptic coordinates
    p : 0, 1, or 2 (default 0)
        0 for the function, 1 for the first derivative, and 2 for the
        second derivative

    Returns
    -------
    array_like
        value of Mce⁽¹⁾ₘ(q, ξ), Mce⁽¹⁾ₘ′(q, ξ), or Mce⁽¹⁾ₘ′′(q, ξ)

    Raises
    ------
    ValueError
        if p is not 0, 1, or 2
    """
    # scipy returns the pair (value, first derivative).
    if p in (0, 1):
        return mathieu_modcem1(m, q, ξ)[p]
    if p == 2:
        # Modified Mathieu equation: Mce´´ = (aₘ − 2q·cosh(2ξ))·Mce.
        value = mathieu_modcem1(m, q, ξ)[0]
        return (mathieu_a(m, q) - (2 * q) * cosh(2 * ξ)) * value
    raise ValueError("The value p must be 0, 1, or 2.")
def Mse1(m, q, ξ, *, p=0):
    """Odd radial Mathieu function of the first kind Mse⁽¹⁾ₘ(q, ξ).

    Parameters
    ----------
    m : array_like
        integer order of the Mathieu function
    q : array_like
        positive parameter in the Mathieu differential equation
    ξ : array_like
        radial coordinate in the elliptic coordinates
    p : 0, 1, or 2 (default 0)
        0 for the function, 1 for the first derivative, and 2 for the
        second derivative

    Returns
    -------
    array_like
        value of Mse⁽¹⁾ₘ(q, ξ), Mse⁽¹⁾ₘ′(q, ξ), or Mse⁽¹⁾ₘ′′(q, ξ)

    Raises
    ------
    ValueError
        if p is not 0, 1, or 2
    """
    # scipy returns the pair (value, first derivative).
    if p in (0, 1):
        return mathieu_modsem1(m, q, ξ)[p]
    if p == 2:
        # Modified Mathieu equation: Mse´´ = (bₘ − 2q·cosh(2ξ))·Mse.
        value = mathieu_modsem1(m, q, ξ)[0]
        return (mathieu_b(m, q) - (2 * q) * cosh(2 * ξ)) * value
    raise ValueError("The value p must be 0, 1, or 2.")
def Mce2(m, q, ξ, *, p=0):
    """Even radial Mathieu function of the second kind Mce⁽²⁾ₘ(q, ξ).

    Parameters
    ----------
    m : array_like
        integer order of the Mathieu function
    q : array_like
        positive parameter in the Mathieu differential equation
    ξ : array_like
        radial coordinate in the elliptic coordinates
    p : 0, 1, or 2 (default 0)
        0 for the function, 1 for the first derivative, and 2 for the
        second derivative

    Returns
    -------
    array_like
        value of Mce⁽²⁾ₘ(q, ξ), Mce⁽²⁾ₘ′(q, ξ), or Mce⁽²⁾ₘ′′(q, ξ)

    Raises
    ------
    ValueError
        if p is not 0, 1, or 2
    """
    # scipy returns the pair (value, first derivative).
    if p in (0, 1):
        return mathieu_modcem2(m, q, ξ)[p]
    if p == 2:
        # Modified Mathieu equation: Mce´´ = (aₘ − 2q·cosh(2ξ))·Mce.
        value = mathieu_modcem2(m, q, ξ)[0]
        return (mathieu_a(m, q) - (2 * q) * cosh(2 * ξ)) * value
    raise ValueError("The value p must be 0, 1, or 2.")
def Mse2(m, q, ξ, *, p=0):
    """Odd radial Mathieu function of the second kind Mse⁽²⁾ₘ(q, ξ).

    Parameters
    ----------
    m : array_like
        integer order of the Mathieu function
    q : array_like
        positive parameter in the Mathieu differential equation
    ξ : array_like
        radial coordinate in the elliptic coordinates
    p : 0, 1, or 2 (default 0)
        0 for the function, 1 for the first derivative, and 2 for the
        second derivative

    Returns
    -------
    array_like
        value of Mse⁽²⁾ₘ(q, ξ), Mse⁽²⁾ₘ′(q, ξ), or Mse⁽²⁾ₘ′′(q, ξ)

    Raises
    ------
    ValueError
        if p is not 0, 1, or 2
    """
    # scipy returns the pair (value, first derivative).
    if p in (0, 1):
        return mathieu_modsem2(m, q, ξ)[p]
    if p == 2:
        # Modified Mathieu equation: Mse´´ = (bₘ − 2q·cosh(2ξ))·Mse.
        value = mathieu_modsem2(m, q, ξ)[0]
        return (mathieu_b(m, q) - (2 * q) * cosh(2 * ξ)) * value
    raise ValueError("The value p must be 0, 1, or 2.")
def Mce3(m, q, ξ, *, p=0):
    """Even radial Mathieu function of the third kind Mce⁽³⁾ₘ(q, ξ).

    Built as Mce⁽¹⁾ₘ + i·Mce⁽²⁾ₘ (Hankel-like combination).

    Parameters
    ----------
    m : array_like
        integer order of the Mathieu function
    q : array_like
        positive parameter in the Mathieu differential equation
    ξ : array_like
        radial coordinate in the elliptic coordinates
    p : 0, 1, or 2 (default 0)
        0 for the function, 1 for the first derivative, and 2 for the
        second derivative

    Returns
    -------
    array_like
        value of Mce⁽³⁾ₘ(q, ξ), Mce⁽³⁾ₘ′(q, ξ), or Mce⁽³⁾ₘ′′(q, ξ)

    Raises
    ------
    ValueError
        if p is not 0, 1, or 2
    """
    if p in (0, 1):
        return mathieu_modcem1(m, q, ξ)[p] + 1j * mathieu_modcem2(m, q, ξ)[p]
    if p == 2:
        # Modified Mathieu equation: Mce´´ = (aₘ − 2q·cosh(2ξ))·Mce.
        value = mathieu_modcem1(m, q, ξ)[0] + 1j * mathieu_modcem2(m, q, ξ)[0]
        return (mathieu_a(m, q) - (2 * q) * cosh(2 * ξ)) * value
    raise ValueError("The value p must be 0, 1, or 2.")
def Mse3(m, q, ξ, *, p=0):
    """Odd radial Mathieu function of the third kind Mse⁽³⁾ₘ(q, ξ).

    Built as Mse⁽¹⁾ₘ + i·Mse⁽²⁾ₘ (Hankel-like combination).

    Parameters
    ----------
    m : array_like
        integer order of the Mathieu function
    q : array_like
        positive parameter in the Mathieu differential equation
    ξ : array_like
        radial coordinate in the elliptic coordinates
    p : 0, 1, or 2 (default 0)
        0 for the function, 1 for the first derivative, and 2 for the
        second derivative

    Returns
    -------
    array_like
        value of Mse⁽³⁾ₘ(q, ξ), Mse⁽³⁾ₘ′(q, ξ), or Mse⁽³⁾ₘ′′(q, ξ)

    Raises
    ------
    ValueError
        if p is not 0, 1, or 2
    """
    if p in (0, 1):
        return mathieu_modsem1(m, q, ξ)[p] + 1j * mathieu_modsem2(m, q, ξ)[p]
    if p == 2:
        # Modified Mathieu equation: Mse´´ = (bₘ − 2q·cosh(2ξ))·Mse.
        value = mathieu_modsem1(m, q, ξ)[0] + 1j * mathieu_modsem2(m, q, ξ)[0]
        return (mathieu_b(m, q) - (2 * q) * cosh(2 * ξ)) * value
    raise ValueError("The value p must be 0, 1, or 2.")
def Mce4(m, q, ξ, *, p=0):
    """Even radial Mathieu function of the fourth kind Mce⁽⁴⁾ₘ(q, ξ).

    Built as Mce⁽¹⁾ₘ − i·Mce⁽²⁾ₘ (Hankel-like combination).

    Parameters
    ----------
    m : array_like
        integer order of the Mathieu function
    q : array_like
        positive parameter in the Mathieu differential equation
    ξ : array_like
        radial coordinate in the elliptic coordinates
    p : 0, 1, or 2 (default 0)
        0 for the function, 1 for the first derivative, and 2 for the
        second derivative

    Returns
    -------
    array_like
        value of Mce⁽⁴⁾ₘ(q, ξ), Mce⁽⁴⁾ₘ′(q, ξ), or Mce⁽⁴⁾ₘ′′(q, ξ)

    Raises
    ------
    ValueError
        if p is not 0, 1, or 2
    """
    if p in (0, 1):
        return mathieu_modcem1(m, q, ξ)[p] - 1j * mathieu_modcem2(m, q, ξ)[p]
    if p == 2:
        # Modified Mathieu equation: Mce´´ = (aₘ − 2q·cosh(2ξ))·Mce.
        value = mathieu_modcem1(m, q, ξ)[0] - 1j * mathieu_modcem2(m, q, ξ)[0]
        return (mathieu_a(m, q) - (2 * q) * cosh(2 * ξ)) * value
    raise ValueError("The value p must be 0, 1, or 2.")
def Mse4(m, q, ξ, *, p=0):
    """Odd radial Mathieu function of the fourth kind Mse⁽⁴⁾ₘ(q, ξ).

    Built as Mse⁽¹⁾ₘ − i·Mse⁽²⁾ₘ (Hankel-like combination).

    Parameters
    ----------
    m : array_like
        integer order of the Mathieu function
    q : array_like
        positive parameter in the Mathieu differential equation
    ξ : array_like
        radial coordinate in the elliptic coordinates
    p : 0, 1, or 2 (default 0)
        0 for the function, 1 for the first derivative, and 2 for the
        second derivative

    Returns
    -------
    array_like
        value of Mse⁽⁴⁾ₘ(q, ξ), Mse⁽⁴⁾ₘ′(q, ξ), or Mse⁽⁴⁾ₘ′′(q, ξ)

    Raises
    ------
    ValueError
        if p is not 0, 1, or 2
    """
    if p in (0, 1):
        return mathieu_modsem1(m, q, ξ)[p] - 1j * mathieu_modsem2(m, q, ξ)[p]
    if p == 2:
        # Modified Mathieu equation: Mse´´ = (bₘ − 2q·cosh(2ξ))·Mse.
        value = mathieu_modsem1(m, q, ξ)[0] - 1j * mathieu_modsem2(m, q, ξ)[0]
        return (mathieu_b(m, q) - (2 * q) * cosh(2 * ξ)) * value
    raise ValueError("The value p must be 0, 1, or 2.")
| 25.345588 | 88 | 0.545499 | 1,761 | 10,341 | 3.211811 | 0.053379 | 0.031117 | 0.036068 | 0.016973 | 0.947489 | 0.942185 | 0.918847 | 0.896924 | 0.895156 | 0.860856 | 0 | 0.044527 | 0.322406 | 10,341 | 407 | 89 | 25.407862 | 0.749108 | 0.534184 | 0 | 0.423077 | 0 | 0 | 0.0787 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.096154 | false | 0 | 0.028846 | 0 | 0.413462 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
c9aa7a80aa4a09b5d544d7d2b11ad4932c7cb6ef | 164 | py | Python | src/fj/_utils/__init__.py | sinoroc/fj | fbf82d0d988812af9899f10dee9c71ea113579b1 | [
"Apache-2.0"
] | null | null | null | src/fj/_utils/__init__.py | sinoroc/fj | fbf82d0d988812af9899f10dee9c71ea113579b1 | [
"Apache-2.0"
] | 2 | 2020-07-16T15:22:53.000Z | 2020-07-25T18:40:35.000Z | src/fj/_utils/__init__.py | sinoroc/fj | fbf82d0d988812af9899f10dee9c71ea113579b1 | [
"Apache-2.0"
] | null | null | null | #
"""Utilities."""
from . import config
from . import pip_wrapper
from . import subprocess_wrapper
from . import venv_wrapper
from . import zipapp_wrapper
# EOF
| 13.666667 | 32 | 0.75 | 21 | 164 | 5.666667 | 0.47619 | 0.420168 | 0.428571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.164634 | 164 | 11 | 33 | 14.909091 | 0.868613 | 0.091463 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
4e57ec7e53dbb7467685dec1a5d8040714a4d539 | 117 | py | Python | filem/auth.py | funkybob/django-filem | 5bcfab3a592e9c4547211d44c079d10e55c85fa1 | [
"MIT"
] | 6 | 2016-11-02T00:01:49.000Z | 2018-05-22T07:44:08.000Z | filem/auth.py | funkybob/django-filem | 5bcfab3a592e9c4547211d44c079d10e55c85fa1 | [
"MIT"
] | 5 | 2016-01-10T14:06:22.000Z | 2016-02-27T03:32:21.000Z | kopytka/auth.py | funkybob/kopytka | 59cafc58fbf40a0bf8ddc082f8a1ebeea664c9e0 | [
"MIT"
] | 2 | 2016-01-15T07:15:06.000Z | 2019-01-25T11:44:23.000Z | from django.contrib.auth.decorators import user_passes_test
# View decorator that grants access only when request.user.is_staff is truthy.
staff_required = user_passes_test(lambda u: u.is_staff)
| 29.25 | 59 | 0.846154 | 19 | 117 | 4.894737 | 0.736842 | 0.215054 | 0.301075 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.08547 | 117 | 3 | 60 | 39 | 0.869159 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 1 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 6 |
4e78d0c6f3a7ce0dcdbd1b6bf2a39683694d7d26 | 257 | py | Python | tests/test_parsers/test_base_parser.py | thejens/parender | 14660d93b2822da69c528e8b3c0fb9de4bfd450b | [
"MIT"
] | 1 | 2020-12-13T16:19:36.000Z | 2020-12-13T16:19:36.000Z | tests/test_parsers/test_base_parser.py | thejens/parender | 14660d93b2822da69c528e8b3c0fb9de4bfd450b | [
"MIT"
] | 2 | 2021-12-16T13:08:56.000Z | 2022-03-14T12:58:38.000Z | tests/test_parsers/test_base_parser.py | thejens/parender | 14660d93b2822da69c528e8b3c0fb9de4bfd450b | [
"MIT"
] | 4 | 2021-11-10T15:06:04.000Z | 2022-02-16T10:47:07.000Z | # pylint: disable=missing-module-docstring
# pylint: disable=missing-function-docstring
from loren.parsers.base_parser import BaseParser
def test_base_parser() -> None:
    """BaseParser.parse should return the input mapping unchanged."""
    payload = {"file_contents": "a = 1"}
    assert BaseParser.parse(dict(payload)) == payload
| 32.125 | 85 | 0.735409 | 33 | 257 | 5.575758 | 0.666667 | 0.141304 | 0.217391 | 0.152174 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008811 | 0.116732 | 257 | 7 | 86 | 36.714286 | 0.801762 | 0.322957 | 0 | 0 | 0 | 0 | 0.210526 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
14d541363970ecbbae828a5d8893603e2928da6b | 248 | py | Python | desktop/core/ext-py/djangosaml2-0.16.4/djangosaml2/settings.py | DEVESHTARASIA/hue | 3a3b13ab599bd601f94472078e628fc78d32bb37 | [
"Apache-2.0"
] | 1 | 2018-08-01T05:10:26.000Z | 2018-08-01T05:10:26.000Z | desktop/core/ext-py/djangosaml2-0.16.4/djangosaml2/settings.py | DEVESHTARASIA/hue | 3a3b13ab599bd601f94472078e628fc78d32bb37 | [
"Apache-2.0"
] | null | null | null | desktop/core/ext-py/djangosaml2-0.16.4/djangosaml2/settings.py | DEVESHTARASIA/hue | 3a3b13ab599bd601f94472078e628fc78d32bb37 | [
"Apache-2.0"
] | 2 | 2019-06-17T11:51:56.000Z | 2020-07-25T08:29:56.000Z | from django.conf import settings
# Read from Django settings, falling back to 'username' when the project
# does not define SAML_DJANGO_USER_MAIN_ATTRIBUTE.
SAML_DJANGO_USER_MAIN_ATTRIBUTE = getattr(
    settings, 'SAML_DJANGO_USER_MAIN_ATTRIBUTE', 'username')
# Optional lookup suffix for the main attribute; defaults to ''.
# NOTE(review): presumably appended as an ORM field lookup (e.g. '__iexact')
# when matching users -- confirm against the callers of this setting.
SAML_DJANGO_USER_MAIN_ATTRIBUTE_LOOKUP = getattr(
    settings, 'SAML_DJANGO_USER_MAIN_ATTRIBUTE_LOOKUP', '')
| 35.428571 | 60 | 0.822581 | 32 | 248 | 5.8125 | 0.375 | 0.215054 | 0.301075 | 0.387097 | 0.849462 | 0.849462 | 0.451613 | 0 | 0 | 0 | 0 | 0 | 0.100806 | 248 | 6 | 61 | 41.333333 | 0.834081 | 0 | 0 | 0 | 0 | 0 | 0.310484 | 0.278226 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
14eee0f4e3b5435fa1fc9618e5eec860702cef84 | 214 | py | Python | conan_build_tool/Configurators/__init__.py | Av3m/conan_build_tools | f82fb4a87d4ce7eea49b2af62a2ed14e9ef4a123 | [
"MIT"
] | null | null | null | conan_build_tool/Configurators/__init__.py | Av3m/conan_build_tools | f82fb4a87d4ce7eea49b2af62a2ed14e9ef4a123 | [
"MIT"
] | null | null | null | conan_build_tool/Configurators/__init__.py | Av3m/conan_build_tools | f82fb4a87d4ce7eea49b2af62a2ed14e9ef4a123 | [
"MIT"
] | null | null | null | from .CompilerConfigurator import *
from .DockerConfigurator import *
from .BuildTypeConfigurator import *
from .ConanServerConfigurator import *
from .ArchConfigurator import *
from .AndroidConfigurator import *
| 26.75 | 38 | 0.827103 | 18 | 214 | 9.833333 | 0.444444 | 0.282486 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.116822 | 214 | 7 | 39 | 30.571429 | 0.936508 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
11726552c3b4bab804d7019a1bc06aba13ebd479 | 96 | py | Python | venv/lib/python3.8/site-packages/pkginfo/tests/test_distribution.py | Retraces/UkraineBot | 3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71 | [
"MIT"
] | 2 | 2022-03-13T01:58:52.000Z | 2022-03-31T06:07:54.000Z | venv/lib/python3.8/site-packages/pkginfo/tests/test_distribution.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | 19 | 2021-11-20T04:09:18.000Z | 2022-03-23T15:05:55.000Z | venv/lib/python3.8/site-packages/pkginfo/tests/test_distribution.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | null | null | null | /home/runner/.cache/pip/pool/39/44/4e/02062c15e21480ab3cde14daa1250d9949510bf4d7694fa9fb4e105c17 | 96 | 96 | 0.895833 | 9 | 96 | 9.555556 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.447917 | 0 | 96 | 1 | 96 | 96 | 0.447917 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
119fc8a8c94692fdab155310c9b30fd0462093bf | 298 | py | Python | dev6b_retake/dev6b_retake/controller.py | tvangeest/DEV6B_Retake | eea61ff11d9053fb9e7ec81955adfbc7ad21ce01 | [
"MIT"
] | null | null | null | dev6b_retake/dev6b_retake/controller.py | tvangeest/DEV6B_Retake | eea61ff11d9053fb9e7ec81955adfbc7ad21ce01 | [
"MIT"
] | null | null | null | dev6b_retake/dev6b_retake/controller.py | tvangeest/DEV6B_Retake | eea61ff11d9053fb9e7ec81955adfbc7ad21ce01 | [
"MIT"
] | null | null | null | from flask import Flask, render_template, request
class Controller:
    """Flask view handlers that render templates.

    The handlers take no arguments, so they are declared as static
    methods; the original definitions omitted both ``self`` and
    ``@staticmethod``, which made instance calls such as
    ``Controller().index()`` raise ``TypeError``.
    """

    @staticmethod
    def index():
        """Render the landing page."""
        return render_template('index.html')

    @staticmethod
    def a1():
        """Render the A1 page for GET requests; return None otherwise."""
        if request.method == 'GET':
            return render_template('A1.html')

    @staticmethod
    def a2():
        """Render the A2 page for GET requests; return None otherwise."""
        if request.method == 'GET':
            return render_template('A2.html')
11e714f40490be5ca1c1f08701a7b241961f7ded | 10,834 | py | Python | servicedirectory/src/sd-api/bindings/tests/tests_binding_instances_views.py | ealogar/servicedirectory | fb4f4bfa8b499b93c03af589ef2f34c08a830b17 | [
"Apache-2.0"
] | null | null | null | servicedirectory/src/sd-api/bindings/tests/tests_binding_instances_views.py | ealogar/servicedirectory | fb4f4bfa8b499b93c03af589ef2f34c08a830b17 | [
"Apache-2.0"
] | null | null | null | servicedirectory/src/sd-api/bindings/tests/tests_binding_instances_views.py | ealogar/servicedirectory | fb4f4bfa8b499b93c03af589ef2f34c08a830b17 | [
"Apache-2.0"
] | null | null | null | '''
(c) Copyright 2013 Telefonica, I+D. Printed in Spain (Europe). All Rights
Reserved.
The copyright to the software program(s) is property of Telefonica I+D.
The program(s) may be used and or copied only with the express written
consent of Telefonica I+D or in accordance with the terms and conditions
stipulated in the agreement/contract under which the program(s) have
been supplied.
'''
import json
from commons.test_utils import TestCase
# Sample class name; not referenced by the tests visible in this module.
class_name = 'test'
# REST endpoints exercised by the tests below. '{0}' placeholders are
# filled per test case; url_get_binding already ends with '&' so extra
# query parameters can be appended directly.
url_class_collection = '/sd/v1/classes'
url_get_binding = '/sd/v1/bind_instances?class_name={0}&'
url_instance_collection = '/sd/v1/classes/{0}/instances'
url_bindings_collection = '/sd/v1/bindings/'
class BindingInstancesViewTests(TestCase):
    """Integration tests for the /sd/v1/bind_instances discovery endpoint.

    Each test provisions a class plus instances through the API, optionally
    registers binding rules, and then checks the binding resolution
    responses (status codes and unica exceptionId/exceptionText payloads).
    """
    def test_bind_instances_without_rules_should_raise_404(self):
        """Binding lookups must return 404/SVC2002 when no rules exist."""
        # Create a class
        class_ = {'class_name': 'test_bind', 'description': 'Descripcion test', 'default_version': "1.0"}
        instance = {'uri': 'url_test', 'version': '1.0'}
        instance2 = {'uri': 'url_test_2', 'version': '2.0'}
        resp = self.post(url_class_collection, json.dumps(class_))
        self.assertEquals(resp.status_code, 201)
        url_instance_bind = url_get_binding.format('test_bind')
        resp = self.post(url_instance_collection.format('test_bind'), json.dumps(instance))
        self.assertEquals(resp.status_code, 201)
        resp = self.post(url_instance_collection.format('test_bind'), json.dumps(instance2))
        self.assertEquals(resp.status_code, 201)
        # Discover all instances are returned ordered by version
        resp = self.get(url_instance_bind)
        self.assertEquals(resp.status_code, 404)
        response_content = json.loads(resp.content)
        self.assertEquals("SVC2002", response_content['exceptionId'], 'Not correct unica message')
        # Discover instances of given version
        resp = self.get(url_instance_bind + 'version=2.0')
        self.assertEquals(resp.status_code, 404)
        response_content = json.loads(resp.content)
        self.assertEquals("SVC2002", response_content['exceptionId'], 'Not correct unica message')
    def test_bind_instances_with_rules_should_return_matched_rules(self):
        """Registered rules route each context to the bound instance."""
        # Create a class
        class_ = {'class_name': 'test_bind_with_rules', 'description': 'Descripcion test',
                  'default_version': "1.0"}
        instance = {'uri': 'url_test', 'version': '1.0', 'attributes': {'protocol': 'https'}}
        instance2 = {'uri': 'url_test_2', 'version': '2.0'}
        resp = self.post(url_class_collection, json.dumps(class_))
        self.assertEquals(resp.status_code, 201)
        url_instance_bind = url_get_binding.format(class_['class_name'])
        resp = self.post(url_instance_collection.format(class_['class_name']), json.dumps(instance))
        self.assertEquals(resp.status_code, 201)
        instance['id'] = json.loads(resp.content)['id']
        resp = self.post(url_instance_collection.format(class_['class_name']), json.dumps(instance2))
        self.assertEquals(resp.status_code, 201)
        instance2['id'] = json.loads(resp.content)['id']
        # add a couple of rules: eq/in/regex match on 'ob', range on 'uuid'
        rules_test = {
            'class_name': class_['class_name'],
            'origin': 'test-rules',
            'binding_rules': [
                {
                    'bindings':[instance['id']],
                    'group_rules': [{'operation':'eq', 'input_context_param':'ob', 'value': ['es']}]
                },
                {
                    'bindings':[instance2['id']],
                    'group_rules': [{'operation':'in', 'input_context_param':'ob', 'value': ['uk']}]
                }, {
                    'bindings':[instance2['id']],
                    'group_rules': [{'operation':'regex', 'input_context_param':'ob', 'value': ['^gbr$']}]
                },
                {
                    'bindings':[instance2['id']],
                    'group_rules': [{'operation':'range', 'input_context_param':'uuid', 'value': [100, 200]}]
                }
            ]
        }
        resp = self.post(url_bindings_collection, json.dumps(rules_test))
        self.assertEquals(resp.status_code, 201)
        # Discover instances for ob es
        resp = self.get(url_instance_bind + 'ob=es&origin=test-rules')
        response_content = json.loads(resp.content)
        self.assertEquals(resp.status_code, 200)
        self.assertEquals("url_test", response_content['uri'], "instances bind by rules was not correct")
        # Discover instances for ob uk
        resp = self.get(url_instance_bind + 'ob=uk&origin=test-rules&filters=uri')
        response_content = json.loads(resp.content)
        self.assertEquals(resp.status_code, 200)
        self.assertEquals("url_test_2", response_content['uri'], "instances bind by rules was not correct")
        self.assertTrue('version' not in response_content)
        # Discover instances without default rules
        resp = self.get(url_instance_bind + 'ob=uk')
        response_content = json.loads(resp.content)
        self.assertEquals(resp.status_code, 404)
        self.assertEquals('SVC2002', response_content['exceptionId'], 'Invalid search by default')
        # Discover instances without rules
        resp = self.get(url_instance_bind + 'ob=uk&origin=no-client')
        response_content = json.loads(resp.content)
        self.assertEquals(resp.status_code, 404)
        self.assertEquals('SVC2002', response_content['exceptionId'], 'Invalid search by no origin')
        # Discover instances without matching rules
        resp = self.get(url_instance_bind + 'ob=br&origin=test-rules')
        response_content = json.loads(resp.content)
        self.assertEquals(resp.status_code, 404)
        self.assertEquals('SVC1006', response_content['exceptionId'], 'Invalid search by no origin')
    def test_bind_instances_with_invalid_query_parameters_should_return_400(self):
        """Empty, unknown, or repeated query parameters must yield 400."""
        # Create a class
        class_ = {'class_name': 'test_bind2_with_rules', 'description': 'Descripcion test',
                  'default_version': "1.0"}
        instance = {'uri': 'url_test', 'version': '1.0', 'attributes': {'protocol': 'https'}}
        instance2 = {'uri': 'url_test_2', 'version': '2.0'}
        resp = self.post(url_class_collection, json.dumps(class_))
        self.assertEquals(resp.status_code, 201)
        url_instance_bind = url_get_binding.format(class_['class_name'])
        resp = self.post(url_instance_collection.format(class_['class_name']), json.dumps(instance))
        self.assertEquals(resp.status_code, 201)
        instance['id'] = json.loads(resp.content)['id']
        resp = self.post(url_instance_collection.format(class_['class_name']), json.dumps(instance2))
        self.assertEquals(resp.status_code, 201)
        instance2['id'] = json.loads(resp.content)['id']
        # add a couple of rules: eq/in/regex match on 'ob', range on 'uuid'
        rules_test = {
            'class_name': class_['class_name'],
            'origin': 'test-rules',
            'binding_rules': [
                {
                    'bindings':[instance['id']],
                    'group_rules': [{'operation':'eq', 'input_context_param':'ob', 'value': ['es']}]
                },
                {
                    'bindings':[instance2['id']],
                    'group_rules': [{'operation':'in', 'input_context_param':'ob', 'value': ['uk']}]
                }, {
                    'bindings':[instance2['id']],
                    'group_rules': [{'operation':'regex', 'input_context_param':'ob', 'value': ['^gbr$']}]
                },
                {
                    'bindings':[instance2['id']],
                    'group_rules': [{'operation':'range', 'input_context_param':'uuid', 'value': [100, 200]}]
                }
            ]
        }
        resp = self.post(url_bindings_collection, json.dumps(rules_test))
        self.assertEquals(resp.status_code, 201)
        # Discover instances with empty ob
        resp = self.get(url_instance_bind + 'ob=&origin=test-rules')
        response_content = json.loads(resp.content)
        self.assertEquals(resp.status_code, 400)
        self.assertEquals("SVC0002", response_content['exceptionId'], "instances bind by rules was not correct")
        self.assertEquals("Invalid parameter value: empty-query-parameter", response_content['exceptionText'],
                          "instances bind by rules was not correct")
        resp = self.get(url_instance_bind + 'ob=es&atributes.custom=&origin=test-rules')
        response_content = json.loads(resp.content)
        self.assertEquals(resp.status_code, 400)
        self.assertEquals("SVC1001", response_content['exceptionId'], "instances bind by rules was not correct")
        self.assertEquals("Invalid parameter: atributes.custom", response_content['exceptionText'],
                          "instances bind by rules was not correct")
        # Discover instances with duplicated ob
        resp = self.get(url_instance_bind + 'ob=es&ob=espa&origin=test-rules')
        response_content = json.loads(resp.content)
        self.assertEquals(resp.status_code, 400)
        self.assertEquals("SVC1024", response_content['exceptionId'], "instances bind by rules was not correct")
        self.assertEquals("Repeated query parameter: ob", response_content['exceptionText'],
                          "instances bind by rules was not correct")
    def test_bind_instances_with_unexisting_class_should_return_not_found(self):
        """Binding lookups on an unknown class must return 404."""
        url_instance_bind = url_get_binding.format('test_class_not_existing')
        # Discover premium default instances
        resp = self.get(url_instance_bind + 'version=1.0')
        self.assertEquals(resp.status_code, 404)
    def test_bind_instances_invalid_keys_no_rules_should_return_404(self):
        """Unknown context keys in strict mode must return 404/SVC2002."""
        # Create a class
        class_ = {'class_name': 'test_bind_behaviour2', 'description': 'Descripcion test',
                  'default_version': "1.0"}
        instance = {'uri': 'url_test', 'version': '1.0'}
        instance2 = {'uri': 'url_test_2', 'version': '2.0'}
        resp = self.post(url_class_collection, json.dumps(class_))
        self.assertEquals(resp.status_code, 201)
        url_instance_bind = url_get_binding.format('test_bind_behaviour2')
        resp = self.post(url_instance_collection.format(class_['class_name']), json.dumps(instance))
        self.assertEquals(resp.status_code, 201)
        resp = self.post(url_instance_collection.format(class_['class_name']), json.dumps(instance2))
        self.assertEquals(resp.status_code, 201)
        # try to discover invalid key
        resp = self.get(url_instance_bind + 'non_valid_key=test&behaviour=param_check_strict')
        self.assertEquals(resp.status_code, 404)
        response_content = json.loads(resp.content)
        self.assertEquals("SVC2002", response_content['exceptionId'], 'Not correct unica message returned')
| 52.592233 | 112 | 0.641684 | 1,268 | 10,834 | 5.260252 | 0.134069 | 0.095952 | 0.077961 | 0.101349 | 0.812144 | 0.801349 | 0.797451 | 0.775112 | 0.731784 | 0.697751 | 0 | 0.023472 | 0.225309 | 10,834 | 205 | 113 | 52.84878 | 0.771238 | 0.083072 | 0 | 0.60625 | 0 | 0 | 0.248133 | 0.037639 | 0 | 0 | 0 | 0 | 0.25625 | 1 | 0.03125 | false | 0 | 0.0125 | 0 | 0.05 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
f504d894084ea10ddd6ec81f63dd68b54edc7af3 | 94 | py | Python | src/si/util/__init__.py | minimum16/SIB_MachineLearning | e032ce15d96a096a3ca57ba8318d3787755534e2 | [
"Apache-2.0"
] | null | null | null | src/si/util/__init__.py | minimum16/SIB_MachineLearning | e032ce15d96a096a3ca57ba8318d3787755534e2 | [
"Apache-2.0"
] | null | null | null | src/si/util/__init__.py | minimum16/SIB_MachineLearning | e032ce15d96a096a3ca57ba8318d3787755534e2 | [
"Apache-2.0"
] | null | null | null | from .util import *
from .metrics import *
from .cv import CrossValidationScore, GridSearchCV
| 23.5 | 50 | 0.797872 | 11 | 94 | 6.818182 | 0.636364 | 0.266667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.138298 | 94 | 3 | 51 | 31.333333 | 0.925926 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
ee9804725f09d233026536b4391a5bc80310ec89 | 32 | py | Python | base/__init__.py | JiwooKimAR/MWP-solver-with-pretrained-language-model | 7fa723ba4485dd446840effc457d5ebcfef9357f | [
"MIT"
] | 5 | 2022-03-02T06:30:03.000Z | 2022-03-23T13:01:20.000Z | base/__init__.py | JiwooKimAR/MWP-solver-with-pretrained-language-model | 7fa723ba4485dd446840effc457d5ebcfef9357f | [
"MIT"
] | null | null | null | base/__init__.py | JiwooKimAR/MWP-solver-with-pretrained-language-model | 7fa723ba4485dd446840effc457d5ebcfef9357f | [
"MIT"
] | null | null | null | from .BaseModel import BaseModel | 32 | 32 | 0.875 | 4 | 32 | 7 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09375 | 32 | 1 | 32 | 32 | 0.965517 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
eea1db656f7a5e492eddd176f8173b06edb463dc | 45 | py | Python | project/memberships/models/__init__.py | hiraqdev/base-django | 4df57f356905274b26af57af8328f015d6c680a4 | [
"MIT"
] | 1 | 2018-03-19T05:21:53.000Z | 2018-03-19T05:21:53.000Z | project/memberships/models/__init__.py | hiraq/base-django | 4df57f356905274b26af57af8328f015d6c680a4 | [
"MIT"
] | 6 | 2020-06-05T20:17:33.000Z | 2022-03-11T23:45:44.000Z | project/memberships/models/__init__.py | hiraq/base-django | 4df57f356905274b26af57af8328f015d6c680a4 | [
"MIT"
] | null | null | null | from memberships.models.member import Member
| 22.5 | 44 | 0.866667 | 6 | 45 | 6.5 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.088889 | 45 | 1 | 45 | 45 | 0.95122 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
e11b17735b0b754118705912780f8cde5d975d47 | 64,918 | py | Python | 01-code-scripts/viirs.py | calekochenour/nighttime-radiance | 194cd62042bbe94e67aed9fdf7f3896174fecc38 | [
"BSD-3-Clause"
] | 7 | 2020-11-11T08:02:51.000Z | 2022-03-17T17:53:47.000Z | 01-code-scripts/viirs.py | calekochenour/nighttime-radiance | 194cd62042bbe94e67aed9fdf7f3896174fecc38 | [
"BSD-3-Clause"
] | null | null | null | 01-code-scripts/viirs.py | calekochenour/nighttime-radiance | 194cd62042bbe94e67aed9fdf7f3896174fecc38 | [
"BSD-3-Clause"
] | 4 | 2021-04-19T23:57:25.000Z | 2022-03-31T06:26:12.000Z | """ Module to work with NASA VIIRS DNB data """
import os
import re
import datetime as dt
import matplotlib.pyplot as plt
from matplotlib import colors
import numpy as np
import numpy.ma as ma
import pandas as pd
import rasterio as rio
from rasterio.transform import from_origin
import earthpy.plot as ep
import earthpy.spatial as es
def calculate_statistic(data, statistic="mean"):
    """Compute a per-pixel statistic over a list of co-located arrays.

    Parameters
    ----------
    data : list of numpy arrays
        Arrays covering the same geographic area. Individual arrays
        may contain NaN values (they are ignored in the statistic).
    statistic : str (optional)
        Statistic to compute across the stacked arrays. One of
        'mean', 'variance', 'deviation', or 'median'. Defaults
        to 'mean'.

    Returns
    -------
    data_statistic : numpy array
        Per-pixel statistic computed across all input arrays.

    Raises
    ------
    TypeError
        If ``data`` is not a list.
    ValueError
        If ``statistic`` is not a supported name.
    """
    if not isinstance(data, list):
        raise TypeError("Input data must be of type list.")
    # Map each supported statistic name to its NaN-aware NumPy reducer.
    reducers = {
        "mean": np.nanmean,
        "variance": np.nanvar,
        "deviation": np.nanstd,
        "median": np.nanmedian,
    }
    if statistic not in reducers:
        raise ValueError(
            "Invalid statistic. Function supports "
            "'mean', 'variance', 'deviation', or 'median'."
        )
    # Stack the arrays along a new leading axis and reduce over it,
    # ignoring NaN pixels.
    return reducers[statistic](np.stack(data), axis=0)
def clip_vnp46a1(geotiff_path, clip_boundary, clip_country, output_folder):
    """Clips a VNP46A1 GeoTiff to a boundary and exports the clipped
    image to a GeoTiff file.

    Parameters
    ----------
    geotiff_path : str
        Path to the GeoTiff image to be clipped.
    clip_boundary : geopandas geodataframe
        Geodataframe containing the boundary used for clipping.
    clip_country : str
        Name of the country the data is being clipped to, used in the
        exported file name. E.g. 'South Korea'. Spaces and capital
        letters are acceptable and handled within the function.
    output_folder : str
        Path to the folder where the clipped file will be exported to.

    Returns
    -------
    message : str
        Status message indicating clipping success or failure.
    """
    # Clip VNP46A1 file
    print(
        f"Started clipping: Clip {os.path.basename(geotiff_path)} "
        f"to {clip_country} boundary"
    )
    try:
        print("Clipping image...")
        # Clip image (return clipped array and new metadata)
        with rio.open(geotiff_path) as src:
            cropped_image, cropped_metadata = es.crop_image(
                raster=src, geoms=clip_boundary
            )
        print("Setting export name...")
        export_name = create_clipped_export_name(
            image_path=geotiff_path, country_name=clip_country
        )
        print("Exporting to GeoTiff...")
        # Export only the first band of the cropped stack
        export_array(
            array=cropped_image[0],
            output_path=os.path.join(output_folder, export_name),
            metadata=cropped_metadata,
        )
    except Exception as error:
        # BUG FIX: the original assigned print()'s return value (always
        # None) to message; build the string so the documented str is
        # actually returned.
        message = f"Clipping failed: {error}\n"
    else:
        message = (
            f"Completed clipping: Clip {os.path.basename(geotiff_path)} "
            f"to {clip_country} boundary\n"
        )
    print(message)
    return message
def clip_vnp46a2(geotiff_path, clip_boundary, clip_country, output_folder):
    """Clips a VNP46A2 GeoTiff to a boundary and exports the clipped
    image to a GeoTiff file.

    Parameters
    ----------
    geotiff_path : str
        Path to the GeoTiff image to be clipped.
    clip_boundary : geopandas geodataframe
        Geodataframe containing the boundary used for clipping.
    clip_country : str
        Name of the country the data is being clipped to, used in the
        exported file name. E.g. 'South Korea'. Spaces and capital
        letters are acceptable and handled within the function.
    output_folder : str
        Path to the folder where the clipped file will be exported to.

    Returns
    -------
    message : str
        Status message indicating clipping success or failure.
    """
    # Clip VNP46A2 file
    print(
        f"Started clipping: Clip {os.path.basename(geotiff_path)} "
        f"to {clip_country} boundary"
    )
    try:
        print("Clipping image...")
        # Clip image (return clipped array and new metadata)
        with rio.open(geotiff_path) as src:
            cropped_image, cropped_metadata = es.crop_image(
                raster=src, geoms=clip_boundary
            )
        print("Setting export name...")
        export_name = create_clipped_export_name(
            image_path=geotiff_path, country_name=clip_country
        )
        print("Exporting to GeoTiff...")
        # Export only the first band of the cropped stack
        export_array(
            array=cropped_image[0],
            output_path=os.path.join(output_folder, export_name),
            metadata=cropped_metadata,
        )
    except Exception as error:
        # BUG FIX: the original assigned print()'s return value (always
        # None) to message; build the string so the documented str is
        # actually returned.
        message = f"Clipping failed: {error}\n"
    else:
        message = (
            f"Completed clipping: Clip {os.path.basename(geotiff_path)} "
            f"to {clip_country} boundary\n"
        )
    print(message)
    return message
def concatenate_preprocessed_vnp46a1(
    west_geotiff_path, east_geotiff_path, output_folder
):
    """Concatenates horizontally-adjacent preprocessed VNP46A1 GeoTiff
    files and exports the concatenated array to a single GeoTiff.

    Parameters
    ----------
    west_geotiff_path : str
        Path to the West-most GeoTiff.
    east_geotiff_path : str
        Path to the East-most GeoTiff.
    output_folder : str
        Path to the folder where the concatenated file will be
        exported to.

    Returns
    -------
    message : str
        Status message indicating concatenation success or failure.
    """
    # Concatenate adjacent VNP46A1 GeoTiff files
    print(
        (
            f"Started concatenating:\n "
            f"{os.path.basename(west_geotiff_path)}\n "
            f"{os.path.basename(east_geotiff_path)}"
        )
    )
    try:
        print("Concatenating West and East arrays...")
        # Concatenate West and East images along the column (1) axis
        concatenated = np.concatenate(
            (
                read_geotiff_into_array(geotiff_path=west_geotiff_path),
                read_geotiff_into_array(geotiff_path=east_geotiff_path),
            ),
            axis=1,
        )
        print("Getting bounding box information...")
        # Get bounding box (left, top, bottom) from the west image and
        # (right) from the east image
        longitude_min = extract_geotiff_bounding_box(
            geotiff_path=west_geotiff_path
        ).left
        longitude_max = extract_geotiff_bounding_box(
            geotiff_path=east_geotiff_path
        ).right
        latitude_min = extract_geotiff_bounding_box(
            geotiff_path=west_geotiff_path
        ).bottom
        latitude_max = extract_geotiff_bounding_box(
            geotiff_path=west_geotiff_path
        ).top
        print("Creating transform...")
        # Set transform (west bound, north bound, x cell size, y cell size)
        concatenated_transform = from_origin(
            longitude_min,
            latitude_max,
            (longitude_max - longitude_min) / concatenated.shape[1],
            (latitude_max - latitude_min) / concatenated.shape[0],
        )
        print("Creating metadata...")
        # Create metadata for GeoTiff export; NaN marks nodata pixels
        metadata = create_metadata(
            array=concatenated,
            transform=concatenated_transform,
            driver="GTiff",
            nodata=np.nan,
            count=1,
            crs="epsg:4326",
        )
        print("Setting file export name...")
        export_name = create_concatenated_export_name(
            west_image_path=west_geotiff_path,
            east_image_path=east_geotiff_path,
        )
        print("Exporting to GeoTiff...")
        export_array(
            array=concatenated,
            output_path=os.path.join(output_folder, export_name),
            metadata=metadata,
        )
    except Exception as error:
        # BUG FIX: the original assigned print()'s return value (always
        # None) to message; build the string so the documented str is
        # actually returned.
        message = f"Concatenating failed: {error}\n"
    else:
        message = (
            f"Completed concatenating:\n "
            f"{os.path.basename(west_geotiff_path)}\n "
            f"{os.path.basename(east_geotiff_path)}\n"
        )
    print(message)
    return message
def concatenate_preprocessed_vnp46a2(
    west_geotiff_path, east_geotiff_path, output_folder
):
    """Concatenates horizontally-adjacent preprocessed VNP46A2 GeoTiff
    files and exports the concatenated array to a single GeoTiff.

    Parameters
    ----------
    west_geotiff_path : str
        Path to the West-most GeoTiff.
    east_geotiff_path : str
        Path to the East-most GeoTiff.
    output_folder : str
        Path to the folder where the concatenated file will be
        exported to.

    Returns
    -------
    message : str
        Status message indicating concatenation success or failure.
    """
    # Concatenate adjacent VNP46A2 GeoTiff files
    print(
        (
            f"Started concatenating:\n "
            f"{os.path.basename(west_geotiff_path)}\n "
            f"{os.path.basename(east_geotiff_path)}"
        )
    )
    try:
        print("Concatenating West and East arrays...")
        # Concatenate West and East images along the column (1) axis
        concatenated = np.concatenate(
            (
                read_geotiff_into_array(geotiff_path=west_geotiff_path),
                read_geotiff_into_array(geotiff_path=east_geotiff_path),
            ),
            axis=1,
        )
        print("Getting bounding box information...")
        # Get bounding box (left, top, bottom) from the west image and
        # (right) from the east image
        longitude_min = extract_geotiff_bounding_box(
            geotiff_path=west_geotiff_path
        ).left
        longitude_max = extract_geotiff_bounding_box(
            geotiff_path=east_geotiff_path
        ).right
        latitude_min = extract_geotiff_bounding_box(
            geotiff_path=west_geotiff_path
        ).bottom
        latitude_max = extract_geotiff_bounding_box(
            geotiff_path=west_geotiff_path
        ).top
        print("Creating transform...")
        # Set transform (west bound, north bound, x cell size, y cell size)
        concatenated_transform = from_origin(
            longitude_min,
            latitude_max,
            (longitude_max - longitude_min) / concatenated.shape[1],
            (latitude_max - latitude_min) / concatenated.shape[0],
        )
        print("Creating metadata...")
        # Create metadata for GeoTiff export; NaN marks nodata pixels
        metadata = create_metadata(
            array=concatenated,
            transform=concatenated_transform,
            driver="GTiff",
            nodata=np.nan,
            count=1,
            crs="epsg:4326",
        )
        print("Setting file export name...")
        export_name = create_concatenated_export_name(
            west_image_path=west_geotiff_path,
            east_image_path=east_geotiff_path,
        )
        print("Exporting to GeoTiff...")
        export_array(
            array=concatenated,
            output_path=os.path.join(output_folder, export_name),
            metadata=metadata,
        )
    except Exception as error:
        # BUG FIX: the original assigned print()'s return value (always
        # None) to message; build the string so the documented str is
        # actually returned.
        message = f"Concatenating failed: {error}\n"
    else:
        message = (
            f"Completed concatenating:\n "
            f"{os.path.basename(west_geotiff_path)}\n "
            f"{os.path.basename(east_geotiff_path)}\n"
        )
    print(message)
    return message
def create_clipped_export_name(image_path, country_name):
    """Build the export file name for a clipped image.

    Parameters
    ----------
    image_path : str
        Path to the original (unclipped) image.
    country_name : str
        Country name to embed in the file name; spaces and capital
        letters are normalized.

    Returns
    -------
    export_name : str
        File name of the form
        '<product>-<YYYYMMDD>-clipped-<country>.tif'.
    """
    # First 7 characters of the file name identify the data product
    # (e.g. 'VNP46A1'); the date is parsed from the file name.
    source = os.path.basename(image_path)[:7]
    acquisition_date = extract_date_vnp46a1(image_path)
    # Lowercase the country and replace spaces so the name is file-safe.
    country = country_name.replace(" ", "-").lower()
    return f"{source}-{acquisition_date}-clipped-{country}.tif"
def create_concatenated_export_name(west_image_path, east_image_path):
    """Build the export file name for two concatenated adjacent images.

    Parameters
    ----------
    west_image_path : str
        Path to the West-most image.
    east_image_path : str
        Path to the East-most image.

    Returns
    -------
    export_name : str
        File name combining both horizontal grid numbers, e.g.
        'VNP46A1.A2020001-h3031v05.tif'.
    """
    west_name = os.path.basename(west_image_path)
    east_name = os.path.basename(east_image_path)
    # Characters 18-19 of the file name hold the horizontal grid number
    # ('hXX'), characters 21-22 the vertical grid number ('vYY').
    west_horizontal = west_name[18:20]
    east_horizontal = east_name[18:20]
    vertical = west_name[21:23]
    # Keep product + date (first 16 chars), drop collection/processing
    # time info, and encode both horizontal grid numbers in the name.
    prefix = west_name[:16]
    return (
        f"{prefix}-h{west_horizontal}"
        f"{east_horizontal}v{vertical}.tif"
    )
def create_date_range(start_date, end_date):
    """Create a list of daily dates between a start and end date.

    Parameters
    ----------
    start_date : str
        Start date, formatted as 'YYYY-MM-DD'.
    end_date : str
        End date, formatted as 'YYYY-MM-DD'.

    Returns
    -------
    dates : list of str
        Dates between and including the start and end dates, each
        formatted as 'YYYYMMDD'.
    """
    # pandas generates the inclusive daily range; each timestamp is
    # re-formatted to the compact 'YYYYMMDD' form.
    return [
        timestamp.strftime("%Y%m%d")
        for timestamp in pd.date_range(start=start_date, end=end_date)
    ]
def create_metadata(
    array, transform, driver="GTiff", nodata=0, count=1, crs="epsg:4326"
):
    """Build the metadata dictionary needed to export an array to a
    georeferenced raster.

    Parameters
    ----------
    array : numpy array
        Array containing the data for export.
    transform : rasterio.transform affine object
        Affine transformation for the georeferenced array.
    driver : str
        File type/format for export. Defaults to GeoTiff ('GTiff').
    nodata : int or float
        Value in the array indicating no data. Defaults to 0.
    count : int
        Number of bands in the array for export. Defaults to 1.
    crs : str
        Coordinate reference system. Defaults to 'epsg:4326'.

    Returns
    -------
    metadata : dict
        Dictionary containing the export metadata.

    Example
    -------
    >>> import numpy as np
    >>> from rasterio.transform import from_origin
    >>> arr = np.array([[1, 2], [3, 4]])
    >>> transform = from_origin(-73.0, 43.0, 0.5, 0.5)
    >>> create_metadata(arr, transform)["width"]
    2
    """
    # Width/height come from the array shape (columns, rows); dtype is
    # taken directly from the array so the raster matches it exactly.
    return dict(
        driver=driver,
        dtype=array.dtype,
        nodata=nodata,
        width=array.shape[1],
        height=array.shape[0],
        count=count,
        crs=crs,
        transform=transform,
    )
def create_transform_vnp46a1(hdf5):
    """Create a geographic transform for a VNP46A1 HDF5 file from its
    bounding-coordinate tags and grid dimensions.

    Parameters
    ----------
    hdf5 : str
        Path to an existing VNP46A1 HDF5 file.

    Returns
    -------
    transform : affine.Affine object
        Affine transformation for the georeferenced array.
    """
    # Bounding coordinates live in the top-level HDF5 metadata tags.
    # NOTE(review): int() truncates the tag values; assumes VNP46A1
    # tiles align to whole-degree boundaries.
    with rio.open(hdf5) as dataset:
        tags = dataset.tags()
        west = int(tags["HDFEOS_GRIDS_VNP_Grid_DNB_WestBoundingCoord"])
        east = int(tags["HDFEOS_GRIDS_VNP_Grid_DNB_EastBoundingCoord"])
        south = int(tags["HDFEOS_GRIDS_VNP_Grid_DNB_SouthBoundingCoord"])
        north = int(tags["HDFEOS_GRIDS_VNP_Grid_DNB_NorthBoundingCoord"])
        # Grid dimensions come from the first Science Data Set
        # (subdataset/band).
        with rio.open(dataset.subdatasets[0]) as band:
            num_rows = band.meta.get("height")
            num_columns = band.meta.get("width")
    # Anchor the transform at the top-left corner and derive the cell
    # size from the extent divided by the grid dimensions.
    return from_origin(
        west,
        north,
        (east - west) / num_columns,
        (north - south) / num_rows,
    )
def create_transform_vnp46a2(hdf5):
    """Create a geographic transform for a VNP46A2 HDF5 file from its
    bounding-coordinate tags and grid dimensions.

    Parameters
    ----------
    hdf5 : str
        Path to an existing VNP46A2 HDF5 file.

    Returns
    -------
    transform : affine.Affine object
        Affine transformation for the georeferenced array.
    """
    # Bounding coordinates live in the top-level HDF5 metadata tags.
    # NOTE(review): int() truncates the tag values; assumes VNP46A2
    # tiles align to whole-degree boundaries.
    with rio.open(hdf5) as dataset:
        tags = dataset.tags()
        west = int(tags["WestBoundingCoord"])
        east = int(tags["EastBoundingCoord"])
        south = int(tags["SouthBoundingCoord"])
        north = int(tags["NorthBoundingCoord"])
        # Grid dimensions come from the first Science Data Set
        # (subdataset/band).
        with rio.open(dataset.subdatasets[0]) as band:
            num_rows = band.meta.get("height")
            num_columns = band.meta.get("width")
    # Anchor the transform at the top-left corner and derive the cell
    # size from the extent divided by the grid dimensions.
    return from_origin(
        west,
        north,
        (east - west) / num_columns,
        (north - south) / num_rows,
    )
def export_array(array, output_path, metadata):
    """Exports a numpy array to a GeoTiff.

    Parameters
    ----------
    array : numpy array
        Numpy array to be exported to GeoTiff.
    output_path : str
        Path to the output file (including filename).
    metadata : dict
        Dictionary containing the metadata required for export
        (see create_metadata()).

    Returns
    -------
    output_message : str
        Message indicating success or failure of the export.

    Example
    -------
    >>> export_array(
    ...     array=radiance_mean,
    ...     output_path="radiance-mean.tif",
    ...     metadata=export_metadata)
    Exported: radiance-mean.tif
    """
    # Write numpy array to GeoTiff (band 1)
    try:
        with rio.open(output_path, "w", **metadata) as dst:
            dst.write(array, 1)
    except Exception as error:
        # BUG FIX: the original assigned print()'s return value (always
        # None) to output_message; build the string so the documented
        # str is actually returned.
        output_message = f"ERROR: {error}"
    else:
        output_message = f"Exported: {os.path.split(output_path)[-1]}"
    print(output_message)
    return output_message
def extract_acquisition_date_vnp46a1(hdf5_path):
    """Return the acquisition date of a VNP46A1 HDF5 file.

    Parameters
    ----------
    hdf5_path : str
        Path to a VNP46A1 HDF5 file.

    Returns
    -------
    acquisition_date : str
        Acquisition date of the image, formatted as 'YYYY-MM-DD'.

    Example
    -------
    >>> extract_acquisition_date_vnp46a1(
    ...     "VNP46A1.A2020001.h30v05.001.2020004003738.h5")
    '2020-01-01'
    """
    # The acquisition date is stored as a top-level HDF5 metadata tag.
    with rio.open(hdf5_path) as dataset:
        return dataset.tags()[
            "HDFEOS_GRIDS_VNP_Grid_DNB_RangeBeginningDate"
        ]
def extract_band_vnp46a1(hdf5_path, band_name):
    """Extract one Science Data Set (band) from a NASA VNP46A1 HDF5
    file.

    Parameters
    ----------
    hdf5_path : str
        Path to the VNP46A1 HDF5 (.h5) file.
    band_name : str
        Name of the band (Science Data Set) to be extracted. Must be
        an exact match to an available Science Data Set (see the
        band_names list in the function body).

    Returns
    -------
    band : numpy array
        Data for the specified band (Science Data Set).

    Raises
    ------
    ValueError
        If band_name is not a valid VNP46A1 Science Data Set name.

    Example
    -------
    >>> qf_cloud_mask = extract_band_vnp46a1(
    ...     hdf5_path='VNP46A1.A2020001.h30v05.001.2020004003738.h5',
    ...     band_name='QF_Cloud_Mask'
    ... )
    >>> type(qf_cloud_mask)
    numpy.ndarray
    """
    # Valid Science Data Set names for the VNP46A1 product.
    band_names = [
        "BrightnessTemperature_M12",
        "Moon_Illumination_Fraction",
        "Moon_Phase_Angle",
        "QF_Cloud_Mask",
        "QF_DNB",
        "QF_VIIRS_M10",
        "QF_VIIRS_M11",
        "QF_VIIRS_M12",
        "QF_VIIRS_M13",
        "QF_VIIRS_M15",
        "QF_VIIRS_M16",
        "BrightnessTemperature_M13",
        "Radiance_M10",
        "Radiance_M11",
        "Sensor_Azimuth",
        "Sensor_Zenith",
        "Solar_Azimuth",
        "Solar_Zenith",
        "UTC_Time",
        "BrightnessTemperature_M15",
        "BrightnessTemperature_M16",
        "DNB_At_Sensor_Radiance_500m",
        "Glint_Angle",
        "Granule",
        "Lunar_Azimuth",
        "Lunar_Zenith",
    ]
    if band_name not in band_names:
        raise ValueError(
            f"Invalid band name. Must be one of the following: {band_names}"
        )
    # Science Data Sets are exposed as rasterio subdatasets whose paths
    # end with the band name; read band 1 of the matching subdataset.
    with rio.open(hdf5_path) as dataset:
        for subdataset in dataset.subdatasets:
            if re.search(f"{band_name}$", subdataset):
                with rio.open(subdataset) as source:
                    band = source.read(1)
    return band
def extract_band_vnp46a2(hdf5_path, band_name):
    """Extract one Science Data Set (band) from a NASA VNP46A2 HDF5
    file.

    Parameters
    ----------
    hdf5_path : str
        Path to the VNP46A2 HDF5 (.h5) file.
    band_name : str
        Name of the band (Science Data Set) to be extracted. Must be
        an exact match to an available Science Data Set (see the
        band_names list in the function body).

    Returns
    -------
    band : numpy array
        Data for the specified band (Science Data Set).

    Raises
    ------
    ValueError
        If band_name is not a valid VNP46A2 Science Data Set name.

    Example
    -------
    >>> qf_cloud_mask = extract_band_vnp46a2(
    ...     hdf5_path='VNP46A2.A2016153.h30v05.001.2020267141459.h5',
    ...     band_name='QF_Cloud_Mask'
    ... )
    >>> type(qf_cloud_mask)
    numpy.ndarray
    """
    # Valid Science Data Set names for the VNP46A2 product.
    band_names = [
        "DNB_BRDF-Corrected_NTL",
        "DNB_Lunar_Irradiance",
        "Gap_Filled_DNB_BRDF-Corrected_NTL",
        "Latest_High_Quality_Retrieval",
        "Mandatory_Quality_Flag",
        "QF_Cloud_Mask",
        "Snow_Flag",
    ]
    if band_name not in band_names:
        raise ValueError(
            f"Invalid band name. Must be one of the following: {band_names}"
        )
    # Science Data Sets are exposed as rasterio subdatasets whose paths
    # end with the band name; read band 1 of the matching subdataset.
    with rio.open(hdf5_path) as dataset:
        for subdataset in dataset.subdatasets:
            if re.search(f"{band_name}$", subdataset):
                with rio.open(subdataset) as source:
                    band = source.read(1)
    return band
def extract_date_vnp46a1(geotiff_path):
    """Extract the acquisition date from a preprocessed VNP46A1
    GeoTiff file name.

    Parameters
    ----------
    geotiff_path : str
        Path to the GeoTiff file.

    Returns
    -------
    date : str
        Acquisition date formatted as 'YYYYMMDD'.
    """
    # File names embed the date as 'AYYYYJJJ' (year + Julian day);
    # characters 9-15 hold 'YYYYJJJ', re-formatted to 'YYYYMMDD'.
    julian_date = os.path.basename(geotiff_path)[9:16]
    return dt.datetime.strptime(julian_date, "%Y%j").strftime("%Y%m%d")
def extract_date_vnp46a2(geotiff_path):
    """Extract the acquisition date from a preprocessed VNP46A2
    GeoTiff file name.

    Parameters
    ----------
    geotiff_path : str
        Path to the GeoTiff file.

    Returns
    -------
    date : str
        Acquisition date formatted as 'YYYYMMDD'.
    """
    # File names embed the date as 'AYYYYJJJ' (year + Julian day);
    # characters 9-15 hold 'YYYYJJJ', re-formatted to 'YYYYMMDD'.
    julian_date = os.path.basename(geotiff_path)[9:16]
    return dt.datetime.strptime(julian_date, "%Y%j").strftime("%Y%m%d")
def extract_geotiff_bounding_box(geotiff_path):
    """Return the bounding box of a GeoTiff file.

    Parameters
    ----------
    geotiff_path : str
        Path to the GeoTiff file.

    Returns
    -------
    bounding_box : rasterio.coords.BoundingBox
        Bounding box (left, bottom, right, top) for the GeoTiff.
    """
    # rasterio exposes the bounds directly on the opened dataset.
    with rio.open(geotiff_path) as source:
        return source.bounds
def extract_geotiff_metadata(geotiff_path):
    """Return the metadata dictionary of a GeoTiff file.

    Parameters
    ----------
    geotiff_path : str
        Path to the GeoTiff file.

    Returns
    -------
    metadata : dict
        Dictionary containing the raster metadata (driver, dtype,
        nodata, width, height, count, crs, transform).
    """
    # rasterio exposes the metadata directly on the opened dataset.
    with rio.open(geotiff_path) as source:
        return source.meta
def extract_qa_bits(qa_band, start_bit, end_bit):
    """Extracts the QA bitmask values for a specified bitmask (starting
    and ending bit).

    Parameters
    ----------
    qa_band : numpy array
        Array containing the raw QA values (base-2) for all bitmasks.
    start_bit : int
        First (least significant) bit in the bitmask.
    end_bit : int
        Last bit in the bitmask (inclusive).

    Returns
    -------
    qa_values : numpy array
        Array containing the extracted QA values (base-10) for the
        bitmask (0-1 for a single bit, 0-3 for 2 bits, or 0-(2^N - 1)
        for N bits).

    Example
    -------
    >>> extract_qa_bits(0b1100, 2, 3)
    3
    """
    # Build a mask with a 1 in every bit position of the target bitmask.
    # BUG FIX: the original accumulated `bit ** 2` (the square of the
    # bit index) instead of `2 ** bit`, producing a wrong mask for most
    # ranges (e.g. bit 0 contributed 0 instead of 1).
    qa_bits = 0
    for bit in range(start_bit, end_bit + 1):
        qa_bits += 1 << bit
    # Keep only the target bits of the QA band
    qa_flags_set = qa_band & qa_bits
    # Shift the flags down so the result matches the base-10 values in
    # the product's bitmask documentation
    qa_values = qa_flags_set >> start_bit
    return qa_values
def get_masking_details(array):
    """Returns information about how many pixels are masked in an array.

    Parameters
    ----------
    array : numpy.ma.core.MaskedArray
        Masked 2D array.

    Returns
    -------
    tuple
        total : int
            Total number of pixels in the array.
        masked : int
            Number of masked pixels in the array.
        unmasked : int
            Number of unmasked pixels in the array.
        message : str
            Human-readable masking summary (also printed).
    """
    # Get masking information from the 2D array shape and mask
    total = array.shape[0] * array.shape[1]
    masked = ma.count_masked(array)
    unmasked = array.count()
    # BUG FIX: the original returned the result of print() (always
    # None) despite documenting a tuple; build the message and return
    # the documented values.
    message = f"Masked: {masked}/{total}, Unmasked: {unmasked}/{total}"
    print(message)
    return total, masked, unmasked, message
def get_unique_values(array):
    """Return the unique values of a NumPy array as a plain list.

    Parameters
    ----------
    array : numpy array
        Array from which to get the unique values.

    Returns
    -------
    values : list
        Sorted list of the distinct values in the array, converted
        to native Python scalars.
    """
    # np.unique returns the sorted distinct values; tolist() converts
    # them from NumPy scalars to built-in Python types.
    return np.unique(array).tolist()
def plot_quality_flag_bitmask(bitmask_array, bitmask_name, axis):
    """Plots the discrete VNP46A1 bitmask values for an image on an
    existing matplotlib axis, with a class legend.

    Parameters
    ----------
    bitmask_array : numpy array
        Array containing the base-10 bitmask values.
    bitmask_name : str
        Name of the bitmask layer. Valid names: 'Day/Night', 'Land/Water
        Background', 'Cloud Mask Quality', 'Cloud Detection',
        'Shadow Detected', 'Cirrus Detection', 'Snow/Ice Surface', and
        'QF DNB'.
    axis : matplotlib axes object
        Axis on which the bitmask is drawn.

    Returns
    -------
    axis : matplotlib axes object
        The axis the bitmask was drawn on (legend, title, and axis
        visibility already set).

    Raises
    ------
    ValueError
        If bitmask_name is not one of the valid layer names.
    """
    # Store possible bitmask values and titles (for plotting)
    vnp46a1_bitmasks = {
        "Day/Night": {"values": [0, 1], "labels": ["Night", "Day"]},
        "Land/Water Background": {
            "values": [0, 1, 2, 3, 5],
            "labels": [
                "Land & Desert",
                "Land no Desert",
                "Inland Water",
                "Sea Water",
                "Coastal",
            ],
        },
        "Cloud Mask Quality": {
            "values": [0, 1, 2, 3],
            "labels": ["Poor", "Low", "Medium", "High"],
        },
        "Cloud Detection": {
            "values": [0, 1, 2, 3],
            "labels": [
                "Confident Clear",
                "Probably Clear",
                "Probably Cloudy",
                "Confident Cloudy",
            ],
        },
        "Shadow Detected": {
            "values": [0, 1],
            "labels": ["No Shadow", "Shadow"],
        },
        "Cirrus Detection": {
            "values": [0, 1],
            "labels": ["No Cirrus Cloud", "Cirrus Cloud"],
        },
        "Snow/Ice Surface": {
            "values": [0, 1],
            "labels": ["No Snow/Ice", "Snow/Ice"],
        },
        "QF DNB": {
            "values": [0, 1, 2, 4, 8, 16, 256, 512, 1024, 2048],
            "labels": [
                "No Sensor Problems",
                "Substitute Calibration",
                "Out of Range",
                "Saturation",
                "Temperature not Nominal",
                "Stray Light",
                "Bowtie Deleted / Range Bit",
                "Missing EV",
                "Calibration Fail",
                "Dead Detector",
            ],
        },
    }
    # Raise errors
    if bitmask_name not in vnp46a1_bitmasks.keys():
        raise ValueError(
            f"Invalid name. Valid names are: {list(vnp46a1_bitmasks.keys())}"
        )
    # Get values and labels for bitmask
    bitmask_values = vnp46a1_bitmasks.get(bitmask_name).get("values")
    bitmask_labels = vnp46a1_bitmasks.get(bitmask_name).get("labels")
    # Create colormap with the number of values in the bitmask
    # NOTE(review): plt.cm.get_cmap(name, lut) is deprecated in
    # matplotlib >= 3.7; migrate to matplotlib.colormaps when the
    # pinned matplotlib version allows.
    cmap = plt.cm.get_cmap("tab20b", len(bitmask_values))
    # Add start bin of 0 to list of bitmask values
    bins = [0] + bitmask_values
    # Normalize colormap to discrete intervals: each boundary sits at
    # the midpoint between consecutive bitmask values, with one extra
    # boundary extrapolated past the last value, so every bitmask value
    # maps to its own color.
    bounds = [((a + b) / 2) for a, b in zip(bins[:-1], bins[1::1])] + [
        2 * (bins[-1]) - bins[-2]
    ]
    norm = colors.BoundaryNorm(bounds, cmap.N)
    # Plot bitmask on axis
    bitmask = axis.imshow(bitmask_array, cmap=cmap, norm=norm)
    ep.draw_legend(
        im_ax=bitmask,
        classes=bitmask_values,
        cmap=cmap,
        titles=bitmask_labels,
    )
    axis.set_title(f"{bitmask_name}", size=16)
    axis.set_axis_off()
    return axis
def plot_quality_flag_bitmask_vnp46a2(bitmask_array, bitmask_name, axis):
    """Plots the discrete VNP46A2 bitmask values for an image on an
    existing matplotlib axis, with a class legend.

    Parameters
    ----------
    bitmask_array : numpy array
        Array containing the base-10 bitmask values.
    bitmask_name : str
        Name of the bitmask layer. Valid names: 'Mandatory Quality Flag',
        'Snow Flag', 'Day/Night', 'Land/Water Background',
        'Cloud Mask Quality', 'Cloud Detection', 'Shadow Detected',
        'Cirrus Detection', and 'Snow/Ice Surface'.
    axis : matplotlib axes object
        Axis on which the bitmask is drawn.

    Returns
    -------
    axis : matplotlib axes object
        The axis the bitmask was drawn on (legend, title, and axis
        visibility already set).

    Raises
    ------
    ValueError
        If bitmask_name is not one of the valid layer names.
    """
    # Store possible bitmask values and titles (for plotting)
    vnp46a2_bitmasks = {
        "Mandatory Quality Flag": {
            "values": [0, 1, 2, 255],
            "labels": [
                "High-Quality (Persistent)",
                "High-Quality (Ephemeral)",
                "Poor-Quality",
                "No Retrieval",
            ],
        },
        "Snow Flag": {
            "values": [0, 1, 255],
            "labels": ["No Snow/Ice", "Snow/Ice", "Fill Value"],
        },
        "Day/Night": {"values": [0, 1], "labels": ["Night", "Day"]},
        "Land/Water Background": {
            "values": [0, 1, 2, 3, 5, 7],
            "labels": [
                "Land & Desert",
                "Land no Desert",
                "Inland Water",
                "Sea Water",
                "Coastal",
                "No Data / Unknown",
            ],
        },
        "Cloud Mask Quality": {
            "values": [0, 1, 2, 3],
            "labels": ["Poor", "Low", "Medium", "High"],
        },
        "Cloud Detection": {
            "values": [0, 1, 2, 3],
            "labels": [
                "Confident Clear",
                "Probably Clear",
                "Probably Cloudy",
                "Confident Cloudy",
            ],
        },
        "Shadow Detected": {
            "values": [0, 1],
            "labels": ["No Shadow", "Shadow"],
        },
        "Cirrus Detection": {
            "values": [0, 1],
            "labels": ["No Cirrus Cloud", "Cirrus Cloud"],
        },
        "Snow/Ice Surface": {
            "values": [0, 1],
            "labels": ["No Snow/Ice", "Snow/Ice"],
        },
    }
    # Raise errors
    if bitmask_name not in vnp46a2_bitmasks.keys():
        raise ValueError(
            f"Invalid name. Valid names are: {list(vnp46a2_bitmasks.keys())}"
        )
    # Plot bitmask on axis; unlike the VNP46A1 variant, this scales the
    # default colormap linearly between the smallest and largest valid
    # bitmask values rather than using a discrete BoundaryNorm.
    bitmask = axis.imshow(
        bitmask_array,
        # cmap="Accent",
        vmin=vnp46a2_bitmasks.get(bitmask_name).get("values")[0],
        vmax=vnp46a2_bitmasks.get(bitmask_name).get("values")[-1],
    )
    ep.draw_legend(
        im_ax=bitmask,
        classes=vnp46a2_bitmasks.get(bitmask_name).get("values"),
        titles=vnp46a2_bitmasks.get(bitmask_name).get("labels"),
    )
    axis.set_title(f"{bitmask_name}", size=16)
    axis.set_axis_off()
    return axis
def plot_quality_flag_bitmask_single_band(bitmask_array, bitmask_name):
    """Plots the discrete VNP46A1 bitmask values for an image in a new
    dark-background figure, with a class legend.

    Parameters
    ----------
    bitmask_array : numpy array
        Array containing the base-10 bitmask values.
    bitmask_name : str
        Name of the bitmask layer. Valid names: 'Day/Night', 'Land/Water
        Background', 'Cloud Mask Quality', 'Cloud Detection',
        'Shadow Detected', 'Cirrus Detection', 'Snow/Ice Surface', and
        'QF DNB'.

    Returns
    -------
    tuple
        fig : matplotlib.figure.Figure object
            The figure object associated with the plot.
        ax : matplotlib axes object
            The axes object associated with the plot.

    Raises
    ------
    ValueError
        If bitmask_name is not one of the valid layer names.
    """
    # Store possible bitmask values and titles (for plotting)
    vnp46a1_bitmasks = {
        "Day/Night": {"values": [0, 1], "labels": ["Night", "Day"]},
        "Land/Water Background": {
            "values": [0, 1, 2, 3, 5],
            "labels": [
                "Land & Desert",
                "Land no Desert",
                "Inland Water",
                "Sea Water",
                "Coastal",
            ],
        },
        "Cloud Mask Quality": {
            "values": [0, 1, 2, 3],
            "labels": ["Poor", "Low", "Medium", "High"],
        },
        "Cloud Detection": {
            "values": [0, 1, 2, 3],
            "labels": [
                "Confident Clear",
                "Probably Clear",
                "Probably Cloudy",
                "Confident Cloudy",
            ],
        },
        "Shadow Detected": {
            "values": [0, 1],
            "labels": ["No Shadow", "Shadow"],
        },
        "Cirrus Detection": {
            "values": [0, 1],
            "labels": ["No Cirrus Cloud", "Cirrus Cloud"],
        },
        "Snow/Ice Surface": {
            "values": [0, 1],
            "labels": ["No Snow/Ice", "Snow/Ice"],
        },
        "QF DNB": {
            "values": [0, 1, 2, 4, 8, 16, 256, 512, 1024, 2048],
            "labels": [
                "No Sensor Problems",
                "Substitute Calibration",
                "Out of Range",
                "Saturation",
                "Temperature not Nominal",
                "Stray Light",
                "Bowtie Deleted / Range Bit",
                "Missing EV",
                "Calibration Fail",
                "Dead Detector",
            ],
        },
    }
    # Raise errors
    if bitmask_name not in vnp46a1_bitmasks.keys():
        raise ValueError(
            f"Invalid name. Valid names are: {list(vnp46a1_bitmasks.keys())}"
        )
    # Get values and labels for bitmask
    bitmask_values = vnp46a1_bitmasks.get(bitmask_name).get("values")
    bitmask_labels = vnp46a1_bitmasks.get(bitmask_name).get("labels")
    # Create colormap with the number of values in the bitmask
    # NOTE(review): plt.cm.get_cmap(name, lut) is deprecated in
    # matplotlib >= 3.7; migrate to matplotlib.colormaps when the
    # pinned matplotlib version allows.
    cmap = plt.cm.get_cmap("tab20b", len(bitmask_values))
    # Add start bin of 0 to list of bitmask values
    bins = [0] + bitmask_values
    # Normalize colormap to discrete intervals: each boundary sits at
    # the midpoint between consecutive bitmask values, with one extra
    # boundary extrapolated past the last value, so every bitmask value
    # maps to its own color.
    bounds = [((a + b) / 2) for a, b in zip(bins[:-1], bins[1::1])] + [
        2 * (bins[-1]) - bins[-2]
    ]
    norm = colors.BoundaryNorm(bounds, cmap.N)
    # Plot bitmask in a new dark-background figure
    with plt.style.context("dark_background"):
        fig, ax = plt.subplots(figsize=(12, 8))
        bitmask = ax.imshow(bitmask_array, cmap=cmap, norm=norm)
        ep.draw_legend(
            im_ax=bitmask,
            classes=bitmask_values,
            cmap=cmap,
            titles=bitmask_labels,
        )
        ax.set_title(f"{bitmask_name} Bitmask", size=20)
        ax.set_axis_off()
    return fig, ax
def plot_quality_flag_bitmask_single_band_vnp46a2(bitmask_array, bitmask_name):
    """Plot the discrete bitmask values for a single VNP46A2 image band.

    Parameters
    ----------
    bitmask_array : numpy array
        Array containing the base-10 bitmask values.
    bitmask_name : str
        Name of the bitmask layer. Valid names: 'Mandatory Quality Flag',
        'Snow Flag', 'Day/Night', 'Land/Water Background',
        'Cloud Mask Quality', 'Cloud Detection', 'Shadow Detected',
        'Cirrus Detection', and 'Snow/Ice Surface'.

    Returns
    -------
    fig : matplotlib.figure.Figure object
        The figure object associated with the plot.
    ax : matplotlib.axes._subplots.AxesSubplot object
        The axes object associated with the plot.

    Raises
    ------
    ValueError
        If ``bitmask_name`` is not a recognized layer name.
    """
    # Possible bitmask values and display labels, keyed by layer name.
    # Insertion order determines the order listed in the error message.
    vnp46a2_bitmasks = {
        "Mandatory Quality Flag": {
            "values": [0, 1, 2, 255],
            "labels": [
                "High-Quality (Persistent)",
                "High-Quality (Ephemeral)",
                "Poor-Quality",
                "No Retrieval",
            ],
        },
        "Snow Flag": {
            "values": [0, 1, 255],
            "labels": ["No Snow/Ice", "Snow/Ice", "Fill Value"],
        },
        "Day/Night": {"values": [0, 1], "labels": ["Night", "Day"]},
        "Land/Water Background": {
            "values": [0, 1, 2, 3, 5, 7],
            "labels": [
                "Land & Desert",
                "Land no Desert",
                "Inland Water",
                "Sea Water",
                "Coastal",
                "No Data / Unknown",
            ],
        },
        "Cloud Mask Quality": {
            "values": [0, 1, 2, 3],
            "labels": ["Poor", "Low", "Medium", "High"],
        },
        "Cloud Detection": {
            "values": [0, 1, 2, 3],
            "labels": [
                "Confident Clear",
                "Probably Clear",
                "Probably Cloudy",
                "Confident Cloudy",
            ],
        },
        "Shadow Detected": {
            "values": [0, 1],
            "labels": ["No Shadow", "Shadow"],
        },
        "Cirrus Detection": {
            "values": [0, 1],
            "labels": ["No Cirrus Cloud", "Cirrus Cloud"],
        },
        "Snow/Ice Surface": {
            "values": [0, 1],
            "labels": ["No Snow/Ice", "Snow/Ice"],
        },
    }
    # Guard clause: fail fast on an unknown layer name.
    if bitmask_name not in vnp46a2_bitmasks.keys():
        raise ValueError(
            f"Invalid name. Valid names are: {list(vnp46a2_bitmasks.keys())}"
        )
    # Look the layer up once instead of repeating nested .get() calls.
    layer = vnp46a2_bitmasks[bitmask_name]
    layer_values = layer["values"]
    layer_labels = layer["labels"]
    # Render on a dark background, scaling the color range to the
    # smallest and largest possible bitmask values for this layer.
    with plt.style.context("dark_background"):
        fig, ax = plt.subplots(figsize=(12, 8))
        image = ax.imshow(
            bitmask_array,
            vmin=layer_values[0],
            vmax=layer_values[-1],
        )
        ep.draw_legend(
            im_ax=image,
            classes=layer_values,
            titles=layer_labels,
        )
        ax.set_title(f"{bitmask_name}", size=16)
        ax.set_axis_off()
    return fig, ax
def plot_quality_flags_vnp46a1(vnp46a1_quality_stack, data_source="NASA"):
    """Plot all VIIRS VNP46A1 DNB QF Cloud Mask bitmasks and the
    QF DNB bitmask.

    Parameters
    ----------
    vnp46a1_quality_stack : numpy array
        3D array containing the quality flag bitmask layers.
    data_source : str, optional
        Location of the data. Default value is 'NASA'.

    Returns
    -------
    tuple
        fig : matplotlib.figure.Figure object
            The figure object associated with the plot.
        ax : matplotlib.axes._subplots.AxesSubplot objects
            The axes objects associated with the plot.
    """
    # Bitmask layer names, ordered to match the bands in the stack
    # (see stack_quality_flags_vnp46a1).
    layer_names = [
        "Day/Night",
        "Land/Water Background",
        "Cloud Mask Quality",
        "Cloud Detection",
        "Shadow Detected",
        "Cirrus Detection",
        "Snow/Ice Surface",
        "QF DNB",
    ]
    with plt.style.context("dark_background"):
        fig, ax = plt.subplots(nrows=4, ncols=2, figsize=(15, 20))
        plt.suptitle("VNP46A1 Quality Flag Bitmasks", size=20)
        plt.subplots_adjust(top=0.935)
        # Fill the 4x2 grid row-by-row, one panel per bitmask band.
        for band_index, layer_name in enumerate(layer_names):
            plot_quality_flag_bitmask(
                bitmask_array=vnp46a1_quality_stack[band_index],
                bitmask_name=layer_name,
                axis=ax[band_index // 2][band_index % 2],
            )
        # Caption noting where the data came from.
        fig.text(
            0.5,
            0.1,
            f"Data Source: {data_source}",
            ha="center",
            fontsize=12,
        )
    return fig, ax
def plot_quality_flags_vnp46a2(vnp46a2_quality_stack, data_source="NASA"):
    """Plot all VIIRS VNP46A2 DNB QF Cloud Mask bitmasks, the Mandatory
    Quality Flag, and Snow Flag.

    Parameters
    ----------
    vnp46a2_quality_stack : numpy array
        3D array containing the quality flag bitmask layers.
    data_source : str, optional
        Location of the data. Default value is 'NASA'.

    Returns
    -------
    tuple
        fig : matplotlib.figure.Figure object
            The figure object associated with the plot.
        ax : matplotlib.axes._subplots.AxesSubplot objects
            The axes objects associated with the plot.
    """
    # Bitmask layer names, ordered to match the bands in the stack
    # (see stack_quality_flags_vnp46a2).
    layer_names = [
        "Mandatory Quality Flag",
        "Snow Flag",
        "Day/Night",
        "Land/Water Background",
        "Cloud Mask Quality",
        "Cloud Detection",
        "Shadow Detected",
        "Cirrus Detection",
        "Snow/Ice Surface",
    ]
    with plt.style.context("dark_background"):
        fig, ax = plt.subplots(nrows=5, ncols=2, figsize=(15, 20))
        plt.suptitle("VNP46A2 Quality Flag Bitmasks", size=20)
        plt.subplots_adjust(top=0.935)
        # BUG FIX: the original plotted band 6 (Shadow Detected) for the
        # 'Cirrus Detection' panel; iterating band index and layer name
        # together guarantees each panel shows its own band.
        for band_index, layer_name in enumerate(layer_names):
            plot_quality_flag_bitmask_vnp46a2(
                bitmask_array=vnp46a2_quality_stack[band_index],
                bitmask_name=layer_name,
                axis=ax[band_index // 2][band_index % 2],
            )
        # Caption noting where the data came from.
        fig.text(
            0.5,
            0.1,
            f"Data Source: {data_source}",
            ha="center",
            fontsize=12,
        )
        # Nine panels on a 5x2 grid leave the last axis unused.
        fig.delaxes(ax[4][1])
    return fig, ax
def preprocess_vnp46a1(hdf5_path, output_folder):
    """Preprocess a NASA VNP46A1 HDF5 (.h5) file.

    Preprocessing steps include masking data for fill values, clouds, and
    sensor problems, filling masked values, and exporting data to a
    GeoTiff.

    Parameters
    ----------
    hdf5_path : str
        Path to the VNP46A1 HDF5 (.h5) file to be preprocessed.
    output_folder : str
        Path to the folder where the preprocessed file will be exported to.

    Returns
    -------
    message : str
        Indication of preprocessing completion status (success or failure).
    """
    print(f"Started preprocessing: {os.path.basename(hdf5_path)}")
    try:
        print("Extracting bands...")
        # Extract DNB_At_Sensor_Radiance_500m, QF_Cloud_Mask, QF_DNB
        dnb_at_sensor_radiance = extract_band_vnp46a1(
            hdf5_path=hdf5_path, band_name="DNB_At_Sensor_Radiance_500m"
        )
        qf_cloud_mask = extract_band_vnp46a1(
            hdf5_path=hdf5_path, band_name="QF_Cloud_Mask"
        )
        qf_dnb = extract_band_vnp46a1(hdf5_path=hdf5_path, band_name="QF_DNB")
        print("Applying scale factor...")
        # Scale factor of 0.1 converts the stored integers to radiance.
        dnb_at_sensor_radiance_scaled = (
            dnb_at_sensor_radiance.astype("float") * 0.1
        )
        print("Masking for fill values...")
        # The raw fill value 65535 becomes 6553.5 after scaling.
        masked_for_fill_value = ma.masked_where(
            dnb_at_sensor_radiance_scaled == 6553.5,
            dnb_at_sensor_radiance_scaled,
            copy=True,
        )
        print("Masking for clouds...")
        # QF_Cloud_Mask bits 6-7: Cloud Detection Results & Confidence
        # Indicator (2 = probably cloudy, 3 = confident cloudy).
        cloud_detection_bitmask = extract_qa_bits(
            qa_band=qf_cloud_mask, start_bit=6, end_bit=7
        )
        masked_for_probably_cloudy = ma.masked_where(
            cloud_detection_bitmask == 2, masked_for_fill_value, copy=True
        )
        masked_for_confident_cloudy = ma.masked_where(
            cloud_detection_bitmask == 3, masked_for_probably_cloudy, copy=True
        )
        print("Masking for sea water...")
        # QF_Cloud_Mask bits 1-3: Land/Water Background (3 = sea water).
        land_water_bitmask = extract_qa_bits(
            qa_band=qf_cloud_mask, start_bit=1, end_bit=3
        )
        masked_for_sea_water = ma.masked_where(
            land_water_bitmask == 3, masked_for_confident_cloudy, copy=True
        )
        print("Masking for sensor problems...")
        # QF_DNB == 0 means no problems; any value > 0 flags an issue.
        masked_for_sensor_problems = ma.masked_where(
            qf_dnb > 0, masked_for_sea_water, copy=True
        )
        print("Filling masked values...")
        # Fill masked pixels with NaN so they export as nodata.
        ma.set_fill_value(masked_for_sensor_problems, np.nan)
        filled_data = masked_for_sensor_problems.filled()
        print("Creating metadata...")
        metadata = create_metadata(
            array=filled_data,
            transform=create_transform_vnp46a1(hdf5_path),
            driver="GTiff",
            nodata=np.nan,
            count=1,
            crs="epsg:4326",
        )
        print("Exporting to GeoTiff...")
        # Derive the output name from the HDF5 name: strip '.h5',
        # lowercase, and replace '.' with '-' so the name is filesystem
        # friendly.
        export_name = (
            f"{os.path.basename(hdf5_path)[:-3].lower().replace('.', '-')}.tif"
        )
        export_array(
            array=filled_data,
            output_path=os.path.join(output_folder, export_name),
            metadata=metadata,
        )
    except Exception as error:
        # BUG FIX: the original assigned print()'s return value (always
        # None) to message, so callers never received the documented str.
        message = f"Preprocessing failed: {error}\n"
    else:
        message = f"Completed preprocessing: {os.path.basename(hdf5_path)}\n"
    print(message)
    return message
def preprocess_vnp46a2(hdf5_path, output_folder):
    """Preprocess a NASA VNP46A2 HDF5 (.h5) file.

    Preprocessing steps include masking data for fill values, poor
    quality/no retrieval, clouds, and sea water, filling masked values,
    and exporting data to a GeoTiff.

    Parameters
    ----------
    hdf5_path : str
        Path to the VNP46A2 HDF5 (.h5) file to be preprocessed.
    output_folder : str
        Path to the folder where the preprocessed file will be exported to.

    Returns
    -------
    message : str
        Indication of preprocessing completion status (success or failure).
    """
    print(f"Started preprocessing: {os.path.basename(hdf5_path)}")
    try:
        print("Extracting bands...")
        # Extract DNB BRDF-Corrected radiance
        dnb_brdf_corrected_ntl = extract_band_vnp46a2(
            hdf5_path=hdf5_path, band_name="DNB_BRDF-Corrected_NTL"
        )
        # Extract Mandatory Quality Flag and QF Cloud Mask bands
        mandatory_quality_flag = extract_band_vnp46a2(
            hdf5_path=hdf5_path, band_name="Mandatory_Quality_Flag"
        )
        qf_cloud_mask = extract_band_vnp46a2(
            hdf5_path=hdf5_path, band_name="QF_Cloud_Mask"
        )
        print("Applying scale factor...")
        # Scale factor of 0.1 converts the stored integers to radiance.
        dnb_brdf_corrected_ntl_scaled = (
            dnb_brdf_corrected_ntl.astype("float") * 0.1
        )
        print("Masking for fill values...")
        # The raw fill value 65535 becomes 6553.5 after scaling.
        masked_for_fill_value = ma.masked_where(
            dnb_brdf_corrected_ntl_scaled == 6553.5,
            dnb_brdf_corrected_ntl_scaled,
            copy=True,
        )
        print("Masking for poor quality and no retrieval...")
        # Mandatory quality flag: 2 = poor quality, 255 = no retrieval.
        masked_for_poor_quality = ma.masked_where(
            mandatory_quality_flag == 2, masked_for_fill_value, copy=True
        )
        masked_for_no_retrieval = ma.masked_where(
            mandatory_quality_flag == 255, masked_for_poor_quality, copy=True
        )
        print("Masking for clouds...")
        # QF_Cloud_Mask bits 6-7: Cloud Detection Results & Confidence
        # Indicator (2 = probably cloudy, 3 = confident cloudy).
        cloud_detection_bitmask = extract_qa_bits(
            qa_band=qf_cloud_mask, start_bit=6, end_bit=7
        )
        masked_for_probably_cloudy = ma.masked_where(
            cloud_detection_bitmask == 2, masked_for_no_retrieval, copy=True
        )
        masked_for_confident_cloudy = ma.masked_where(
            cloud_detection_bitmask == 3, masked_for_probably_cloudy, copy=True
        )
        print("Masking for sea water...")
        # QF_Cloud_Mask bits 1-3: Land/Water Background (3 = sea water).
        land_water_bitmask = extract_qa_bits(
            qa_band=qf_cloud_mask, start_bit=1, end_bit=3
        )
        masked_for_sea_water = ma.masked_where(
            land_water_bitmask == 3, masked_for_confident_cloudy, copy=True
        )
        print("Filling masked values...")
        # Fill masked pixels with NaN so they export as nodata.
        ma.set_fill_value(masked_for_sea_water, np.nan)
        filled_data = masked_for_sea_water.filled()
        print("Creating metadata...")
        metadata = create_metadata(
            array=filled_data,
            transform=create_transform_vnp46a2(hdf5_path),
            driver="GTiff",
            nodata=np.nan,
            count=1,
            crs="epsg:4326",
        )
        print("Exporting to GeoTiff...")
        # Derive the output name from the HDF5 name: strip '.h5',
        # lowercase, and replace '.' with '-'.
        export_name = (
            f"{os.path.basename(hdf5_path)[:-3].lower().replace('.', '-')}.tif"
        )
        export_array(
            array=filled_data,
            output_path=os.path.join(output_folder, export_name),
            metadata=metadata,
        )
    except Exception as error:
        # BUG FIX: the original assigned print()'s return value (always
        # None) to message, so callers never received the documented str.
        message = f"Preprocessing failed: {error}\n"
    else:
        message = f"Completed preprocessing: {os.path.basename(hdf5_path)}\n"
    print(message)
    return message
def read_geotiff_into_array(geotiff_path, dimensions=1):
    """Read a GeoTiff file into a NumPy array.

    Parameters
    ----------
    geotiff_path : str
        Path to the GeoTiff file.
    dimensions : int, optional
        Number of bands to read in. Default value is 1.

    Returns
    -------
    array : numpy array
        Array containing the data.
    """
    # The rasterio context manager closes the dataset once read.
    with rio.open(geotiff_path) as source_dataset:
        band_data = source_dataset.read(dimensions)
    return band_data
def save_figure(output_path):
    """Save the current matplotlib figure to a specified location.

    Parameters
    ----------
    output_path : str
        Path (including file name and extension) for the output file.

    Returns
    -------
    message : str
        Message indicating the name of the saved file (upon success)
        or an error message (upon failure).

    Example
    -------
    >>> # Set output path and save figure
    >>> outpath = os.path.join("04-graphics-outputs", "figure.png")
    >>> save_figure(outpath)
    Saved plot: figure.png
    """
    try:
        # Black facecolor matches the dark_background plotting style
        # used elsewhere in this module.
        plt.savefig(
            fname=output_path, facecolor="k", dpi=300, bbox_inches="tight"
        )
    except Exception as error:
        # BUG FIX: the original assigned print()'s return value (always
        # None) to message; build the status string, then print/return it.
        message = f"Failed to save plot: {error}"
    else:
        # Report just the file name, not the full path.
        message = f"Saved plot: {os.path.split(output_path)[-1]}"
    print(message)
    return message
def stack_quality_flags_vnp46a1(vnp46a1_path):
    """Create a stacked (3D) NumPy array containing all of the VNP46A1
    quality flag bitmask layers.

    Parameters
    ----------
    vnp46a1_path : str
        Path to the VNP46A1 HDF5 (.h5) file.

    Returns
    -------
    quality_flag_stack : numpy array
        3D array containing the quality flag bitmask layers.
    """
    # Extract QF Cloud Mask and QF DNB bands
    qf_cloud_mask = extract_band_vnp46a1(
        hdf5_path=vnp46a1_path, band_name="QF_Cloud_Mask"
    )
    qf_dnb = extract_band_vnp46a1(hdf5_path=vnp46a1_path, band_name="QF_DNB")
    # (start_bit, end_bit) spans for each QF Cloud Mask layer, ordered:
    # day/night, land/water background, cloud mask quality, cloud
    # detection, shadow detected, cirrus detection, snow/ice surface.
    bit_ranges = [(0, 0), (1, 3), (4, 5), (6, 7), (8, 8), (9, 9), (10, 10)]
    layers = [
        extract_qa_bits(qf_cloud_mask, start_bit, end_bit)
        for start_bit, end_bit in bit_ranges
    ]
    # QF DNB is already a standalone band; append it as the final layer.
    layers.append(qf_dnb)
    return np.stack(arrays=layers)
def stack_quality_flags_vnp46a2(vnp46a2_path):
    """Create a stacked (3D) NumPy array containing all of the VNP46A2
    quality flag bitmask layers.

    Parameters
    ----------
    vnp46a2_path : str
        Path to the VNP46A2 HDF5 (.h5) file.

    Returns
    -------
    quality_flag_stack : numpy array
        3D array containing the quality flag bitmask layers.
    """
    # Extract Mandatory Quality Flag, QF Cloud Mask, and Snow Flag bands
    mandatory_quality_flag = extract_band_vnp46a2(
        hdf5_path=vnp46a2_path, band_name="Mandatory_Quality_Flag"
    )
    qf_cloud_mask = extract_band_vnp46a2(
        hdf5_path=vnp46a2_path, band_name="QF_Cloud_Mask"
    )
    snow_flag = extract_band_vnp46a2(
        hdf5_path=vnp46a2_path, band_name="Snow_Flag"
    )
    # (start_bit, end_bit) spans for each QF Cloud Mask layer, ordered:
    # day/night, land/water background, cloud mask quality, cloud
    # detection, shadow detected, cirrus detection, snow/ice surface.
    bit_ranges = [(0, 0), (1, 3), (4, 5), (6, 7), (8, 8), (9, 9), (10, 10)]
    # Standalone bands come first, then the decoded QF Cloud Mask layers.
    layers = [mandatory_quality_flag, snow_flag]
    layers.extend(
        extract_qa_bits(qf_cloud_mask, start_bit, end_bit)
        for start_bit, end_bit in bit_ranges
    )
    return np.stack(arrays=layers)
| 28.225217 | 79 | 0.576342 | 7,291 | 64,918 | 4.938829 | 0.082293 | 0.021689 | 0.014663 | 0.008998 | 0.806993 | 0.782527 | 0.741565 | 0.711461 | 0.700686 | 0.675165 | 0 | 0.026676 | 0.318032 | 64,918 | 2,299 | 80 | 28.237495 | 0.786682 | 0.359561 | 0 | 0.627885 | 0 | 0 | 0.18612 | 0.041708 | 0 | 0 | 0 | 0 | 0 | 1 | 0.032692 | false | 0 | 0.011538 | 0 | 0.076923 | 0.056731 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
01611284c50e02145392db0ce559a05077e76efc | 29 | py | Python | geomark/__init__.py | pauperpythonistas/python-geomark | edddafde990bdf42441e6adde77ea3d67a0790ee | [
"BSD-3-Clause"
] | 1 | 2017-12-16T00:39:20.000Z | 2017-12-16T00:39:20.000Z | geomark/__init__.py | pauperpythonistas/python-geomark | edddafde990bdf42441e6adde77ea3d67a0790ee | [
"BSD-3-Clause"
] | 34 | 2017-12-19T20:20:28.000Z | 2018-11-04T05:10:17.000Z | geomark/__init__.py | greg-and-adam/python-geomark | edddafde990bdf42441e6adde77ea3d67a0790ee | [
"BSD-3-Clause"
] | 2 | 2017-12-19T19:39:59.000Z | 2018-11-01T02:53:15.000Z | from .geomark import Geomark
| 14.5 | 28 | 0.827586 | 4 | 29 | 6 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.137931 | 29 | 1 | 29 | 29 | 0.96 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
01721591bcd881309a6774c15816c80fc0b273e3 | 90 | py | Python | injecttf/example.py | Abirami-mygithub/InjectTFParallel | 0f3d545ef8e4ea8cdffd0d23cb0ea6e30cdc302e | [
"MIT"
] | null | null | null | injecttf/example.py | Abirami-mygithub/InjectTFParallel | 0f3d545ef8e4ea8cdffd0d23cb0ea6e30cdc302e | [
"MIT"
] | null | null | null | injecttf/example.py | Abirami-mygithub/InjectTFParallel | 0f3d545ef8e4ea8cdffd0d23cb0ea6e30cdc302e | [
"MIT"
] | null | null | null | from fault_injector import Fault_Injector
fi_obj = Fault_Injector()
fi_obj.inject_fault() | 22.5 | 41 | 0.844444 | 14 | 90 | 5 | 0.5 | 0.557143 | 0.428571 | 0.514286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.088889 | 90 | 4 | 42 | 22.5 | 0.853659 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
6d6aa5c9df4e328f719e59d1957b1e70c7e933e3 | 171 | py | Python | projecteuler/problems/problem_48.py | hjheath/ProjectEuler | 6961fe81e2039c281ea9d4ab0bdd85611bf256a8 | [
"MIT"
] | 1 | 2015-04-25T10:37:52.000Z | 2015-04-25T10:37:52.000Z | projecteuler/problems/problem_48.py | hjheath/ProjectEuler | 6961fe81e2039c281ea9d4ab0bdd85611bf256a8 | [
"MIT"
] | null | null | null | projecteuler/problems/problem_48.py | hjheath/ProjectEuler | 6961fe81e2039c281ea9d4ab0bdd85611bf256a8 | [
"MIT"
] | null | null | null | """Problem 44 of https://projecteuler.net"""
def problem_48():
"""Solution to problem 48."""
return sum(x ** x for x in range(1, 1001)) % 10 ** 10
problem_48()
| 19 | 57 | 0.608187 | 27 | 171 | 3.777778 | 0.703704 | 0.264706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.125 | 0.204678 | 171 | 8 | 58 | 21.375 | 0.625 | 0.362573 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 6 |
6d7a169052b005b8f1ffc96d34bd042ff1caa958 | 89 | py | Python | tests/unit_tests/running_modes/create_model/__init__.py | marco-foscato/Lib-INVENT | fe6a65ab7165abd87b25752a6b4208c8703d11f7 | [
"Apache-2.0"
] | 26 | 2021-04-30T23:21:17.000Z | 2022-03-10T06:33:11.000Z | tests/unit_tests/running_modes/create_model/__init__.py | marco-foscato/Lib-INVENT | fe6a65ab7165abd87b25752a6b4208c8703d11f7 | [
"Apache-2.0"
] | 6 | 2021-10-03T08:35:48.000Z | 2022-03-24T09:57:39.000Z | tests/unit_tests/running_modes/create_model/__init__.py | marco-foscato/Lib-INVENT | fe6a65ab7165abd87b25752a6b4208c8703d11f7 | [
"Apache-2.0"
] | 10 | 2021-04-28T14:08:17.000Z | 2022-03-04T04:18:13.000Z | from tests.unit_tests.running_modes.create_model.create_model_test import TestCreateModel | 89 | 89 | 0.921348 | 13 | 89 | 5.923077 | 0.769231 | 0.285714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.033708 | 89 | 1 | 89 | 89 | 0.895349 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
6dc899bb3c247c63bdc41bad34e81fa4d1cb996c | 30 | py | Python | confoo-2012/unittest_example/__init__.py | andymckay/presentations | 19e485f0ad5ca5e56779475659f31b6682e8016e | [
"CC-BY-3.0"
] | 3 | 2015-08-05T23:04:10.000Z | 2022-01-24T20:01:33.000Z | confoo-2012/unittest_example/__init__.py | andymckay/presentations | 19e485f0ad5ca5e56779475659f31b6682e8016e | [
"CC-BY-3.0"
] | null | null | null | confoo-2012/unittest_example/__init__.py | andymckay/presentations | 19e485f0ad5ca5e56779475659f31b6682e8016e | [
"CC-BY-3.0"
] | 4 | 2015-06-28T19:02:49.000Z | 2021-10-29T19:28:39.000Z | from example import sorted_ci
| 15 | 29 | 0.866667 | 5 | 30 | 5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.133333 | 30 | 1 | 30 | 30 | 0.961538 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
0972c45e9c027401871c5b79beb14b141e758f69 | 207 | py | Python | src/xsd_sites/admin.py | minyiky/xSACdb | 8c407e9a9da196750a66ad53613ad67c8c56e1c3 | [
"MIT"
] | 2 | 2017-08-14T14:40:17.000Z | 2019-02-07T13:10:23.000Z | src/xsd_sites/admin.py | minyiky/xSACdb | 8c407e9a9da196750a66ad53613ad67c8c56e1c3 | [
"MIT"
] | 19 | 2016-02-07T18:02:53.000Z | 2019-11-03T17:48:13.000Z | src/xsd_sites/admin.py | minyiky/xSACdb | 8c407e9a9da196750a66ad53613ad67c8c56e1c3 | [
"MIT"
] | 4 | 2015-10-19T17:24:35.000Z | 2021-05-12T07:30:32.000Z | from django.contrib import admin
from reversion_compare.admin import CompareVersionAdmin
from xsd_sites.models import *
class SiteAdmin(CompareVersionAdmin):
    """Django admin for Site with reversion-compare version diffing.

    No customisation beyond the CompareVersionAdmin base class.
    """

    pass
admin.site.register(Site, SiteAdmin)
| 20.7 | 55 | 0.826087 | 25 | 207 | 6.76 | 0.64 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.115942 | 207 | 9 | 56 | 23 | 0.923497 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.166667 | 0.5 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 6 |
111c44cbe3fe321b5b2ffe8734d4c4f68f013984 | 71 | py | Python | chemception/utils/__init__.py | FrancescoZ/Chemception | e6c6bf943d80f4813e899d7524691f77d43bc344 | [
"BSD-2-Clause",
"MIT"
] | null | null | null | chemception/utils/__init__.py | FrancescoZ/Chemception | e6c6bf943d80f4813e899d7524691f77d43bc344 | [
"BSD-2-Clause",
"MIT"
] | null | null | null | chemception/utils/__init__.py | FrancescoZ/Chemception | e6c6bf943d80f4813e899d7524691f77d43bc344 | [
"BSD-2-Clause",
"MIT"
] | null | null | null | from .constant import *
from .helpers import *
from .visualize import * | 23.666667 | 24 | 0.760563 | 9 | 71 | 6 | 0.555556 | 0.37037 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.15493 | 71 | 3 | 24 | 23.666667 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
116b7d64ff983ab9bebd820180e2d6e1892873c9 | 24 | py | Python | DjangoTester/settings/__init__.py | iandmyhand/django-tester | b86c203d9529c61b291961b98b0907763385b285 | [
"MIT"
] | null | null | null | DjangoTester/settings/__init__.py | iandmyhand/django-tester | b86c203d9529c61b291961b98b0907763385b285 | [
"MIT"
] | null | null | null | DjangoTester/settings/__init__.py | iandmyhand/django-tester | b86c203d9529c61b291961b98b0907763385b285 | [
"MIT"
] | null | null | null | from . private import *
| 12 | 23 | 0.708333 | 3 | 24 | 5.666667 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.208333 | 24 | 1 | 24 | 24 | 0.894737 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
feea76d886106fc0515b3aaeb1ead301d64f1087 | 43 | py | Python | src/__init__.py | Vijay2051/Kagcolabserver | f22d4444d54ccfabb0c79acbe5c5c5c1be2922eb | [
"MIT"
] | 1 | 2020-10-21T22:24:38.000Z | 2020-10-21T22:24:38.000Z | src/__init__.py | Vijay2051/Kagcolabserver | f22d4444d54ccfabb0c79acbe5c5c5c1be2922eb | [
"MIT"
] | null | null | null | src/__init__.py | Vijay2051/Kagcolabserver | f22d4444d54ccfabb0c79acbe5c5c5c1be2922eb | [
"MIT"
] | null | null | null | from .kagcolabserver import KagColabServer
| 21.5 | 42 | 0.883721 | 4 | 43 | 9.5 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.093023 | 43 | 1 | 43 | 43 | 0.974359 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
3a1cf3ffc0ac45ccfde9463113284b0f8443f858 | 50 | py | Python | networks/modular_downscaling_model/config_interface/__init__.py | khoehlein/CNNs-for-Wind-Field-Downscaling | eb8418d4d893fcb2beb929abb241281b7a9b6a95 | [
"MIT"
] | 5 | 2021-05-05T06:08:52.000Z | 2022-03-24T04:57:52.000Z | networks/modular_downscaling_model/config_interface/__init__.py | khoehlein/CNNs-for-Wind-Field-Downscaling | eb8418d4d893fcb2beb929abb241281b7a9b6a95 | [
"MIT"
] | null | null | null | networks/modular_downscaling_model/config_interface/__init__.py | khoehlein/CNNs-for-Wind-Field-Downscaling | eb8418d4d893fcb2beb929abb241281b7a9b6a95 | [
"MIT"
] | 2 | 2021-08-07T05:18:05.000Z | 2022-03-31T03:48:37.000Z | from .ModelConfigurator import ModelConfigurator
| 16.666667 | 48 | 0.88 | 4 | 50 | 11 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.1 | 50 | 2 | 49 | 25 | 0.977778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
3a25d6eb914626a22388c94ae8469d5f002dab31 | 49 | py | Python | recipes/recipes_emscripten/biopython/test_import_biopython.py | emscripten-forge/recipes | 62cb3e146abc8945ac210f38e4e47c080698eae5 | [
"MIT"
] | 1 | 2022-03-10T16:50:56.000Z | 2022-03-10T16:50:56.000Z | recipes/recipes_emscripten/biopython/test_import_biopython.py | emscripten-forge/recipes | 62cb3e146abc8945ac210f38e4e47c080698eae5 | [
"MIT"
] | 9 | 2022-03-18T09:26:38.000Z | 2022-03-29T09:21:51.000Z | recipes/recipes_emscripten/biopython/test_import_biopython.py | emscripten-forge/recipes | 62cb3e146abc8945ac210f38e4e47c080698eae5 | [
"MIT"
] | null | null | null |
def test_import_biopython():
    # Smoke test: passes as long as the 'Bio' (biopython) package is
    # importable in the current environment.
    import Bio
| 12.25 | 28 | 0.673469 | 6 | 49 | 5.166667 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.265306 | 49 | 4 | 29 | 12.25 | 0.861111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 1 | 0 | 1.5 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
3a3f2ac7037a98eca1088b51d842f86ead2322a1 | 280 | py | Python | bio_embeddings/extract/annotations/__init__.py | HannesStark/bio_embeddings | 8c2e2b4a17903495fcced99fccb7f7287daacc49 | [
"MIT"
] | null | null | null | bio_embeddings/extract/annotations/__init__.py | HannesStark/bio_embeddings | 8c2e2b4a17903495fcced99fccb7f7287daacc49 | [
"MIT"
] | 1 | 2021-01-31T20:06:52.000Z | 2021-01-31T20:06:52.000Z | bio_embeddings/extract/annotations/__init__.py | konstin/bio_embeddings | a7de49fd8e152f8e735283818e6f2e2de7b824c8 | [
"MIT"
] | 1 | 2021-12-26T22:20:48.000Z | 2021-12-26T22:20:48.000Z | from bio_embeddings.extract.annotations.Disorder import Disorder
from bio_embeddings.extract.annotations.Location import Location
from bio_embeddings.extract.annotations.Membrane import Membrane
from bio_embeddings.extract.annotations.SecondaryStructure import SecondaryStructure
| 56 | 84 | 0.9 | 32 | 280 | 7.75 | 0.3125 | 0.112903 | 0.274194 | 0.387097 | 0.564516 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.057143 | 280 | 4 | 85 | 70 | 0.939394 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
28b2a7e5416240118120e20b45c13ba2f64bec9f | 1,901 | py | Python | examples/semantic_segmentation/pred_on_folder.py | Rabscuttler/raster-vision-fastai-plugin | 2db542bb78b0b6c03bfa10b4ba60430e14ef51a3 | [
"Apache-2.0"
] | null | null | null | examples/semantic_segmentation/pred_on_folder.py | Rabscuttler/raster-vision-fastai-plugin | 2db542bb78b0b6c03bfa10b4ba60430e14ef51a3 | [
"Apache-2.0"
] | null | null | null | examples/semantic_segmentation/pred_on_folder.py | Rabscuttler/raster-vision-fastai-plugin | 2db542bb78b0b6c03bfa10b4ba60430e14ef51a3 | [
"Apache-2.0"
] | null | null | null | import os
import glob
# root_dir = '/opt/data/true/data'
# imgs = [15196, 13596, 13598, 4796, 3199]
# EPOCHS = 20
# No Fine Tuning
# pred_package = '/opt/data/rv/try2/bundle/duke-seg/predict_package.zip'
# for img in imgs:
# to_predict = '{}/{}.png'.format(root_dir, img)
# output = '/opt/data/true/output20/{}.tif'.format(img)
# os.system("rastervision -p fastai predict {} {} {}".format(pred_package, to_predict, output))
# print('Created prediction segmentation {}.tif'.format(img))
# EPOCHS = 100
# No Fine Tuning
# pred_package = '/opt/data/rv/try2/bundle/duke-seg2/predict_package.zip'
# imgs = [x.split('/')[-1].strip('.png') for x in glob.glob(root_dir + '/*.png')]
#
# for img in imgs:
# to_predict = '{}/{}.png'.format(root_dir, img)
# output = '/opt/data/true/output100/{}.tif'.format(img)
# os.system("rastervision -p fastai predict {} {} {}".format(pred_package, to_predict, output))
# print('Created prediction segmentation {}.tif'.format(img))
# EPOCHS = 500
# No Fine Tuning
# EPOCHS = 500, no fine tuning
pred_package = '/opt/data/rv/try2/bundle/duke-seg3/predict_package.zip'

root_dir = '/opt/data/true/cant'
# BUG FIX: the original used x.split('/')[-1].strip('.jpg'). str.strip removes
# *characters* from both ends, not a suffix -- e.g. 'pajama.jpg'.strip('.jpg')
# -> 'ama' -- so any stem beginning/ending with 'j', 'p', 'g' or '.' was
# mangled. os.path.splitext removes exactly the extension.
images = [os.path.splitext(os.path.basename(x))[0]
          for x in glob.glob(root_dir + '/*.jpg')]

for img in images:
    to_predict = '{}/{}.jpg'.format(root_dir, img)
    output = '/opt/data/true/output_all/{}.tif'.format(img)
    # Shell out to the rastervision CLI (fastai plugin profile) per image.
    os.system("rastervision -p fastai predict {} {} {}".format(pred_package, to_predict, output))
    print('Created prediction segmentation {}.tif'.format(img))
| 37.27451 | 99 | 0.65071 | 268 | 1,901 | 4.511194 | 0.220149 | 0.052109 | 0.079404 | 0.052936 | 0.834574 | 0.784946 | 0.784946 | 0.767577 | 0.740281 | 0.740281 | 0 | 0.028888 | 0.144135 | 1,901 | 50 | 100 | 38.02 | 0.714198 | 0.699632 | 0 | 0 | 0 | 0 | 0.374074 | 0.159259 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0.1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
28bc308d91f70bff1da89d18999e79d33434c272 | 7,491 | py | Python | authors/apps/comments/tests/test_comments.py | andela/ah-backend-realers | f4b0dbde16fed5e95ab3b1b60e365515e1fe6697 | [
"BSD-3-Clause"
] | null | null | null | authors/apps/comments/tests/test_comments.py | andela/ah-backend-realers | f4b0dbde16fed5e95ab3b1b60e365515e1fe6697 | [
"BSD-3-Clause"
] | 20 | 2019-05-27T13:05:44.000Z | 2021-06-10T21:29:36.000Z | authors/apps/comments/tests/test_comments.py | andela/ah-backend-realers | f4b0dbde16fed5e95ab3b1b60e365515e1fe6697 | [
"BSD-3-Clause"
] | 6 | 2019-06-29T11:49:01.000Z | 2020-03-02T12:53:06.000Z | from .test_base import TestBase
import json
from rest_framework import status
from ..models import Comment
class CommentsTests(TestBase):
    """Integration tests for the article-comment CRUD endpoints.

    All fixtures (``article_url``, ``article_slug``, ``article_slug2``,
    ``comment_id``, the auth tokens and the request payloads) are provided
    by :class:`TestBase`.
    """

    # ------------------------------------------------------------------
    # Private helpers -- factor out the URL building, authentication and
    # JSON request boilerplate that was previously repeated in every test.
    # ------------------------------------------------------------------

    def _comments_url(self, slug=None, comment_id=None):
        """Build ``<article_url><slug>/comments/[<comment_id>]``.

        ``slug`` defaults to ``self.article_slug``; ``comment_id`` is
        appended only when given (it may deliberately be a non-integer to
        exercise validation errors).
        """
        if slug is None:
            slug = self.article_slug
        url = '{}{}/comments/'.format(self.article_url, slug)
        if comment_id is not None:
            url = '{}{}'.format(url, comment_id)
        return url

    def _login(self, token):
        """Attach a Bearer token to the test client."""
        self.client.credentials(HTTP_AUTHORIZATION='Bearer ' + str(token))

    def _post(self, url, data):
        """POST ``data`` as JSON and return the response."""
        return self.client.post(url, data=data, format='json')

    def _patch(self, url, data):
        """PATCH ``data`` as JSON and return the response."""
        return self.client.patch(url, data=data, format='json')

    # ------------------------------------------------------------------
    # Create
    # ------------------------------------------------------------------

    def test_create_comment(self):
        self._login(self.token1)
        response = self._post(self._comments_url(), self.comment_data)
        self.assertEqual("we dem' boys, Holla",
                         response.data['comment']['body'])

    def test_create_comment_no_comment_in_data(self):
        self._login(self.token1)
        response = self._post(self._comments_url(),
                              self.request_data_no_comment)
        self.assertEqual("There is no comment in the request data",
                         response.data['error'])

    def test_create_comment_no_body_in_comment(self):
        self._login(self.token1)
        response = self._post(self._comments_url(),
                              self.comment_data_no_body)
        self.assertEqual("The comment body must be provided",
                         response.data['error'])

    def test_create_comment_article_invalid(self):
        self._login(self.token1)
        response = self._post(self._comments_url(slug='baby'),
                              self.comment_data)
        self.assertEqual("Article not found", response.data['errors'][0])

    # ------------------------------------------------------------------
    # Delete
    # ------------------------------------------------------------------

    def test_delete_comment(self):
        # No explicit login: relies on the client state set up in TestBase.
        response = self.client.delete(
            self._comments_url(comment_id=self.comment_id))
        self.assertEqual("Comment successfully deleted",
                         response.data['message'])

    def test_delete_comment_not_yours(self):
        self._login(self.token2)
        response = self.client.delete(
            self._comments_url(comment_id=self.comment_id))
        self.assertEqual("Users can only delete their own comments",
                         response.data['error'])

    def test_delete_comment_not_exist(self):
        response = self.client.delete(self._comments_url(comment_id=40))
        self.assertEqual("Comment not found", response.data['error'])

    def test_delete_comment_bad_id(self):
        response = self.client.delete(self._comments_url(comment_id='tr'))
        self.assertEqual("Comment id must be an integer",
                         response.data['error'])

    # ------------------------------------------------------------------
    # Edit
    # ------------------------------------------------------------------

    def test_edit_comment(self):
        response = self._patch(
            self._comments_url(comment_id=self.comment_id),
            self.comment_data)
        self.assertEqual("Comment successfully updated",
                         response.data['message'])

    def test_edit_comment_not_yours(self):
        self._login(self.token2)
        response = self._patch(
            self._comments_url(comment_id=self.comment_id),
            self.comment_data)
        self.assertEqual("You can't edit someone else's comment",
                         response.data['error'])

    def test_edit_comment_no_comment_in_request(self):
        response = self._patch(
            self._comments_url(comment_id=self.comment_id),
            self.request_data_no_comment)
        self.assertEqual("There is no comment in the request data",
                         response.data['error'])

    def test_edit_comment_no_body_in_comment(self):
        response = self._patch(
            self._comments_url(comment_id=self.comment_id),
            self.comment_data_no_body)
        self.assertEqual("The comment body must be provided",
                         response.data['error'])

    def test_edit_comment_not_exist(self):
        response = self._patch(self._comments_url(comment_id=40),
                               self.comment_data_no_body)
        self.assertEqual("Comment not found", response.data['error'])

    def test_edit_comment_bad_id(self):
        response = self._patch(self._comments_url(comment_id='dw'),
                               self.comment_data_no_body)
        self.assertEqual("Comment id must be an integer",
                         response.data['error'])

    # ------------------------------------------------------------------
    # Read
    # ------------------------------------------------------------------

    def test_get_a_comment(self):
        response = self.client.get(
            self._comments_url(comment_id=self.comment_id))
        self.assertEqual("we dem' boys, Holla", response.data[0]['body'])

    def test_get_a_comment_article_invalid(self):
        response = self.client.get(
            self._comments_url(slug='baby', comment_id=self.comment_id))
        self.assertEqual("Article not found", response.data['errors'][0])

    def test_get_a_comment_invalid_id(self):
        response = self.client.get(self._comments_url(comment_id=67))
        self.assertEqual("Comment not found", response.data['error'])

    def test_get_a_comment_bad_id(self):
        response = self.client.get(self._comments_url(comment_id='dwdw'))
        self.assertEqual("Comment id must be an integer",
                         response.data['error'])

    def test_get_all_comments(self):
        response = self.client.get(self._comments_url())
        self.assertEqual("we dem' boys, Holla", response.data[0]['body'])

    def test_get_all_comments_none_yet(self):
        response = self.client.get(
            self._comments_url(slug=self.article_slug2))
        self.assertEqual("No comments on this article yet",
                         response.data['error'])

    # ------------------------------------------------------------------
    # Model
    # ------------------------------------------------------------------

    def test_model_class_stringify(self):
        """Comment.__str__ must return the comment body."""
        self._login(self.token1)
        comment = Comment.objects.first()
        self.assertEqual(str(comment), comment.body)
| 33.743243 | 87 | 0.629288 | 892 | 7,491 | 5.069507 | 0.107623 | 0.092437 | 0.06192 | 0.075188 | 0.889872 | 0.858912 | 0.855374 | 0.831933 | 0.766033 | 0.752764 | 0 | 0.003261 | 0.263116 | 7,491 | 221 | 88 | 33.895928 | 0.815942 | 0 | 0 | 0.634286 | 0 | 0 | 0.19183 | 0.09171 | 0 | 0 | 0 | 0 | 0.12 | 1 | 0.12 | false | 0 | 0.022857 | 0 | 0.148571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
3a6ed4ff701eb46eb25983bd571364cdc93a1494 | 5,631 | py | Python | tests/podcasts/data/soundcloud_archive_page2.py | tnoff/hathor | 237dfe37e93443b5a298f9f4b258d15429d754ea | [
"BSD-2-Clause-FreeBSD"
] | 2 | 2017-05-03T17:52:27.000Z | 2019-04-04T22:29:27.000Z | tests/podcasts/data/soundcloud_archive_page2.py | tnoff/hathor | 237dfe37e93443b5a298f9f4b258d15429d754ea | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | tests/podcasts/data/soundcloud_archive_page2.py | tnoff/hathor | 237dfe37e93443b5a298f9f4b258d15429d754ea | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | DATA = {
"collection": [
{
"attachments_uri": "https://api.soundcloud.com/tracks/211526851/attachments",
"video_url": None,
"track_type": None,
"release_month": None,
"original_format": "mp3",
"label_name": None,
"duration": 13241535,
"id": 211526851,
"streamable": True,
"user_id": 98381617,
"title": "The MMA Hour with Ariel Helwani - Episode 287",
"favoritings_count": 15,
"commentable": True,
"label_id": None,
"download_url": "https://api.soundcloud.com/tracks/211526851/download",
"state": "finished",
"downloadable": True,
"waveform_url": "https://w1.sndcdn.com/yBwNNBZkO6lV_m.png",
"sharing": "public",
"description": "Ariel Helwani speaks to Dan Henderson, Gilbert Melendez, Makwan Amirkhani, Rafael Cordeiro, Michael Chandler, Joe Schilling, and John Pollock.",
"release_day": None,
"purchase_url": None,
"permalink": "the-mma-hour-episode-287",
"comment_count": 0,
"purchase_title": None,
"stream_url": "https://api.soundcloud.com/tracks/211526851/stream",
"last_modified": "2015/07/20 23:14:13 +0000",
"user": {
"username": "The MMA Hour with Helwani",
"permalink": "the-mma-hour",
"avatar_url": "https://i1.sndcdn.com/avatars-000099055070-aknpm5-large.jpg",
"kind": "user",
"uri": "https://api.soundcloud.com/users/98381617",
"last_modified": "2016/05/18 13:04:21 +0000",
"permalink_url": "http://soundcloud.com/the-mma-hour",
"id": 98381617
},
"genre": "The MMA Hour",
"isrc": None,
"download_count": 132,
"permalink_url": "http://soundcloud.com/the-mma-hour/the-mma-hour-episode-287",
"playback_count": 43412,
"kind": "track",
"release_year": None,
"license": "all-rights-reserved",
"artwork_url": None,
"created_at": "2015/06/22 22:15:22 +0000",
"bpm": None,
"uri": "https://api.soundcloud.com/tracks/211526851",
"original_content_size": 211852382,
"key_signature": None,
"release": None,
"tag_list": "MMA \"Ariel Helwani\" \"MMA Fighting\" TheMMAHour \"MMA Hour\" MMAHour Interviews Interview SBNation \"SB Nation\" UFC \"Dan Henderson\" \"Gilbert Melendez\" \"Makwan Amirkhani\" \"Rafael Cordeiro\" \"Michael Chandler\" \"Joe Schilling\" \"John Pollock\"",
"embeddable_by": "all"
},
{
"attachments_uri": "https://api.soundcloud.com/tracks/210501972/attachments",
"video_url": None,
"track_type": None,
"release_month": None,
"original_format": "mp3",
"label_name": None,
"duration": 13293236,
"id": 210501972,
"streamable": True,
"user_id": 98381617,
"title": "The MMA Hour - Episode 286",
"favoritings_count": 18,
"commentable": True,
"label_id": None,
"download_url": "https://api.soundcloud.com/tracks/210501972/download",
"state": "finished",
"downloadable": True,
"waveform_url": "https://w1.sndcdn.com/4vwatxk9a0zn_m.png",
"sharing": "public",
"description": "Featuring Alistair Overeem, Joanna Jedrzejczyk, Lyoto Machida, Eddie Alvarez, Scott Coker, Jessica Aguilar, Noad Lahat, Lex McMahon, Jeff Aronson, and Marc Raimondi.",
"release_day": None,
"purchase_url": None,
"permalink": "the-mma-hour-episode-286",
"comment_count": 0,
"purchase_title": None,
"stream_url": "https://api.soundcloud.com/tracks/210501972/stream",
"last_modified": "2015/06/15 23:31:30 +0000",
"user": {
"username": "The MMA Hour with Helwani",
"permalink": "the-mma-hour",
"avatar_url": "https://i1.sndcdn.com/avatars-000099055070-aknpm5-large.jpg",
"kind": "user",
"uri": "https://api.soundcloud.com/users/98381617",
"last_modified": "2016/05/18 13:04:21 +0000",
"permalink_url": "http://soundcloud.com/the-mma-hour",
"id": 98381617
},
"genre": "The MMA Hour",
"isrc": None,
"download_count": 141,
"permalink_url": "http://soundcloud.com/the-mma-hour/the-mma-hour-episode-286",
"playback_count": 49454,
"kind": "track",
"release_year": None,
"license": "all-rights-reserved",
"artwork_url": None,
"created_at": "2015/06/15 23:12:41 +0000",
"bpm": None,
"uri": "https://api.soundcloud.com/tracks/210501972",
"original_content_size": 212685330,
"key_signature": None,
"release": None,
"tag_list": "MMA \"Ariel Helwani\" \"MMA Fighting\" TheMMAHour \"MMA Hour\" MMAHour Interviews Interview SBNation \"SB Nation\" UFC \"Alistair Overeem\" \"Joanna Jedrzejczyk\" \"Lyoto Machida\" \"Eddie Alvarez\" \"Scott Coker\" \"Jessica Aguilar\" \"Noad Lahat\" \"Lex McMahon\" \"Jeff Aronson\" \"Marc Raimondi\"",
"embeddable_by": "all"
},
]
}
| 48.128205 | 327 | 0.535074 | 564 | 5,631 | 5.228723 | 0.299645 | 0.042726 | 0.054256 | 0.071211 | 0.855205 | 0.828756 | 0.828756 | 0.776534 | 0.776534 | 0.719566 | 0 | 0.092037 | 0.313088 | 5,631 | 116 | 328 | 48.543103 | 0.670372 | 0 | 0 | 0.603448 | 0 | 0.034483 | 0.501865 | 0.015983 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
3a70e522b9f1a404a14ef1807c2d3fd45e52b9d9 | 150 | py | Python | test/__init__.py | StefanBruens/pybeam | 5b7e79e2cc48265c71cb77e3ec0eae97676509fa | [
"MIT"
] | 15 | 2015-03-22T05:27:04.000Z | 2020-05-10T23:17:50.000Z | test/__init__.py | StefanBruens/pybeam | 5b7e79e2cc48265c71cb77e3ec0eae97676509fa | [
"MIT"
] | 9 | 2018-03-16T15:20:37.000Z | 2020-11-06T14:05:55.000Z | test/__init__.py | StefanBruens/pybeam | 5b7e79e2cc48265c71cb77e3ec0eae97676509fa | [
"MIT"
] | 8 | 2015-03-03T16:34:46.000Z | 2022-02-26T03:53:13.000Z | from test.schema_beam import BEAMConstructTest
from test.schema_eetf import EETFConstructTest
from test.beam_file import BEAMFileTest
import unittest
| 30 | 46 | 0.886667 | 20 | 150 | 6.5 | 0.55 | 0.184615 | 0.215385 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.093333 | 150 | 4 | 47 | 37.5 | 0.955882 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
3ad39364b775ae110df36a5c61e73287c5f90004 | 3,341 | py | Python | docker/sane-doc-reports/src/tests/library/elements/test_header_checks.py | ThisIsNotTheUserYouAreLookingFor/dockerfiles | f92673b0d15c457e4abe215cf260afbb5b25cf2e | [
"MIT"
] | 2 | 2020-12-14T15:43:39.000Z | 2020-12-14T15:43:49.000Z | docker/sane-doc-reports/src/tests/library/elements/test_header_checks.py | ThisIsNotTheUserYouAreLookingFor/dockerfiles | f92673b0d15c457e4abe215cf260afbb5b25cf2e | [
"MIT"
] | 7 | 2022-03-30T09:39:00.000Z | 2022-03-31T08:49:17.000Z | docker/sane-doc-reports/src/tests/library/elements/test_header_checks.py | ThisIsNotTheUserYouAreLookingFor/dockerfiles | f92673b0d15c457e4abe215cf260afbb5b25cf2e | [
"MIT"
] | 1 | 2021-06-17T09:27:04.000Z | 2021-06-17T09:27:04.000Z | from docx.table import Table
from sane_doc_reports.conf import XSOAR_LOGO_BASE64
from sane_doc_reports.populate.Report import Report
from tests import utils
from tests.utils import _transform
def test_logo_works_in_regular_report():
report = Report(*_transform('grid_checks/fullgrid.json'), options={
'customerLogo': XSOAR_LOGO_BASE64,
'demistoLogo': XSOAR_LOGO_BASE64
})
report.populate_report()
d = report.document
table = next(utils.iter_block_items(d))
assert isinstance(table, Table)
# Check headers for 2 images (customer logo)
assert len(d.sections[0].header._element.xpath('.//w:drawing')) == 2
def test_logo_works_in_regular_report_svg():
report = Report(*_transform('grid_checks/fullgrid.json'), options={
'customerLogo': XSOAR_LOGO_BASE64,
'demistoLogo': "data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIj8+CjxzdmcgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIiB3aWR0aD0iNDY2IiBoZWlnaHQ9IjQ2NiIgdmlld0JveD0iLTQwIC00MCA4MCA4MCI+Cgk8Y2lyY2xlIHI9IjM5Ii8+Cgk8cGF0aCBkPSJNMCwzOGEzOCwzOCAwIDAgMSAwLC03NmExOSwxOSAwIDAgMSAwLDM4YTE5LDE5IDAgMCAwIDAsMzgiIGZpbGw9IiNmZmYiLz4KCTxjaXJjbGUgY3k9IjE5IiByPSI1IiBmaWxsPSIjZmZmIi8+Cgk8Y2lyY2xlIGN5PSItMTkiIHI9IjUiLz4KPC9zdmc+"
})
report.populate_report()
d = report.document
table = next(utils.iter_block_items(d))
assert isinstance(table, Table)
# Check headers for 2 images (customer logo)
assert len(d.sections[0].header._element.xpath('.//w:drawing')) == 2
def test_logo_works_in_regular_report_without_customer_logo():
report = Report(*_transform('grid_checks/fullgrid.json'), options={
'customerLogo': None,
'demistoLogo': XSOAR_LOGO_BASE64
})
report.populate_report()
d = report.document
table = next(utils.iter_block_items(d))
assert isinstance(table, Table)
# Check headers for 2 images (customer logo)
assert len(d.sections[0].header._element.xpath('.//w:drawing')) == 1
def test_logo_not_added_if_headers_disabled():
report = Report(*_transform('grid_checks/fullgrid.json'), options={
'customerLogo': XSOAR_LOGO_BASE64,
'demistoLogo': XSOAR_LOGO_BASE64,
'disableHeaders': True
})
report.populate_report()
d = report.document
table = next(utils.iter_block_items(d))
assert isinstance(table, Table)
# Check headers for 2 images (customer logo)
assert len(d.sections[0].header._element.xpath('.//w:drawing')) == 0
def test_empty_customer_logo():
report = Report(*_transform('grid_checks/fullgrid.json'), options={
'customerLogo': '',
'demistoLogo': XSOAR_LOGO_BASE64,
})
report.populate_report()
d = report.document
table = next(utils.iter_block_items(d))
assert isinstance(table, Table)
# Check headers for 2 images (customer logo)
assert len(d.sections[0].header._element.xpath('.//w:drawing')) == 1
def test_empty_demisto_logo():
report = Report(*_transform('grid_checks/fullgrid.json'), options={
'customerLogo': '',
'demistoLogo': '',
})
report.populate_report()
d = report.document
table = next(utils.iter_block_items(d))
assert isinstance(table, Table)
# Check headers for 2 images (customer logo)
assert len(d.sections[0].header._element.xpath('.//w:drawing')) == 0
| 36.315217 | 427 | 0.725232 | 378 | 3,341 | 6.177249 | 0.185185 | 0.030835 | 0.051392 | 0.06424 | 0.727195 | 0.727195 | 0.727195 | 0.713919 | 0.713919 | 0.713919 | 0 | 0.032074 | 0.160132 | 3,341 | 91 | 428 | 36.714286 | 0.800071 | 0.076923 | 0 | 0.772727 | 0 | 0 | 0.252194 | 0.179396 | 0 | 0 | 0 | 0 | 0.181818 | 1 | 0.090909 | false | 0 | 0.075758 | 0 | 0.166667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
3aee35badfd66ae8bd99fcd6e71080bc5ed05e59 | 148 | py | Python | discord/ext/commands/errors.py | kuzaku-developers/disnake | 61cc1ad4c2bafd39726a1447c85f7e469e41af10 | [
"MIT"
] | null | null | null | discord/ext/commands/errors.py | kuzaku-developers/disnake | 61cc1ad4c2bafd39726a1447c85f7e469e41af10 | [
"MIT"
] | null | null | null | discord/ext/commands/errors.py | kuzaku-developers/disnake | 61cc1ad4c2bafd39726a1447c85f7e469e41af10 | [
"MIT"
] | null | null | null | from disnake.ext.commands.errors import *
from disnake.ext.commands.errors import __dict__ as __original_dict__
locals().update(__original_dict__)
| 29.6 | 69 | 0.837838 | 20 | 148 | 5.5 | 0.55 | 0.2 | 0.254545 | 0.4 | 0.618182 | 0.618182 | 0 | 0 | 0 | 0 | 0 | 0 | 0.081081 | 148 | 4 | 70 | 37 | 0.808824 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
aaf09fe9df52fc5e88a3865d59a1bd4e10165d3b | 24,779 | py | Python | cottonformation/res/managedblockchain.py | MacHu-GWU/cottonformation-project | 23e28c08cfb5a7cc0db6dbfdb1d7e1585c773f3b | [
"BSD-2-Clause"
] | 5 | 2021-07-22T03:45:59.000Z | 2021-12-17T21:07:14.000Z | cottonformation/res/managedblockchain.py | MacHu-GWU/cottonformation-project | 23e28c08cfb5a7cc0db6dbfdb1d7e1585c773f3b | [
"BSD-2-Clause"
] | 1 | 2021-06-25T18:01:31.000Z | 2021-06-25T18:01:31.000Z | cottonformation/res/managedblockchain.py | MacHu-GWU/cottonformation-project | 23e28c08cfb5a7cc0db6dbfdb1d7e1585c773f3b | [
"BSD-2-Clause"
] | 2 | 2021-06-27T03:08:21.000Z | 2021-06-28T22:15:51.000Z | # -*- coding: utf-8 -*-
"""
This module declares AWS CloudFormation resource and property classes for the AWS::ManagedBlockchain service.
"""
import attr
import typing
from ..core.model import (
Property, Resource, Tag, GetAtt, TypeHint, TypeCheck,
)
from ..core.constant import AttrMeta
#--- Property declaration ---
@attr.s
class PropNodeNodeConfiguration(Property):
    """
    AWS Object Type = "AWS::ManagedBlockchain::Node.NodeConfiguration"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-node-nodeconfiguration.html
    Property Document:
    - ``rp_AvailabilityZone``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-node-nodeconfiguration.html#cfn-managedblockchain-node-nodeconfiguration-availabilityzone
    - ``rp_InstanceType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-node-nodeconfiguration.html#cfn-managedblockchain-node-nodeconfiguration-instancetype
    """
    AWS_OBJECT_TYPE = "AWS::ManagedBlockchain::Node.NodeConfiguration"
    # rp_* fields use a bare instance_of validator (no optional() wrapper), so
    # a non-string / unset value fails attrs validation -- presumably the
    # ``rp_`` prefix marks CloudFormation-required properties; confirm against
    # the Property base class (defined outside this file).
    # Maps to CloudFormation property "AvailabilityZone" (see metadata).
    rp_AvailabilityZone: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "AvailabilityZone"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-node-nodeconfiguration.html#cfn-managedblockchain-node-nodeconfiguration-availabilityzone"""
    # Maps to CloudFormation property "InstanceType".
    rp_InstanceType: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "InstanceType"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-node-nodeconfiguration.html#cfn-managedblockchain-node-nodeconfiguration-instancetype"""
@attr.s
class PropMemberNetworkFabricConfiguration(Property):
    """
    AWS Object Type = "AWS::ManagedBlockchain::Member.NetworkFabricConfiguration"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-networkfabricconfiguration.html
    Property Document:
    - ``rp_Edition``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-networkfabricconfiguration.html#cfn-managedblockchain-member-networkfabricconfiguration-edition
    """
    AWS_OBJECT_TYPE = "AWS::ManagedBlockchain::Member.NetworkFabricConfiguration"
    # Maps to CloudFormation property "Edition"; bare instance_of validator
    # (no optional() wrapper) -- the rp_ prefix appears to mark required
    # properties, TODO confirm against the Property base class.
    rp_Edition: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Edition"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-networkfabricconfiguration.html#cfn-managedblockchain-member-networkfabricconfiguration-edition"""
@attr.s
class PropMemberApprovalThresholdPolicy(Property):
    """
    AWS Object Type = "AWS::ManagedBlockchain::Member.ApprovalThresholdPolicy"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-approvalthresholdpolicy.html
    Property Document:
    - ``p_ProposalDurationInHours``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-approvalthresholdpolicy.html#cfn-managedblockchain-member-approvalthresholdpolicy-proposaldurationinhours
    - ``p_ThresholdComparator``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-approvalthresholdpolicy.html#cfn-managedblockchain-member-approvalthresholdpolicy-thresholdcomparator
    - ``p_ThresholdPercentage``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-approvalthresholdpolicy.html#cfn-managedblockchain-member-approvalthresholdpolicy-thresholdpercentage
    """
    AWS_OBJECT_TYPE = "AWS::ManagedBlockchain::Member.ApprovalThresholdPolicy"
    # All fields here are p_ (validators wrapped in optional()), so None is
    # accepted -- i.e. every property of this policy is optional.
    # Maps to CloudFormation property "ProposalDurationInHours".
    p_ProposalDurationInHours: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "ProposalDurationInHours"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-approvalthresholdpolicy.html#cfn-managedblockchain-member-approvalthresholdpolicy-proposaldurationinhours"""
    # Maps to CloudFormation property "ThresholdComparator".
    p_ThresholdComparator: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "ThresholdComparator"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-approvalthresholdpolicy.html#cfn-managedblockchain-member-approvalthresholdpolicy-thresholdcomparator"""
    # Maps to CloudFormation property "ThresholdPercentage".
    p_ThresholdPercentage: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "ThresholdPercentage"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-approvalthresholdpolicy.html#cfn-managedblockchain-member-approvalthresholdpolicy-thresholdpercentage"""
@attr.s
class PropMemberVotingPolicy(Property):
    """
    AWS Object Type = "AWS::ManagedBlockchain::Member.VotingPolicy"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-votingpolicy.html
    Property Document:
    - ``p_ApprovalThresholdPolicy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-votingpolicy.html#cfn-managedblockchain-member-votingpolicy-approvalthresholdpolicy
    """
    AWS_OBJECT_TYPE = "AWS::ManagedBlockchain::Member.VotingPolicy"
    # Optional nested property; the converter accepts either an existing
    # PropMemberApprovalThresholdPolicy instance or a plain dict, which
    # ``from_dict`` turns into an instance before the validator runs.
    p_ApprovalThresholdPolicy: typing.Union['PropMemberApprovalThresholdPolicy', dict] = attr.ib(
        default=None,
        converter=PropMemberApprovalThresholdPolicy.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(PropMemberApprovalThresholdPolicy)),
        metadata={AttrMeta.PROPERTY_NAME: "ApprovalThresholdPolicy"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-votingpolicy.html#cfn-managedblockchain-member-votingpolicy-approvalthresholdpolicy"""
@attr.s
class PropMemberMemberFabricConfiguration(Property):
    """
    AWS Object Type = "AWS::ManagedBlockchain::Member.MemberFabricConfiguration"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-memberfabricconfiguration.html
    Property Document:
    - ``rp_AdminPassword``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-memberfabricconfiguration.html#cfn-managedblockchain-member-memberfabricconfiguration-adminpassword
    - ``rp_AdminUsername``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-memberfabricconfiguration.html#cfn-managedblockchain-member-memberfabricconfiguration-adminusername
    """
    AWS_OBJECT_TYPE = "AWS::ManagedBlockchain::Member.MemberFabricConfiguration"
    # Maps to CloudFormation property "AdminPassword". NOTE(review): this is a
    # credential -- templates built from it will carry the password in plain
    # text, which is inherent to this CloudFormation property.
    rp_AdminPassword: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "AdminPassword"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-memberfabricconfiguration.html#cfn-managedblockchain-member-memberfabricconfiguration-adminpassword"""
    # Maps to CloudFormation property "AdminUsername".
    rp_AdminUsername: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "AdminUsername"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-memberfabricconfiguration.html#cfn-managedblockchain-member-memberfabricconfiguration-adminusername"""
@attr.s
class PropMemberNetworkFrameworkConfiguration(Property):
    """
    AWS Object Type = "AWS::ManagedBlockchain::Member.NetworkFrameworkConfiguration"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-networkframeworkconfiguration.html
    Property Document:
    - ``p_NetworkFabricConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-networkframeworkconfiguration.html#cfn-managedblockchain-member-networkframeworkconfiguration-networkfabricconfiguration
    """
    AWS_OBJECT_TYPE = "AWS::ManagedBlockchain::Member.NetworkFrameworkConfiguration"
    # Optional nested property; plain dicts are coerced into
    # PropMemberNetworkFabricConfiguration by the from_dict converter before
    # the optional instance_of validator runs.
    p_NetworkFabricConfiguration: typing.Union['PropMemberNetworkFabricConfiguration', dict] = attr.ib(
        default=None,
        converter=PropMemberNetworkFabricConfiguration.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(PropMemberNetworkFabricConfiguration)),
        metadata={AttrMeta.PROPERTY_NAME: "NetworkFabricConfiguration"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-networkframeworkconfiguration.html#cfn-managedblockchain-member-networkframeworkconfiguration-networkfabricconfiguration"""
@attr.s
class PropMemberNetworkConfiguration(Property):
    """
    AWS Object Type = "AWS::ManagedBlockchain::Member.NetworkConfiguration"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-networkconfiguration.html
    Property Document:
    - ``rp_Framework``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-networkconfiguration.html#cfn-managedblockchain-member-networkconfiguration-framework
    - ``rp_FrameworkVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-networkconfiguration.html#cfn-managedblockchain-member-networkconfiguration-frameworkversion
    - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-networkconfiguration.html#cfn-managedblockchain-member-networkconfiguration-name
    - ``rp_VotingPolicy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-networkconfiguration.html#cfn-managedblockchain-member-networkconfiguration-votingpolicy
    - ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-networkconfiguration.html#cfn-managedblockchain-member-networkconfiguration-description
    - ``p_NetworkFrameworkConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-networkconfiguration.html#cfn-managedblockchain-member-networkconfiguration-networkframeworkconfiguration
    """
    AWS_OBJECT_TYPE = "AWS::ManagedBlockchain::Member.NetworkConfiguration"

    # Field naming convention in this generated class:
    #   ``rp_`` = required property — the validator is NOT wrapped in
    #   attr.validators.optional(), so leaving the None default in place
    #   fails validation at construction time.
    #   ``p_``  = optional property — None is accepted.
    rp_Framework: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Framework"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-networkconfiguration.html#cfn-managedblockchain-member-networkconfiguration-framework"""
    rp_FrameworkVersion: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "FrameworkVersion"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-networkconfiguration.html#cfn-managedblockchain-member-networkconfiguration-frameworkversion"""
    rp_Name: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Name"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-networkconfiguration.html#cfn-managedblockchain-member-networkconfiguration-name"""
    # Required nested property: a plain dict is converted via .from_dict
    # before the (non-optional) instance_of validation runs.
    rp_VotingPolicy: typing.Union['PropMemberVotingPolicy', dict] = attr.ib(
        default=None,
        converter=PropMemberVotingPolicy.from_dict,
        validator=attr.validators.instance_of(PropMemberVotingPolicy),
        metadata={AttrMeta.PROPERTY_NAME: "VotingPolicy"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-networkconfiguration.html#cfn-managedblockchain-member-networkconfiguration-votingpolicy"""
    p_Description: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Description"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-networkconfiguration.html#cfn-managedblockchain-member-networkconfiguration-description"""
    p_NetworkFrameworkConfiguration: typing.Union['PropMemberNetworkFrameworkConfiguration', dict] = attr.ib(
        default=None,
        converter=PropMemberNetworkFrameworkConfiguration.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(PropMemberNetworkFrameworkConfiguration)),
        metadata={AttrMeta.PROPERTY_NAME: "NetworkFrameworkConfiguration"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-networkconfiguration.html#cfn-managedblockchain-member-networkconfiguration-networkframeworkconfiguration"""
@attr.s
class PropMemberMemberFrameworkConfiguration(Property):
    """
    AWS Object Type = "AWS::ManagedBlockchain::Member.MemberFrameworkConfiguration"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-memberframeworkconfiguration.html
    Property Document:
    - ``p_MemberFabricConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-memberframeworkconfiguration.html#cfn-managedblockchain-member-memberframeworkconfiguration-memberfabricconfiguration
    """
    AWS_OBJECT_TYPE = "AWS::ManagedBlockchain::Member.MemberFrameworkConfiguration"

    # Optional property (``p_`` prefix): None is accepted; a plain dict is
    # converted to a PropMemberMemberFabricConfiguration via .from_dict.
    p_MemberFabricConfiguration: typing.Union['PropMemberMemberFabricConfiguration', dict] = attr.ib(
        default=None,
        converter=PropMemberMemberFabricConfiguration.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(PropMemberMemberFabricConfiguration)),
        metadata={AttrMeta.PROPERTY_NAME: "MemberFabricConfiguration"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-memberframeworkconfiguration.html#cfn-managedblockchain-member-memberframeworkconfiguration-memberfabricconfiguration"""
@attr.s
class PropMemberMemberConfiguration(Property):
    """
    AWS Object Type = "AWS::ManagedBlockchain::Member.MemberConfiguration"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-memberconfiguration.html
    Property Document:
    - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-memberconfiguration.html#cfn-managedblockchain-member-memberconfiguration-name
    - ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-memberconfiguration.html#cfn-managedblockchain-member-memberconfiguration-description
    - ``p_MemberFrameworkConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-memberconfiguration.html#cfn-managedblockchain-member-memberconfiguration-memberframeworkconfiguration
    """
    AWS_OBJECT_TYPE = "AWS::ManagedBlockchain::Member.MemberConfiguration"

    # ``rp_`` = required (validator not wrapped in optional(), so the None
    # default fails validation); ``p_`` = optional (None accepted).
    rp_Name: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Name"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-memberconfiguration.html#cfn-managedblockchain-member-memberconfiguration-name"""
    p_Description: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Description"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-memberconfiguration.html#cfn-managedblockchain-member-memberconfiguration-description"""
    # Optional nested property: dict input converted via .from_dict.
    p_MemberFrameworkConfiguration: typing.Union['PropMemberMemberFrameworkConfiguration', dict] = attr.ib(
        default=None,
        converter=PropMemberMemberFrameworkConfiguration.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(PropMemberMemberFrameworkConfiguration)),
        metadata={AttrMeta.PROPERTY_NAME: "MemberFrameworkConfiguration"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-managedblockchain-member-memberconfiguration.html#cfn-managedblockchain-member-memberconfiguration-memberframeworkconfiguration"""
#--- Resource declaration ---
@attr.s
class Member(Resource):
    """
    AWS Object Type = "AWS::ManagedBlockchain::Member"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-member.html
    Property Document:
    - ``rp_MemberConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-member.html#cfn-managedblockchain-member-memberconfiguration
    - ``p_InvitationId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-member.html#cfn-managedblockchain-member-invitationid
    - ``p_NetworkConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-member.html#cfn-managedblockchain-member-networkconfiguration
    - ``p_NetworkId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-member.html#cfn-managedblockchain-member-networkid
    """
    AWS_OBJECT_TYPE = "AWS::ManagedBlockchain::Member"

    # ``rp_`` = required (validator not wrapped in optional(), so the None
    # default fails validation); ``p_`` = optional (None accepted).
    rp_MemberConfiguration: typing.Union['PropMemberMemberConfiguration', dict] = attr.ib(
        default=None,
        converter=PropMemberMemberConfiguration.from_dict,
        validator=attr.validators.instance_of(PropMemberMemberConfiguration),
        metadata={AttrMeta.PROPERTY_NAME: "MemberConfiguration"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-member.html#cfn-managedblockchain-member-memberconfiguration"""
    p_InvitationId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "InvitationId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-member.html#cfn-managedblockchain-member-invitationid"""
    p_NetworkConfiguration: typing.Union['PropMemberNetworkConfiguration', dict] = attr.ib(
        default=None,
        converter=PropMemberNetworkConfiguration.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(PropMemberNetworkConfiguration)),
        metadata={AttrMeta.PROPERTY_NAME: "NetworkConfiguration"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-member.html#cfn-managedblockchain-member-networkconfiguration"""
    p_NetworkId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "NetworkId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-member.html#cfn-managedblockchain-member-networkid"""

    # ``rv_`` properties: CloudFormation return values — each builds an
    # Fn::GetAtt reference against this resource.
    @property
    def rv_MemberId(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-member.html#aws-resource-managedblockchain-member-return-values"""
        return GetAtt(resource=self, attr_name="MemberId")

    @property
    def rv_NetworkId(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-member.html#aws-resource-managedblockchain-member-return-values"""
        return GetAtt(resource=self, attr_name="NetworkId")
@attr.s
class Node(Resource):
    """
    AWS Object Type = "AWS::ManagedBlockchain::Node"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-node.html
    Property Document:
    - ``rp_NetworkId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-node.html#cfn-managedblockchain-node-networkid
    - ``rp_NodeConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-node.html#cfn-managedblockchain-node-nodeconfiguration
    - ``p_MemberId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-node.html#cfn-managedblockchain-node-memberid
    """
    AWS_OBJECT_TYPE = "AWS::ManagedBlockchain::Node"

    # ``rp_`` = required (validator not wrapped in optional(), so the None
    # default fails validation); ``p_`` = optional (None accepted).
    rp_NetworkId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "NetworkId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-node.html#cfn-managedblockchain-node-networkid"""
    rp_NodeConfiguration: typing.Union['PropNodeNodeConfiguration', dict] = attr.ib(
        default=None,
        converter=PropNodeNodeConfiguration.from_dict,
        validator=attr.validators.instance_of(PropNodeNodeConfiguration),
        metadata={AttrMeta.PROPERTY_NAME: "NodeConfiguration"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-node.html#cfn-managedblockchain-node-nodeconfiguration"""
    p_MemberId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "MemberId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-node.html#cfn-managedblockchain-node-memberid"""

    # ``rv_`` properties: CloudFormation return values — each builds an
    # Fn::GetAtt reference against this resource.
    @property
    def rv_MemberId(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-node.html#aws-resource-managedblockchain-node-return-values"""
        return GetAtt(resource=self, attr_name="MemberId")

    @property
    def rv_NodeId(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-node.html#aws-resource-managedblockchain-node-return-values"""
        return GetAtt(resource=self, attr_name="NodeId")

    @property
    def rv_Arn(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-node.html#aws-resource-managedblockchain-node-return-values"""
        return GetAtt(resource=self, attr_name="Arn")

    @property
    def rv_NetworkId(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-managedblockchain-node.html#aws-resource-managedblockchain-node-return-values"""
        return GetAtt(resource=self, attr_name="NetworkId")
| 62.573232 | 262 | 0.784212 | 2,377 | 24,779 | 8.091292 | 0.040808 | 0.142307 | 0.040607 | 0.062757 | 0.858732 | 0.858732 | 0.835543 | 0.768835 | 0.768835 | 0.767899 | 0 | 0.000045 | 0.098753 | 24,779 | 395 | 263 | 62.731646 | 0.861186 | 0.37552 | 0 | 0.39801 | 0 | 0 | 0.130541 | 0.099591 | 0 | 0 | 0 | 0 | 0 | 1 | 0.029851 | false | 0.00995 | 0.019901 | 0 | 0.323383 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
aaf46dda1dd84c8d52af754525353ad38e5c22de | 174 | py | Python | cse/admin.py | Kunal614/Resources | 6bba32f9f70554ddc658e9dab864433d150e46d2 | [
"Apache-2.0"
] | 1 | 2021-10-08T10:42:39.000Z | 2021-10-08T10:42:39.000Z | cse/admin.py | Kunal614/Resources | 6bba32f9f70554ddc658e9dab864433d150e46d2 | [
"Apache-2.0"
] | 1 | 2021-07-10T04:22:44.000Z | 2021-07-10T04:22:44.000Z | cse/admin.py | Kunal614/Resources | 6bba32f9f70554ddc658e9dab864433d150e46d2 | [
"Apache-2.0"
] | null | null | null | from django.contrib import admin
from .models import *
admin.site.register(semester)
admin.site.register(subject)
admin.site.register(Books)
admin.site.register(Other_stuff) | 24.857143 | 32 | 0.821839 | 25 | 174 | 5.68 | 0.52 | 0.253521 | 0.478873 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.068966 | 174 | 7 | 33 | 24.857143 | 0.876543 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
c931e62ee32c8a2216391c72daa2845f602a8fe1 | 2,521 | py | Python | tests/varlookup_tests.py | dantaki/svtools | 9e13813ba25d0588bf2bbea204e34e7cef9bc039 | [
"MIT"
] | 120 | 2015-06-10T08:48:55.000Z | 2022-03-22T13:17:50.000Z | tests/varlookup_tests.py | dantaki/svtools | 9e13813ba25d0588bf2bbea204e34e7cef9bc039 | [
"MIT"
] | 281 | 2015-05-01T20:08:54.000Z | 2022-01-26T23:14:51.000Z | tests/varlookup_tests.py | dantaki/svtools | 9e13813ba25d0588bf2bbea204e34e7cef9bc039 | [
"MIT"
] | 52 | 2015-06-08T20:17:08.000Z | 2022-03-14T19:57:49.000Z | from unittest import TestCase, main
import os
import time
import sys
import tempfile
import difflib
import svtools.varlookup
class IntegrationTest_varlookup(TestCase):
def run_integration_test(self):
test_directory = os.path.dirname(os.path.abspath(__file__))
test_data_dir = os.path.join(test_directory, 'test_data', 'varlookup')
input_a = os.path.join(test_data_dir, 'input_a.bed')
input_b = os.path.join(test_data_dir, 'input_b.bed')
expected_result = os.path.join(test_data_dir, 'expected.bed')
temp_descriptor, temp_output_path = tempfile.mkstemp(suffix='.bed')
with os.fdopen(temp_descriptor, 'w') as output_handle:
svtools.varlookup.varLookup(input_a, input_b, output_handle, 50, '#', 'TEST')
expected_lines = open(expected_result).readlines()
# set timestamp for diff
expected_lines[1] = '##fileDate=' + time.strftime('%Y%m%d') + '\n'
produced_lines = open(temp_output_path).readlines()
diff = difflib.unified_diff(produced_lines, expected_lines, fromfile=temp_output_path, tofile=expected_result)
result = ''.join(diff)
if result != '':
for line in result:
sys.stdout.write(line)
self.assertFalse(result)
os.remove(temp_output_path)
def run_issue_209_regression_test(self):
test_directory = os.path.dirname(os.path.abspath(__file__))
test_data_dir = os.path.join(test_directory, 'test_data', 'varlookup')
input_a = os.path.join(test_data_dir, 'input_a1.bed')
input_b = os.path.join(test_data_dir, 'input_b1.bed')
expected_result = os.path.join(test_data_dir, 'expected1.bed')
temp_descriptor, temp_output_path = tempfile.mkstemp(suffix='.bed')
with os.fdopen(temp_descriptor, 'w') as output_handle:
svtools.varlookup.varLookup(input_a, input_b, output_handle, 50, '#', 'TEST')
expected_lines = open(expected_result).readlines()
# set timestamp for diff
expected_lines[1] = '##fileDate=' + time.strftime('%Y%m%d') + '\n'
produced_lines = open(temp_output_path).readlines()
diff = difflib.unified_diff(produced_lines, expected_lines, fromfile=temp_output_path, tofile=expected_result)
result = ''.join(diff)
if result != '':
for line in result:
sys.stdout.write(line)
self.assertFalse(result)
os.remove(temp_output_path)
if __name__ == "__main__":
main()
| 46.685185 | 118 | 0.668782 | 326 | 2,521 | 4.868098 | 0.233129 | 0.045369 | 0.055451 | 0.070573 | 0.860744 | 0.860744 | 0.860744 | 0.860744 | 0.860744 | 0.812854 | 0 | 0.006036 | 0.211424 | 2,521 | 53 | 119 | 47.566038 | 0.792254 | 0.01785 | 0 | 0.625 | 0 | 0 | 0.069956 | 0 | 0 | 0 | 0 | 0 | 0.041667 | 1 | 0.041667 | false | 0 | 0.145833 | 0 | 0.208333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
c93a9700c41f170ef69a61360ab983bab546bb09 | 146 | py | Python | torchfactor/factorization/__init__.py | Aaron09/torchfactor | 66782a183c583e3056e2c40d8d95568f4abb9537 | [
"MIT"
] | 5 | 2020-05-06T23:53:25.000Z | 2021-09-15T01:54:13.000Z | torchfactor/factorization/__init__.py | Aaron09/torchfactor | 66782a183c583e3056e2c40d8d95568f4abb9537 | [
"MIT"
] | null | null | null | torchfactor/factorization/__init__.py | Aaron09/torchfactor | 66782a183c583e3056e2c40d8d95568f4abb9537 | [
"MIT"
] | 1 | 2021-01-09T02:12:03.000Z | 2021-01-09T02:12:03.000Z | from .auto_tuckernet import AutoEncoderTucker
from .direct_tuckernet import DirectTuckerNet
from .nmfnet import NMFNet
from .svdnet import SVDNet
| 29.2 | 45 | 0.863014 | 18 | 146 | 6.888889 | 0.5 | 0.241935 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.109589 | 146 | 4 | 46 | 36.5 | 0.953846 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
a342529f3e84bce2f9dbded7d4a92d851540c4e2 | 38 | py | Python | selenium_browser/models/__init__.py | kkristof200/selenium_browser | b8144fe935073367911e90b50f078bfa985d6c0f | [
"MIT"
] | 1 | 2021-06-25T06:55:43.000Z | 2021-06-25T06:55:43.000Z | selenium_browser/models/__init__.py | kkristof200/selenium_browser | b8144fe935073367911e90b50f078bfa985d6c0f | [
"MIT"
] | null | null | null | selenium_browser/models/__init__.py | kkristof200/selenium_browser | b8144fe935073367911e90b50f078bfa985d6c0f | [
"MIT"
] | null | null | null | from .capabilities import Capabilities | 38 | 38 | 0.894737 | 4 | 38 | 8.5 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.078947 | 38 | 1 | 38 | 38 | 0.971429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
a34d58786867734555ba8855354b61ac31766012 | 36,453 | py | Python | src/main/python/smart/makeplots_simplified.py | cday97/beam | 7e1ab50eecaefafd04daab360f8b12bc7cab559b | [
"BSD-3-Clause-LBNL"
] | 123 | 2017-04-06T20:17:19.000Z | 2022-03-02T13:42:15.000Z | src/main/python/smart/makeplots_simplified.py | cday97/beam | 7e1ab50eecaefafd04daab360f8b12bc7cab559b | [
"BSD-3-Clause-LBNL"
] | 2,676 | 2017-04-26T20:27:27.000Z | 2022-03-31T16:39:53.000Z | src/main/python/smart/makeplots_simplified.py | cday97/beam | 7e1ab50eecaefafd04daab360f8b12bc7cab559b | [
"BSD-3-Clause-LBNL"
] | 60 | 2017-04-06T20:14:32.000Z | 2022-03-30T20:10:53.000Z | import pandas as pd
import matplotlib
import sys
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.patches as mpatches
import matplotlib.lines as mlines
plt.style.use('seaborn-colorblind')
# plt.style.use('ggplot')
plt.rcParams['axes.edgecolor'] = 'black'
plt.rcParams['axes.facecolor'] = 'white'
plt.rcParams['savefig.facecolor'] = 'white'
plt.rcParams['savefig.edgecolor'] = 'black'
colors = {'blue': '#377eb8', 'green': '#227222', 'orange': '#C66200', 'purple': '#470467', 'red': '#B30C0C',
'yellow': '#C6A600', 'light.green': '#C0E0C0', 'magenta': '#D0339D', 'dark.blue': '#23128F',
'brown': '#542D06', 'grey': '#8A8A8A', 'dark.grey': '#2D2D2D', 'light.yellow': '#FFE664',
'light.purple': '#9C50C0', 'light.orange': '#FFB164', 'black': '#000000'}
mode_colors = {'Ride Hail': colors['red'], 'Car': colors['grey'], 'Walk': colors['green'], 'Transit': colors['blue'],
'Ride Hail - Transit': colors['light.purple'], 'Ride Hail - Pooled': colors['purple'],
'CAV': colors['light.yellow'], 'Bike': colors['light.orange']}
expansion_factor = 8000000/630000
#_metrics_file = "/Users/haitam/workspace/pyscripts/data/smart/pilates4thSep2019/2010.metrics-final.csv"
#_output_folder = "/Users/haitam/workspace/pyscripts/data/smart/pilates4thSep2019/makeplots/2010"
_metrics_file = "/Users/haitam/workspace/pyscripts/data/smart/15thSep2019/2010.metrics-final.csv"
_output_folder = "/Users/haitam/workspace/pyscripts/data/smart/15thSep2019/makeplots/2010"
if len(sys.argv) > 1:
_metrics_file = sys.argv[1]
_output_folder = "{}/makeplots/{}".format(sys.argv[2].rsplit("/", 1)[0], sys.argv[2].rsplit("/", 1)[1])
df = pd.read_csv(_metrics_file).fillna(0)
_range = range(11)
_xpos = [1, 2.5, 3.5, 5, 6, 7.5, 8.5, 10, 11, 13, 14]
_names = [tech.rsplit(" ", 1)[0].split(" ")[-1] for tech in list(df['Technology'])]
_sc_names = ['Base', 'Mid-term', 'Long-term', 'Sharing is Caring', 'Technology Takeover', "All About Me"]
_sc_names_xpos = [1, 3, 5.5, 8, 10.5, 13.5]
_population = list(df['population'])
_rotation = 15
_standard_figsize = (6, 4.5)
# %% --- Mode split: portion of trips by mode, per scenario/technology --------
plt.figure(figsize=_standard_figsize)
# Trip counts per mode, expanded to the full population, in millions.
height_Transit = df['drive_transit_counts'].values * expansion_factor / 1000000 + \
                 df['ride_hail_transit_counts'].values * expansion_factor / 1000000 + \
                 df['walk_transit_counts'].values * expansion_factor / 1000000
height_Car = df['car_counts'].values * expansion_factor / 1000000
height_Cav = df['cav_counts'].values * expansion_factor / 1000000
# Ride hail includes pooled trips; pooled is also kept separately so it can
# be drawn as a hatched overlay inside the ride-hail segment.
height_RideHail = df['ride_hail_counts'].values * expansion_factor / 1000000 + df['ride_hail_pooled_counts'].values * expansion_factor / 1000000
height_RideHailPooled = df['ride_hail_pooled_counts'].values * expansion_factor / 1000000
height_nonMotorized = df['walk_counts'].values * expansion_factor / 1000000 + df['bike_counts'].values * expansion_factor / 1000000
# Normalize each stack to 1 so bars show portions rather than counts.
height_all = height_nonMotorized + height_Car + height_Transit + height_RideHail + height_Cav
height_Transit /= height_all
height_Car /= height_all
height_Cav /= height_all
height_RideHail /= height_all
height_RideHailPooled /= height_all
height_nonMotorized /= height_all
# Stacked bars, bottom to top: car, CAV, transit, ride hail, non-motorized.
plt_car = plt.bar(x=_xpos, height=height_Car)
plt_cav = plt.bar(x=_xpos, height=height_Cav, bottom=height_Car)
plt_transit = plt.bar(x=_xpos, height=height_Transit, bottom=height_Car + height_Cav)
plt_rh = plt.bar(x=_xpos, height=height_RideHail, bottom=height_Transit + height_Car + height_Cav)
# Unfilled 'xx'-hatched overlay marking the pooled portion of the ride-hail segment.
plt_rhp = plt.bar(x=_xpos, height=height_RideHailPooled, bottom=height_Transit + height_Car + height_Cav, hatch='xx', fill=False)
plt_nm = plt.bar(x=_xpos, height=height_nonMotorized, bottom=height_Car + height_Transit + height_RideHail + height_Cav)
# Proxy artist so the hatch pattern gets its own legend entry.
pooled = mpatches.Patch(facecolor='white', label='The white data', hatch='xx')
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
plt.legend((plt_car, plt_cav, plt_transit, plt_rh, pooled, plt_nm), ('Car', 'CAV', 'Transit', 'Ridehail', 'Ridehail Pool', 'NonMotorized'),
           labelspacing=-2.5, bbox_to_anchor=(1.05, 0.5), frameon=False)
ax = plt.gca()
ax.grid(False)
# Technology label just above each (unit-height) bar.
for ind in _range:
    plt.text(_xpos[ind], 1.02, _names[ind], ha='center')
ax.set_ylim((0, 1.0))
plt.ylabel('Portion of Trips')
plt.savefig('{}.modesplit.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %% --- Vehicle miles traveled by mode, with empty/shared overlays -----------
plt.figure(figsize=_standard_figsize)
# VMT per mode, expanded to the full population, in millions of miles.
height_Transit = df['VMT_cable_car'].values * expansion_factor / 1000000
height_Transit += df['VMT_bus'].values * expansion_factor / 1000000
height_Transit += df['VMT_ferry'].values * expansion_factor / 1000000
height_Transit += df['VMT_rail'].values * expansion_factor / 1000000
height_Transit += df['VMT_subway'].values * expansion_factor / 1000000
height_Transit += df['VMT_tram'].values * expansion_factor / 1000000
height_Car = df['VMT_car'].values * expansion_factor / 1000000
height_Cav = df['VMT_car_CAV'].values * expansion_factor / 1000000
height_CavEmpty = df['VMT_car_CAV_empty'].values * expansion_factor / 1000000
# (VMT_car_CAV_shared was previously read here but never plotted; the dead
# computation has been removed.)
height_RideHail = df['VMT_car_RH'].values * expansion_factor / 1000000
height_RideHail += df['VMT_car_RH_CAV'].values * expansion_factor / 1000000
height_RideHailPooled = df['VMT_car_RH_pooled'].values * expansion_factor / 1000000
height_RideHailPooled += df['VMT_car_RH_CAV_pooled'].values * expansion_factor / 1000000
height_RideHailEmpty = df['VMT_car_RH_empty'].values * expansion_factor / 1000000
height_RideHailEmpty += df['VMT_car_RH_CAV_empty'].values * expansion_factor / 1000000
height_nonMotorized = df['VMT_bike'].values * expansion_factor / 1000000
height_all = height_nonMotorized + height_Car + height_Transit + height_RideHail + height_Cav
# Stacked bars, bottom to top: car, CAV, transit, ride hail, non-motorized.
plt_car = plt.bar(x=_xpos, height=height_Car)
plt_cav = plt.bar(x=_xpos, height=height_Cav, bottom=height_Car)
plt_transit = plt.bar(x=_xpos, height=height_Transit, bottom=height_Car + height_Cav)
plt_rh = plt.bar(x=_xpos, height=height_RideHail, bottom=height_Transit + height_Car + height_Cav)
plt_nm = plt.bar(x=_xpos, height=height_nonMotorized, bottom=height_Car + height_Transit + height_RideHail + height_Cav)
# Negative-height '///'-hatched overlays drawn downward from the top of the
# CAV and ride-hail segments to mark the empty (unoccupied) portion of each.
plt_cav_empty = plt.bar(x=_xpos, height=-height_CavEmpty, bottom=height_Car+height_Cav, hatch='///', fill=False, linewidth=0)
plt_rh_empty = plt.bar(x=_xpos, height=-height_RideHailEmpty, bottom=height_Transit + height_Car + height_Cav + height_RideHail, hatch='///', fill=False, linewidth=0)
# 'xx'-hatched overlay marking the pooled portion of the ride-hail segment.
plt_rh_pooled = plt.bar(x=_xpos, height=height_RideHailPooled, bottom=height_Transit + height_Car + height_Cav, hatch="xx", fill=False, linewidth=0)
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
ax = plt.gca()
# Proxy artists so the hatch patterns get their own legend entries.
empty = mpatches.Patch(facecolor='white', label='The white data', hatch='///')
shared = mpatches.Patch(facecolor='white', label='The white data', hatch='xx')
ax.grid(False)
plt.legend((plt_car, plt_cav, plt_transit, plt_rh, plt_nm, empty, shared),
           ('Car', 'CAV', 'Transit', 'Ridehail', 'NonMotorized', 'Empty', 'Shared'), labelspacing=-2.5,
           bbox_to_anchor=(1.05, 0.5), frameon=False)
# Technology label just above each bar's total height.
for ind in _range:
    plt.text(_xpos[ind], height_all[ind] + 1.5, _names[ind], ha='center')
plt.ylabel('Vehicle Miles Traveled (millions)')
plt.savefig('{}.vmt_mode.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %% --- LDV person miles traveled (car / CAV / ride hail only) ---------------
plt.figure(figsize=_standard_figsize)
# PMT per LDV mode, expanded to the full population, in millions of miles.
# (Transit and walk/bike PMT were previously computed here but never plotted
# in this LDV-only chart; those dead computations have been removed — the
# all-mode PMT chart below recomputes everything it needs itself.)
height_Car = df['PMT_car'].values * expansion_factor / 1000000
height_Cav = df['PMT_car_CAV'].values * expansion_factor / 1000000
height_RideHail = df['PMT_car_RH'].values * expansion_factor / 1000000
height_RideHail += df['PMT_car_RH_CAV'].values * expansion_factor / 1000000
height_all = height_Car + height_Cav + height_RideHail
# Stacked bars, bottom to top: car, CAV, ride hail.
plt_car = plt.bar(x=_xpos, height=height_Car, color=mode_colors['Car'])
plt_cav = plt.bar(x=_xpos, height=height_Cav, bottom=height_Car, color=mode_colors['CAV'])
plt_rh = plt.bar(x=_xpos, height=height_RideHail, bottom=height_Car + height_Cav, color=mode_colors['Ride Hail'])
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
plt.legend((plt_car, plt_cav, plt_rh), ('Car', 'CAV', 'Ridehail'), labelspacing=-2.5, bbox_to_anchor=(1.05, 0.5), frameon=False)
ax = plt.gca()
ax.grid(False)
# Technology label just above each bar's total height.
for ind in _range:
    plt.text(_xpos[ind], height_all[ind] + 2, _names[ind], ha='center')
plt.ylabel('LDV Person Miles Traveled (millions)')
plt.savefig('{}.pmt_mode.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %% --- Person miles traveled, all modes -------------------------------------
plt.figure(figsize=_standard_figsize)
# PMT per mode, expanded to the full population, in millions of miles.
height_Transit = df['PMT_bus'].values * expansion_factor / 1000000
height_Transit += df['PMT_ferry'].values * expansion_factor / 1000000
height_Transit += df['PMT_rail'].values * expansion_factor / 1000000
height_Transit += df['PMT_subway'].values * expansion_factor / 1000000
height_Transit += df['PMT_tram'].values * expansion_factor / 1000000
height_Car = df['PMT_car'].values * expansion_factor / 1000000
height_Cav = df['PMT_car_CAV'].values * expansion_factor / 1000000
height_RideHail = df['PMT_car_RH'].values * expansion_factor / 1000000
height_RideHail += df['PMT_car_RH_CAV'].values * expansion_factor / 1000000
height_nonMotorized = df['PMT_walk'].values * expansion_factor / 1000000
height_nonMotorized += df['PMT_bike'].values * expansion_factor / 1000000
height_all = height_Car + height_Cav + height_RideHail + height_Transit + height_nonMotorized
# Stacked bars, bottom to top: car, CAV, ride hail, transit, non-motorized.
plt_car = plt.bar(x=_xpos, height=height_Car, color=mode_colors['Car'])
plt_cav = plt.bar(x=_xpos, height=height_Cav, bottom=height_Car, color=mode_colors['CAV'])
plt_rh = plt.bar(x=_xpos, height=height_RideHail, bottom= height_Car + height_Cav, color=mode_colors['Ride Hail'])
plt_transit = plt.bar(x=_xpos, height=height_Transit, bottom=height_Car + height_Cav + height_RideHail, color=mode_colors['Transit'])
plt_nm = plt.bar(x=_xpos, height=height_nonMotorized, bottom=height_Car + height_Transit + height_RideHail + height_Cav, color=mode_colors['Bike'])
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
plt.legend((plt_car, plt_cav, plt_rh, plt_transit, plt_nm), ('Car', 'CAV', 'Ridehail','Transit','NonMotorized'), labelspacing=-2.5, bbox_to_anchor=(1.05, 0.5), frameon=False)
ax = plt.gca()
ax.grid(False)
# Technology label just above each bar's total height.
for ind in _range:
    plt.text(_xpos[ind], height_all[ind] + 2, _names[ind], ha='center')
plt.ylabel('Person Miles Traveled (millions)')
plt.savefig('{}.pmt_mode_2.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %%
plt.figure(figsize=_standard_figsize)
height_Gas = df['totalEnergy_Gasoline'].values * expansion_factor / 1000000000000
height_Diesel = df['totalEnergy_Diesel'].values * expansion_factor / 1000000000000
height_Electricity = df['totalEnergy_Electricity'].values * expansion_factor / 1000000000000
height_all = height_Gas + height_Electricity + height_Diesel
plt_g = plt.bar(x=_xpos, height=height_Gas)
plt_d = plt.bar(x=_xpos, height=height_Diesel, bottom=height_Gas)
plt_e = plt.bar(x=_xpos, height=height_Electricity, bottom=height_Diesel + height_Gas)
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
plt.legend((plt_g, plt_d, plt_e), ('Gasoline', 'Diesel', 'Electricity'), labelspacing=-2.5, bbox_to_anchor=(1.05, 0.5),
frameon=False)
ax = plt.gca()
ax.grid(False)
for ind in _range:
plt.text(_xpos[ind], height_all[ind] + 5, _names[ind], ha='center')
# ax.set_ylim((0,400))
plt.ylabel('Light duty vehicle energy use (TJ)')
plt.savefig('{}.energy_source.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %%
plt.figure(figsize=_standard_figsize)
height_Gas = df['totalEnergy_Gasoline'].values/1000000
height_Diesel = df['totalEnergy_Diesel'].values/1000000
height_Electricity = df['totalEnergy_Electricity'].values/1000000
height_all = height_Gas + height_Electricity + height_Diesel
energy_intensity = df['motorizedVehicleMilesTraveled_total']/height_all
plt_g = plt.bar(x=_xpos, height=energy_intensity)
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
ax = plt.gca()
ax.grid(False)
for ind in _range:
plt.text(_xpos[ind], energy_intensity[ind] + 0.005, _names[ind], ha='center')
# ax.set_ylim((0,400))
plt.ylabel('Energy productivity (mi/MJ)')
plt.savefig('{}.energy_intensity.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %%
plt.figure(figsize=_standard_figsize)
height_Low = df['VMT_L1'].values * expansion_factor / 1000000
height_High = df['VMT_L3'].values * expansion_factor / 1000000
height_CAV = df['VMT_L5'].values * expansion_factor / 1000000
height_RH_Empty = df['VMT_car_RH_empty'].values * expansion_factor / 1000000
height_PV_Empty = df['VMT_car_CAV_empty'].values * expansion_factor / 1000000
height_All = height_Low + height_High + height_CAV
plt_Low = plt.bar(x=_xpos, height=height_Low)
plt_High = plt.bar(x=_xpos, height=height_High, bottom=height_Low)
plt_CAV = plt.bar(x=_xpos, height=height_CAV, bottom=height_High + height_Low)
plt_empty_car = plt.bar(x=_xpos, height=height_RH_Empty, hatch='///', fill=False)
plt_empty_cav = plt.bar(x=_xpos, height=height_PV_Empty, bottom=height_High + height_Low, hatch='///', fill=False)
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
ax = plt.gca()
empty = mpatches.Patch(facecolor='white', label='The white data', hatch='///')
plt.legend((plt_Low, plt_High, plt_CAV, empty), ('No Automation', 'Partial Automation', 'CAV', 'No Passengers'), labelspacing=-2.5, bbox_to_anchor=(1.05, 0.5), frameon=False)
plt.grid(b=None)
ax.grid(False)
for ind in _range:
plt.text(_xpos[ind], height_All[ind] + 2, _names[ind], ha='center')
plt.ylabel('Light duty vehicle miles traveled (millions)')
plt.savefig('{}.vmt_tech.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %%
plt.figure(figsize=_standard_figsize)
height_Low = df['VMT_L1'].values / _population
height_High = df['VMT_L3'].values / _population
height_CAV = df['VMT_L5'].values / _population
height_all = height_Low + height_High + height_CAV
plt_Low = plt.bar(x=_xpos, height=height_Low, color=colors['blue'])
plt_High = plt.bar(x=_xpos, height=height_High, bottom=height_Low, color=colors['green'])
plt_CAV = plt.bar(x=_xpos, height=height_CAV, bottom=height_Low + height_High, color=colors['red'])
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
ax = plt.gca()
for ind in _range:
plt.text(_xpos[ind], height_all[ind] + 0.3, _names[ind], ha='center')
plt.ylabel('Light Duty Vehicle Miles per Capita')
plt.legend((plt_CAV, plt_High, plt_Low), ('Full Automation', 'Partial Automation', 'No Automation'), bbox_to_anchor=(1.05, 0.5), frameon=False)
plt.savefig('{}.vmt_percapita_tech.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %%
plt.figure(figsize=(6, 5.5))
height_RH = (df['VMT_car_RH'].values+df['VMT_car_RH_CAV'].values) * expansion_factor / 1000000
height_RH_Empty = df['VMT_car_RH_empty'].values * expansion_factor / 1000000
height_RH_Pooled = df['VMT_car_RH_pooled'].values * expansion_factor / 1000000
height_PV = (df['VMT_car'].values+df['VMT_car_CAV'].values) * expansion_factor / 1000000
height_PV_Empty = df['VMT_car_CAV_empty'].values * expansion_factor / 1000000
height_all = height_RH + height_PV
plt_rh = plt.bar(x=_xpos, height=height_RH, color=mode_colors['Ride Hail'])
rh_empty = plt.bar(x=_xpos, height=-height_RH_Empty, bottom=height_RH, hatch='///', fill=False)
rh_pooled = plt.bar(x=_xpos, height=height_RH_Pooled, hatch='xxx', fill=False)
plt_pv = plt.bar(x=_xpos, height=height_PV, bottom=height_RH, color=mode_colors['Car'])
pv_empty = plt.bar(x=_xpos, height=-height_PV_Empty, bottom=height_RH+height_PV, hatch='///', fill=False)
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
ax = plt.gca()
empty = mpatches.Patch(facecolor='white', label='The white data', hatch='///')
pooled = mpatches.Patch(facecolor='white', label='The white data', hatch='xxx')
for ind in _range:
plt.text(_xpos[ind], max(height_all) + 1, _names[ind], ha='center')
plt.ylabel('Light Duty Vehicle Miles Traveled (millions)')
plt.legend((plt_pv, plt_rh, empty, pooled), ('Personal Vehicle', 'Ridehail', 'Empty', 'Shared'), bbox_to_anchor=(1.05, 0.5), frameon=False)
plt.savefig('{}.vmt_rh_empty.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %%
plt.figure(figsize=_standard_figsize)
height_RideHail = df['VMT_car_RH'].values * expansion_factor / 1000000
height_RideHail += df['VMT_car_RH_CAV'].values * expansion_factor / 1000000
height_RideHailEmpty = df['VMT_car_RH_empty'].values * expansion_factor / 1000000
height_RideHailEmpty += df['VMT_car_RH_CAV_empty'].values * expansion_factor / 1000000
height_RideHailPooled = df['VMT_car_RH_pooled'].values * expansion_factor / 1000000
height_RideHailPooled += df['VMT_car_RH_CAV_pooled'].values * expansion_factor / 1000000
plt_rh = plt.bar(x=_xpos, height=height_RideHail, color=mode_colors['Ride Hail'])
rh_empty = plt.bar(x=_xpos, height=-height_RideHailEmpty, bottom=height_RH, hatch='///', fill=False)
rh_pooled = plt.bar(x=_xpos, height=height_RideHailPooled, hatch='xxx', fill=False)
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
ax = plt.gca()
empty = mpatches.Patch(facecolor='white', label='The white data', hatch='///')
pooled = mpatches.Patch(facecolor='white', label='The white data', hatch='xxx')
for ind in _range:
plt.text(_xpos[ind], height_RH[ind] + 2, _names[ind], ha='center')
plt.ylabel('Light Duty Vehicle Miles Traveled (millions)')
plt.legend((plt_rh, empty, pooled), ('Total Ridehail VMT', 'Empty VMT', 'Shared VMT'), bbox_to_anchor=(1.05, 0.5), frameon=False)
plt.savefig('{}.vmt_just_rh_empty.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %%
plt.figure(figsize=_standard_figsize)
height_CAV = df['VMT_car_CAV'].values * expansion_factor / 1000000
height_CAV_Empty = df['VMT_car_CAV_empty'].values * expansion_factor / 1000000
height_CAV_Shared = df['VMT_car_CAV_shared'].values * expansion_factor / 1000000
height_PV = df['VMT_car'].values * expansion_factor / 1000000
height_all = height_CAV + height_PV
plt_pv = plt.bar(x=_xpos, height=height_PV, color=mode_colors['Car'])
plt_cav = plt.bar(x=_xpos, height=height_CAV, bottom=height_PV, color=mode_colors['Ride Hail'])
plt_cav_empty = plt.bar(x=_xpos, height=-height_CAV_Empty, bottom=height_CAV+height_PV, hatch='///', fill=False)
plt_cav_shared = plt.bar(x=_xpos, height=height_CAV_Shared, bottom=height_PV, hatch='xxx', fill=False)
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
ax = plt.gca()
empty = mpatches.Patch(facecolor='white', label='The white data', hatch='///')
pooled = mpatches.Patch(facecolor='white', label='The white data', hatch='xxx')
for ind in _range:
plt.text(_xpos[ind], max(height_all) + 2, _names[ind], ha='center')
plt.ylabel('Personal Vehicle Miles Traveled (millions)')
plt.legend((plt_cav, plt_pv, empty, pooled), ('CAV', 'Human Driven','Empty','Shared'), bbox_to_anchor=(1.05, 0.5), frameon=False)
plt.savefig('{}.vmt_just_cav_empty.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %%
plt.figure(figsize=_standard_figsize)
height_wait = df['averageOnDemandRideWaitTimeInMin'].values.copy()
plt_rh = plt.bar(x=_xpos, height=height_wait, color=mode_colors['Ride Hail'])
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
ax = plt.gca()
for ind in _range:
plt.text(_xpos[ind], height_wait[ind] + 0.1, _names[ind], ha='center')
plt.ylabel('Average Ride Hail Wait (min)')
plt.savefig('{}.wait_time.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %%
plt.figure(figsize=_standard_figsize)
height_Gas = df['totalEnergy_Gasoline'].values * expansion_factor / 1000000000 / _population
height_Electricity = df['totalEnergy_Electricity'].values * expansion_factor / 1000000000 / _population
height_all = height_Gas + height_Electricity
plt_Gas = plt.bar(x=_xpos, height=height_Gas, color=colors['purple'])
plt_Electricity = plt.bar(x=_xpos, height=height_Electricity, bottom=height_Gas, color=colors['yellow'])
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
ax = plt.gca()
for ind in _range:
plt.text(_xpos[ind], height_all[ind] + 0.02, _names[ind], ha='center')
plt.ylabel('Light Duty Vehicle Energy per Capita (GJ)')
plt.legend((plt_Electricity, plt_Gas), ('Electricity', 'Gasoline'), bbox_to_anchor=(1.05, 0.5), frameon=False)
plt.savefig('{}.energy_fuelsource_percapita.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
#%%
plt.figure(figsize=_standard_figsize)
height_Transit = df['personTravelTime_drive_transit'].values * expansion_factor / 1000000 / 60 + \
df['personTravelTime_onDemandRide_transit'].values * expansion_factor / 1000000 / 60 + \
df['personTravelTime_walk_transit'].values * expansion_factor / 1000000 / 60
height_Car = df['personTravelTime_car'].values * expansion_factor / 1000000 / 60
height_Cav = df['personTravelTime_cav'].values * expansion_factor / 1000000 / 60
height_RideHail = df['personTravelTime_onDemandRide'].values * expansion_factor / 1000000 / 60
height_RideHailPooled = df['personTravelTime_onDemandRide_pooled'].values * expansion_factor / 1000000 / 60
height_nonMotorized = df['personTravelTime_walk'].values * expansion_factor / 1000000 / 60 + \
df['personTravelTime_bike'].values * expansion_factor / 1000000 / 60
height_all = height_nonMotorized + height_Car + height_Transit + height_RideHail + height_RideHailPooled + height_Cav
plt_car = plt.bar(x=_xpos, height=height_Car, color=mode_colors['Car'])
plt_cav = plt.bar(x=_xpos, height=height_Cav, bottom=height_Car, color=mode_colors['CAV'])
plt_transit = plt.bar(x=_xpos, height=height_Transit, bottom=height_Car + height_Cav, color=mode_colors['Transit'])
plt_rh = plt.bar(x=_xpos, height=height_RideHail, bottom=height_Transit + height_Car + height_Cav, color=mode_colors['Ride Hail'])
plt_rhp = plt.bar(x=_xpos, height=height_RideHailPooled, bottom=height_RideHail + height_Car + height_Transit + height_Cav, color=mode_colors['Ride Hail - Transit'])
plt_nm = plt.bar(x=_xpos, height=height_nonMotorized, bottom=height_Car + height_Transit + height_RideHail + height_RideHailPooled + height_Cav, color=mode_colors['Bike'])
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
plt.legend((plt_car, plt_cav, plt_transit, plt_rh, plt_rhp, plt_nm), ('Car', 'CAV', 'Transit', 'Ridehail', 'Ridehail (Pooled)', 'NonMotorized'), labelspacing=-2.5, bbox_to_anchor=(1.05, 0.5), frameon=False)
ax = plt.gca()
ax.grid(False)
for ind in _range:
plt.text(_xpos[ind], height_all[ind] + 0.05, _names[ind], ha='center')
plt.ylabel('Person Hours Traveled (millions)')
plt.savefig('{}.pht.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %%
plt.figure(figsize=_standard_figsize)
height_Car = df['personTravelTime_car'].values * expansion_factor / 1000000 / 60
height_Cav = df['personTravelTime_cav'].values * expansion_factor / 1000000 / 60
height_RideHail = df['personTravelTime_onDemandRide'].values * expansion_factor / 1000000 / 60
height_RideHailPooled = df['personTravelTime_onDemandRide_pooled'].values * expansion_factor / 1000000 / 60
height_all = height_Car + height_RideHail + height_RideHailPooled + height_Cav
plt_car = plt.bar(x=_xpos, height=height_Car, color=mode_colors['Car'])
plt_cav = plt.bar(x=_xpos, height=height_Cav, bottom=height_Car, color=mode_colors['CAV'])
plt_rh = plt.bar(x=_xpos, height=height_RideHail, bottom=height_Car + height_Cav, color=mode_colors['Ride Hail'])
plt_rhp = plt.bar(x=_xpos, height=height_RideHailPooled, bottom=height_RideHail + height_Car + height_Cav, color=mode_colors['Ride Hail - Transit'])
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
plt.legend((plt_car, plt_cav, plt_rh, plt_rhp), ('Car', 'CAV', 'Ridehail', 'Ridehail (Pooled)'), labelspacing=-2.5, bbox_to_anchor=(1.05, 0.5), frameon=False)
ax = plt.gca()
ax.grid(False)
for ind in _range:
plt.text(_xpos[ind], height_all[ind] + 0.02, _names[ind], ha='center')
plt.ylabel('Person Hours Traveled (millions)')
plt.savefig('{}.pht_ldv.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %%
plt.figure(figsize=_standard_figsize)
height_Car = df['personTravelTime_car'].values / _population / 60
height_Cav = df['personTravelTime_cav'].values / _population / 60
height_RideHail = df['personTravelTime_onDemandRide'].values / _population / 60
height_RideHailPooled = df['personTravelTime_onDemandRide_pooled'].values / _population / 60
height_all = height_Car + height_RideHail + height_RideHailPooled + height_Cav
plt_car = plt.bar(x=_xpos, height=height_Car, color=mode_colors['Car'])
plt_cav = plt.bar(x=_xpos, height=height_Cav, bottom=height_Car, color=mode_colors['CAV'])
plt_rh = plt.bar(x=_xpos, height=height_RideHail, bottom=height_Car + height_Cav, color=mode_colors['Ride Hail'])
plt_rhp = plt.bar(x=_xpos, height=height_RideHailPooled, bottom=height_RideHail + height_Car + height_Cav,
color=mode_colors['Ride Hail - Transit'])
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
plt.legend((plt_car, plt_cav, plt_rh, plt_rhp), ('Car', 'CAV', 'Ridehail', 'Ridehail (Pooled)'), labelspacing=-2.5, bbox_to_anchor=(1.05, 0.5), frameon=False)
ax = plt.gca()
ax.grid(False)
for ind in _range:
plt.text(_xpos[ind], height_all[ind] + 0.01, _names[ind], ha='center')
plt.ylabel('LDV Person Hours Traveled (per capita)')
plt.savefig('{}.pht_percapita.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %%
plt.figure(figsize=_standard_figsize)
height_Car = df['personTravelTime_car'].values / _population / 60
height_Cav = df['personTravelTime_cav'].values / _population / 60
height_RideHail = df['personTravelTime_onDemandRide'].values / _population / 60
height_RideHailPooled = df['personTravelTime_onDemandRide_pooled'].values / _population / 60
height_all = height_Car + height_RideHail + height_RideHailPooled + height_Cav
plt_rhp = plt.bar(x=_xpos, height=height_RideHailPooled, color=mode_colors['Ride Hail - Transit'])
plt_rh = plt.bar(x=_xpos, height=height_RideHail, bottom= height_RideHailPooled, color=mode_colors['Ride Hail'])
plt_cav = plt.bar(x=_xpos, height=height_Cav, bottom=height_RideHailPooled + height_RideHail, color=mode_colors['CAV'])
plt_car = plt.bar(x=_xpos, height=height_Car, bottom=height_RideHailPooled + height_RideHail + height_Cav, color=mode_colors['Car'])
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
plt.legend((plt_rhp, plt_rh, plt_cav, plt_car), ('Ride Hail (Pooled)', 'Ride Hail', 'CAV', 'Car'), labelspacing=-2.5, bbox_to_anchor=(1.05, 0.5), frameon=False)
ax = plt.gca()
ax.grid(False)
for ind in _range:
plt.text(_xpos[ind], height_all[ind] + 0.01, _names[ind], ha='center')
plt.ylabel('LDV Person Hours Traveled (per capita)')
plt.savefig('{}.pht_percapita_reorder.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %%
plt.figure(figsize=(8,6))
height_Transit = df['drive_transit_counts'].values * expansion_factor / 1000000 + \
df['ride_hail_transit_counts'].values * expansion_factor / 1000000 + \
df['walk_transit_counts'].values * expansion_factor / 1000000
height_Car = df['car_counts'].values * expansion_factor / 1000000
height_Cav = df['cav_counts'].values * expansion_factor / 1000000
height_RideHail = df['ride_hail_counts'].values * expansion_factor / 1000000
height_RideHailPooled = df['ride_hail_pooled_counts'].values * expansion_factor / 1000000
height_RideHailPooledMatch = df['multi_passengers_trips_per_pool_trips'].values * height_RideHailPooled
height_nonMotorized = df['walk_counts'].values * expansion_factor / 1000000 + \
df['bike_counts'].values * expansion_factor / 1000000
height_all = height_nonMotorized + height_Car + height_Transit + height_RideHail + height_RideHailPooled + height_Cav
height_Transit /= height_all
height_Car /= height_all
height_Cav /= height_all
height_RideHail /= height_all
height_RideHailPooled /= height_all
height_nonMotorized /= height_all
height_RideHailPooledMatch /= height_all
plt_transit = plt.bar(x=_xpos, height=height_Transit, color=mode_colors['Transit'])
plt_rhp = plt.bar(x=_xpos, height=height_RideHailPooled, bottom=height_Transit, color=mode_colors['Ride Hail - Pooled'])
plt_rh = plt.bar(x=_xpos, height=height_RideHail, bottom=height_Transit + height_RideHailPooled,
color=mode_colors['Ride Hail'])
plt_cav = plt.bar(x=_xpos, height=height_Cav, bottom=height_Transit + height_RideHailPooled + height_RideHail,
color=mode_colors['CAV'])
plt_car = plt.bar(x=_xpos, height=height_Car,
bottom=height_Transit + height_RideHailPooled + height_RideHail + height_Cav,
color=mode_colors['Car'])
plt_nm = plt.bar(x=_xpos, height=height_nonMotorized,
bottom=height_RideHailPooled + height_Car + height_Transit + height_RideHail + height_Cav,
color=mode_colors['Bike'])
plt_rhp_m = plt.bar(x=_xpos,height=-height_RideHailPooledMatch, bottom=height_Transit + height_RideHailPooled ,hatch='///',fill=False)
matched = mpatches.Patch(facecolor='white', label='The white data', hatch='///')
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
plt.legend((plt_transit, matched, plt_rhp, plt_rh, plt_cav, plt_car, plt_nm),
('Transit', 'Ride Hail (Matched)', 'Ride Hail (Pooled Requested)', 'Ride Hail', 'CAV', 'Car', 'Non-motorized'), labelspacing=-2.5,
bbox_to_anchor=(1.05, 0.5), frameon=False)
ax = plt.gca()
ax.grid(False)
for ind in _range:
plt.text(_xpos[ind], 1.02, _names[ind], ha='center')
ax.set_ylim((0, 1.0))
plt.ylabel('Portion of Trips')
plt.savefig('{}.modesplit_reorder.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %%
plt.figure(figsize=_standard_figsize)
height_Transit = df['PMT_bus'].values / _population
height_Transit += df['PMT_ferry'].values / _population
height_Transit += df['PMT_rail'].values / _population
height_Transit += df['PMT_subway'].values / _population
height_Transit += df['PMT_tram'].values / _population
height_Car = df['PMT_car'].values / _population
height_Cav = df['PMT_car_CAV'].values / _population
height_RideHail = df['PMT_car_RH'].values / _population
height_RideHail += df['PMT_car_RH_CAV'].values / _population
height_nonMotorized = df['PMT_walk'].values / _population
height_nonMotorized += df['PMT_bike'].values / _population
height_all = height_Car + height_RideHail + height_Cav
plt_car = plt.bar(x=_xpos, height=height_Car, color=mode_colors['Car'])
plt_cav = plt.bar(x=_xpos, height=height_Cav, bottom=height_Car, color=mode_colors['CAV'])
plt_rh = plt.bar(x=_xpos, height=height_RideHail, bottom=height_Car + height_Cav, color=mode_colors['Ride Hail'])
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
plt.legend((plt_car, plt_cav, plt_rh), ('Car', 'CAV', 'Ridehail'), labelspacing=-2.5, bbox_to_anchor=(1.05, 0.5), frameon=False)
ax = plt.gca()
ax.grid(False)
for ind in _range:
plt.text(_xpos[ind], height_all[ind] + 0.5, _names[ind], ha='center')
plt.ylabel('LDV Person Miles Traveled per Capita')
plt.savefig('{}.pmt_percapita_mode.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %%
plt.figure(figsize=_standard_figsize)
totalEnergy = df['totalEnergy_Gasoline'] / 1000000 + df['totalEnergy_Electricity'] / 1000000
totalPMT = df['PMT_car'].values + df['PMT_car_CAV'].values + df['PMT_car_RH'].values + df['PMT_car_RH_CAV'].values
height = totalEnergy / totalPMT
plt_e_pmt = plt.bar(x=_xpos, height=height, color=colors['grey'])
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
ax = plt.gca()
ax.grid(False)
for ind in _range:
plt.text(_xpos[ind], height[ind] + 0.025, _names[ind], ha='center')
plt.ylabel('Energy per Light Duty Vehicle Passenger Mile (MJ/mi)')
plt.savefig('{}.energy_per_pmt.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %%
plt.figure(figsize=_standard_figsize)
totalEnergy = df['totalEnergy_Gasoline'] / 1000000 + df['totalEnergy_Electricity'] / 1000000
totalVMT = df['VMT_car'].values + df['VMT_car_CAV'].values + df['VMT_car_RH'].values + df['VMT_car_RH_CAV'].values
height = totalEnergy / totalVMT
plt_e_pmt = plt.bar(x=_xpos, height=height, color=colors['grey'])
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
ax = plt.gca()
ax.grid(False)
for ind in _range:
plt.text(_xpos[ind], height[ind] + 0.025, _names[ind], ha='center')
plt.ylabel('Energy per Light Duty Vehicle Mile (MJ/mi)')
plt.savefig('{}.energy_per_vmt.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %%
plt.figure(figsize=_standard_figsize)
totalPMT = df['PMT_car'].values + df['PMT_car_CAV'].values + df['PMT_car_RH'].values + df['PMT_car_RH_CAV'].values
totalVMT = df['VMT_car'].values + df['VMT_car_CAV'].values + df['VMT_car_RH'].values + df['VMT_car_RH_CAV'].values
height = totalPMT / totalVMT
plt_e_pmt = plt.bar(x=_xpos, height=height, color=colors['grey'])
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
ax = plt.gca()
ax.grid(False)
for ind in _range:
plt.text(_xpos[ind], height[ind] + 0.01, _names[ind], ha='center')
plt.ylabel('Mean Occupancy')
plt.savefig('{}.occupancy.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
# %%
plt.figure(figsize=_standard_figsize)
height_Transit = df['PMT_bus'].values / _population
height_Transit += df['PMT_ferry'].values / _population
height_Transit += df['PMT_rail'].values / _population
height_Transit += df['PMT_subway'].values / _population
height_Transit += df['PMT_tram'].values / _population
height_Car = df['PMT_car'].values / _population
height_Cav = df['PMT_car_CAV'].values / _population
height_RideHail = df['PMT_car_RH'].values / _population
height_RideHail += df['PMT_car_RH_CAV'].values / _population
height_nonMotorized = df['PMT_walk'].values / _population
height_nonMotorized += df['PMT_bike'].values / _population
height_all = height_nonMotorized + height_Car + height_Transit + height_RideHail + height_Cav
plt_transit = plt.bar(x=_xpos, height=height_Transit, color=mode_colors['Transit'])
plt_rh = plt.bar(x=_xpos, height=height_RideHail, bottom=height_Transit, color=mode_colors['Ride Hail'])
plt_cav = plt.bar(x=_xpos, height=height_Cav, bottom=height_Transit + height_RideHail, color=mode_colors['CAV'])
plt_car = plt.bar(x=_xpos, height=height_Car, bottom=height_Transit + height_RideHail + height_Cav, color=mode_colors['Car'])
plt_nm = plt.bar(x=_xpos, height=height_nonMotorized, bottom=height_Car + height_Transit + height_RideHail + height_Cav, color=mode_colors['Bike'])
plt.xticks(_sc_names_xpos, _sc_names, rotation=_rotation)
plt.legend((plt_transit, plt_rh, plt_cav, plt_car, plt_nm), ('Transit', 'Ride Hail', 'CAV', 'Car', 'NonMotorized'), labelspacing=-2.5, bbox_to_anchor=(1.05, 0.5), frameon=False)
ax = plt.gca()
ax.grid(False)
for ind in _range:
plt.text(_xpos[ind], height_all[ind] + 0.5, _names[ind], ha='center')
plt.ylabel('Person Miles Traveled per Capita')
plt.savefig('{}.pmt_percapita_mode_reorder.png'.format(_output_folder), transparent=True, bbox_inches='tight', dpi=200, facecolor='white')
plt.clf()
| 52.830435 | 206 | 0.748361 | 5,307 | 36,453 | 4.840211 | 0.052384 | 0.056643 | 0.078483 | 0.099194 | 0.899015 | 0.883638 | 0.865302 | 0.839296 | 0.797485 | 0.782341 | 0 | 0.037243 | 0.100623 | 36,453 | 689 | 207 | 52.907112 | 0.746256 | 0.009108 | 0 | 0.587361 | 0 | 0.001859 | 0.164552 | 0.033498 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.005576 | 0.013011 | 0 | 0.013011 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
a3532c5f592af123c595b5e40f3f11ac8f7dc70d | 33 | py | Python | src/logging/__init__.py | siddk/annotated-butd | 71043a885208aad4258b11375c6d1aa586956371 | [
"MIT"
] | 4 | 2020-07-13T01:21:51.000Z | 2022-02-21T07:04:52.000Z | src/logging/__init__.py | siddk/annotated-butd | 71043a885208aad4258b11375c6d1aa586956371 | [
"MIT"
] | null | null | null | src/logging/__init__.py | siddk/annotated-butd | 71043a885208aad4258b11375c6d1aa586956371 | [
"MIT"
] | 2 | 2020-08-01T18:36:42.000Z | 2022-02-21T09:30:12.000Z | from .logger import MetricLogger
| 16.5 | 32 | 0.848485 | 4 | 33 | 7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.121212 | 33 | 1 | 33 | 33 | 0.965517 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
6ed25b4979be7addba6d6aea140fc021cf088d6e | 295 | py | Python | scan-reporting/conversion_testing.py | rsjones94/scan-reporting | a1206183082dabb57ceb5da3b8d21f8861665f7d | [
"MIT"
] | null | null | null | scan-reporting/conversion_testing.py | rsjones94/scan-reporting | a1206183082dabb57ceb5da3b8d21f8861665f7d | [
"MIT"
] | null | null | null | scan-reporting/conversion_testing.py | rsjones94/scan-reporting | a1206183082dabb57ceb5da3b8d21f8861665f7d | [
"MIT"
] | null | null | null | import helpers as hp
the_file = '/Users/manusdonahue/Desktop/Projects/gstudy_converter/testdata/1.3.46.670589.11.17029.5.0.8164.2016041311023902000-301-1-1h7ec6.dcm'
targ_folder = '/Users/manusdonahue/Desktop/Projects/gstudy_converter/conv_testing/'
hp.dicom_to_parrec(the_file, targ_folder)
| 36.875 | 144 | 0.823729 | 45 | 295 | 5.2 | 0.733333 | 0.059829 | 0.205128 | 0.273504 | 0.401709 | 0.401709 | 0 | 0 | 0 | 0 | 0 | 0.175 | 0.050847 | 295 | 7 | 145 | 42.142857 | 0.660714 | 0 | 0 | 0 | 0 | 0.25 | 0.673469 | 0.673469 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
6ed6ff6d2bb7899d04fd755a6e7d1652c05338e8 | 40 | py | Python | spark_core/__init__.py | JohnOmernik/edwin_org | a8443b1863ba854078b5d5fd0884b80f58cc48a9 | [
"Apache-2.0"
] | null | null | null | spark_core/__init__.py | JohnOmernik/edwin_org | a8443b1863ba854078b5d5fd0884b80f58cc48a9 | [
"Apache-2.0"
] | null | null | null | spark_core/__init__.py | JohnOmernik/edwin_org | a8443b1863ba854078b5d5fd0884b80f58cc48a9 | [
"Apache-2.0"
] | null | null | null | from spark_core.spark_base import Spark
| 20 | 39 | 0.875 | 7 | 40 | 4.714286 | 0.714286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.1 | 40 | 1 | 40 | 40 | 0.916667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
6e40eb508d693e52348b40cf1a90e2e76f36778e | 49,658 | py | Python | linearmodels/tests/panel/test_panel_ols.py | clarityai-eng/linearmodels | 21d059199925b51075ac2139e6d8ab44fdd17938 | [
"NCSA"
] | null | null | null | linearmodels/tests/panel/test_panel_ols.py | clarityai-eng/linearmodels | 21d059199925b51075ac2139e6d8ab44fdd17938 | [
"NCSA"
] | null | null | null | linearmodels/tests/panel/test_panel_ols.py | clarityai-eng/linearmodels | 21d059199925b51075ac2139e6d8ab44fdd17938 | [
"NCSA"
] | null | null | null | from itertools import product
import numpy as np
from numpy.linalg import lstsq
from numpy.testing import assert_allclose
import pandas as pd
import pytest
from linearmodels.iv.model import IV2SLS
from linearmodels.panel.data import PanelData
from linearmodels.panel.model import PanelOLS, PooledOLS
from linearmodels.panel.utility import AbsorbingEffectWarning
from linearmodels.shared.exceptions import MemoryWarning
from linearmodels.shared.hypotheses import WaldTestStatistic
from linearmodels.shared.utility import AttrDict
from linearmodels.tests.panel._utility import (
access_attributes,
assert_frame_similar,
assert_results_equal,
datatypes,
generate_data,
)
# Silence warnings that are expected byproducts of the test data (missing
# values) and of the scipy/numpy matrix deprecation.
pytestmark = pytest.mark.filterwarnings(
    "ignore::linearmodels.shared.exceptions.MissingValueWarning",
    "ignore:the matrix subclass:PendingDeprecationWarning",
)

# Parametrization grid: share of missing observations x input datatype x
# whether the exog matrix includes a constant.
perc_missing = [0.0, 0.02, 0.20]
has_const = [True, False]
perms = list(product(perc_missing, datatypes, has_const))
# BUG FIX: the inner generator previously iterated ``perms`` (the whole list)
# instead of the elements of each ``perm``, so every generated id was the same
# string (the join of all permutation tuples).  Iterate each permutation's
# elements so ids are unique and descriptive, e.g. "0.02-pandas-True".
ids = ["-".join(str(param) for param in perm) for perm in perms]
@pytest.fixture(params=perms, ids=ids)
def data(request):
    """Standard panel data set covering missingness/datatype/constant cases."""
    missing_share, dtype, include_const = request.param
    return generate_data(
        missing_share, dtype, const=include_const, ntk=(91, 15, 5), other_effects=2
    )
@pytest.fixture(params=["numpy", "pandas"])
def absorbed_data(request):
    """Panel data augmented with a regressor collinear with the entity index.

    An extra column equal to the first-level index code is appended so tests can
    exercise absorbed-effect detection.
    """
    dtype = request.param
    rng = np.random.RandomState(12345)
    data = generate_data(0, dtype, ntk=(131, 4, 3), rng=rng)
    exog = data.x
    if isinstance(exog, np.ndarray):
        # Variable that is constant along axis 1 and varies only along axis 2,
        # appended as an additional regressor on axis 0.
        extra = np.tile(np.arange(exog.shape[2]), (1, exog.shape[1], 1))
        data.x = np.concatenate([exog, extra])
    elif isinstance(exog, pd.DataFrame):
        # Use the first MultiIndex level's codes as the absorbed column.
        data.x["x_absorbed"] = np.asarray(exog.index.codes[0], dtype=np.double)
    return data
@pytest.fixture(params=perms, ids=ids)
def large_data(request):
    """Larger panel (more time periods) using the same parametrization grid."""
    missing_share, dtype, include_const = request.param
    return generate_data(
        missing_share, dtype, const=include_const, ntk=(51, 71, 5), other_effects=2
    )
def _pandas_no_const(perm):
    # Keep only pandas-typed configurations without a constant column.
    return perm[1] == "pandas" and not perm[-1]


singleton_ids = [label for label, perm in zip(ids, perms) if _pandas_no_const(perm)]
singleton_perms = [perm for perm in perms if _pandas_no_const(perm)]
@pytest.fixture(params=singleton_perms, ids=singleton_ids)
def singleton_data(request):
    """Panel data with many categories so singleton groups appear."""
    missing_share, dtype, include_const = request.param
    return generate_data(
        missing_share,
        dtype,
        const=include_const,
        ntk=(91, 15, 5),
        other_effects=2,
        num_cats=[5 * 91, 15],
    )
# Smaller grid for constant-only tests: missing share x datatype.
const_perms = [(miss, dtype) for miss in perc_missing for dtype in datatypes]
const_ids = ["{0}-{1}".format(miss, dtype) for miss, dtype in const_perms]
@pytest.fixture(params=const_perms, ids=const_ids)
def const_data(request):
    """Dependent, weights, and a constant-only regressor named ``Const``."""
    missing_share, dtype = request.param
    base = generate_data(missing_share, dtype, ntk=(91, 7, 1))
    dependent = PanelData(base.y).dataframe
    # Build the constant with the same index/missingness pattern as y.
    const = dependent.copy()
    const.iloc[:, :] = 1
    const.columns = ["Const"]
    return AttrDict(y=dependent, x=const, w=PanelData(base.w).dataframe)
@pytest.fixture(params=[True, False])
def entity_eff(request):
    # Boolean flag: include entity effects in the parameterized test.
    return request.param
@pytest.fixture(params=[True, False])
def time_eff(request):
    # Boolean flag: include time effects in the parameterized test.
    return request.param
# Weighted flag, entity/time effect flags, and count of other effects.  The
# model supports at most two distinct effects, so cap the effect total at 2.
lsdv_perms = [
    perm
    for perm in product([True, False], [True, False], [True, False], [0, 1, 2])
    if sum(perm[1:]) <= 2
]


def _lsdv_id(perm):
    """Build a human-readable pytest id for one LSDV configuration."""
    parts = ["weighted" if perm[0] else "unweighted"]
    if perm[1]:
        parts.append("entity_effects")
    if perm[2]:
        parts.append("time_effects")
    if perm[3]:
        parts.append("{0}_other_effects".format(perm[3]))
    return "-".join(parts)


lsdv_ids = [_lsdv_id(perm) for perm in lsdv_perms]
@pytest.fixture(params=lsdv_perms, ids=lsdv_ids)
def lsdv_config(request):
    """Bundle one LSDV option combination as an attribute dictionary."""
    weights, entity, time, other = request.param
    return AttrDict(
        weights=weights,
        entity_effects=entity,
        time_effects=time,
        other_effects=other,
    )
def test_const_data_only(const_data):
    """A constant-only PanelOLS matches plain OLS estimates."""
    dep, exog = const_data.y, const_data.x
    panel_res = PanelOLS(dep, exog).fit(debiased=False)
    ols_res = IV2SLS(dep, exog, None, None).fit()
    assert_allclose(panel_res.params, ols_res.params)
def test_const_data_only_weights(const_data):
    """A weighted constant-only PanelOLS matches weighted OLS estimates."""
    dep, exog = const_data.y, const_data.x
    panel_res = PanelOLS(dep, exog, weights=const_data.w).fit(debiased=False)
    ols_res = IV2SLS(dep, exog, None, None, weights=const_data.w).fit()
    assert_allclose(panel_res.params, ols_res.params)
def test_const_data_entity(const_data):
    """Entity effects with a constant match OLS on partialled-out dummies."""
    y, x = const_data.y, const_data.x
    mod = PanelOLS(y, x, entity_effects=True)
    res = mod.fit(debiased=False)
    x = mod.exog.dataframe
    d = mod.dependent.dummies("entity", drop_first=True)
    # Partial the constant out of the dummies to avoid collinearity
    d.iloc[:, :] = d.values - x.values @ lstsq(x.values, d.values, rcond=None)[0]
    xd = np.c_[x.values, d.values]
    xd = pd.DataFrame(xd, index=x.index, columns=list(x.columns) + list(d.columns))
    res2 = IV2SLS(mod.dependent.dataframe, xd, None, None).fit()
    # Only the constant's coefficient is compared
    assert_allclose(res.params, res2.params.iloc[:1])
def test_const_data_time(const_data):
    """Time effects with a constant match OLS on partialled-out dummies."""
    y, x = const_data.y, const_data.x
    mod = PanelOLS(y, x, time_effects=True)
    res = mod.fit(debiased=False)
    x = mod.exog.dataframe
    d = mod.dependent.dummies("time", drop_first=True)
    # Partial the constant out of the dummies to avoid collinearity
    d.iloc[:, :] = d.values - x.values @ lstsq(x.values, d.values, rcond=None)[0]
    xd = np.c_[x.values, d.values]
    xd = pd.DataFrame(xd, index=x.index, columns=list(x.columns) + list(d.columns))
    res2 = IV2SLS(mod.dependent.dataframe, xd, None, None).fit()
    # Only the constant's coefficient is compared
    assert_allclose(res.params, res2.params.iloc[:1])
@pytest.mark.parametrize("entity", [True, False])
def test_const_data_single_effect_weights(const_data, entity):
    """Weighted single-effect estimation matches weighted dummy-variable OLS."""
    y, x = const_data.y, const_data.x
    mod = PanelOLS(
        y, x, entity_effects=entity, time_effects=not entity, weights=const_data.w
    )
    res = mod.fit(debiased=False)
    y = mod.dependent.dataframe
    w = mod.weights.dataframe
    x = mod.exog.dataframe
    dummy_type = "entity" if entity else "time"
    d = mod.dependent.dummies(dummy_type, drop_first=True)
    d_columns = list(d.columns)
    # Weighted partialling: project the weighted dummies off the (weighted)
    # constant before appending them to the regressors.
    root_w = np.sqrt(w.values)
    z = np.ones_like(x)
    wd = root_w * d.values
    wz = root_w
    d = d - z @ lstsq(wz, wd, rcond=None)[0]
    xd = np.c_[x.values, d.values]
    xd = pd.DataFrame(xd, index=x.index, columns=list(x.columns) + d_columns)
    res2 = IV2SLS(y, xd, None, None, weights=w).fit()
    # Only the constant's coefficient is compared
    assert_allclose(res.params, res2.params.iloc[:1])
def test_const_data_both(const_data):
    """Entity and time effects jointly match OLS on both dummy sets."""
    y, x = const_data.y, const_data.x
    mod = PanelOLS(y, x, entity_effects=True, time_effects=True)
    res = mod.fit(debiased=False)
    x = mod.exog.dataframe
    d1 = mod.dependent.dummies("entity", drop_first=True)
    d1.columns = ["d.entity.{0}".format(i) for i in d1]
    d2 = mod.dependent.dummies("time", drop_first=True)
    d2.columns = ["d.time.{0}".format(i) for i in d2]
    d = np.c_[d1.values, d2.values]
    d = pd.DataFrame(d, index=x.index, columns=list(d1.columns) + list(d2.columns))
    # Partial the constant out of the stacked dummy matrix
    d.iloc[:, :] = d.values - x.values @ lstsq(x.values, d.values, rcond=None)[0]
    xd = np.c_[x.values, d.values]
    xd = pd.DataFrame(xd, index=x.index, columns=list(x.columns) + list(d.columns))
    res2 = IV2SLS(mod.dependent.dataframe, xd, None, None).fit()
    # Only the constant's coefficient is compared
    assert_allclose(res.params, res2.params.iloc[:1])
def test_const_data_both_weights(const_data):
    """Weighted two-way effects match weighted OLS on both dummy sets."""
    y, x = const_data.y, const_data.x
    mod = PanelOLS(y, x, entity_effects=True, time_effects=True, weights=const_data.w)
    res = mod.fit(debiased=False)
    w = mod.weights.dataframe
    x = mod.exog.dataframe
    d1 = mod.dependent.dummies("entity", drop_first=True)
    d1.columns = ["d.entity.{0}".format(i) for i in d1]
    d2 = mod.dependent.dummies("time", drop_first=True)
    d2.columns = ["d.time.{0}".format(i) for i in d2]
    d = np.c_[d1.values, d2.values]
    # Weighted partialling of the stacked dummies off the (weighted) constant
    root_w = np.sqrt(w.values)
    z = np.ones_like(x)
    wd = root_w * d
    wz = root_w
    d = d - z @ lstsq(wz, wd, rcond=None)[0]
    d = pd.DataFrame(d, index=x.index, columns=list(d1.columns) + list(d2.columns))
    xd = np.c_[x.values, d.values]
    xd = pd.DataFrame(xd, index=x.index, columns=list(x.columns) + list(d.columns))
    res2 = IV2SLS(mod.dependent.dataframe, xd, None, None, weights=w).fit()
    # Only the constant's coefficient is compared
    assert_allclose(res.params, res2.params.iloc[:1])
def test_panel_no_effects(data):
    """PanelOLS without any effects is identical to PooledOLS."""
    mod = PanelOLS(data.y, data.x)
    # No effects requested, so the collected effects matrix has zero columns
    assert mod._collect_effects().shape[1] == 0
    res = mod.fit()
    pooled_res = PooledOLS(data.y, data.x).fit()
    assert_results_equal(res, pooled_res)
def test_panel_no_effects_weighted(data):
    """Weighted PanelOLS without effects is identical to weighted PooledOLS."""
    panel_res = PanelOLS(data.y, data.x, weights=data.w).fit()
    pooled_res = PooledOLS(data.y, data.x, weights=data.w).fit()
    assert_results_equal(panel_res, pooled_res)
def test_panel_entity_lsdv(data):
    """Entity-effect PanelOLS matches OLS on entity dummies (LSDV).

    Compares parameters and covariance under unadjusted, robust and several
    clustered covariance estimators.
    """
    mod = PanelOLS(data.y, data.x, entity_effects=True)
    res = mod.fit(auto_df=False, count_effects=False, debiased=False)
    y = mod.dependent.dataframe
    x = mod.exog.dataframe
    if mod.has_constant:
        # Drop one dummy and demean against the constant to avoid collinearity
        d = mod.dependent.dummies("entity", drop_first=True)
        z = np.ones_like(y)
        d_demean = d.values - z @ lstsq(z, d.values, rcond=None)[0]
    else:
        d = mod.dependent.dummies("entity", drop_first=False)
        d_demean = d.values
    xd = np.c_[x.values, d_demean]
    xd = pd.DataFrame(xd, index=x.index, columns=list(x.columns) + list(d.columns))
    ols_mod = IV2SLS(y, xd, None, None)
    res2 = ols_mod.fit(cov_type="unadjusted", debiased=False)
    assert_results_equal(res, res2, test_fit=False)
    assert_allclose(res.rsquared_inclusive, res2.rsquared)
    # Robust (heteroskedasticity-consistent) covariance
    res = mod.fit(cov_type="robust", auto_df=False, count_effects=False, debiased=False)
    res2 = ols_mod.fit(cov_type="robust")
    assert_results_equal(res, res2, test_fit=False)
    # One-way clustering using externally supplied clusters
    clusters = data.vc1
    ols_clusters = mod.reformat_clusters(data.vc1)
    res = mod.fit(
        cov_type="clustered",
        clusters=clusters,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=ols_clusters.dataframe)
    assert_results_equal(res, res2, test_fit=False)
    # Two-way clustering using externally supplied clusters
    clusters = data.vc2
    ols_clusters = mod.reformat_clusters(data.vc2)
    res = mod.fit(
        cov_type="clustered",
        clusters=clusters,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=ols_clusters.dataframe)
    assert_results_equal(res, res2, test_fit=False)
    # Clustering by time via the convenience flag
    res = mod.fit(
        cov_type="clustered",
        cluster_time=True,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    clusters = pd.DataFrame(
        mod.dependent.time_ids, index=mod.dependent.index, columns=["var.clust"]
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=clusters)
    assert_results_equal(res, res2, test_fit=False)
    # Clustering by entity via the convenience flag
    res = mod.fit(
        cov_type="clustered",
        cluster_entity=True,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    clusters = pd.DataFrame(
        mod.dependent.entity_ids, index=mod.dependent.index, columns=["var.clust"]
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=clusters)
    assert_results_equal(res, res2, test_fit=False)
def test_panel_entity_fwl(data):
    """Entity-effect PanelOLS matches Frisch-Waugh-Lovell partialling.

    Dummies are projected out of both y and x before running plain OLS.
    """
    mod = PanelOLS(data.y, data.x, entity_effects=True)
    res = mod.fit(auto_df=False, count_effects=False, debiased=False)
    y = mod.dependent.dataframe
    x = mod.exog.dataframe
    if mod.has_constant:
        # Drop one dummy and demean against the constant to avoid collinearity
        d = mod.dependent.dummies("entity", drop_first=True)
        z = np.ones_like(y)
        d_demean = d.values - z @ lstsq(z, d.values, rcond=None)[0]
    else:
        d = mod.dependent.dummies("entity", drop_first=False)
        d_demean = d.values
    # FWL: residualize x and y on the (demeaned) dummies
    x = x - d_demean @ lstsq(d_demean, x, rcond=None)[0]
    y = y - d_demean @ lstsq(d_demean, y, rcond=None)[0]
    ols_mod = IV2SLS(y, x, None, None)
    res2 = ols_mod.fit(cov_type="unadjusted")
    assert_results_equal(res, res2, test_df=False)
    res = mod.fit(cov_type="robust", auto_df=False, count_effects=False, debiased=False)
    res2 = ols_mod.fit(cov_type="robust")
    assert_results_equal(res, res2, test_df=False)
def test_panel_time_lsdv(large_data):
    """Time-effect PanelOLS matches OLS on time dummies (LSDV).

    Compares parameters and covariance under unadjusted, robust and several
    clustered covariance estimators.
    """
    mod = PanelOLS(large_data.y, large_data.x, time_effects=True)
    res = mod.fit(auto_df=False, count_effects=False, debiased=False)
    y = mod.dependent.dataframe
    x = mod.exog.dataframe
    # Drop the first dummy only when a constant is present
    d = mod.dependent.dummies("time", drop_first=mod.has_constant)
    d_cols = list(d.columns)
    d = d.values
    if mod.has_constant:
        # Demean the dummies against the constant to avoid collinearity
        z = np.ones_like(y)
        d = d - z @ lstsq(z, d, rcond=None)[0]
    xd = np.c_[x.values, d]
    xd = pd.DataFrame(xd, index=x.index, columns=list(x.columns) + d_cols)
    ols_mod = IV2SLS(y, xd, None, None)
    res2 = ols_mod.fit(cov_type="unadjusted")
    assert_results_equal(res, res2, test_fit=False)
    assert_allclose(res.rsquared_inclusive, res2.rsquared)
    # Robust covariance
    res = mod.fit(cov_type="robust", auto_df=False, count_effects=False, debiased=False)
    res2 = ols_mod.fit(cov_type="robust")
    assert_results_equal(res, res2, test_fit=False)
    # One-way clustering using externally supplied clusters
    clusters = large_data.vc1
    ols_clusters = mod.reformat_clusters(clusters)
    res = mod.fit(
        cov_type="clustered",
        clusters=clusters,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=ols_clusters.dataframe)
    assert_results_equal(res, res2, test_fit=False)
    # Two-way clustering using externally supplied clusters
    clusters = large_data.vc2
    ols_clusters = mod.reformat_clusters(clusters)
    res = mod.fit(
        cov_type="clustered",
        clusters=clusters,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=ols_clusters.dataframe)
    assert_results_equal(res, res2, test_fit=False)
    # Clustering by time via the convenience flag
    res = mod.fit(
        cov_type="clustered",
        cluster_time=True,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    clusters = pd.DataFrame(
        mod.dependent.time_ids, index=mod.dependent.index, columns=["var.clust"]
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=clusters)
    assert_results_equal(res, res2, test_fit=False)
    # Clustering by entity via the convenience flag
    res = mod.fit(
        cov_type="clustered",
        cluster_entity=True,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    clusters = pd.DataFrame(
        mod.dependent.entity_ids, index=mod.dependent.index, columns=["var.clust"]
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=clusters)
    assert_results_equal(res, res2, test_fit=False)
def test_panel_time_fwl(data):
    """Time-effect PanelOLS matches Frisch-Waugh-Lovell partialling."""
    mod = PanelOLS(data.y, data.x, time_effects=True)
    res = mod.fit(auto_df=False, count_effects=False, debiased=False)
    y = mod.dependent.dataframe
    x = mod.exog.dataframe
    # Drop the first dummy only when a constant is present
    d = mod.dependent.dummies("time", drop_first=mod.has_constant)
    d = d.values
    if mod.has_constant:
        # Demean the dummies against the constant to avoid collinearity
        z = np.ones_like(y)
        d = d - z @ lstsq(z, d, rcond=None)[0]
    # FWL: residualize x and y on the dummies
    x = x - d @ lstsq(d, x, rcond=None)[0]
    y = y - d @ lstsq(d, y, rcond=None)[0]
    ols_mod = IV2SLS(y, x, None, None)
    res2 = ols_mod.fit(cov_type="unadjusted")
    assert_results_equal(res, res2, test_df=False)
    res = mod.fit(cov_type="robust", auto_df=False, count_effects=False, debiased=False)
    res2 = ols_mod.fit(cov_type="robust")
    assert_results_equal(res, res2, test_df=False)
def test_panel_both_lsdv(data):
    """Two-way effects PanelOLS matches OLS on stacked dummies (LSDV).

    Compares parameters and covariance under unadjusted, robust and several
    clustered covariance estimators.
    """
    mod = PanelOLS(data.y, data.x, entity_effects=True, time_effects=True)
    res = mod.fit(auto_df=False, count_effects=False, debiased=False)
    y = mod.dependent.dataframe
    x = mod.exog.dataframe
    # One time dummy is always dropped; an entity dummy is dropped only when
    # a constant is present
    d1 = mod.dependent.dummies("entity", drop_first=mod.has_constant)
    d2 = mod.dependent.dummies("time", drop_first=True)
    d = np.c_[d1.values, d2.values]
    if mod.has_constant:
        # Demean the stacked dummies against the constant
        z = np.ones_like(y)
        d = d - z @ lstsq(z, d, rcond=None)[0]
    xd = np.c_[x.values, d]
    xd = pd.DataFrame(
        xd, index=x.index, columns=list(x.columns) + list(d1.columns) + list(d2.columns)
    )
    ols_mod = IV2SLS(y, xd, None, None)
    res2 = ols_mod.fit(cov_type="unadjusted")
    assert_results_equal(res, res2, test_fit=False)
    assert_allclose(res.rsquared_inclusive, res2.rsquared)
    # Robust covariance
    res = mod.fit(cov_type="robust", auto_df=False, count_effects=False, debiased=False)
    res2 = ols_mod.fit(cov_type="robust")
    assert_results_equal(res, res2, test_fit=False)
    # One-way clustering using externally supplied clusters
    clusters = data.vc1
    ols_clusters = mod.reformat_clusters(clusters)
    res = mod.fit(
        cov_type="clustered",
        clusters=clusters,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=ols_clusters.dataframe)
    assert_results_equal(res, res2, test_fit=False)
    # Two-way clustering using externally supplied clusters
    clusters = data.vc2
    ols_clusters = mod.reformat_clusters(clusters)
    res = mod.fit(
        cov_type="clustered",
        clusters=clusters,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=ols_clusters.dataframe)
    assert_results_equal(res, res2, test_fit=False)
    # Clustering by time via the convenience flag
    res = mod.fit(
        cov_type="clustered",
        cluster_time=True,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    clusters = pd.DataFrame(
        mod.dependent.time_ids, index=mod.dependent.index, columns=["var.clust"]
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=clusters)
    assert_results_equal(res, res2, test_fit=False)
    # Clustering by entity via the convenience flag
    res = mod.fit(
        cov_type="clustered",
        cluster_entity=True,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    clusters = pd.DataFrame(
        mod.dependent.entity_ids, index=mod.dependent.index, columns=["var.clust"]
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=clusters)
    assert_results_equal(res, res2, test_fit=False)
def test_panel_both_fwl(data):
    """Two-way effects PanelOLS matches Frisch-Waugh-Lovell partialling."""
    mod = PanelOLS(data.y, data.x, entity_effects=True, time_effects=True)
    res = mod.fit(auto_df=False, count_effects=False, debiased=False)
    y = mod.dependent.dataframe
    x = mod.exog.dataframe
    # One time dummy is always dropped; an entity dummy is dropped only when
    # a constant is present
    d1 = mod.dependent.dummies("entity", drop_first=mod.has_constant)
    d2 = mod.dependent.dummies("time", drop_first=True)
    d = np.c_[d1.values, d2.values]
    if mod.has_constant:
        # Demean the stacked dummies against the constant
        z = np.ones_like(y)
        d = d - z @ lstsq(z, d, rcond=None)[0]
    # FWL: residualize x and y on the dummies
    x = x - d @ lstsq(d, x, rcond=None)[0]
    y = y - d @ lstsq(d, y, rcond=None)[0]
    ols_mod = IV2SLS(y, x, None, None)
    res2 = ols_mod.fit(cov_type="unadjusted")
    assert_results_equal(res, res2, test_df=False)
    res = mod.fit(cov_type="robust", auto_df=False, count_effects=False, debiased=False)
    res2 = ols_mod.fit(cov_type="robust")
    assert_results_equal(res, res2, test_df=False)
def test_panel_entity_lsdv_weighted(data):
    """Weighted entity-effect PanelOLS matches weighted OLS on dummies.

    Covers unadjusted, robust and clustered covariance estimators.
    """
    mod = PanelOLS(data.y, data.x, entity_effects=True, weights=data.w)
    res = mod.fit(auto_df=False, count_effects=False, debiased=False)
    y = mod.dependent.dataframe
    x = mod.exog.dataframe
    w = mod.weights.dataframe
    d = mod.dependent.dummies("entity", drop_first=mod.has_constant)
    d_cols = d.columns
    d = d.values
    if mod.has_constant:
        # Weighted demeaning of the dummies against the constant
        z = np.ones_like(y)
        root_w = np.sqrt(w.values)
        wd = root_w * d
        wz = root_w * z
        d = d - z @ lstsq(wz, wd, rcond=None)[0]
    xd = np.c_[x.values, d]
    xd = pd.DataFrame(xd, index=x.index, columns=list(x.columns) + list(d_cols))
    ols_mod = IV2SLS(y, xd, None, None, weights=w)
    res2 = ols_mod.fit(cov_type="unadjusted")
    assert_results_equal(res, res2, test_fit=False)
    assert_allclose(res.rsquared_inclusive, res2.rsquared)
    # Robust covariance
    res = mod.fit(cov_type="robust", auto_df=False, count_effects=False, debiased=False)
    res2 = ols_mod.fit(cov_type="robust")
    assert_results_equal(res, res2, test_fit=False)
    # One-way clustering using externally supplied clusters
    clusters = data.vc1
    ols_clusters = mod.reformat_clusters(clusters)
    res = mod.fit(
        cov_type="clustered",
        clusters=clusters,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=ols_clusters.dataframe)
    assert_results_equal(res, res2, test_fit=False)
    # Two-way clustering using externally supplied clusters
    clusters = data.vc2
    ols_clusters = mod.reformat_clusters(clusters)
    res = mod.fit(
        cov_type="clustered",
        clusters=clusters,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=ols_clusters.dataframe)
    assert_results_equal(res, res2, test_fit=False)
    # Clustering by time via the convenience flag
    res = mod.fit(
        cov_type="clustered",
        cluster_time=True,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    clusters = pd.DataFrame(
        mod.dependent.time_ids, index=mod.dependent.index, columns=["var.clust"]
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=clusters)
    assert_results_equal(res, res2, test_fit=False)
    # Clustering by entity via the convenience flag
    res = mod.fit(
        cov_type="clustered",
        cluster_entity=True,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    clusters = pd.DataFrame(
        mod.dependent.entity_ids, index=mod.dependent.index, columns=["var.clust"]
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=clusters)
    assert_results_equal(res, res2, test_fit=False)
def test_panel_time_lsdv_weighted(large_data):
    """Weighted time-effect PanelOLS matches weighted OLS on dummies.

    Covers unadjusted, robust and clustered covariance estimators.
    """
    mod = PanelOLS(large_data.y, large_data.x, time_effects=True, weights=large_data.w)
    res = mod.fit(auto_df=False, count_effects=False, debiased=False)
    y = mod.dependent.dataframe
    x = mod.exog.dataframe
    w = mod.weights.dataframe
    d = mod.dependent.dummies("time", drop_first=mod.has_constant)
    d_cols = d.columns
    d = d.values
    if mod.has_constant:
        # Weighted demeaning of the dummies against the constant
        z = np.ones_like(y)
        root_w = np.sqrt(w.values)
        wd = root_w * d
        wz = root_w * z
        d = d - z @ lstsq(wz, wd, rcond=None)[0]
    xd = np.c_[x.values, d]
    xd = pd.DataFrame(xd, index=x.index, columns=list(x.columns) + list(d_cols))
    ols_mod = IV2SLS(y, xd, None, None, weights=w)
    res2 = ols_mod.fit(cov_type="unadjusted")
    assert_results_equal(res, res2, test_fit=False)
    # Robust covariance
    res = mod.fit(cov_type="robust", auto_df=False, count_effects=False, debiased=False)
    res2 = ols_mod.fit(cov_type="robust")
    assert_results_equal(res, res2, test_fit=False)
    # One-way clustering using externally supplied clusters
    clusters = large_data.vc1
    ols_clusters = mod.reformat_clusters(clusters)
    res = mod.fit(
        cov_type="clustered",
        clusters=clusters,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=ols_clusters.dataframe)
    assert_results_equal(res, res2, test_fit=False)
    # Two-way clustering using externally supplied clusters
    clusters = large_data.vc2
    ols_clusters = mod.reformat_clusters(clusters)
    res = mod.fit(
        cov_type="clustered",
        clusters=clusters,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=ols_clusters.dataframe)
    assert_results_equal(res, res2, test_fit=False)
    # Clustering by time via the convenience flag
    res = mod.fit(
        cov_type="clustered",
        cluster_time=True,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    clusters = pd.DataFrame(
        mod.dependent.time_ids, index=mod.dependent.index, columns=["var.clust"]
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=clusters)
    assert_results_equal(res, res2, test_fit=False)
    # Clustering by entity via the convenience flag
    res = mod.fit(
        cov_type="clustered",
        cluster_entity=True,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    clusters = pd.DataFrame(
        mod.dependent.entity_ids, index=mod.dependent.index, columns=["var.clust"]
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=clusters)
    assert_results_equal(res, res2, test_fit=False)
def test_panel_both_lsdv_weighted(data):
    """Weighted two-way effects PanelOLS matches weighted OLS on dummies.

    Covers unadjusted, robust and clustered covariance estimators.
    """
    mod = PanelOLS(
        data.y, data.x, entity_effects=True, time_effects=True, weights=data.w
    )
    res = mod.fit(auto_df=False, count_effects=False, debiased=False)
    y = mod.dependent.dataframe
    x = mod.exog.dataframe
    w = mod.weights.dataframe
    # One time dummy is always dropped; an entity dummy is dropped only when
    # a constant is present
    d1 = mod.dependent.dummies("entity", drop_first=mod.has_constant)
    d2 = mod.dependent.dummies("time", drop_first=True)
    d = np.c_[d1.values, d2.values]
    if mod.has_constant:
        # Weighted demeaning of the stacked dummies against the constant
        z = np.ones_like(y)
        root_w = np.sqrt(w.values)
        wd = root_w * d
        wz = root_w * z
        d = d - z @ lstsq(wz, wd, rcond=None)[0]
    xd = np.c_[x.values, d]
    xd = pd.DataFrame(
        xd, index=x.index, columns=list(x.columns) + list(d1.columns) + list(d2.columns)
    )
    ols_mod = IV2SLS(y, xd, None, None, weights=w)
    res2 = ols_mod.fit(cov_type="unadjusted")
    assert_results_equal(res, res2, test_fit=False)
    assert_allclose(res.rsquared_inclusive, res2.rsquared)
    # Robust covariance
    res = mod.fit(cov_type="robust", auto_df=False, count_effects=False, debiased=False)
    res2 = ols_mod.fit(cov_type="robust")
    assert_results_equal(res, res2, test_fit=False)
    # One-way clustering using externally supplied clusters
    clusters = data.vc1
    ols_clusters = mod.reformat_clusters(clusters)
    res = mod.fit(
        cov_type="clustered",
        clusters=clusters,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=ols_clusters.dataframe)
    assert_results_equal(res, res2, test_fit=False)
    # Two-way clustering using externally supplied clusters
    clusters = data.vc2
    ols_clusters = mod.reformat_clusters(clusters)
    res = mod.fit(
        cov_type="clustered",
        clusters=clusters,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=ols_clusters.dataframe)
    assert_results_equal(res, res2, test_fit=False)
    # Clustering by time via the convenience flag
    res = mod.fit(
        cov_type="clustered",
        cluster_time=True,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    clusters = pd.DataFrame(
        mod.dependent.time_ids, index=mod.dependent.index, columns=["var.clust"]
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=clusters)
    assert_results_equal(res, res2, test_fit=False)
    # Clustering by entity via the convenience flag
    res = mod.fit(
        cov_type="clustered",
        cluster_entity=True,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    clusters = pd.DataFrame(
        mod.dependent.entity_ids, index=mod.dependent.index, columns=["var.clust"]
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=clusters)
    assert_results_equal(res, res2, test_fit=False)
def test_panel_entity_other_equivalence(data):
    """Entity effects equal an other-effect built from entity ids."""
    mod = PanelOLS(data.y, data.x, entity_effects=True)
    res = mod.fit()
    dep = mod.dependent.dataframe
    exog = mod.exog.dataframe
    cats = pd.DataFrame(mod.dependent.entity_ids, index=mod.dependent.index)
    res2 = PanelOLS(dep, exog, other_effects=cats).fit()
    assert_results_equal(res, res2)
    assert "Model includes 1 other effect" in res2.summary.as_text()
def test_panel_time_other_equivalence(data):
    """Time effects equal an other-effect built from time ids."""
    mod = PanelOLS(data.y, data.x, time_effects=True)
    res = mod.fit()
    dep = mod.dependent.dataframe
    exog = mod.exog.dataframe
    cats = pd.DataFrame(mod.dependent.time_ids, index=mod.dependent.index)
    res2 = PanelOLS(dep, exog, other_effects=cats).fit()
    assert_results_equal(res, res2)
    assert "Model includes 1 other effect" in res2.summary.as_text()
def test_panel_entity_time_other_equivalence(data):
    """Entity+time effects equal two other-effects from entity/time ids."""
    mod = PanelOLS(data.y, data.x, entity_effects=True, time_effects=True)
    res = mod.fit()
    dep = mod.dependent.dataframe
    exog = mod.exog.dataframe
    combined = np.c_[mod.dependent.entity_ids, mod.dependent.time_ids]
    cats = pd.DataFrame(combined, index=mod.dependent.index)
    res2 = PanelOLS(dep, exog, other_effects=cats).fit()
    assert_results_equal(res, res2)
    assert "Model includes 2 other effects" in res2.summary.as_text()
def test_panel_other_lsdv(data):
    """Other-effects PanelOLS matches OLS on category dummies (LSDV).

    Covers unadjusted, robust and clustered covariance estimators.
    """
    mod = PanelOLS(data.y, data.x, other_effects=data.c)
    assert "Num Other Effects: 2" in str(mod)
    res = mod.fit(auto_df=False, count_effects=False, debiased=False)
    y = mod.dependent.dataframe.copy()
    x = mod.exog.dataframe.copy()
    c = mod._other_effect_cats.dataframe.copy()
    d = []
    d_columns = []
    for i, col in enumerate(c):
        s = c[col].copy()
        # Drop the first category when a constant is present, and always
        # after the first effect, to keep the dummy matrix full rank.
        dummies = pd.get_dummies(
            s.astype(np.int64), drop_first=(mod.has_constant or i > 0)
        )
        # NOTE: the comprehension variable ``c`` intentionally shadows the
        # outer DataFrame within the comprehension scope only.
        dummies.columns = [s.name + "_val_" + str(c) for c in dummies.columns]
        d_columns.extend(list(dummies.columns))
        d.append(dummies.values)
    d = np.column_stack(d)
    if mod.has_constant:
        # Demean the dummies against the constant to avoid collinearity
        z = np.ones_like(y)
        d = d - z @ lstsq(z, d, rcond=None)[0]
    xd = np.c_[x.values, d]
    xd = pd.DataFrame(xd, index=x.index, columns=list(x.columns) + list(d_columns))
    ols_mod = IV2SLS(y, xd, None, None)
    res2 = ols_mod.fit(cov_type="unadjusted")
    assert_results_equal(res, res2, test_fit=False)
    # Explicit cov_type="unadjusted" must equal the default
    res3 = mod.fit(
        cov_type="unadjusted", auto_df=False, count_effects=False, debiased=False
    )
    assert_results_equal(res, res3)
    # Robust covariance
    res = mod.fit(cov_type="robust", auto_df=False, count_effects=False, debiased=False)
    res2 = ols_mod.fit(cov_type="robust")
    assert_results_equal(res, res2, test_fit=False)
    # One-way clustering using externally supplied clusters
    clusters = data.vc1
    ols_clusters = mod.reformat_clusters(clusters)
    res = mod.fit(
        cov_type="clustered",
        clusters=clusters,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=ols_clusters.dataframe)
    assert_results_equal(res, res2, test_fit=False)
    # Two-way clustering using externally supplied clusters
    clusters = data.vc2
    ols_clusters = mod.reformat_clusters(clusters)
    res = mod.fit(
        cov_type="clustered",
        clusters=clusters,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=ols_clusters.dataframe)
    assert_results_equal(res, res2, test_fit=False)
    # Clustering by time via the convenience flag
    res = mod.fit(
        cov_type="clustered",
        cluster_time=True,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    clusters = pd.DataFrame(
        mod.dependent.time_ids, index=mod.dependent.index, columns=["var.clust"]
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=clusters)
    assert_results_equal(res, res2, test_fit=False)
    # Clustering by entity via the convenience flag
    res = mod.fit(
        cov_type="clustered",
        cluster_entity=True,
        auto_df=False,
        count_effects=False,
        debiased=False,
    )
    clusters = pd.DataFrame(
        mod.dependent.entity_ids, index=mod.dependent.index, columns=["var.clust"]
    )
    res2 = ols_mod.fit(cov_type="clustered", clusters=clusters)
    assert_results_equal(res, res2, test_fit=False)
def test_panel_other_fwl(data):
    """Other-effects PanelOLS matches Frisch-Waugh-Lovell partialling."""
    mod = PanelOLS(data.y, data.x, other_effects=data.c)
    res = mod.fit(auto_df=False, count_effects=False, debiased=False)
    y = mod.dependent.dataframe
    x = mod.exog.dataframe
    c = mod._other_effect_cats.dataframe
    d = []
    d_columns = []
    for i, col in enumerate(c):
        s = c[col].copy()
        # Drop the first category when a constant is present, and always
        # after the first effect, to keep the dummy matrix full rank.
        dummies = pd.get_dummies(
            s.astype(np.int64), drop_first=(mod.has_constant or i > 0)
        )
        # NOTE: the comprehension variable ``c`` intentionally shadows the
        # outer DataFrame within the comprehension scope only.
        dummies.columns = [s.name + "_val_" + str(c) for c in dummies.columns]
        d_columns.extend(list(dummies.columns))
        d.append(dummies.values)
    d = np.column_stack(d)
    if mod.has_constant:
        # Demean the dummies against the constant to avoid collinearity
        z = np.ones_like(y)
        d = d - z @ lstsq(z, d, rcond=None)[0]
    # FWL: residualize x and y on the dummies
    x = x - d @ lstsq(d, x, rcond=None)[0]
    y = y - d @ lstsq(d, y, rcond=None)[0]
    ols_mod = IV2SLS(y, x, None, None)
    res2 = ols_mod.fit(cov_type="unadjusted")
    assert_results_equal(res, res2, test_df=False)
    res = mod.fit(cov_type="robust", auto_df=False, count_effects=False, debiased=False)
    res2 = ols_mod.fit(cov_type="robust")
    assert_results_equal(res, res2, test_df=False)
def test_panel_other_incorrect_size(data):
    """Other-effect categories with too few rows raise ValueError."""
    mod = PanelOLS(data.y, data.x, entity_effects=True)
    dep = mod.dependent.dataframe
    exog = mod.exog.dataframe
    cats = pd.DataFrame(mod.dependent.entity_ids, index=mod.dependent.index)
    cats = PanelData(cats)
    # Keep only half of the observations so the shapes no longer agree
    truncated = cats.dataframe.iloc[: cats.dataframe.shape[0] // 2, :]
    with pytest.raises(ValueError):
        PanelOLS(dep, exog, other_effects=truncated)
def test_results_access(data):
    """Smoke-test attribute access on results for a range of specifications."""
    specs = [
        PanelOLS(data.y, data.x, entity_effects=True),
        PanelOLS(data.y, data.x, other_effects=data.c),
        PanelOLS(data.y, data.x, time_effects=True, entity_effects=True),
        PanelOLS(data.y, data.x),
    ]
    for mod in specs:
        access_attributes(mod.fit())
    # Constant-only specification
    const = PanelData(data.y).copy()
    const.dataframe.iloc[:, :] = 1
    const.dataframe.columns = ["const"]
    access_attributes(PanelOLS(data.y, const).fit())
def test_alt_rsquared(data):
    """With entity effects, rsquared equals the within rsquared."""
    res = PanelOLS(data.y, data.x, entity_effects=True).fit(debiased=False)
    assert_allclose(res.rsquared, res.rsquared_within)
def test_alt_rsquared_weighted(data):
    """With weights and entity effects, rsquared equals the within rsquared."""
    mod = PanelOLS(data.y, data.x, entity_effects=True, weights=data.w)
    result = mod.fit(debiased=False)
    assert_allclose(result.rsquared, result.rsquared_within)
def test_too_many_effects(data):
    """Requesting entity, time and other effects together raises."""
    with pytest.raises(ValueError):
        PanelOLS(
            data.y, data.x, entity_effects=True, time_effects=True, other_effects=data.c
        )
def test_cov_equiv_cluster(data):
    """Convenience cluster flags equal explicitly supplied clusters."""
    mod = PanelOLS(data.y, data.x, entity_effects=True)
    res = mod.fit(cov_type="clustered", cluster_entity=True, debiased=False)
    y = PanelData(data.y)
    # cluster_entity=True should be identical to clustering on entity ids
    clusters = pd.DataFrame(y.entity_ids, index=y.index)
    res2 = mod.fit(cov_type="clustered", clusters=clusters, debiased=False)
    assert_results_equal(res, res2)
    mod = PanelOLS(data.y, data.x, time_effects=True)
    res = mod.fit(cov_type="clustered", cluster_time=True, debiased=False)
    y = PanelData(data.y)
    # cluster_time=True should be identical to clustering on time ids
    clusters = pd.DataFrame(y.time_ids, index=y.index)
    res2 = mod.fit(cov_type="clustered", clusters=clusters, debiased=False)
    assert_results_equal(res, res2)
    # Omitting clusters entirely equals passing clusters=None
    res = mod.fit(cov_type="clustered", debiased=False)
    res2 = mod.fit(cov_type="clustered", clusters=None, debiased=False)
    assert_results_equal(res, res2)
@pytest.mark.smoke
def test_cluster_smoke(data):
    """Smoke-test valid cluster configurations and reject invalid ones."""
    mod = PanelOLS(data.y, data.x, entity_effects=True)
    mod.fit(cov_type="clustered", cluster_time=True, debiased=False)
    mod.fit(cov_type="clustered", cluster_entity=True, debiased=False)
    c2 = PanelData(data.vc2)
    c1 = PanelData(data.vc1)
    mod.fit(cov_type="clustered", clusters=c2, debiased=False)
    mod.fit(cov_type="clustered", cluster_entity=True, clusters=c1, debiased=False)
    mod.fit(cov_type="clustered", cluster_time=True, clusters=c1, debiased=False)
    # Combining a flag with two-variable clusters exceeds the two-way limit
    with pytest.raises(ValueError):
        mod.fit(cov_type="clustered", cluster_time=True, clusters=c2, debiased=False)
    with pytest.raises(ValueError):
        mod.fit(cov_type="clustered", cluster_entity=True, clusters=c2, debiased=False)
    with pytest.raises(ValueError):
        mod.fit(
            cov_type="clustered",
            cluster_entity=True,
            cluster_time=True,
            clusters=c1,
            debiased=False,
        )
    # Clusters with the wrong number of observations are rejected
    with pytest.raises(ValueError):
        clusters = c1.dataframe.iloc[: c1.dataframe.shape[0] // 2]
        mod.fit(cov_type="clustered", clusters=clusters, debiased=False)
def test_f_pooled(data):
    """F-test of the pooled model against models with effects.

    The statistic is computed by hand from restricted (pooled) and
    unrestricted residuals and compared with ``res.f_pooled`` for entity,
    time and two-way effect specifications.  Refactored to compute the
    manual statistic once in a local helper instead of three verbatim
    copies.
    """
    mod = PanelOLS(data.y, data.x, entity_effects=True)
    res = mod.fit(debiased=False)
    if mod.has_constant:
        mod2 = PooledOLS(data.y, data.x)
    else:
        # The pooled comparison model always needs an intercept
        exog = mod.exog.dataframe.copy()
        exog["Intercept"] = 1.0
        mod2 = PooledOLS(mod.dependent.dataframe, exog)
    res2 = mod2.fit(debiased=False)

    def assert_f_pooled(unrestricted):
        # Classic F statistic: ((SSR_r - SSR_u) / v1) / (SSR_u / v2)
        eps = unrestricted.resids.values
        eps2 = res2.resids.values
        v1 = unrestricted.df_model - res2.df_model
        v2 = unrestricted.df_resid
        f_pool = (eps2.T @ eps2 - eps.T @ eps) / v1
        f_pool /= (eps.T @ eps) / v2
        f_pool = float(f_pool)
        assert_allclose(unrestricted.f_pooled.stat, f_pool)
        assert unrestricted.f_pooled.df == v1
        assert unrestricted.f_pooled.df_denom == v2

    assert_f_pooled(res)
    res = PanelOLS(data.y, data.x, time_effects=True).fit(debiased=False)
    assert_f_pooled(res)
    res = PanelOLS(data.y, data.x, entity_effects=True, time_effects=True).fit(
        debiased=False
    )
    assert_f_pooled(res)
def test_entity_other(data):
    """Entity ids used as an other-effect equal explicit entity effects."""
    y = PanelData(data.y)
    x = PanelData(data.x)
    c = PanelData(data.c).copy()
    # Align all inputs on a common non-missing sample
    missing = y.isnull | x.isnull | c.isnull
    y.drop(missing)
    x.drop(missing)
    c.drop(missing)
    # Replace the second category with the entity ids
    c_entity = c.dataframe.copy()
    c_entity.iloc[:, 1] = y.entity_ids.squeeze()
    c_entity = c_entity.astype(np.int64)
    mod = PanelOLS(y, x, other_effects=c_entity)
    res = mod.fit(debiased=False)
    # Equivalent model: first category as other effect + entity_effects flag
    c_only = PanelData(c.dataframe.iloc[:, [0]].astype(np.int64))
    mod2 = PanelOLS(y, x, other_effects=c_only, entity_effects=True)
    res2 = mod2.fit(debiased=False)
    assert_results_equal(res, res2)
@pytest.mark.smoke
def test_other_weighted_smoke(data):
    """Smoke-test weighted estimation with other effects."""
    model = PanelOLS(data.y, data.x, weights=data.w, other_effects=data.c)
    model.fit(debiased=False)
@pytest.mark.slow
def test_methods_equivalent(data, lsdv_config):
    """Default, LSDV and LSMR estimation paths produce matching results."""
    other_effects = None
    if lsdv_config.other_effects == 1:
        other_effects = PanelData(data.c).dataframe.iloc[:, [0]]
    elif lsdv_config.other_effects == 2:
        other_effects = data.c
    weights = data.w if lsdv_config.weights else None
    mod = PanelOLS(
        data.y,
        data.x,
        weights=weights,
        entity_effects=lsdv_config.entity_effects,
        time_effects=lsdv_config.time_effects,
        other_effects=other_effects,
    )
    res1 = mod.fit()
    res2 = mod.fit(use_lsdv=True)
    res3 = mod.fit(use_lsmr=True)
    assert_results_equal(res1, res2)
    # LSMR is iterative, so only require approximate agreement
    assert_results_equal(res2, res3, strict=False)
def test_rsquared_inclusive_equivalence(data):
    """Without effects, rsquared equals rsquared_inclusive (weighted too)."""
    unweighted = PanelOLS(data.y, data.x).fit()
    assert_allclose(unweighted.rsquared, unweighted.rsquared_inclusive)
    weighted = PanelOLS(data.y, data.x, weights=data.w).fit()
    assert_allclose(weighted.rsquared, weighted.rsquared_inclusive)
def test_panel_effects_sanity(data):
    """Sanity check: y must decompose as X @ beta + residuals + estimated effects.

    The same identity is verified for entity effects, two-way effects, and
    both again with weights; the repeated check body is factored into a
    single helper instead of four copy-pasted blocks.
    """

    def _check_decomposition(mod):
        # Verify dependent == fitted + idiosyncratic + estimated effects.
        res = mod.fit(auto_df=False, count_effects=False)
        reconstructed = mod.exog.values2d @ res.params.values[:, None]
        reconstructed = reconstructed + res.resids.values[:, None]
        reconstructed = reconstructed + res.estimated_effects.values
        assert_allclose(mod.dependent.values2d, reconstructed)

    _check_decomposition(PanelOLS(data.y, data.x, entity_effects=True))
    _check_decomposition(
        PanelOLS(data.y, data.x, entity_effects=True, time_effects=True)
    )
    _check_decomposition(
        PanelOLS(data.y, data.x, weights=data.w, entity_effects=True)
    )
    _check_decomposition(
        PanelOLS(
            data.y, data.x, weights=data.w, entity_effects=True, time_effects=True
        )
    )
def test_fitted_effects_residuals(data, entity_eff, time_eff):
    """fitted_values, idiosyncratic and estimated_effects must be mutually
    consistent and carry the expected labels."""
    mod = PanelOLS(data.y, data.x, entity_effects=entity_eff, time_effects=time_eff)
    res = mod.fit()
    # Fitted values are X @ beta with the exog index.
    frame = pd.DataFrame(
        mod.exog.values2d @ res.params.values,
        index=mod.exog.index,
        columns=["fitted_values"],
    )
    assert_allclose(res.fitted_values, frame)
    assert_frame_similar(res.fitted_values, frame)
    # Reuse the same frame for the residual comparison.
    frame.iloc[:, 0] = res.resids
    frame.columns = ["idiosyncratic"]
    assert_allclose(res.idiosyncratic, frame)
    assert_frame_similar(res.idiosyncratic, frame)
    # Effects are whatever remains after fitted values and residuals.
    explained = res.fitted_values + res.idiosyncratic.values
    frame.iloc[:, 0] = mod.dependent.values2d - explained
    frame.columns = ["estimated_effects"]
    assert_allclose(res.estimated_effects, frame, atol=1e-8)
    assert_frame_similar(res.estimated_effects, frame)
@pytest.mark.parametrize("weighted", [True, False])
def test_low_memory(data, weighted):
    """low_memory=True must not change the estimated parameters."""
    extra = {"weights": data.w} if weighted else {}
    mod = PanelOLS(data.y, data.x, entity_effects=True, time_effects=True, **extra)
    baseline = mod.fit()
    low_mem = mod.fit(low_memory=True)
    assert_allclose(baseline.params, low_mem.params)
def test_low_memory_auto():
    """A large two-way effects problem should trigger a MemoryWarning when the
    low-memory path is auto-selected."""
    nobs = 1000
    x = np.random.standard_normal((nobs, nobs))
    e = np.random.standard_normal((nobs, nobs))
    eff = np.arange(nobs)[:, None]
    # Signal plus noise plus additive entity/time effects, flattened to a panel.
    y = (x + e + eff + eff.T).ravel()
    mi = pd.MultiIndex.from_product([np.arange(nobs), np.arange(nobs)])
    dep = pd.Series(y, index=mi)
    exog = pd.DataFrame(x.reshape((nobs * nobs, 1)), index=mi)
    mod = PanelOLS(dep, exog, entity_effects=True, time_effects=True)
    with pytest.warns(MemoryWarning):
        mod.fit()
@pytest.mark.filterwarnings("ignore::linearmodels.shared.exceptions.SingletonWarning")
def test_singleton_removal():
    """Dropping singleton observations must leave parameter estimates unchanged."""
    # Triangular design: entity j appears on 6 - i occasions, producing singletons.
    entities = [
        "entity.{j}".format(j=j) for i in range(6) for j in range(6 - i)
    ]
    nobs = len(entities)
    times = np.arange(nobs) % 6
    index = pd.MultiIndex.from_arrays((entities, times))
    cols = ["x{0}".format(i) for i in range(3)]
    x = pd.DataFrame(np.random.randn(nobs, 3), index=index, columns=cols)
    y = pd.DataFrame(np.random.randn(nobs, 1), index=index)
    results = {}
    for keep in (False, True):
        mod = PanelOLS(
            y, x, singletons=keep, entity_effects=True, time_effects=True
        )
        results[keep] = mod.fit()
    assert_allclose(results[False].params, results[True].params)
@pytest.mark.filterwarnings("ignore::linearmodels.shared.exceptions.SingletonWarning")
def test_masked_singleton_removal():
    """Exactly two of the eight observations are singletons and must be dropped."""
    entities = ["A", "B", "C", "D"] * 2
    times = [0, 1, 1, 1, 1, 2, 2, 2]
    nobs = len(entities)
    index = pd.MultiIndex.from_arrays((entities, times))
    x = pd.DataFrame(np.random.randn(nobs, 1), index=index, columns=["x"])
    y = pd.DataFrame(np.random.randn(nobs, 1), index=index)
    res = PanelOLS(
        y, x, singletons=False, entity_effects=True, time_effects=True
    ).fit()
    assert res.nobs == 6
def test_singleton_removal_other_effects(data):
    """Singleton removal with other effects can only reduce the sample size."""
    common = dict(weights=data.w, other_effects=data.c)
    res_keep = PanelOLS(data.y, data.x, singletons=True, **common).fit()
    res_drop = PanelOLS(data.y, data.x, singletons=False, **common).fit(
        cov_type="clustered", clusters=data.vc1
    )
    assert res_drop.nobs <= res_keep.nobs
@pytest.mark.slow
@pytest.mark.filterwarnings("ignore::linearmodels.shared.exceptions.SingletonWarning")
@pytest.mark.parametrize("other_effects", [1, 2])
def test_singleton_removal_mixed(singleton_data, other_effects):
    """LSMR estimates must be unaffected by singleton removal."""
    if other_effects == 1:
        oe = PanelData(singleton_data.c).dataframe.iloc[:, [0]]
    else:  # other_effects == 2 by parametrization
        oe = singleton_data.c
    res_keep = PanelOLS(
        singleton_data.y, singleton_data.x, other_effects=oe
    ).fit(use_lsmr=True)
    res_drop = PanelOLS(
        singleton_data.y,
        singleton_data.x,
        other_effects=oe,
        singletons=False,
    ).fit(cov_type="clustered", clusters=singleton_data.vc2, use_lsmr=True)
    assert_allclose(res_keep.params, res_drop.params)
    assert res_drop.nobs <= res_keep.nobs
def test_repeated_measures_weight():
    """Weights must change the estimates when entities repeat time ids.

    Regression test for an issue reported by email: weighted and unweighted
    two-way fixed-effect fits on repeated-measures data must differ.
    """
    rs = np.random.RandomState(0)
    w = rs.chisquare(5, 300) / 5
    idx1 = ["a"] * 100 + ["b"] * 100 + ["c"] * 100
    idx2 = np.arange(300) % 25
    mi = pd.MultiIndex.from_arrays([idx1, idx2])
    df = pd.DataFrame(rs.standard_normal((300, 2)), index=mi, columns=["y", "x"])
    w = pd.Series(w, index=mi, name="weight")
    df["weight"] = w
    mod = PanelOLS.from_formula(
        "y ~ x + EntityEffects + TimeEffects", df, weights=df["weight"]
    )
    res = mod.fit()
    mod = PanelOLS.from_formula("y ~ x + EntityEffects + TimeEffects", df)
    res_un = mod.fit()
    # Use .iloc for positional access: Series[0] (label-style integer indexing)
    # is deprecated in pandas 2.x and removed in pandas 3.0.
    assert res.params.iloc[0] != res_un.params.iloc[0]
def test_absorbed(absorbed_data):
    """Auto-dropping an absorbed column must match estimation without it."""
    mod = PanelOLS(
        absorbed_data.y, absorbed_data.x, drop_absorbed=True, entity_effects=True
    )
    # The warning names the dropped column; the name depends on the input type.
    absorbed_name = (
        "x_absorbed" if isinstance(absorbed_data.y, pd.DataFrame) else "Exog.3"
    )
    with pytest.warns(AbsorbingEffectWarning, match=absorbed_name):
        res = mod.fit()
    # Manually remove the absorbed (last) column and refit without dropping.
    if isinstance(absorbed_data.x, np.ndarray):
        trimmed = absorbed_data.x[:-1]
    else:
        trimmed = absorbed_data.x.iloc[:, :-1]
    res_no = PanelOLS(
        absorbed_data.y, trimmed, drop_absorbed=False, entity_effects=True
    ).fit()
    assert_allclose(res.params, res_no.params)
    assert_results_equal(res, res_no)
def test_absorbed_option(data):
    """drop_absorbed must be a no-op when no regressor is absorbed."""
    fit_opts = dict(auto_df=False, count_effects=False, debiased=False)
    res_drop = PanelOLS(
        data.y, data.x, entity_effects=True, drop_absorbed=True
    ).fit(**fit_opts)
    res_keep = PanelOLS(
        data.y, data.x, entity_effects=True, drop_absorbed=False
    ).fit(**fit_opts)
    assert_results_equal(res_drop, res_keep)
def test_fully_absorbed():
    """Estimation must fail when entity effects absorb every regressor."""
    # Each entity has a constant x across its two periods, so entity effects
    # fully absorb the single regressor.
    base = np.repeat(np.arange(10), (2,))[:, None]
    noise = np.random.standard_normal(base.shape[0])
    y = base @ np.array([1]) + noise
    mi = pd.MultiIndex.from_product([np.arange(10), [1, 2]])
    exog = pd.DataFrame(base, index=mi, columns=["x"])
    dep = pd.Series(y, index=mi, name="y")
    with pytest.raises(ValueError, match="All columns in exog have been fully"):
        PanelOLS(dep, exog, drop_absorbed=True, entity_effects=True).fit()
def test_zero_endog():
    """An identically-zero dependent variable must still estimate cleanly."""
    base = np.repeat(np.arange(10), (2,))[:, None]
    mi = pd.MultiIndex.from_product([np.arange(10), [1, 2]])
    exog = pd.DataFrame(base, index=mi, columns=["x"])
    dep = pd.Series(base @ np.array([0]), index=mi, name="y")
    PanelOLS(dep, exog).fit()
def test_f_after_drop():
    """F-statistics must remain well-defined after absorbed columns are dropped."""
    rg = np.random.default_rng(918273645)
    n = 1000
    y = pd.Series(rg.standard_normal(n))
    # a1/a2/c are absorbed by the two-way effects and should be dropped.
    a1 = np.arange(n) % 10
    a2 = np.arange(n) // 100
    x = pd.DataFrame(
        {"x": rg.standard_normal(n), "a1": a1, "a2": a2, "c": np.ones(n)}
    )
    mi = pd.MultiIndex.from_product([list(range(100)), list(range(10))])
    y.index = mi
    x.index = mi
    mod = PanelOLS(y, x, drop_absorbed=True, entity_effects=True, time_effects=True)
    with pytest.warns(AbsorbingEffectWarning):
        res = mod.fit()
    for stat in (res.f_statistic, res.f_statistic_robust):
        assert isinstance(stat, WaldTestStatistic)
        assert stat.stat > 0
def test_predict_incorrect(data):
    """predict must reject mismatched params/exog shapes."""
    mod = PanelOLS(data.y, data.x)
    res = mod.fit()
    err = "exog does not have the correct"
    # Too few parameters for the supplied exog.
    with pytest.raises(ValueError, match=err):
        mod.predict(res.params.iloc[:-1], exog=data.x)
    # Too few exog columns for the supplied parameters.
    exog = np.asarray(data.x)
    exog = exog[:-1] if exog.ndim == 3 else exog[:, :-1]
    with pytest.raises(ValueError, match=err):
        mod.predict(res.params, exog=exog)
@pytest.mark.parametrize(
    "cov_config",
    [
        ("clustered", "cluster"),
        ("unadjusted", "bandwidth"),
        ("kernel", "bw"),
        ("robust", "clusters"),
    ],
)
def test_unknown_covconfig_kwargs(data, cov_config):
    """Unexpected covariance keywords must raise, naming the estimator (GH342)."""
    cov_type, bad_kwarg = cov_config
    # Map the cov_type keyword to the estimator class named in the error.
    estimators = {
        "clustered": "ClusteredCovariance",
        "kernel": "DriscollKraay",
        "robust": "HeteroskedasticCovariance",
    }
    cov = estimators.get(cov_type, "HomoskedasticCovariance")
    mod = PanelOLS(data.y, data.x)
    with pytest.raises(ValueError, match=f"Covariance estimator {cov}"):
        mod.fit(cov_type=cov_type, **{bad_kwarg: data.vc1})
# Reported by email
def test_corr_squared(data):
    """With a constant regressor, rsquared_overall must equal
    corr_squared_overall."""
    mod = PanelOLS(data.y, data.x)
    res = mod.fit()
    # The identity only holds when exog contains a constant column.
    if np.any(np.all(mod._x == 1.0, 0)):
        assert_allclose(res.rsquared_overall, res.corr_squared_overall)
| 33.171677 | 88 | 0.664787 | 7,138 | 49,658 | 4.452368 | 0.048193 | 0.031528 | 0.030584 | 0.044177 | 0.800321 | 0.774645 | 0.749064 | 0.731506 | 0.722161 | 0.699223 | 0 | 0.015003 | 0.202686 | 49,658 | 1,496 | 89 | 33.19385 | 0.78769 | 0.000946 | 0 | 0.631366 | 0 | 0 | 0.044328 | 0.006148 | 0 | 0 | 0 | 0 | 0.097009 | 1 | 0.048504 | false | 0 | 0.011318 | 0.001617 | 0.066289 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
281e01662a3b9c4a99ef2e65503e0b04803b36fb | 158 | py | Python | planning_agents/aiagents/__init__.py | rohitrango/gym-minigrid | fa47eea178920003792ffaa956575351b894f099 | [
"Apache-2.0"
] | null | null | null | planning_agents/aiagents/__init__.py | rohitrango/gym-minigrid | fa47eea178920003792ffaa956575351b894f099 | [
"Apache-2.0"
] | 1 | 2020-01-27T17:54:32.000Z | 2020-01-27T17:54:32.000Z | planning_agents/aiagents/__init__.py | rohitrango/gym-minigrid | fa47eea178920003792ffaa956575351b894f099 | [
"Apache-2.0"
] | null | null | null | from .asistplanningagents import *
from .preemptive import *
from .selectivetriage import *
from .mixedtimestrategy import *
from .mixedproxstrategy import *
| 26.333333 | 34 | 0.810127 | 15 | 158 | 8.533333 | 0.466667 | 0.3125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.126582 | 158 | 5 | 35 | 31.6 | 0.927536 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
282849db0a0eb40053521e8b346d590741bb4a49 | 192 | py | Python | tfjs_graph_converter/__init__.py | ducky777/posenet-python | e27d37f31f5402c0770fee8b944f69a7c0289d03 | [
"Apache-2.0"
] | 1 | 2020-10-15T07:38:36.000Z | 2020-10-15T07:38:36.000Z | tfjs_graph_converter/__init__.py | ducky777/posenet-python | e27d37f31f5402c0770fee8b944f69a7c0289d03 | [
"Apache-2.0"
] | null | null | null | tfjs_graph_converter/__init__.py | ducky777/posenet-python | e27d37f31f5402c0770fee8b944f69a7c0289d03 | [
"Apache-2.0"
] | null | null | null | import os
from tfjs_graph_converter import version
# make tensorflow stop spamming messages
os.environ['TF_CPP_MIN_LOG_LEVEL'] = "3"
__version__ = version.VERSION
VERSION = version.VERSION
| 19.2 | 40 | 0.807292 | 27 | 192 | 5.37037 | 0.703704 | 0.482759 | 0.57931 | 0.57931 | 0.289655 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005952 | 0.125 | 192 | 9 | 41 | 21.333333 | 0.857143 | 0.197917 | 0 | 0 | 0 | 0 | 0.138158 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.4 | 0 | 0.4 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
28706e08b5b056c0c45f4a70b4a41072464937ae | 171 | py | Python | app/routes/__init__.py | afrigon/fastapi-template | cb3c86353c67ef19c5abe12658e327ff37b14f90 | [
"MIT"
] | 2 | 2020-03-05T20:34:09.000Z | 2020-04-19T02:33:53.000Z | app/routes/__init__.py | afrigon/fastapi-template | cb3c86353c67ef19c5abe12658e327ff37b14f90 | [
"MIT"
] | 2 | 2019-12-17T18:49:29.000Z | 2019-12-17T23:19:11.000Z | app/routes/__init__.py | afrigon/fastapi-template | cb3c86353c67ef19c5abe12658e327ff37b14f90 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from .router import Router # noqa: F401
from .factory import RouterFactory # noqa: F401
from .default_route import DefaultRoute # noqa: F401
| 21.375 | 53 | 0.707602 | 22 | 171 | 5.454545 | 0.590909 | 0.2 | 0.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.071942 | 0.187135 | 171 | 7 | 54 | 24.428571 | 0.791367 | 0.315789 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
2893783a74c412b7f3e0502e14e6f93296a215eb | 137 | py | Python | pirates/snapshot/PSnapshotRenderer.py | ksmit799/POTCO-PS | 520d38935ae8df4b452c733a82c94dddac01e275 | [
"Apache-2.0"
] | 8 | 2017-01-24T04:33:29.000Z | 2020-11-01T08:36:24.000Z | pirates/snapshot/PSnapshotRenderer.py | ksmit799/Pirates-Online-Remake | 520d38935ae8df4b452c733a82c94dddac01e275 | [
"Apache-2.0"
] | 1 | 2017-03-02T18:05:17.000Z | 2017-03-14T06:47:10.000Z | pirates/snapshot/PSnapshotRenderer.py | ksmit799/Pirates-Online-Remake | 520d38935ae8df4b452c733a82c94dddac01e275 | [
"Apache-2.0"
] | 11 | 2017-03-02T18:46:07.000Z | 2020-11-01T08:36:26.000Z | # File: P (Python 2.4)
from otp.snapshot.SnapshotRenderer import SnapshotRenderer
class PSnapshotRenderer(SnapshotRenderer):
    """Pirates-specific snapshot renderer.

    Currently an empty subclass: all behavior comes from SnapshotRenderer,
    with no overrides.
    """
    pass
| 17.125 | 58 | 0.788321 | 15 | 137 | 7.2 | 0.866667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.016949 | 0.138686 | 137 | 7 | 59 | 19.571429 | 0.898305 | 0.145985 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 6 |
9577ae8a35dbecba2a08392c5470b92bec23eaf4 | 54 | py | Python | self_attention_cv/vit/__init__.py | Siyuan89/self-attention-cv | b39cde2fb68e05351bf3bc8048f4af13bbab256a | [
"MIT"
] | 759 | 2021-02-11T12:49:04.000Z | 2022-03-31T20:40:54.000Z | self_attention_cv/vit/__init__.py | youtang1993/self-attention-cv | ae215541d3e33d39f26947924253a63585683226 | [
"MIT"
] | 14 | 2021-03-02T17:04:00.000Z | 2022-03-21T09:34:30.000Z | self_attention_cv/vit/__init__.py | youtang1993/self-attention-cv | ae215541d3e33d39f26947924253a63585683226 | [
"MIT"
] | 97 | 2021-02-11T23:47:23.000Z | 2022-03-29T03:51:16.000Z | from .R50_ViT import ResNet50ViT
from .vit import ViT
| 18 | 32 | 0.814815 | 9 | 54 | 4.777778 | 0.555556 | 0.418605 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.086957 | 0.148148 | 54 | 2 | 33 | 27 | 0.847826 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
958b08924a60e1508071dbfa6dc09a39c9591a0d | 80 | py | Python | gql_subscriptions/__init__.py | syfun/gql-subscriptions | 6007d54000da3890ece4aa9b766fbfbaa6501ee8 | [
"MIT"
] | 7 | 2020-04-21T13:34:34.000Z | 2021-05-07T07:58:52.000Z | gql_subscriptions/__init__.py | syfun/gql-subscriptions | 6007d54000da3890ece4aa9b766fbfbaa6501ee8 | [
"MIT"
] | null | null | null | gql_subscriptions/__init__.py | syfun/gql-subscriptions | 6007d54000da3890ece4aa9b766fbfbaa6501ee8 | [
"MIT"
] | null | null | null | from .pubsub import PubSub # noqa
from .with_filter import with_filter # noqa
| 26.666667 | 44 | 0.775 | 12 | 80 | 5 | 0.5 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.175 | 80 | 2 | 45 | 40 | 0.909091 | 0.1125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
959537e668e600237e750ff100394f623647fa56 | 193 | py | Python | src/surveyinterface/admin.py | UCHIC/SurveyDataViewer | 6027ea075a5c11c7686304eb9dd169664cee5c58 | [
"BSD-3-Clause"
] | 10 | 2015-01-20T17:04:47.000Z | 2020-10-24T02:16:00.000Z | src/surveyinterface/admin.py | UCHIC/SurveyDataViewer | 6027ea075a5c11c7686304eb9dd169664cee5c58 | [
"BSD-3-Clause"
] | 65 | 2015-01-16T19:17:18.000Z | 2018-02-12T23:03:11.000Z | src/surveyinterface/admin.py | UCHIC/SurveyDataViewer | 6027ea075a5c11c7686304eb9dd169664cee5c58 | [
"BSD-3-Clause"
] | 2 | 2019-07-08T20:57:14.000Z | 2020-06-02T13:29:25.000Z | from django.contrib import admin
from surveyinterface.models import Survey
# Register your models here.
class SurveyAdmin(admin.ModelAdmin):
    """Default Django admin configuration for the Survey model (no customization)."""
    pass
# Expose Survey in the Django admin site using the configuration above.
admin.site.register(Survey, SurveyAdmin)
95e296bcb687c4e2bfd9ca3b4bef6aea67aab179 | 158 | py | Python | modules/2.79/bpy/types/Depsgraph.py | cmbasnett/fake-bpy-module | acb8b0f102751a9563e5b5e5c7cd69a4e8aa2a55 | [
"MIT"
] | null | null | null | modules/2.79/bpy/types/Depsgraph.py | cmbasnett/fake-bpy-module | acb8b0f102751a9563e5b5e5c7cd69a4e8aa2a55 | [
"MIT"
] | null | null | null | modules/2.79/bpy/types/Depsgraph.py | cmbasnett/fake-bpy-module | acb8b0f102751a9563e5b5e5c7cd69a4e8aa2a55 | [
"MIT"
] | null | null | null | class Depsgraph:
def debug_graphviz(self, filename):
pass
def debug_rebuild(self):
pass
def debug_stats(self):
pass
| 11.285714 | 39 | 0.588608 | 18 | 158 | 5 | 0.555556 | 0.266667 | 0.266667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.341772 | 158 | 13 | 40 | 12.153846 | 0.865385 | 0 | 0 | 0.428571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.428571 | false | 0.428571 | 0 | 0 | 0.571429 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 6 |
250465ef6fa086dd207e117672e9fbd43671f392 | 15,394 | py | Python | core/scraper.py | AnglewoodJack/ScrapeJobs | f810e1a461c24bd10ad2376627d48f1b8a145966 | [
"MIT"
] | null | null | null | core/scraper.py | AnglewoodJack/ScrapeJobs | f810e1a461c24bd10ad2376627d48f1b8a145966 | [
"MIT"
] | null | null | null | core/scraper.py | AnglewoodJack/ScrapeJobs | f810e1a461c24bd10ad2376627d48f1b8a145966 | [
"MIT"
] | null | null | null | import random
import re
from time import sleep
from urllib.parse import urljoin
from bs4 import BeautifulSoup
from selenium.webdriver.support.select import Select
from selenium.webdriver.support.wait import WebDriverWait
from tqdm import tqdm
from dateutil import parser
class JobScraper:
    """Base class for site-specific job scrapers.

    Subclasses override ``scrape_brief`` (collect vacancy summaries into
    ``self.jobs``) and ``scrape_full`` (enrich each vacancy with its full
    description page).
    """
    def __init__(self, driver, link: str, timeout: float):
        """
        :param driver: preconfigured webdriver (result of "configure_driver" function)
        :param link: job page url to scrape
        :param timeout: webdriver wait time for page loading
        """
        self.driver = driver    # Selenium webdriver instance
        self.timeout = timeout  # max seconds to wait for page loads
        self.link = link        # landing page with the vacancy list
        self.jobs = None        # filled by scrape_brief / scrape_full

    def scrape_brief(self):
        """Template hook; implemented by subclasses."""
        pass

    def scrape_full(self):
        """Template hook; implemented by subclasses."""
        pass
class IaeaScraper(JobScraper):
    """
    IAEA job core.
    """
    def scrape_brief(self):
        """Collect id/title/url/location/deadline for every open IAEA vacancy."""
        self.driver.get(self.link)
        # The pager label looks like "Job Openings 1 - 25 of 33".
        pager_re = re.compile(r'\d+ - \d+ of \d+')

        def read_pager(d):
            # d is the webdriver; returns a match object or None.
            return re.search(pager_re, d.find_element_by_id('currentPageInfo').text)

        WebDriverWait(self.driver, self.timeout).until(read_pager)
        pager_text = read_pager(self.driver).group(0)
        collected = []
        pageno = 1
        # Job links look like "jobdetail.ftl?job=2021/0123 (456)".
        link_re = re.compile(r'jobdetail\.ftl\?job=\d+')
        id_re = re.compile(r'job=(\d+\/\d+\s\(\d+\))')
        while True:
            soup = BeautifulSoup(self.driver.page_source, features="html.parser")
            for anchor in tqdm(
                soup.findAll('a', href=link_re),
                desc=f"Scraping IAEA's job page {pageno}",
            ):
                cells = anchor.findParent('tr').findAll('td')
                job_url = urljoin(self.link, anchor['href'])
                collected.append({
                    'id': re.findall(id_re, job_url)[0],
                    'title': anchor.text,
                    'url': job_url,
                    'location': cells[1].text,
                    'deadline': parser.parse(cells[2].text),
                    'organization': 'IAEA',
                })
                # Throttle between jobs.
                sleep(random.uniform(0.5, 1.0))
            next_button = self.driver.find_element_by_id('next')
            if soup.find('a', text=f'{pageno + 1}'):
                next_button.click()
                # Wait until the pager label changes,
                # e.g. "1 - 25 of 48" -> "26 - 50 of 48".
                WebDriverWait(self.driver, self.timeout).until(
                    lambda d: read_pager(d) and read_pager(d).group(0) != pager_text
                )
                pager_text = read_pager(self.driver).group(0)
                pageno += 1
            else:
                # No further pages.
                break
        self.jobs = collected

    def scrape_full(self):
        """Attach the raw HTML of each job's description page and a re-opening flag.

        The page HTML is stored unparsed under ``job['html_page']``.
        """
        reopen_re = re.compile(r'This is a re-opening of the vacancy')
        for job in tqdm(self.jobs, desc="Getting IAEA's job descriptions"):
            self.driver.get(job['url'])
            soup = BeautifulSoup(self.driver.page_source, features="html.parser")
            job['reopen'] = 1 if re.search(reopen_re, soup.text) else 0
            job['html_page'] = soup.prettify(formatter='html')
            sleep(random.uniform(0.75, 1.0))
        # Done with the browser once all descriptions are captured.
        self.driver.quit()
class IrenaScraper(JobScraper):
    """
    IRENA job core.
    """
    def scrape_brief(self):
        """Collect summary info (title/location/dates/id) for all IRENA vacancies."""
        self.driver.get(self.link)
        # Pager label looks like "Jobs - Page 1 out of 2".
        pages_re = re.compile(r'\d+ out of (\d+)')

        def read_pages(d):
            return re.findall(pages_re, d.find_element_by_id('content').text)

        WebDriverWait(self.driver, self.timeout).until(read_pages)
        total_pages = int(read_pages(self.driver)[0])
        # Page size comes from the listing's dropdown; job count from the subtitle.
        size_select = Select(self.driver.find_element_by_name('dropListSize'))
        per_page = int(size_select.first_selected_option.text)
        remaining = int(
            re.findall(
                r'\d+', self.driver.find_element_by_class_name('subtitle').text
            )[0]
        )
        collected = []
        for pageno in range(1, total_pages + 1):
            self.driver.find_element_by_xpath(
                f'//*[@title="Go to page {pageno}"]'
            ).click()
            # Give the page time to load.
            sleep(3)
            # Last page may hold fewer than per_page jobs.
            on_page = min(per_page, remaining)
            remaining -= on_page
            for row in tqdm(
                range(1, on_page + 1), desc=f"Scraping IRENA's job page {pageno}"
            ):
                def grab(field, row=row):
                    # Read the text of one cell of the listing table.
                    return self.driver.find_element_by_id(
                        f'requisitionListInterface.{field}.row{row}'
                    ).text

                collected.append({
                    'title': grab('reqTitleLinkAction'),
                    'location': grab('reqBasicLocation'),
                    'posted': grab('reqPostingDate'),
                    'id': grab('reqContestNumberValue'),
                    'deadline': parser.parse(grab('reqUnpostingDate')),
                    'organization': 'IRENA',
                    # Remember where the job sits so scrape_full can navigate back.
                    'page/row': (pageno, row),
                })
                sleep(random.uniform(0.5, 1.0))
        self.jobs = collected

    def scrape_full(self):
        """Attach the raw HTML of each job's description page and a re-advert flag.

        The page HTML is stored unparsed under ``job['html_page']``.
        """
        readvert_re = re.compile(r'This is a re-advertisement of the vacancy')
        for job in tqdm(self.jobs, desc="Getting IRENA's job descriptions"):
            # Navigation is stateful: reload the listing, jump to the job's
            # page, then click through to the vacancy itself.
            self.driver.get(self.link)
            self.driver.find_element_by_xpath(
                f'//*[@title="Go to page {job["page/row"][0]}"]'
            ).click()
            sleep(random.uniform(0.75, 1.0))
            self.driver.find_element_by_id(
                f'requisitionListInterface.reqTitleLinkAction.row{job["page/row"][1]}'
            ).click()
            soup = BeautifulSoup(self.driver.page_source, features="html.parser")
            job['reopen'] = 1 if re.search(readvert_re, soup.text) else 0
            job['html_page'] = soup.prettify(formatter='html')
            sleep(random.uniform(0.75, 1.0))
        self.driver.quit()
class IterScraper(JobScraper):
    """
    ITER job core.
    """
    def scrape_full(self):
        """Attach the raw HTML of each job's description page and a re-opening flag.

        The page HTML is stored unparsed under ``job['html_page']``.
        """
        reopen_re = re.compile(r'This is a re-opening of the vacancy')  # might be redundant for ITER
        for job in tqdm(self.jobs, desc="Getting ITER's job descriptions"):
            self.driver.get(job['url'])
            soup = BeautifulSoup(self.driver.page_source, features="html.parser")
            job['reopen'] = 1 if re.search(reopen_re, soup.text) else 0
            job['html_page'] = soup.prettify(formatter="html")
            sleep(random.uniform(0.75, 1.0))
        # Done with the browser once all descriptions are captured.
        self.driver.quit()

    def scrape_brief(self):
        """Collect id/title/url/deadline for every open ITER vacancy."""
        self.driver.get(self.link)
        soup = BeautifulSoup(self.driver.page_source, features="html.parser")
        # The numeric id is embedded in the job link's query string.
        id_re = re.compile(r'id=(\d+)')
        collected = []
        for row in tqdm(soup.tbody.findAll('tr'), desc="Scraping ITER jobs"):
            # Job title/link live in an <a> nested inside a <b> element.
            anchor = row.findChildren('b')[0].findChildren('a')[0]
            cells = row.findAll('td')
            title = anchor.text
            job_url = anchor['href']
            collected.append({
                # Combine the reference in the title with the link's numeric id.
                'id': title.split()[-1] + '/' + re.findall(id_re, job_url)[0],
                'title': title,
                'url': job_url,
                'deadline': parser.parse(cells[0].text),
                # All ITER positions are on the single site; fixed location.
                # Fix: the original string had a stray trailing ')'.
                'location': 'France-St. Paul-lez-Durance',
                'organization': 'ITER',
            })
            sleep(random.uniform(0.5, 1.0))
        self.jobs = collected
class OecdScraper(JobScraper):
"""
OECD job core.
"""
def scrape_brief(self):
"""
Get general info for currently open vacancies.
"""
# get page
self.driver.get(self.link)
# regex for number of jobs (example: Jobs - Page 1 out of 2)
reg_pages = re.compile(r'\d+ out of (\d+)')
# apply regex for number of jobs search to page (tag: currentPageInfo): d - reference to driver object
f = lambda d: re.findall(reg_pages, d.find_element_by_class_name('pagerlabel').text)
# wait until f is done
WebDriverWait(self.driver, self.timeout).until(f)
# execute f
m = f(self.driver)
# get total number of pages
total_pages = int(m[0])
# get the number of jobs per page
select = Select(self.driver.find_element_by_name('dropListSize'))
per_page = int(select.first_selected_option.text)
# get total number of jobs
total_jobs = int(re.findall(r'\d+', self.driver.find_element_by_class_name('subtitle').text)[0])
# create empty jobs list
jobs = []
# set initial page number
pageno = 1
while pageno <= total_pages:
# go to corresponding page
self.driver.find_element_by_xpath(f'//*[@title="Go to page {pageno}"]').click()
# wait for the page to be loaded
sleep(3)
# check the number of jobs on a page
if total_jobs > per_page:
jobs_number = per_page
total_jobs -= per_page
else:
jobs_number = total_jobs
# loop through jobs on the page
for i in tqdm(range(1, jobs_number + 1), desc=f"Scraping OECD's job page {pageno}"):
# job's info dictionary initialization
job = {}
# add job title
_title = self.driver.find_element_by_id(f'requisitionListInterface.reqTitleLinkAction.row{i}')
job['title'] = _title.text
# add OECD organization
_oecd_unit = self.driver.find_element_by_id(f'requisitionListInterface.reqOrganization.row{i}')
job['oecd_unit'] = _oecd_unit.text
# add job location
_location = self.driver.find_element_by_id(f'requisitionListInterface.reqBasicLocation.row{i}')
job['location'] = _location.text
# add job posting date
_posted = self.driver.find_element_by_id(f'requisitionListInterface.reqPostingDate.row{i}')
job['posted'] = _posted.text
# add job id
_id = self.driver.find_element_by_id(f'requisitionListInterface.reqContestNumberValue.row{i}')
job['id'] = _id.text
# add job deadline
_deadline = self.driver.find_element_by_id(f'requisitionListInterface.reqUnpostingDate.row{i}')
job['deadline'] = parser.parse(_deadline.text)
# add organization name manually
job['organization'] = 'OECD'
# add job position on the page
job['page/row'] = (pageno, i)
# append job's dictionary to overall jobs list
jobs.append(job)
# sleep random time after each job
sleep(random.uniform(0.5, 1.0))
# update page number
pageno += 1
self.jobs = jobs
def scrape_full(self):
    """
    Update jobs attribute with full info for currently open vacancies.

    The full info is not parsed and is placed into each job's dictionary
    as raw html code.
    """
    # pattern marking a re-opened vacancy (might be redundant for OECD)
    reopen_status = re.compile(r'This is a re-opening of the vacancy')
    # for each job previously collected into self.jobs
    for job in tqdm(self.jobs, desc="Getting OECD's job descriptions"):
        # go back to the job listing page before locating each job
        self.driver.get(self.link)
        # find the pagination link for the page this job was scraped from
        page_elem = self.driver.find_element_by_xpath(f'//*[@title="Go to page {job["page/row"][0]}"]')
        # go to that page
        page_elem.click()
        # wait for the page to load
        sleep(random.uniform(1.0, 1.5))
        # find the job's title link by its row number on the page
        row = self.driver.find_element_by_id(f'requisitionListInterface.reqTitleLinkAction.row{job["page/row"][1]}')
        # go to the current job's page
        row.click()
        # parse the job page's html source
        s = BeautifulSoup(self.driver.page_source, features="html.parser")
        if re.search(reopen_status, s.text):
            # re-opening status found on the page
            job['reopen'] = 1
        else:
            # no re-opening status
            job['reopen'] = 0
        # save html code of the job's page (unparsed, as documented above)
        job['html_page'] = s.prettify(formatter='html')
        # sleep a random time after each job to avoid hammering the server
        sleep(random.uniform(0.75, 1.0))
    # close driver after getting all the jobs info
    self.driver.quit()
| 33.176724 | 111 | 0.683643 | 2,368 | 15,394 | 4.357264 | 0.110642 | 0.048459 | 0.031498 | 0.044776 | 0.798992 | 0.775441 | 0.75722 | 0.74152 | 0.736674 | 0.736674 | 0 | 0.008785 | 0.201377 | 15,394 | 463 | 112 | 33.24838 | 0.830486 | 0.370534 | 0 | 0.705069 | 0 | 0 | 0.195896 | 0.081329 | 0 | 0 | 0 | 0 | 0 | 1 | 0.050691 | false | 0.009217 | 0.041475 | 0 | 0.115207 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
2524e58191b7183ceeca0f3107bb7b5a9cf510ce | 138 | py | Python | tests/fixtures/discover/test_exact.py | polishmatt/sputr | 7611d40090c8115dff69912725efc506414ac47a | [
"MIT"
] | 1 | 2017-02-13T23:09:18.000Z | 2017-02-13T23:09:18.000Z | tests/fixtures/discover/test_exact.py | polishmatt/sputr | 7611d40090c8115dff69912725efc506414ac47a | [
"MIT"
] | 6 | 2017-02-18T20:14:32.000Z | 2017-09-27T19:07:06.000Z | tests/fixtures/discover/test_exact.py | polishmatt/sputr | 7611d40090c8115dff69912725efc506414ac47a | [
"MIT"
] | null | null | null | import unittest
class ExactTest(unittest.TestCase):
    """Fixture case with deliberately empty tests.

    Both methods intentionally do nothing; only their exact names matter
    for test discovery.
    """

    def test_exact_name(self):
        """Intentionally empty."""
        return None

    def test_exact(self):
        """Intentionally empty."""
        return None
| 13.8 | 35 | 0.65942 | 17 | 138 | 5.176471 | 0.647059 | 0.159091 | 0.272727 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.268116 | 138 | 9 | 36 | 15.333333 | 0.871287 | 0 | 0 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0.333333 | 0.166667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 6 |
256430c1303c1674506ce2f2d3d0a215c29487b9 | 35 | py | Python | package/mtwow/general/__init__.py | periodically-makes-puns/phoenix-mtwow-bote-py | ebef75e8a48d97c54b128c066d49d2ccd21a5dc9 | [
"MIT"
] | null | null | null | package/mtwow/general/__init__.py | periodically-makes-puns/phoenix-mtwow-bote-py | ebef75e8a48d97c54b128c066d49d2ccd21a5dc9 | [
"MIT"
] | 1 | 2019-11-26T03:09:23.000Z | 2019-11-26T03:09:23.000Z | package/mtwow/general/__init__.py | periodically-makes-puns/phoenix-mtwow-bote-py | ebef75e8a48d97c54b128c066d49d2ccd21a5dc9 | [
"MIT"
] | null | null | null | from . import sqlutils, user, admin | 35 | 35 | 0.771429 | 5 | 35 | 5.4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.142857 | 35 | 1 | 35 | 35 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
c2641f01cbe78ed57e70ed502d7be2a86446f677 | 37,972 | py | Python | tests/test_controller.py | xIMRANx/morelia_server | 5119666151e14c964de256521cceed872cee5c48 | [
"MIT"
] | 1 | 2022-03-01T08:25:38.000Z | 2022-03-01T08:25:38.000Z | tests/test_controller.py | xIMRANx/morelia_server | 5119666151e14c964de256521cceed872cee5c48 | [
"MIT"
] | null | null | null | tests/test_controller.py | xIMRANx/morelia_server | 5119666151e14c964de256521cceed872cee5c48 | [
"MIT"
] | null | null | null | import inspect
import json
import os
import sys
import unittest
import configparser
from uuid import uuid4
import sqlobject as orm
from loguru import logger
# Make the code under test importable: append the directory above the
# tests (the project root) to sys.path before the "mod" imports below.
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
# static fixture files shipped next to the tests
FIXTURES_PATH = os.path.join(BASE_PATH, "fixtures")
sys.path.append(os.path.split(BASE_PATH)[0])
from mod import api # noqa
from mod import controller # noqa
from mod import lib # noqa
from mod import models # noqa
# ************** Read "config.ini" ********************
config = configparser.ConfigParser()
config.read('config.ini')
# server-side limits section (e.g. the "messages" page size)
limit = config['SERVER_LIMIT']
# ************** END **********************************

# Route every SQLObject query to a throwaway in-memory SQLite database.
connection = orm.connectionForURI("sqlite:/:memory:")
orm.sqlhub.processConnection = connection

# Names of all model classes, used to create/drop every table per test.
classes = [name for name, _ in
           inspect.getmembers(sys.modules["mod.models"], inspect.isclass)]
# **************** Examples of requests ********************
#
# Field values repeated across the example requests:
user_uuid = "123456"
user_auth_id = "auth_id"
user_password = "password"
user_login = "login"
flow_uuid = "07d949"


def _auth_user():
    """Return a fresh authenticated-user record (no shared references)."""
    return {"uuid": user_uuid, "auth_id": user_auth_id}


def _request(req_type, data):
    """Wrap *data* in the standard request envelope used by the protocol.

    ``req_type`` of ``None`` omits the top-level "type" key, which is how
    the deliberately invalid requests below are built.
    """
    request = {} if req_type is None else {"type": req_type}
    body = dict(data)
    body["meta"] = None
    request.update({
        "data": body,
        "jsonapi": {"version": "1.0"},
        "meta": None,
    })
    return request


GET_UPDATE = _request("get_update", {
    "time": 111,
    "user": [_auth_user()],
})

SEND_MESSAGE = _request("send_message", {
    "flow": [{"uuid": flow_uuid}],
    "message": [{
        "text": "Hello!",
        "client_id": 123,
        "file_picture": b"jkfikdkdsd",
        "file_video": b"sdfsdfsdf",
        "file_audio": b"fgfsdfsdfsdf",
        "file_document": b"adgdfhfgth",
        "emoji": b"sfdfsdfsdf",
    }],
    "user": [_auth_user()],
})

ALL_MESSAGES = _request("all_messages", {
    "time": 2,
    "flow": [{"uuid": flow_uuid}],
    "user": [_auth_user()],
})

ADD_FLOW = _request("add_flow", {
    "flow": [{
        "type": "group",
        "title": "title",
        "info": "info",
        "owner": "123456",
        "users": ["123456"],
    }],
    "user": [_auth_user()],
})

ALL_FLOW = _request("all_flow", {"user": [_auth_user()]})

USER_INFO = _request("user_info", {
    "user": [_auth_user(),
             {"uuid": "123457"},
             {"uuid": "123458"},
             {"uuid": "123459"},
             {"uuid": "123460"}],
})

REGISTER_USER = _request("register_user", {
    "user": [{
        "password": user_password,
        "login": user_login,
        "email": "querty@querty.com",
        "username": "username",
    }],
})

AUTH = _request("auth", {
    "user": [{"password": user_password, "login": user_login}],
})

DELETE_USER = _request("delete_user", {
    "user": [{
        "uuid": user_uuid,
        "password": user_password,
        "login": user_login,
        "auth_id": user_auth_id,
    }],
})

DELETE_MESSAGE = _request("delete_message", {
    "flow": [{"uuid": flow_uuid}],
    "message": [{"uuid": "1122"}],
    "user": [_auth_user()],
})

EDITED_MESSAGE = _request("edited_message", {
    "message": [{"uuid": "1", "text": "New_Hello"}],
    "user": [_auth_user()],
})

PING_PONG = _request("ping-pong", {"user": [_auth_user()]})

# request with an unknown method name
ERRORS = _request("wrong type", {"user": [_auth_user()]})

# request with no "type" key at all
NON_VALID_ERRORS = _request(None, {"user": [_auth_user()]})

# request consisting of nothing but a "type" key
ERRORS_ONLY_TYPE = {
    "type": "send_message"
}
# **************** End examples of requests *****************
class TestCheckAuthToken(unittest.TestCase):
    """Tests for the private ProtocolMethods.__check_auth_token helper."""

    @classmethod
    def setUpClass(cls):
        # silence loguru output for the whole class
        logger.remove()

    def setUp(self):
        # build every model table in the in-memory database
        for name in classes:
            getattr(models, name).createTable(ifNotExists=True)
        # one registered user whose token the helper should accept
        models.UserConfig(uuid="123456", login="login",
                          password="password", authId="auth_id")
        self.test = api.ValidJSON.parse_obj(SEND_MESSAGE)

    def tearDown(self):
        # drop all tables so every test starts from a clean database
        for name in classes:
            getattr(models, name).dropTable(ifExists=True,
                                            dropJoinTables=True,
                                            cascade=True)
        del self.test

    def _check(self, uuid, auth_id):
        """Invoke the name-mangled private helper and return its result."""
        instance = controller.ProtocolMethods(self.test)
        return instance._ProtocolMethods__check_auth_token(uuid, auth_id)

    def test_check_true_result(self):
        user = self.test.data.user[0]
        self.assertTrue(self._check(user.uuid, user.auth_id))

    def test_check_wrong_uuid(self):
        self.assertFalse(self._check(654321, self.test.data.user[0].auth_id))

    def test_check_wrong_auth_id(self):
        self.assertFalse(self._check(self.test.data.user[0].uuid,
                                     "wrong_auth_id"))
class TestCheckLogin(unittest.TestCase):
    """Tests for the private ProtocolMethods.__check_login helper."""

    @classmethod
    def setUpClass(cls):
        # silence loguru output for the whole class
        logger.remove()

    def setUp(self):
        # build every model table in the in-memory database
        for name in classes:
            getattr(models, name).createTable(ifNotExists=True)
        # one registered user whose login the helper should find
        models.UserConfig(uuid="123456", login="login",
                          password="password", authId="auth_id")
        self.test = api.ValidJSON.parse_obj(REGISTER_USER)

    def tearDown(self):
        # drop all tables so every test starts from a clean database
        for name in classes:
            getattr(models, name).dropTable(ifExists=True,
                                            dropJoinTables=True,
                                            cascade=True)
        del self.test

    def _check(self, login):
        """Invoke the name-mangled private helper and return its result."""
        instance = controller.ProtocolMethods(self.test)
        return instance._ProtocolMethods__check_login(login)

    def test_check_true_result(self):
        self.assertTrue(self._check(self.test.data.user[0].login))

    def test_check_wrong_login(self):
        self.assertFalse(self._check("wrong_login"))
class TestRegisterUser(unittest.TestCase):
    """Tests for the ``register_user`` API method."""

    @classmethod
    def setUpClass(cls):
        # silence loguru output for the whole class
        logger.remove()

    def setUp(self):
        # build every model table in the in-memory database
        for name in classes:
            getattr(models, name).createTable(ifNotExists=True)
        self.test = api.ValidJSON.parse_obj(REGISTER_USER)

    def tearDown(self):
        # drop all tables so every test starts from a clean database
        for name in classes:
            getattr(models, name).dropTable(ifExists=True,
                                            dropJoinTables=True,
                                            cascade=True)
        del self.test

    def _response(self):
        """Execute the request and return the decoded JSON response."""
        return json.loads(controller.ProtocolMethods(self.test).get_response())

    def test_user_created(self):
        self.assertEqual(self._response()["errors"]["code"], 201)

    def test_user_already_exists(self):
        models.UserConfig(uuid="123456", login="login", password="password")
        self.assertEqual(self._response()["errors"]["code"], 409)

    def test_user_write_in_database(self):
        controller.ProtocolMethods(self.test)
        dbquery = models.UserConfig.selectBy(login="login").getOne()
        self.assertEqual(dbquery.login, "login")

    def test_uuid_write_in_database(self):
        result = self._response()
        dbquery = models.UserConfig.selectBy(login="login").getOne()
        self.assertEqual(dbquery.uuid, result["data"]["user"][0]["uuid"])

    def test_auth_id_write_in_database(self):
        result = self._response()
        dbquery = models.UserConfig.selectBy(login="login").getOne()
        self.assertEqual(dbquery.authId, result["data"]["user"][0]["auth_id"])

    def test_type_of_salt(self):
        controller.ProtocolMethods(self.test)
        dbquery = models.UserConfig.selectBy(login="login").getOne()
        self.assertIsInstance(dbquery.salt, bytes)

    def test_type_of_key(self):
        controller.ProtocolMethods(self.test)
        dbquery = models.UserConfig.selectBy(login="login").getOne()
        self.assertIsInstance(dbquery.key, bytes)
class TestGetUpdate(unittest.TestCase):
    """Tests for the ``get_update`` API method."""

    @classmethod
    def setUpClass(cls):
        # silence loguru output for the whole class
        logger.remove()

    def setUp(self):
        # build every model table in the in-memory database
        for name in classes:
            getattr(models, name).createTable(ifNotExists=True)
        # three registered users
        user1, user2, user3 = [
            models.UserConfig(uuid=uuid, login=login,
                              password=password, authId=auth_id)
            for uuid, login, password, auth_id in (
                ("123456", "login", "password", "auth_id"),
                ("987654", "login2", "password2", "auth_id2"),
                ("666555", "login3", "password3", "auth_id3"))]
        # two flows with different owners
        flow1 = models.Flow(uuid="07d949", timeCreated=111, flowType="chat",
                            title="title1", info="info1", owner="123456")
        flow2 = models.Flow(uuid="07d950", timeCreated=222, flowType="group",
                            title="title2", info="info2", owner="987654")
        # flow membership (insertion order matters for response indices)
        flow1.addUserConfig(user1)
        flow1.addUserConfig(user2)
        flow2.addUserConfig(user2)
        flow2.addUserConfig(user1)
        flow2.addUserConfig(user3)
        # five messages spread over both flows
        for uuid, text, time, user, flow in (
                ("111", "Hello1", 111, user1, flow1),
                ("112", "Hello2", 222, user2, flow1),
                ("113", "Heeeello1", 111, user1, flow2),
                ("114", "Heeeello2", 222, user2, flow2),
                ("115", "Heeeello3", 333, user3, flow2)):
            models.Message(uuid=uuid, text=text, time=time,
                           user=user, flow=flow)
        self.test = api.ValidJSON.parse_obj(GET_UPDATE)

    def tearDown(self):
        # drop all tables so every test starts from a clean database
        for name in classes:
            getattr(models, name).dropTable(ifExists=True,
                                            dropJoinTables=True,
                                            cascade=True)
        del self.test

    def _response(self):
        """Execute the request and return the decoded JSON response."""
        return json.loads(controller.ProtocolMethods(self.test).get_response())

    def test_update(self):
        self.assertEqual(self._response()["errors"]["code"], 200)

    def test_check_message_in_result(self):
        self.assertEqual(self._response()["data"]["message"][1]["uuid"],
                         "112")

    def test_check_flow_in_result(self):
        self.assertEqual(self._response()["data"]["flow"][0]["owner"],
                         "123456")

    def test_check_user_in_result(self):
        self.assertEqual(self._response()["data"]["user"][2]["uuid"],
                         "666555")

    @unittest.skip("Не работает, пока не будет добавлен фильтр по времени")
    def test_no_new_data_in_database(self):
        self.test.data.time = 444
        self.assertEqual(self._response()["errors"]["code"], 404)
class TestSendMessage(unittest.TestCase):
    """Tests for the ``send_message`` API method."""

    @classmethod
    def setUpClass(cls):
        # silence loguru output for the whole class
        logger.remove()

    def setUp(self):
        # build every model table in the in-memory database
        for name in classes:
            getattr(models, name).createTable(ifNotExists=True)
        # one user subscribed to one flow
        sender = models.UserConfig(uuid="123456", login="login",
                                   password="password", authId="auth_id")
        flow = models.Flow(uuid="07d949", timeCreated=111,
                           flowType="group", owner="123456")
        flow.addUserConfig(sender)
        self.test = api.ValidJSON.parse_obj(SEND_MESSAGE)

    def tearDown(self):
        # drop all tables so every test starts from a clean database
        for name in classes:
            getattr(models, name).dropTable(ifExists=True,
                                            dropJoinTables=True,
                                            cascade=True)
        del self.test

    def _response(self):
        """Execute the request and return the decoded JSON response."""
        return json.loads(controller.ProtocolMethods(self.test).get_response())

    def test_send_message(self):
        self.assertEqual(self._response()["errors"]["code"], 200)

    def test_check_id_in_response(self):
        result = self._response()
        dbquery = models.Message.selectBy().getOne()
        self.assertEqual(result["data"]["message"][0]["uuid"], dbquery.uuid)

    def test_check_client_id_in_response(self):
        result = self._response()
        self.assertEqual(result["data"]["message"][0]["client_id"], 123)

    def test_wrong_flow(self):
        self.test.data.flow[0].uuid = "666666"
        self.assertEqual(self._response()["errors"]["code"], 404)

    def test_write_text_in_database(self):
        controller.ProtocolMethods(self.test)
        dbquery = models.Message.selectBy().getOne()
        self.assertEqual(dbquery.text, self.test.data.message[0].text)

    def test_write_time_in_database(self):
        controller.ProtocolMethods(self.test)
        dbquery = models.Message.selectBy().getOne()
        self.assertIsInstance(dbquery.time, int)
class TestAllMessages(unittest.TestCase):
    """Tests for the ``all_messages`` API method and its paging limit."""

    @classmethod
    def setUpClass(cls):
        # silence loguru output for the whole class
        logger.remove()

    def setUp(self):
        # build every model table in the in-memory database
        for name in classes:
            getattr(models, name).createTable(ifNotExists=True)
        user1 = models.UserConfig(uuid="123456", login="login",
                                  password="password", authId="auth_id")
        user2 = models.UserConfig(uuid="654321", login="login2",
                                  password="password2", authId="auth_id2")
        flow1 = models.Flow(uuid="07d949", flowType="chat", owner="123456")
        flow2 = models.Flow(uuid="07d950", flowType="chat", owner="654321")
        flow1.addUserConfig(user1)
        flow1.addUserConfig(user2)
        flow2.addUserConfig(user1)
        flow2.addUserConfig(user2)
        page_size = limit.getint("messages")
        # flow1 holds more messages than one page allows ...
        for index in range(page_size + 10):
            models.Message(uuid=str(uuid4().int), text=f"Hello{index}",
                           time=index, user=user1, flow=flow1)
        # ... while flow2 stays below the limit
        for index in range(page_size - 10):
            models.Message(uuid=str(uuid4().int), text=f"Kak Dela{index}",
                           time=index, user=user2, flow=flow2)
        models.Message(uuid=str(uuid4().int), text="Privet",
                       time=666, user=user2, flow=flow2)
        self.test = api.ValidJSON.parse_obj(ALL_MESSAGES)

    def tearDown(self):
        # drop all tables so every test starts from a clean database
        for name in classes:
            getattr(models, name).dropTable(ifExists=True,
                                            dropJoinTables=True,
                                            cascade=True)
        del self.test

    def _response(self):
        """Execute the request and return the decoded JSON response."""
        return json.loads(controller.ProtocolMethods(self.test).get_response())

    def test_all_message_more_limit(self):
        self.assertEqual(self._response()["errors"]["code"], 206)

    def test_all_message_less_limit(self):
        self.test.data.flow[0].uuid = "07d950"
        self.assertEqual(self._response()["errors"]["code"], 200)

    def test_message_end_in_response(self):
        self.assertEqual(self._response()["data"]["flow"][0]["message_end"],
                         108)

    def test_check_message_in_database(self):
        controller.ProtocolMethods(self.test)
        dbquery = models.Message.selectBy(time=666).getOne()
        self.assertEqual(dbquery.text, "Privet")

    def test_wrong_message_volume(self):
        self.test.data.flow[0].message_end = 256
        self.assertEqual(self._response()["errors"]["code"], 403)

    def test_wrong_flow_id(self):
        self.test.data.flow[0].uuid = "666666"
        self.assertEqual(self._response()["errors"]["code"], 404)
class TestAddFlow(unittest.TestCase):
    """Tests for the ``add_flow`` API method."""

    @classmethod
    def setUpClass(cls):
        # silence loguru once per class, consistent with the other test
        # classes in this module (was previously called from setUp)
        logger.remove()

    def setUp(self):
        # build every model table in the in-memory database
        for item in classes:
            class_ = getattr(models, item)
            class_.createTable(ifNotExists=True)
        models.UserConfig(uuid="123456",
                          login="login",
                          password="password",
                          authId="auth_id")
        models.Flow(uuid="07d949")
        self.test = api.ValidJSON.parse_obj(ADD_FLOW)

    def tearDown(self):
        # drop all tables so every test starts from a clean database
        for item in classes:
            class_ = getattr(models, item)
            class_.dropTable(ifExists=True,
                             dropJoinTables=True,
                             cascade=True)
        del self.test

    def test_add_flow_group(self):
        run_method = controller.ProtocolMethods(self.test)
        result = json.loads(run_method.get_response())
        self.assertEqual(result["errors"]["code"], 200)

    def test_add_flow_channel(self):
        self.test.data.flow[0].type = "channel"
        run_method = controller.ProtocolMethods(self.test)
        result = json.loads(run_method.get_response())
        self.assertEqual(result["errors"]["code"], 200)

    def test_add_flow_bad_type(self):
        error = "Wrong flow type"
        self.test.data.flow[0].type = "unknown"
        run_method = controller.ProtocolMethods(self.test)
        result = json.loads(run_method.get_response())
        self.assertEqual(result["errors"]["detail"], error)

    def test_add_flow_chat_single_user(self):
        # a "chat" flow must contain exactly two users
        error = "Must be two users only"
        self.test.data.flow[0].type = "chat"
        run_method = controller.ProtocolMethods(self.test)
        result = json.loads(run_method.get_response())
        self.assertEqual(result["errors"]["detail"], error)

    def test_add_flow_chat_more_users(self):
        self.test.data.flow[0].type = "chat"
        self.test.data.flow[0].users.extend(["666555", "888999"])
        run_method = controller.ProtocolMethods(self.test)
        result = json.loads(run_method.get_response())
        self.assertEqual(result["errors"]["code"], 400)

    def test_check_flow_in_database(self):
        run_method = controller.ProtocolMethods(self.test)
        result = json.loads(run_method.get_response())
        dbquery = models.Flow.selectBy(title="title").getOne()
        self.assertEqual(dbquery.uuid,
                         result["data"]["flow"][0]["uuid"])
class TestAllFlow(unittest.TestCase):
    """Tests for the ``all_flow`` API method."""

    @classmethod
    def setUpClass(cls):
        # silence loguru output for the whole class
        logger.remove()

    def setUp(self):
        # build every model table in the in-memory database
        for name in classes:
            getattr(models, name).createTable(ifNotExists=True)
        models.UserConfig(uuid="123456", login="login",
                          password="password", authId="auth_id")
        self.test = api.ValidJSON.parse_obj(ALL_FLOW)

    def tearDown(self):
        # drop all tables so every test starts from a clean database
        for name in classes:
            getattr(models, name).dropTable(ifExists=True,
                                            dropJoinTables=True,
                                            cascade=True)
        del self.test

    def _response(self):
        """Execute the request and return the decoded JSON response."""
        return json.loads(controller.ProtocolMethods(self.test).get_response())

    def test_all_flow(self):
        models.Flow(uuid="07d949", timeCreated=123456, flowType="group",
                    title="title", info="info", owner="123456")
        self.assertEqual(self._response()["data"]["flow"][0]["info"], "info")

    def test_blank_flow_table_in_database(self):
        self.assertEqual(self._response()["errors"]["code"], 404)
class TestUserInfo(unittest.TestCase):
    """Tests for the ``user_info`` API method."""

    @classmethod
    def setUpClass(cls):
        # silence loguru output for the whole class
        logger.remove()

    def setUp(self):
        # build every model table in the in-memory database
        for name in classes:
            getattr(models, name).createTable(ifNotExists=True)
        # five users with consecutive uuids 123456..123460
        for offset in range(5):
            models.UserConfig(uuid=str(123456 + offset),
                              login="login",
                              password="password",
                              username="username",
                              isBot=False,
                              authId="auth_id",
                              email="email@email.com",
                              bio="bio")
        self.test = api.ValidJSON.parse_obj(USER_INFO)

    def tearDown(self):
        # drop all tables so every test starts from a clean database
        for name in classes:
            getattr(models, name).dropTable(ifExists=True,
                                            dropJoinTables=True,
                                            cascade=True)
        del self.test

    def _response(self):
        """Execute the request and return the decoded JSON response."""
        return json.loads(controller.ProtocolMethods(self.test).get_response())

    def test_user_info(self):
        self.assertEqual(self._response()["errors"]["code"], 200)

    def test_check_user_info(self):
        self.assertEqual(self._response()["data"]["user"][0]["bio"], "bio")

    def test_check_many_user_info(self):
        # exceed the per-request user cap
        extra = [{'uuid': str(123456 + number)} for number in range(120)]
        self.test.data.user.extend(extra)
        self.assertEqual(self._response()["errors"]["code"], 403)
class TestAuthentification(unittest.TestCase):
    """Tests for the ``auth`` API method."""

    @classmethod
    def setUpClass(cls):
        # silence loguru output for the whole class
        logger.remove()

    def setUp(self):
        # precomputed hash matching the fixture user's credentials
        self.hash_password = lib.Hash("password", 123456,
                                      b"salt", b"key").password_hash()
        # build every model table in the in-memory database
        for name in classes:
            getattr(models, name).createTable(ifNotExists=True)
        models.UserConfig(uuid="123456",
                          login="login",
                          password="password",
                          hashPassword=self.hash_password,
                          salt=b"salt",
                          key=b"key")
        self.test = api.ValidJSON.parse_obj(AUTH)

    def tearDown(self):
        # drop all tables so every test starts from a clean database
        for name in classes:
            getattr(models, name).dropTable(ifExists=True,
                                            dropJoinTables=True,
                                            cascade=True)
        del self.test

    def _response(self):
        """Execute the request and return the decoded JSON response."""
        return json.loads(controller.ProtocolMethods(self.test).get_response())

    def test_authentification(self):
        self.assertEqual(self._response()["errors"]["code"], 200)

    def test_blank_database(self):
        login = self.test.data.user[0].login
        dbquery = models.UserConfig.selectBy(login=login).getOne()
        dbquery.delete(dbquery.id)
        self.assertEqual(self._response()["errors"]["code"], 404)

    def test_two_element_in_database(self):
        # a duplicate login makes the lookup ambiguous
        models.UserConfig(uuid="654321", login="login",
                          password="password", salt=b"salt", key=b"key")
        self.assertEqual(self._response()["errors"]["code"], 404)

    def test_wrong_password(self):
        self.test.data.user[0].password = "wrong_password"
        self.assertEqual(self._response()["errors"]["code"], 401)

    def test_write_in_database(self):
        login = self.test.data.user[0].login
        result = self._response()
        dbquery = models.UserConfig.selectBy(login=login).getOne()
        self.assertEqual(dbquery.authId,
                         result["data"]["user"][0]["auth_id"])
class TestDeleteUser(unittest.TestCase):
    """Tests for the ``delete_user`` API method."""

    @classmethod
    def setUpClass(cls):
        # silence loguru once per class, consistent with the other test
        # classes in this module (was previously called from setUp)
        logger.remove()

    def setUp(self):
        # build every model table in the in-memory database
        for item in classes:
            class_ = getattr(models, item)
            class_.createTable(ifNotExists=True)
        models.UserConfig(uuid="123456",
                          login="login",
                          password="password",
                          authId="auth_id")
        self.test = api.ValidJSON.parse_obj(DELETE_USER)

    def tearDown(self):
        # drop tables first, then release the request object, matching the
        # tearDown order used by the other test classes in this module
        for item in classes:
            class_ = getattr(models, item)
            class_.dropTable(ifExists=True,
                             dropJoinTables=True,
                             cascade=True)
        del self.test

    def test_delete_user(self):
        run_method = controller.ProtocolMethods(self.test)
        result = json.loads(run_method.get_response())
        self.assertEqual(result["errors"]["code"], 200)

    def test_wrong_login(self):
        self.test.data.user[0].login = "wrong_login"
        run_method = controller.ProtocolMethods(self.test)
        result = json.loads(run_method.get_response())
        self.assertEqual(result["errors"]["code"], 404)

    def test_wrong_password(self):
        self.test.data.user[0].password = "wrong_password"
        run_method = controller.ProtocolMethods(self.test)
        result = json.loads(run_method.get_response())
        self.assertEqual(result["errors"]["code"], 404)
class TestDeleteMessage(unittest.TestCase):
    """Tests for the ``delete_message`` API method."""

    @classmethod
    def setUpClass(cls):
        # silence loguru once per class, consistent with the other test
        # classes in this module (was previously called from setUp)
        logger.remove()

    def setUp(self):
        # build every model table in the in-memory database
        for item in classes:
            class_ = getattr(models, item)
            class_.createTable(ifNotExists=True)
        new_user = models.UserConfig(uuid="123456",
                                     login="login",
                                     password="password",
                                     authId="auth_id")
        new_flow = models.Flow(uuid="07d949",
                               timeCreated=111,
                               flowType="group",
                               title="group",
                               owner="123456")
        new_flow.addUserConfig(new_user)
        models.Message(uuid="1122",
                       text="Hello",
                       time=123456,
                       user=new_user,
                       flow=new_flow)
        self.test = api.ValidJSON.parse_obj(DELETE_MESSAGE)

    def tearDown(self):
        # drop tables first, then release the request object, matching the
        # tearDown order used by the other test classes in this module
        for item in classes:
            class_ = getattr(models, item)
            class_.dropTable(ifExists=True,
                             dropJoinTables=True,
                             cascade=True)
        del self.test

    def test_delete_message(self):
        run_method = controller.ProtocolMethods(self.test)
        result = json.loads(run_method.get_response())
        self.assertEqual(result["errors"]["code"], 200)

    def test_check_delete_message_in_database(self):
        # the original text must be gone after deletion
        controller.ProtocolMethods(self.test)
        dbquery = models.Message.selectBy(text="Hello")
        self.assertEqual(dbquery.count(), 0)

    def test_check_deleted_message_in_database(self):
        # deletion replaces the text with a tombstone instead of removing
        # the row
        controller.ProtocolMethods(self.test)
        dbquery = models.Message.selectBy(text="Message deleted")
        self.assertEqual(dbquery.count(), 1)

    def test_wrong_message_id(self):
        self.test.data.message[0].uuid = "2"
        run_method = controller.ProtocolMethods(self.test)
        result = json.loads(run_method.get_response())
        self.assertEqual(result["errors"]["code"], 404)
class TestEditedMessage(unittest.TestCase):
    """Tests for the ``edited_message`` API method."""

    @classmethod
    def setUpClass(cls):
        # silence loguru output for the whole class
        logger.remove()

    def setUp(self):
        # build every model table in the in-memory database
        for name in classes:
            getattr(models, name).createTable(ifNotExists=True)
        author = models.UserConfig(uuid="123456", login="login",
                                   password="password", authId="auth_id")
        flow = models.Flow(uuid="07d949", timeCreated=112,
                           flowType="group", title="group", owner="123456")
        flow.addUserConfig(author)
        # the message that the request will edit
        models.Message(uuid="1", text="Hello", time=123456,
                       user=author, flow=flow)
        self.test = api.ValidJSON.parse_obj(EDITED_MESSAGE)

    def tearDown(self):
        # drop all tables so every test starts from a clean database
        for name in classes:
            getattr(models, name).dropTable(ifExists=True,
                                            dropJoinTables=True,
                                            cascade=True)
        del self.test

    def _response(self):
        """Execute the request and return the decoded JSON response."""
        return json.loads(controller.ProtocolMethods(self.test).get_response())

    def test_edited_message(self):
        self.assertEqual(self._response()["errors"]["code"], 200)

    def test_new_edited_message(self):
        controller.ProtocolMethods(self.test)
        dbquery = models.Message.selectBy(id=1).getOne()
        self.assertEqual(dbquery.text, "New_Hello")

    def test_wrong_message_id(self):
        self.test.data.message[0].uuid = "3"
        self.assertEqual(self._response()["errors"]["code"], 404)
class TestPingPong(unittest.TestCase):
    """Tests for the ``ping-pong`` keep-alive method."""

    @classmethod
    def setUpClass(cls):
        # silence loguru once per class, consistent with the other test
        # classes in this module (was previously called from setUp)
        logger.remove()

    def setUp(self):
        # build every model table in the in-memory database
        for item in classes:
            class_ = getattr(models, item)
            class_.createTable(ifNotExists=True)
        models.UserConfig(uuid="123456",
                          login="login",
                          password="password",
                          authId="auth_id")
        self.test = api.ValidJSON.parse_obj(PING_PONG)

    def tearDown(self):
        # drop all tables so every test starts from a clean database
        for item in classes:
            class_ = getattr(models, item)
            class_.dropTable(ifExists=True,
                             dropJoinTables=True,
                             cascade=True)
        del self.test

    def test_ping_pong(self):
        run_method = controller.ProtocolMethods(self.test)
        result = json.loads(run_method.get_response())
        self.assertEqual(result["errors"]["code"], 200)
class TestErrors(unittest.TestCase):
    """Tests for the protocol's error responses to malformed requests."""

    @classmethod
    def setUpClass(cls):
        # silence loguru once per class, consistent with the other test
        # classes in this module (was previously called from setUp)
        logger.remove()

    def setUp(self):
        # build every model table in the in-memory database
        for item in classes:
            class_ = getattr(models, item)
            class_.createTable(ifNotExists=True)
        models.UserConfig(uuid="123456",
                          login="login",
                          password="password",
                          authId="auth_id")

    def tearDown(self):
        # drop all tables so every test starts from a clean database
        for item in classes:
            class_ = getattr(models, item)
            class_.dropTable(ifExists=True,
                             dropJoinTables=True,
                             cascade=True)
        # self.test is assigned inside each test method, not in setUp, so
        # guard against a failure that happened before the assignment —
        # an unconditional `del` would raise AttributeError and mask the
        # original test error
        if hasattr(self, "test"):
            del self.test

    def test_wrong_type(self):
        # unknown method name
        self.test = api.ValidJSON.parse_obj(ERRORS)
        run_method = controller.ProtocolMethods(self.test)
        result = json.loads(run_method.get_response())
        self.assertEqual(result["errors"]["code"], 405)

    def test_unsupported_media_type(self):
        # request without a "type" key
        self.test = json.dumps(NON_VALID_ERRORS)
        run_method = controller.ProtocolMethods(self.test)
        result = json.loads(run_method.get_response())
        self.assertEqual(result["errors"]["code"], 415)

    def test_only_type_in_request(self):
        # request consisting of nothing but a "type" key
        self.test = json.dumps(ERRORS_ONLY_TYPE)
        run_method = controller.ProtocolMethods(self.test)
        result = json.loads(run_method.get_response())
        self.assertEqual(result["errors"]["code"], 415)
if __name__ == "__main__":
    # allow running this test module directly with the unittest runner
    unittest.main()
| 33.338016 | 75 | 0.534157 | 3,737 | 37,972 | 5.248328 | 0.080278 | 0.046908 | 0.087238 | 0.099271 | 0.80722 | 0.786213 | 0.768011 | 0.725335 | 0.704839 | 0.688115 | 0 | 0.025822 | 0.347282 | 37,972 | 1,138 | 76 | 33.367311 | 0.765503 | 0.011377 | 0 | 0.663286 | 0 | 0 | 0.077181 | 0 | 0 | 0 | 0 | 0 | 0.059838 | 1 | 0.100406 | false | 0.030426 | 0.013185 | 0 | 0.128803 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
c27b66a5dd16f76488edd5a1490f7443286605f6 | 145 | py | Python | tests/context.py | paoloinglese/pyimzML | 56a94ca45669aaae697f044eef50a8fd459e3b82 | [
"Apache-2.0"
] | 21 | 2016-03-29T14:55:40.000Z | 2022-02-25T01:53:03.000Z | tests/context.py | leorrose/pyimzML | 2a9da6c1b5e71e39ad01a2823a299029a87244ba | [
"Apache-2.0"
] | 21 | 2016-06-16T15:17:34.000Z | 2022-01-01T18:34:13.000Z | tests/context.py | leorrose/pyimzML | 2a9da6c1b5e71e39ad01a2823a299029a87244ba | [
"Apache-2.0"
] | 14 | 2015-09-03T15:26:55.000Z | 2022-03-02T13:41:58.000Z | import numpy as np
def getspectrum(min_mz, max_mz, n_peaks):
    """Generate a random synthetic mass spectrum for tests.

    :param min_mz: lower bound of the m/z range
    :param max_mz: upper bound of the m/z range
    :param n_peaks: number of peaks to generate
    :return: tuple ``(mzs, intensities)`` of two 1-D arrays of length
             ``n_peaks``; m/z values are uniform in ``[min_mz, max_mz)``
             and intensities are non-negative (absolute Gaussian draws).
    """
    # Bug fix: the original expression `min_mz + max_mz * rand()` sampled
    # from [min_mz, min_mz + max_mz), overshooting the requested upper
    # bound; scale by the interval width instead.
    mzs = min_mz + (max_mz - min_mz) * np.random.rand(n_peaks)
    intensities = np.abs(np.random.randn(n_peaks))
    return mzs, intensities
c28aaea58572decb6c65e9731bf33d2265e60e46 | 28,814 | py | Python | tests/integration/mongodb/factory/prof/profmgrcrud.py | RaenonX/Jelly-Bot-API | c7da1e91783dce3a2b71b955b3a22b68db9056cf | [
"MIT"
] | 5 | 2020-08-26T20:12:00.000Z | 2020-12-11T16:39:22.000Z | tests/integration/mongodb/factory/prof/profmgrcrud.py | RaenonX/Jelly-Bot | c7da1e91783dce3a2b71b955b3a22b68db9056cf | [
"MIT"
] | 234 | 2019-12-14T03:45:19.000Z | 2020-08-26T18:55:19.000Z | tests/integration/mongodb/factory/prof/profmgrcrud.py | RaenonX/Jelly-Bot-API | c7da1e91783dce3a2b71b955b3a22b68db9056cf | [
"MIT"
] | 2 | 2019-10-23T15:21:15.000Z | 2020-05-22T09:35:55.000Z | from bson import ObjectId
from pymongo.errors import DuplicateKeyError
from flags import Platform, ProfilePermission, ProfilePermissionDefault
from models import OID_KEY, ChannelModel, ChannelProfileModel, ChannelConfigModel, ChannelProfileConnectionModel
from models.exceptions import RequiredKeyNotFilledError, InvalidModelFieldError
from mongodb.factory import ChannelManager, ProfileManager
from mongodb.factory.prof_base import ProfileDataManager, UserProfileManager
from mongodb.factory.results import OperationOutcome, WriteOutcome, GetOutcome, UpdateOutcome, ArgumentParseResult
from strres.mongodb import Profile
from tests.base import TestDatabaseMixin, TestModelMixin
__all__ = ["TestProfileManagerCRUD"]
class TestProfileManagerCRUD(TestModelMixin, TestDatabaseMixin):
    """Integration tests for the CRUD surface of ``ProfileManager``.

    These tests run against the test database supplied by
    ``TestDatabaseMixin``. The ``ObjectId`` constants below serve as fixed
    channel / user / profile identities; they do not correspond to real
    records unless a test registers them first.
    """

    # Pre-generated OIDs reused as stand-in identities across the tests.
    CHANNEL_OID = ObjectId()
    CHANNEL_OID_2 = ObjectId()
    USER_OID = ObjectId()
    USER_OID_2 = ObjectId()
    PROF_OID_1 = ObjectId()
    PROF_OID_2 = ObjectId()

    @staticmethod
    def obj_to_clear():
        # Collections wiped between tests by the database mixin.
        return [ProfileManager]

    def test_create_default(self):
        """Creating the default profile of a registered channel reports
        O_DATA_EXISTS (it was already created on registration, surfacing a
        ``DuplicateKeyError``) and its OID is stored in the channel config."""
        ChannelManager.clear()
        channel_oid = ChannelManager.ensure_register(Platform.LINE, "U123456").model.id
        result = ProfileManager.create_default_profile(channel_oid)
        self.assertEqual(result.outcome, WriteOutcome.O_DATA_EXISTS)
        self.assertTrue(result.success)
        self.assertIsNotNone(result.exception)
        self.assertIsInstance(result.exception, DuplicateKeyError)
        self.assertModelEqual(result.model,
                              ChannelProfileModel(ChannelOid=channel_oid, Name=str(Profile.DEFAULT_PROFILE_NAME)))
        d = ChannelManager.find_one({OID_KEY: channel_oid})
        self.assertEqual(d[ChannelModel.Config.key][ChannelConfigModel.DefaultProfileOid.key], result.model.id)

    def test_create_default_no_channel(self):
        """Default-profile creation fails cleanly when the channel is unknown."""
        result = ProfileManager.create_default_profile(self.CHANNEL_OID)
        self.assertEqual(result.outcome, WriteOutcome.X_CHANNEL_NOT_FOUND)
        self.assertFalse(result.success)
        self.assertIsNone(result.exception)
        self.assertIsNone(result.model)

    def test_create_default_not_to_set(self):
        """With ``set_to_channel=False`` the call still succeeds; the channel
        config already references the profile created at registration."""
        ChannelManager.clear()
        channel_oid = ChannelManager.ensure_register(Platform.LINE, "U123456").model.id
        result = ProfileManager.create_default_profile(channel_oid, set_to_channel=False)
        self.assertEqual(result.outcome, WriteOutcome.O_DATA_EXISTS)
        self.assertTrue(result.success)
        self.assertIsNotNone(result.exception)
        self.assertIsInstance(result.exception, DuplicateKeyError)
        self.assertIsNotNone(result.model)
        d = ChannelManager.find_one({OID_KEY: channel_oid})
        self.assertIsNotNone(d)
        self.assertEqual(d[ChannelModel.Config.key].get(ChannelConfigModel.DefaultProfileOid.key), result.model.id)

    def test_create_default_not_to_set_no_channel(self):
        """Unknown channel with ``set_to_channel=False`` fails and writes nothing."""
        ChannelManager.clear()
        result = ProfileManager.create_default_profile(self.CHANNEL_OID, set_to_channel=False)
        self.assertEqual(result.outcome, WriteOutcome.X_CHANNEL_NOT_FOUND)
        self.assertFalse(result.success)
        self.assertIsNone(result.exception)
        self.assertIsNone(result.model)
        d = ChannelManager.find_one({OID_KEY: self.CHANNEL_OID})
        self.assertIsNone(d)

    def test_register_new(self):
        """Registering a new named profile inserts it and attaches it to the user."""
        ChannelManager.clear()
        channel_oid = ChannelManager.ensure_register(Platform.LINE, "U123456").model.id
        ProfileManager.register_new_default(channel_oid, self.USER_OID)
        result = ProfileManager.register_new(self.USER_OID, ChannelOid=channel_oid, Name="A")
        self.assertEqual(result.outcome, WriteOutcome.O_INSERTED)
        self.assertTrue(result.success)
        self.assertIsNone(result.exception)
        self.assertEqual(result.attach_outcome, OperationOutcome.O_COMPLETED)
        self.assertModelEqual(ProfileDataManager.find_one_casted({ChannelProfileModel.Name.key: "A"}), result.model)
        self.assertEqual(ProfileDataManager.count_documents({ChannelProfileModel.Name.key: "A"}), 1)

    def test_register_new_missing_args(self):
        """Missing required model fields abort both the insert and the attach."""
        result = ProfileManager.register_new(self.USER_OID, Name="A")
        self.assertEqual(result.outcome, WriteOutcome.X_REQUIRED_NOT_FILLED)
        self.assertFalse(result.success)
        self.assertIsInstance(result.exception, RequiredKeyNotFilledError)
        self.assertEqual(result.attach_outcome, OperationOutcome.X_NOT_EXECUTED)
        self.assertEqual(ProfileDataManager.count_documents({ChannelProfileModel.Name.key: "A"}), 0)

    def test_register_new_args_type_mismatch(self):
        """A field of the wrong type aborts the registration with no writes."""
        result = ProfileManager.register_new(self.USER_OID, ChannelOid=self.CHANNEL_OID, Name=object())
        self.assertEqual(result.outcome, WriteOutcome.X_TYPE_MISMATCH)
        self.assertFalse(result.success)
        self.assertIsInstance(result.exception, InvalidModelFieldError)
        self.assertEqual(result.attach_outcome, OperationOutcome.X_NOT_EXECUTED)
        self.assertEqual(ProfileDataManager.count_documents({}), 0)

    def test_register_new_user_not_found(self):
        """Registering for an unseen user still inserts the profile; the
        channel's default profile is created alongside (2 documents total)."""
        ChannelManager.clear()
        channel_oid = ChannelManager.ensure_register(Platform.LINE, "U123456").model.id
        result = ProfileManager.register_new(self.USER_OID, ChannelOid=channel_oid, Name="A")
        self.assertEqual(result.outcome, WriteOutcome.O_INSERTED)
        self.assertTrue(result.success)
        self.assertIsNone(result.exception)
        self.assertEqual(result.attach_outcome, OperationOutcome.O_COMPLETED)
        self.assertEqual(ProfileDataManager.count_documents({}), 2)
        self.assertEqual(
            ProfileDataManager.count_documents({ChannelProfileModel.Name.key: str(Profile.DEFAULT_PROFILE_NAME)}), 1
        )
        self.assertEqual(ProfileDataManager.count_documents({ChannelProfileModel.Name.key: "A"}), 1)
        self.assertModelEqual(ProfileDataManager.find_one_casted({ChannelProfileModel.Name.key: "A"}), result.model)

    def test_register_new_name_conflict(self):
        """Re-registering an existing name yields O_DATA_EXISTS and no duplicate."""
        ChannelManager.clear()
        channel_oid = ChannelManager.ensure_register(Platform.LINE, "U123456").model.id
        ProfileManager.register_new(self.USER_OID, ChannelOid=channel_oid, Name="A")
        result = ProfileManager.register_new(self.USER_OID, ChannelOid=channel_oid, Name="A")
        self.assertEqual(result.outcome, WriteOutcome.O_DATA_EXISTS)
        self.assertTrue(result.success)
        self.assertIsNotNone(result.exception)
        self.assertIsInstance(result.exception, DuplicateKeyError)
        self.assertEqual(result.attach_outcome, OperationOutcome.O_COMPLETED)
        self.assertModelEqual(ProfileDataManager.find_one_casted({ChannelProfileModel.Name.key: "A"}), result.model)
        self.assertEqual(ProfileDataManager.count_documents({ChannelProfileModel.Name.key: "A"}), 1)

    def test_register_new_insuf_perm(self):
        """A profile granting permissions the creator does not hold is rejected."""
        ChannelManager.clear()
        channel_oid = ChannelManager.ensure_register(Platform.LINE, "U123456").model.id
        ProfileManager.register_new_default(channel_oid, self.USER_OID)
        result = ProfileManager.register_new(
            self.USER_OID, ChannelOid=channel_oid, Name="A",
            Permission=ProfilePermissionDefault.get_default_code_str_dict({ProfilePermission.MBR_CHANGE_MEMBERS}))
        self.assertEqual(result.outcome, WriteOutcome.X_INSUFFICIENT_PERMISSION)
        self.assertFalse(result.success)
        self.assertIsNone(result.exception)
        self.assertEqual(result.attach_outcome, OperationOutcome.X_NOT_EXECUTED)
        self.assertEqual(ProfileDataManager.count_documents({ChannelProfileModel.Name.key: "A"}), 0)

    def test_register_new_via_parsed_args(self):
        """Registration via pre-parsed kwargs succeeds end to end."""
        ChannelManager.clear()
        channel_oid = ChannelManager.ensure_register(Platform.LINE, "U123456").model.id
        ProfileManager.register_new_default(channel_oid, self.USER_OID)
        result = ProfileManager.register_new(
            self.USER_OID,
            ProfileManager.process_create_profile_kwargs({
                "ChannelOid": channel_oid,
                "Name": "A"
            })
        )
        self.assertEqual(result.outcome, WriteOutcome.O_INSERTED)
        self.assertTrue(result.success)
        self.assertIsNone(result.exception)
        self.assertEqual(result.attach_outcome, OperationOutcome.O_COMPLETED)
        self.assertEqual(result.parse_arg_outcome, OperationOutcome.O_COMPLETED)
        self.assertModelEqual(ProfileDataManager.find_one_casted({ChannelProfileModel.Name.key: "A"}), result.model)
        self.assertEqual(ProfileDataManager.count_documents({ChannelProfileModel.Name.key: "A"}), 1)

    def test_register_new_parsed_args_failed(self):
        """A parse failure in the kwargs short-circuits the whole registration;
        only the default profile created earlier remains in the collection."""
        ChannelManager.clear()
        channel_oid = ChannelManager.ensure_register(Platform.LINE, "U123456").model.id
        ProfileManager.register_new_default(channel_oid, self.USER_OID)
        result = ProfileManager.register_new(
            self.USER_OID,
            ProfileManager.process_create_profile_kwargs({
                "ChannelOid": channel_oid,
                "Name": object()
            })
        )
        self.assertEqual(result.outcome, WriteOutcome.X_NOT_EXECUTED)
        self.assertFalse(result.success)
        self.assertIsNone(result.exception)
        self.assertEqual(result.attach_outcome, OperationOutcome.X_NOT_EXECUTED)
        self.assertEqual(result.parse_arg_outcome, OperationOutcome.X_VALUE_TYPE_MISMATCH)
        self.assertEqual(ProfileDataManager.count_documents({}), 1)

    def test_register_new_default(self):
        """Registering the default twice is idempotent: the second call hits
        the cache/DB (O_CACHE_DB) and returns the stored profile."""
        ChannelManager.clear()
        channel_oid = ChannelManager.ensure_register(Platform.LINE, "U123456").model.id
        ProfileManager.register_new_default(channel_oid, self.USER_OID)
        result = ProfileManager.register_new_default(channel_oid, self.USER_OID)
        self.assertEqual(result.outcome, GetOutcome.O_CACHE_DB)
        self.assertTrue(result.success)
        self.assertIsNone(result.exception)
        self.assertEqual(result.attach_outcome, OperationOutcome.O_COMPLETED)
        self.assertModelEqual(ProfileDataManager.find_one_casted(), result.model)

    def test_register_new_default_channel_not_found(self):
        """Default registration on an unknown channel fails without attaching."""
        ChannelManager.clear()
        result = ProfileManager.register_new_default(self.CHANNEL_OID, self.USER_OID)
        self.assertEqual(result.outcome, GetOutcome.X_CHANNEL_NOT_FOUND)
        self.assertFalse(result.success)
        self.assertIsNone(result.exception)
        self.assertEqual(result.attach_outcome, OperationOutcome.X_NOT_EXECUTED)

    def test_register_new_default_user_not_found(self):
        """An unseen user can still be attached to the channel's default profile."""
        ChannelManager.clear()
        channel_oid = ChannelManager.ensure_register(Platform.LINE, "U123456").model.id
        result = ProfileManager.register_new_default(channel_oid, self.USER_OID)
        self.assertEqual(result.outcome, GetOutcome.O_CACHE_DB)
        self.assertTrue(result.success)
        self.assertIsNone(result.exception)
        self.assertEqual(result.attach_outcome, OperationOutcome.O_COMPLETED)
        self.assertModelEqual(ProfileDataManager.find_one_casted(), result.model)

    def test_register_new_model(self):
        """Registering a pre-built ``ChannelProfileModel`` inserts and attaches it."""
        ChannelManager.clear()
        channel_oid = ChannelManager.ensure_register(Platform.LINE, "U123456").model.id
        ProfileManager.register_new_default(channel_oid, self.USER_OID)
        result = ProfileManager.register_new_model(
            self.USER_OID, ChannelProfileModel(ChannelOid=channel_oid, Name="A"))
        self.assertEqual(result.outcome, WriteOutcome.O_INSERTED)
        self.assertTrue(result.success)
        self.assertIsNone(result.exception)
        self.assertEqual(result.attach_outcome, OperationOutcome.O_COMPLETED)
        self.assertModelEqual(
            ProfileDataManager.find_one_casted({ChannelProfileModel.Name.key: "A"}),
            result.model
        )
        self.assertEqual(ProfileDataManager.count_documents({ChannelProfileModel.Name.key: "A"}), 1)

    def test_register_new_model_user_not_found(self):
        """Model registration for an unseen user inserts the profile next to
        the channel default (2 documents, one of each name)."""
        ChannelManager.clear()
        channel_oid = ChannelManager.ensure_register(Platform.LINE, "U123456").model.id
        result = ProfileManager.register_new_model(
            self.USER_OID, ChannelProfileModel(ChannelOid=channel_oid, Name="A"))
        self.assertEqual(result.outcome, WriteOutcome.O_INSERTED)
        self.assertTrue(result.success)
        self.assertIsNone(result.exception)
        self.assertEqual(result.attach_outcome, OperationOutcome.O_COMPLETED)
        self.assertModelEqual(
            ProfileDataManager.find_one_casted({ChannelProfileModel.Name.key: "A"}),
            result.model
        )
        self.assertEqual(ProfileDataManager.count_documents({}), 2)
        self.assertEqual(
            ProfileDataManager.count_documents({ChannelProfileModel.Name.key: str(Profile.DEFAULT_PROFILE_NAME)}),
            1
        )
        self.assertEqual(
            ProfileDataManager.count_documents({ChannelProfileModel.Name.key: "A"}),
            1
        )

    def test_register_new_model_name_conflict(self):
        """A model whose name already exists resolves to the stored profile."""
        ChannelManager.clear()
        channel_oid = ChannelManager.ensure_register(Platform.LINE, "U123456").model.id
        ProfileManager.register_new(self.USER_OID, ChannelOid=channel_oid, Name="A")
        result = ProfileManager.register_new_model(
            self.USER_OID, ChannelProfileModel(ChannelOid=channel_oid, Name="A"))
        self.assertEqual(result.outcome, WriteOutcome.O_DATA_EXISTS)
        self.assertTrue(result.success)
        self.assertIsNotNone(result.exception)
        self.assertIsInstance(result.exception, DuplicateKeyError)
        self.assertEqual(result.attach_outcome, OperationOutcome.O_COMPLETED)
        self.assertModelEqual(
            ProfileDataManager.find_one_casted({ChannelProfileModel.Name.key: "A"}),
            result.model
        )
        self.assertEqual(ProfileDataManager.count_documents({ChannelProfileModel.Name.key: "A"}), 1)

    def test_register_new_model_insuf_perm(self):
        """A model granting permissions beyond the creator's own is rejected."""
        ChannelManager.clear()
        channel_oid = ChannelManager.ensure_register(Platform.LINE, "U123456").model.id
        ProfileManager.register_new_default(channel_oid, self.USER_OID)
        result = ProfileManager.register_new_model(
            self.USER_OID,
            ChannelProfileModel(
                ChannelOid=channel_oid, Name="A",
                Permission=ProfilePermissionDefault.get_default_code_str_dict({ProfilePermission.MBR_CHANGE_MEMBERS})))
        self.assertEqual(result.outcome, WriteOutcome.X_INSUFFICIENT_PERMISSION)
        self.assertFalse(result.success)
        self.assertIsNone(result.exception)
        self.assertEqual(result.attach_outcome, OperationOutcome.X_NOT_EXECUTED)
        self.assertEqual(ProfileDataManager.count_documents({ChannelProfileModel.Name.key: "A"}), 0)

    def test_update(self):
        """A plain name update is applied to the stored profile."""
        mdl = ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="ABC")
        ProfileDataManager.insert_one_model(mdl)
        result = ProfileManager.update_profile(self.CHANNEL_OID, self.USER_OID, mdl.id,
                                               **{ChannelProfileModel.Name.key: "B"})
        self.assertEqual(result, UpdateOutcome.O_UPDATED)
        self.assertModelEqual(
            ProfileDataManager.find_one_casted(),
            ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="B")
        )

    def test_update_addl_args(self):
        """Unknown extra keys are stripped; the valid part still applies."""
        mdl = ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="ABC")
        ProfileDataManager.insert_one_model(mdl)
        result = ProfileManager.update_profile(
            self.CHANNEL_OID, self.USER_OID, mdl.id,
            **{ChannelProfileModel.Name.key: "B", "ABCDEF": "B"})
        self.assertEqual(result, UpdateOutcome.O_PARTIAL_ARGS_REMOVED)
        self.assertModelEqual(
            ProfileDataManager.find_one_casted(),
            ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="B")
        )

    def test_update_invalid_args(self):
        """Type-invalid values are dropped; the valid part still applies."""
        mdl = ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="ABC")
        ProfileDataManager.insert_one_model(mdl)
        result = ProfileManager.update_profile(
            self.CHANNEL_OID, self.USER_OID, mdl.id,
            **{ChannelProfileModel.Name.key: "B", ChannelProfileModel.Color.key: object()})
        self.assertEqual(result, UpdateOutcome.O_PARTIAL_ARGS_INVALID)
        self.assertModelEqual(
            ProfileDataManager.find_one_casted(),
            ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="B")
        )

    def test_update_insuf_perm(self):
        """Granting a permission the updating user lacks rejects the whole update."""
        mdl = ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="ABC")
        ProfileDataManager.insert_one_model(mdl)
        UserProfileManager.insert_one_model(
            ChannelProfileConnectionModel(ChannelOid=self.CHANNEL_OID, UserOid=self.USER_OID, ProfileOids=[mdl.id]))
        result = ProfileManager.update_profile(
            self.CHANNEL_OID, self.USER_OID, mdl.id,
            **{ChannelProfileModel.Name.key: "B",
               f"{ChannelProfileModel.Permission.key}.{ProfilePermission.AR_ACCESS_PINNED_MODULE.code_str}": True})
        self.assertEqual(result, UpdateOutcome.X_INSUFFICIENT_PERMISSION)
        self.assertModelEqual(
            ProfileDataManager.find_one_casted(),
            ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="ABC")
        )

    def test_update_uneditable(self):
        """Immutable fields such as ChannelOid cannot be updated."""
        mdl = ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="ABC")
        ProfileDataManager.insert_one_model(mdl)
        UserProfileManager.insert_one_model(
            ChannelProfileConnectionModel(ChannelOid=self.CHANNEL_OID, UserOid=self.USER_OID, ProfileOids=[mdl.id]))
        result = ProfileManager.update_profile(
            self.CHANNEL_OID, self.USER_OID, mdl.id,
            **{ChannelProfileModel.ChannelOid.key: self.CHANNEL_OID_2})
        self.assertEqual(result, UpdateOutcome.X_UNEDITABLE)
        self.assertModelEqual(
            ProfileDataManager.find_one_casted(),
            ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="ABC")
        )

    def test_update_via_parsed(self):
        """An update driven by a successful ``ArgumentParseResult`` is applied."""
        arg_result = ArgumentParseResult(OperationOutcome.O_COMPLETED, None, {ChannelProfileModel.Name.key: "B"})
        mdl = ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="ABC")
        ProfileDataManager.insert_one_model(mdl)
        result = ProfileManager.update_profile(self.CHANNEL_OID, self.USER_OID, mdl.id, arg_result)
        self.assertEqual(result, UpdateOutcome.O_UPDATED)
        self.assertModelEqual(
            ProfileDataManager.find_one_casted(),
            ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="B")
        )

    def test_update_via_parsed_failed(self):
        """A failed parse result aborts the update; the profile is untouched."""
        arg_result = ArgumentParseResult(OperationOutcome.X_NOT_EXECUTED, None, {})
        mdl = ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="ABC")
        ProfileDataManager.insert_one_model(mdl)
        result = ProfileManager.update_profile(self.CHANNEL_OID, self.USER_OID, mdl.id, arg_result)
        self.assertEqual(result, UpdateOutcome.X_ARGS_PARSE_FAILED)
        self.assertModelEqual(
            ProfileDataManager.find_one_casted(),
            ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="ABC")
        )

    def test_update_via_parsed_insuf_perm(self):
        """Permission escalation via parsed args is rejected like direct kwargs."""
        arg_result = ArgumentParseResult(
            OperationOutcome.O_COMPLETED, None,
            {ChannelProfileModel.Name.key: "B",
             f"{ChannelProfileModel.Permission.key}.{ProfilePermission.AR_ACCESS_PINNED_MODULE.code_str}": True})
        mdl = ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="ABC")
        ProfileDataManager.insert_one_model(mdl)
        UserProfileManager.insert_one_model(
            ChannelProfileConnectionModel(ChannelOid=self.CHANNEL_OID, UserOid=self.USER_OID, ProfileOids=[mdl.id]))
        result = ProfileManager.update_profile(self.CHANNEL_OID, self.USER_OID, mdl.id, arg_result)
        self.assertEqual(result, UpdateOutcome.X_INSUFFICIENT_PERMISSION)
        self.assertModelEqual(
            ProfileDataManager.find_one_casted(),
            ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="ABC")
        )

    def test_star(self):
        """Starring toggles the flag; re-applying the same state is a no-op
        that returns ``False``."""
        UserProfileManager.insert_one_model(
            ChannelProfileConnectionModel(ChannelOid=self.CHANNEL_OID, UserOid=self.USER_OID, ProfileOids=[]))
        self.assertTrue(ProfileManager.update_channel_star(self.CHANNEL_OID, self.USER_OID, True))
        self.assertEqual(UserProfileManager.count_documents({ChannelProfileConnectionModel.Starred.key: False}), 0)
        self.assertEqual(UserProfileManager.count_documents({ChannelProfileConnectionModel.Starred.key: True}), 1)
        self.assertFalse(ProfileManager.update_channel_star(self.CHANNEL_OID, self.USER_OID, True))
        self.assertEqual(UserProfileManager.count_documents({ChannelProfileConnectionModel.Starred.key: False}), 0)
        self.assertEqual(UserProfileManager.count_documents({ChannelProfileConnectionModel.Starred.key: True}), 1)
        self.assertTrue(ProfileManager.update_channel_star(self.CHANNEL_OID, self.USER_OID, False))
        self.assertEqual(UserProfileManager.count_documents({ChannelProfileConnectionModel.Starred.key: False}), 1)
        self.assertEqual(UserProfileManager.count_documents({ChannelProfileConnectionModel.Starred.key: True}), 0)

    def test_star_not_found(self):
        """Starring a non-existent user/channel connection reports failure."""
        self.assertFalse(ProfileManager.update_channel_star(self.CHANNEL_OID, self.USER_OID, False))
        self.assertFalse(ProfileManager.update_channel_star(self.CHANNEL_OID, self.USER_OID, True))

    def test_attach_bypass_existence_check(self):
        """``bypass_existence_check=True`` skips the profile-exists validation."""
        mdl = ChannelProfileConnectionModel(ChannelOid=self.CHANNEL_OID, UserOid=self.USER_OID, ProfileOids=[])
        UserProfileManager.insert_one_model(mdl)
        self.assertNotEqual(
            ProfileManager.attach_profile(self.CHANNEL_OID, self.USER_OID, mdl.id, bypass_existence_check=True),
            OperationOutcome.X_PROFILE_NOT_FOUND_OID
        )
        
    def test_delete(self):
        """A user holding PRF_CONTROL permissions can delete a profile; it is
        detached from every member and removed from the data collection."""
        mdl = ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="ABC",
                                  Permission={ProfilePermission.AR_ACCESS_PINNED_MODULE.code_str: True,
                                              ProfilePermission.PRF_CONTROL_SELF.code_str: True,
                                              ProfilePermission.PRF_CONTROL_MEMBER.code_str: True})
        mdl2 = ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="DEF")
        ProfileDataManager.insert_one_model(mdl)
        ProfileDataManager.insert_one_model(mdl2)
        UserProfileManager.insert_one_model(
            ChannelProfileConnectionModel(ChannelOid=self.CHANNEL_OID, UserOid=self.USER_OID,
                                          ProfileOids=[mdl.id, mdl2.id]))
        UserProfileManager.insert_one_model(
            ChannelProfileConnectionModel(ChannelOid=self.CHANNEL_OID, UserOid=self.USER_OID_2,
                                          ProfileOids=[mdl.id, mdl2.id]))
        result = ProfileManager.delete_profile(self.CHANNEL_OID, mdl2.id, self.USER_OID)
        self.assertEqual(result, OperationOutcome.O_COMPLETED)
        self.assertModelEqual(
            UserProfileManager.find_one_casted({ChannelProfileConnectionModel.UserOid.key: self.USER_OID}),
            ChannelProfileConnectionModel(ChannelOid=self.CHANNEL_OID, UserOid=self.USER_OID,
                                          ProfileOids=[mdl.id])
        )
        self.assertModelEqual(
            UserProfileManager.find_one_casted({ChannelProfileConnectionModel.UserOid.key: self.USER_OID_2}),
            ChannelProfileConnectionModel(ChannelOid=self.CHANNEL_OID, UserOid=self.USER_OID_2,
                                          ProfileOids=[mdl.id])
        )
        self.assertModelEqual(
            ProfileDataManager.find_one_casted({OID_KEY: mdl.id}), mdl
        )
        self.assertIsNone(ProfileDataManager.find_one({OID_KEY: mdl2.id}))

    def test_delete_insuf_perm(self):
        """A user without control permissions cannot delete; nothing changes."""
        mdl = ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="ABC",
                                  Permission={ProfilePermission.AR_ACCESS_PINNED_MODULE.code_str: True,
                                              ProfilePermission.PRF_CONTROL_SELF.code_str: True,
                                              ProfilePermission.PRF_CONTROL_MEMBER.code_str: True})
        mdl2 = ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="DEF")
        ProfileDataManager.insert_one_model(mdl)
        ProfileDataManager.insert_one_model(mdl2)
        UserProfileManager.insert_one_model(
            ChannelProfileConnectionModel(ChannelOid=self.CHANNEL_OID, UserOid=self.USER_OID,
                                          ProfileOids=[mdl.id, mdl2.id]))
        UserProfileManager.insert_one_model(
            ChannelProfileConnectionModel(ChannelOid=self.CHANNEL_OID, UserOid=self.USER_OID_2,
                                          ProfileOids=[mdl2.id]))
        result = ProfileManager.delete_profile(self.CHANNEL_OID, mdl2.id, self.USER_OID_2)
        self.assertEqual(result, OperationOutcome.X_INSUFFICIENT_PERMISSION)
        self.assertModelEqual(
            UserProfileManager.find_one_casted({ChannelProfileConnectionModel.UserOid.key: self.USER_OID}),
            ChannelProfileConnectionModel(ChannelOid=self.CHANNEL_OID, UserOid=self.USER_OID,
                                          ProfileOids=[mdl.id, mdl2.id])
        )
        self.assertModelEqual(
            UserProfileManager.find_one_casted({ChannelProfileConnectionModel.UserOid.key: self.USER_OID_2}),
            ChannelProfileConnectionModel(ChannelOid=self.CHANNEL_OID, UserOid=self.USER_OID_2,
                                          ProfileOids=[mdl2.id])
        )
        self.assertModelEqual(
            ProfileDataManager.find_one_casted({OID_KEY: mdl.id}), mdl
        )
        self.assertModelEqual(
            ProfileDataManager.find_one_casted({OID_KEY: mdl2.id}), mdl2
        )

    def test_delete_not_exists(self):
        """Deleting an OID that no profile has reports PROFILE_NOT_FOUND."""
        UserProfileManager.insert_one_model(
            ChannelProfileConnectionModel(ChannelOid=self.CHANNEL_OID, UserOid=self.USER_OID,
                                          ProfileOids=[ObjectId()]))
        result = ProfileManager.delete_profile(self.CHANNEL_OID, ObjectId(), self.USER_OID)
        self.assertEqual(result, OperationOutcome.X_PROFILE_NOT_FOUND_OID)

    def test_mark_unavailable_async(self):
        """Marking a user unavailable asynchronously detaches all of their
        profiles in the channel (``join()`` waits for the worker thread)."""
        mdl = ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="ABC",
                                  Permission={ProfilePermission.AR_ACCESS_PINNED_MODULE.code_str: True,
                                              ProfilePermission.PRF_CONTROL_SELF.code_str: True,
                                              ProfilePermission.PRF_CONTROL_MEMBER.code_str: True})
        mdl2 = ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="DEF")
        ProfileDataManager.insert_one_model(mdl)
        ProfileDataManager.insert_one_model(mdl2)
        UserProfileManager.insert_one_model(
            ChannelProfileConnectionModel(ChannelOid=self.CHANNEL_OID, UserOid=self.USER_OID,
                                          ProfileOids=[mdl.id, mdl2.id])
        )
        ProfileManager.mark_unavailable_async(self.CHANNEL_OID, self.USER_OID).join()
        self.assertModelEqual(
            UserProfileManager.find_one_casted(),
            ChannelProfileConnectionModel(ChannelOid=self.CHANNEL_OID, UserOid=self.USER_OID, ProfileOids=[])
        )

    def test_mark_unavailable_async_miss(self):
        """Marking a different user unavailable leaves the connection intact."""
        mdl = ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="ABC",
                                  Permission={ProfilePermission.AR_ACCESS_PINNED_MODULE.code_str: True,
                                              ProfilePermission.PRF_CONTROL_SELF.code_str: True,
                                              ProfilePermission.PRF_CONTROL_MEMBER.code_str: True})
        mdl2 = ChannelProfileModel(ChannelOid=self.CHANNEL_OID, Name="DEF")
        ProfileDataManager.insert_one_model(mdl)
        ProfileDataManager.insert_one_model(mdl2)
        UserProfileManager.insert_one_model(
            ChannelProfileConnectionModel(ChannelOid=self.CHANNEL_OID, UserOid=self.USER_OID,
                                          ProfileOids=[mdl.id, mdl2.id])
        )
        ProfileManager.mark_unavailable_async(self.CHANNEL_OID, self.USER_OID_2).join()
        self.assertModelEqual(
            UserProfileManager.find_one_casted(),
            ChannelProfileConnectionModel(ChannelOid=self.CHANNEL_OID, UserOid=self.USER_OID,
                                          ProfileOids=[mdl.id, mdl2.id])
        )
| 49.339041 | 119 | 0.70674 | 2,905 | 28,814 | 6.75043 | 0.057487 | 0.055584 | 0.047833 | 0.052626 | 0.919021 | 0.893371 | 0.888577 | 0.873534 | 0.84564 | 0.830444 | 0 | 0.006141 | 0.20313 | 28,814 | 583 | 120 | 49.423671 | 0.847916 | 0 | 0 | 0.657447 | 0 | 0 | 0.014993 | 0.006941 | 0 | 0 | 0 | 0 | 0.323404 | 1 | 0.076596 | false | 0.004255 | 0.021277 | 0.002128 | 0.114894 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
c2a1c404e6ce5815a32d63f80233cbe74d7d64fb | 110 | py | Python | animation/__init__.py | dansarno/pygame-pandemic-simulation | 24e4faa03538735552d189f1b5f286d80e25db4b | [
"MIT"
] | null | null | null | animation/__init__.py | dansarno/pygame-pandemic-simulation | 24e4faa03538735552d189f1b5f286d80e25db4b | [
"MIT"
] | null | null | null | animation/__init__.py | dansarno/pygame-pandemic-simulation | 24e4faa03538735552d189f1b5f286d80e25db4b | [
"MIT"
] | null | null | null | # __init__.py
from .environment import *
from .health import *
from .population import *
from .tools import *
| 18.333333 | 26 | 0.745455 | 14 | 110 | 5.571429 | 0.571429 | 0.384615 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.163636 | 110 | 5 | 27 | 22 | 0.847826 | 0.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
c2aedc4dd552108dc65ea2b4a64d7f616eeb976e | 16,089 | py | Python | bridges/tests/api/surveys_url_my/test_surveys_url_get.py | pegasystems/building-bridges | 1a278df62c56421ab08b9ad14395fe9bf57cd32f | [
"MIT"
] | 20 | 2021-04-14T13:03:49.000Z | 2022-03-29T17:56:26.000Z | bridges/tests/api/surveys_url_my/test_surveys_url_get.py | pegasystems/building-bridges | 1a278df62c56421ab08b9ad14395fe9bf57cd32f | [
"MIT"
] | 50 | 2021-04-16T17:32:14.000Z | 2022-03-04T12:27:37.000Z | bridges/tests/api/surveys_url_my/test_surveys_url_get.py | pegasystems/building-bridges | 1a278df62c56421ab08b9ad14395fe9bf57cd32f | [
"MIT"
] | 2 | 2021-07-23T01:52:38.000Z | 2022-03-30T15:42:32.000Z | import json
from http import HTTPStatus
from unittest.mock import patch
from bridges.tests.api.basic_test import BasicTest
# Fixture identities used across the tests below.
# USER1 and USER2 share the same user_id but arrive from different
# hosts/cookies; USER3 is a distinct user; USER4 additionally carries the
# full name and e-mail expected in non-anonymous survey responses.
USER1 = {'host': 'host1.example.com', 'cookie': 'cookie1', 'user_id': 'abcdefghijklmnop'}
USER2 = {'host': 'host2.example.com', 'cookie': 'cookie2', 'user_id': 'abcdefghijklmnop'}
USER3 = {'host': 'host3.example.com', 'cookie': 'cookie3', 'user_id': 'qrstuvwxy987654321'}
USER4 = {'host': 'host3.example.com', 'cookie': 'cookie3', 'user_id': 'njnkajdaj812721811',
         'full_name': 'John Doe', 'email': 'john.doe@company.com'}
class GetSurveysTest(BasicTest):
def handle_db_new_views(self):
    """Answer the two DB round-trips a survey GET triggers for view
    tracking: an empty lookup of the user's existing view, then an
    update acknowledging that one view record was added."""
    lookup = self.server.receives()
    lookup.ok(cursor={'id': 0, 'firstBatch': []})
    update = self.server.receives()
    update.ok({"nModified": 1})
def test_standard(self):
    """Fetching a survey returns its metadata plus per-question vote
    aggregates (upvotes/downvotes) and the caller's own vote/authorship,
    computed from the raw vote records stored in the DB document."""
    future = self.make_future_get_request('surveys/url-1')
    # get one survey: serve the stored document with three questions whose
    # vote lists exercise none/up/down voting states.
    request = self.server.receives()
    request.ok(cursor={'id': 0, 'firstBatch': [{
        "_id": self.example_ids[0],
        "title": "example-title",
        "description": "example_description",
        "number": 1,
        "hide_votes": False,
        "is_anonymous": True,
        'asking_questions_enabled': True,
        'voting_enabled': True,
        "results_secret": "secret",
        "admin_secret": "admin-secret",
        "views": [USER1],
        'author': {"host": "localhost", "cookie": "cookie"},
        "url": "example-url",
        "date": self.sample_timestamp(),
        "questions": [
            {
                "content": "example-content-1",
                'author': {"host": "localhost", "cookie": "cookie"},
                "reply": 'sample reply',
                "date": self.sample_timestamp(),
                "votes": [],
                "_id": self.example_ids[1],
                "hidden": False
            },
            {
                "content": "example-content-2",
                'author': {"host": "localhost", "cookie": "cookie"},
                "date": self.sample_timestamp(),
                "votes": [
                    {'author': {"host": "localhost", "cookie": "cookie"},
                     'date': self.sample_timestamp(),
                     'upvote': True
                     }
                ],
                "_id": self.example_ids[2],
                "hidden": False
            },
            {
                "content": "example-content-3",
                'author': {"host": "localhost", "cookie": "cookie"},
                "date": self.sample_timestamp(),
                "votes": [
                    {'author': {"host": "localhost", "cookie": "cookie"},
                     'date': self.sample_timestamp(),
                     'upvote': False
                     }
                ],
                "_id": self.example_ids[3],
                "hidden": False
            }
        ]}]})
    # The GET also records a view for the requesting user.
    self.handle_db_new_views()
    http_response = future()
    self.assertEqual(http_response.status_code, HTTPStatus.OK)
    data = json.loads(http_response.get_data(as_text=True))
    # Secrets and raw vote records must not leak; counts and the caller's
    # own relation to each question ("isAuthor", "voted") are derived.
    self.assertEqual(data, {
        "title": "example-title",
        "key": "example-url-1",
        "description": "example_description",
        "hideVotes": False,
        "isAnonymous": True,
        'question_author_name_field_visible': False,
        'limit_question_characters': 200,
        'limit_question_characters_enabled': False,
        'asking_questions_enabled': True,
        'voting_enabled': True,
        "viewsNumber": 1,
        "votersNumber": 1,
        "questionersNumber": 1,
        "date": self.sample_timestamp_string(),
        "questions": [
            {
                "_id": str(self.example_ids[1]),
                "content": "example-content-1",
                "reply": 'sample reply',
                "upvotes": 0,
                "downvotes": 0,
                "isAuthor": True,
                "voted": "none",
                "read": "false",
                "hidden": False,
                "isAnonymous": True,
                "authorEmail": None,
                "authorFullName": None,
                'author_nickname': None
            },
            {
                "_id": str(self.example_ids[2]),
                "content": "example-content-2",
                "reply": '',
                "upvotes": 1,
                "downvotes": 0,
                "isAuthor": True,
                "voted": "up",
                "read": "false",
                "hidden": False,
                "isAnonymous": True,
                "authorEmail": None,
                "authorFullName": None,
                'author_nickname': None
            },
            {
                "_id": str(self.example_ids[3]),
                "content": "example-content-3",
                "reply": '',
                "upvotes": 0,
                "downvotes": 1,
                "isAuthor": True,
                "voted": "down",
                "read": "false",
                "hidden": False,
                "isAnonymous": True,
                "authorEmail": None,
                "authorFullName": None,
                'author_nickname': None
            },
        ]
    })
def test_not_anonymous_survey_should_return_question_author_info(self):
    """On a non-anonymous survey, questions asked non-anonymously expose
    the author's e-mail and full name in the response."""
    future = self.make_future_get_request('surveys/url-1')
    # get one survey: a non-anonymous survey with one question authored by
    # USER4, who carries full_name/email in the fixture.
    request = self.server.receives()
    request.ok(cursor={'id': 0, 'firstBatch': [{
        "_id": self.example_ids[0],
        "title": "example-title",
        "description": "example_description",
        "number": 1,
        "hide_votes": False,
        "is_anonymous": False,
        "results_secret": "secret",
        "admin_secret": "admin-secret",
        "views": [USER4],
        'author': {"host": "localhost", "cookie": "cookie"},
        "url": "example-url",
        "date": self.sample_timestamp(),
        "questions": [
            {
                "content": "example-content-1",
                'author': USER4,
                'reply': 'reply',
                'is_anonymous': False,
                "date": self.sample_timestamp(),
                "votes": [],
                "_id": self.example_ids[1],
                "hidden": False
            },
        ]}]})
    self.handle_db_new_views()
    http_response = future()
    data = json.loads(http_response.get_data(as_text=True))
    # authorEmail / authorFullName are populated from the question author.
    self.assertEqual(data, {
        "title": "example-title",
        "key": "example-url-1",
        "description": "example_description",
        "hideVotes": False,
        "isAnonymous": False,
        'question_author_name_field_visible': False,
        'limit_question_characters': 200,
        'limit_question_characters_enabled': False,
        'asking_questions_enabled': True,
        'voting_enabled': True,
        "viewsNumber": 1,
        "votersNumber": 0,
        "questionersNumber": 1,
        "date": self.sample_timestamp_string(),
        "questions": [
            {
                "_id": str(self.example_ids[1]),
                "content": "example-content-1",
                'reply': 'reply',
                "upvotes": 0,
                "downvotes": 0,
                "isAuthor": False,
                "voted": "none",
                "read": "false",
                "hidden": False,
                "isAnonymous": False,
                "authorEmail": USER4['email'],
                "authorFullName": USER4['full_name'],
                'author_nickname': None
            },
        ]
    })
def test_hidden_votes(self):
    """When hide_votes is set, vote counts come back as None for a regular
    visitor even though the question has votes stored in the DB."""
    future = self.make_future_get_request('surveys/url-1')
    request = self.server.receives()
    # Survey fixture with hide_votes=True and one upvoted question.
    request.ok(cursor={'id': 0, 'firstBatch': [{
        "_id": self.example_ids[0],
        "title": "example-title",
        "number": 1,
        "hide_votes": True,
        "isAnonymous": True,
        'question_author_name_field_visible': False,
        'asking_questions_enabled': True,
        'voting_enabled': True,
        "results_secret": "secret",
        "admin_secret": "admin-secret",
        "views": [USER1],
        'author': {
            "host": "NOT-LOCALHOST",
            "cookie": "BAD_COOKIE"
        },
        "url": "example-url",
        "date": self.sample_timestamp(),
        "questions": [
            {
                "content": "example-content-2",
                'author': {"host": "NOT-LOCALHOST", "cookie": "BAD_COOKIE"},
                "date": self.sample_timestamp(),
                "votes": [
                    {'author': {"host": "NOT-LOCALHOST", "cookie": "BAD_COOKIE"},
                     'date': self.sample_timestamp(),
                     'upvote': True
                     }
                ],
                "_id": self.example_ids[2],
                "hidden": False,
                "isAnonymous": True
            },
        ]}]})
    self.handle_db_new_views()
    http_response = future()
    self.assertEqual(http_response.status_code, HTTPStatus.OK)
    data = json.loads(http_response.get_data(as_text=True))
    # upvotes/downvotes are None: counts are hidden from this requester.
    self.assertEqual(data, {
        "title": "example-title",
        "key": "example-url-1",
        "hideVotes": True,
        "isAnonymous": True,
        "description": None,
        'question_author_name_field_visible': False,
        'limit_question_characters': 200,
        'limit_question_characters_enabled': False,
        'asking_questions_enabled': True,
        'voting_enabled': True,
        "viewsNumber": 1, "votersNumber": 1, "questionersNumber": 1,
        "date": self.sample_timestamp_string(),
        "questions": [
            {
                "_id": str(self.example_ids[2]),
                "authorEmail": None,
                "authorFullName": None,
                "reply": "",
                'author_nickname': None,
                "content": "example-content-2",
                "upvotes": None,
                "downvotes": None,
                "isAuthor": False,
                "voted": "none",
                "read": "false",
                "hidden": False,
                "isAnonymous": True
            },
        ]
    })
def test_hidden_votes_secrets(self):
    """Supplying the correct admin or results secret reveals vote counts;
    a wrong admin secret is rejected with 401.

    Runs three sub-scenarios in sequence against the same DB fixture.
    """
    # Shared DB fixture: one question with 1 upvote and 2 downvotes.
    db_response = [{
        "_id": self.example_ids[0],
        "title": "example-title",
        "description": "example_description",
        "number": 1,
        "hide_votes": False,
        "isAnonymous": True,
        'asking_questions_enabled': True,
        'voting_enabled': True,
        "results_secret": "secret",
        "admin_secret": "admin-secret",
        'author': {"host": "NOT-LOCALHOST", "cookie": "BAD_COOKIE"},
        "url": "example-url",
        "date": self.sample_timestamp(),
        "views": [USER1, USER3],
        "questions": [
            {
                "content": "example-content-2",
                'author': {"host": "NOT-LOCALHOST", "cookie": "BAD_COOKIE"},
                "reply": "answer",
                "date": self.sample_timestamp(),
                "votes": [
                    {'author': USER1,
                     'date': self.sample_timestamp(),
                     'upvote': True
                     },
                    {'author': USER2,
                     'date': self.sample_timestamp(),
                     'upvote': False
                     },
                    {'author': USER3,
                     'date': self.sample_timestamp(),
                     'upvote': False
                     }
                ],
                "voted": "none",
                "_id": self.example_ids[2],
                "hidden": False
            },
        ]}]
    # Expected API payload when a valid secret is supplied (counts visible).
    api_correct_secret_response = {
        "title": "example-title",
        "key": "example-url-1",
        "hideVotes": False,
        "isAnonymous": True,
        "description": "example_description",
        'question_author_name_field_visible': False,
        'limit_question_characters': 200,
        'limit_question_characters_enabled': False,
        'asking_questions_enabled': True,
        'voting_enabled': True,
        "viewsNumber": 2, "votersNumber": 2, "questionersNumber": 1,
        "date": self.sample_timestamp_string(),
        "questions": [
            {
                "_id": str(self.example_ids[2]),
                "content": "example-content-2",
                "upvotes": 1,
                "downvotes": 2,
                "reply": "answer",
                "isAuthor": False,
                "voted": "none",
                "read": "false",
                "hidden": False,
                "isAnonymous": True,
                "authorEmail": None,
                "authorFullName": None,
                'author_nickname': None
            },
        ]
    }

    def handle_correct_secret(url):
        # Helper: fire the request, serve the fixture, assert the payload.
        future = self.make_future_get_request(url)
        request = self.server.receives()
        request.ok(cursor={'id': 0, 'firstBatch': db_response})
        self.handle_db_new_views()
        http_response = future()
        self.assertEqual(http_response.status_code, HTTPStatus.OK)
        data = json.loads(http_response.get_data(as_text=True))
        self.assertEqual(data, api_correct_secret_response)

    def test_hidden_votes_wrong_admin_secret():
        # Wrong admin secret: the DB is still queried but the server
        # answers 401 (no handle_db_new_views here — no view recorded).
        future = self.make_future_get_request('surveys/url-1?admin_secret=wrong-admin-secret')
        request = self.server.receives()
        request.ok(cursor={'id': 0, 'firstBatch': db_response})
        http_response = future()
        self.assertEqual(http_response.status_code, HTTPStatus.UNAUTHORIZED)

    def test_hidden_votes_with_admin_secret():
        handle_correct_secret('surveys/url-1?admin_secret=admin-secret')

    def test_hidden_votes_with_results_secret():
        handle_correct_secret('surveys/url-1?results_secret=secret')

    test_hidden_votes_with_admin_secret()
    test_hidden_votes_with_results_secret()
    test_hidden_votes_wrong_admin_secret()
def test_notFound(self):
    """An empty DB cursor for an unknown survey URL must yield 404."""
    pending = self.make_future_get_request('surveys/not-existing-url-1')
    # get one survey
    db_request = self.server.receives()
    db_request.ok(cursor={'firstBatch': [], 'id': 0})
    response = pending()
    self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
def test_badUrl(self):
    """A survey URL without a trailing number is rejected with 404
    before any DB query is issued."""
    pending = self.make_future_get_request('surveys/someVeryBadUrlWithoutNumber')
    # get one survey
    response = pending()
    self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
def test_badUrlWithDashAtEnd(self):
    """A survey URL ending in a dash (number missing) also yields 404
    without touching the DB."""
    pending = self.make_future_get_request('surveys/someVeryBadUrlWithoutNumber-')
    # get one survey
    response = pending()
    self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
| 38.862319 | 98 | 0.464479 | 1,302 | 16,089 | 5.511521 | 0.110599 | 0.022297 | 0.039019 | 0.064103 | 0.833751 | 0.809225 | 0.750697 | 0.730909 | 0.693144 | 0.680045 | 0 | 0.013183 | 0.40593 | 16,089 | 413 | 99 | 38.956416 | 0.737602 | 0.006278 | 0 | 0.721785 | 0 | 0 | 0.253191 | 0.049186 | 0 | 0 | 0 | 0 | 0.028871 | 1 | 0.031496 | false | 0 | 0.010499 | 0 | 0.044619 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
c2c670beb89250818d9ba91b71ae7255bd36f8b0 | 1,950 | py | Python | tests/plugins/gemini/test_gemini_case_mixin.py | robinandeer/puzzle | 9476f05b416d3a5135d25492cb31411fdf831c58 | [
"MIT"
] | 24 | 2015-10-15T16:29:58.000Z | 2020-12-08T22:14:13.000Z | tests/plugins/gemini/test_gemini_case_mixin.py | robinandeer/puzzle | 9476f05b416d3a5135d25492cb31411fdf831c58 | [
"MIT"
] | 212 | 2015-10-08T14:28:36.000Z | 2020-04-29T22:44:10.000Z | tests/plugins/gemini/test_gemini_case_mixin.py | robinandeer/puzzle | 9476f05b416d3a5135d25492cb31411fdf831c58 | [
"MIT"
] | 11 | 2015-10-08T09:26:46.000Z | 2018-02-02T16:45:07.000Z | from puzzle.plugins import GeminiPlugin
def test_get_individuals(gemini_case_obj):
    """All individuals of the added case appear in individual_objs."""
    plugin = GeminiPlugin()
    plugin.add_case(gemini_case_obj)
    found = {individual.ind_id for individual in plugin.individual_objs}
    assert found == {'NA12877', 'NA12878', 'NA12882'}
def test_get_individuals_one_ind(gemini_case_obj):
    """Filtering individuals() by one id returns only that individual."""
    plugin = GeminiPlugin()
    plugin.add_case(gemini_case_obj)
    found = {individual.ind_id for individual in plugin.individuals('NA12877')}
    assert found == {'NA12877'}
def test_get_individuals_two_inds(gemini_case_obj):
    """Filtering individuals() by two ids returns exactly those two."""
    plugin = GeminiPlugin()
    plugin.add_case(gemini_case_obj)
    found = {individual.ind_id for individual in plugin.individuals('NA12877', 'NA12878')}
    assert found == {'NA12877', 'NA12878'}
def test__get_individuals(gemini_case_obj):
    """individuals() with no filter yields every individual of the case."""
    plugin = GeminiPlugin()
    plugin.add_case(gemini_case_obj)
    found = {individual.ind_id for individual in plugin.individuals()}
    assert found == {'NA12877', 'NA12878', 'NA12882'}
def test_cases(gemini_case_obj):
    """cases() yields the single loaded case id."""
    plugin = GeminiPlugin()
    plugin.add_case(gemini_case_obj)
    found = {case.case_id for case in plugin.cases()}
    assert found == {'643594'}
def test_case_objs(gemini_case_obj):
    """case_objs exposes the same single case as cases()."""
    plugin = GeminiPlugin()
    plugin.add_case(gemini_case_obj)
    found = {case.case_id for case in plugin.case_objs}
    assert found == {'643594'}
def test_case(gemini_case_obj):
    """case() fetches a case by its id."""
    plugin = GeminiPlugin()
    plugin.add_case(gemini_case_obj)
    expected_id = '643594'
    assert plugin.case(expected_id).case_id == expected_id
def test_case_no_id(gemini_case_obj):
    """case() without an id falls back to the only loaded case."""
    plugin = GeminiPlugin()
    plugin.add_case(gemini_case_obj)
    assert plugin.case().case_id == '643594'
def test_case_wrong_id(gemini_case_obj):
    """case() with an unknown id returns None."""
    plugin = GeminiPlugin()
    plugin.add_case(gemini_case_obj)
    unknown_id = 'hello'
    assert plugin.case(unknown_id) == None
| 30 | 79 | 0.720513 | 281 | 1,950 | 4.654804 | 0.124555 | 0.137615 | 0.178899 | 0.129969 | 0.891437 | 0.873853 | 0.85474 | 0.808869 | 0.808869 | 0.753823 | 0 | 0.051439 | 0.162564 | 1,950 | 64 | 80 | 30.46875 | 0.749541 | 0 | 0 | 0.521739 | 0 | 0 | 0.057949 | 0 | 0 | 0 | 0 | 0 | 0.195652 | 1 | 0.195652 | false | 0 | 0.021739 | 0 | 0.217391 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
6c3e90fd12bc1d9d67b44528dc5f0a07e1be15b9 | 101,080 | py | Python | RestaurantManagement.py | ManuelCLopes/RestaurantManagement | ae6e7d683fc4021a29a32c3df263a1edc62901a4 | [
"MIT"
] | null | null | null | RestaurantManagement.py | ManuelCLopes/RestaurantManagement | ae6e7d683fc4021a29a32c3df263a1edc62901a4 | [
"MIT"
] | null | null | null | RestaurantManagement.py | ManuelCLopes/RestaurantManagement | ae6e7d683fc4021a29a32c3df263a1edc62901a4 | [
"MIT"
] | null | null | null | import tkinter as tk
import tkinter.ttk as ttk
import os
import psycopg2
import RestaurantManagement as sW
from tkinter.ttk import Combobox
from tkinter.ttk import Label
from datetime import date
import hashlib
# --- Module-level shared state ------------------------------------------
today = date.today()  # date captured at application start-up
LARGE_FONT= ("Verdana", 12)  # default font used by most labels/entries
restaurante = 0  # id of the logged-in user's restaurant (set by Inicio.login)
admin = 0  # 1 when the logged-in user is an administrator (set by Inicio.login)
Prato = ''  # dish currently shown on the Detalhes screen (set by show_detalhes_prato)
class ecra_entrada(tk.Tk):
    """Main application window.

    Owns the frame container used by every screen and centralizes all the
    shared database queries. Queries use psycopg2 parameter binding instead
    of string concatenation, so values containing quotes cannot break the
    SQL or be used for injection.
    """

    # Connection parameters for the PostgreSQL backend.
    # NOTE(review): credentials are hard-coded; consider loading them from
    # the environment or a config file instead.
    _DB_PARAMS = dict(host="localhost", database="RestaurantManagement",
                      user="postgres", password="8DE2DF6A4D")

    def __init__(self, *args, **kwargs):
        self._frame = None
        tk.Tk.__init__(self, *args, **kwargs)
        self.title('R & R - Base de dados II')
        self.container = tk.Frame(self.geometry("700x700+500+100"))
        self.resizable(0, 0)
        self.container.pack(side="top", fill="both", expand=True)
        self.container.grid_rowconfigure(0, weight=1)
        self.container.grid_columnconfigure(0, weight=1)
        self.show_frame(Inicio)

    # ------------------------------------------------------------------
    # Database helpers
    # ------------------------------------------------------------------
    def _run_query(self, sql, params=()):
        """Execute *sql* with bound *params*; return all result rows."""
        conn = psycopg2.connect(**self._DB_PARAMS)
        try:
            conn.autocommit = True
            cur = conn.cursor()
            cur.execute(sql, params)
            rows = cur.fetchall()
            cur.close()
            return rows
        finally:
            conn.close()  # always release the connection, even on error

    def _run_command(self, sql, params=()):
        """Execute a statement that returns no rows (e.g. CALL ...)."""
        conn = psycopg2.connect(**self._DB_PARAMS)
        try:
            conn.autocommit = True
            cur = conn.cursor()
            cur.execute(sql, params)
            cur.close()
        finally:
            conn.close()

    # ------------------------------------------------------------------
    # Navigation
    # ------------------------------------------------------------------
    def show_frame(self, frame_class):
        """Replace the currently displayed frame with *frame_class*."""
        new_frame = frame_class(self.container, self)
        if self._frame is not None:
            self._frame.forget()
        self._frame = new_frame
        new_frame.grid(row=0, column=0, sticky="nsew")

    def show_detalhes_prato(self, p):
        """Open the detail screen for dish *p* (stored in the global Prato)."""
        global Prato
        Prato = p
        new_frame = Detalhes(self.container, self)
        if self._frame is not None:
            self._frame.forget()
        self._frame = new_frame
        new_frame.grid(row=0, column=0, sticky="nsew")
        return Prato

    def endApp(self):
        # Hard-exits the whole process (skips tkinter teardown on purpose).
        os._exit(1)

    # ------------------------------------------------------------------
    # Queries
    # ------------------------------------------------------------------
    def receita_diaria(self, d):
        """Return the revenue for date string *d* as text, '0 €' if none."""
        rows = self._run_query("SELECT ver_receita_diaria(%s, %s)",
                               (d, restaurante))
        valor = rows[0][0] if rows else None
        return '0 €' if valor is None else str(valor)

    def receita_ultimos_sete_dias(self):
        """Return the last-seven-days revenue as text, '0 €' if none."""
        rows = self._run_query("SELECT ver_receita_ultimos_sete_dias(%s)",
                               (restaurante,))
        valor = rows[0][0] if rows else None
        return '0 €' if valor is None else str(valor)

    def add_ingrediente(self, prato, ing):
        """Associate ingredient *ing* with dish *prato*, then show Ementas."""
        self._run_command("CALL add_ingrediente(%s, %s)", (prato, ing))
        self.show_frame(Ementas)

    def ver_pratos(self):
        """Return the names of all dishes as a list of strings."""
        rows = self._run_query('SELECT * FROM ver_pratos() as (Pratos VARCHAR(30))')
        # Each row is a 1-tuple; unwrap to plain strings.
        return [row[0] for row in rows]

    def ver_ementa_pratos(self, r):
        """Return the dish names on restaurant *r*'s menu."""
        rows = self._run_query("select * from ver_ementa_pratos(%s)", (r,))
        return [row[0] for row in rows]

    def ver_mesas_restaurante_estado(self, r, e):
        """Return the tables of restaurant *r* in state *e* (raw rows)."""
        return self._run_query("SELECT ver_mesas_restaurante_estado(%s, %s)",
                               (r, e))

    def ver_mesas_restaurante(self, r):
        """Return all tables of restaurant *r* (raw rows)."""
        return self._run_query('SELECT ver_mesas_restaurante(%s)', (r,))

    def ver_funcionarios_restaurante(self, r):
        """Return the employees of restaurant *r* (raw rows)."""
        return self._run_query('SELECT ver_funcionarios(%s)', (r,))

    def ver_nome_produtos(self):
        """Return the names of all products as a list of strings."""
        rows = self._run_query('SELECT ver_nome_produtos()')
        return [row[0] for row in rows]

    def verifica_Numeros(self, s):
        """Return True when *s* parses as a float, False otherwise."""
        try:
            float(s)
        except ValueError:
            return False
        return True
class Inicio(tk.Frame):
    """Login screen: the first frame shown; authenticates against the DB."""

    def login(self, user, password):
        """Validate credentials against the DB `login` function.

        On success, sets the module-level `admin` flag and `restaurante`
        id from the result row and returns True; returns False otherwise.
        """
        global admin
        global restaurante
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                user="postgres", password="8DE2DF6A4D")
        try:
            cur = conn.cursor()
            # NOTE(review): MD5 is a weak hash for password storage; the
            # stored hashes should be migrated to a modern KDF (bcrypt etc.).
            enc_pass = hashlib.md5(password.encode()).hexdigest()
            # Parameterized query: user-typed input must never be
            # concatenated into SQL (injection / breaks on quotes).
            cur.execute("select * from login(%s, %s)", (user, enc_pass))
            result = cur.fetchone()
            cur.close()
        finally:
            conn.close()
        if result is not None:
            admin = result[0]
            restaurante = result[1]
            return True
        return False

    def iniciar(self, user, password, controller):
        """Attempt login and route to the admin or staff main menu."""
        if not self.login(user, password):
            self.label_ERRO.config(text="Utilizador e/ou password inválidos!")
            return 0
        if admin == 1:
            controller.show_frame(Ementas)
        elif admin == 0:
            controller.show_frame(Ementas_func)

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        label_titulo = tk.Label(self, text="R&R", font=("Helvetica", 36))
        label_titulo.place(x=297, y=50)
        label_subtitulo = tk.Label(self, text="login", font=("Helvetica", 20))
        label_subtitulo.place(x=320, y=110)
        # Username field
        label_user = tk.Label(self, text="Utilizador:", font=LARGE_FONT)
        label_user.place(x=130, y=250)
        user = tk.Entry(self, font=LARGE_FONT)
        user.place(relwidth=0.40, rely=0.01, relheight=0.05, y=240, x=240)
        user.focus_set()
        # Password field (masked)
        label_pass = tk.Label(self, text="Password:", font=LARGE_FONT)
        label_pass.place(x=130, y=330)
        password = tk.Entry(self, font=LARGE_FONT, show="*")
        password.place(relwidth=0.40, rely=0.01, relheight=0.05, y=320, x=240)
        password.focus_set()
        # Error message label (populated by iniciar on bad credentials)
        self.label_ERRO = tk.Label(self, text="", font=LARGE_FONT)
        self.label_ERRO.place(x=210, y=400)
        self.label_ERRO.config(fg="red")
        button_entrar = tk.Button(self, text="Entrar", command=lambda: self.iniciar(user.get(), password.get(), controller))
        button_entrar.place(x=260, y=500)
        button_entrar.config(width=25, height=2, background="#87E193", fg="black")
        button_criar = tk.Button(self, text="Criar conta", command=lambda: controller.show_frame(CriarConta))
        button_criar.place(x=260, y=550)
        button_criar.config(width=25, height=2, background="#BDB76B", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=260, y=600)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")
class Inserir_Prato(tk.Frame):
    """Screen for creating a new dish (name + price)."""

    def ver_ingredientes_restantes(self, prato):
        """Return the ingredients not yet associated with *prato* (raw rows)."""
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                user="postgres", password="8DE2DF6A4D")
        try:
            conn.autocommit = True
            cur = conn.cursor()
            # Parameterized: dish names may contain quotes.
            cur.execute("SELECT ver_ingredientes_restantes(%s)", (prato,))
            vvv = cur.fetchall()
            cur.close()
        finally:
            conn.close()
        return vvv

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build all widgets of the dish-creation form."""
        label_titulo = tk.Label(self, text="Criação de um prato", font=("Helvetica", 20))
        label_titulo.place(x=220, y=110)
        # Existing dishes, shown so the user avoids duplicates.
        v = controller.ver_pratos()
        label_pratosExistentes = tk.Label(self, text="Pratos existentes:", font=LARGE_FONT)
        label_pratosExistentes.place(x=110, y=250)
        combo = Combobox(self, values=v)
        combo.place(x=330, y=250)
        combo.config(width=30, height=20)
        # New dish name
        label_prato = tk.Label(self, text="Nome prato:", font=LARGE_FONT)
        label_prato.place(x=110, y=300)
        prato = tk.Entry(self, font=30)
        prato.place(y=300, x=330)
        prato.focus_set()
        # New dish price
        label_preco = tk.Label(self, text="Preço:", font=LARGE_FONT)
        label_preco.place(x=110, y=350)
        preco = tk.Entry(self, font=30)
        preco.place(y=350, x=330)
        preco.focus_set()
        # Validation error message (set by criar_prato)
        self.label_ERRO = tk.Label(self, text="", font=LARGE_FONT)
        self.label_ERRO.place(x=190, y=400)
        self.label_ERRO.config(fg="red")
        button_gravar = tk.Button(self, text="Criar prato", command=lambda: self.criar_prato(prato.get(), preco.get(), controller))
        button_gravar.place(x=260, y=650)
        button_gravar.config(width=25, height=2, background="#87E193", fg="white")
        button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Consultar_Pratos))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def criar_prato(self, prato, preco, controller):
        """Validate the form and create the dish.

        The name must be non-empty and non-numeric; the price non-empty and
        numeric. On success the dish is stored and the Ementas screen is
        shown; returns True. Otherwise shows an error label and returns False.
        """
        valido = (prato != '' and not controller.verifica_Numeros(prato)
                  and preco != '' and controller.verifica_Numeros(preco))
        if valido:
            conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                    user="postgres", password="8DE2DF6A4D")
            try:
                conn.autocommit = True
                cur = conn.cursor()
                # Parameterized: never concatenate form input into SQL.
                cur.execute("CALL criar_prato(%s, %s)", (prato, preco))
                cur.close()
            finally:
                conn.close()
            controller.show_frame(Ementas)
            return True
        self.label_ERRO.config(text="É necessário preencher todos os campos devidamente!")
        return False
class Detalhes(tk.Frame):
    """Detail view of the dish currently held in the global `Prato`.

    Administrators additionally get widgets to attach ingredients and
    update the price; staff users get a read-only view.
    """

    def ver_ingredientes_restantes(self, prato):
        """Return the names of ingredients not yet linked to *prato*."""
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                user="postgres", password="8DE2DF6A4D")
        try:
            conn.autocommit = True
            cur = conn.cursor()
            # Parameterized: dish names may contain quotes.
            cur.execute("SELECT ver_ingredientes_restantes(%s)", (prato,))
            # Each row is a 1-tuple; unwrap to plain strings.
            nomes = [row[0] for row in cur.fetchall()]
            cur.close()
        finally:
            conn.close()
        return nomes

    def ver_detalhes(self, prato):
        """Return the (price, ingredients, allergies) rows for *prato*."""
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                user="postgres", password="8DE2DF6A4D")
        try:
            conn.autocommit = True
            cur = conn.cursor()
            cur.execute("SELECT * FROM ver_detalhes_prato(%s)", (prato,))
            vvv = cur.fetchall()
            cur.close()
        finally:
            conn.close()
        return vvv

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the detail table and, for admins, the editing widgets."""
        label_pratosExistentes = tk.Label(self, text=str(Prato), font=("Helvetica", 20))
        label_pratosExistentes.place(x=50, y=50)
        colunas = ('Preço', 'Ingredientes', 'Alergias')
        self.lista = ttk.Treeview(self, columns=colunas, show='headings')
        # set column headings
        for col in colunas:
            self.lista.heading(col, text=col, anchor='center')
            self.lista.column(col, width=25, anchor='center')
        self.lista.grid(row=1, column=0, columnspan=2, ipadx=250, padx=50, pady=325)
        vvv = self.ver_detalhes(Prato)
        for data in vvv:
            self.lista.insert("", "end", values=data)
        if admin == 1:
            # Admin-only: attach an ingredient to the dish.
            label_ing = tk.Label(self, text="Ingredientes:", font=LARGE_FONT)
            label_ing.place(x=140, y=250)
            ingredientes = self.ver_ingredientes_restantes(Prato)
            ing = ttk.Combobox(self, values=ingredientes)
            ing.place(x=270, y=250)
            ing.config(width=30, height=20)
            button_inserir = tk.Button(self, text="+", command=lambda: controller.add_ingrediente(Prato, ing.get()))
            button_inserir.place(x=500, y=250)
            button_inserir.config(width=2, height=1, fg="black")
            # Admin-only: update the price.
            label_preco = tk.Label(self, text="Preço:", font=LARGE_FONT)
            label_preco.place(x=200, y=150)
            preco = tk.Entry(self, font=30)
            preco.place(y=150, x=270)
            preco.focus_set()
            self.label_ERRO = tk.Label(self, text="", font=LARGE_FONT)
            self.label_ERRO.place(x=180, y=575)
            self.label_ERRO.config(fg="red")
            button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Ementas))
            button_gravar = tk.Button(self, text="Atualizar preço", command=lambda: self.atualizar_preco(preco.get(), controller))
            button_gravar.place(x=260, y=650)
            button_gravar.config(width=25, height=2, background="#87E193", fg="black")
        else:
            button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Ementas_func))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def atualizar_preco(self, preco, controller):
        """Update the current dish's price (admin action).

        Returns True on success, False (and shows an error) for an empty or
        non-numeric price.
        """
        if preco != '' and controller.verifica_Numeros(preco):
            conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                    user="postgres", password="8DE2DF6A4D")
            try:
                conn.autocommit = True
                cur = conn.cursor()
                cur.execute("CALL atualizarpreco_prato(%s, %s)", (Prato, preco))
                cur.close()
            finally:
                conn.close()
            controller.show_frame(Ementas)
            return True
        self.label_ERRO.config(text="Para atualizar o preço, é necessário \npreencher o campo com um valor adequado")
        return False
class Gerir_Reservas(tk.Frame):
    """Reservation menu: create a new reservation or list existing ones."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)

        btn_fazer = tk.Button(self, text="Fazer Reserva",
                              command=lambda: controller.show_frame(Fazer_Reserva))
        btn_fazer.config(width=20, height=5, fg="black")
        btn_fazer.place(x=170, y=300)

        btn_ver = tk.Button(self, text="Ver Reservas",
                            command=lambda: controller.show_frame(Ver_Reservas))
        btn_ver.config(width=20, height=5, fg="black")
        btn_ver.place(x=415, y=300)

        # "Voltar" routes to the main menu matching the user's role.
        destino = Ementas if admin == 1 else Ementas_func
        btn_voltar = tk.Button(self, text="Voltar",
                               command=lambda: controller.show_frame(destino))
        btn_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        btn_voltar.place(x=25, y=650)

        btn_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        btn_sair.config(width=25, height=2, background="#BC0022", fg="white")
        btn_sair.place(x=500, y=650)
class Fazer_Reserva(tk.Frame):
    """Form for creating a reservation (date, employee, party size, client)."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the reservation form widgets."""
        label_titulo = tk.Label(self, text="Fazer reserva:", font=LARGE_FONT)
        label_titulo.place(x=275, y=25)
        # Date selectors (day / month / year).
        # NOTE(review): the day list stops at 30 — day 31 cannot be chosen.
        label_data = tk.Label(self, text="Data:", font=LARGE_FONT)
        label_data.place(x=150, y=125)
        combo_dia = Combobox(self, values=['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30'])
        combo_dia.place(x=350, y=125, width=50)
        combo_mes = Combobox(self, values=['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12'])
        combo_mes.place(x=425, y=125, width=50)
        combo_ano = Combobox(self, values=['2020'])
        combo_ano.place(x=500, y=125, width=50)
        # Employee taking the reservation.
        f = controller.ver_funcionarios_restaurante(restaurante)
        label_f = tk.Label(self, text="Funcionário:", font=LARGE_FONT)
        label_f.place(x=150, y=175)
        combo_f = Combobox(self, values=f)
        combo_f.place(x=350, y=175)
        # Party size.
        label_npessoas = tk.Label(self, text="Número de pessoas:", font=LARGE_FONT)
        label_npessoas.place(x=150, y=225)
        combo_npessoas = Combobox(self, values=['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20'])
        combo_npessoas.place(x=350, y=225, width=50)
        # Client name.
        label_cliente = tk.Label(self, text="Nome cliente:", font=LARGE_FONT)
        label_cliente.place(x=150, y=275)
        cliente = tk.Entry(self, font=20)
        cliente.place(relwidth=0.30, rely=0.01, relheight=0.04, y=275, x=350)
        cliente.focus_set()
        # Free-text notes (expected to mention the time, etc.).
        label_obs = tk.Label(self, text="Observações:", font=LARGE_FONT)
        label_obs.place(x=150, y=325)
        obs = tk.Entry(self, font=20)
        obs.place(relwidth=0.30, rely=0.01, relheight=0.1, y=325, x=350)
        obs.focus_set()
        self.label_ERRO = tk.Label(self, text="", font=LARGE_FONT)
        self.label_ERRO.place(x=130, y=500)
        self.label_ERRO.config(fg="red")
        # Submit the reservation.
        button_inserir = tk.Button(self, text="Fazer Reserva", command=lambda: self.fazer_reserva(combo_dia.get(), combo_mes.get(), combo_ano.get(), combo_npessoas.get(), cliente.get(), combo_f.get(), obs.get(), controller))
        button_inserir.place(x=260, y=650)
        button_inserir.config(width=25, height=2, background="#87E193", fg="black")
        if admin == 1:
            button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Ementas))
        else:
            button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Ementas_func))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def fazer_reserva(self, dia, mes, ano, npessoas, cliente, funcionario, observacoes, controller):
        """Validate the form and store the reservation.

        All fields except *observacoes* are mandatory. Returns True on
        success (and navigates to Ementas), False otherwise.
        """
        obrigatorios = (dia, mes, ano, npessoas, cliente, funcionario)
        if all(campo != '' for campo in obrigatorios):
            data_reserva = str(dia) + "-" + str(mes) + "-" + str(ano)
            conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                    user="postgres", password="8DE2DF6A4D")
            try:
                conn.autocommit = True
                cur = conn.cursor()
                # Parameterized call: client names / notes may contain
                # quotes, which previously broke the concatenated SQL.
                cur.execute("CALL criar_reserva(%s, %s, %s, %s, %s, %s)",
                            (data_reserva, npessoas, cliente, funcionario,
                             restaurante, observacoes))
                cur.close()
            finally:
                conn.close()
            # NOTE(review): always shows the admin menu; other screens
            # branch on `admin` here — confirm intended.
            controller.show_frame(Ementas)
            return True
        self.label_ERRO.config(text="Apenas o campo 'Observações' é facultativo!\nNo entanto também deve ser preenchido mencionando\n a hora e outros aspetos relevantes da reserva.")
        return False
class Ver_Reservas(tk.Frame):
    """Read-only list of the current restaurant's reservations."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the reservations table and navigation buttons."""
        label = tk.Label(self, text="Consultar reservas", font=LARGE_FONT)
        label.place(x=290, y=50)
        colunas = ('Data', 'Cliente', 'Pessoas', 'Funcionário', 'Observações')
        self.lista = ttk.Treeview(self, columns=colunas, show='headings')
        # set column headings
        for col in colunas:
            self.lista.heading(col, text=col, anchor='center')
            self.lista.column(col, width=15, anchor='center')
        self.lista.grid(row=1, column=0, columnspan=2, ipadx=250, padx=50, pady=200)
        for data in self.ver_reservas():
            self.lista.insert("", "end", values=data)
        # "Voltar" routes to the menu matching the user's role.
        if admin == 1:
            button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Ementas))
        else:
            button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Ementas_func))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def ver_reservas(self):
        """Return every reservation row for the current restaurant."""
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                user="postgres", password="8DE2DF6A4D")
        try:
            conn.autocommit = True
            cur = conn.cursor()
            # Parameterized for consistency with the rest of the DB layer.
            cur.execute("SELECT * FROM ver_reservas(%s)", (restaurante,))
            v = cur.fetchall()
            cur.close()
        finally:
            conn.close()
        return v
class Registar_pedido_pratos(tk.Frame):
    """Form to add a dish ('Prato') to a table's bill."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the order form (table, employee, dish)."""
        label_titulo = tk.Label(self, text="Registar pedido", font=("Helvetica", 20))
        label_titulo.place(x=250, y=110)
        tipo_refeicao = 'Prato'
        # Table being served.
        m = controller.ver_mesas_restaurante(restaurante)
        label_m = tk.Label(self, text="Mesa:", font=LARGE_FONT)
        label_m.place(x=150, y=250)
        combo_m = Combobox(self, values=m)
        combo_m.place(x=350, y=250)
        # Employee registering the order.
        f = controller.ver_funcionarios_restaurante(restaurante)
        label_f = tk.Label(self, text="Funcionário:", font=LARGE_FONT)
        label_f.place(x=150, y=300)
        combo_f = Combobox(self, values=f)
        combo_f.place(x=350, y=300)
        # Dish chosen from the restaurant's menu.
        p = controller.ver_ementa_pratos(restaurante)
        label_p = tk.Label(self, text="Prato:", font=LARGE_FONT)
        label_p.place(x=150, y=350)
        combo_p = Combobox(self, values=p)
        combo_p.place(x=350, y=350)
        self.label_ERRO = tk.Label(self, text="", font=LARGE_FONT)
        self.label_ERRO.place(x=125, y=500)
        self.label_ERRO.config(fg="red")
        button_inserir = tk.Button(self, text="Inserir na conta", command=lambda: self.registar_pedido(tipo_refeicao, combo_p.get(), combo_m.get(), combo_f.get(), controller))
        button_inserir.place(x=260, y=650)
        button_inserir.config(width=25, height=2, background="#87E193", fg="black")
        button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Registar_pedido))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def registar_pedido(self, tr, prod, mesa, funcionario, controller):
        """Add one item of kind *tr* to the bill of table *mesa*.

        Returns True on success, False (with an error label) when any
        field is empty.
        """
        if prod != '' and mesa != '' and funcionario != '':
            conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                    user="postgres", password="8DE2DF6A4D")
            try:
                conn.autocommit = True
                cur = conn.cursor()
                # Parameterized: dish / employee names may contain quotes.
                cur.execute("CALL registar_pedido(%s, %s, %s, %s)",
                            (tr, prod, mesa, funcionario))
                cur.close()
            finally:
                conn.close()
            controller.show_frame(Ementas)
            return True
        self.label_ERRO.config(text="É necessário preencher todos os campos devidamente!")
        return False
class Registar_pedido_outros(tk.Frame):
    """Form to add a non-dish product ('outro') to a table's bill."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the order form (table, employee, product)."""
        label_titulo = tk.Label(self, text="Registar pedido", font=("Helvetica", 20))
        label_titulo.place(x=250, y=110)
        tipo_refeicao = 'outro'
        # Table being served.
        m = controller.ver_mesas_restaurante(restaurante)
        label_m = tk.Label(self, text="Mesa:", font=LARGE_FONT)
        label_m.place(x=150, y=250)
        combo_m = Combobox(self, values=m)
        combo_m.place(x=350, y=250)
        # Employee registering the order.
        f = controller.ver_funcionarios_restaurante(restaurante)
        label_f = tk.Label(self, text="Funcionário:", font=LARGE_FONT)
        label_f.place(x=150, y=300)
        combo_f = Combobox(self, values=f)
        combo_f.place(x=350, y=300)
        # Product chosen from the full product list.
        p = controller.ver_nome_produtos()
        label_p = tk.Label(self, text="Produto:", font=LARGE_FONT)
        label_p.place(x=150, y=350)
        combo_p = Combobox(self, values=p)
        combo_p.place(x=350, y=350)
        self.label_ERRO = tk.Label(self, text="", font=LARGE_FONT)
        self.label_ERRO.place(x=125, y=500)
        self.label_ERRO.config(fg="red")
        button_inserir = tk.Button(self, text="Inserir na conta", command=lambda: self.registar_pedido(tipo_refeicao, combo_p.get(), combo_m.get(), combo_f.get(), controller))
        button_inserir.place(x=260, y=650)
        button_inserir.config(width=25, height=2, background="#87E193", fg="black")
        button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Registar_pedido))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def registar_pedido(self, tr, prod, mesa, funcionario, controller):
        """Add one item of kind *tr* to the bill of table *mesa*.

        Returns True on success, False (with an error label) when any
        field is empty.
        """
        if prod != '' and mesa != '' and funcionario != '':
            conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                    user="postgres", password="8DE2DF6A4D")
            try:
                conn.autocommit = True
                cur = conn.cursor()
                # Parameterized: product / employee names may contain quotes.
                cur.execute("CALL registar_pedido(%s, %s, %s, %s)",
                            (tr, prod, mesa, funcionario))
                cur.close()
            finally:
                conn.close()
            controller.show_frame(Ementas)
            return True
        self.label_ERRO.config(text="É necessário preencher todos os campos devidamente!")
        return False
class Registar_pedido(tk.Frame):
    """Menu frame that routes to dish or other-product order registration."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        button_pratos = tk.Button(self, text="PRATOS", command=lambda: controller.show_frame(Registar_pedido_pratos))
        button_pratos.place(x=170, y=300)
        button_pratos.config(width=20, height=5, fg="black")
        button_outros = tk.Button(self, text="OUTROS PRODUTOS", command=lambda: controller.show_frame(Registar_pedido_outros))
        button_outros.place(x=415, y=300)
        button_outros.config(width=20, height=5, fg="black")
        # Administrators go back to the admin menu, employees to theirs.
        destino = Ementas if admin == 1 else Ementas_func
        button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(destino))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")
class Consultar_Pedidos(tk.Frame):
    """Frame listing recent orders, with the option to cancel the selected one."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the orders table and the cancel/back/exit buttons."""
        label = tk.Label(self, text="Pedidos recentes", font=("Helvetica", 20))
        label.place(x=290, y=50)
        colunas = ('ID pedido', 'Produto', 'Data', 'Mesa', 'Funcionario')
        self.lista = ttk.Treeview(self, columns=colunas, show='headings')
        for col in colunas:
            self.lista.heading(col, text=col, anchor='center')
            self.lista.column(col, width=15, anchor='center')
        self.lista.grid(row=1, column=0, columnspan=2, ipadx=250, padx=50, pady=200)
        for data in self.ver_pedidos():
            self.lista.insert("", "end", values=data)
        self.pedido = ''  # id of the selected order; '' means none selected
        self.lista.bind('<<TreeviewSelect>>', self.selectItem)
        self.label_ERRO = tk.Label(self, text="", font=LARGE_FONT)
        self.label_ERRO.place(x=190, y=500)
        self.label_ERRO.config(fg="red")
        if admin == 1:
            button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Ementas))
        else:
            button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Ementas_func))
        button_apagar = tk.Button(self, text="Cancelar pedido", command=lambda: self.cancelar_pedido(self.pedido, controller))
        button_apagar.place(x=260, y=650)
        button_apagar.config(width=25, height=2, background="#c4cbd5", fg="black")
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def ver_pedidos(self):
        """Return all rows of the ver_pedidos() SQL function for this restaurant."""
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement", user="postgres", password="8DE2DF6A4D")
        conn.autocommit = True
        cur = conn.cursor()
        # Parameterized query instead of string concatenation.
        cur.execute("SELECT * FROM ver_pedidos(%s)", (restaurante,))
        v = cur.fetchall()
        cur.close()
        conn.close()
        return v

    def selectItem(self, event):
        """Remember the order id (first column) of the row the user selected."""
        for item in self.lista.selection():
            values = self.lista.item(item, 'values')
            # The old code iterated the tuple and stripped a list repr; its
            # net effect was "take the first column value as a string".
            if values:
                self.pedido = str(values[0])

    def cancelar_pedido(self, pedido, controller):
        """Cancel the selected order; returns False when nothing is selected."""
        if pedido != '':
            conn = psycopg2.connect(host="localhost", database="RestaurantManagement", user="postgres", password="8DE2DF6A4D")
            conn.autocommit = True
            cur = conn.cursor()
            # Parameterized call; the id was previously concatenated unquoted.
            cur.execute("CALL cancelar_pedido(%s)", (int(pedido),))
            cur.close()
            conn.close()
            controller.show_frame(Ementas)
            return True
        self.label_ERRO.config(text="É necessário selecionar um pedido!")
        return False
class Registar_fatura(tk.Frame):
    """Frame for issuing an invoice for an occupied table, with optional NIF or client number."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the invoice form (table picker, NIF and client entries) and buttons."""
        label_titulo = tk.Label(self, text="Registo de fatura", font=("Helvetica", 20))
        label_titulo.place(x=245, y=50)
        # Only tables in state 1 (occupied) can be invoiced.
        m = controller.ver_mesas_restaurante_estado(restaurante, 1)
        label_m = tk.Label(self, text="Mesa:", font=LARGE_FONT)
        label_m.place(x=200, y=200)
        combo_m = Combobox(self, values=m)
        combo_m.place(x=300, y=200)
        label_nif = tk.Label(self, text="NIF:", font=LARGE_FONT)
        label_nif.place(x=50, y=300)
        nif = tk.Entry(self, font=30)
        nif.place(relwidth=0.30, rely=0.01, relheight=0.04, y=295, x=100)
        nif.focus_set()
        label_cliente = tk.Label(self, text="Nº Cliente:", font=LARGE_FONT)
        label_cliente.place(x=340, y=300)
        cliente = tk.Entry(self, font=30)
        cliente.place(relwidth=0.30, rely=0.01, relheight=0.04, y=295, x=440)
        cliente.focus_set()
        self.label_ERRO = tk.Label(self, text="", font=LARGE_FONT)
        self.label_ERRO.place(x=190, y=500)
        self.label_ERRO.config(fg="red")
        button_inserir = tk.Button(self, text="Registar fatura", command=lambda: self.registar_fatura(combo_m.get(), nif.get(), cliente.get(), controller))
        button_inserir.place(x=260, y=650)
        button_inserir.config(width=25, height=2, background="#87E193", fg="black")
        if admin == 1:
            button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Ementas))
        else:
            button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Ementas_func))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def registar_fatura(self, m, nif, cliente, controller):
        """Create the invoice for table *m*.

        Dispatch: with a NIF use add_fatura_c_nif; otherwise add_fatura_s_nif
        with the client number (0 when absent). Returns False when no table
        is selected.
        """
        if m != '':
            conn = psycopg2.connect(host="localhost", database="RestaurantManagement", user="postgres", password="8DE2DF6A4D")
            conn.autocommit = True
            cur = conn.cursor()
            # Parameterized calls replace the injectable string concatenation.
            # All three arguments were interpolated unquoted (numeric).
            if nif == "":
                if cliente == "":
                    cur.execute("CALL add_fatura_s_nif(%s,%s,%s)", (0, int(m), restaurante))
                else:
                    cur.execute("CALL add_fatura_s_nif(%s,%s,%s)", (int(cliente), int(m), restaurante))
            else:
                cur.execute("CALL add_fatura_c_nif(%s,%s,%s)", (int(nif), int(m), restaurante))
            cur.close()
            conn.close()
            controller.show_frame(Ementas)
            return True
        self.label_ERRO.config(text="Deve selecionar uma mesa para proceder\nao registo da fatura!")
        return False
class Consultar_Faturas_semana(tk.Frame):
    """Frame listing invoices from the last 7 days; admins also see revenue figures."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the invoice table, revenue labels (admin only) and buttons."""
        label = tk.Label(self, text="Faturas - Últimos 7 dias", font=LARGE_FONT)
        label.place(x=25, y=25)
        colunas = ('ID fatura', 'Cliente', 'NIF', 'Data', 'Produto', 'Preço', 'Estado')
        lista = ttk.Treeview(self, columns=colunas, show='headings')
        for col in colunas:
            lista.heading(col, text=col, anchor='center')
            lista.column(col, width=35, anchor='center')
        lista.grid(row=1, column=0, columnspan=2, ipadx=200, padx=25, pady=75)
        for data in self.ver_faturas_semana():
            lista.insert("", "end", values=data)
        button_pagamento = tk.Button(self, text="Efetuar pagamento", command=lambda: controller.show_frame(Confirmar_Pagamento))
        button_pagamento.place(x=260, y=650)
        button_pagamento.config(width=25, height=2, background="#87E193", fg="black")
        if admin == 1:
            # Revenue summaries are only shown to administrators.
            date = today.strftime("%Y-%m-%d")
            rd = controller.receita_diaria(date)
            rs = controller.receita_ultimos_sete_dias()
            label_rd = tk.Label(self, text="Receita Diária: " + rd + " ", font=LARGE_FONT)
            label_rd.place(x=25, y=350)
            label_rs = tk.Label(self, text="Receita últimos 7 dias: " + rs + " ", font=LARGE_FONT)
            label_rs.place(x=25, y=400)
            button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Ementas))
        else:
            button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Ementas_func))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def ver_faturas_semana(self):
        """Return the rows of ver_faturas_semana() for the current restaurant."""
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement", user="postgres", password="8DE2DF6A4D")
        conn.autocommit = True
        cur = conn.cursor()
        # Parameterized query instead of string concatenation.
        cur.execute("SELECT * FROM ver_faturas_semana(%s)", (restaurante,))
        v = cur.fetchall()
        cur.close()
        conn.close()
        return v
class Consultar_Faturas(tk.Frame):
    """Frame listing all invoices since the beginning; admins also see revenue stats."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the invoice table, revenue labels (admin only) and buttons."""
        label = tk.Label(self, text="Faturas - Desde o ínicio", font=LARGE_FONT)
        label.place(x=25, y=25)
        colunas = ('ID fatura', 'Cliente', 'NIF', 'Data', 'Produto', 'Preço', 'Estado')
        lista = ttk.Treeview(self, columns=colunas, show='headings')
        for col in colunas:
            lista.heading(col, text=col, anchor='center')
            lista.column(col, width=35, anchor='center')
        lista.grid(row=1, column=0, columnspan=2, ipadx=200, padx=25, pady=75)
        for data in self.ver_faturas():
            lista.insert("", "end", values=data)
        button_pagamento = tk.Button(self, text="Efetuar pagamento", command=lambda: controller.show_frame(Confirmar_Pagamento))
        button_pagamento.place(x=260, y=650)
        button_pagamento.config(width=25, height=2, background="#87E193", fg="black")
        if admin == 1:
            # Revenue summaries are only shown to administrators.
            date = today.strftime("%Y-%m-%d")
            rd = controller.receita_diaria(date)
            rs = controller.receita_ultimos_sete_dias()
            rma = self.receita_media_almocos()
            rmj = self.receita_media_jantares()
            rmd = self.receita_media_diaria()
            label_rd = tk.Label(self, text="Receita Diária: " + rd + " ", font=LARGE_FONT)
            label_rd.place(x=25, y=350)
            label_rs = tk.Label(self, text="Receita últimos 7 dias: " + rs + " ", font=LARGE_FONT)
            label_rs.place(x=25, y=400)
            label_rma = tk.Label(self, text="Receita Média almoços: " + rma + " ", font=LARGE_FONT)
            label_rma.place(x=25, y=450)
            label_rmj = tk.Label(self, text="Receita Média jantares: " + rmj + " ", font=LARGE_FONT)
            label_rmj.place(x=25, y=500)
            label_rmd = tk.Label(self, text="Receita Média diária: " + rmd + " ", font=LARGE_FONT)
            label_rmd.place(x=25, y=550)
            button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Ementas))
        else:
            button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Ementas_func))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def ver_faturas(self):
        """Return the rows of ver_faturas() for the current restaurant."""
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement", user="postgres", password="8DE2DF6A4D")
        conn.autocommit = True
        cur = conn.cursor()
        # Parameterized query instead of string concatenation.
        cur.execute("SELECT * FROM ver_faturas(%s)", (restaurante,))
        v = cur.fetchall()
        cur.close()
        conn.close()
        return v

    def _receita_media(self, query):
        """Run a scalar revenue SQL function (query has one %s: the restaurant id)
        and return its value as display text, or '0 €' when it is NULL."""
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement", user="postgres", password="8DE2DF6A4D")
        conn.autocommit = True
        cur = conn.cursor()
        cur.execute(query, (restaurante,))
        row = cur.fetchone()
        cur.close()
        conn.close()
        # fetchone() gives a 1-tuple; the old code stripped its repr instead.
        if row is None or row[0] is None:
            return "0 €"
        return str(row[0])

    def receita_media_almocos(self):
        """Average lunch revenue, formatted for display."""
        return self._receita_media("SELECT ver_receita_media_almocos(%s)")

    def receita_media_jantares(self):
        """Average dinner revenue, formatted for display."""
        return self._receita_media("SELECT ver_receita_media_jantares(%s)")

    def receita_media_diaria(self):
        """Average daily revenue, formatted for display."""
        return self._receita_media("SELECT ver_receita_media_diaria(%s)")
class Confirmar_Pagamento(tk.Frame):
    """Frame listing unpaid invoices and letting the user register a payment."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the unpaid-invoices table and the payment/back/exit buttons."""
        label = tk.Label(self, text="Faturas - Por pagar", font=LARGE_FONT)
        label.place(x=25, y=25)
        colunas = ('ID fatura', 'Cliente', 'NIF', 'Data', 'Produto', 'Preço')
        self.lista = ttk.Treeview(self, columns=colunas, show='headings')
        for col in colunas:
            self.lista.heading(col, text=col, anchor='center')
            self.lista.column(col, width=40, anchor='center')
        self.lista.grid(row=1, column=0, columnspan=2, ipadx=200, padx=25, pady=75)
        for data in self.ver_faturas_por_pagar():
            self.lista.insert("", "end", values=data)
        self.prod = ''  # id of the selected invoice; '' means none selected
        self.lista.bind('<<TreeviewSelect>>', self.selectItem)
        self.label_ERRO = tk.Label(self, text="", font=LARGE_FONT)
        self.label_ERRO.place(x=190, y=400)
        self.label_ERRO.config(fg="red")
        button_pagamento = tk.Button(self, text="Efetuar pagamento", command=lambda: self.confirmar_pagamento(self.prod, controller))
        button_pagamento.place(x=260, y=650)
        button_pagamento.config(width=25, height=2, background="#87E193", fg="black")
        if admin == 1:
            button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Ementas))
        else:
            button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Ementas_func))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def selectItem(self, event):
        """Remember the invoice id (first column) of the row the user selected."""
        for item in self.lista.selection():
            values = self.lista.item(item, 'values')
            # The old code iterated the tuple and stripped a list repr; its
            # net effect was "take the first column value as a string".
            if values:
                self.prod = str(values[0])

    def ver_faturas_por_pagar(self):
        """Return the rows of ver_faturas_por_pagar() for this restaurant."""
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement", user="postgres", password="8DE2DF6A4D")
        conn.autocommit = True
        cur = conn.cursor()
        # Parameterized query instead of string concatenation.
        cur.execute("SELECT * FROM ver_faturas_por_pagar(%s)", (restaurante,))
        v = cur.fetchall()
        cur.close()
        conn.close()
        return v

    def confirmar_pagamento(self, prod, controller):
        """Register the payment of the selected invoice; False when none selected."""
        if prod != '':
            conn = psycopg2.connect(host="localhost", database="RestaurantManagement", user="postgres", password="8DE2DF6A4D")
            conn.autocommit = True
            cur = conn.cursor()
            # Parameterized call; the id was previously concatenated unquoted.
            cur.execute("CALL registar_pagamento(%s)", (int(prod),))
            cur.close()
            conn.close()
            controller.show_frame(Ementas)
            return True
        self.label_ERRO.config(text="É necessário selecionar uma fatura!")
        return False
class Inserir_Produtos(tk.Frame):
    """Frame for creating a new product (name, price, type, VAT, stock levels)."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the product-creation form and its buttons."""
        label_titulo = tk.Label(self, text="Criação de um produto", font=("Helvetica", 20))
        label_titulo.place(x=220, y=25)
        # Product name
        label_produto = tk.Label(self, text="Nome produto:", font=LARGE_FONT)
        label_produto.place(x=130, y=100)
        produto = tk.Entry(self, font=30)
        produto.place(relwidth=0.40, rely=0.01, relheight=0.05, y=90, x=310)
        produto.focus_set()
        # Price
        label_preco = tk.Label(self, text="Preço:", font=LARGE_FONT)
        label_preco.place(x=130, y=150)
        preco = tk.Entry(self, font=30)
        preco.place(relwidth=0.40, rely=0.01, relheight=0.05, y=140, x=310)
        preco.focus_set()
        # Preparation area
        label_zconf = tk.Label(self, text="Zona de confeção:", font=LARGE_FONT)
        label_zconf.place(x=130, y=200)
        combo_zconf = Combobox(self, values=['Cozinha', 'Balcão'])
        combo_zconf.place(x=310, y=200)
        # Product type
        label_tp = tk.Label(self, text="Tipo de produto:", font=LARGE_FONT)
        label_tp.place(x=130, y=250)
        combo_tp = Combobox(self, values=['Bebida', 'Peixe', 'Carne', 'Acompanhamento', 'Fruta', 'Salgados', 'Doces'])
        combo_tp.place(x=310, y=250)
        # Meal type
        label_tr = tk.Label(self, text="Tipo de refeição:", font=LARGE_FONT)
        label_tr.place(x=130, y=300)
        combo_tr = Combobox(self, values=['Prato', 'Sobremesa', 'Entradas', 'Snack', 'Vegetariano'])
        combo_tr.place(x=310, y=300)
        # VAT rate
        label_iva = tk.Label(self, text="IVA:", font=LARGE_FONT)
        label_iva.place(x=130, y=350)
        combo_iva = Combobox(self, values=['0.23', '0.13', '0.06'])
        combo_iva.place(x=310, y=350)
        # Initial quantity put on sale
        label_quantidade = tk.Label(self, text="Quantidade:", font=LARGE_FONT)
        label_quantidade.place(x=130, y=400)
        quantidade = tk.Entry(self, font=30)
        quantidade.place(relwidth=0.40, rely=0.01, relheight=0.05, y=390, x=310)
        quantidade.focus_set()
        # Minimum stock threshold
        label_min = tk.Label(self, text="Stock Mínimo:", font=LARGE_FONT)
        label_min.place(x=130, y=450)
        minimo = tk.Entry(self, font=30)
        minimo.place(relwidth=0.40, rely=0.01, relheight=0.05, y=440, x=310)
        minimo.focus_set()
        self.label_ERRO = tk.Label(self, text="", font=LARGE_FONT)
        self.label_ERRO.place(x=130, y=550)
        self.label_ERRO.config(fg="red")
        button_inserir = tk.Button(self, text="Inserir", command=lambda: self.criar_produto(produto.get(), preco.get(), combo_zconf.get(), combo_tp.get(), combo_tr.get(), combo_iva.get(), quantidade.get(), minimo.get(), controller))
        button_inserir.place(x=260, y=650)
        button_inserir.config(width=25, height=2, background="#87E193", fg="black")
        button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Ver_Produtos))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def criar_produto(self, produto, preco, zconf, tp, tr, iva, quant, minimo, controller):
        """Validate the form and create the product via criar_produto.

        Requires every field filled, a non-numeric name and numeric
        price/quantity/minimum. Returns True on success, False otherwise.
        """
        if produto != '' and preco != '' and zconf != '' and tp != '' and tr != '' and iva != '' and quant != '' and minimo != '':
            if not controller.verifica_Numeros(produto) and controller.verifica_Numeros(preco) and controller.verifica_Numeros(quant) and controller.verifica_Numeros(minimo):
                conn = psycopg2.connect(host="localhost", database="RestaurantManagement", user="postgres", password="8DE2DF6A4D")
                conn.autocommit = True
                cur = conn.cursor()
                # Parameterized call: the product name was previously
                # concatenated into the SQL string (injection risk).
                # quant/minimo were interpolated unquoted (numeric).
                cur.execute("CALL criar_produto(%s,%s,%s,%s,%s,%s,%s,%s)",
                            (zconf, tr, tp, str(preco), str(iva), int(quant), produto, int(minimo)))
                cur.close()
                conn.close()
                controller.show_frame(Ementas)
                return True
        self.label_ERRO.config(text="Devem ser preenchidos todos os campos devidamente!")
        return False
class Inserir_Prato_Ementa(tk.Frame):
    """Frame for adding an existing dish to the menu for a weekday/meal slot."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the dish/meal/weekday pickers and the action buttons."""
        label_titulo = tk.Label(self, text="Inserir prato na\nEmenta", font=("Helvetica", 20))
        label_titulo.place(x=250, y=50)
        # Existing dishes
        v = controller.ver_pratos()
        label_pratosExistentes = tk.Label(self, text="Pratos existentes:", font=LARGE_FONT)
        label_pratosExistentes.place(x=150, y=200)
        combo_pratos = Combobox(self, values=v)
        combo_pratos.place(x=350, y=200)
        # Meal type
        label_tr = tk.Label(self, text="Tipo de refeição:", font=LARGE_FONT)
        label_tr.place(x=150, y=300)
        combo_tr = Combobox(self, values=['Almoço', 'Jantar'])
        combo_tr.place(x=350, y=300)
        # Day of the week
        label_ds = tk.Label(self, text="Dia da semana:", font=LARGE_FONT)
        label_ds.pack(pady=30, padx=2)
        label_ds.place(x=150, y=400)
        combo_ds = Combobox(self, values=['Segunda', 'Terça', 'Quarta', 'Quinta', 'Sexta', 'Sábado', 'Domingo'])
        combo_ds.place(x=350, y=400)
        self.label_ERRO = tk.Label(self, text="", font=LARGE_FONT)
        self.label_ERRO.place(x=200, y=500)
        self.label_ERRO.config(fg="red")
        button_inserir = tk.Button(self, text="Inserir", command=lambda: self.inserir_prato_ementa(combo_pratos.get(), combo_tr.get(), combo_ds.get(), controller))
        button_inserir.place(x=260, y=650)
        button_inserir.config(width=25, height=2, background="#87E193", fg="white")
        button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Consultar_Ementa))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def inserir_prato_ementa(self, prato, descricao, diasemanal, controller):
        """Add *prato* to the menu for the given meal and weekday.

        Returns True on success; False when any field is empty.
        """
        if prato != '' and descricao != '' and diasemanal != '':
            conn = psycopg2.connect(host="localhost", database="RestaurantManagement", user="postgres", password="8DE2DF6A4D")
            conn.autocommit = True
            cur = conn.cursor()
            # Parameterized call replacing injectable string concatenation.
            # restaurante was quoted in the old SQL, hence passed as text.
            cur.execute("CALL inserir_prato_ementa(%s,%s,%s,%s)", (prato, descricao, diasemanal, str(restaurante)))
            cur.close()
            conn.close()
            controller.show_frame(Ementas)
            return True
        self.label_ERRO.config(text="Deve preencher todos os campos!")
        return False
class Associar_Alergia_Produto(tk.Frame):
    """Frame for linking an allergy (with a severity 1-5) to a product."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        label_titulo = tk.Label(self, text="Associar alergia\na produto", font=("Helvetica", 20))
        label_titulo.place(x=250, y=110)
        # Existing products
        v = controller.ver_nome_produtos()
        label_prodExistentes = tk.Label(self, text="Produto:", font=LARGE_FONT)
        label_prodExistentes.place(x=150, y=250)
        combo_prod = Combobox(self, values=v)
        combo_prod.place(x=350, y=250)
        # Known allergies
        a = self.ver_nome_alergias()
        label_a = tk.Label(self, text="Alergias:", font=LARGE_FONT)
        label_a.place(x=150, y=300)
        combo_a = Combobox(self, values=a)
        combo_a.place(x=350, y=300)
        # Allergy severity
        label_g = tk.Label(self, text="Gravidade:", font=LARGE_FONT)
        label_g.place(x=150, y=350)
        combo_g = Combobox(self, values=['1', '2', '3', '4', '5'])
        combo_g.place(x=350, y=350)
        self.label_ERRO = tk.Label(self, text="", font=LARGE_FONT)
        self.label_ERRO.place(x=200, y=500)
        self.label_ERRO.config(fg="red")
        button_inserir = tk.Button(self, text="Inserir", command=lambda: self.associar_alergia_produto(combo_prod.get(), combo_a.get(), combo_g.get(), controller))
        button_inserir.place(x=260, y=650)
        button_inserir.config(width=25, height=2, background="#87E193", fg="black")
        button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Consultar_Alergias))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def associar_alergia_produto(self, p, a, gravidade, controller):
        """Associate allergy *a* with product *p* at the given severity.

        Returns True on success; False when any field is empty.
        """
        if p != '' and a != '' and gravidade != '':
            conn = psycopg2.connect(host="localhost", database="RestaurantManagement", user="postgres", password="8DE2DF6A4D")
            conn.autocommit = True
            cur = conn.cursor()
            # Parameterized call replacing injectable string concatenation.
            cur.execute("CALL alergia_produtos(%s,%s,%s)", (p, a, gravidade))
            cur.close()
            conn.close()
            controller.show_frame(Ementas)
            return True
        self.label_ERRO.config(text="Deve preencher todos os campos!")
        return False

    def ver_nome_alergias(self):
        """Return the list of allergy names from the ver_nome_alergias() SQL function."""
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement", user="postgres", password="8DE2DF6A4D")
        conn.autocommit = True
        cur = conn.cursor()
        cur.execute('SELECT ver_nome_alergias()')
        # Each fetched row is a 1-tuple; the old code stripped its repr.
        nomes = [str(row[0]) for row in cur.fetchall()]
        cur.close()
        conn.close()
        return nomes
class Ementas_func(tk.Frame):
    """Main menu frame for regular employees (subset of the admin menu)."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        label = tk.Label(self, text="Pedidos e Faturas", font=LARGE_FONT)
        label.place(x=30, y=20)
        button_registarFatura = tk.Button(self, text="Registar Fatura", command=lambda: controller.show_frame(Registar_fatura))
        button_registarFatura.place(x=25, y=50)
        button_registarFatura.config(width=20, height=5, fg="black")
        button_verFaturas7dias = tk.Button(self, text="Consultar faturas\n(últimos 7 dias)", command=lambda: (controller.show_frame(Consultar_Faturas_semana)))
        button_verFaturas7dias.place(x=180, y=50)
        button_verFaturas7dias.config(width=20, height=5, fg="black")
        button_verPedidos = tk.Button(self, text="Consultar Pedidos", command=lambda: controller.show_frame(Consultar_Pedidos))
        button_verPedidos.place(x=335, y=50)
        button_verPedidos.config(width=20, height=5, fg="black")
        button_registarPedido = tk.Button(self, text="Registar pedido", command=lambda: controller.show_frame(Registar_pedido))
        button_registarPedido.place(x=490, y=50)
        button_registarPedido.config(width=20, height=5, fg="black")
        button_confirmarPagamento = tk.Button(self, text="Confirmar pagamento", command=lambda: controller.show_frame(Confirmar_Pagamento))
        button_confirmarPagamento.place(x=25, y=150)
        button_confirmarPagamento.config(width=20, height=5, fg="black")
        button_verTodasFaturas = tk.Button(self, text="Consultar faturas\n(desde o ínicio)", command=lambda: controller.show_frame(Consultar_Faturas))
        button_verTodasFaturas.place(x=180, y=150)
        button_verTodasFaturas.config(width=20, height=5, fg="black")
        button_reservas = tk.Button(self, text="Reservas", command=lambda: controller.show_frame(Gerir_Reservas))
        button_reservas.place(x=335, y=150)
        button_reservas.config(width=20, height=5, fg="black")
        label = tk.Label(self, text="Gestão restaurante", font=LARGE_FONT)
        label.place(x=30, y=270)
        button_verEmenta = tk.Button(self, text="Consultar ementa", command=lambda: controller.show_frame(Consultar_Ementa))
        button_verEmenta.place(x=25, y=300)
        button_verEmenta.config(width=20, height=5, fg="black")
        button_verAlergias = tk.Button(self, text="Consultar alergias \ndos pratos", command=lambda: controller.show_frame(Consultar_Alergias))
        button_verAlergias.place(x=180, y=300)
        button_verAlergias.config(width=20, height=5, fg="black")
        button_verPratos = tk.Button(self, text="Consultar pratos", command=lambda: controller.show_frame(Consultar_Pratos))
        button_verPratos.place(x=335, y=300)
        button_verPratos.config(width=20, height=5, fg="black")
        button_verProdutos = tk.Button(self, text="Consultar produtos", command=lambda: controller.show_frame(Ver_Produtos))
        button_verProdutos.place(x=490, y=300)
        button_verProdutos.config(width=20, height=5, fg="black")
        button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Inicio))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def gerar_xml(self):
        """Dump the restaurant data returned by gravarXML() to 'BackupDadosR&R.xml'.

        NOTE(review): no button in this frame calls this method — it appears
        to be duplicated from Ementas; confirm whether it is still needed.
        """
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement", user="postgres", password="8DE2DF6A4D")
        conn.autocommit = True
        cur = conn.cursor()
        # Parameterized query instead of string concatenation.
        cur.execute("SELECT gravarXML(%s)", (restaurante,))
        row = cur.fetchone()
        cur.close()
        conn.close()
        # fetchone() gives a 1-tuple holding the XML text; the old repr-based
        # stripping also corrupted any "('" sequences inside the document.
        info = row[0] if row is not None else ""
        # Context manager guarantees the file handle is closed even on error.
        with open('BackupDadosR&R.xml', 'w') as arquivo:
            arquivo.write(str(info))
class Ementas(tk.Frame):
def __init__(self, parent, controller):
tk.Frame.__init__(self, parent)
label = tk.Label(self, text="Pedidos e Faturas", font=LARGE_FONT)
label.place(x=30, y=20)
button_registarFatura = tk.Button(self, text="Registar Fatura",command=lambda: controller.show_frame(Registar_fatura))
button_registarFatura.place(x=25, y=50)
button_registarFatura.config(width=20,height=5, fg="black")
button_verFaturas7dias = tk.Button(self, text="Consultar faturas\n(últimos 7 dias)",command=lambda: (controller.show_frame(Consultar_Faturas_semana)))
button_verFaturas7dias.place(x=180, y=50)
button_verFaturas7dias.config(width=20,height=5, fg="black")
button_verPedidos = tk.Button(self, text="Consultar Pedidos",command=lambda: controller.show_frame(Consultar_Pedidos))
button_verPedidos.place(x=335, y=50)
button_verPedidos.config(width=20,height=5, fg="black")
button_registarPedido = tk.Button(self, text="Registar pedido",command=lambda: controller.show_frame(Registar_pedido))
button_registarPedido.place(x=490, y=50)
button_registarPedido.config(width=20,height=5, fg="black")
button_confirmarPagamento = tk.Button(self, text="Confirmar pagamento",command=lambda: controller.show_frame(Confirmar_Pagamento))
button_confirmarPagamento.place(x=25, y=150)
button_confirmarPagamento.config(width=20,height=5, fg="black")
button_verTodasFaturas = tk.Button(self, text="Consultar faturas\n(desde o ínicio)",command=lambda: controller.show_frame(Consultar_Faturas))
button_verTodasFaturas.place(x=180, y=150)
button_verTodasFaturas.config(width=20,height=5, fg="black")
button_reservas = tk.Button(self, text="Reservas",command=lambda: controller.show_frame(Gerir_Reservas))
button_reservas.place(x=335, y=150)
button_reservas.config(width=20,height=5, fg="black")
label = tk.Label(self, text="Gestão restaurante", font=LARGE_FONT)
label.place(x=30, y=270)
button_verEmenta = tk.Button(self, text="Ementa",command=lambda: controller.show_frame(Consultar_Ementa))
button_verEmenta.place(x=25, y=300)
button_verEmenta.config(width=20,height=5, fg="black")
button_verAlergias = tk.Button(self, text="Alergias",command=lambda: controller.show_frame(Consultar_Alergias))
button_verAlergias.place(x=180, y=300)
button_verAlergias.config(width=20,height=5, fg="black")
button_verPratos = tk.Button(self, text="Pratos",command=lambda: controller.show_frame(Consultar_Pratos))
button_verPratos.place(x=335, y=300)
button_verPratos.config(width=20,height=5, fg="black")
button_verProdutos = tk.Button(self, text="Produtos",command=lambda: controller.show_frame(Ver_Produtos))
button_verProdutos.place(x=490, y=300)
button_verProdutos.config(width=20,height=5, fg="black")
button_verHEmenta = tk.Button(self, text="Consultar\nhistórico de ementa",command=lambda: controller.show_frame(Consultar_HistoricoEmenta))
button_verHEmenta.place(x=25, y=400)
button_verHEmenta.config(width=20,height=5, fg="black")
button_gravarXML = tk.Button(self, text="Gravar XML",command=lambda: self.gerar_xml())
button_gravarXML.place(x=180, y=400)
button_gravarXML.config(width=20,height=5, fg="black")
button_verPedidosConta = tk.Button(self, text="Ver pedidos de\ncriação de conta",command=lambda: controller.show_frame(Consultar_RegistoContas))
button_verPedidosConta.place(x=335, y=400)
button_verPedidosConta.config(width=20,height=5, fg="black")
button_stock = tk.Button(self, text="Alerta Stock",command=lambda: controller.show_frame(Stock_Fornecedores))
button_stock.place(x=490, y=400)
button_stock.config(width=20,height=5, fg="black")
button_clientes = tk.Button(self, text="Clientes",command=lambda: controller.show_frame(Consultar_Clientes))
button_clientes.place(x=25, y=500)
button_clientes.config(width=20,height=5, fg="black")
self.label_xml = tk.Label(self, text="", font=LARGE_FONT)
self.label_xml.place(x=245, y=655)
self.label_xml.config(fg="black")
button_voltar = tk.Button(self, text="Voltar", command=lambda: controller.show_frame(Inicio))
button_voltar.place(x=25, y=650)
button_voltar.config(width=25,height=2, background="#FF7F50", fg="black")
button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
button_sair.place(x=500, y=650)
button_sair.config(width=25,height=2, background="#BC0022", fg="white")
def gerar_xml(self):
    """Export the restaurant's data as XML via the DB function gravarXML().

    The XML text returned by the database is written to
    'BackupDadosR&R.xml' and success is reported on the page's status label.
    """
    conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                            user="postgres", password="8DE2DF6A4D")
    try:
        conn.autocommit = True
        cur = conn.cursor()
        # Parameterized query instead of concatenating str(restaurante).
        cur.execute("SELECT gravarXML(%s)", (restaurante,))
        resultado = cur.fetchone()
        # fetchone() returns a 1-tuple: take the payload directly instead of
        # stripping "('" / "',)" from the tuple's repr as before.
        info = '' if resultado is None else resultado[0]
        cur.close()
    finally:
        # Guarantee the connection is released even if the query fails.
        conn.close()
    # Context manager closes the file even on a write error.
    with open('BackupDadosR&R.xml', 'w') as arquivo:
        arquivo.write(str(info))
    self.label_xml.config(text="XML gravado com sucesso!")
class Consultar_Pratos(tk.Frame):
    """Page listing the existing dishes.

    Any user can pick a dish and view its details; administrators can
    additionally create a new dish or remove the selected one.
    """

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the page widgets (title, dish combobox, action buttons)."""
        label_titulo = tk.Label(self, text="Consultar pratos", font=("Helvetica", 20))
        label_titulo.place(x=250, y=100)
        v = controller.ver_pratos()  # existing dishes
        label_prato = tk.Label(self, text="Pratos existentes:", font=LARGE_FONT)
        label_prato.place(x=150, y=300)
        prato = Combobox(self, values=v)
        prato.place(x=350, y=300)
        button_detalhes = tk.Button(self, text="Ver detalhes",
                                    command=lambda: controller.show_detalhes_prato(prato.get()))
        button_detalhes.place(x=260, y=550)
        button_detalhes.config(width=25, height=2, background="#87E193", fg="black")
        self.label_ERRO = tk.Label(self, text="", font=LARGE_FONT)
        self.label_ERRO.place(x=250, y=400)
        self.label_ERRO.config(fg="red")
        if admin == 1:
            # Admin-only actions; admins return to the admin menu, staff to
            # the staff menu.
            button_inserir = tk.Button(self, text="Criar Prato",
                                       command=lambda: controller.show_frame(Inserir_Prato))
            button_inserir.place(x=260, y=600)
            button_inserir.config(width=25, height=2, background="#FFFFFA", fg="black")
            button_remover = tk.Button(self, text="Remover Prato",
                                       command=lambda: self.remover_prato(prato.get(), controller))
            button_remover.place(x=260, y=650)
            button_remover.config(width=25, height=2, background="#FFFFFA", fg="black")
            button_voltar = tk.Button(self, text="Voltar",
                                      command=lambda: controller.show_frame(Ementas))
        else:
            button_voltar = tk.Button(self, text="Voltar",
                                      command=lambda: controller.show_frame(Ementas_func))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def remover_prato(self, p, controller):
        """Remove dish *p* from the database.

        Returns True on success; False (with an on-screen error) when no
        dish was selected.
        """
        if p != '':
            conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                    user="postgres", password="8DE2DF6A4D")
            try:
                conn.autocommit = True
                cur = conn.cursor()
                # Parameterized: the dish name comes from the UI and must not
                # be concatenated into the SQL text (SQL injection).
                cur.execute("CALL remover_prato(%s)", (p,))
                cur.close()
            finally:
                conn.close()
            controller.show_frame(Ementas)
            return True
        self.label_ERRO.config(text="Deve selecionar o prato\na eliminar!")
        return False
class Consultar_Ementa(tk.Frame):
    """Page showing the current menu in a table.

    Administrators can add a dish to the menu, remove the selected entry,
    or clear the whole menu; staff only browse.
    """

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the menu table and action buttons."""
        labelEmenta = tk.Label(self, text="Ementa", font=("Helvetica", 20))
        labelEmenta.place(x=60, y=50)
        colunas = ('ID', 'Dia semanal', 'Prato', 'Preço')
        self.lista = ttk.Treeview(self, columns=colunas, show='headings')
        # set column headings
        for col in colunas:
            self.lista.heading(col, text=col, anchor='center')
            self.lista.column(col, width=25, anchor='center')
        self.lista.grid(row=1, column=0, columnspan=2, ipadx=250, padx=50, pady=200)
        for data in self.ver_ementa():
            self.lista.insert("", "end", values=data)
        self.label_ERRO = tk.Label(self, text="", font=LARGE_FONT)
        self.label_ERRO.place(x=200, y=500)
        self.label_ERRO.config(fg="red")
        self.prato = ''  # ID of the currently selected menu entry
        self.lista.bind('<<TreeviewSelect>>', self.selectItem)
        if admin == 1:
            button_inserir = tk.Button(self, text="Inserir prato na ementa >",
                                       command=lambda: controller.show_frame(Inserir_Prato_Ementa))
            button_inserir.place(x=450, y=50)
            button_inserir.config(width=25, height=2, background="white", fg="black")
            button_removerPrato = tk.Button(self, text="Remover prato da ementa",
                                            command=lambda: self.remover_prato(controller, self.prato))
            button_removerPrato.place(x=260, y=600)
            button_removerPrato.config(width=25, height=2, background="#c4cbd5", fg="black")
            button_reset = tk.Button(self, text="Limpar ementa",
                                     command=lambda: self.limpar_ementa(controller))
            button_reset.place(x=260, y=650)
            button_reset.config(width=25, height=2, background="#c4cbd5", fg="black")
            button_voltar = tk.Button(self, text="Voltar",
                                      command=lambda: controller.show_frame(Ementas))
            button_voltar.place(x=25, y=650)
            button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        else:
            button_voltarfunc = tk.Button(self, text="Voltar",
                                          command=lambda: controller.show_frame(Ementas_func))
            button_voltarfunc.place(x=25, y=650)
            button_voltarfunc.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def limpar_ementa(self, controller):
        """Clear the whole menu of the current restaurant."""
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                user="postgres", password="8DE2DF6A4D")
        try:
            conn.autocommit = True
            cur = conn.cursor()
            # Parameterized instead of string concatenation.
            cur.execute("CALL limpar_ementa(%s)", (restaurante,))
            cur.close()
        finally:
            conn.close()
        controller.show_frame(Ementas)

    def remover_prato(self, controller, id_e):
        """Remove menu entry *id_e*; False (with on-screen error) if none selected."""
        if id_e != '':
            conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                    user="postgres", password="8DE2DF6A4D")
            try:
                conn.autocommit = True
                cur = conn.cursor()
                # Parameterized: id_e originates from the UI selection.
                cur.execute("CALL remover_prato_ementa(%s)", (id_e,))
                cur.close()
            finally:
                conn.close()
            controller.show_frame(Ementas)
            return True
        self.label_ERRO.config(text="Deve selecionar um prato para remover!")
        return False

    def selectItem(self, event):
        """Remember the first column (the entry ID) of the selected row.

        NOTE(review): the '('/"'" filtering and the str(list).replace()
        unpacking look like leftover parsing of a stringified tuple; they
        would misbehave on values containing quotes — confirm before
        simplifying. Logic kept as-is.
        """
        for item in self.lista.selection():
            item_text = self.lista.item(item, 'values')
            vvv = []
            x = 0
            for i in item_text:
                if (i == '(' or i == "'") and x <= 2:
                    x = x + 1
                else:
                    vvv.append(i)
                    break
            vv = str(vvv).replace("['", "")
            p = str(vv).replace("']", "")
            self.prato = p

    def ver_ementa(self):
        """Fetch the current menu rows for this restaurant."""
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                user="postgres", password="8DE2DF6A4D")
        try:
            conn.autocommit = True
            cur = conn.cursor()
            cur.execute("select * from ver_ementa(%s)", (restaurante,))
            v = cur.fetchall()
            cur.close()
        finally:
            conn.close()
        return v
class Consultar_HistoricoEmenta(tk.Frame):
    """Read-only page showing the menu history of the current restaurant."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the history table and the navigation buttons."""
        labelEmenta = tk.Label(self, text="Histórico de Ementa", font=("Helvetica", 20))
        labelEmenta.place(x=230, y=80)
        colunas = ('Prato', 'Preço', 'Data', 'Dia Semanal')
        self.lista = ttk.Treeview(self, columns=colunas, show='headings')
        # set column headings
        for col in colunas:
            self.lista.heading(col, text=col, anchor='center')
            self.lista.column(col, width=15, anchor='center')
        self.lista.grid(row=1, column=0, columnspan=2, ipadx=250, padx=75, pady=200)
        for data in self.ver_hementa():
            self.lista.insert("", "end", values=data)
        button_voltar = tk.Button(self, text="Voltar",
                                  command=lambda: controller.show_frame(Ementas))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def ver_hementa(self):
        """Fetch the menu-history rows for the current restaurant."""
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                user="postgres", password="8DE2DF6A4D")
        try:
            conn.autocommit = True
            cur = conn.cursor()
            # Parameterized instead of concatenating str(restaurante).
            cur.execute("select * from ver_historicoementa(%s)", (restaurante,))
            v = cur.fetchall()
            cur.close()
        finally:
            conn.close()
        return v
class Consultar_Alergias(tk.Frame):
    """Read-only page listing dishes that contain allergenic products.

    Administrators additionally get a shortcut to associate an allergy
    with a product.
    """

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the allergies table plus the navigation buttons."""
        titulo = tk.Label(self, text="Pratos com alergias", font=("Helvetica", 20))
        titulo.place(x=230, y=50)

        cabecalhos = ('Prato', 'Produto', 'Alergia')
        tabela = ttk.Treeview(self, columns=cabecalhos, show='headings')
        for cab in cabecalhos:
            tabela.heading(cab, text=cab, anchor='center')
            tabela.column(cab, width=15, anchor='center')
        # NOTE(review): both grid() and pack() are invoked on this widget;
        # this looks redundant — confirm which manager is intended before
        # removing either call. Kept as in the original.
        tabela.grid(row=1, column=0, columnspan=2)
        tabela.pack(side=tk.TOP, fill=tk.X, pady=200, padx=20)
        for linha in self.ver_alergias():
            tabela.insert("", "end", values=linha)

        if admin == 1:
            button_voltar = tk.Button(self, text="Voltar",
                                      command=lambda: controller.show_frame(Ementas))
            button_associar = tk.Button(self, text="Associar alergia\na produto",
                                        command=lambda: controller.show_frame(Associar_Alergia_Produto))
            button_associar.place(x=260, y=650)
            button_associar.config(width=25, height=2, background="white", fg="black")
        else:
            button_voltar = tk.Button(self, text="Voltar",
                                      command=lambda: controller.show_frame(Ementas_func))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")

        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def ver_alergias(self):
        """Fetch every (dish, product, allergy) row from the database."""
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                user="postgres", password="8DE2DF6A4D")
        conn.autocommit = True
        cur = conn.cursor()
        cur.execute('SELECT * FROM ver_pratos_alergias()')
        linhas = cur.fetchall()
        cur.close()
        conn.close()
        return linhas
class Ver_Produtos(tk.Frame):
    """Page listing products in stock.

    Administrators can create a product, remove the selected one, or add
    units to its stock; staff only browse.
    """

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the product table and the action widgets."""
        label = tk.Label(self, text="Consultar produtos", font=("Helvetica", 20))
        label.place(x=230, y=60)
        colunas = ('Nome', 'Preço', 'IVA', 'Stock', 'Zona de confeção')
        self.lista = ttk.Treeview(self, columns=colunas, show='headings')
        # set column headings
        for col in colunas:
            self.lista.heading(col, text=col, anchor='center')
            self.lista.column(col, width=15, anchor='center')
        self.lista.grid(row=1, column=0, columnspan=2, ipadx=250, padx=50, pady=150)
        for data in self.consultar_produtos():
            self.lista.insert("", "end", values=data)
        self.label_ERRO = tk.Label(self, text="", font=LARGE_FONT)
        self.label_ERRO.place(x=175, y=410)
        self.label_ERRO.config(fg="red")
        self.prod = ''  # name of the currently selected product
        self.lista.bind('<<TreeviewSelect>>', self.selectItem)
        if admin == 1:
            label_qnt = tk.Label(self, text="Quantidade:", font=LARGE_FONT)
            label_qnt.place(x=50, y=500)
            qnt = tk.Entry(self, font=30)
            qnt.place(relwidth=0.30, rely=0.01, relheight=0.04, y=495, x=170)
            button_inserir = tk.Button(self, text="Criar novo",
                                       command=lambda: controller.show_frame(Inserir_Produtos))
            button_inserir.place(x=200, y=600)
            button_inserir.config(width=15, height=2, background="#87E193", fg="black")
            button_remover = tk.Button(self, text="Remover",
                                       command=lambda: self.remover_produto(self.prod, controller))
            button_remover.place(x=400, y=600)
            button_remover.config(width=15, height=2, background="#FFFFFA", fg="black")
            button_voltar = tk.Button(self, text="Voltar",
                                      command=lambda: controller.show_frame(Ementas))
            button_adicionar = tk.Button(self, text="Adicionar produtos",
                                         command=lambda: self.add_prod(self.prod, qnt.get(), controller))
            button_adicionar.place(x=420, y=495)
            button_adicionar.config(width=25, height=2, background="white", fg="black")
        else:
            button_voltar = tk.Button(self, text="Voltar",
                                      command=lambda: controller.show_frame(Ementas_func))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def selectItem(self, event):
        """Remember the first column (the product name) of the selected row.

        NOTE(review): the '('/"'" filtering and str(list).replace() trick
        look like leftover parsing; kept verbatim to avoid behavior change.
        """
        for item in self.lista.selection():
            item_text = self.lista.item(item, 'values')
            vvv = []
            x = 0
            for i in item_text:
                if (i == '(' or i == "'") and x <= 2:
                    x = x + 1
                else:
                    vvv.append(i)
                    break
            vv = str(vvv).replace("['", "")
            p = str(vv).replace("']", "")
            self.prod = p

    def remover_produto(self, p, controller):
        """Remove product *p*; False (with on-screen error) if none selected."""
        if p != '':
            conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                    user="postgres", password="8DE2DF6A4D")
            try:
                conn.autocommit = True
                cur = conn.cursor()
                # Parameterized: the product name comes from the UI
                # selection (SQL injection via concatenation before).
                cur.execute("CALL remover_produto(%s)", (p,))
                cur.close()
            finally:
                conn.close()
            controller.show_frame(Ementas)
            return True
        self.label_ERRO.config(text="Deve selecionar um produto para remover!")
        return False

    def add_prod(self, p, quantidade, controller):
        """Add *quantidade* units of product *p* to stock.

        Returns True on success; False when nothing is selected or the
        quantity is not numeric.
        """
        if p != '' and controller.verifica_Numeros(quantidade):
            conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                    user="postgres", password="8DE2DF6A4D")
            try:
                conn.autocommit = True
                cur = conn.cursor()
                # Parameterized; quantidade was validated as numeric above.
                cur.execute("CALL adicionar_produtos(%s, %s)", (p, quantidade))
                cur.close()
            finally:
                conn.close()
            controller.show_frame(Ementas)
            return True
        self.label_ERRO.config(text="Deve selecionar um produto e quantidade a adicionar!")
        return False

    def consultar_produtos(self):
        """Fetch every product row from the database."""
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                user="postgres", password="8DE2DF6A4D")
        try:
            conn.autocommit = True
            cur = conn.cursor()
            cur.execute('SELECT * FROM ver_produtos()')
            valor = cur.fetchall()
            cur.close()
        finally:
            conn.close()
        return valor
class Consultar_RegistoContas(tk.Frame):
    """Page listing pending account-creation requests.

    The administrator can confirm the selected request or reject it.
    """

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the request table and the confirm/reject buttons."""
        label_pedidos = tk.Label(self, text="Pedidos de criação\nde contas", font=("Helvetica", 20))
        label_pedidos.place(x=220, y=50)
        colunas = ('ID', 'Data', 'Administrador', 'Username', 'Password')
        self.lista = ttk.Treeview(self, columns=colunas, show='headings')
        # set column headings
        for col in colunas:
            self.lista.heading(col, text=col, anchor='center')
            self.lista.column(col, width=15, anchor='center')
        self.lista.grid(row=1, column=0, columnspan=2, ipadx=250, padx=50, pady=200)
        for data in self.ver_pedidos_registo():
            self.lista.insert("", "end", values=data)
        self.label_ERRO = tk.Label(self, text="", font=LARGE_FONT)
        self.label_ERRO.place(x=170, y=450)
        self.label_ERRO.config(fg="red")
        self.conta = ''  # ID of the currently selected request
        self.lista.bind('<<TreeviewSelect>>', self.selectItem)
        button_confirmar = tk.Button(self, text="Confirmar conta",
                                     command=lambda: self.aceitar_criacao(controller, self.conta))
        button_confirmar.place(x=260, y=600)
        button_confirmar.config(width=25, height=2, background="#c4cbd5", fg="black")
        button_rejeitar = tk.Button(self, text="Rejeitar pedido",
                                    command=lambda: self.remover_pedido_conta(controller, self.conta))
        button_rejeitar.place(x=260, y=650)
        button_rejeitar.config(width=25, height=2, background="#c4cbd5", fg="black")
        button_voltar = tk.Button(self, text="Voltar",
                                  command=lambda: controller.show_frame(Ementas))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def aceitar_criacao(self, controller, conta):
        """Confirm account request *conta*; False (with error) if none selected."""
        if conta != '':
            conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                    user="postgres", password="8DE2DF6A4D")
            try:
                conn.autocommit = True
                cur = conn.cursor()
                # Parameterized instead of string concatenation.
                cur.execute("CALL confirmar_criacao_conta(%s)", (conta,))
                cur.close()
            finally:
                conn.close()
            controller.show_frame(Ementas)
            return True
        self.label_ERRO.config(text="Deve selecionar uma conta para confirmar!")
        return False

    def remover_pedido_conta(self, controller, conta):
        """Reject account request *conta*; False (with error) if none selected."""
        if conta != '':
            conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                    user="postgres", password="8DE2DF6A4D")
            try:
                conn.autocommit = True
                cur = conn.cursor()
                cur.execute("CALL remover_pedido_conta(%s)", (conta,))
                cur.close()
            finally:
                conn.close()
            controller.show_frame(Ementas)
            return True
        self.label_ERRO.config(text="Deve selecionar uma conta para rejeitar!")
        return False

    def selectItem(self, event):
        """Remember the first column (the request ID) of the selected row.

        NOTE(review): filtering/unpacking kept verbatim from the original
        (see the identical helper in the other pages).
        """
        for item in self.lista.selection():
            item_text = self.lista.item(item, 'values')
            vvv = []
            x = 0
            for i in item_text:
                if (i == '(' or i == "'") and x <= 2:
                    x = x + 1
                else:
                    vvv.append(i)
                    break
            vv = str(vvv).replace("['", "")
            p = str(vv).replace("']", "")
            self.conta = p

    def ver_pedidos_registo(self):
        """Fetch the pending account-creation requests for this restaurant."""
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                user="postgres", password="8DE2DF6A4D")
        try:
            conn.autocommit = True
            cur = conn.cursor()
            cur.execute("select * from consultar_pedidos_registo(%s)", (restaurante,))
            v = cur.fetchall()
            cur.close()
        finally:
            conn.close()
        return v
class CriarConta(tk.Frame):
    """Account-creation form (username, password, role, restaurant)."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        label_titulo = tk.Label(self, text="Criação de conta", font=("Helvetica", 20))
        label_titulo.place(x=240, y=110)
        label_user = tk.Label(self, text="Utilizador:", font=LARGE_FONT)
        label_user.place(x=150, y=250)
        user = tk.Entry(self, font=LARGE_FONT)
        user.place(relwidth=0.40, rely=0.01, relheight=0.05, y=240, x=265)
        user.focus_set()
        label_pass = tk.Label(self, text="Password:", font=LARGE_FONT)
        label_pass.place(x=150, y=330)
        password = tk.Entry(self, font=LARGE_FONT, show="*")
        password.place(relwidth=0.40, rely=0.01, relheight=0.05, y=320, x=265)
        password.focus_set()
        label_confirmar = tk.Label(self, text="Confirmar password:", font=LARGE_FONT)
        label_confirmar.place(x=65, y=410)
        confirmar_password = tk.Entry(self, font=LARGE_FONT, show="*")
        confirmar_password.place(relwidth=0.40, rely=0.01, relheight=0.05, y=400, x=265)
        confirmar_password.focus_set()
        label_funcao = tk.Label(self, text="Função:", font=LARGE_FONT)
        label_funcao.pack(pady=30, padx=2)
        label_funcao.place(x=80, y=490)
        combo_funcao = Combobox(self, values=['Administrador', 'Funcionário'])
        combo_funcao.place(x=165, y=490)
        label_rest = tk.Label(self, text="Restaurante:", font=LARGE_FONT)
        label_rest.pack(pady=30, padx=2)
        label_rest.place(x=340, y=490)
        combo_rest = Combobox(self, values=['Titanic', 'Ti João', 'A caverna', 'Tons e Sabores'])
        combo_rest.place(x=455, y=490)
        self.label_ERRO = tk.Label(self, text="", font=LARGE_FONT)
        self.label_ERRO.place(x=185, y=570)
        self.label_ERRO.config(fg="red")
        button_criar = tk.Button(self, text="Criar conta",
                                 command=lambda: self.criar_conta(user.get(), password.get(),
                                                                 confirmar_password.get(),
                                                                 combo_funcao.get(),
                                                                 combo_rest.get(), controller))
        button_criar.place(x=260, y=650)
        button_criar.config(width=25, height=2, background="#BDB76B", fg="black")
        # From the login screen (restaurante == 0) "Voltar" returns to the
        # start page, otherwise to the admin menu.
        if restaurante != 0:
            button_voltar = tk.Button(self, text="Voltar",
                                      command=lambda: controller.show_frame(Ementas))
        else:
            button_voltar = tk.Button(self, text="Voltar",
                                      command=lambda: controller.show_frame(Inicio))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def criar_conta(self, user, password, conf_password, funcao, r, controller):
        """Validate the form and submit the account-creation request.

        Returns True on success, False (with an on-screen error) on any
        validation failure. Guard clauses replace the original nested ifs,
        so every missing-field case now reports an error and returns False
        instead of silently falling through.
        """
        if user == '' or password == '' or funcao == '' or r == '':
            self.label_ERRO.config(text="É necessário preencher todos os campos!")
            return False
        if password != conf_password:
            # Bug fix: this branch previously showed the unrelated
            # "account already exists" message.
            self.label_ERRO.config(text="As passwords não coincidem!")
            return False
        # Restaurant name -> DB id; anything else maps to 4 ("Tons e Sabores").
        rest = {'Titanic': 1, 'Ti João': 2, 'A caverna': 3}.get(r, 4)
        # NOTE(review): MD5 is unsuitable for password hashing; kept only
        # for compatibility with existing DB records — migrate to
        # bcrypt/scrypt/argon2.
        enc_pass = hashlib.md5(password.encode()).hexdigest()
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                user="postgres", password="8DE2DF6A4D")
        try:
            conn.autocommit = True
            cur = conn.cursor()
            # Parameterized: user/funcao are free-form UI input.
            cur.execute("SELECT criar_conta(%s, %s, %s, %s)",
                        (user, enc_pass, funcao, rest))
            cur.close()
        finally:
            conn.close()
        controller.show_frame(Inicio)
        return True
class Stock_Fornecedores(tk.Frame):
    """Read-only page listing low-stock alerts with supplier contacts."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the stock-alert table and the navigation buttons."""
        label_stock = tk.Label(self, text="Alerta de stock", font=("Helvetica", 20))
        label_stock.place(x=230, y=50)
        colunas = ('Produto', 'Quantidade', 'Stock Min.', 'Data', 'Fornecedor', 'Contacto')
        lista = ttk.Treeview(self, columns=colunas, show='headings')
        # set column headings
        for col in colunas:
            lista.heading(col, text=col, anchor='center')
            lista.column(col, width=15, anchor='center')
        # NOTE(review): both grid() and pack() are invoked on this widget;
        # this looks redundant — confirm which manager is intended before
        # removing either call. Kept as in the original.
        lista.grid(row=1, column=0, columnspan=2)
        lista.pack(side=tk.TOP, fill=tk.X, pady=200, padx=20)
        for data in self.ver_alerta_stock():
            lista.insert("", "end", values=data)
        button_voltar = tk.Button(self, text="Voltar",
                                  command=lambda: controller.show_frame(Ementas))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def ver_alerta_stock(self):
        """Fetch the low-stock alert rows for the current restaurant."""
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                user="postgres", password="8DE2DF6A4D")
        try:
            conn.autocommit = True
            cur = conn.cursor()
            # Parameterized instead of concatenating str(restaurante).
            cur.execute("SELECT * FROM consultar_alertastock(%s)", (restaurante,))
            v = cur.fetchall()
            cur.close()
        finally:
            conn.close()
        return v
class Consultar_Clientes(tk.Frame):
    """Page listing client accounts and letting the admin update NIF,
    observations, and name of the selected client."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.pagina(parent, controller)

    def pagina(self, parent, controller):
        """Build the client table, the edit fields, and the buttons."""
        labelEmenta = tk.Label(self, text="Contas dos clientes", font=("Helvetica", 20))
        labelEmenta.place(x=60, y=50)
        colunas = ('Nº Cliente', 'Nome', 'NIF', 'Descrição')
        self.lista = ttk.Treeview(self, columns=colunas, show='headings')
        # set column headings
        for col in colunas:
            self.lista.heading(col, text=col, anchor='center')
            self.lista.column(col, width=25, anchor='center')
        self.lista.grid(row=1, column=0, columnspan=2, ipadx=250, padx=50, pady=100)
        for data in self.ver_clientes():
            self.lista.insert("", "end", values=data)
        self.label_ERRO = tk.Label(self, text="", font=LARGE_FONT)
        self.label_ERRO.place(x=130, y=550)
        self.label_ERRO.config(fg="red")
        self.cliente = ''  # number of the currently selected client
        self.lista.bind('<<TreeviewSelect>>', self.selectItem)
        label_nif = tk.Label(self, text="NIF:", font=LARGE_FONT)
        label_nif.place(x=50, y=350)
        nif = tk.Entry(self, font=30)
        nif.place(relwidth=0.30, rely=0.01, relheight=0.04, y=345, x=110)
        label_obs = tk.Label(self, text="Obs:", font=LARGE_FONT)
        label_obs.place(x=50, y=400)
        obs = tk.Entry(self, font=30)
        obs.place(relwidth=0.30, rely=0.01, relheight=0.04, y=395, x=110)
        label_nome = tk.Label(self, text="Nome:", font=LARGE_FONT)
        label_nome.place(x=50, y=450)
        nome = tk.Entry(self, font=30)
        nome.place(relwidth=0.30, rely=0.01, relheight=0.04, y=445, x=110)
        button_atualizarNome = tk.Button(self, text="Atualizar nome",
                                         command=lambda: self.atualizar_nome(self.cliente, nome.get(), controller))
        button_atualizarNome.place(x=400, y=445)
        button_atualizarNome.config(width=25, height=2, background="#c4cbd5", fg="black")
        button_atualizarNIF = tk.Button(self, text="Atualizar NIF",
                                        command=lambda: self.atualizar_nif(self.cliente, nif.get(), controller))
        button_atualizarNIF.place(x=400, y=345)
        button_atualizarNIF.config(width=25, height=2, background="#c4cbd5", fg="black")
        button_atualizarObs = tk.Button(self, text="Atualizar observações",
                                        command=lambda: self.atualizar_obs(self.cliente, obs.get(), controller))
        button_atualizarObs.place(x=400, y=395)
        button_atualizarObs.config(width=25, height=2, background="#c4cbd5", fg="black")
        button_voltar = tk.Button(self, text="Voltar",
                                  command=lambda: controller.show_frame(Ementas))
        button_voltar.place(x=25, y=650)
        button_voltar.config(width=25, height=2, background="#FF7F50", fg="black")
        button_sair = tk.Button(self, text="Sair", command=lambda: controller.endApp())
        button_sair.place(x=500, y=650)
        button_sair.config(width=25, height=2, background="#BC0022", fg="white")

    def selectItem(self, event):
        """Remember the first column (the client number) of the selected row.

        NOTE(review): filtering/unpacking kept verbatim from the original.
        """
        for item in self.lista.selection():
            item_text = self.lista.item(item, 'values')
            vvv = []
            x = 0
            for i in item_text:
                if (i == '(' or i == "'") and x <= 2:
                    x = x + 1
                else:
                    vvv.append(i)
                    break
            vv = str(vvv).replace("['", "")
            p = str(vv).replace("']", "")
            self.cliente = p

    def ver_clientes(self):
        """Fetch the client rows for the current restaurant."""
        conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                user="postgres", password="8DE2DF6A4D")
        try:
            conn.autocommit = True
            cur = conn.cursor()
            cur.execute("select * from consultar_clientes(%s)", (restaurante,))
            v = cur.fetchall()
            cur.close()
        finally:
            conn.close()
        return v

    def atualizar_nif(self, cliente, nif, controller):
        """Update the NIF of *cliente*; both fields required, NIF numeric."""
        if cliente != '' and nif != '':
            if controller.verifica_Numeros(nif):
                conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                        user="postgres", password="8DE2DF6A4D")
                try:
                    conn.autocommit = True
                    cur = conn.cursor()
                    # Parameterized; both values were validated as numeric.
                    cur.execute("CALL atualizarnif_cliente(%s, %s)", (cliente, nif))
                    cur.close()
                finally:
                    conn.close()
                controller.show_frame(Ementas)
                return True
        self.label_ERRO.config(text="É necessário preencher todos os campos devidamente, \nassim como selecionar o cliente!")
        return False

    def atualizar_nome(self, cliente, nome, controller):
        """Update the name of *cliente*; name must be non-empty, non-numeric."""
        if cliente != '' and nome != '':
            if not controller.verifica_Numeros(nome):
                conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                        user="postgres", password="8DE2DF6A4D")
                try:
                    conn.autocommit = True
                    cur = conn.cursor()
                    # Parameterized: the name is free-form UI input
                    # (SQL injection via concatenation before).
                    cur.execute("CALL atualizarnome_cliente(%s, %s)", (cliente, nome))
                    cur.close()
                finally:
                    conn.close()
                controller.show_frame(Ementas)
                return True
        self.label_ERRO.config(text="É necessário preencher todos os campos devidamente, \nassim como selecionar o cliente!")
        return False

    def atualizar_obs(self, cliente, obs, controller):
        """Update the observations of *cliente*."""
        if cliente != '' and obs != '':
            conn = psycopg2.connect(host="localhost", database="RestaurantManagement",
                                    user="postgres", password="8DE2DF6A4D")
            try:
                conn.autocommit = True
                cur = conn.cursor()
                # NOTE(review): this calls atualizarnome_cliente — the same
                # procedure as atualizar_nome — which looks like a
                # copy-paste slip (an atualizarobs_cliente procedure was
                # likely intended). Call kept as-is pending confirmation,
                # but parameterized.
                cur.execute("CALL atualizarnome_cliente(%s, %s)", (cliente, obs))
                cur.close()
            finally:
                conn.close()
            controller.show_frame(Ementas)
            return True
        self.label_ERRO.config(text="É necessário preencher todos os campos devidamente, \nassim como selecionar o cliente!")
        return False
app = ecra_entrada()
app.mainloop() | 50.870659 | 233 | 0.596646 | 12,170 | 101,080 | 4.832293 | 0.045111 | 0.025098 | 0.027139 | 0.036185 | 0.821201 | 0.79226 | 0.768726 | 0.752742 | 0.740822 | 0.729412 | 0 | 0.040756 | 0.263761 | 101,080 | 1,987 | 234 | 50.870659 | 0.749419 | 0.008746 | 0 | 0.671325 | 0 | 0.000559 | 0.110193 | 0.0049 | 0 | 0 | 0 | 0 | 0 | 1 | 0.063723 | false | 0.045277 | 0.005031 | 0 | 0.127446 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
6c44a12fd52f71ed6851944da1d3f5378205a0b8 | 14,744 | py | Python | src/resnet_models.py | AmirmohammadRostami/ASV-anti-spoofing-with-EABN | ab0be6a013a72c62a2a9b17f517d1c8894afbece | [
"MIT"
] | 2 | 2021-09-28T19:49:29.000Z | 2021-10-04T07:49:04.000Z | src/resnet_models.py | AmirmohammadRostami/ASV-anti-spoofing-with-EABN | ab0be6a013a72c62a2a9b17f517d1c8894afbece | [
"MIT"
] | null | null | null | src/resnet_models.py | AmirmohammadRostami/ASV-anti-spoofing-with-EABN | ab0be6a013a72c62a2a9b17f517d1c8894afbece | [
"MIT"
] | 1 | 2021-11-22T09:27:18.000Z | 2021-11-22T09:27:18.000Z | import math
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.model_zoo as model_zoo
from src.resnet_blocks import SELayer, BasicBlock, SEBasicBlock, Bottleneck, SEBottleneck, Bottle2neck, SEBottle2neck
from src.pooling import StatsPooling
class ResNet(nn.Module):
    """Basic ResNet classifier over single-channel 2-D inputs.

    Adapted from torchvision's ResNet
    (https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py)
    with a 16-channel stem and a 1-channel input. ``forward`` returns
    log-probabilities of shape (N, num_classes).
    """

    def __init__(self, block, layers, num_classes, KaimingInit=False):
        """Args:
            block: residual block class exposing an ``expansion`` attribute
                and accepting (inplanes, planes, stride, downsample).
            layers: number of blocks in each of the four stages.
            num_classes: size of the classification head.
            KaimingInit: when truthy, apply Kaiming-normal init to every conv.
        """
        self.inplanes = 16
        super(ResNet, self).__init__()
        self.conv1 = nn.Conv2d(1, 16, kernel_size=7, stride=2, padding=3, bias=False)
        self.bn1 = nn.BatchNorm2d(16)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 16, layers[0])
        self.layer2 = self._make_layer(block, 32, layers[1], stride=2)
        self.layer3 = self._make_layer(block, 64, layers[2], stride=2)
        self.layer4 = self._make_layer(block, 128, layers[3], stride=2)
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.classifier = nn.Linear(128 * block.expansion, num_classes)

        if KaimingInit:  # fixed: was `KaimingInit == True`
            print('Using Kaiming Initialization.')
            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
        # BatchNorm affine params always start at identity (weight=1, bias=0).
        for m in self.modules():
            if isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)

    def _make_layer(self, block, planes, blocks, stride=1):
        """Stack *blocks* residual blocks, adding a 1x1-conv downsample
        shortcut when the spatial stride or channel count changes."""
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion),
            )
        layers = [block(self.inplanes, planes, stride, downsample)]
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(block(self.inplanes, planes))
        return nn.Sequential(*layers)

    def forward(self, x):
        """Return log-probabilities for a batch x of shape (N, 1, H, W)."""
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = self.avgpool(x).view(x.size()[0], -1)
        out = self.classifier(x)
        return F.log_softmax(out, dim=-1)
class Res2Net(nn.Module):
def __init__(self, block, layers, baseWidth=26, scale=4, m=0.35, num_classes=1000, **kwargs):
self.inplanes = 16
super(Res2Net, self).__init__()
self.baseWidth = baseWidth
self.scale = scale
self.conv1 = nn.Sequential(nn.Conv2d(1, 16, 3, 1, 1, bias=False),
nn.BatchNorm2d(16), nn.ReLU(inplace=True),
nn.Conv2d(16, 16, 3, 1, 1, bias=False),
nn.BatchNorm2d(16), nn.ReLU(inplace=True),
nn.Conv2d(16, 16, 3, 1, 1, bias=False))
self.bn1 = nn.BatchNorm2d(16)
self.relu = nn.ReLU()
# self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.layer1 = self._make_layer(block, 16, layers[0]) # 64
self.layer2 = self._make_layer(block, 32, layers[1], stride=2) # 128
self.layer3 = self._make_layer(block, 64, layers[2], stride=2) # 256
self.layer4 = self._make_layer(block, 128, layers[3], stride=2) # 512
self.avgpool = nn.AdaptiveAvgPool2d(1)
# self.stats_pooling = StatsPooling()
self.features = nn.Linear(128*block.expansion, 128)
self.cls_layer = nn.Linear(128, num_classes)
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight,
mode='fan_out',
nonlinearity='relu')
elif isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.AvgPool2d(kernel_size=stride,
stride=stride,
ceil_mode=True,
count_include_pad=False),
nn.Conv2d(self.inplanes,
planes * block.expansion,
kernel_size=1,
stride=1,
bias=False),
nn.BatchNorm2d(planes * block.expansion),
)
layers = []
layers.append(
block(self.inplanes,
planes,
stride,
downsample=downsample,
stype='stage',
baseWidth=self.baseWidth,
scale=self.scale))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(
block(self.inplanes,
planes,
baseWidth=self.baseWidth,
scale=self.scale))
return nn.Sequential(*layers)
def _forward(self, x):
#x = x[:, None, ...]
x = self.conv1(x)
# print('conv1: ', x.size())
x = self.bn1(x)
x = self.relu(x)
# x = self.maxpool(x)
# print('maxpool: ', x.size())
x = self.layer1(x)
# print('layer1: ', x.size())
x = self.layer2(x)
# print('layer2: ', x.size())
x = self.layer3(x)
# print('layer3: ', x.size())
x = self.layer4(x)
# print('layer4: ', x.size())
# x = self.stats_pooling(x)
x = self.avgpool(x)
# print('avgpool:', x.size())
# x = x.view(x.size(0), -1)
x = torch.flatten(x, 1)
# print('flatten: ', x.size())
feats = self.features(x)
out = self.cls_layer(feats)
return F.log_softmax(out, dim=-1), feats
def extract(self, x):
    """Compute the embedding for `x` without running the classifier head.

    Mirrors `_forward` up to the `features` linear layer; used for
    extracting fixed-size feature vectors at inference time.

    Args:
        x: input batch tensor fed to the first convolution.

    Returns:
        Embedding tensor of shape (batch, embedding_dim).
    """
    out = self.conv1(x)
    out = self.relu(self.bn1(out))
    for stage in (self.layer1, self.layer2, self.layer3, self.layer4):
        out = stage(out)
    out = torch.flatten(self.avgpool(out), 1)
    return self.features(out)
# Bind _forward as the public forward so an inheriting class can override
# forward while still reusing the base implementation via _forward.
forward = _forward
# class Res2Net(nn.Module):
# def __init__(self, block, layers, baseWidth=26, scale=4, m=0.35, num_classes=1000, loss='softmax', **kwargs):
# self.inplanes = 16
# super(Res2Net, self).__init__()
# self.loss = loss
# self.baseWidth = baseWidth
# self.scale = scale
# self.conv1 = nn.Sequential(nn.Conv2d(1, 16, 3, 1, 1, bias=False),
# nn.BatchNorm2d(16), nn.ReLU(inplace=True),
# nn.Conv2d(16, 16, 3, 1, 1, bias=False),
# nn.BatchNorm2d(16), nn.ReLU(inplace=True),
# nn.Conv2d(16, 16, 3, 1, 1, bias=False))
# self.bn1 = nn.BatchNorm2d(16)
# self.relu = nn.ReLU()
# # self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
# self.layer1 = self._make_layer(block, 16, layers[0])#64
# self.layer2 = self._make_layer(block, 32, layers[1], stride=2)#128
# self.layer3 = self._make_layer(block, 64, layers[2], stride=2)#256
# self.layer4 = self._make_layer(block, 128, layers[3], stride=2)#512
# self.avgpool = nn.AdaptiveAvgPool2d(1)
# # self.stats_pooling = StatsPooling()
# if self.loss == 'softmax':
# # self.cls_layer = nn.Linear(2*8*128*block.expansion, num_classes)
# self.cls_layer = nn.Linear(128*block.expansion, num_classes)
# else:
# raise NotImplementedError
# for m in self.modules():
# if isinstance(m, nn.Conv2d):
# nn.init.kaiming_normal_(m.weight,
# mode='fan_out',
# nonlinearity='relu')
# elif isinstance(m, nn.BatchNorm2d):
# nn.init.constant_(m.weight, 1)
# nn.init.constant_(m.bias, 0)
# def _make_layer(self, block, planes, blocks, stride=1):
# downsample = None
# if stride != 1 or self.inplanes != planes * block.expansion:
# downsample = nn.Sequential(
# nn.AvgPool2d(kernel_size=stride,
# stride=stride,
# ceil_mode=True,
# count_include_pad=False),
# nn.Conv2d(self.inplanes,
# planes * block.expansion,
# kernel_size=1,
# stride=1,
# bias=False),
# nn.BatchNorm2d(planes * block.expansion),
# )
# layers = []
# layers.append(
# block(self.inplanes,
# planes,
# stride,
# downsample=downsample,
# stype='stage',
# baseWidth=self.baseWidth,
# scale=self.scale))
# self.inplanes = planes * block.expansion
# for i in range(1, blocks):
# layers.append(
# block(self.inplanes,
# planes,
# baseWidth=self.baseWidth,
# scale=self.scale))
# return nn.Sequential(*layers)
# def _forward(self, x):
# #x = x[:, None, ...]
# x = self.conv1(x)
# # print('conv1: ', x.size())
# x = self.bn1(x)
# x = self.relu(x)
# # x = self.maxpool(x)
# # print('maxpool: ', x.size())
# x = self.layer1(x)
# # print('layer1: ', x.size())
# x = self.layer2(x)
# # print('layer2: ', x.size())
# x = self.layer3(x)
# # print('layer3: ', x.size())
# x = self.layer4(x)
# # print('layer4: ', x.size())
# # x = self.stats_pooling(x)
# x = self.avgpool(x)
# # print('avgpool:', x.size())
# # x = x.view(x.size(0), -1)
# x = torch.flatten(x, 1)
# # print('flatten: ', x.size())
# x = self.cls_layer(x)
# return F.log_softmax(x, dim=-1)
# def extract(self, x):
# # x = x[:, None, ...]
# x = self.conv1(x)
# # print('conv1: ', x.size())
# x = self.bn1(x)
# x = self.relu(x)
# x = self.layer1(x)
# # print('layer1: ', x.size())
# x = self.layer2(x)
# # print('layer2: ', x.size())
# x = self.layer3(x)
# # print('layer3: ', x.size())
# x = self.layer4(x)
# # print('layer4: ', x.size())
# x = self.avgpool(x)
# x = torch.flatten(x, 1)
# # print('flatten: ', x.size())
# return x
# # Allow for accessing forward method in a inherited class
# forward = _forward
''' ResNet models'''
def resnet18(**kwargs):
    """Build a ResNet-18 model.

    Keyword arguments (e.g. num_classes) are forwarded to the ResNet
    constructor.
    """
    return ResNet(BasicBlock, [2, 2, 2, 2], **kwargs)
def se_resnet18(**kwargs):
    """Build a ResNet-18 model with squeeze-and-excitation blocks."""
    return ResNet(SEBasicBlock, [2, 2, 2, 2], **kwargs)
def resnet34(**kwargs):
    """Build a ResNet-34 model.

    Keyword arguments (e.g. num_classes) are forwarded to the ResNet
    constructor.
    """
    return ResNet(BasicBlock, [3, 4, 6, 3], **kwargs)
def se_resnet34(**kwargs):
    """Build a ResNet-34 model with squeeze-and-excitation blocks."""
    return ResNet(SEBasicBlock, [3, 4, 6, 3], **kwargs)
def resnet50(**kwargs):
    """Build a ResNet-50 model.

    Keyword arguments (e.g. num_classes) are forwarded to the ResNet
    constructor.
    """
    return ResNet(Bottleneck, [3, 4, 6, 3], **kwargs)
def se_resnet50(**kwargs):
    """Build a ResNet-50 model with squeeze-and-excitation bottlenecks."""
    return ResNet(SEBottleneck, [3, 4, 6, 3], **kwargs)
'''Res2Net models'''
def res2net50_v1b(**kwargs):
    """Build a Res2Net-50_v1b model (the 26w_4s configuration:
    baseWidth=26, scale=4)."""
    return Res2Net(Bottle2neck, [3, 4, 6, 3], baseWidth=26, scale=4, **kwargs)
def se_res2net50_v1b(**kwargs):
    """Build a squeeze-and-excitation Res2Net-50_v1b model (26w_4s:
    baseWidth=26, scale=4)."""
    return Res2Net(SEBottle2neck, [3, 4, 6, 3], baseWidth=26, scale=4, **kwargs)
def res2net50_v1b_14w_8s(**kwargs):
    """Constructs a Res2Net-50_v1b_14w_8s model (baseWidth=14, scale=8).

    Keyword arguments are forwarded to the Res2Net constructor.
    """
    model = Res2Net(Bottle2neck, [3, 4, 6, 3], baseWidth=14, scale=8, **kwargs)
    return model
def se_res2net50_v1b_14w_8s(**kwargs):
    """Constructs a squeeze-and-excitation Res2Net-50_v1b_14w_8s model
    (baseWidth=14, scale=8).

    Keyword arguments are forwarded to the Res2Net constructor.
    """
    model = Res2Net(SEBottle2neck, [3, 4, 6, 3], baseWidth=14, scale=8, **kwargs)
    return model
def res2net50_v1b_26w_8s(**kwargs):
    """Constructs a Res2Net-50_v1b_26w_8s model (baseWidth=26, scale=8).

    Keyword arguments are forwarded to the Res2Net constructor.
    """
    model = Res2Net(Bottle2neck, [3, 4, 6, 3], baseWidth=26, scale=8, **kwargs)
    return model
def se_res2net50_v1b_26w_8s(**kwargs):
    """Constructs a squeeze-and-excitation Res2Net-50_v1b_26w_8s model
    (baseWidth=26, scale=8).

    Keyword arguments are forwarded to the Res2Net constructor.
    """
    model = Res2Net(SEBottle2neck, [3, 4, 6, 3], baseWidth=26, scale=8, **kwargs)
    return model
if __name__ == '__main__':
    # Smoke test: push a random spectrogram-shaped batch through the model.
    images = torch.rand(2, 1, 257, 400)
    label = torch.randint(0, 2, (2,)).long()
    model = se_res2net50_v1b(pretrained=False, num_classes=3)
    #model = model.cuda(0)
    result = model(images)
    # The forward pass may return (log_probs, embeddings); calling .size()
    # on that tuple would raise AttributeError, so unpack defensively.
    output = result[0] if isinstance(result, tuple) else result
    print(images.size())
    print(output.size())
| 34.773585 | 117 | 0.527876 | 1,773 | 14,744 | 4.296672 | 0.104907 | 0.030192 | 0.024416 | 0.038068 | 0.846548 | 0.82607 | 0.8157 | 0.785639 | 0.771069 | 0.747572 | 0 | 0.057166 | 0.330846 | 14,744 | 423 | 118 | 34.855792 | 0.714981 | 0.4199 | 0 | 0.486486 | 0 | 0 | 0.007808 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.102703 | false | 0 | 0.037838 | 0 | 0.248649 | 0.016216 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
6c540915237333411be752d1210906bd88660c89 | 104,660 | py | Python | tests/test_unittest_on_hw.py | iRomi14/drmlib | 0e7da6f9ec7aca3c167db667f1251b33c989bc5b | [
"Apache-2.0"
] | null | null | null | tests/test_unittest_on_hw.py | iRomi14/drmlib | 0e7da6f9ec7aca3c167db667f1251b33c989bc5b | [
"Apache-2.0"
] | null | null | null | tests/test_unittest_on_hw.py | iRomi14/drmlib | 0e7da6f9ec7aca3c167db667f1251b33c989bc5b | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Test node-locked behavior of DRM Library.
"""
import pytest
import gc
from glob import glob
from os import remove, getpid
from os.path import getsize, isfile, dirname, join, realpath
from re import search, finditer, MULTILINE
from time import sleep, time
from json import loads
from datetime import datetime, timedelta
# spdlog-style log line patterns: SHORT omits timestamp/source location,
# LONG includes them.
LOG_FORMAT_SHORT = "[%^%=8l%$] %-6t, %v"
LOG_FORMAT_LONG = "%Y-%m-%d %H:%M:%S.%e - %18s:%-4# [%=8l] %=6t, %v"
# Regexes matching log lines produced with the formats above; the trailing
# '%s' is substituted with the expected message text before matching.
REGEX_FORMAT_SHORT = r'\[\s*(\w+)\s*\] \s*\d+\s*, %s'
REGEX_FORMAT_LONG = r'\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3} - \s*\S+:\d+\s* \[\s*(\w+)\s*\] \s*\d+\s*, %s'
# Every DRM Manager parameter key exercised by the parameter tests below.
_PARAM_LIST = ['license_type',
               'license_duration',
               'num_activators',
               'session_id',
               'session_status',
               'license_status',
               'metered_data',
               'nodelocked_request_file',
               'drm_frequency',
               'drm_license_type',
               'product_info',
               'mailbox_size',
               'token_string',
               'token_validity',
               'token_time_left',
               'log_verbosity',
               'log_format',
               'log_file_verbosity',
               'log_file_format',
               'log_file_path',
               'log_file_type',
               'log_file_rotating_size',
               'log_file_rotating_num',
               'frequency_detection_threshold',
               'frequency_detection_period',
               'custom_field',
               'mailbox_data',
               'ws_retry_period_long',
               'ws_retry_period_short',
               'ws_request_timeout',
               'log_message_level',
               'list_all',
               'dump_all',
               'log_service_verbosity',
               'log_service_format',
               'log_service_path',
               'log_service_type',
               'log_service_rotating_size',
               'log_service_rotating_num',
               'page_ctrlreg',
               'page_vlnvfile',
               'page_licfile',
               'page_tracefile',
               'page_meteringfile',
               'page_mailbox',
               'hw_report',
               'log_service_create',
               'trigger_async_callback',
               'bad_product_id',
               'bad_oauth2_token',
               'log_message']
def ordered_json(obj):
    """Recursively convert a JSON-like value into an order-independent form.

    Dicts become sorted lists of (key, normalized value) tuples and lists
    are sorted after normalizing each element, so two JSON documents can be
    compared regardless of key order or list element order. Scalars are
    returned unchanged.
    """
    if isinstance(obj, dict):
        return sorted((key, ordered_json(value)) for key, value in obj.items())
    if isinstance(obj, list):
        return sorted(ordered_json(item) for item in obj)
    return obj
@pytest.mark.minimum
def test_backward_compatibility(accelize_drm, conf_json, cred_json, async_handler):
    """Test API is not compatible with DRM HDK < 3.0.

    Programs the FPGA with reference designs built against old HDK
    versions (2.x, then 3.0.x) and verifies the DrmManager constructor
    rejects them with a DRMCtlrError. The original FPGA image is always
    restored in the finally clause.
    """
    refdesign = accelize_drm.pytest_ref_designs
    driver = accelize_drm.pytest_fpga_driver[0]
    fpga_image_bkp = driver.fpga_image  # saved so we can restore at the end
    async_cb = async_handler.create()
    drm_manager = None
    try:
        # Program FPGA with old HDK 2.x.x
        hdk = list(filter(lambda x: x.startswith('2.'), refdesign.hdk_versions))[0]
        assert hdk.startswith('2.')
        image_id = refdesign.get_image_id(hdk)
        driver.program_fpga(image_id)
        # Test compatibility issue: construction must fail against HDK 2.x
        with pytest.raises(accelize_drm.exceptions.DRMCtlrError) as excinfo:
            async_cb.reset()
            drm_manager = accelize_drm.DrmManager(
                conf_json.path,
                cred_json.path,
                driver.read_register_callback,
                driver.write_register_callback,
                async_cb.callback
            )
        assert 'Unable to find DRM Controller registers. Please check:' in str(excinfo.value)
        assert 'The DRM offset in your read/write callback implementation' in str(excinfo.value)
        assert 'The compatibility between the SDK and DRM HDK in use' in str(excinfo.value)
        assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMCtlrError.error_code
        async_cb.assert_NoError()
        # Program FPGA with old HDK 3.0.x
        hdk = list(filter(lambda x: x.startswith('3.0'), refdesign.hdk_versions))[0]
        assert hdk.startswith('3.0.')
        image_id = refdesign.get_image_id(hdk)
        driver.program_fpga(image_id)
        # Test compatibility issue: HDK 3.0.x is detected but incompatible
        async_cb = async_handler.create()
        with pytest.raises(accelize_drm.exceptions.DRMCtlrError) as excinfo:
            async_cb.reset()
            drm_manager = accelize_drm.DrmManager(
                conf_json.path,
                cred_json.path,
                driver.read_register_callback,
                driver.write_register_callback,
                async_cb.callback
            )
        assert search(r'This DRM Library version .* is not compatible with the DRM HDK version .*',
                      str(excinfo.value)) is not None
        assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMCtlrError.error_code
        async_cb.assert_NoError()
    finally:
        if drm_manager:
            del drm_manager
        gc.collect()
        # Reprogram FPGA with original image
        driver.program_fpga(fpga_image_bkp)
@pytest.mark.minimum
def test_get_version(accelize_drm):
"""Test the versions of the DRM Lib and its dependencies are well displayed"""
versions = accelize_drm.get_api_version()
assert search(r'\d+\.\d+\.\d+', versions.version) is not None
@pytest.mark.minimum
def test_wrong_drm_controller_address(accelize_drm, conf_json, cred_json, async_handler):
    """Test when a wrong DRM Controller offset is given.

    Shifts the driver's DRM controller base address and checks that the
    DrmManager constructor fails with a DRMCtlrError pointing the user to
    the usual root causes. The original base address is restored in the
    finally clause.
    """
    async_cb = async_handler.create()
    async_cb.reset()
    driver = accelize_drm.pytest_fpga_driver[0]
    ctrl_base_addr_backup = driver._drm_ctrl_base_addr
    # Corrupt the controller offset so register discovery must fail.
    driver._drm_ctrl_base_addr += 0x10000
    try:
        with pytest.raises(accelize_drm.exceptions.DRMCtlrError) as excinfo:
            drm_manager = accelize_drm.DrmManager(
                conf_json.path,
                cred_json.path,
                driver.read_register_callback,
                driver.write_register_callback,
                async_cb.callback
            )
        assert 'Unable to find DRM Controller registers. Please check:' in str(excinfo.value)
        assert 'The DRM offset in your read/write callback implementation' in str(excinfo.value)
        assert 'The compatibility between the SDK and DRM HDK in use' in str(excinfo.value)
    finally:
        driver._drm_ctrl_base_addr = ctrl_base_addr_backup
@pytest.mark.minimum
@pytest.mark.no_parallel
def test_users_entitlements(accelize_drm, conf_json, cred_json, async_handler, ws_admin):
    """
    Test the entitlements for all accounts used in regression.

    For each test account (user-01 .. user-04) this checks both the
    metering (floating) and node-locked license paths, asserting either a
    successful activate/deactivate cycle or the expected "No Entitlement"
    web-service error, depending on what the account is provisioned with.
    """
    driver = accelize_drm.pytest_fpga_driver[0]
    async_cb = async_handler.create()
    print()
    # Test user-01 entitlements: no entitlement at all is expected
    # Request metering license
    async_cb.reset()
    cred_json.set_user('accelize_accelerator_test_01')
    conf_json.reset()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    assert drm_manager.get('license_type') == 'Floating/Metering'
    drmLicType = drm_manager.get('drm_license_type')
    with pytest.raises(accelize_drm.exceptions.DRMWSReqError) as excinfo:
        drm_manager.activate()
    assert "License Web Service error 400" in str(excinfo.value)
    assert "DRM WS request failed" in str(excinfo.value)
    assert search(r'\\"No Entitlement\\" with .+ for accelize_accelerator_test_01@accelize.com', str(excinfo.value))
    assert "User account has no entitlement. Purchase additional licenses via your portal" in str(excinfo.value)
    assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMWSReqError.error_code
    async_cb.assert_NoError()
    # Request nodelock license
    try:
        async_cb.reset()
        cred_json.set_user('accelize_accelerator_test_01')
        conf_json.reset()
        conf_json.addNodelock()
        drm_manager = accelize_drm.DrmManager(
            conf_json.path,
            cred_json.path,
            driver.read_register_callback,
            driver.write_register_callback,
            async_cb.callback
        )
        assert drm_manager.get('license_type') == 'Node-Locked'
        assert drm_manager.get('drm_license_type') == drmLicType
        with pytest.raises(accelize_drm.exceptions.DRMWSReqError) as excinfo:
            drm_manager.activate()
        assert "License Web Service error 400" in str(excinfo.value)
        assert "DRM WS request failed" in str(excinfo.value)
        assert search(r'\\"No Entitlement\\" with .+ for accelize_accelerator_test_01@accelize.com', str(excinfo.value))
        assert "User account has no entitlement. Purchase additional licenses via your portal" in str(excinfo.value)
        assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMWSReqError.error_code
        async_cb.assert_NoError()
    finally:
        accelize_drm.clean_nodelock_env(conf_json=conf_json)
    print('Test user-01 entitlements: PASS')
    # Test user-02 entitlements: metering entitlement only
    # Request metering license
    async_cb.reset()
    cred_json.set_user('accelize_accelerator_test_02')
    conf_json.reset()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    assert drm_manager.get('license_type') == 'Floating/Metering'
    assert drm_manager.get('drm_license_type') == drmLicType
    drm_manager.activate()
    assert drm_manager.get('drm_license_type') == 'Floating/Metering'
    drm_manager.deactivate()
    async_cb.assert_NoError()
    # Request nodelock license: must fail, no node-locked entitlement
    try:
        async_cb.reset()
        conf_json.reset()
        conf_json.addNodelock()
        drm_manager = accelize_drm.DrmManager(
            conf_json.path,
            cred_json.path,
            driver.read_register_callback,
            driver.write_register_callback,
            async_cb.callback
        )
        assert drm_manager.get('license_type') == 'Node-Locked'
        assert drm_manager.get('drm_license_type') == 'Floating/Metering'
        with pytest.raises(accelize_drm.exceptions.DRMWSReqError) as excinfo:
            drm_manager.activate()
        assert "License Web Service error 400" in str(excinfo.value)
        assert "DRM WS request failed" in str(excinfo.value)
        assert search(r'\\"No Entitlement\\" with .+ for accelize_accelerator_test_02@accelize.com', str(excinfo.value))
        assert 'No valid NodeLocked entitlement found for your account' in str(excinfo.value)
        assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMWSReqError.error_code
        async_cb.assert_NoError()
    finally:
        accelize_drm.clean_nodelock_env(conf_json=conf_json)
    print('Test user-02 entitlements: PASS')
    # Test user-03 entitlements: both metering and node-locked entitlements
    # Request metering license
    cred_json.set_user('accelize_accelerator_test_03')
    async_cb.reset()
    conf_json.reset()
    accelize_drm.clean_metering_env(cred_json, ws_admin)
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    assert drm_manager.get('license_type') == 'Floating/Metering'
    assert drm_manager.get('drm_license_type') == 'Floating/Metering'
    drm_manager.activate()
    assert drm_manager.get('drm_license_type') == 'Floating/Metering'
    drm_manager.deactivate()
    async_cb.assert_NoError()
    # Request nodelock license: must succeed for this account
    try:
        async_cb.reset()
        conf_json.reset()
        conf_json.addNodelock()
        accelize_drm.clean_nodelock_env(None, driver, conf_json, cred_json, ws_admin)
        drm_manager = accelize_drm.DrmManager(
            conf_json.path,
            cred_json.path,
            driver.read_register_callback,
            driver.write_register_callback,
            async_cb.callback
        )
        assert drm_manager.get('license_type') == 'Node-Locked'
        # Start application
        assert drm_manager.get('drm_license_type') == 'Idle'
        drm_manager.activate()
        assert drm_manager.get('drm_license_type') == 'Node-Locked'
        drm_manager.deactivate()
        async_cb.assert_NoError()
    finally:
        accelize_drm.clean_nodelock_env(drm_manager, driver, conf_json, cred_json, ws_admin)
    print('Test user-03 entitlements: PASS')
    # Test user-04 entitlements: metering entitlement only
    # Request metering license
    cred_json.set_user('accelize_accelerator_test_04')
    async_cb.reset()
    conf_json.reset()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    drm_manager.set(log_verbosity=1)
    assert drm_manager.get('license_type') == 'Floating/Metering'
    assert drm_manager.get('drm_license_type') == 'Idle'
    drm_manager.activate()
    assert drm_manager.get('drm_license_type') == 'Floating/Metering'
    drm_manager.deactivate()
    async_cb.assert_NoError()
    # Request nodelock license: must fail, no node-locked entitlement
    try:
        async_cb.reset()
        conf_json.reset()
        conf_json.addNodelock()
        drm_manager = accelize_drm.DrmManager(
            conf_json.path,
            cred_json.path,
            driver.read_register_callback,
            driver.write_register_callback,
            async_cb.callback
        )
        assert drm_manager.get('license_type') == 'Node-Locked'
        assert drm_manager.get('drm_license_type') == 'Floating/Metering'
        with pytest.raises(accelize_drm.exceptions.DRMWSReqError) as excinfo:
            drm_manager.activate()
        assert "License Web Service error 400" in str(excinfo.value)
        assert "DRM WS request failed" in str(excinfo.value)
        assert search(r'\\"No Entitlement\\" with .+ for accelize_accelerator_test_04@accelize.com', str(excinfo.value))
        assert 'No valid NodeLocked entitlement found for your account' in str(excinfo.value)
        assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMWSReqError.error_code
        async_cb.assert_NoError()
    finally:
        accelize_drm.clean_nodelock_env(conf_json=conf_json)
    print('Test user-04 entitlements: PASS')
@pytest.mark.minimum
def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cred_json,
                                                     async_handler):
    """Test accesses to parameters.

    For each supported configuration-file parameter: write a new value
    into the JSON configuration, rebuild the DrmManager, and check the
    value is reflected by get(). Also checks invalid combinations
    (retry periods, zero timeout) raise DRMBadArg, and that unsupported
    or empty keys are silently ignored.
    """
    driver = accelize_drm.pytest_fpga_driver[0]
    async_cb = async_handler.create()
    # First get all default value for all tested parameters
    async_cb.reset()
    conf_json.reset()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    orig_log_verbosity = drm_manager.get('log_verbosity')
    orig_log_format = drm_manager.get('log_format')
    orig_frequency_mhz = drm_manager.get('drm_frequency')
    orig_frequency_detect_period = drm_manager.get('frequency_detection_period')
    orig_frequency_detect_threshold = drm_manager.get('frequency_detection_threshold')
    orig_retry_period_long = drm_manager.get('ws_retry_period_long')
    orig_retry_period_short = drm_manager.get('ws_retry_period_short')
    orig_response_timeout = drm_manager.get('ws_request_timeout')
    # Test parameter: log_verbosity
    # Pick a random verbosity different from the current one.
    from random import choice
    async_cb.reset()
    conf_json.reset()
    log_level_choice = list(range(0,6))
    log_level_choice.remove(orig_log_verbosity)
    exp_value = choice(log_level_choice)
    assert exp_value != orig_log_verbosity
    conf_json['settings']['log_verbosity'] = exp_value
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    assert drm_manager.get('log_verbosity') == exp_value
    drm_manager.set(log_verbosity=orig_log_verbosity)
    print("Test parameter 'log_verbosity': PASS")
    # Test parameter: log_format
    async_cb.reset()
    conf_json.reset()
    exp_value = LOG_FORMAT_LONG
    conf_json['settings']['log_format'] = exp_value
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    assert drm_manager.get('log_format') == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'log_format': PASS")
    # Test parameter: log_file_verbosity
    async_cb.reset()
    conf_json.reset()
    exp_value = 0
    conf_json['settings']['log_file_verbosity'] = exp_value
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    assert drm_manager.get('log_file_verbosity') == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'log_file_verbosity': PASS")
    # Test parameter: log_file_format
    async_cb.reset()
    conf_json.reset()
    exp_value = LOG_FORMAT_SHORT
    conf_json['settings']['log_file_format'] = exp_value
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    assert drm_manager.get('log_file_format') == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'log_file_format': PASS")
    # Test parameter: log_file_path
    async_cb.reset()
    conf_json.reset()
    exp_value = realpath("./drmlib.%d.log" % getpid())
    conf_json['settings']['log_file_path'] = exp_value
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    assert drm_manager.get('log_file_path') == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'log_file_path': PASS")
    # Test parameter: log_file_type
    async_cb.reset()
    conf_json.reset()
    exp_value = 1
    conf_json['settings']['log_file_type'] = exp_value
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    assert drm_manager.get('log_file_type') == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'log_file_type': PASS")
    # Test parameter: log_file_rotating_size
    async_cb.reset()
    conf_json.reset()
    exp_value = 1024
    conf_json['settings']['log_file_rotating_size'] = exp_value
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    assert drm_manager.get('log_file_rotating_size') == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'log_file_rotating_size': PASS")
    # Test parameter: log_file_rotating_num
    async_cb.reset()
    conf_json.reset()
    exp_value = 10
    conf_json['settings']['log_file_rotating_num'] = exp_value
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    assert drm_manager.get('log_file_rotating_num') == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'log_file_rotating_num': PASS")
    # Test parameter: log_service_verbosity
    async_cb.reset()
    conf_json.reset()
    exp_value = 0
    conf_json['settings']['log_service_verbosity'] = exp_value
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    assert drm_manager.get('log_service_verbosity') == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'log_service_verbosity': PASS")
    # Test parameter: log_service_format
    async_cb.reset()
    conf_json.reset()
    exp_value = LOG_FORMAT_SHORT
    conf_json['settings']['log_service_format'] = exp_value
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    assert drm_manager.get('log_service_format') == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'log_service_format': PASS")
    # Test parameter: log_service_path
    async_cb.reset()
    conf_json.reset()
    exp_value = realpath("./drmservice.%d.log" % getpid())
    conf_json['settings']['log_service_path'] = exp_value
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    assert drm_manager.get('log_service_path') == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'log_service_path': PASS")
    # Test parameter: log_service_type
    async_cb.reset()
    conf_json.reset()
    exp_value = 1
    conf_json['settings']['log_service_type'] = exp_value
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    assert drm_manager.get('log_service_type') == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'log_service_type': PASS")
    # Test parameter: log_service_rotating_size
    async_cb.reset()
    conf_json.reset()
    exp_value = 1024
    conf_json['settings']['log_service_rotating_size'] = exp_value
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    assert drm_manager.get('log_service_rotating_size') == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'log_service_rotating_size': PASS")
    # Test parameter: log_service_rotating_num
    async_cb.reset()
    conf_json.reset()
    exp_value = 10
    conf_json['settings']['log_service_rotating_num'] = exp_value
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    assert drm_manager.get('log_service_rotating_num') == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'log_service_rotating_num': PASS")
    # Test parameter: drm_frequency (set via the 'drm' section, not 'settings')
    async_cb.reset()
    conf_json.reset()
    exp_value = 2*orig_frequency_mhz
    conf_json['drm']['frequency_mhz'] = exp_value
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    value = drm_manager.get('drm_frequency')
    assert value == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'frequency_mhz': PASS")
    # Test parameter: frequency_detection_period
    async_cb.reset()
    conf_json.reset()
    exp_value = 2*orig_frequency_detect_period
    conf_json['settings'] = {'frequency_detection_period': exp_value}
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    value = drm_manager.get('frequency_detection_period')
    assert value == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'frequency_detection_period': PASS")
    # Test parameter: frequency_detection_threshold
    async_cb.reset()
    conf_json.reset()
    exp_value = 2*orig_frequency_detect_threshold
    conf_json['settings'] = {'frequency_detection_threshold': exp_value}
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    value = drm_manager.get('frequency_detection_threshold')
    assert value == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'frequency_detection_threshold': PASS")
    # Test parameter: ws_retry_period_long
    async_cb.reset()
    conf_json.reset()
    # Check error: ws_retry_period_long must be != ws_retry_period_short
    conf_json['settings'] = {'ws_retry_period_long': orig_retry_period_short}
    conf_json.save()
    with pytest.raises(accelize_drm.exceptions.DRMBadArg) as excinfo:
        accelize_drm.DrmManager(
            conf_json.path,
            cred_json.path,
            driver.read_register_callback,
            driver.write_register_callback,
            async_cb.callback
        )
    assert search(r'ws_retry_period_long .+ must be greater than ws_retry_period_short .+',
                  str(excinfo.value)) is not None
    err_code = async_handler.get_error_code(str(excinfo.value))
    assert err_code == accelize_drm.exceptions.DRMBadArg.error_code
    async_cb.assert_NoError()
    async_cb.reset()
    conf_json.reset()
    exp_value = orig_retry_period_long + 1
    conf_json['settings'] = {'ws_retry_period_long': exp_value}
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    value = drm_manager.get('ws_retry_period_long')
    assert value == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'ws_retry_period_long': PASS")
    # Test parameter: ws_retry_period_short
    async_cb.reset()
    conf_json.reset()
    # Check error: ws_retry_period_long must be != ws_retry_period_short
    conf_json['settings'] = {'ws_retry_period_short': orig_retry_period_long}
    conf_json.save()
    with pytest.raises(accelize_drm.exceptions.DRMBadArg) as excinfo:
        accelize_drm.DrmManager(
            conf_json.path,
            cred_json.path,
            driver.read_register_callback,
            driver.write_register_callback,
            async_cb.callback
        )
    assert search(r'ws_retry_period_long .+ must be greater than ws_retry_period_short .+',
                  str(excinfo.value)) is not None
    err_code = async_handler.get_error_code(str(excinfo.value))
    assert err_code == accelize_drm.exceptions.DRMBadArg.error_code
    async_cb.assert_NoError()
    async_cb.reset()
    conf_json.reset()
    exp_value = orig_retry_period_short + 1
    conf_json['settings'] = {'ws_retry_period_short': exp_value}
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    value = drm_manager.get('ws_retry_period_short')
    assert value == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'ws_retry_period_short': PASS")
    # Test parameter: ws_request_timeout (zero is rejected)
    async_cb.reset()
    conf_json.reset()
    conf_json['settings'] = {'ws_request_timeout': 0}
    conf_json.save()
    with pytest.raises(accelize_drm.exceptions.DRMBadArg) as excinfo:
        accelize_drm.DrmManager(
            conf_json.path,
            cred_json.path,
            driver.read_register_callback,
            driver.write_register_callback,
            async_cb.callback
        )
    assert "ws_request_timeout must not be 0" in str(excinfo.value)
    err_code = async_handler.get_error_code(str(excinfo.value))
    assert err_code == accelize_drm.exceptions.DRMBadArg.error_code
    async_cb.reset()
    conf_json.reset()
    exp_value = 2*orig_response_timeout
    conf_json['settings'] = {'ws_request_timeout': exp_value}
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    value = drm_manager.get('ws_request_timeout')
    assert value == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'ws_request_timeout': PASS")
    # Test unsupported parameter: must be ignored without error
    async_cb.reset()
    conf_json.reset()
    conf_json['settings'] = {'unsupported_param': 10.2}
    conf_json.save()
    accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    async_cb.assert_NoError()
    print("Test unsupported parameter: PASS")
    # Test empty parameter: must be ignored without error
    async_cb.reset()
    conf_json.reset()
    conf_json['settings'] = {'': 10.2}
    conf_json.save()
    accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    async_cb.assert_NoError()
    print("Test empty parameter: PASS")
@pytest.mark.aws
def test_c_unittests(accelize_drm, exec_func):
    """Test errors when missing arguments are given to DRM Controller Constructor"""
    driver = accelize_drm.pytest_fpga_driver[0]
    exec_lib = exec_func.load('unittests', driver._fpga_slot_id)
    bad_arg_code = accelize_drm.exceptions.DRMBadArg.error_code
    bad_format_code = accelize_drm.exceptions.DRMBadFormat.error_code
    # Each scenario: (unit-test name, expected return code, expected stdout excerpt or None)
    scenarios = (
        # Null callbacks passed to the constructor
        ('test_null_read_callback', bad_arg_code,
         'Read register callback function must not be NULL'),
        ('test_null_write_callback', bad_arg_code,
         'Write register callback function must not be NULL'),
        ('test_null_error_callback', bad_arg_code,
         'Asynchronous error callback function must not be NULL'),
        # Nominal get/set typing exercise: must succeed with no output check
        ('test_types_of_get_and_set_functions', 0, None),
        # Out-of-range parameter ID on get
        ('test_get_function_out_of_range', bad_arg_code,
         'Cannot find parameter with ID: '),
        # Malformed and empty JSON inputs
        ('test_get_json_string_with_bad_format', bad_format_code,
         'Cannot parse JSON string because'),
        ('test_get_json_string_with_empty_string', bad_format_code,
         'Cannot parse an empty JSON string'),
    )
    for unit_name, expected_code, expected_msg in scenarios:
        exec_lib.run(unit_name)
        assert exec_lib.returncode == expected_code
        if expected_msg is not None:
            assert expected_msg in exec_lib.stdout
        # No asynchronous error must have been reported
        assert exec_lib.asyncmsg is None
def test_parameter_key_modification_with_get_set(accelize_drm, conf_json, cred_json, async_handler,
                                                 ws_admin):
    """Test accesses to parameter.

    Exercises every readable/writable DRM parameter through get()/set(),
    restoring each original value after the check so the tests stay independent.
    """
    driver = accelize_drm.pytest_fpga_driver[0]
    async_cb = async_handler.create()
    activators = accelize_drm.pytest_fpga_activators[0]
    activators.reset_coin()
    print()
    # Test with a nodelocked user
    # Test parameter: license_type and drm_license_type in nodelocked and nodelocked_request_file
    # => Done in test_nodelock_mode_on_hw
    # Test with a floating/metered user
    async_cb.reset()
    cred_json.set_user('accelize_accelerator_test_02')
    conf_json.reset()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    # Test when parameter is a list
    value = drm_manager.get('num_activators', 'license_type')
    assert isinstance(value, dict)
    assert value['num_activators'] == activators.length
    assert value['license_type'] == 'Floating/Metering'
    async_cb.assert_NoError()
    print("Test when parameter is a list: PASS")
    # Test parameter: log_verbosity
    orig_val = drm_manager.get('log_verbosity')
    exp_val = 1 if orig_val == 0 else 0
    drm_manager.set(log_verbosity=exp_val)
    assert drm_manager.get('log_verbosity') == exp_val
    drm_manager.set(log_verbosity=orig_val)
    async_cb.assert_NoError()
    print("Test parameter 'log_verbosity': PASS")
    # Test parameter: log_format
    orig_val = drm_manager.get('log_format')
    exp_val = LOG_FORMAT_LONG
    drm_manager.set(log_format=exp_val)
    assert drm_manager.get('log_format') == exp_val
    drm_manager.set(log_format=orig_val)
    async_cb.assert_NoError()
    print("Test parameter 'log_format': PASS")
    # Test parameter: log_file_verbosity
    orig_val = drm_manager.get('log_file_verbosity')
    exp_val = 1 if orig_val == 0 else 0
    drm_manager.set(log_file_verbosity=exp_val)
    assert drm_manager.get('log_file_verbosity') == exp_val
    drm_manager.set(log_file_verbosity=orig_val)
    async_cb.assert_NoError()
    print("Test parameter 'log_file_verbosity': PASS")
    # Test parameter: log_file_format
    orig_val = drm_manager.get('log_file_format')
    assert orig_val == LOG_FORMAT_LONG
    exp_val = LOG_FORMAT_SHORT
    drm_manager.set(log_file_format=exp_val)
    assert drm_manager.get('log_file_format') == exp_val
    drm_manager.set(log_file_format=orig_val)
    async_cb.assert_NoError()
    print("Test parameter 'log_file_format': PASS")
    # Test parameter: log_file_path, log_file_type, log_file_rotating_size, log_file_rotating_num
    # => Cannot be written programmatically
    # Test parameter: log_service_verbosity
    orig_val = drm_manager.get('log_service_verbosity')
    exp_val = 1 if orig_val == 0 else 0
    drm_manager.set(log_service_verbosity=exp_val)
    assert drm_manager.get('log_service_verbosity') == exp_val
    drm_manager.set(log_service_verbosity=orig_val)
    async_cb.assert_NoError()
    print("Test parameter 'log_service_verbosity': PASS")
    # Test parameter: log_service_format
    orig_val = drm_manager.get('log_service_format')
    assert orig_val == LOG_FORMAT_LONG
    exp_val = LOG_FORMAT_SHORT
    drm_manager.set(log_service_format=exp_val)
    assert drm_manager.get('log_service_format') == exp_val
    drm_manager.set(log_service_format=orig_val)
    async_cb.assert_NoError()
    print("Test parameter 'log_service_format': PASS")
    # Test parameter: log_service_path
    orig_val = drm_manager.get('log_service_path')
    assert search(r'accelize_drmservice_\d+\.log', orig_val)
    exp_path = 'test.log'
    drm_manager.set(log_service_path=exp_path)
    assert drm_manager.get('log_service_path') == exp_path
    drm_manager.set(log_service_path=orig_val)
    async_cb.assert_NoError()
    print("Test parameter 'log_service_path': PASS")
    # Test parameter: log_service_type
    orig_val = drm_manager.get('log_service_type')
    assert orig_val == 0
    exp_type = 2
    drm_manager.set(log_service_type=exp_type)
    assert drm_manager.get('log_service_type') == exp_type
    drm_manager.set(log_service_type=orig_val)
    async_cb.assert_NoError()
    print("Test parameter 'log_service_type': PASS")
    # Test parameter: log_service_rotating_size
    orig_val = drm_manager.get('log_service_rotating_size')
    assert orig_val == 100*1024*1024
    exp_type = 10*1024
    drm_manager.set(log_service_rotating_size=exp_type)
    assert drm_manager.get('log_service_rotating_size') == exp_type
    drm_manager.set(log_service_rotating_size=orig_val)
    async_cb.assert_NoError()
    print("Test parameter 'log_service_rotating_size': PASS")
    # Test parameter: log_service_rotating_num
    orig_val = drm_manager.get('log_service_rotating_num')
    assert orig_val == 3
    exp_type = 5
    drm_manager.set(log_service_rotating_num=exp_type)
    assert drm_manager.get('log_service_rotating_num') == exp_type
    drm_manager.set(log_service_rotating_num=orig_val)
    async_cb.assert_NoError()
    print("Test parameter 'log_service_rotating_num': PASS")
    # Test parameter: license_type in metering
    assert drm_manager.get('license_type') == 'Floating/Metering'
    async_cb.assert_NoError()
    print("Test parameter 'license_type' in Metered: PASS")
    # Test parameter: drm_license_type, license_duration
    drm_manager.activate()
    assert drm_manager.get('drm_license_type') == 'Floating/Metering'
    assert drm_manager.get('license_duration') != 0
    drm_manager.deactivate()
    async_cb.assert_NoError()
    print("Test parameter 'drm_license_type', 'license_duration': PASS")
    # Test parameter: num_activators
    nb_activator = drm_manager.get('num_activators')
    assert nb_activator == activators.length, 'Unexpected number of activators'
    print("Test parameter 'num_activators': PASS")
    # Test parameter: session_id
    drm_manager.activate()
    sessionId = drm_manager.get('session_id')
    assert len(sessionId) == 16, 'Unexpected length of session ID'
    drm_manager.deactivate()
    async_cb.assert_NoError()
    print("Test parameter 'session_id': PASS")
    # Test parameter: session_status
    session_state = drm_manager.get('session_status')
    assert not session_state
    drm_manager.activate()
    session_state = drm_manager.get('session_status')
    assert session_state
    drm_manager.deactivate()
    session_state = drm_manager.get('session_status')
    assert not session_state
    async_cb.assert_NoError()
    print("Test parameter 'session_status': PASS")
    # Test parameter: license_status
    assert not drm_manager.get('license_status')
    drm_manager.activate()
    assert drm_manager.get('license_status')
    drm_manager.deactivate()
    assert not drm_manager.get('license_status')
    async_cb.assert_NoError()
    print("Test parameter 'license_status': PASS")
    # Test parameter: metered_data
    drm_manager.activate()
    activators[0].generate_coin(10)
    activators[0].check_coin(drm_manager.get('metered_data'))
    async_cb.assert_NoError()
    drm_manager.deactivate()
    activators[0].reset_coin()
    print("Test parameter 'metered_data': PASS")
    # Test parameter: page_ctrlreg
    page = drm_manager.get('page_ctrlreg')
    assert search(r'Register\s+@0x00:\s+0x00000000', page), 'Unexpected content of page_ctrlreg'
    print("Test parameter 'page_ctrlreg': PASS")
    # Test parameter: page_vlnvfile
    page = drm_manager.get('page_vlnvfile')
    assert search(r'Register\s+@0x00:\s+0x00000001', page), 'Unexpected content of page_vlnvfile'
    print("Test parameter 'page_vlnvfile': PASS")
    # Test parameter: page_licfile
    page = drm_manager.get('page_licfile')
    assert search(r'Register\s+@0x00:\s+0x00000002', page), 'Unexpected content of page_licfile'
    print("Test parameter 'page_licfile': PASS")
    # Test parameter: page_tracefile
    page = drm_manager.get('page_tracefile')
    assert search(r'Register\s+@0x00:\s+0x00000003', page), 'Unexpected content of page_tracefile'
    print("Test parameter 'page_tracefile': PASS")
    # Test parameter: page_meteringfile
    page = drm_manager.get('page_meteringfile')
    assert search(r'Register\s+@0x00:\s+0x00000004', page), 'Unexpected content of page_meteringfile'
    print("Test parameter 'page_meteringfile': PASS")
    # Test parameter: page_mailbox
    page = drm_manager.get('page_mailbox')
    assert search(r'Register\s+@0x00:\s+0x00000005', page), 'Unexpected content of page_mailbox'
    print("Test parameter 'page_mailbox': PASS")
    # Test parameter: hw_report
    hw_report = drm_manager.get('hw_report')
    nb_lines = len(tuple(finditer(r'\n', hw_report)))
    assert nb_lines > 10, 'Unexpected HW report content'
    print("Test parameter 'hw_report': PASS")
    # Test parameter: frequency_detection_threshold
    orig_freq_threhsold = drm_manager.get('frequency_detection_threshold')    # Save original threshold
    exp_freq_threhsold = orig_freq_threhsold * 2
    drm_manager.set(frequency_detection_threshold=exp_freq_threhsold)
    new_freq_threhsold = drm_manager.get('frequency_detection_threshold')
    assert new_freq_threhsold == exp_freq_threhsold, 'Unexpected frequency dectection threshold percentage'
    drm_manager.set(frequency_detection_threshold=orig_freq_threhsold)    # Restore original threshold
    print("Test parameter 'frequency_detection_threshold': PASS")
    # Test parameter: frequency_detection_period
    orig_freq_period = drm_manager.get('frequency_detection_period')    # Save original period
    exp_freq_period = orig_freq_period * 2
    drm_manager.set(frequency_detection_period=exp_freq_period)
    new_freq_period = drm_manager.get('frequency_detection_period')
    assert new_freq_period == exp_freq_period, 'Unexpected frequency dectection period'
    drm_manager.set(frequency_detection_period=orig_freq_period)    # Restore original period
    print("Test parameter 'frequency_detection_period': PASS")
    # Test parameter: drm_frequency
    freq_period = drm_manager.get('frequency_detection_period')    # Save original period
    drm_manager.activate()
    # Wait for 2 detection periods so the frequency measurement has settled
    sleep(2.0*freq_period/1000)
    freq_drm = drm_manager.get('drm_frequency')
    drm_manager.deactivate()
    assert freq_drm == 125, 'Unexpected frequency gap threshold'
    print("Test parameter 'drm_frequency': PASS")
    # Test parameter: product_info
    from pprint import pformat
    product_id = pformat(drm_manager.get('product_info'))
    exp_product_id = pformat(activators.product_id)
    assert product_id == exp_product_id, 'Unexpected product ID'
    print("Test parameter 'product_info': PASS")
    # Test parameter: mailbox_size
    mailbox_size = drm_manager.get('mailbox_size')
    assert mailbox_size == 14, 'Unexpected Mailbox size'
    print("Test parameter 'mailbox_size': PASS")
    # Test parameter: token_string, token_validity and token_time_left
    drm_manager.activate()
    token_time_left = drm_manager.get('token_time_left')
    if token_time_left < 15:
        # Restart the session to renew the OAuth2 token before it expires mid-test
        drm_manager.deactivate()
        sleep(16)
        drm_manager.activate()
    token_string = drm_manager.get('token_string')
    assert len(token_string) > 0
    token_validity = drm_manager.get('token_validity')
    assert token_validity > 15
    token_time_left = drm_manager.get('token_time_left')
    sleep(2)
    # token_time_left must decrease by roughly the slept amount
    assert 2 <= token_time_left - drm_manager.get('token_time_left') <= 3
    assert drm_manager.get('token_validity') == token_validity
    assert token_string == drm_manager.get('token_string')
    drm_manager.deactivate()
    print("Test parameter 'token_string', 'token_validity' and 'token_time_left': PASS")
    # Test parameter: list_all
    list_param = drm_manager.get('list_all')
    assert isinstance(list_param, list)
    assert len(list_param) == len(_PARAM_LIST)
    assert all(key in _PARAM_LIST for key in list_param)
    print("Test parameter 'list_all': PASS")
    # Test parameter: dump_all
    dump_param = drm_manager.get('dump_all')
    assert isinstance(dump_param, dict)
    assert len(dump_param) == _PARAM_LIST.index('dump_all')
    assert all(key in _PARAM_LIST for key in dump_param.keys())
    print("Test parameter 'dump_all': PASS")
    # Test parameter: custom_field
    from random import randint
    val_exp = randint(0, 0xFFFFFFFF)
    val_init = drm_manager.get('custom_field')
    assert val_exp != val_init
    drm_manager.set(custom_field=val_exp)
    assert drm_manager.get('custom_field') == val_exp
    print("Test parameter 'custom_field': PASS")
    # Test parameter: mailbox_data
    from random import sample
    mailbox_size = drm_manager.get('mailbox_size')
    wr_msg = sample(range(0xFFFFFFFF), mailbox_size)
    drm_manager.set(mailbox_data=wr_msg)
    rd_msg = drm_manager.get('mailbox_data')
    assert type(rd_msg) == type(wr_msg) == list
    assert rd_msg == wr_msg
    print("Test parameter 'mailbox_data': PASS")
    # Test parameter: ws_retry_period_long
    orig_retry_period_long = drm_manager.get('ws_retry_period_long')    # Save original value
    orig_retry_period_short = drm_manager.get('ws_retry_period_short')
    with pytest.raises(accelize_drm.exceptions.DRMBadArg) as excinfo:
        drm_manager.set(ws_retry_period_long=orig_retry_period_short)
    assert search(r'ws_retry_period_long .+ must be greater than ws_retry_period_short .+',
                  str(excinfo.value)) is not None
    err_code = async_handler.get_error_code(str(excinfo.value))
    assert err_code == accelize_drm.exceptions.DRMBadArg.error_code
    exp_value = orig_retry_period_long + 1
    drm_manager.set(ws_retry_period_long=exp_value)
    assert drm_manager.get('ws_retry_period_long') == exp_value
    drm_manager.set(ws_retry_period_long=orig_retry_period_long)    # Restore original value
    # Fixed: the bound method itself was mistakenly passed as the message argument
    async_cb.assert_NoError()
    print("Test parameter 'ws_retry_period_long': PASS")
    # Test parameter: ws_retry_period_short
    orig_retry_period_short = drm_manager.get('ws_retry_period_short')    # Save original value
    orig_retry_period_long = drm_manager.get('ws_retry_period_long')
    with pytest.raises(accelize_drm.exceptions.DRMBadArg) as excinfo:
        drm_manager.set(ws_retry_period_short=orig_retry_period_long)
    assert search(r'ws_retry_period_long .+ must be greater than ws_retry_period_short .+',
                  str(excinfo.value)) is not None
    err_code = async_handler.get_error_code(str(excinfo.value))
    assert err_code == accelize_drm.exceptions.DRMBadArg.error_code
    exp_value = orig_retry_period_short + 1
    drm_manager.set(ws_retry_period_short=exp_value)
    assert drm_manager.get('ws_retry_period_short') == exp_value
    drm_manager.set(ws_retry_period_short=orig_retry_period_short)    # Restore original value
    # Fixed: the bound method itself was mistakenly passed as the message argument
    async_cb.assert_NoError()
    print("Test parameter 'ws_retry_period_short': PASS")
    # Test parameter: ws_request_timeout
    orig_response_timeout = drm_manager.get('ws_request_timeout')    # Save original value
    with pytest.raises(accelize_drm.exceptions.DRMBadArg) as excinfo:
        drm_manager.set(ws_request_timeout=0)
    assert "ws_request_timeout must not be 0" in str(excinfo.value)
    err_code = async_handler.get_error_code(str(excinfo.value))
    assert err_code == accelize_drm.exceptions.DRMBadArg.error_code
    exp_value = orig_response_timeout + 100
    drm_manager.set(ws_request_timeout=exp_value)
    assert drm_manager.get('ws_request_timeout') == exp_value
    drm_manager.set(ws_request_timeout=orig_response_timeout)    # Restore original value
    # Fixed: the bound method itself was mistakenly passed as the message argument
    async_cb.assert_NoError()
    print("Test parameter 'ws_request_timeout': PASS")
    # Test parameter: log_message_level
    level = drm_manager.get('log_message_level')
    exp_level = 5 if level != 5 else 4
    drm_manager.set(log_message_level=exp_level)
    assert drm_manager.get('log_message_level') == exp_level
    async_cb.assert_NoError()
    with pytest.raises(accelize_drm.exceptions.DRMBadArg) as excinfo:
        drm_manager.set(log_message_level=100)
    assert 'log_message_level (100) is out of range [0:6]' in str(excinfo.value)
    assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMBadArg.error_code
    async_cb.assert_NoError()
    print("Test parameter 'log_message_level': PASS")
    # Test parameter: trigger_async_callback
    drm_manager.activate()
    test_message = 'Test message'
    drm_manager.set(trigger_async_callback=test_message)
    assert async_cb.was_called, 'Asynchronous callback has not been called.'
    assert async_cb.message is not None, 'Asynchronous callback did not report any message'
    assert test_message in async_cb.message, 'Asynchronous callback has not received the correct message'
    assert async_cb.errcode == accelize_drm.exceptions.DRMDebug.error_code, \
        'Asynchronous callback has not received the correct error code'
    drm_manager.deactivate()
    async_cb.reset()
    print("Test parameter 'trigger_async_callback': PASS")
    # Test parameter: bad_product_id
    # => Skipped: Tested in test_configuration_file_bad_product_id
    # Test parameter: bad_oauth2_token
    # => Skipped: Tested in test_configuration_file_with_bad_authentication
    # Test parameter: ParameterKeyCount
    assert drm_manager.get('ParameterKeyCount') == len(_PARAM_LIST)
    async_cb.assert_NoError()
    print("Test parameter 'ParameterKeyCount': PASS")
    # Test parameter: log_message
    from time import time
    from os.path import isfile
    async_cb.reset()
    conf_json.reset()
    logpath = realpath("./drmlib.%d.log" % getpid())
    verbosity = 5
    conf_json['settings']['log_file_verbosity'] = verbosity
    conf_json['settings']['log_file_type'] = 1
    conf_json['settings']['log_file_path'] = logpath
    conf_json.save()
    try:
        drm_manager = accelize_drm.DrmManager(
            conf_json.path,
            cred_json.path,
            driver.read_register_callback,
            driver.write_register_callback,
            async_cb.callback
        )
        drm_manager.set(log_message_level=verbosity)
        msg = 'This line should appear in log file'
        drm_manager.set(log_message=msg)
        # Destroy the manager so the log file is flushed and closed
        del drm_manager
        gc.collect()
        assert isfile(logpath)
        with open(logpath, 'rt') as f:
            log_content = f.read()
        assert "critical" in log_content
        assert msg in log_content
    finally:
        # Always remove the temporary log file, even on failure
        if isfile(logpath):
            remove(logpath)
    async_cb.assert_NoError()
    print("Test parameter 'log_message': PASS")
def test_configuration_file_with_bad_authentication(accelize_drm, conf_json, cred_json,
                                                    async_handler):
    """Test errors when bad authentication parameters are provided to
    DRM Manager Constructor or Web Service."""
    driver = accelize_drm.pytest_fpga_driver[0]
    async_cb = async_handler.create()
    # Keep a handle at function scope so the finally clause can release the session
    drm_manager = None
    print()
    try:
        # Test when authentication url in configuration file is wrong
        async_cb.reset()
        cred_json.set_user('accelize_accelerator_test_02')
        conf_json.reset()
        conf_json['licensing']['url'] = "http://accelize.com"
        # Short timeouts so the expected web-service failure is reported quickly
        conf_json['settings']['ws_request_timeout'] = 5
        conf_json['settings']['ws_retry_period_short'] = 1
        conf_json.save()
        assert conf_json['licensing']['url'] == "http://accelize.com"
        drm_manager = accelize_drm.DrmManager(
            conf_json.path,
            cred_json.path,
            driver.read_register_callback,
            driver.write_register_callback,
            async_cb.callback
        )
        with pytest.raises(accelize_drm.exceptions.DRMWSReqError) as excinfo:
            drm_manager.activate()
        assert "OAuth2 Web Service error 404" in str(excinfo.value)
        assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMWSReqError.error_code
        async_cb.assert_NoError()
        print('Test when authentication url in configuration file is wrong: PASS')
        # Test when client_id is wrong
        async_cb.reset()
        conf_json.reset()
        cred_json.set_user('accelize_accelerator_test_02')
        orig_client_id = cred_json.client_id
        # Corrupt the first character of the client_id to make it invalid
        replaced_char = 'A' if orig_client_id[0] != 'A' else 'B'
        cred_json.client_id = orig_client_id.replace(orig_client_id[0], replaced_char)
        assert orig_client_id != cred_json.client_id
        cred_json.save()
        drm_manager = accelize_drm.DrmManager(
            conf_json.path,
            cred_json.path,
            driver.read_register_callback,
            driver.write_register_callback,
            async_cb.callback
        )
        with pytest.raises(accelize_drm.exceptions.DRMWSReqError) as excinfo:
            drm_manager.activate()
        assert "OAuth2 Web Service error 401" in str(excinfo.value)
        assert "invalid_client" in str(excinfo.value)
        assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMWSReqError.error_code
        async_cb.assert_NoError()
        print('Test when client_id is wrong: PASS')
        # Test when client_secret is wrong
        async_cb.reset()
        conf_json.reset()
        cred_json.set_user('accelize_accelerator_test_02')
        orig_client_secret = cred_json.client_secret
        # Corrupt the first character of the client_secret to make it invalid
        replaced_char = 'A' if orig_client_secret[0] != 'A' else 'B'
        cred_json.client_secret = orig_client_secret.replace(orig_client_secret[0], replaced_char)
        cred_json.save()
        assert orig_client_secret != cred_json.client_secret
        drm_manager = accelize_drm.DrmManager(
            conf_json.path,
            cred_json.path,
            driver.read_register_callback,
            driver.write_register_callback,
            async_cb.callback
        )
        with pytest.raises(accelize_drm.exceptions.DRMWSReqError) as excinfo:
            drm_manager.activate()
        assert "OAuth2 Web Service error 401" in str(excinfo.value)
        assert "invalid_client" in str(excinfo.value)
        assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMWSReqError.error_code
        async_cb.assert_NoError()
        print('Test when client_secret is wrong: PASS')
        # Test when token is wrong
        async_cb.reset()
        conf_json.reset()
        drm_manager = accelize_drm.DrmManager(
            conf_json.path,
            cred_json.path,
            driver.read_register_callback,
            driver.write_register_callback,
            async_cb.callback
        )
        # bad_oauth2_token is a debug parameter forcing the library to use a corrupted token
        drm_manager.set(bad_oauth2_token=1)
        with pytest.raises(accelize_drm.exceptions.DRMWSReqError) as excinfo:
            drm_manager.activate()
        assert "Authentication credentials" in str(excinfo.value)
        assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMWSReqError.error_code
        async_cb.assert_NoError()
        print('Test when token is wrong: PASS')
        # Test token validity after deactivate
        async_cb.reset()
        conf_json.reset()
        cred_json.set_user('accelize_accelerator_test_02')
        drm_manager = accelize_drm.DrmManager(
            conf_json.path,
            cred_json.path,
            driver.read_register_callback,
            driver.write_register_callback,
            async_cb.callback
        )
        drm_manager.activate()
        token_time_left = drm_manager.get('token_time_left')
        if token_time_left < 15:
            drm_manager.deactivate()
            # Wait expiration of current oauth2 token before starting test
            sleep(16)
            drm_manager.activate()
        token_validity = drm_manager.get('token_validity')
        assert token_validity > 15
        exp_token_string = drm_manager.get('token_string')
        # The token must survive deactivate/activate cycles while still valid
        drm_manager.deactivate()
        token_string = drm_manager.get('token_string')
        assert token_string == exp_token_string
        drm_manager.activate()
        token_string = drm_manager.get('token_string')
        assert token_string == exp_token_string
        drm_manager.deactivate()
        token_string = drm_manager.get('token_string')
        assert token_string == exp_token_string
        async_cb.assert_NoError()
        print('Test token validity after deactivate: PASS')
        # # Test when token has expired
        # async_cb.reset()
        # conf_json.reset()
        # drm_manager = accelize_drm.DrmManager(
        #     conf_json.path,
        #     cred_json.path,
        #     driver.read_register_callback,
        #     driver.write_register_callback,
        #     async_cb.callback
        # )
        # drm_manager.activate()
        # start = datetime.now()
        # drm_manager.deactivate()
        # exp_token_string = drm_manager.get('token_string')
        # token_validity = drm_manager.get('token_validity')
        # token_expired_in = drm_manager.get('token_expired_in')
        # exp_token_validity = 10
        # drm_manager.set(token_validity=exp_token_validity)
        # token_validity = drm_manager.get('token_validity')
        # assert token_validity == exp_token_validity
        # token_expired_in = drm_manager.get('token_expired_in')
        # ts = drm_manager.get('token_string')
        # assert token_expired_in > token_validity/3
        # assert token_expired_in > 3
        # # Wait right before the token expires and verifiy it is the same
        # wait_period = start + timedelta(seconds=token_expired_in-3) - datetime.now()
        # sleep(wait_period.total_seconds())
        # drm_manager.activate()
        # drm_manager.deactivate()
        # token_string = drm_manager.get('token_string')
        # assert token_string == exp_token_string
        # sleep(4)
        # drm_manager.activate()
        # drm_manager.deactivate()
        # token_string = drm_manager.get('token_string')
        # assert token_string != exp_token_string
        # async_cb.assert_NoError()
        # print('Test when token has expired: PASS')
    finally:
        # Always release the license session, even if an assertion failed above
        if drm_manager:
            drm_manager.deactivate()
def test_configuration_file_with_bad_frequency(accelize_drm, conf_json, cred_json, async_handler):
    """Test errors when wrong frequency is given to DRM Controller Constructor"""
    from math import ceil, floor
    driver = accelize_drm.pytest_fpga_driver[0]
    async_cb = async_handler.create()
    cred_json.set_user('accelize_accelerator_test_02')
    # Before any test, get the real DRM frequency and the gap threshold
    async_cb.reset()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    freq_threshold = drm_manager.get('frequency_detection_threshold')
    freq_period = drm_manager.get('frequency_detection_period')
    drm_manager.activate()
    # Let at least 2 detection periods elapse so the measured frequency is valid
    sleep(2.0*freq_period/1000)
    frequency = drm_manager.get('drm_frequency')
    drm_manager.deactivate()
    # Test no error is returned by asynchronous error callback when the frequency
    # in configuration file differs from the DRM frequency by less than threshold
    async_cb.reset()
    conf_json.reset()
    # Configured frequency just below the detection threshold
    conf_json['drm']['frequency_mhz'] = int(floor(frequency * (100.0 + freq_threshold - 1) / 100.0))
    assert abs(conf_json['drm']['frequency_mhz'] - frequency) * 100.0 / frequency < freq_threshold
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    drm_manager.activate()
    sleep(2.0*freq_period/1000)
    drm_manager.deactivate()
    async_cb.assert_NoError('freq_period=%d ms, freq_threshold=%d%%, frequency=%d MHz'
                            % (freq_period, freq_threshold, frequency))
    print('Test frequency mismatch < threshold: PASS')
    # Test a BADFrequency error is returned by asynchronous error callback when the frequency
    # in configuration file differs from the DRM frequency by more than 2%
    async_cb.reset()
    conf_json.reset()
    # Configured frequency just above the detection threshold
    conf_json['drm']['frequency_mhz'] = int(ceil(frequency * (100.0 + freq_threshold + 1) / 100.0))
    assert abs(conf_json['drm']['frequency_mhz'] - frequency) * 100.0 / frequency > freq_threshold
    conf_json.save()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    drm_manager.activate()
    sleep(1)
    drm_manager.deactivate()
    assert async_cb.was_called, 'Asynchronous callback NOT called'
    assert async_cb.message is not None, 'Asynchronous callback did not report any message'
    assert search(r'DRM frequency .* differs from .* configuration file',
                  async_cb.message) is not None, 'Unexpected message reported by asynchronous callback'
    assert async_cb.errcode == accelize_drm.exceptions.DRMBadFrequency.error_code, \
        'Unexpected error code reported by asynchronous callback'
    print('Test frequency mismatch > threshold: PASS')
    # Test web service detects a frequency underflow
    async_cb.reset()
    conf_json.reset()
    conf_json['drm']['frequency_mhz'] = 40
    conf_json.save()
    assert conf_json['drm']['frequency_mhz'] == 40
    # NOTE(review): no asynchronous callback is passed to this constructor,
    # unlike the ones above — presumably intentional since the error is
    # raised synchronously by activate(); confirm with the DrmManager API.
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback
    )
    with pytest.raises(accelize_drm.exceptions.DRMWSReqError) as excinfo:
        drm_manager.activate()
    assert 'License Web Service error 400' in str(excinfo.value)
    assert 'Ensure this value is greater than or equal to 50' in str(excinfo.value)
    err_code = async_handler.get_error_code(str(excinfo.value))
    assert err_code == accelize_drm.exceptions.DRMWSReqError.error_code
    print('Test frequency underflow: PASS')
    # Test web service detects a frequency overflow
    async_cb.reset()
    conf_json.reset()
    conf_json['drm']['frequency_mhz'] = 400
    conf_json.save()
    assert conf_json['drm']['frequency_mhz'] == 400
    # NOTE(review): same as above — constructor built without the async callback
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback
    )
    with pytest.raises(accelize_drm.exceptions.DRMWSReqError) as excinfo:
        drm_manager.activate()
    assert 'License Web Service error 400' in str(excinfo.value)
    assert 'Ensure this value is less than or equal to 320' in str(excinfo.value)
    err_code = async_handler.get_error_code(str(excinfo.value))
    assert err_code == accelize_drm.exceptions.DRMWSReqError.error_code
    print('Test frequency overflow: PASS')
def test_mailbox_write_overflow(accelize_drm, conf_json, cred_json, async_handler):
    """Writing more words than the mailbox holds must raise a DRMBadArg error."""
    from random import sample
    driver = accelize_drm.pytest_fpga_driver[0]
    async_cb = async_handler.create()
    # Start from pristine configuration and credential files
    async_cb.reset()
    cred_json.reset()
    conf_json.reset()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    mailbox_size = drm_manager.get('mailbox_size')
    assert mailbox_size > 0
    # Build a payload that is exactly one word too large for the mailbox
    oversized_payload = sample(range(0xFFFFFFFF), mailbox_size + 1)
    with pytest.raises(accelize_drm.exceptions.DRMBadArg) as excinfo:
        drm_manager.set(mailbox_data=oversized_payload)
    assert 'Trying to write out of Mailbox memory space' in str(excinfo.value)
    assert async_handler.get_error_code(str(excinfo.value)) \
        == accelize_drm.exceptions.DRMBadArg.error_code
    async_cb.assert_NoError()
def test_mailbox_type_error(accelize_drm, conf_json, cred_json, async_handler):
    """Writing a non-integer-array value to the mailbox must raise a DRMBadArg error."""
    driver = accelize_drm.pytest_fpga_driver[0]
    async_cb = async_handler.create()
    # Start from pristine configuration and credential files
    async_cb.reset()
    cred_json.reset()
    conf_json.reset()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    # A plain string is not a valid mailbox payload
    with pytest.raises(accelize_drm.exceptions.DRMBadArg) as excinfo:
        drm_manager.set(mailbox_data='this is bad type')
    assert 'Value must be an array of integers' in str(excinfo.value)
    assert async_handler.get_error_code(str(excinfo.value)) \
        == accelize_drm.exceptions.DRMBadArg.error_code
    async_cb.assert_NoError()
def test_configuration_file_bad_product_id(accelize_drm, conf_json, cred_json, async_handler):
    """Test errors when an incorrect product ID is requested to License Web Server.

    Covers three cases: an unknown product name, an empty product info section,
    and a misformatted (unparseable) product info section. The last two require
    reprogramming the FPGA with dedicated images.
    """
    driver = accelize_drm.pytest_fpga_driver[0]
    # Save the current FPGA image so it can be restored in the finally clause
    fpga_image_bkp = driver.fpga_image
    async_cb = async_handler.create()
    cred_json.set_user('accelize_accelerator_test_02')
    # Test Web Service when an unexisting product ID is provided
    async_cb.reset()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    # Corrupt the product name so the Web Service cannot match any known product
    drm_manager.set(bad_product_id=1)
    product_id = drm_manager.get('product_info')
    pid_string = '{vendor}/{library}/{name}'.format(**product_id)
    assert pid_string == 'accelize.com/refdesign/BAD_NAME_JUST_FOR_TEST'
    with pytest.raises(accelize_drm.exceptions.DRMWSReqError) as excinfo:
        drm_manager.activate()
    # The Web Service must reject the request with a 400 and an explicit message
    assert 'License Web Service error 400' in str(excinfo.value)
    assert 'DRM WS request failed' in str(excinfo.value)
    assert search(r'\\"Unknown Product ID\\" \s*%s for' % pid_string, str(excinfo.value)) is not None
    assert search(r'Product ID \s*%s from license request is unknown' % pid_string, str(excinfo.value)) is not None
    assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMWSReqError.error_code
    async_cb.assert_NoError()
    print('Test Web Service when an unexisting product ID is provided: PASS')
    try:
        # Test Web Service when an empty product ID is provided
        # NOTE(review): this AGFI presumably carries a design with no product
        # info (the 'product_info is None' assertion below relies on it)
        driver.program_fpga('agfi-09afa8cd9d0e9d725')
        async_cb.reset()
        drm_manager = accelize_drm.DrmManager(
            conf_json.path,
            cred_json.path,
            driver.read_register_callback,
            driver.write_register_callback,
            async_cb.callback
        )
        assert drm_manager.get('product_info') is None
        with pytest.raises(accelize_drm.exceptions.DRMWSReqError) as excinfo:
            drm_manager.activate()
        assert 'License Web Service error 400' in str(excinfo.value)
        assert 'DRM WS request failed' in str(excinfo.value)
        assert search(r'\\"Unknown Product ID\\" for ', str(excinfo.value)) is not None
        assert 'Product ID from license request is not set' in str(excinfo.value)
        assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMWSReqError.error_code
        async_cb.assert_NoError()
        print('Test Web Service when an empty product ID is provided: PASS')
        # Test Web Service when a misformatted product ID is provided
        # NOTE(review): this AGFI presumably carries malformed JSON in the
        # read-only mailbox, so the constructor itself must fail to parse it
        driver.program_fpga('agfi-07bec847264a84aa6')
        async_cb.reset()
        with pytest.raises(accelize_drm.exceptions.DRMBadFormat) as excinfo:
            drm_manager = accelize_drm.DrmManager(
                conf_json.path,
                cred_json.path,
                driver.read_register_callback,
                driver.write_register_callback,
                async_cb.callback
            )
        assert 'Failed to parse Read-Only Mailbox in DRM Controller:' in str(excinfo.value)
        assert 'Cannot parse JSON string because ' in str(excinfo.value)
        assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMBadFormat.error_code
        async_cb.assert_NoError()
        print('Test Web Service when a misformatted product ID is provided: PASS')
    finally:
        # Reprogram FPGA with original image
        driver.program_fpga(fpga_image_bkp)
@pytest.mark.skip(reason='Not currently specified')
def test_2_drm_manager_concurrently(accelize_drm, conf_json, cred_json, async_handler):
    """Test errors when 2 DrmManager instances are used."""
    driver = accelize_drm.pytest_fpga_driver[0]
    first_cb = async_handler.create()
    second_cb = async_handler.create()
    cred_json.set_user('accelize_accelerator_test_02')
    # The first instance takes ownership of the DRM controller hardware;
    # keep it bound to a name so it stays alive for the whole test.
    first_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        first_cb.callback
    )
    # Creating a second instance on the same hardware must be rejected
    with pytest.raises(accelize_drm.exceptions.DRMBadUsage) as excinfo:
        accelize_drm.DrmManager(
            conf_json.path,
            cred_json.path,
            driver.read_register_callback,
            driver.write_register_callback,
            second_cb.callback
        )
    assert 'Another instance of the DRM Manager is currently owning the HW' in str(excinfo.value)
@pytest.mark.long_run
def test_activation_and_license_status(accelize_drm, conf_json, cred_json, async_handler):
    """Test status of IP activators.

    Exercises 'license_status' and the activators lock state across every
    start/pause/resume/stop transition, including resuming after the license
    has expired.
    """
    driver = accelize_drm.pytest_fpga_driver[0]
    async_cb = async_handler.create()
    activators = accelize_drm.pytest_fpga_activators[0]
    cred_json.set_user('accelize_accelerator_test_02')
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    try:
        print()
        # Test license status on start/stop
        # Check all activators are locked
        assert not drm_manager.get('license_status'), 'License is not inactive'
        activators.autotest(is_activated=False)
        # Activate all activators
        drm_manager.activate()
        # Check all activators are unlocked
        assert drm_manager.get('license_status'), 'License is not active'
        activators.autotest(is_activated=True)
        # Deactivate all activators
        drm_manager.deactivate()
        # Check all activators are locked again
        assert not drm_manager.get('license_status'), 'License is not inactive'
        activators.autotest(is_activated=False)
        async_cb.assert_NoError()
        print('Test license status on start/stop: PASS')
        # Test license status on start/pause
        # Check all activators are locked
        assert not drm_manager.get('license_status'), 'License is not inactive'
        activators.autotest(is_activated=False)
        # Activate all activators; record the start time for the expiration wait below
        drm_manager.activate()
        start = datetime.now()
        # Check all activators are unlocked
        assert drm_manager.get('license_status'), 'License is not active'
        activators.autotest(is_activated=True)
        # Pause all activators (deactivate(True) pauses instead of stopping)
        drm_manager.deactivate(True)
        # Check all activators are still unlocked
        assert drm_manager.get('license_status'), 'License is not active'
        activators.autotest(is_activated=True)
        async_cb.assert_NoError()
        print('Test license status on start/pause: PASS')
        # Test license status on resume from valid license/pause
        # Check all activators are unlocked
        assert drm_manager.get('license_status'), 'License is not active'
        activators.autotest(is_activated=True)
        # Resume all activators (activate(True) resumes the paused session)
        drm_manager.activate(True)
        # Check all activators are still unlocked
        assert drm_manager.get('license_status'), 'License is not active'
        activators.autotest(is_activated=True)
        # Pause all activators
        drm_manager.deactivate(True)
        # Check all activators are still unlocked
        activators.autotest(is_activated=True)
        # Wait until license expires: 2 license periods after activation, plus 1s margin
        lic_duration = drm_manager.get('license_duration')
        wait_period = start + timedelta(seconds=2 * lic_duration + 1) - datetime.now()
        sleep(wait_period.total_seconds())
        # Check all activators are now locked again (license expired while paused)
        assert not drm_manager.get('license_status'), 'License is not inactive'
        activators.autotest(is_activated=False)
        async_cb.assert_NoError()
        print('Test license status on resume from valid license/pause: PASS')
        # Test license status on resume from expired license/pause
        # Check all activators are locked
        assert not drm_manager.get('license_status'), 'License is not inactive'
        activators.autotest(is_activated=False)
        # Resume all activators
        drm_manager.activate(True)
        # Check all activators are unlocked
        assert drm_manager.get('license_status'), 'License is not active'
        activators.autotest(is_activated=True)
        # Pause all activators
        drm_manager.deactivate(True)
        # Check all activators are still unlocked
        assert drm_manager.get('license_status'), 'License is not active'
        activators.autotest(is_activated=True)
        async_cb.assert_NoError()
        print('Test license status on resume from expired license/pause: PASS')
        # Test license status on resume/stop
        # Check all activators are still unlocked
        assert drm_manager.get('license_status'), 'License is not active'
        activators.autotest(is_activated=True)
        async_cb.assert_NoError()
        # Resume all activators
        drm_manager.activate(True)
        # Check all activators are still unlocked
        assert drm_manager.get('license_status'), 'License is not active'
        activators.autotest(is_activated=True)
        # Deactivate all activators
        drm_manager.deactivate()
        # Check all activators are locked again
        assert not drm_manager.get('license_status'), 'License is not inactive'
        activators.autotest(is_activated=False)
        async_cb.assert_NoError()
        print('Test license status on resume/stop: PASS')
        # Test license status on restart from paused session/stop
        # Check all activators are locked again
        assert not drm_manager.get('license_status'), 'License is not inactive'
        activators.autotest(is_activated=False)
        async_cb.assert_NoError()
        # Activate all activators
        drm_manager.activate()
        # Check all activators are unlocked
        assert drm_manager.get('license_status'), 'License is not active'
        activators.autotest(is_activated=True)
        # Pause activators
        drm_manager.deactivate(True)
        # Check all activators are still unlocked
        assert drm_manager.get('license_status'), 'License is not active'
        activators.autotest(is_activated=True)
        # Restart all activators (a plain activate on a paused session)
        drm_manager.activate()
        # Check all activators are still unlocked
        assert drm_manager.get('license_status'), 'License is not active'
        activators.autotest(is_activated=True)
        async_cb.assert_NoError()
        print('Test license status on restart: PASS')
    finally:
        # Always stop any session left running by a failed assertion
        if drm_manager:
            drm_manager.deactivate()
@pytest.mark.long_run
def test_session_status(accelize_drm, conf_json, cred_json, async_handler):
    """Test status of session.

    Checks 'session_status' and 'session_id' through every transition:
    start/stop, start/pause, resume (before and after license expiration),
    resume/stop, and restart from a paused session. A valid session ID is
    16 characters; a closed session returns an empty ID.
    """
    driver = accelize_drm.pytest_fpga_driver[0]
    async_cb = async_handler.create()
    cred_json.set_user('accelize_accelerator_test_02')
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    try:
        print()
        # Test session status on start/stop
        # Check no session is running and no ID is available
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert not status, 'A session is running'
        assert len(session_id) == 0, 'A session ID exists'
        # Activate new session
        drm_manager.activate()
        # Check a session is running with a valid ID
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert status, 'No session is running'
        assert len(session_id) == 16, 'No session ID is returned'
        # Deactivate current session
        drm_manager.deactivate()
        # Check session is closed
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert not status, 'A session is running'
        assert len(session_id) == 0, 'A session ID exists'
        print('Test session status on start/stop: PASS')
        # Test session status on start/pause
        # Check no session is running and no ID is available
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert not status, 'A session is running'
        assert len(session_id) == 0, 'A session ID exists'
        # Activate new session; record start time for the expiration wait below
        drm_manager.activate()
        start = datetime.now()
        # Check a session is running with a valid ID; keep it as the reference ID
        status = drm_manager.get('session_status')
        id_ref = drm_manager.get('session_id')
        assert status, 'No session is running'
        assert len(id_ref) == 16, 'No session ID is returned'
        # Pause current session
        drm_manager.deactivate(True)
        # Check a session is still alive with the same ID
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert status, 'No session is running'
        assert len(session_id) == 16, 'No session ID is returned'
        assert session_id == id_ref, 'Return different session ID'
        async_cb.assert_NoError()
        print('Test session status on start/pause: PASS')
        # Test session status on resume from valid license/pause
        # Check a session is still alive with the same ID
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert status, 'No session is running'
        assert len(session_id) == 16, 'No session ID is returned'
        assert session_id == id_ref, 'Return different session ID'
        # Resume current session
        drm_manager.activate(True)
        # Check a session is still alive with the same ID
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert status, 'No session is running'
        assert len(session_id) == 16, 'No session ID is returned'
        assert session_id == id_ref, 'Return different session ID'
        # Pause current session
        drm_manager.deactivate(True)
        # Check a session is still alive with the same ID
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert status, 'No session is running'
        assert len(session_id) == 16, 'No session ID is returned'
        assert session_id == id_ref, 'Return different session ID'
        # Wait until license expires: 2 license periods after activation, plus 1s margin
        lic_duration = drm_manager.get('license_duration')
        wait_period = start + timedelta(seconds=2 * lic_duration + 1) - datetime.now()
        sleep(wait_period.total_seconds())
        # Check a session is still alive with the same ID (the paused session
        # survives license expiration)
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert status, 'No session is running'
        assert len(session_id) == 16, 'No session ID is returned'
        assert session_id == id_ref, 'Return different session ID'
        async_cb.assert_NoError()
        print('Test session status on resume from valid license/pause: PASS')
        # Test session status on resume from expired license/pause
        # Check a session is still alive with the same ID
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert status, 'No session is running'
        assert len(session_id) == 16, 'No session ID is returned'
        assert session_id == id_ref, 'Return different session ID'
        # Resume current session
        drm_manager.activate(True)
        # Check a session is still alive with the same ID
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert status, 'No session is running'
        assert len(session_id) == 16, 'No session ID is returned'
        assert session_id == id_ref, 'Return different session ID'
        # Pause current session
        drm_manager.deactivate(True)
        # Check a session is still alive with the same ID
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert status, 'No session is running'
        assert len(session_id) == 16, 'No session ID is returned'
        assert session_id == id_ref, 'Return different session ID'
        async_cb.assert_NoError()
        print('Test session status on resume from expired license/pause: PASS')
        # Test session status on resume/stop
        # Check a session is still alive with the same ID
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert status, 'No session is running'
        assert len(session_id) == 16, 'No session ID is returned'
        assert session_id == id_ref, 'Return different session ID'
        # Resume current session
        drm_manager.activate(True)
        # Check a session is still alive with the same ID
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert status, 'No session is running'
        assert len(session_id) == 16, 'No session ID is returned'
        assert session_id == id_ref, 'Return different session ID'
        # Close session
        drm_manager.deactivate()
        # Check session is closed
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert not status, 'A session is running'
        assert len(session_id) == 0, 'A session ID exists'
        async_cb.assert_NoError()
        print('Test session status on resume/stop: PASS')
        # Test session status on start from paused session/stop
        # Check no session is running
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert not status, 'A session is running'
        assert len(session_id) == 0, 'A session ID exists'
        # Start a new session
        drm_manager.activate()
        # Check a session is alive with a new ID (different from the previous one)
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert status, 'No session is running'
        assert len(session_id) == 16, 'No session ID is returned'
        assert session_id != id_ref, 'Return different session ID'
        id_ref = session_id
        # Pause session
        drm_manager.deactivate(True)
        # Check a session is still alive with the same ID
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert status, 'No session is running'
        assert len(session_id) == 16, 'No session ID is returned'
        assert session_id == id_ref, 'Return different session ID'
        # Start a new session (a plain activate on a paused session creates a new one)
        drm_manager.activate()
        # Check a new session has been created with a new ID
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert status, 'No session is running'
        assert len(session_id) == 16, 'No session ID is returned'
        assert session_id != id_ref, 'Return different session ID'
        # Close session
        drm_manager.deactivate()
        # Check session is closed
        status = drm_manager.get('session_status')
        session_id = drm_manager.get('session_id')
        assert not status, 'A session is running'
        assert len(session_id) == 0, 'A session ID exists'
        async_cb.assert_NoError()
        print('Test session status on restart: PASS')
    finally:
        # Always close any session left open by a failed assertion
        if drm_manager:
            drm_manager.deactivate()
@pytest.mark.long_run
def test_license_expiration(accelize_drm, conf_json, cred_json, async_handler):
    """Test license expiration.

    Checks that a paused session's license expires 2 duration periods after
    activation, while an active (non-paused) session keeps renewing and never
    expires.
    """
    driver = accelize_drm.pytest_fpga_driver[0]
    async_cb = async_handler.create()
    activators = accelize_drm.pytest_fpga_activators[0]
    cred_json.set_user('accelize_accelerator_test_02')
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    try:
        print()
        # Test license expires after 2 duration periods when start/pause
        # Check no license is running
        assert not drm_manager.get('license_status')
        activators.autotest(is_activated=False)
        # Start; record the start time to anchor the expiration waits
        drm_manager.activate()
        start = datetime.now()
        lic_duration = drm_manager.get('license_duration')
        # Pause half-way through the first license period
        sleep(lic_duration/2)
        drm_manager.deactivate(True)
        # Check license is still running and activator are all unlocked
        assert drm_manager.get('license_status')
        activators.autotest(is_activated=True)
        # Wait right before expiration (2 periods after start, minus 2s margin)
        wait_period = start + timedelta(seconds=2*lic_duration-2) - datetime.now()
        sleep(wait_period.total_seconds())
        # Check license is still running and activators are all unlocked
        assert drm_manager.get('license_status')
        activators.autotest(is_activated=True)
        # Wait a bit past the expiration point
        sleep(3)
        # Check no license is running
        assert not drm_manager.get('license_status')
        activators.autotest(is_activated=False)
        drm_manager.deactivate()
        # Check no license is running
        assert not drm_manager.get('license_status')
        activators.autotest(is_activated=False)
        async_cb.assert_NoError()
        print('Test license expires after 2 duration periods when start/pause/stop: PASS')
        # Test license does not expire after 3 duration periods when start
        # Check no license is running
        assert not drm_manager.get('license_status')
        activators.autotest(is_activated=False)
        # Start
        drm_manager.activate()
        start = datetime.now()
        # Check license is running
        assert drm_manager.get('license_status')
        activators.autotest(is_activated=True)
        # Wait 3 duration periods (plus 2s margin)
        lic_duration = drm_manager.get('license_duration')
        wait_period = start + timedelta(seconds=3*lic_duration+2) - datetime.now()
        sleep(wait_period.total_seconds())
        # Check license is still running (it keeps being renewed while active)
        assert drm_manager.get('license_status')
        activators.autotest(is_activated=True)
        # Stop
        drm_manager.deactivate()
        # Check no license is running
        assert not drm_manager.get('license_status')
        activators.autotest(is_activated=False)
        async_cb.assert_NoError()
        print('Test license does not expire after 3 duration periods when start: PASS')
        # Test license does not expire after 3 duration periods when start/pause
        # Check no license is running
        assert not drm_manager.get('license_status')
        activators.autotest(is_activated=False)
        # Start
        drm_manager.activate()
        start = datetime.now()
        lic_duration = drm_manager.get('license_duration')
        # Check license is running
        assert drm_manager.get('license_status')
        activators.autotest(is_activated=True)
        # Wait 1.5 duration periods from start (past the first renewal)
        wait_period = start + timedelta(seconds=lic_duration+lic_duration/2) - datetime.now()
        sleep(wait_period.total_seconds())
        # Check license is still running
        assert drm_manager.get('license_status')
        activators.autotest(is_activated=True)
        # Pause
        drm_manager.deactivate(True)
        # Wait right before the next 2 duration periods expire (3 periods minus 2s)
        wait_period = start + timedelta(seconds=3*lic_duration-2) - datetime.now()
        sleep(wait_period.total_seconds())
        # Check license is still running
        assert drm_manager.get('license_status')
        activators.autotest(is_activated=True)
        # Wait a bit past the expiration point
        sleep(3)
        # Check license has expired
        assert not drm_manager.get('license_status')
        activators.autotest(is_activated=False)
        drm_manager.deactivate()
        # Check no license is running
        assert not drm_manager.get('license_status')
        activators.autotest(is_activated=False)
        async_cb.assert_NoError()
        print('Test license does not expire after 3 duration periods when start/pause: PASS')
    finally:
        # Always stop any session left running by a failed assertion
        if drm_manager:
            drm_manager.deactivate()
def test_multiple_call(accelize_drm, conf_json, cred_json, async_handler):
    """Test multiple calls to activate and deactivate.

    Repeated activate() must start a fresh session (new ID) each time, while
    activate(True)/deactivate(True) keep the current session; repeated
    deactivate() must be harmless once the session is closed.
    """
    driver = accelize_drm.pytest_fpga_driver[0]
    async_cb = async_handler.create()
    cred_json.set_user('accelize_accelerator_test_02')
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    try:
        print()
        # Test multiple activate
        # Check license is inactive
        assert not drm_manager.get('license_status')
        # Start
        drm_manager.activate()
        # Check license is active
        assert drm_manager.get('license_status')
        # Check a session is valid
        session_id = drm_manager.get('session_id')
        assert len(session_id) == 16
        # Resume: keeps the same session
        drm_manager.activate(True)
        # Check license is active
        assert drm_manager.get('license_status')
        # Check a session is valid
        session_id2 = drm_manager.get('session_id')
        assert len(session_id2) == 16
        assert session_id2 == session_id
        # Start again: creates a new session with a new ID
        drm_manager.activate()
        # Check license is active
        assert drm_manager.get('license_status')
        # Check a session is valid
        session_id = drm_manager.get('session_id')
        assert len(session_id) == 16
        assert session_id != session_id2
        # Start again
        drm_manager.activate()
        # Check license is active
        assert drm_manager.get('license_status')
        # Check a session is valid
        session_id2 = drm_manager.get('session_id')
        assert len(session_id2) == 16
        assert session_id2 != session_id
        async_cb.assert_NoError()
        # Test multiple deactivate
        # Check license is active
        assert drm_manager.get('license_status')
        # Pause
        drm_manager.deactivate(True)
        # Check license is active
        assert drm_manager.get('license_status')
        # Check a session is valid
        session_id = drm_manager.get('session_id')
        assert len(session_id) == 16
        assert session_id == session_id2
        # Pause again: still the same session
        drm_manager.deactivate(True)
        # Check license is active
        assert drm_manager.get('license_status')
        # Check a session is valid
        session_id = drm_manager.get('session_id')
        assert len(session_id) == 16
        assert session_id == session_id2
        # Stop
        drm_manager.deactivate()
        # Check license is inactive
        assert not drm_manager.get('license_status')
        # Check session ID is invalid
        session_id = drm_manager.get('session_id')
        assert len(session_id) == 0
        # Stop again: must be a no-op on a closed session
        drm_manager.deactivate()
        # Check license is inactive
        assert not drm_manager.get('license_status')
        # Check session ID is invalid
        session_id = drm_manager.get('session_id')
        assert len(session_id) == 0
        async_cb.assert_NoError()
    finally:
        # Always close any session left open by a failed assertion
        if drm_manager:
            drm_manager.deactivate()
@pytest.mark.on_2_fpga
def test_retry_function(accelize_drm, conf_json, cred_json, async_handler):
    """
    Test retry mechanism on API function (not including the retry in background thread).
    The retry is tested with one FPGA activated with a floating license and a 2nd FPGA
    that's requesting the same floating license but with a limit to 1 node.
    """
    driver0 = accelize_drm.pytest_fpga_driver[0]
    driver1 = accelize_drm.pytest_fpga_driver[1]
    async_cb0 = async_handler.create()
    async_cb1 = async_handler.create()
    cred_json.set_user('accelize_accelerator_test_04')
    # Test no retry
    conf_json.reset()
    # A 0s short retry period: the seat-limit error must be raised immediately
    # (checked below with the < 1s elapsed-time assertion)
    retry_period = 0
    conf_json['settings']['ws_retry_period_short'] = retry_period
    conf_json.save()
    async_cb0.reset()
    drm_manager0 = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver0.read_register_callback,
        driver0.write_register_callback,
        async_cb0.callback
    )
    async_cb1.reset()
    drm_manager1 = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver1.read_register_callback,
        driver1.write_register_callback,
        async_cb1.callback
    )
    assert not drm_manager0.get('license_status')
    assert not drm_manager1.get('license_status')
    try:
        # First FPGA takes the single floating seat
        drm_manager0.activate()
        assert drm_manager0.get('license_status')
        start = datetime.now()
        # Second FPGA must fail immediately with a retryable 470 error
        with pytest.raises(accelize_drm.exceptions.DRMWSMayRetry) as excinfo:
            drm_manager1.activate()
        end = datetime.now()
        assert (end - start).total_seconds() < 1
        assert 'License Web Service error 470' in str(excinfo.value)
        assert 'DRM WS request failed' in str(excinfo.value)
        assert search(r'\\"Entitlement Limit Reached\\" with .+ for accelize_accelerator_test_04@accelize.com', str(excinfo.value)) is not None
        assert 'You have reached the maximum quantity of 1 seat(s) for floating entitlement' in str(excinfo.value)
        assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMWSMayRetry.error_code
    finally:
        drm_manager0.deactivate()
        assert not drm_manager0.get('license_status')
        assert not drm_manager1.get('license_status')
        async_cb0.assert_NoError()
        async_cb1.assert_NoError()
    print('Test no retry: PASS')
    # Test 10s retry
    conf_json.reset()
    # With a 1s retry period and a 10s request timeout, the call must retry
    # several times before giving up with DRMWSError
    timeout = 10
    retry = 1
    conf_json['settings']['ws_request_timeout'] = timeout
    conf_json['settings']['ws_retry_period_short'] = retry
    conf_json.save()
    async_cb0.reset()
    drm_manager0 = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver0.read_register_callback,
        driver0.write_register_callback,
        async_cb0.callback
    )
    async_cb1.reset()
    drm_manager1 = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver1.read_register_callback,
        driver1.write_register_callback,
        async_cb1.callback
    )
    assert not drm_manager0.get('license_status')
    assert not drm_manager1.get('license_status')
    try:
        drm_manager0.activate()
        assert drm_manager0.get('license_status')
        start = datetime.now()
        with pytest.raises(accelize_drm.exceptions.DRMWSError) as excinfo:
            drm_manager1.activate()
        end = datetime.now()
        # The error message reports the number of attempts: more than 1 proves retries happened
        m = search(r'Timeout on License request after (\d+) attempts', str(excinfo.value))
        assert m is not None
        assert int(m.group(1)) > 1
        assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMWSError.error_code
        # The total retry loop must last the configured timeout (within 1s)
        assert (end - start).total_seconds() >= timeout
        assert (end - start).total_seconds() <= timeout + 1
    finally:
        drm_manager0.deactivate()
        assert not drm_manager0.get('license_status')
        assert not drm_manager1.get('license_status')
        async_cb0.assert_NoError()
        async_cb1.assert_NoError()
    print('Test 10s retry: PASS')
def test_security_stop(accelize_drm, conf_json, cred_json, async_handler):
    """
    Test the session is stopped in case of abnormal termination
    """
    driver = accelize_drm.pytest_fpga_driver[0]
    async_cb = async_handler.create()
    cred_json.set_user('accelize_accelerator_test_02')
    # Open a session, then destroy the manager without calling deactivate()
    first_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    first_manager.activate()
    assert first_manager.get('session_status')
    assert len(first_manager.get('session_id')) > 0
    del first_manager
    # A fresh manager must see no running session and an empty session ID:
    # the abnormal termination above is expected to have closed the session
    second_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    assert not second_manager.get('session_status')
    assert len(second_manager.get('session_id')) == 0
    async_cb.assert_NoError()
def test_readonly_and_writeonly_parameters(accelize_drm, conf_json, cred_json, async_handler):
    """
    Test readonly parameter cannot be written and writeonly parameter cannot be read
    """
    driver = accelize_drm.pytest_fpga_driver[0]
    async_cb = async_handler.create()
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    bad_arg = accelize_drm.exceptions.DRMBadArg
    # Reading a write-only parameter must raise DRMBadArg
    with pytest.raises(bad_arg) as excinfo:
        drm_manager.get('trigger_async_callback')
    assert "Parameter 'trigger_async_callback' cannot be read" in str(excinfo.value)
    assert async_handler.get_error_code(str(excinfo.value)) == bad_arg.error_code
    async_cb.assert_NoError()
    # Writing a read-only parameter must raise DRMBadArg
    with pytest.raises(bad_arg) as excinfo:
        drm_manager.set(license_duration=10)
    assert "Parameter 'license_duration' cannot be overwritten" in str(excinfo.value)
    assert async_handler.get_error_code(str(excinfo.value)) == bad_arg.error_code
    async_cb.assert_NoError()
@pytest.mark.endurance
def test_authentication_expiration(accelize_drm, conf_json, cred_json, async_handler):
    """Endurance test: keep metering across the authentication expiration period.

    Runs a metering loop for 'expiration_period' plus 2 license periods and
    checks the license stays active and the coin counter stays consistent.

    Fixes vs. previous revision: the loop referenced an undefined name
    'activators' (NameError at runtime) — only 'activator' is defined here;
    also removed the unused 'from random import sample' and added a finally
    clause so the session is closed even if an assertion fails mid-loop.
    """
    driver = accelize_drm.pytest_fpga_driver[0]
    activator = accelize_drm.pytest_fpga_activators[0][0]
    async_cb = async_handler.create()
    cred_json.set_user('accelize_accelerator_test_02')
    drm_manager = accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
    )
    activator.generate_coin(1000)
    assert not drm_manager.get('license_status')
    activator.autotest(is_activated=False)
    drm_manager.activate()
    try:
        lic_duration = drm_manager.get('license_duration')
        assert drm_manager.get('license_status')
        activator.autotest(is_activated=True)
        activator.check_coin(drm_manager.get('metered_data'))
        start = datetime.now()
        # seconds; presumably covers the authentication token lifetime — TODO confirm
        expiration_period = 12000
        while True:
            # Run until 'expiration_period' plus 2 license periods have elapsed
            seconds_left = (expiration_period + 2*lic_duration) - (datetime.now() - start).total_seconds()
            if seconds_left < 0:
                break
            assert drm_manager.get('license_status')
            assert activator.generate_coin(1)
            activator.check_coin(drm_manager.get('metered_data'))
            print('Remaining time: ', seconds_left, ' s / current coins: ', activator.metering_data)
            sleep(5)
    finally:
        # Always close the session, even when an assertion fails mid-loop
        drm_manager.deactivate()
    assert not drm_manager.get('license_status')
    activator.autotest(is_activated=False)
def test_directory_creation(accelize_drm, conf_json, cred_json, async_handler):
    """Test log file directory handling at DrmManager construction.

    Three cases: directory creation fails (parent made immutable with chattr),
    directory already exists, and a missing intermediate directory that must
    be created. Requires passwordless sudo for chattr.
    """
    from shutil import rmtree
    from subprocess import check_call
    from os import makedirs, access, R_OK, W_OK
    from os.path import isdir, expanduser
    driver = accelize_drm.pytest_fpga_driver[0]
    async_cb = async_handler.create()
    log_type = 1
    log_dir = realpath(expanduser('~/tmp_log_dir'))
    if not isdir(log_dir):
        makedirs(log_dir)
    try:
        # Test error when creating directory
        # Create immutable folder: chattr +i prevents creating the 'tmp' subdir
        check_call('sudo chattr +i %s' % log_dir, shell=True)
        try:
            assert not access(log_dir, W_OK)
            log_path = join(log_dir, "tmp", "drmservice-%d.log" % getpid())
            assert not isdir(dirname(log_path))
            async_cb.reset()
            conf_json.reset()
            conf_json['settings']['log_file_path'] = log_path
            conf_json['settings']['log_file_type'] = log_type
            conf_json.save()
            # The constructor must fail: it cannot create the log file path
            with pytest.raises(accelize_drm.exceptions.DRMExternFail) as excinfo:
                drm_manager = accelize_drm.DrmManager(
                    conf_json.path,
                    cred_json.path,
                    driver.read_register_callback,
                    driver.write_register_callback,
                    async_cb.callback
                )
            assert "Failed to create log file %s" % log_path in str(excinfo.value)
        finally:
            # Make the folder writable again before cleanup
            check_call('sudo chattr -i %s' % log_dir, shell=True)
            assert access(log_dir, W_OK)
            if isfile(log_path):
                remove(log_path)
        print('Test folder creation error: PASS')
        # Test directory already exists
        assert isdir(log_dir)
        assert access(log_dir, W_OK)
        log_path = join(log_dir, "drmservice-%d.log" % getpid())
        assert not isfile(log_path)
        async_cb.reset()
        conf_json.reset()
        conf_json['settings']['log_file_path'] = log_path
        conf_json['settings']['log_file_type'] = log_type
        conf_json.save()
        try:
            drm_manager = accelize_drm.DrmManager(
                conf_json.path,
                cred_json.path,
                driver.read_register_callback,
                driver.write_register_callback,
                async_cb.callback
            )
            # Destroy the manager and force collection so the log file is flushed/closed
            del drm_manager
            gc.collect()
            assert isfile(log_path)
        finally:
            if isfile(log_path):
                remove(log_path)
        print('Test already existing folder: PASS')
        # Test directory creation: the 'tmp' intermediate dir must be created
        assert isdir(log_dir)
        assert access(log_dir, W_OK)
        intermediate_dir = join(log_dir, 'tmp')
        assert not isdir(intermediate_dir)
        log_path = join(intermediate_dir, "drmservice-%d.log" % getpid())
        async_cb.reset()
        conf_json.reset()
        conf_json['settings']['log_file_path'] = log_path
        conf_json['settings']['log_file_type'] = log_type
        conf_json.save()
        try:
            drm_manager = accelize_drm.DrmManager(
                conf_json.path,
                cred_json.path,
                driver.read_register_callback,
                driver.write_register_callback,
                async_cb.callback
            )
            del drm_manager
            gc.collect()
            assert isfile(log_path)
        finally:
            if isdir(intermediate_dir):
                rmtree(intermediate_dir)
        print('Test creation of new folder: PASS')
    finally:
        if isdir(log_dir):
            rmtree(log_dir)
| 39.479442 | 143 | 0.684741 | 13,360 | 104,660 | 5.07253 | 0.043263 | 0.070239 | 0.047382 | 0.027446 | 0.84456 | 0.814738 | 0.780549 | 0.735661 | 0.708392 | 0.675781 | 0 | 0.007906 | 0.227785 | 104,660 | 2,650 | 144 | 39.49434 | 0.830611 | 0.116816 | 0 | 0.688167 | 0 | 0.00097 | 0.188976 | 0.034687 | 0 | 0 | 0.001316 | 0 | 0.231814 | 1 | 0.011154 | false | 0.048497 | 0.011154 | 0 | 0.023763 | 0.052861 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
666747a3f04116ddd56459514db87bad857c86ba | 98 | py | Python | Bricks/bricks/home/__init__.py | iOsnaaente/Web-Applications- | 94bfa5318fca1b651c9c659cb5a54a73e19a7927 | [
"MIT"
] | null | null | null | Bricks/bricks/home/__init__.py | iOsnaaente/Web-Applications- | 94bfa5318fca1b651c9c659cb5a54a73e19a7927 | [
"MIT"
] | null | null | null | Bricks/bricks/home/__init__.py | iOsnaaente/Web-Applications- | 94bfa5318fca1b651c9c659cb5a54a73e19a7927 | [
"MIT"
] | null | null | null | from flask import Blueprint
home_bp = Blueprint('home',__name__)

# Imported after the blueprint object exists so that the routes module can
# import `home_bp` without a circular-import error.
# NOTE(review): presumably bricks/home/routes.py attaches views to home_bp — confirm.
from bricks.home import routes
| 16.333333 | 36 | 0.795918 | 14 | 98 | 5.214286 | 0.642857 | 0.356164 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.132653 | 98 | 5 | 37 | 19.6 | 0.858824 | 0 | 0 | 0 | 0 | 0 | 0.040816 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.666667 | 0 | 0.666667 | 0.666667 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 6 |
6672a58a0381799a9854f50367e9603ff4bbaddb | 37 | py | Python | __init__.py | h-roy/Attentive-Group-Equivariant-Convolutional-Networks | 6ff0757e594c070f42b81fdd0bf4a7d27740be7c | [
"MIT"
] | null | null | null | __init__.py | h-roy/Attentive-Group-Equivariant-Convolutional-Networks | 6ff0757e594c070f42b81fdd0bf4a7d27740be7c | [
"MIT"
] | null | null | null | __init__.py | h-roy/Attentive-Group-Equivariant-Convolutional-Networks | 6ff0757e594c070f42b81fdd0bf4a7d27740be7c | [
"MIT"
] | null | null | null | from AttentionLayers.layers import *
| 18.5 | 36 | 0.837838 | 4 | 37 | 7.75 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.108108 | 37 | 1 | 37 | 37 | 0.939394 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
668a455d361f2545b652770068bdf6262b04b653 | 165 | py | Python | Proj/admin.py | narae0409/playpay | 8ca27083639c5c6567216155417351a6eb1f100d | [
"MIT"
] | 2 | 2021-01-18T14:31:26.000Z | 2021-01-18T15:52:54.000Z | Proj/admin.py | narae0409/playpay | 8ca27083639c5c6567216155417351a6eb1f100d | [
"MIT"
] | null | null | null | Proj/admin.py | narae0409/playpay | 8ca27083639c5c6567216155417351a6eb1f100d | [
"MIT"
] | null | null | null | from django.contrib import admin
from . models import *
# Register your models here.
# Expose the app's models in the Django admin with the default ModelAdmin.
admin.site.register(User)
admin.site.register(Board)
admin.site.register(Text)
669c6adfe39575565f91caf561843cea52bdc22c | 8,431 | py | Python | VirtualGym_WebProject/src/users/tests.py | LuBrian/VirtualGym | 9fe184214be4c23ed5842298e3c400fbe8de9f6d | [
"MIT"
] | null | null | null | VirtualGym_WebProject/src/users/tests.py | LuBrian/VirtualGym | 9fe184214be4c23ed5842298e3c400fbe8de9f6d | [
"MIT"
] | null | null | null | VirtualGym_WebProject/src/users/tests.py | LuBrian/VirtualGym | 9fe184214be4c23ed5842298e3c400fbe8de9f6d | [
"MIT"
] | null | null | null | from django.test import TestCase
import datetime
from VirtualGym.models import *
from selenium import webdriver
import time
import os,sys
# this test is a UI test for User app(basiclly sign in and sign out)
class UITestCase(TestCase):
    """Selenium UI tests for the users app (sign in / sign out flows).

    Each test drives a local dev server at http://localhost:8000/ through a
    real Firefox browser.  The original print-based pass/fail reporting is
    kept; the only behavioral change is narrowing the bare ``except:``
    clauses to ``except Exception:`` so that ``KeyboardInterrupt`` and
    ``SystemExit`` are no longer silently swallowed.
    """

    def setUp(self):
        # Launch a fresh Firefox instance for every test.
        self.browser = webdriver.Firefox()

    def tearDown(self):
        # Close the browser after each test.
        self.browser.quit()

    def test_signIn(self):
        """User can sign in with a VG account via username and password."""
        try:
            self.browser.get("http://localhost:8000/")
            self.assertIn("http://localhost:8000/", self.browser.current_url)
            signIn = self.browser.find_element_by_xpath("//*[@id='signIn_popup_trigger']").click()
            email = self.browser.find_element_by_id('email')
            email.send_keys('sprint4_test@vg.ca')
            password = self.browser.find_element_by_id('password')
            password.send_keys('123')
            signIn = self.browser.find_element_by_id('signInButton').click()
            self.assertIn("http://localhost:8000/", self.browser.current_url)
            print("User sign in test passed!")
        except Exception:
            print("Sign in test failed! server down or no internet for now")
            print("Please check or contact us! ")

    def test_signOut(self):
        """User can sign out through the navigation bar."""
        try:
            self.browser.get("http://localhost:8000/")
            self.assertIn("http://localhost:8000/", self.browser.current_url)
            signIn = self.browser.find_element_by_xpath("//*[@id='signIn_popup_trigger']").click()
            email = self.browser.find_element_by_id('email')
            email.send_keys('sprint4_test@vg.ca')
            password = self.browser.find_element_by_id('password')
            password.send_keys('123')
            signIn = self.browser.find_element_by_id('signInButton').click()
            self.assertIn("http://localhost:8000/", self.browser.current_url)
            signOut = self.browser.find_element_by_xpath('/html/body/div/div[1]/nav/div/div[2]/ul/li[6]/a').click()
            # The sign-in trigger reappearing means we are signed out again.
            check = self.browser.find_element_by_xpath("//*[@id='signIn_popup_trigger']")
            self.assertTrue(check)
            print("User sign out test passed!")
        except Exception:
            print("Sign out test failed! server down or no internet for now")
            print("Please check or contact us! ")

    def test_google_SignIn(self):
        """User can sign in through a Google account."""
        try:
            self.browser.get("http://localhost:8000/")
            self.assertIn("http://localhost:8000/", self.browser.current_url)
            signIn = self.browser.find_element_by_xpath("//*[@id='signIn_popup_trigger']").click()
            Google_signIn = self.browser.find_element_by_xpath('//*[@id="googleSignIn"]').click()
            # sleep() calls let the external OAuth pages render before the
            # next element lookup.
            time.sleep(1)
            email = self.browser.find_element_by_xpath("//*[@id='identifierId']")
            email.send_keys("virthalgymlogintest@gmail.com")
            time.sleep(1)
            next_step = self.browser.find_element_by_xpath("//*[@id='identifierNext']/content/span").click()
            time.sleep(3)
            password = self.browser.find_element_by_xpath("//*[@id='password']/div[1]/div/div[1]/input")
            password.send_keys("virtualgym")
            time.sleep(1)
            password_step = self.browser.find_element_by_xpath("//*[@id='passwordNext']/content/span").click()
            time.sleep(1)
            self.assertIn("http://localhost:8000/", self.browser.current_url)
            print("Google sign in test passed!")
        except Exception:
            print("Google sign in test failed! server down or no internet for now")
            print("Please check or contact us! ")

    def test_moderator_Login(self):
        """Moderator can log in through the admin page with a superuser account."""
        try:
            self.browser.get("http://localhost:8000/admin/")
            self.assertIn("http://localhost:8000/admin/", self.browser.current_url)
            username = self.browser.find_element_by_xpath("//*[@id='id_username']")
            username.send_keys("sprint4_admin@vg.ca")
            password = self.browser.find_element_by_xpath("//*[@id='id_password']")
            password.send_keys("admin1234")
            Log_in = self.browser.find_element_by_xpath("//*[@id='login-form']/div[3]/input").click()
            self.assertIn("http://localhost:8000/admin/", self.browser.current_url)
            print("Moderator login test passed!")
        except Exception:
            print("Moderator login test failed! server down or no internet for now")
            print("Please check or contact us! ")

    def test_facebook_SignIn(self):
        """User can sign in through a Facebook account."""
        try:
            self.browser.get("http://localhost:8000/")
            self.assertIn("http://localhost:8000/", self.browser.current_url)
            signIn = self.browser.find_element_by_xpath("//*[@id='signIn_popup_trigger']").click()
            facebook_signIn = self.browser.find_element_by_xpath('//*[@id="facebookSignIn"]').click()
            time.sleep(1)
            email = self.browser.find_element_by_xpath("//*[@id='email']")
            email.send_keys("virthalgymlogintest@gmail.com")
            time.sleep(1)
            password = self.browser.find_element_by_xpath("//*[@id='pass']")
            password.send_keys("virtualgym")
            time.sleep(1)
            logIn = self.browser.find_element_by_xpath("//*[@id='loginbutton']").click()
            try:
                # The "continue" dialog only appears on first-time sign-up.
                continueLogin = self.browser.find_element_by_xpath("//*[@id='u_0_x']/div[2]/div[1]/div[1]/button").click()
            except Exception:
                print("user already signed Up with this facebook account")
                print("Log in successful!")
            time.sleep(3)
            self.assertIn("http://localhost:8000/", self.browser.current_url)
            print("Facebook sign in test passed!")
        except Exception:
            print("Facebook sign in test failed! server down or no internet for now")
            print("Please check or contact us! ")

    def test_twitter_SignIn(self):
        """User can sign in through a Twitter account."""
        try:
            self.browser.get("http://localhost:8000/")
            self.assertIn("http://localhost:8000/", self.browser.current_url)
            signIn = self.browser.find_element_by_xpath("//*[@id='signIn_popup_trigger']").click()
            twitter_signIn = self.browser.find_element_by_xpath('//*[@id="twitterSignIn"]').click()
            time.sleep(1)
            email = self.browser.find_element_by_xpath("//*[@id='username_or_email']")
            email.send_keys("virthalgymlogintest@gmail.com")
            time.sleep(1)
            password = self.browser.find_element_by_xpath("//*[@id='password']")
            password.send_keys("virtualgym")
            time.sleep(1)
            logIn = self.browser.find_element_by_xpath("//*[@id='allow']").click()
            time.sleep(3)
            self.assertIn("http://localhost:8000/", self.browser.current_url)
            print("Twitter sign in test passed!")
        except Exception:
            print("Twitter sign in test failed! server down or no internet for now")
            print("Please check or contact us! ")

    def test_give_moderator_privileges(self):
        """Moderator privileges can be granted to (and revoked from) an account."""
        try:
            self.browser.get("http://localhost:8000/admin/")
            self.assertIn("http://localhost:8000/admin/", self.browser.current_url)
            username = self.browser.find_element_by_xpath("//*[@id='id_username']")
            username.send_keys("sprint4_admin@vg.ca")
            password = self.browser.find_element_by_xpath("//*[@id='id_password']")
            password.send_keys("admin1234")
            Log_in = self.browser.find_element_by_xpath("//*[@id='login-form']/div[3]/input").click()
            self.browser.find_element_by_xpath("//*[@id='menu-content']/li[8]/i/a").click()
            self.browser.find_element_by_xpath("//*[@id='result_list']/tbody/tr[5]/th/a").click()
            self.browser.find_element_by_xpath("//*[@id='id_is_admin']").click()
            self.browser.find_element_by_xpath("//*[@id='id_is_superuser']").click()
            self.browser.find_element_by_xpath("//*[@id='myusers_form']/div/div/input[1]").click()
            self.browser.find_element_by_xpath("//*[@id='result_list']/tbody/tr[5]/th/a").click()
            admin_box = self.browser.find_element_by_xpath("//*[@id='id_is_admin']").is_selected()
            self.assertTrue(admin_box == True)
            super_box = self.browser.find_element_by_xpath("//*[@id='id_is_superuser']").is_selected()
            self.assertTrue(super_box == True)
            self.browser.find_element_by_xpath("//*[@id='myusers_form']/div/div/input[1]").click()
            # everything goes ok, cancel privileges
            self.browser.find_element_by_xpath("//*[@id='result_list']/tbody/tr[5]/th/a").click()
            self.browser.find_element_by_xpath("//*[@id='id_is_admin']").click()
            self.browser.find_element_by_xpath("//*[@id='id_is_superuser']").click()
            self.browser.find_element_by_xpath("//*[@id='myusers_form']/div/div/input[1]").click()
            print("Give moderator privileges test passed!")
        except Exception:
            print("Give moderator privileges test failed! server down or no internet for now")
            print("Please check or contact us! ")
| 46.838889 | 110 | 0.713557 | 1,216 | 8,431 | 4.755757 | 0.132401 | 0.129345 | 0.119315 | 0.174996 | 0.795954 | 0.75549 | 0.7318 | 0.720214 | 0.684247 | 0.665917 | 0 | 0.017353 | 0.118254 | 8,431 | 179 | 111 | 47.100559 | 0.76056 | 0.073182 | 0 | 0.633987 | 0 | 0.006536 | 0.360303 | 0.153353 | 0 | 0 | 0 | 0 | 0.104575 | 1 | 0.058824 | false | 0.143791 | 0.039216 | 0 | 0.104575 | 0.176471 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 6 |
66aaaa0791636a0b7183020ee082cfea6ad8a9ac | 96 | py | Python | venv/lib/python3.8/site-packages/numpy/core/tests/test_hashtable.py | Retraces/UkraineBot | 3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71 | [
"MIT"
] | 2 | 2022-03-13T01:58:52.000Z | 2022-03-31T06:07:54.000Z | venv/lib/python3.8/site-packages/numpy/core/tests/test_hashtable.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | 19 | 2021-11-20T04:09:18.000Z | 2022-03-23T15:05:55.000Z | venv/lib/python3.8/site-packages/numpy/core/tests/test_hashtable.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | null | null | null | /home/runner/.cache/pip/pool/65/5f/07/2fc3640e7a1065f9e37bb04fd1fc88a787d216a8cab10738d0f6938207 | 96 | 96 | 0.895833 | 9 | 96 | 9.555556 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.447917 | 0 | 96 | 1 | 96 | 96 | 0.447917 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
dd1281db414b28a2fd6c38278487c7478af51784 | 71 | py | Python | trie_search/__init__.py | joein/trie-search | be1e407b572edee0307943028a2e0149ff842f07 | [
"MIT"
] | 15 | 2018-09-09T17:12:22.000Z | 2021-12-10T14:17:29.000Z | trie_search/__init__.py | joein/trie-search | be1e407b572edee0307943028a2e0149ff842f07 | [
"MIT"
] | 1 | 2019-03-05T16:39:33.000Z | 2019-03-05T16:39:33.000Z | trie_search/__init__.py | joein/trie-search | be1e407b572edee0307943028a2e0149ff842f07 | [
"MIT"
] | 6 | 2017-03-13T12:54:43.000Z | 2020-12-02T14:58:18.000Z | from .trie import TrieSearch
from .record_trie import RecordTrieSearch
| 23.666667 | 41 | 0.859155 | 9 | 71 | 6.666667 | 0.666667 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.112676 | 71 | 2 | 42 | 35.5 | 0.952381 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
dd23ec3f830ec9af128f22847e35a6c229c49bbb | 12,017 | py | Python | SimModel_Python_API/simmodel_swig/Release/SimSite_BuildingSite_Default.py | EnEff-BIM/EnEffBIM-Framework | 6328d39b498dc4065a60b5cc9370b8c2a9a1cddf | [
"MIT"
] | 3 | 2016-05-30T15:12:16.000Z | 2022-03-22T08:11:13.000Z | SimModel_Python_API/simmodel_swig/Release/SimSite_BuildingSite_Default.py | EnEff-BIM/EnEffBIM-Framework | 6328d39b498dc4065a60b5cc9370b8c2a9a1cddf | [
"MIT"
] | 21 | 2016-06-13T11:33:45.000Z | 2017-05-23T09:46:52.000Z | SimModel_Python_API/simmodel_swig/Release/SimSite_BuildingSite_Default.py | EnEff-BIM/EnEffBIM-Framework | 6328d39b498dc4065a60b5cc9370b8c2a9a1cddf | [
"MIT"
] | null | null | null | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.7
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
# Import the compiled SWIG extension module.  On Python >= 2.6 the module is
# located next to this wrapper via imp.find_module; older interpreters fall
# back to a plain import.  (SWIG-generated boilerplate — left unmodified.)
if version_info >= (2, 6, 0):
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_SimSite_BuildingSite_Default', [dirname(__file__)])
        except ImportError:
            # Not found next to this file: fall back to a normal import.
            import _SimSite_BuildingSite_Default
            return _SimSite_BuildingSite_Default
        if fp is not None:
            try:
                _mod = imp.load_module('_SimSite_BuildingSite_Default', fp, pathname, description)
            finally:
                # Always close the file handle opened by find_module.
                fp.close()
            return _mod
    _SimSite_BuildingSite_Default = swig_import_helper()
    del swig_import_helper
else:
    import _SimSite_BuildingSite_Default
del version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
    """Attribute setter shared by all SWIG proxy classes.

    Writes are routed through the class's ``__swig_setmethods__`` table so
    the C++ side sees them; when ``static`` is true, setting an attribute
    unknown to the wrapped type raises AttributeError.
    """
    if (name == "thisown"):
        return self.this.own(value)
    if (name == "this"):
        # Store the underlying SwigPyObject pointer directly.
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name, None)
    if method:
        return method(self, value)
    if (not static):
        if _newclass:
            object.__setattr__(self, name, value)
        else:
            self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
    # Non-static variant: also allows adding new Python-side attributes.
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr_nondynamic(self, class_type, name, static=1):
    """Attribute getter shared by all SWIG proxy classes (mirror of the setter)."""
    if (name == "thisown"):
        return self.this.own()
    method = class_type.__swig_getmethods__.get(name, None)
    if method:
        return method(self)
    if (not static):
        return object.__getattr__(self, name)
    else:
        raise AttributeError(name)
def _swig_getattr(self, class_type, name):
    # Non-static variant of the getter.
    return _swig_getattr_nondynamic(self, class_type, name, 0)
def _swig_repr(self):
    """repr() helper shared by all SWIG proxy classes."""
    try:
        strthis = "proxy of " + self.this.__repr__()
    # Bare except kept exactly as SWIG emits it (generated code).
    except:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Old/new-style class compatibility: _newclass is 1 when new-style classes
# exist (consulted by _swig_setattr_nondynamic above).
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object:
        pass
    _newclass = 0

# weakref may be missing on minimal builds; degrade to the identity function.
try:
    import weakref
    weakref_proxy = weakref.proxy
except:
    weakref_proxy = lambda x: x
import base
class SimSite(base.SimSpatialStructureElement):
    """SWIG proxy for the SimModel ``SimSite`` spatial structure element.

    The __swig_*methods__ tables merge the parent's accessor maps so that
    attribute reads/writes are dispatched into the compiled extension.
    Each accessor below delegates to the corresponding C++ function;
    presumably calling with no args reads the value and with args sets it
    (standard SWIG accessor convention) — TODO confirm against the .i file.
    """
    __swig_setmethods__ = {}
    for _s in [base.SimSpatialStructureElement]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimSite, name, value)
    __swig_getmethods__ = {}
    for _s in [base.SimSpatialStructureElement]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimSite, name)
    __repr__ = _swig_repr

    # Generated accessors — one per SimSite property in the schema.
    def Name(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_Name(self, *args)

    def Longitude(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_Longitude(self, *args)

    def Latitude(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_Latitude(self, *args)

    def SiteReferenceElevation(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_SiteReferenceElevation(self, *args)

    def BuildableArea(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_BuildableArea(self, *args)

    def BuildingHeightLimit(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_BuildingHeightLimit(self, *args)

    def SitePerimeter(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_SitePerimeter(self, *args)

    def SiteGrossLandArea(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_SiteGrossLandArea(self, *args)

    def SiteExcavationCutVolume(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_SiteExcavationCutVolume(self, *args)

    def SiteExcavationBackfillVolume(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_SiteExcavationBackfillVolume(self, *args)

    def GeometricRepresentations(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_GeometricRepresentations(self, *args)

    def SiteInSpatialContainer(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_SiteInSpatialContainer(self, *args)

    def LandTitleNumber(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_LandTitleNumber(self, *args)

    def SiteAddress(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_SiteAddress(self, *args)

    def WeatherLocationRegion(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_WeatherLocationRegion(self, *args)

    def WeatherLocationCity(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_WeatherLocationCity(self, *args)

    def WeatherLocationID(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_WeatherLocationID(self, *args)

    def WeatherLocationIDType(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_WeatherLocationIDType(self, *args)

    def DesignDays(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_DesignDays(self, *args)

    def TimeZone(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_TimeZone(self, *args)

    def LocationsTemplate(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_LocationsTemplate(self, *args)

    def TemplateOverrideValues(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_TemplateOverrideValues(self, *args)

    def __init__(self, *args):
        # Bind the new C++ object; on the very first call self.this does
        # not exist yet, hence the except fallback (generated code).
        this = _SimSite_BuildingSite_Default.new_SimSite(*args)
        try:
            self.this.append(this)
        except:
            self.this = this

    def _clone(self, f=0, c=None):
        return _SimSite_BuildingSite_Default.SimSite__clone(self, f, c)
    __swig_destroy__ = _SimSite_BuildingSite_Default.delete_SimSite
    __del__ = lambda self: None

# Register the proxy class with the extension's type system.
SimSite_swigregister = _SimSite_BuildingSite_Default.SimSite_swigregister
SimSite_swigregister(SimSite)
class SimSite_BuildingSite(SimSite):
    """SWIG proxy for the ``SimSite.BuildingSite`` subtype (adds no new
    accessors; inherits all SimSite properties)."""
    __swig_setmethods__ = {}
    for _s in [SimSite]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimSite_BuildingSite, name, value)
    __swig_getmethods__ = {}
    for _s in [SimSite]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimSite_BuildingSite, name)
    __repr__ = _swig_repr

    def __init__(self, *args):
        this = _SimSite_BuildingSite_Default.new_SimSite_BuildingSite(*args)
        try:
            self.this.append(this)
        except:
            self.this = this

    def _clone(self, f=0, c=None):
        return _SimSite_BuildingSite_Default.SimSite_BuildingSite__clone(self, f, c)
    __swig_destroy__ = _SimSite_BuildingSite_Default.delete_SimSite_BuildingSite
    __del__ = lambda self: None

# Register the proxy class with the extension's type system.
SimSite_BuildingSite_swigregister = _SimSite_BuildingSite_Default.SimSite_BuildingSite_swigregister
SimSite_BuildingSite_swigregister(SimSite_BuildingSite)
class SimSite_BuildingSite_Default(SimSite_BuildingSite):
    """SWIG proxy for the ``SimSite.BuildingSite.Default`` leaf type."""
    __swig_setmethods__ = {}
    for _s in [SimSite_BuildingSite]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimSite_BuildingSite_Default, name, value)
    __swig_getmethods__ = {}
    for _s in [SimSite_BuildingSite]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimSite_BuildingSite_Default, name)
    __repr__ = _swig_repr

    def __init__(self, *args):
        this = _SimSite_BuildingSite_Default.new_SimSite_BuildingSite_Default(*args)
        try:
            self.this.append(this)
        except:
            self.this = this

    def _clone(self, f=0, c=None):
        return _SimSite_BuildingSite_Default.SimSite_BuildingSite_Default__clone(self, f, c)
    __swig_destroy__ = _SimSite_BuildingSite_Default.delete_SimSite_BuildingSite_Default
    __del__ = lambda self: None

# Register the proxy class with the extension's type system.
SimSite_BuildingSite_Default_swigregister = _SimSite_BuildingSite_Default.SimSite_BuildingSite_Default_swigregister
SimSite_BuildingSite_Default_swigregister(SimSite_BuildingSite_Default)
class SimSite_BuildingSite_Default_sequence(base.sequence_common):
    """SWIG proxy for a C++ sequence (vector-like container) of
    SimSite_BuildingSite_Default elements; method names mirror the STL."""
    __swig_setmethods__ = {}
    for _s in [base.sequence_common]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimSite_BuildingSite_Default_sequence, name, value)
    __swig_getmethods__ = {}
    for _s in [base.sequence_common]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimSite_BuildingSite_Default_sequence, name)
    __repr__ = _swig_repr

    def __init__(self, *args):
        this = _SimSite_BuildingSite_Default.new_SimSite_BuildingSite_Default_sequence(*args)
        try:
            self.this.append(this)
        except:
            self.this = this

    # STL-style container interface, all delegated to the extension.
    def assign(self, n, x):
        return _SimSite_BuildingSite_Default.SimSite_BuildingSite_Default_sequence_assign(self, n, x)

    def begin(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_BuildingSite_Default_sequence_begin(self, *args)

    def end(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_BuildingSite_Default_sequence_end(self, *args)

    def rbegin(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_BuildingSite_Default_sequence_rbegin(self, *args)

    def rend(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_BuildingSite_Default_sequence_rend(self, *args)

    def at(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_BuildingSite_Default_sequence_at(self, *args)

    def front(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_BuildingSite_Default_sequence_front(self, *args)

    def back(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_BuildingSite_Default_sequence_back(self, *args)

    def push_back(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_BuildingSite_Default_sequence_push_back(self, *args)

    def pop_back(self):
        return _SimSite_BuildingSite_Default.SimSite_BuildingSite_Default_sequence_pop_back(self)

    def detach_back(self, pop=True):
        return _SimSite_BuildingSite_Default.SimSite_BuildingSite_Default_sequence_detach_back(self, pop)

    def insert(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_BuildingSite_Default_sequence_insert(self, *args)

    def erase(self, *args):
        return _SimSite_BuildingSite_Default.SimSite_BuildingSite_Default_sequence_erase(self, *args)

    def detach(self, position, r, erase=True):
        return _SimSite_BuildingSite_Default.SimSite_BuildingSite_Default_sequence_detach(self, position, r, erase)

    def swap(self, x):
        return _SimSite_BuildingSite_Default.SimSite_BuildingSite_Default_sequence_swap(self, x)
    __swig_destroy__ = _SimSite_BuildingSite_Default.delete_SimSite_BuildingSite_Default_sequence
    __del__ = lambda self: None

# Register the proxy class with the extension's type system.
SimSite_BuildingSite_Default_sequence_swigregister = _SimSite_BuildingSite_Default.SimSite_BuildingSite_Default_sequence_swigregister
SimSite_BuildingSite_Default_sequence_swigregister(SimSite_BuildingSite_Default_sequence)
# This file is compatible with both classic and new-style classes.
| 38.516026 | 133 | 0.737289 | 1,333 | 12,017 | 6.109527 | 0.123781 | 0.244966 | 0.293713 | 0.182343 | 0.656803 | 0.62279 | 0.588777 | 0.422397 | 0.369352 | 0.319008 | 0 | 0.001728 | 0.18116 | 12,017 | 311 | 134 | 38.639871 | 0.825915 | 0.024465 | 0 | 0.334746 | 1 | 0 | 0.02493 | 0.004952 | 0 | 0 | 0 | 0 | 0 | 1 | 0.211864 | false | 0.008475 | 0.04661 | 0.177966 | 0.614407 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 6 |
dd3b8f23d468e97a09bbe2722a9534334d202c0c | 97 | py | Python | VideoxD/__init__.py | BlackVirusOfficial/VideoStreamBot | c51d78905c6400dc9199b88b8a8cef7033193404 | [
"MIT"
] | 5 | 2021-12-09T15:27:12.000Z | 2022-02-26T11:30:45.000Z | VideoxD/__init__.py | BlackVirusOfficial/VideoStreamBot | c51d78905c6400dc9199b88b8a8cef7033193404 | [
"MIT"
] | null | null | null | VideoxD/__init__.py | BlackVirusOfficial/VideoStreamBot | c51d78905c6400dc9199b88b8a8cef7033193404 | [
"MIT"
] | 12 | 2021-12-16T10:29:23.000Z | 2022-02-13T13:14:23.000Z | from misc import app, bot, HELP, Calls
from config import VIDEO_CHAT_ID
chat_id = VIDEO_CHAT_ID
| 24.25 | 38 | 0.804124 | 18 | 97 | 4.055556 | 0.611111 | 0.246575 | 0.30137 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.154639 | 97 | 3 | 39 | 32.333333 | 0.890244 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
dd990de244794b6b8a6dc1c4ef4df98fbe7556b3 | 38 | py | Python | sejongapi/settings/development.py | jamesj0918/sejongapi-v1 | 5945b959f7d4a97ec64ee40a28a797b30067421b | [
"MIT"
] | null | null | null | sejongapi/settings/development.py | jamesj0918/sejongapi-v1 | 5945b959f7d4a97ec64ee40a28a797b30067421b | [
"MIT"
] | 2 | 2020-02-11T23:35:52.000Z | 2020-06-05T19:55:32.000Z | sejongapi/settings/development.py | jamesj0918/sejongapi-v1 | 5945b959f7d4a97ec64ee40a28a797b30067421b | [
"MIT"
] | null | null | null | from sejongapi.settings.base import *
| 19 | 37 | 0.815789 | 5 | 38 | 6.2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.105263 | 38 | 1 | 38 | 38 | 0.911765 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
06e6f39adaf59f1b7757e3bf0579100b4a517303 | 70 | py | Python | antelope_core/catalog/__init__.py | AntelopeLCA/core | ee40685add52ba41a462e2147fe8c377c6ba2a80 | [
"BSD-3-Clause"
] | 1 | 2021-10-06T18:42:49.000Z | 2021-10-06T18:42:49.000Z | antelope_core/catalog/__init__.py | AntelopeLCA/core | ee40685add52ba41a462e2147fe8c377c6ba2a80 | [
"BSD-3-Clause"
] | 6 | 2021-01-09T08:56:46.000Z | 2022-03-29T08:26:21.000Z | antelope_core/catalog/__init__.py | AntelopeLCA/core | ee40685add52ba41a462e2147fe8c377c6ba2a80 | [
"BSD-3-Clause"
] | null | null | null | from .catalog import StaticCatalog
from .lc_catalog import LcCatalog
| 17.5 | 34 | 0.842857 | 9 | 70 | 6.444444 | 0.666667 | 0.448276 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.128571 | 70 | 3 | 35 | 23.333333 | 0.95082 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
664b52474da460892a6f7106fad6d896f7c1cbe5 | 3,528 | py | Python | src/generators/gmm.py | Arzik1987/prelim | 03d96558575cccff4ac85a3e8fb04944e34d1076 | [
"MIT"
] | 2 | 2021-10-16T06:25:30.000Z | 2021-10-21T13:11:00.000Z | src/generators/gmm.py | Arzik1987/prelim | 03d96558575cccff4ac85a3e8fb04944e34d1076 | [
"MIT"
] | null | null | null | src/generators/gmm.py | Arzik1987/prelim | 03d96558575cccff4ac85a3e8fb04944e34d1076 | [
"MIT"
] | null | null | null | import numpy as np
from sklearn.mixture import GaussianMixture
from sklearn.model_selection import GridSearchCV
class Gen_gmm:
    """Sample generator backed by a Gaussian mixture whose hyper-parameters
    are chosen by cross-validated grid search."""

    def __init__(self, params: dict = None, cv=5):
        # Fall back to the default search grid when none is supplied.
        default_grid = {
            "covariance_type": ["full", "tied", "diag", "spherical"],
            "n_components": list(range(1, 31)),
        }
        self.params_ = default_grid if params is None else params
        self.model_ = None
        self.cv_ = cv

    def fit(self, X, y=None, metamodel=None):
        """Grid-search over the parameter grid and keep the best mixture."""
        search = GridSearchCV(GaussianMixture(), self.params_, cv=self.cv_)
        self.model_ = search.fit(X).best_estimator_
        return self

    def sample(self, n_samples=1, X_new=None):
        # GaussianMixture.sample returns (samples, labels); keep samples only.
        return self.model_.sample(n_samples)[0]

    def my_name(self):
        return "gmmcv"
class Gen_gmmbic:
    """Sample generator backed by the Gaussian mixture with the lowest BIC
    over a grid of covariance types and component counts."""

    def __init__(self, params: dict = None, cv=None):
        # `cv` is unused; accepted only for interface parity with Gen_gmm.
        if params is None:
            self.params_ = {
                "covariance_type": ["full", "tied", "diag", "spherical"],
                "n_components": list(range(1, 31)),
            }
        else:
            self.params_ = params
        self.model_ = None

    def fit(self, X, y=None, metamodel=None):
        """Fit one GMM per grid point and keep the one with the lowest BIC.

        See https://scikit-learn.org/stable/auto_examples/mixture/plot_gmm_selection.html

        Raises:
            ValueError: if the parameter grid is empty (previously this
                surfaced as an obscure NameError on `best_gmm`).
        """
        # np.inf instead of np.infty: the `infty` alias was removed in NumPy 2.0.
        lowest_bic = np.inf
        best_gmm = None
        for cv_type in self.params_['covariance_type']:
            for n_components in self.params_['n_components']:
                gmm = GaussianMixture(n_components=n_components,
                                      covariance_type=cv_type)
                gmm.fit(X)
                bic = gmm.bic(X)
                if bic < lowest_bic:
                    lowest_bic = bic
                    best_gmm = gmm
        if best_gmm is None:
            raise ValueError("empty parameter grid; no model was fitted")
        self.model_ = best_gmm
        return self

    def sample(self, n_samples=1):
        # GaussianMixture.sample returns (samples, labels); keep samples only.
        return self.model_.sample(n_samples)[0]

    def my_name(self):
        return "gmm"
class Gen_gmmbical:
    """Like Gen_gmmbic but restricted to diagonal covariance, so only the
    number of components is searched (lowest BIC wins)."""

    def __init__(self, params: dict = None, cv=None):
        # `cv` is unused; accepted only for interface parity with Gen_gmm.
        if params is None:
            self.params_ = {"n_components": list(range(1, 31))}
        else:
            self.params_ = params
        # Consistency fix: the sibling generators initialise model_ here too,
        # so sample()-before-fit() fails uniformly across classes.
        self.model_ = None

    def fit(self, X, y=None, metamodel=None):
        """Fit one diagonal-covariance GMM per component count, keep lowest BIC.

        See https://scikit-learn.org/stable/auto_examples/mixture/plot_gmm_selection.html

        Raises:
            ValueError: if the parameter grid is empty (previously this
                surfaced as an obscure NameError on `best_gmm`).
        """
        # np.inf instead of np.infty: the `infty` alias was removed in NumPy 2.0.
        lowest_bic = np.inf
        best_gmm = None
        for n_components in self.params_['n_components']:
            gmm = GaussianMixture(n_components=n_components, covariance_type="diag")
            gmm.fit(X)
            bic = gmm.bic(X)
            if bic < lowest_bic:
                lowest_bic = bic
                best_gmm = gmm
        if best_gmm is None:
            raise ValueError("empty parameter grid; no model was fitted")
        self.model_ = best_gmm
        return self

    def sample(self, n_samples=1):
        # GaussianMixture.sample returns (samples, labels); keep samples only.
        return self.model_.sample(n_samples)[0]

    def my_name(self):
        return "gmmal"
# =============================================================================
# TEST
# mean = [0, 0]
# cov = [[1, 0], [0, 1]]
# x = np.random.multivariate_normal(mean, cov, 500)
# mean = [5, 5]
# x = np.vstack((x,np.random.multivariate_normal(mean, cov, 500)))
# import matplotlib.pyplot as plt
# plt.scatter(x[:,0], x[:,1])
# plt.show()
#
# gmm = Gen_gmm()
# gmm.fit(x)
# df = gmm.sample(n_samples = 201)
# plt.scatter(df[:,0], df[:,1])
# plt.show()
#
# gmm = Gen_gmmbic()
# gmm.fit(x)
# df = gmm.sample(n_samples = 201)
# plt.scatter(df[:,0], df[:,1])
# plt.show()
#
# gmm = Gen_gmmbical()
# gmm.fit(x)
# df = gmm.sample(n_samples = 201)
# plt.scatter(df[:,0], df[:,1])
# plt.show()
# =============================================================================
| 27.779528 | 105 | 0.544501 | 443 | 3,528 | 4.133183 | 0.189616 | 0.070999 | 0.045877 | 0.027854 | 0.797925 | 0.790279 | 0.790279 | 0.775533 | 0.701802 | 0.701802 | 0 | 0.018606 | 0.284014 | 3,528 | 126 | 106 | 28 | 0.706255 | 0.247166 | 0 | 0.720588 | 0 | 0 | 0.062572 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.176471 | false | 0 | 0.044118 | 0.088235 | 0.397059 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.